action_ai 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/CHANGELOG.md +23 -0
- data/MIT-LICENSE +21 -0
- data/README.rdoc +107 -0
- data/lib/action_ai/agent.rb +465 -0
- data/lib/action_ai/callbacks.rb +31 -0
- data/lib/action_ai/deprecator.rb +7 -0
- data/lib/action_ai/execution_job.rb +42 -0
- data/lib/action_ai/interaction.rb +105 -0
- data/lib/action_ai/log_subscriber.rb +28 -0
- data/lib/action_ai/parameterized.rb +121 -0
- data/lib/action_ai/preview.rb +90 -0
- data/lib/action_ai/prompt_helper.rb +82 -0
- data/lib/action_ai/queued_execution.rb +12 -0
- data/lib/action_ai/railtie.rb +74 -0
- data/lib/action_ai/rescuable.rb +33 -0
- data/lib/action_ai/test_case.rb +96 -0
- data/lib/action_ai/test_helper.rb +272 -0
- data/lib/action_ai/version.rb +5 -0
- data/lib/action_ai.rb +72 -0
- data/lib/rails/generators/ai/USAGE +20 -0
- data/lib/rails/generators/ai/ai_generator.rb +32 -0
- data/lib/rails/generators/ai/templates/agent.rb.tt +11 -0
- data/lib/rails/generators/ai/templates/application_agent.rb.tt +5 -0
- data/lib/ruby_llm/providers/test/echo.rb +47 -0
- data/lib/ruby_llm/providers/test.rb +40 -0
- data/lib/ruby_llm/tester.rb +40 -0
- metadata +169 -0
|
@@ -0,0 +1,272 @@
|
|
|
1
|
+
# frozen_string_literal: true

require "active_support/core_ext/array/extract_options"
require "active_job"
require "ruby_llm/tester"

# Route all LLM traffic through the deterministic test provider registered by
# ruby_llm/tester. The "echo" model returns the prompt content back as the
# assistant response, so tests stay offline and deterministic.
RubyLLM.configure do
  it.default_model = "echo"
end
|
|
10
|
+
|
|
11
|
+
module ActionAI
  # Exposes the list of prompts that have been executed, as recorded by
  # RubyLLM::Tester, via ActionAI.interactions.
  singleton_class.delegate :interactions, to: RubyLLM::Tester

  # Provides helper methods for testing Action AI, including #assert_ai_prompts
  # and #assert_no_ai_prompts.
  module TestHelper
    include ActiveJob::TestHelper

    # Asserts that the number of AI prompts executed matches the given number.
    #
    #   def test_prompts
    #     assert_ai_prompts 0
    #     Generator.code(task).run
    #     assert_ai_prompts 1
    #     Generator.code(task).run
    #     assert_ai_prompts 2
    #   end
    #
    # If a block is passed, that block should cause the specified number of
    # prompts to be executed.
    #
    #   def test_ai_prompts_again
    #     assert_ai_prompts 1 do
    #       Generator.code(task).run
    #     end
    #
    #     assert_ai_prompts 2 do
    #       Generator.code(task).run
    #       Generator.code(task).later
    #     end
    #   end
    def assert_ai_prompts(number, &block)
      if block_given?
        # Count only the prompts executed during the block; enqueued jobs are
        # drained too, so #later calls are included in the count.
        diff = capture_ai_prompts(&block).length
        assert_equal number, diff, "#{number} prompts expected, but #{diff} were executed"
      else
        # Without a block, compare against the total executed so far in the test.
        assert_equal number, ActionAI.interactions.size
      end
    end

    # Asserts that no AI prompts have been executed.
    #
    #   def test_prompts
    #     assert_no_ai_prompts
    #     Generator.code(task).run
    #     assert_ai_prompts 1
    #   end
    #
    # If a block is passed, that block should not cause any prompts to be executed.
    #
    #   def test_prompts_again
    #     assert_no_ai_prompts do
    #       # No prompts should be executed from this block
    #     end
    #   end
    #
    # Note: This assertion is simply a shortcut for:
    #
    #   assert_ai_prompts 0, &block
    def assert_no_ai_prompts(&block)
      assert_ai_prompts 0, &block
    end

    # Asserts that the number of AI jobs enqueued for later processing matches
    # the given number.
    #
    #   def test_jobs
    #     assert_enqueued_ai_jobs 0
    #     Generator.code(task).later
    #     assert_enqueued_ai_jobs 1
    #     Generator.code(task).later
    #     assert_enqueued_ai_jobs 2
    #   end
    #
    # If a block is passed, that block should cause the specified number of
    # jobs to be enqueued.
    #
    #   def test_jobs_again
    #     assert_enqueued_ai_jobs 1 do
    #       Generator.code(task).later
    #     end
    #
    #     assert_enqueued_ai_jobs 2 do
    #       Generator.code(task).later
    #       Generator.code(task).later
    #     end
    #   end
    def assert_enqueued_ai_jobs(number, &block)
      assert_enqueued_jobs(number, only: ->(job) { ai_job_filter(job) }, &block)
    end

    # Asserts that a specific AI job has been enqueued, optionally
    # matching arguments and/or params.
    #
    #   def test_job
    #     Generator.code.later
    #     assert_enqueued_ai_job_with Generator, :code
    #   end
    #
    #   def test_job_with_parameters
    #     Generator.with(context: "MVP").later
    #     assert_enqueued_ai_job_with Generator, :code, params: { context: "MVP" }
    #   end
    #
    #   def test_job_with_arguments
    #     Generator.code(task).later
    #     assert_enqueued_ai_job_with Generator, :code, args: [task]
    #   end
    #
    #   def test_job_with_named_arguments
    #     Generator.code(task:).later
    #     assert_enqueued_ai_job_with Generator, :code, args: [{task:}]
    #   end
    #
    #   def test_job_with_parameters_and_arguments
    #     Generator.with(context: "MVP").code(task).later
    #     assert_enqueued_ai_job_with Generator, :code, params: { context: "MVP" }, args: [task]
    #   end
    #
    #   def test_job_with_parameters_and_named_arguments
    #     Generator.with(context: "MVP").code(task:).later
    #     assert_enqueued_ai_job_with Generator, :code, params: { context: "MVP" }, args: [{task:}]
    #   end
    #
    #   def test_job_with_parameterized_agent
    #     Generator.with(context: "MVP").code.later
    #     assert_enqueued_ai_job_with Generator.with(context: "MVP"), :code
    #   end
    #
    #   def test_job_with_matchers
    #     Generator.with(context: "MVP").code(task).later
    #     assert_enqueued_ai_job_with Generator, :code,
    #       params: ->(params) { /mvp/i.match?(params[:context]) },
    #       args: ->(args) { task == args[0] }
    #   end
    #
    # If a block is passed, that block should cause the specified job
    # to be enqueued.
    #
    #   def test_job_in_block
    #     assert_enqueued_ai_job_with Generator, :code do
    #       Generator.code(task).later
    #     end
    #   end
    #
    # If +args+ is provided as a Hash, a parameterized job is matched.
    #
    #   def test_parameterized_job
    #     assert_enqueued_ai_job_with Generator, :code,
    #       args: {context: "MVP"} do
    #       Generator.with(context: "MVP").code.later
    #     end
    #   end
    def assert_enqueued_ai_job_with(agent, method, params: nil, args: nil, queue: nil, &block)
      # A parameterized agent wrapper carries its params; unwrap both before matching.
      if agent.is_a? ActionAI::Parameterized::Agent
        params = agent.instance_variable_get(:@params)
        agent = agent.instance_variable_get(:@agent)
      end

      # Procs are kept as matchers; anything else is normalized to an Array.
      args = Array(args) unless args.is_a?(Proc)
      queue ||= agent.execute_later_queue_name || ActiveJob::Base.default_queue_name

      # Matching relies on #===: Proc matchers are invoked with the job's
      # value, plain values are compared with equality, and nil === nil holds
      # for options that were omitted on both sides.
      expected = ->(job_args) do
        job_kwargs = job_args.extract_options!

        [agent.to_s, method.to_s] == job_args &&
          params === job_kwargs[:params] && args === job_kwargs[:args]
      end

      assert_enqueued_with(job: agent.execution_job, args: expected, queue: queue.to_s, &block)
    end

    # Asserts that no AI jobs are enqueued for later processing.
    #
    #   def test_no_jobs
    #     assert_no_enqueued_ai_jobs
    #     Generator.code(task).later
    #     assert_enqueued_ai_jobs 1
    #   end
    #
    # If a block is provided, it should not cause any AI jobs to be enqueued.
    #
    #   def test_no_jobs
    #     assert_no_enqueued_ai_jobs do
    #       # No AI jobs should be enqueued from this block
    #     end
    #   end
    def assert_no_enqueued_ai_jobs(&block)
      assert_enqueued_ai_jobs 0, &block
    end

    # Executes all enqueued AI jobs. If a block is given, executes all of the jobs
    # that were enqueued throughout the duration of the block. If a block is
    # not given, executes all the enqueued jobs up to this point in the test.
    #
    #   def test_execute_enqueued_jobs
    #     perform_enqueued_ai_jobs do
    #       Generator.code(task).later
    #     end
    #
    #     assert_ai_prompts 1
    #   end
    #
    #   def test_execute_enqueued_jobs_without_block
    #     Generator.code(task).later
    #
    #     perform_enqueued_ai_jobs
    #
    #     assert_ai_prompts 1
    #   end
    #
    # If the +:queue+ option is specified,
    # then only the prompts enqueued to a specific queue will be performed.
    #
    #   def test_execute_enqueued_jobs_with_queue
    #     perform_enqueued_ai_jobs queue: :external_agents do
    #       Generator.execute_later_queue_name = :external_agents
    #       Generator.code(task).later # will be performed
    #       Notifier.execute_later_queue_name = :internal_agents
    #       Notifier.welcome.later # will not be performed
    #     end
    #
    #     assert_ai_prompts 1
    #   end
    #
    # If the +:at+ option is specified, then only executes prompts enqueued to execution
    # immediately or before the given time.
    def perform_enqueued_ai_jobs(queue: nil, at: nil, &block)
      perform_enqueued_jobs(only: ->(job) { ai_job_filter(job) }, queue: queue, at: at, &block)
    end

    # Returns any AI prompts that are executed in the block.
    #
    #   def test_ai_prompts
    #     prompts = capture_ai_prompts do
    #       Generator.code(task).content
    #     end
    #     assert_match /Write .*code/, prompts.first.content
    #
    #     prompts = capture_ai_prompts do
    #       Generator.code(task).content
    #       Generator.code(task).later
    #     end
    #     assert_match /Write .*code/, prompts.first.content
    #   end
    def capture_ai_prompts(&block)
      original_count = ActionAI.interactions.size
      # Run the block (if any) and drain all enqueued AI jobs, so prompts
      # scheduled via #later are captured as well.
      perform_enqueued_ai_jobs(&block)
      new_count = ActionAI.interactions.size
      diff = new_count - original_count
      # The newly-executed prompts are the tail of the interaction log.
      ActionAI.interactions.last(diff)
    end

    private
      # Matches jobs belonging to any Agent subclass. Accepts both job
      # instances and the Hash form that ActiveJob's test helpers pass around.
      def ai_job_filter(job)
        job_class = job.is_a?(Hash) ? job.fetch(:job) : job.class

        Agent.descendants.map(&:execution_job).include?(job_class)
      end
  end
end
|
data/lib/action_ai.rb
ADDED
|
@@ -0,0 +1,72 @@
|
|
|
1
|
+
# frozen_string_literal: true
|
|
2
|
+
|
|
3
|
+
#--
|
|
4
|
+
# Copyright (c) David Heinemeier Hansson
|
|
5
|
+
#
|
|
6
|
+
# Permission is hereby granted, free of charge, to any person obtaining
|
|
7
|
+
# a copy of this software and associated documentation files (the
|
|
8
|
+
# "Software"), to deal in the Software without restriction, including
|
|
9
|
+
# without limitation the rights to use, copy, modify, merge, publish,
|
|
10
|
+
# distribute, sublicense, and/or sell copies of the Software, and to
|
|
11
|
+
# permit persons to whom the Software is furnished to do so, subject to
|
|
12
|
+
# the following conditions:
|
|
13
|
+
#
|
|
14
|
+
# The above copyright notice and this permission notice shall be
|
|
15
|
+
# included in all copies or substantial portions of the Software.
|
|
16
|
+
#
|
|
17
|
+
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
|
|
18
|
+
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
|
|
19
|
+
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
|
|
20
|
+
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
|
|
21
|
+
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
|
|
22
|
+
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
|
|
23
|
+
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
|
24
|
+
#++
|
|
25
|
+
|
|
26
|
+
require "abstract_controller"
|
|
27
|
+
require "action_ai/version"
|
|
28
|
+
require "action_ai/deprecator"
|
|
29
|
+
|
|
30
|
+
# Common Active Support usage in Action AI
|
|
31
|
+
require "active_support"
|
|
32
|
+
require "active_support/rails"
|
|
33
|
+
require "active_support/core_ext/class"
|
|
34
|
+
require "active_support/core_ext/module/attr_internal"
|
|
35
|
+
require "active_support/core_ext/string/inflections"
|
|
36
|
+
require "active_support/lazy_load_hooks"
|
|
37
|
+
|
|
38
|
+
# :include: ../README.rdoc
|
|
39
|
+
module ActionAI
  extend ::ActiveSupport::Autoload

  # Lazily loaded building blocks of the framework. Registration order is
  # irrelevant: each constant loads on first reference.
  autoload :Agent
  autoload :Callbacks
  autoload :ExecutionJob
  autoload :Interaction
  autoload :Parameterized
  autoload :Preview
  autoload :Previews, "action_ai/preview"
  autoload :PromptHelper
  autoload :QueuedExecution
  autoload :TestCase
  autoload :TestHelper

  # Forces everything to load up front: the framework's own autoloads (via
  # super), RubyLLM, and every concrete Agent subclass.
  def self.eager_load!
    super

    require "ruby_llm"
    RubyLLM.eager_autoload!

    concrete_agents = Agent.descendants.reject(&:abstract?)
    concrete_agents.each(&:eager_load!)
  end
end
|
|
65
|
+
|
|
66
|
+
# Mime types are needed to resolve prompt template formats; loaded lazily
# from Action Dispatch.
autoload :Mime, "action_dispatch/http/mime_type"

# NOTE(review): this mirrors the Action View wiring Rails performs for
# Action Mailer — confirm it stays in sync with upstream Rails when bumping
# the dependency.
ActiveSupport.on_load(:action_view) do
  ActionView::Base.default_formats ||= Mime::SET.symbols
  ActionView::Template.mime_types_implementation = Mime
  ActionView::LookupContext::DetailsKey.clear
end
|
|
@@ -0,0 +1,20 @@
|
|
|
1
|
+
Description:
|
|
2
|
+
Generates a new AI agent and its prompts. Pass the agent name, either
CamelCased or under_scored, and an optional list of prompts as arguments.
|
|
4
|
+
|
|
5
|
+
This generates an agent class in app/ai/agents and invokes your template
|
|
6
|
+
engine and test framework generators.
|
|
7
|
+
|
|
8
|
+
Examples:
|
|
9
|
+
`bin/rails generate ai generator`
|
|
10
|
+
|
|
11
|
+
creates a generator agent class, prompts, and test:
|
|
12
|
+
Agent: app/ai/agents/generator.rb
|
|
13
|
+
Views: app/ai/prompts/generator/code.erb [...]
|
|
14
|
+
Test: test/ai/agents/generator_test.rb
|
|
15
|
+
|
|
16
|
+
`bin/rails generate ai generator text image video`
|
|
17
|
+
|
|
18
|
+
creates a generator agent with text, image, and video actions.
|
|
19
|
+
|
|
20
|
+
|
|
@@ -0,0 +1,32 @@
|
|
|
1
|
+
# frozen_string_literal: true
|
|
2
|
+
|
|
3
|
+
module Rails
  module Generators
    # `bin/rails generate ai NAME [action ...]` — generates an AI agent class
    # under app/ai/agents and, on first use, the shared application agent base
    # class. Template-engine and test-framework generators are hooked in so
    # prompt templates and tests are generated alongside the agent.
    class AIGenerator < NamedBase
      source_root File.expand_path("templates", __dir__)

      # Optional list of agent action (prompt) names,
      # e.g. `bin/rails generate ai generator text image video`.
      argument :actions, type: :array, default: [], banner: "method method"

      def create_agent_file
        template "agent.rb", File.join("app/ai/agents", class_path, "#{file_name}.rb")

        in_root do
          # Only create the base class when generating (not destroying) and
          # when it does not already exist.
          if behavior == :invoke && !File.exist?(application_agent_file_name)
            template "application_agent.rb", application_agent_file_name
          end
        end
      end

      hook_for :template_engine, :test_framework

      private
        # NOTE(review): the template is application_agent.rb.tt but the
        # generated file is named application_ai.rb — confirm the class the
        # template defines matches this file name, or Zeitwerk autoloading
        # of the generated base class will fail.
        def application_agent_file_name
          @_application_agent_file_name ||= if mountable_engine?
            "app/ai/agents/#{namespaced_path}/application_ai.rb"
          else
            "app/ai/agents/application_ai.rb"
          end
        end
    end
  end
end
|
|
@@ -0,0 +1,47 @@
|
|
|
1
|
+
# frozen_string_literal: true
|
|
2
|
+
|
|
3
|
+
module RubyLLM
  module Providers
    # Echo model behavior for the RubyLLM Test provider.
    #
    # Builds a deterministic response (#echo_response) by returning the
    # incoming prompt content unchanged.
    #
    # It is intended to be mixed into `RubyLLM::Providers::Test`.
    module Test::Echo
      # Model identifier used to route requests to this behavior.
      def self.id
        "echo"
      end

      # Static model metadata for the echo model: text in, text out, with the
      # Test provider's slug and capabilities.
      def self.info
        Model::Info.new(
          id: id,
          name: name,
          provider: Test.slug,
          capabilities: Test.capabilities,

          modalities: {
            input: %w[text],
            output: %w[text],
          },
        )
      end

      private

      # Echoes the combined prompt content back as the response payload.
      def echo_response(messages, **)
        { content: RubyLLM.concat_content(messages.map(&:content)) }
      end
    end
  end

  # Merges a mixed list of Strings and Content objects into a single Content:
  # non-empty text pieces are joined with blank lines, and attachments from
  # the Content objects are carried over.
  def self.concat_content(*parts)
    plain_strings, rich_contents = parts.flatten.partition { |part| part.is_a?(String) }

    merged_text = [*plain_strings, *rich_contents.map(&:text)]
      .reject(&:empty?)
      .join("\n\n")
    merged_attachments = rich_contents.sum([], &:attachments).map(&:source)

    Content.new(merged_text, merged_attachments)
  end
end
|
|
@@ -0,0 +1,40 @@
|
|
|
1
|
+
# frozen_string_literal: true
|
|
2
|
+
|
|
3
|
+
require "ruby_llm"
|
|
4
|
+
|
|
5
|
+
module RubyLLM
  module Providers
    # In-memory provider intended for tests.
    #
    # This provider mirrors the behavior of a local fake adapter:
    # - it does not initialize any remote connections,
    # - it returns deterministic responses based on model-specific helpers
    #   (for example, `echo_response` from `Test::Echo`).
    class Test < Provider
      autoload :Echo, "ruby_llm/providers/test/echo"

      include Echo

      # Local providers need no API credentials.
      def self.local?
        true
      end

      # Deliberately skips Provider#initialize:
      # configuration not needed, skip any connections.
      def initialize(...)
      end

      # Dispatches to the model-specific "<id>_response" helper (for example
      # #echo_response) and wraps its payload in an assistant Message.
      # Streams the content to the block, if one is given.
      def complete(messages, model:, **)
        reply = Message.new(
          role: :assistant,
          model_id: model.id,

          **send("#{model.id}_response", messages)
        )

        yield reply.content if block_given?
        reply
      end

      # Only the echo model is available from this provider.
      def list_models
        [Echo].map(&:info)
      end
    end
  end
end
|
|
@@ -0,0 +1,40 @@
|
|
|
1
|
+
# frozen_string_literal: true
|
|
2
|
+
|
|
3
|
+
require "ruby_llm/providers/test"
|
|
4
|
+
|
|
5
|
+
module RubyLLM
  # == Testing with RubyLLM::Tester
  #
  # For tests, require +ruby_llm/tester+ to switch to an in-memory provider.
  # It behaves like +Mail::TestMailer+ by collecting prompt interactions in an array:
  #
  #   require "ruby_llm/tester"
  #
  #   RubyLLM::Tester.interactions.clear
  #
  #   RubyLLM.chat.say "Hello!"
  #
  #   RubyLLM::Tester.interactions.size # => 1
  #   RubyLLM::Tester.interactions.last.role # => :user
  #   RubyLLM::Tester.interactions.last.content # => "Hello!"
  #
  # The test provider uses an +echo+ model and returns the prompt content back as
  # the assistant response, so assertions stay deterministic and offline.
  module Tester
    # Flat array of every message seen by the recording test provider.
    mattr_reader :interactions, default: []

    # Appends the given message(s) to the interaction log.
    def self.register(*messages)
      interactions.concat(messages.flatten)
    end
  end

  # Anonymous subclass of the test provider that records every prompt before
  # answering. Brace block intentionally binds to Class.new.
  recording_provider = Class.new(Providers::Test) {
    def complete(messages, ...)
      Tester.register messages

      super
    end
  }

  Provider.register :test, recording_provider

  models.refresh!
end
|