llm.rb 4.20.2 → 4.22.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/CHANGELOG.md +70 -0
- data/README.md +286 -52
- data/data/anthropic.json +35 -2
- data/data/google.json +7 -2
- data/data/openai.json +0 -30
- data/lib/llm/active_record/acts_as_agent.rb +11 -64
- data/lib/llm/active_record/acts_as_llm.rb +81 -61
- data/lib/llm/agent.rb +28 -4
- data/lib/llm/context.rb +14 -0
- data/lib/llm/sequel/agent.rb +94 -0
- data/lib/llm/sequel/plugin.rb +82 -60
- data/lib/llm/skill.rb +131 -0
- data/lib/llm/version.rb +1 -1
- data/lib/llm.rb +1 -0
- data/lib/sequel/plugins/agent.rb +8 -0
- data/llm.gemspec +3 -0
- metadata +46 -1
data/lib/llm/sequel/plugin.rb
CHANGED
|
@@ -22,6 +22,76 @@ module LLM::Sequel
|
|
|
22
22
|
output_tokens: :output_tokens,
|
|
23
23
|
total_tokens: :total_tokens
|
|
24
24
|
}.freeze
|
|
25
|
+
|
|
26
|
+
##
|
|
27
|
+
# Shared helper methods for the ORM wrapper.
|
|
28
|
+
#
|
|
29
|
+
# These utilities keep persistence plumbing out of the wrapped model's
|
|
30
|
+
# method namespace so the injected surface stays focused on the runtime
|
|
31
|
+
# API itself.
|
|
32
|
+
# @api private
|
|
33
|
+
module Utils
|
|
34
|
+
##
|
|
35
|
+
# Resolves a single configured option against a model instance.
|
|
36
|
+
# @return [Object]
|
|
37
|
+
def self.resolve_option(obj, option)
|
|
38
|
+
case option
|
|
39
|
+
when Proc then obj.instance_exec(&option)
|
|
40
|
+
when Symbol then obj.send(option)
|
|
41
|
+
when Hash then option.dup
|
|
42
|
+
else option
|
|
43
|
+
end
|
|
44
|
+
end
|
|
45
|
+
|
|
46
|
+
##
|
|
47
|
+
# Resolves hash-like wrapper options against a model instance.
|
|
48
|
+
# @return [Hash]
|
|
49
|
+
def self.resolve_options(obj, option, empty_hash)
|
|
50
|
+
case option
|
|
51
|
+
when Proc, Symbol, Hash then resolve_option(obj, option)
|
|
52
|
+
else empty_hash.dup
|
|
53
|
+
end
|
|
54
|
+
end
|
|
55
|
+
|
|
56
|
+
##
|
|
57
|
+
# Serializes the runtime into the configured storage format.
|
|
58
|
+
# @return [String, Hash]
|
|
59
|
+
def self.serialize_context(ctx, format)
|
|
60
|
+
case format
|
|
61
|
+
when :string then ctx.to_json
|
|
62
|
+
when :json, :jsonb then ctx.to_h
|
|
63
|
+
else raise ArgumentError, "Unknown format: #{format.inspect}"
|
|
64
|
+
end
|
|
65
|
+
end
|
|
66
|
+
|
|
67
|
+
##
|
|
68
|
+
# Maps wrapper options onto the record's storage columns.
|
|
69
|
+
# @return [Hash]
|
|
70
|
+
def self.columns(options)
|
|
71
|
+
usage_columns = options[:usage_columns]
|
|
72
|
+
{
|
|
73
|
+
provider_column: options[:provider_column],
|
|
74
|
+
model_column: options[:model_column],
|
|
75
|
+
data_column: options[:data_column],
|
|
76
|
+
input_tokens: usage_columns[:input_tokens],
|
|
77
|
+
output_tokens: usage_columns[:output_tokens],
|
|
78
|
+
total_tokens: usage_columns[:total_tokens]
|
|
79
|
+
}.freeze
|
|
80
|
+
end
|
|
81
|
+
|
|
82
|
+
##
|
|
83
|
+
# Persists the runtime state and usage columns back onto the record.
|
|
84
|
+
# @return [void]
|
|
85
|
+
def self.save(obj, ctx, options)
|
|
86
|
+
columns = self.columns(options)
|
|
87
|
+
obj.update(
|
|
88
|
+
columns[:data_column] => serialize_context(ctx, options[:format]),
|
|
89
|
+
columns[:input_tokens] => ctx.usage.input_tokens,
|
|
90
|
+
columns[:output_tokens] => ctx.usage.output_tokens,
|
|
91
|
+
columns[:total_tokens] => ctx.usage.total_tokens
|
|
92
|
+
)
|
|
93
|
+
end
|
|
94
|
+
end
|
|
25
95
|
DEFAULTS = {
|
|
26
96
|
provider_column: :provider,
|
|
27
97
|
model_column: :model,
|
|
@@ -84,12 +154,15 @@ module LLM::Sequel
|
|
|
84
154
|
end
|
|
85
155
|
|
|
86
156
|
module Plugin::InstanceMethods
|
|
157
|
+
Utils = Plugin::Utils
|
|
158
|
+
|
|
87
159
|
##
|
|
88
160
|
# Continues the stored context with new input and flushes it.
|
|
89
161
|
# @see LLM::Context#talk
|
|
90
162
|
# @return [LLM::Response]
|
|
91
163
|
def talk(...)
|
|
92
|
-
|
|
164
|
+
options = self.class.llm_plugin_options
|
|
165
|
+
ctx.talk(...).tap { Utils.save(self, ctx, options) }
|
|
93
166
|
end
|
|
94
167
|
|
|
95
168
|
##
|
|
@@ -97,7 +170,8 @@ module LLM::Sequel
|
|
|
97
170
|
# @see LLM::Context#respond
|
|
98
171
|
# @return [LLM::Response]
|
|
99
172
|
def respond(...)
|
|
100
|
-
|
|
173
|
+
options = self.class.llm_plugin_options
|
|
174
|
+
ctx.respond(...).tap { Utils.save(self, ctx, options) }
|
|
101
175
|
end
|
|
102
176
|
|
|
103
177
|
##
|
|
@@ -173,6 +247,7 @@ module LLM::Sequel
|
|
|
173
247
|
# Returns usage from the mapped usage columns.
|
|
174
248
|
# @return [LLM::Object]
|
|
175
249
|
def usage
|
|
250
|
+
columns = Utils.columns(self.class.llm_plugin_options)
|
|
176
251
|
LLM::Object.from(
|
|
177
252
|
input_tokens: self[columns[:input_tokens]] || 0,
|
|
178
253
|
output_tokens: self[columns[:output_tokens]] || 0,
|
|
@@ -229,11 +304,12 @@ module LLM::Sequel
|
|
|
229
304
|
# @return [LLM::Provider]
|
|
230
305
|
def llm
|
|
231
306
|
options = self.class.llm_plugin_options
|
|
307
|
+
columns = Utils.columns(options)
|
|
232
308
|
provider = self[columns[:provider_column]]
|
|
233
|
-
kwargs = resolve_options(options[:provider])
|
|
309
|
+
kwargs = Utils.resolve_options(self, options[:provider], Plugin::EMPTY_HASH)
|
|
234
310
|
return @llm if @llm
|
|
235
311
|
@llm = LLM.method(provider).call(**kwargs)
|
|
236
|
-
@llm.tracer = resolve_option(options[:tracer]) if options[:tracer]
|
|
312
|
+
@llm.tracer = Utils.resolve_option(self, options[:tracer]) if options[:tracer]
|
|
237
313
|
@llm
|
|
238
314
|
end
|
|
239
315
|
|
|
@@ -244,7 +320,8 @@ module LLM::Sequel
|
|
|
244
320
|
def ctx
|
|
245
321
|
@ctx ||= begin
|
|
246
322
|
options = self.class.llm_plugin_options
|
|
247
|
-
|
|
323
|
+
columns = Utils.columns(options)
|
|
324
|
+
params = Utils.resolve_options(self, options[:context], Plugin::EMPTY_HASH).dup
|
|
248
325
|
params[:model] ||= self[columns[:model_column]]
|
|
249
326
|
ctx = LLM::Context.new(llm, params.compact)
|
|
250
327
|
data = self[columns[:data_column]]
|
|
@@ -259,60 +336,5 @@ module LLM::Sequel
|
|
|
259
336
|
end
|
|
260
337
|
end
|
|
261
338
|
end
|
|
262
|
-
|
|
263
|
-
##
|
|
264
|
-
# @return [void]
|
|
265
|
-
def flush
|
|
266
|
-
options = self.class.llm_plugin_options
|
|
267
|
-
update({
|
|
268
|
-
columns[:data_column] => serialize_context(options[:format]),
|
|
269
|
-
columns[:input_tokens] => ctx.usage.input_tokens,
|
|
270
|
-
columns[:output_tokens] => ctx.usage.output_tokens,
|
|
271
|
-
columns[:total_tokens] => ctx.usage.total_tokens
|
|
272
|
-
})
|
|
273
|
-
end
|
|
274
|
-
|
|
275
|
-
##
|
|
276
|
-
# @return [Hash]
|
|
277
|
-
def resolve_option(option)
|
|
278
|
-
case option
|
|
279
|
-
when Proc then instance_exec(&option)
|
|
280
|
-
when Symbol then send(option)
|
|
281
|
-
when Hash then option.dup
|
|
282
|
-
else option
|
|
283
|
-
end
|
|
284
|
-
end
|
|
285
|
-
|
|
286
|
-
##
|
|
287
|
-
# @return [Hash]
|
|
288
|
-
def resolve_options(option)
|
|
289
|
-
case option
|
|
290
|
-
when Proc, Symbol, Hash then resolve_option(option)
|
|
291
|
-
else Plugin::EMPTY_HASH.dup
|
|
292
|
-
end
|
|
293
|
-
end
|
|
294
|
-
|
|
295
|
-
def serialize_context(format)
|
|
296
|
-
case format
|
|
297
|
-
when :string then ctx.to_json
|
|
298
|
-
when :json, :jsonb then ctx.to_h
|
|
299
|
-
else raise ArgumentError, "Unknown format: #{format.inspect}"
|
|
300
|
-
end
|
|
301
|
-
end
|
|
302
|
-
|
|
303
|
-
def columns
|
|
304
|
-
@columns ||= begin
|
|
305
|
-
options = self.class.llm_plugin_options
|
|
306
|
-
usage_columns = options[:usage_columns]
|
|
307
|
-
{
|
|
308
|
-
provider_column: options[:provider_column],
|
|
309
|
-
model_column: options[:model_column],
|
|
310
|
-
data_column: options[:data_column],
|
|
311
|
-
input_tokens: usage_columns[:input_tokens],
|
|
312
|
-
output_tokens: usage_columns[:output_tokens],
|
|
313
|
-
total_tokens: usage_columns[:total_tokens]
|
|
314
|
-
}.freeze
|
|
315
|
-
end
|
|
316
|
-
end
|
|
317
339
|
end
|
|
318
340
|
end
|
data/lib/llm/skill.rb
ADDED
|
@@ -0,0 +1,131 @@
|
|
|
1
|
+
# frozen_string_literal: true
|
|
2
|
+
|
|
3
|
+
module LLM
|
|
4
|
+
##
|
|
5
|
+
# {LLM::Skill LLM::Skill} represents a directory-backed packaged capability.
|
|
6
|
+
# A skill directory must contain a `SKILL.md` file with YAML frontmatter.
|
|
7
|
+
# Skills can expose themselves as normal {LLM::Tool LLM::Tool} classes through
|
|
8
|
+
# {#to_tool}. This keeps skills on the same execution path as local tools.
|
|
9
|
+
class Skill
|
|
10
|
+
##
|
|
11
|
+
# Load a skill from a directory.
|
|
12
|
+
# @param [String, Pathname] path
|
|
13
|
+
# @return [LLM::Skill]
|
|
14
|
+
def self.load(path)
|
|
15
|
+
new(path).tap(&:load!)
|
|
16
|
+
end
|
|
17
|
+
|
|
18
|
+
##
|
|
19
|
+
# Returns the skill directory.
|
|
20
|
+
# @return [String]
|
|
21
|
+
attr_reader :path
|
|
22
|
+
|
|
23
|
+
##
|
|
24
|
+
# Returns the skill name.
|
|
25
|
+
# @return [String]
|
|
26
|
+
attr_reader :name
|
|
27
|
+
|
|
28
|
+
##
|
|
29
|
+
# Returns the skill description.
|
|
30
|
+
# @return [String]
|
|
31
|
+
attr_reader :description
|
|
32
|
+
|
|
33
|
+
##
|
|
34
|
+
# Returns the skill instructions.
|
|
35
|
+
# @return [String]
|
|
36
|
+
attr_reader :instructions
|
|
37
|
+
|
|
38
|
+
##
|
|
39
|
+
# Returns the skill frontmatter.
|
|
40
|
+
# @return [LLM::Object]
|
|
41
|
+
attr_reader :frontmatter
|
|
42
|
+
|
|
43
|
+
##
|
|
44
|
+
# Returns the skill tools.
|
|
45
|
+
# @return [Array<Class<LLM::Tool>>]
|
|
46
|
+
attr_reader :tools
|
|
47
|
+
|
|
48
|
+
##
|
|
49
|
+
# @param [String] path
|
|
50
|
+
# The path to a directory
|
|
51
|
+
# @return [LLM::Skill]
|
|
52
|
+
def initialize(path)
|
|
53
|
+
@path = path.to_s
|
|
54
|
+
@name = ::File.basename(@path)
|
|
55
|
+
@description = "Skill: #{@name}"
|
|
56
|
+
@instructions = ""
|
|
57
|
+
@frontmatter = LLM::Object.from({})
|
|
58
|
+
@tools = []
|
|
59
|
+
end
|
|
60
|
+
|
|
61
|
+
##
|
|
62
|
+
# Load and parse the skill.
|
|
63
|
+
# @return [LLM::Skill]
|
|
64
|
+
def load!
|
|
65
|
+
path = ::File.join(@path, "SKILL.md")
|
|
66
|
+
parse(::File.read(path))
|
|
67
|
+
self
|
|
68
|
+
end
|
|
69
|
+
|
|
70
|
+
##
|
|
71
|
+
# Execute the skill by wrapping it in a small agent with the skill
|
|
72
|
+
# instructions. The context is bound explicitly by the caller so the
|
|
73
|
+
# nested agent can inherit context-level behavior such as streaming.
|
|
74
|
+
# @param [LLM::Context] ctx
|
|
75
|
+
# @return [Hash]
|
|
76
|
+
def call(ctx)
|
|
77
|
+
instructions, tools = self.instructions, self.tools
|
|
78
|
+
params = ctx.params.merge(mode: ctx.mode).reject { [:tools, :schema].include?(_1) }
|
|
79
|
+
agent = Class.new(LLM::Agent) do
|
|
80
|
+
instructions(instructions)
|
|
81
|
+
tools(*tools)
|
|
82
|
+
end.new(ctx.llm, params)
|
|
83
|
+
agent.messages.concat(messages_for(ctx))
|
|
84
|
+
res = agent.talk("Solve the user's query.")
|
|
85
|
+
{content: res.content}
|
|
86
|
+
end
|
|
87
|
+
|
|
88
|
+
##
|
|
89
|
+
# Expose the skill as a normal LLM::Tool. The context is bound explicitly
|
|
90
|
+
# when the tool class is built.
|
|
91
|
+
# @param [LLM::Context] ctx
|
|
92
|
+
# @return [Class<LLM::Tool>]
|
|
93
|
+
def to_tool(ctx)
|
|
94
|
+
skill = self
|
|
95
|
+
Class.new(LLM::Tool) do
|
|
96
|
+
name skill.name
|
|
97
|
+
description skill.description
|
|
98
|
+
|
|
99
|
+
define_method(:call) do
|
|
100
|
+
skill.call(ctx)
|
|
101
|
+
end
|
|
102
|
+
end
|
|
103
|
+
end
|
|
104
|
+
|
|
105
|
+
private
|
|
106
|
+
|
|
107
|
+
def messages_for(ctx)
|
|
108
|
+
messages = ctx.messages
|
|
109
|
+
.to_a
|
|
110
|
+
.select { _1.user? || _1.assistant? }
|
|
111
|
+
.reject { _1.tool_call? || _1.tool_return? }
|
|
112
|
+
.last(8)
|
|
113
|
+
return messages if messages.empty?
|
|
114
|
+
[LLM::Message.new(:user, "Recent context:"), *messages]
|
|
115
|
+
end
|
|
116
|
+
|
|
117
|
+
def parse(content)
|
|
118
|
+
match = content.match(/\A---\s*\n(.*?)\n---\s*\n?(.*)\z/m)
|
|
119
|
+
unless match
|
|
120
|
+
@instructions = content
|
|
121
|
+
return
|
|
122
|
+
end
|
|
123
|
+
require "yaml" unless defined?(::YAML)
|
|
124
|
+
@frontmatter = LLM::Object.from(YAML.safe_load(match[1]) || {})
|
|
125
|
+
@name = @frontmatter.name || @name
|
|
126
|
+
@description = @frontmatter.description || @description
|
|
127
|
+
@tools = [*@frontmatter.tools].map { LLM::Tool.find_by_name!(_1) }
|
|
128
|
+
@instructions = match[2]
|
|
129
|
+
end
|
|
130
|
+
end
|
|
131
|
+
end
|
data/lib/llm/version.rb
CHANGED
data/lib/llm.rb
CHANGED
data/llm.gemspec
CHANGED
|
@@ -54,4 +54,7 @@ Gem::Specification.new do |spec|
|
|
|
54
54
|
spec.add_development_dependency "net-http-persistent", "~> 4.0"
|
|
55
55
|
spec.add_development_dependency "opentelemetry-sdk", "~> 1.10"
|
|
56
56
|
spec.add_development_dependency "logger", "~> 1.7"
|
|
57
|
+
spec.add_development_dependency "activerecord", "~> 8.0"
|
|
58
|
+
spec.add_development_dependency "sequel", "~> 5.0"
|
|
59
|
+
spec.add_development_dependency "sqlite3", "~> 2.0"
|
|
57
60
|
end
|
metadata
CHANGED
|
@@ -1,7 +1,7 @@
|
|
|
1
1
|
--- !ruby/object:Gem::Specification
|
|
2
2
|
name: llm.rb
|
|
3
3
|
version: !ruby/object:Gem::Version
|
|
4
|
-
version: 4.20.2
|
|
4
|
+
version: 4.22.0
|
|
5
5
|
platform: ruby
|
|
6
6
|
authors:
|
|
7
7
|
- Antar Azri
|
|
@@ -194,6 +194,48 @@ dependencies:
|
|
|
194
194
|
- - "~>"
|
|
195
195
|
- !ruby/object:Gem::Version
|
|
196
196
|
version: '1.7'
|
|
197
|
+
- !ruby/object:Gem::Dependency
|
|
198
|
+
name: activerecord
|
|
199
|
+
requirement: !ruby/object:Gem::Requirement
|
|
200
|
+
requirements:
|
|
201
|
+
- - "~>"
|
|
202
|
+
- !ruby/object:Gem::Version
|
|
203
|
+
version: '8.0'
|
|
204
|
+
type: :development
|
|
205
|
+
prerelease: false
|
|
206
|
+
version_requirements: !ruby/object:Gem::Requirement
|
|
207
|
+
requirements:
|
|
208
|
+
- - "~>"
|
|
209
|
+
- !ruby/object:Gem::Version
|
|
210
|
+
version: '8.0'
|
|
211
|
+
- !ruby/object:Gem::Dependency
|
|
212
|
+
name: sequel
|
|
213
|
+
requirement: !ruby/object:Gem::Requirement
|
|
214
|
+
requirements:
|
|
215
|
+
- - "~>"
|
|
216
|
+
- !ruby/object:Gem::Version
|
|
217
|
+
version: '5.0'
|
|
218
|
+
type: :development
|
|
219
|
+
prerelease: false
|
|
220
|
+
version_requirements: !ruby/object:Gem::Requirement
|
|
221
|
+
requirements:
|
|
222
|
+
- - "~>"
|
|
223
|
+
- !ruby/object:Gem::Version
|
|
224
|
+
version: '5.0'
|
|
225
|
+
- !ruby/object:Gem::Dependency
|
|
226
|
+
name: sqlite3
|
|
227
|
+
requirement: !ruby/object:Gem::Requirement
|
|
228
|
+
requirements:
|
|
229
|
+
- - "~>"
|
|
230
|
+
- !ruby/object:Gem::Version
|
|
231
|
+
version: '2.0'
|
|
232
|
+
type: :development
|
|
233
|
+
prerelease: false
|
|
234
|
+
version_requirements: !ruby/object:Gem::Requirement
|
|
235
|
+
requirements:
|
|
236
|
+
- - "~>"
|
|
237
|
+
- !ruby/object:Gem::Version
|
|
238
|
+
version: '2.0'
|
|
197
239
|
description: |
|
|
198
240
|
llm.rb is a lightweight runtime for building capable AI systems in Ruby.
|
|
199
241
|
It is not just an API wrapper. llm.rb gives you one runtime for providers,
|
|
@@ -371,9 +413,11 @@ files:
|
|
|
371
413
|
- lib/llm/schema/parser.rb
|
|
372
414
|
- lib/llm/schema/string.rb
|
|
373
415
|
- lib/llm/schema/version.rb
|
|
416
|
+
- lib/llm/sequel/agent.rb
|
|
374
417
|
- lib/llm/sequel/plugin.rb
|
|
375
418
|
- lib/llm/server_tool.rb
|
|
376
419
|
- lib/llm/session.rb
|
|
420
|
+
- lib/llm/skill.rb
|
|
377
421
|
- lib/llm/stream.rb
|
|
378
422
|
- lib/llm/stream/queue.rb
|
|
379
423
|
- lib/llm/tool.rb
|
|
@@ -386,6 +430,7 @@ files:
|
|
|
386
430
|
- lib/llm/usage.rb
|
|
387
431
|
- lib/llm/utils.rb
|
|
388
432
|
- lib/llm/version.rb
|
|
433
|
+
- lib/sequel/plugins/agent.rb
|
|
389
434
|
- lib/sequel/plugins/llm.rb
|
|
390
435
|
- llm.gemspec
|
|
391
436
|
homepage: https://github.com/llmrb/llm.rb
|