lammy 0.5.0 → 0.7.0
- checksums.yaml +4 -4
- data/lib/lammy/chat.rb +3 -5
- data/lib/lammy/openai.rb +29 -9
- data/lib/lammy/schema.rb +16 -4
- metadata +16 -2
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 160025e8046546a063a9a75deb7e1f29186973f9b7fe2096018734c6d9e1a523
+  data.tar.gz: 0402c839e540115883c2b3bc99b065ab2b7f590824aa05aa9b50ac42e288fd76
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 3db10fe3e0615d2a0c8a739e6a9baf5c406aa3ed8d7dc18f9521a470ffcd51a2544a9365f51614ff2f0f70d34719b485de3bbd18a82cacde05b529314808b93e
+  data.tar.gz: 65ab65d92be1cca2f88382ab3c347c83c2182493dc4bf358379c35fb16d06d13053ed57d3b266dd2674e337c5db07090cc937e5ffa05ed4d113225d8cabf0a06
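
For context, checksums.yaml records SHA256/SHA512 digests of the metadata.gz and data.tar.gz members packed inside the .gem archive. A minimal verification sketch, not part of this release; it assumes the two members have been extracted from a downloaded lammy-0.7.0.gem (e.g. with `tar xf lammy-0.7.0.gem`), and the paths are hypothetical:

require 'digest'

# New SHA256 values from the checksums.yaml diff above.
expected = {
  'metadata.gz' => '160025e8046546a063a9a75deb7e1f29186973f9b7fe2096018734c6d9e1a523',
  'data.tar.gz' => '0402c839e540115883c2b3bc99b065ab2b7f590824aa05aa9b50ac42e288fd76'
}

expected.each do |file, sha256|
  actual = Digest::SHA256.file(file).hexdigest
  puts "#{file}: #{actual == sha256 ? 'OK' : 'mismatch'}"
end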
data/lib/lammy/chat.rb
CHANGED
@@ -17,16 +17,14 @@ module L
       define_method(method_name) do |*args, &block|
         # Initialize context
         @system_message = nil
-        @prefilled_message = nil

         # `context` sets the system message and is available within the instance
         define_singleton_method(:context) do |message|
           @system_message = message
         end

-
-
-          @prefilled_message = message
+        define_singleton_method(:stream) do |proc|
+          @stream = proc
         end

         # Call the original method to get the user message
@@ -41,7 +39,7 @@ module L
           raise "Unsupported model: #{settings[:model]}"
         end

-        client.chat(user_message, @system_message, @
+        client.chat(user_message, @system_message, @stream)
       end
     end
   end
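
The net effect of this change is that the prefilled-message mechanism (`@prefilled_message`) is dropped and replaced by a `stream` helper that registers a callback and forwards it to `client.chat`. A minimal usage sketch, assuming the gem's `include L` / `llm(model:)` method annotation wires the method up as in this file; the class name and model are illustrative:

class Assistant
  include L

  llm(model: 'gpt-4o') # assumed DSL; the diff only shows the generated method body
  def answer(question)
    # `context` still sets the system message.
    context 'You are a terse assistant.'

    # New in 0.7.0: `stream` registers a proc that receives streamed content;
    # it is stored in @stream and passed through to client.chat.
    stream ->(chunk) { print chunk }

    # The method's return value becomes the user message.
    question
  end
end

Assistant.new.answer('Explain Ruby blocks in one sentence.')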
data/lib/lammy/openai.rb
CHANGED
@@ -22,19 +22,26 @@ module L
     end

     # Generate a response with support for structured output
-    def chat(user_message, system_message = nil,
+    def chat(user_message, system_message = nil, stream = nil)
       schema = schema(settings)
-
+      messages = messages(user_message, system_message)
+
+      request = client.chat(
         parameters: {
-          model: settings[:model],
-
-
-
+          model: settings[:model],
+          response_format: schema,
+          messages: messages,
+          stream: stream ? ->(chunk) { stream.call(stream_content(chunk)) } : nil
         }.compact
-      )
+      )

-
-
+      if stream.nil?
+        response = request.dig('choices', 0, 'message', 'content')
+        content = schema ? ::Hashie::Mash.new(JSON.parse(response)) : response
+        array?(schema) ? content.items : content
+      else
+        stream
+      end
     end

     # OpenAI’s text embeddings measure the relatedness of text strings. An embedding is a vector of floating point
@@ -66,10 +73,23 @@ module L
       }
     end

+    def messages(user_message, system_message)
+      return user_message if user_message.is_a?(Array)
+
+      [
+        system_message ? L.system(system_message) : nil,
+        L.user(user_message)
+      ].compact
+    end
+
     def array?(schema)
       schema.is_a?(Hash) && schema.dig('json_schema', 'schema', 'properties', 'items', 'type') == 'array'
     end

+    def stream_content(chunk)
+      chunk.dig('choices', 0, 'delta', 'content')
+    end
+
     def client
       return settings[:client] if settings[:client]

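
A standalone sketch (not the gem's code) of the streaming path added above: when a `stream` proc is given, `chat` hands OpenAI a lambda that pulls each chunk's text out of choices[0].delta.content via `stream_content` and passes it to the caller; when no proc is given, the full response is dug out of the completed request as before.

# Shape of a streamed chunk as delivered by OpenAI's chat completions API.
chunk = {
  'choices' => [
    { 'delta' => { 'content' => 'Hel' } }
  ]
}

stream  = ->(text) { print text } # user-supplied callback, as registered via `stream` in chat.rb
wrapped = ->(c) { stream.call(c.dig('choices', 0, 'delta', 'content')) }

wrapped.call(chunk) # prints "Hel"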
data/lib/lammy/schema.rb
CHANGED
@@ -1,11 +1,23 @@
 # frozen_string_literal: true

 module L
-  # Structured Outputs is a feature that ensures the model will always generate responses
-  # that adhere to your supplied JSON Schema, so you don't need to worry about the model
-  # omitting a required key, or hallucinating an invalid enum value. This is a set of
-  # helper methods to help you define your JSON Schema easily.
   module Schema
+    def system(content)
+      { role: :system, content: content }
+    end
+
+    def user(content)
+      { role: :user, content: content }
+    end
+
+    def assistant(content)
+      { role: :assistant, content: content }
+    end
+
+    # Structured Outputs is a feature that ensures the model will always generate responses
+    # that adhere to your supplied JSON Schema, so you don't need to worry about the model
+    # omitting a required key, or hallucinating an invalid enum value. This is a set of
+    # helper methods to help you define your JSON Schema easily.
     def to_a(object)
       {
         'type' => 'object',
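
Since the updated `OpenAI#chat` passes an Array user message straight through (see the new `messages` helper above), these role helpers can be used to assemble a whole conversation by hand. A small sketch; openai.rb above already calls `L.system` and `L.user`, and `L.assistant` is assumed to be exposed the same way:

conversation = [
  L.system('You are a helpful assistant.'),
  L.user('Write one haiku about Ruby.'),
  L.assistant('Here is a haiku about Ruby...'),
  L.user('Now one about Rails.')
]
# Each helper returns a plain hash, e.g. { role: :system, content: '...' }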
metadata
CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: lammy
 version: !ruby/object:Gem::Version
-  version: 0.5.0
+  version: 0.7.0
 platform: ruby
 authors:
 - Kamil Nicieja
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2024-10-
+date: 2024-10-06 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: anthropic
@@ -52,6 +52,20 @@ dependencies:
     - - "~>"
       - !ruby/object:Gem::Version
         version: '7.1'
+- !ruby/object:Gem::Dependency
+  name: pry
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - "~>"
+      - !ruby/object:Gem::Version
+        version: 0.14.2
+  type: :development
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - "~>"
+      - !ruby/object:Gem::Version
+        version: 0.14.2
 description: An LLM library for Ruby
 email: kamil@nicieja.co
 executables: []
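
The metadata additions correspond to declaring pry as a development-only dependency. In gemspec terms this would look roughly like the following sketch; only the pry line is implied by the diff, the rest is reconstructed from the metadata shown above and the gemspec file name is assumed:

# lammy.gemspec (sketch)
Gem::Specification.new do |spec|
  spec.name        = 'lammy'
  spec.version     = '0.7.0'
  spec.authors     = ['Kamil Nicieja']
  spec.email       = 'kamil@nicieja.co'
  spec.summary     = 'An LLM library for Ruby'
  spec.description = 'An LLM library for Ruby'

  # New in 0.7.0: pry is available in development, not at runtime.
  spec.add_development_dependency 'pry', '~> 0.14.2'
end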