llm.rb 0.4.0 → 0.4.2
This diff shows the changes between publicly released versions of this package, as they appear in their public registry. It is provided for informational purposes only.
- checksums.yaml +4 -4
- data/LICENSE +17 -0
- data/README.md +3 -5
- data/lib/json/schema/array.rb +1 -2
- data/lib/json/schema/leaf.rb +27 -0
- data/lib/json/schema/version.rb +6 -0
- data/lib/json/schema.rb +24 -13
- data/lib/llm/version.rb +1 -1
- data/llm.gemspec +2 -3
- metadata +4 -25
- data/spec/anthropic/completion_spec.rb +0 -96
- data/spec/anthropic/embedding_spec.rb +0 -25
- data/spec/anthropic/models_spec.rb +0 -21
- data/spec/gemini/completion_spec.rb +0 -85
- data/spec/gemini/conversation_spec.rb +0 -31
- data/spec/gemini/embedding_spec.rb +0 -25
- data/spec/gemini/files_spec.rb +0 -124
- data/spec/gemini/images_spec.rb +0 -39
- data/spec/gemini/models_spec.rb +0 -21
- data/spec/llm/conversation_spec.rb +0 -261
- data/spec/ollama/completion_spec.rb +0 -43
- data/spec/ollama/conversation_spec.rb +0 -31
- data/spec/ollama/embedding_spec.rb +0 -24
- data/spec/ollama/models_spec.rb +0 -20
- data/spec/openai/audio_spec.rb +0 -55
- data/spec/openai/completion_spec.rb +0 -116
- data/spec/openai/embedding_spec.rb +0 -25
- data/spec/openai/files_spec.rb +0 -204
- data/spec/openai/images_spec.rb +0 -91
- data/spec/openai/models_spec.rb +0 -21
- data/spec/openai/responses_spec.rb +0 -51
- data/spec/readme_spec.rb +0 -61
- data/spec/setup.rb +0 -28
data/spec/openai/completion_spec.rb
DELETED
@@ -1,116 +0,0 @@
-# frozen_string_literal: true
-
-require "setup"
-
-RSpec.describe "LLM::OpenAI: completions" do
-  subject(:openai) { LLM.openai(token) }
-  let(:token) { ENV["LLM_SECRET"] || "TOKEN" }
-
-  context "when given a successful response",
-          vcr: {cassette_name: "openai/completions/successful_response"} do
-    subject(:response) { openai.complete("Hello!", :user) }
-
-    it "returns a completion" do
-      expect(response).to be_a(LLM::Response::Completion)
-    end
-
-    it "returns a model" do
-      expect(response.model).to eq("gpt-4o-mini-2024-07-18")
-    end
-
-    it "includes token usage" do
-      expect(response).to have_attributes(
-        prompt_tokens: 9,
-        completion_tokens: 10,
-        total_tokens: 19
-      )
-    end
-
-    context "with a choice" do
-      subject(:choice) { response.choices[0] }
-
-      it "has choices" do
-        expect(choice).to have_attributes(
-          role: "assistant",
-          content: "Hello! How can I assist you today?"
-        )
-      end
-
-      it "includes the response" do
-        expect(choice.extra[:response]).to eq(response)
-      end
-    end
-  end
-
-  context "when given a thread of messages",
-          vcr: {cassette_name: "openai/completions/successful_response_thread"} do
-    subject(:response) do
-      openai.complete "What is your name? What age are you?", :user, messages: [
-        {role: "system", content: "Answer all of my questions"},
-        {role: "system", content: "Answer in the format: My name is <name> and I am <age> years old"},
-        {role: "system", content: "Your name is Pablo and you are 25 years old"}
-      ]
-    end
-
-    it "has choices" do
-      expect(response).to have_attributes(
-        choices: [
-          have_attributes(
-            role: "assistant",
-            content: %r|\AMy name is Pablo and I am 25 years old|
-          )
-        ]
-      )
-    end
-  end
-
-  context "when asked to describe a PDF document",
-          vcr: {cassette_name: "openai/completions/describe_pdf_document"} do
-    let(:file) { LLM::File("spec/fixtures/documents/freebsd.sysctl.pdf") }
-    let(:response) do
-      openai.complete([
-        "This PDF document describes sysctl nodes on FreeBSD",
-        "Answer yes or no.",
-        "Nothing else",
-        file
-      ], :user)
-    end
-
-    subject { response.choices[0].content.downcase[0..2] }
-
-    it "is successful" do
-      is_expected.to eq("yes")
-    end
-  end
-
-  context "when given a 'bad request' response",
-          vcr: {cassette_name: "openai/completions/bad_request"} do
-    subject(:response) { openai.complete(URI("/foobar.exe"), :user) }
-
-    it "raises an error" do
-      expect { response }.to raise_error(LLM::Error::ResponseError)
-    end
-
-    it "includes the response" do
-      response
-    rescue LLM::Error => ex
-      expect(ex.response).to be_instance_of(Net::HTTPBadRequest)
-    end
-  end
-
-  context "when given an unauthorized response",
-          vcr: {cassette_name: "openai/completions/unauthorized_response"} do
-    subject(:response) { openai.complete(LLM::Message.new(:user, "Hello!")) }
-    let(:token) { "BADTOKEN" }
-
-    it "raises an error" do
-      expect { response }.to raise_error(LLM::Error::Unauthorized)
-    end
-
-    it "includes the response" do
-      response
-    rescue LLM::Error::Unauthorized => ex
-      expect(ex.response).to be_kind_of(Net::HTTPResponse)
-    end
-  end
-end
data/spec/openai/embedding_spec.rb
DELETED
@@ -1,25 +0,0 @@
-# frozen_string_literal: true
-
-require "setup"
-
-RSpec.describe "LLM::OpenAI: embeddings" do
-  let(:openai) { LLM.openai(token) }
-  let(:token) { ENV["LLM_SECRET"] || "TOKEN" }
-
-  context "when given a successful response",
-          vcr: {cassette_name: "openai/embeddings/successful_response"} do
-    subject(:response) { openai.embed("Hello, world") }
-
-    it "returns an embedding" do
-      expect(response).to be_instance_of(LLM::Response::Embedding)
-    end
-
-    it "returns a model" do
-      expect(response.model).to eq("text-embedding-3-small")
-    end
-
-    it "has embeddings" do
-      expect(response.embeddings).to be_instance_of(Array)
-    end
-  end
-end
data/spec/openai/files_spec.rb
DELETED
@@ -1,204 +0,0 @@
-# frozen_string_literal: true
-
-require "setup"
-
-RSpec.describe "LLM::OpenAI::Files" do
-  let(:token) { ENV["LLM_SECRET"] || "TOKEN" }
-  let(:provider) { LLM.openai(token) }
-
-  context "when given a successful create operation (haiku1.txt)",
-          vcr: {cassette_name: "openai/files/successful_create_haiku1"} do
-    subject(:file) { provider.files.create(file: LLM::File("spec/fixtures/documents/haiku1.txt")) }
-
-    it "is successful" do
-      expect(file).to be_instance_of(LLM::Response::File)
-    ensure
-      provider.files.delete(file:)
-    end
-
-    it "returns a file object" do
-      expect(file).to have_attributes(
-        id: instance_of(String),
-        filename: "haiku1.txt",
-        purpose: "assistants"
-      )
-    ensure
-      provider.files.delete(file:)
-    end
-  end
-
-  context "when given a successful create operation (haiku2.txt)",
-          vcr: {cassette_name: "openai/files/successful_create_haiku2"} do
-    subject(:file) { provider.files.create(file: LLM::File("spec/fixtures/documents/haiku2.txt")) }
-
-    it "is successful" do
-      expect(file).to be_instance_of(LLM::Response::File)
-    ensure
-      provider.files.delete(file:)
-    end
-
-    it "returns a file object" do
-      expect(file).to have_attributes(
-        id: instance_of(String),
-        filename: "haiku2.txt",
-        purpose: "assistants"
-      )
-    ensure
-      provider.files.delete(file:)
-    end
-  end
-
-  context "when given a successful delete operation (haiku3.txt)",
-          vcr: {cassette_name: "openai/files/successful_delete_haiku3"} do
-    let(:file) { provider.files.create(file: LLM::File("spec/fixtures/documents/haiku3.txt")) }
-    subject { provider.files.delete(file:) }
-
-    it "is successful" do
-      is_expected.to be_instance_of(OpenStruct)
-    end
-
-    it "returns deleted status" do
-      is_expected.to have_attributes(
-        deleted: true
-      )
-    end
-  end
-
-  context "when given a successful get operation (haiku4.txt)",
-          vcr: {cassette_name: "openai/files/successful_get_haiku4"} do
-    let(:file) { provider.files.create(file: LLM::File("spec/fixtures/documents/haiku4.txt")) }
-    subject { provider.files.get(file:) }
-
-    it "is successful" do
-      is_expected.to be_instance_of(LLM::Response::File)
-    ensure
-      provider.files.delete(file:)
-    end
-
-    it "returns a file object" do
-      is_expected.to have_attributes(
-        id: instance_of(String),
-        filename: "haiku4.txt",
-        purpose: "assistants"
-      )
-    ensure
-      provider.files.delete(file:)
-    end
-  end
-
-  context "when given a successful all operation",
-          vcr: {cassette_name: "openai/files/successful_all"} do
-    let!(:files) do
-      [
-        provider.files.create(file: LLM::File("spec/fixtures/documents/haiku1.txt")),
-        provider.files.create(file: LLM::File("spec/fixtures/documents/haiku2.txt"))
-      ]
-    end
-    subject(:filelist) { provider.files.all }
-
-    it "is successful" do
-      expect(filelist).to be_instance_of(LLM::Response::FileList)
-    ensure
-      files.each { |file| provider.files.delete(file:) }
-    end
-
-    it "returns an array of file objects" do
-      expect(filelist.files[0..1]).to match_array(
-        [
-          have_attributes(
-            id: instance_of(String),
-            filename: "haiku1.txt",
-            purpose: "assistants"
-          ),
-          have_attributes(
-            id: instance_of(String),
-            filename: "haiku2.txt",
-            purpose: "assistants"
-          )
-        ]
-      )
-    ensure
-      files.each { |file| provider.files.delete(file:) }
-    end
-  end
-
-  context "when asked to describe the contents of a file",
-          vcr: {cassette_name: "openai/files/describe_freebsd.sysctl.pdf"} do
-    subject { bot.last_message.content.downcase[0..2] }
-    let(:bot) { LLM::Chat.new(provider).lazy }
-    let(:file) { provider.files.create(file: LLM::File("spec/fixtures/documents/freebsd.sysctl.pdf")) }
-
-    before do
-      bot.respond(file)
-      bot.respond("Is this PDF document about FreeBSD?")
-      bot.respond("Answer with yes or no. Nothing else.")
-    end
-
-    it "describes the document" do
-      is_expected.to eq("yes")
-    ensure
-      provider.files.delete(file:)
-    end
-  end
-
-  context "when asked to describe the contents of a file",
-          vcr: {cassette_name: "openai/files/describe_freebsd.sysctl_2.pdf"} do
-    subject { bot.last_message.content.downcase[0..2] }
-    let(:bot) { LLM::Chat.new(provider).lazy }
-    let(:file) { provider.files.create(file: LLM::File("spec/fixtures/documents/freebsd.sysctl.pdf")) }
-
-    before do
-      bot.respond([
-        "Is this PDF document about FreeBSD?",
-        "Answer with yes or no. Nothing else.",
-        file
-      ])
-    end
-
-    it "describes the document" do
-      is_expected.to eq("yes")
-    ensure
-      provider.files.delete(file:)
-    end
-  end
-
-  context "when asked to describe the contents of a file",
-          vcr: {cassette_name: "openai/files/describe_freebsd.sysctl_3.pdf"} do
-    subject { bot.last_message.content.downcase[0..2] }
-    let(:bot) { LLM::Chat.new(provider).lazy }
-    let(:file) { provider.files.create(file: LLM::File("spec/fixtures/documents/freebsd.sysctl.pdf")) }
-
-    before do
-      bot.chat(file)
-      bot.chat("Is this PDF document about FreeBSD?")
-      bot.chat("Answer with yes or no. Nothing else.")
-    end
-
-    it "describes the document" do
-      is_expected.to eq("yes")
-    ensure
-      provider.files.delete(file:)
-    end
-  end
-
-  context "when asked to describe the contents of a file",
-          vcr: {cassette_name: "openai/files/describe_freebsd.sysctl_4.pdf"} do
-    subject { bot.last_message.content.downcase[0..2] }
-    let(:bot) { LLM::Chat.new(provider).lazy }
-    let(:file) { provider.files.create(file: LLM::File("spec/fixtures/documents/freebsd.sysctl.pdf")) }
-
-    before do
-      bot.chat([
-        "Is this PDF document about FreeBSD?",
-        "Answer with yes or no. Nothing else.",
-        file
-      ])
-    end
-
-    it "describes the document" do
-      is_expected.to eq("yes")
-    ensure
-      provider.files.delete(file:)
-    end
-  end
-end
data/spec/openai/images_spec.rb
DELETED
@@ -1,91 +0,0 @@
-# frozen_string_literal: true
-
-require "setup"
-
-RSpec.describe "LLM::OpenAI::Images" do
-  let(:token) { ENV["LLM_SECRET"] || "TOKEN" }
-  let(:provider) { LLM.openai(token) }
-
-  context "when given a successful create operation (urls)",
-          vcr: {cassette_name: "openai/images/successful_create_urls"} do
-    subject(:response) { provider.images.create(prompt: "A dog on a rocket to the moon") }
-
-    it "is successful" do
-      expect(response).to be_instance_of(LLM::Response::Image)
-    end
-
-    it "returns an array of urls" do
-      expect(response.urls).to be_instance_of(Array)
-    end
-
-    it "returns a url" do
-      expect(response.urls[0]).to be_instance_of(String)
-    end
-  end
-
-  context "when given a successful create operation (base64)",
-          vcr: {cassette_name: "openai/images/successful_create_base64"} do
-    subject(:response) do
-      provider.images.create(
-        prompt: "A dog on a rocket to the moon",
-        response_format: "b64_json"
-      )
-    end
-
-    it "is successful" do
-      expect(response).to be_instance_of(LLM::Response::Image)
-    end
-
-    it "returns an array of images" do
-      expect(response.images).to be_instance_of(Array)
-    end
-
-    it "returns an IO-like object" do
-      expect(response.images[0]).to be_instance_of(StringIO)
-    end
-  end
-
-  context "when given a successful variation operation",
-          vcr: {cassette_name: "openai/images/successful_variation"} do
-    subject(:response) do
-      provider.images.create_variation(
-        image: LLM::File("spec/fixtures/images/bluebook.png"),
-        n: 5
-      )
-    end
-
-    it "is successful" do
-      expect(response).to be_instance_of(LLM::Response::Image)
-    end
-
-    it "returns data" do
-      expect(response.urls.size).to eq(5)
-    end
-
-    it "returns multiple variations" do
-      response.urls.each { expect(_1).to be_instance_of(String) }
-    end
-  end
-
-  context "when given a successful edit",
-          vcr: {cassette_name: "openai/images/successful_edit"} do
-    subject(:response) do
-      provider.images.edit(
-        image: LLM::File("spec/fixtures/images/bluebook.png"),
-        prompt: "Add white background"
-      )
-    end
-
-    it "is successful" do
-      expect(response).to be_instance_of(LLM::Response::Image)
-    end
-
-    it "returns data" do
-      expect(response.urls).to be_instance_of(Array)
-    end
-
-    it "returns a url" do
-      expect(response.urls[0]).to be_instance_of(String)
-    end
-  end
-end
data/spec/openai/models_spec.rb
DELETED
@@ -1,21 +0,0 @@
-# frozen_string_literal: true
-
-require "setup"
-
-RSpec.describe "LLM::OpenAI::Models" do
-  let(:token) { ENV["LLM_SECRET"] || "TOKEN" }
-  let(:provider) { LLM.openai(token) }
-
-  context "when given a successful list operation",
-          vcr: {cassette_name: "openai/models/successful_list"} do
-    subject { provider.models.all }
-
-    it "is successful" do
-      is_expected.to be_instance_of(LLM::Response::ModelList)
-    end
-
-    it "returns a list of models" do
-      expect(subject.models).to all(be_a(LLM::Model))
-    end
-  end
-end
data/spec/openai/responses_spec.rb
DELETED
@@ -1,51 +0,0 @@
-# frozen_string_literal: true
-
-require "setup"
-
-RSpec.describe "LLM::OpenAI::Responses" do
-  let(:token) { ENV["LLM_SECRET"] || "TOKEN" }
-  let(:provider) { LLM.openai(token) }
-
-  context "when given a successful create operation",
-          vcr: {cassette_name: "openai/responses/successful_create"} do
-    subject { provider.responses.create("Hello", :developer) }
-
-    it "is successful" do
-      is_expected.to be_instance_of(LLM::Response::Output)
-    end
-
-    it "has outputs" do
-      is_expected.to have_attributes(
-        outputs: [instance_of(LLM::Message)]
-      )
-    end
-  end
-
-  context "when given a successful get operation",
-          vcr: {cassette_name: "openai/responses/successful_get"} do
-    let(:response) { provider.responses.create("Hello", :developer) }
-    subject { provider.responses.get(response) }
-
-    it "is successful" do
-      is_expected.to be_instance_of(LLM::Response::Output)
-    end
-
-    it "has outputs" do
-      is_expected.to have_attributes(
-        outputs: [instance_of(LLM::Message)]
-      )
-    end
-  end
-
-  context "when given a successful delete operation",
-          vcr: {cassette_name: "openai/responses/successful_delete"} do
-    let(:response) { provider.responses.create("Hello", :developer) }
-    subject { provider.responses.delete(response) }
-
-    it "is successful" do
-      is_expected.to have_attributes(
-        deleted: true
-      )
-    end
-  end
-end
data/spec/readme_spec.rb
DELETED
@@ -1,61 +0,0 @@
-# frozen_string_literal: true
-
-require "setup"
-require "test/cmd"
-
-RSpec.describe "The README examples" do
-  before { ENV["key"] = key }
-  after { ENV["key"] = nil }
-  let(:key) { "" }
-
-  context "when given the lazy conversation example" do
-    subject(:command) do
-      cmd RbConfig.ruby,
-          "-Ilib",
-          "-r", webmock("lazy_conversation.rb"),
-          readme_example("lazy_conversation.rb")
-    end
-
-    let(:actual_conversation) do
-      command.stdout.each_line.map(&:strip)
-    end
-
-    let(:expected_conversation) do
-      [
-        "[system] You are my math assistant.",
-        "I will provide you with (simple) equations.",
-        "You will provide answers in the format \"The answer to <equation> is <answer>\".",
-        "I will provide you a set of messages. Reply to all of them.",
-        "A message is considered unanswered if there is no corresponding assistant response.",
-
-        "[user] Tell me the answer to 5 + 15",
-        "[user] Tell me the answer to (5 + 15) * 2",
-        "[user] Tell me the answer to ((5 + 15) * 2) / 10",
-
-        "[assistant] The answer to 5 + 15 is 20.",
-        "The answer to (5 + 15) * 2 is 40.",
-        "The answer to ((5 + 15) * 2) / 10 is 4."
-      ].map(&:strip)
-    end
-
-    it "is successful" do
-      is_expected.to be_success
-    end
-
-    it "emits output" do
-      expect(join(actual_conversation)).to eq(join(expected_conversation))
-    end
-  end
-
-  def webmock(example)
-    File.join(Dir.getwd, "share", "llm", "webmocks", example)
-  end
-
-  def readme_example(example)
-    File.join(Dir.getwd, "share", "llm", "examples", example)
-  end
-
-  def join(lines)
-    lines.reject(&:empty?).join("\n")
-  end
-end
data/spec/setup.rb
DELETED
@@ -1,28 +0,0 @@
-# frozen_string_literal: true
-
-require "llm"
-require "webmock/rspec"
-require "vcr"
-
-RSpec.configure do |config|
-  config.disable_monkey_patching!
-
-  config.expect_with :rspec do |c|
-    c.syntax = :expect
-  end
-end
-
-VCR.configure do |config|
-  config.cassette_library_dir = "spec/fixtures/cassettes"
-  config.hook_into :webmock
-  config.configure_rspec_metadata!
-
-  ##
-  # scrub
-  config.filter_sensitive_data("TOKEN") { ENV["LLM_SECRET"] }
-  config.before_record do
-    body = _1.response.body
-    body.gsub! %r|#{Regexp.escape("https://oaidalleapiprodscus.blob.core.windows.net/")}[^"]+|,
-      "https://openai.com/generated/image.png"
-  end
-end