llm.rb 0.16.0 → 0.16.1

This diff shows the content of publicly available package versions released to one of the supported registries. The information in this diff is provided for informational purposes only and reflects the changes between package versions as published in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA256:
3
- metadata.gz: 38502ab4a41dba8177cb7b21db68f3e0dd5492323ac8b132b1775926b46ffffc
4
- data.tar.gz: ebe196962c43934ae979e298f80b4bc2e30a147ad0f42595eef59880be9fc01e
3
+ metadata.gz: c46802e2152430f164990a91499be669e928201e7793b162f8f62152349078af
4
+ data.tar.gz: 58269b584d08e9d236a3d85ac5728f4e4453a845ace9ebbf8c1ac22890609a1f
5
5
  SHA512:
6
- metadata.gz: 5100d71b851771137a86e799bc2cadab360fc0ced297288b09fa701a8f434c671fe739427b889243e9704c5ae2a05b6b8761c85f0b0bea9268700bf770e80f13
7
- data.tar.gz: 3aed0f826c229a37d30b2f0d41678976ba00df72216588a9302320c52e22d9d1b814df0b76bca8f6e713e105d73006339121a8bb900a73ac8896cc1f7c3f0051
6
+ metadata.gz: 9003c9cac451081ce0589f54acb2d72813e53a8708426e320e30732aa5842d99187af14f1c54bab69cfcc3bcc401529375377547a090f530aa1da140fd0a4b3f
7
+ data.tar.gz: 691e4ebfd80fcfa7af22f1ca26393256a69ddf6957be8960590eebddc5e20a1a30db3ba8cfc1ae65dba10491df6f2844fd72d3ca2444420cb23db357a0225e25
data/README.md CHANGED
@@ -34,6 +34,25 @@ GitHub Copilot but for the terminal.
34
34
  * [llm-shell](https://github.com/llmrb/llm-shell) – a developer-oriented console for Large Language Model communication
35
35
  * [llm-spell](https://github.com/llmrb/llm-spell) – a utility that can correct spelling mistakes with a Large Language Model
36
36
 
37
+ #### Show code
38
+
39
+ A simple chatbot that maintains a conversation and streams
40
+ responses in real-time:
41
+
42
+ ```ruby
43
+ #!/usr/bin/env ruby
44
+ require "llm"
45
+
46
+ llm = LLM.openai(key: ENV["KEY"])
47
+ bot = LLM::Bot.new(llm, stream: $stdout)
48
+ loop do
49
+ print "> "
50
+ input = $stdin.gets&.chomp || break
51
+ bot.chat(input).flush
52
+ print "\n"
53
+ end
54
+ ```
55
+
37
56
  ## Features
38
57
 
39
58
  #### General
@@ -129,9 +148,11 @@ and the gem should be installed separately:
129
148
  #!/usr/bin/env ruby
130
149
  require "llm"
131
150
 
132
- llm = LLM.openai(key: ENV["KEY"], persistent: true)
133
- res = llm.responses.create "Hello world"
134
- llm.responses.create "Adios", last_response_id: res.response_id
151
+ llm = LLM.openai(key: ENV["KEY"], persistent: true)
152
+ res1 = llm.responses.create "message 1"
153
+ res2 = llm.responses.create "message 2", previous_response_id: res1.response_id
154
+ res3 = llm.responses.create "message 3", previous_response_id: res2.response_id
155
+ print res3.output_text, "\n"
135
156
  ```
136
157
 
137
158
  ### Conversations
@@ -195,7 +216,7 @@ bot.chat(stream: $stdout) do |prompt|
195
216
  prompt.user ["Tell me about this URL", URI(url)]
196
217
  prompt.user ["Tell me about this PDF", File.open("handbook.pdf", "rb")]
197
218
  prompt.user "Are the URL and PDF similar to each other?"
198
- end.to_a
219
+ end.flush
199
220
  ```
200
221
 
201
222
  ### Schema
data/lib/llm/bot.rb CHANGED
@@ -123,5 +123,17 @@ module LLM
123
123
  .flat_map(&:functions)
124
124
  .select(&:pending?)
125
125
  end
126
+
127
+ ##
128
+ # @example
129
+ # llm = LLM.openai(key: ENV["KEY"])
130
+ # bot = LLM::Bot.new(llm, stream: $stdout)
131
+ # bot.chat("Hello", role: :user).flush
132
+ # Drains the buffer and returns all messages as an array
133
+ # @return [Array<LLM::Message>]
134
+ def drain
135
+ messages.drain
136
+ end
137
+ alias_method :flush, :drain
126
138
  end
127
139
  end
data/lib/llm/buffer.rb CHANGED
@@ -92,7 +92,8 @@ module LLM
92
92
  # llm = LLM.openai(key: ENV["KEY"])
93
93
  # bot = LLM::Bot.new(llm, stream: $stdout)
94
94
  # bot.chat "Hello", role: :user
95
- # bot.messages.drain
95
+ # bot.messages.flush
96
+ # @see LLM::Bot#drain
96
97
  # @note
97
98
  # This method is especially useful when using the streaming API.
98
99
  # Drains the buffer and returns all messages as an array
@@ -100,6 +101,7 @@ module LLM
100
101
  def drain
101
102
  to_a
102
103
  end
104
+ alias_method :flush, :drain
103
105
 
104
106
  private
105
107
 
@@ -8,8 +8,7 @@ class LLM::OpenAI
8
8
  # @example
9
9
  # llm = LLM.openai(key: ENV["OPENAI_SECRET"])
10
10
  # files = %w(foo.pdf bar.pdf).map { llm.files.create(file: _1) }
11
- # store = llm.vector_stores.create(name: "PDF Store", file_ids: files.map(&:id))
12
- # store = llm.vector_stores.poll(vector: store)
11
+ # store = llm.vector_stores.create_and_poll(name: "PDF Store", file_ids: files.map(&:id))
13
12
  # print "[-] store is ready", "\n"
14
13
  # chunks = llm.vector_stores.search(vector: store, query: "What is Ruby?")
15
14
  # chunks.each { |chunk| puts chunk }
@@ -50,6 +49,14 @@ class LLM::OpenAI
50
49
  LLM::Response.new(res)
51
50
  end
52
51
 
52
+ ##
53
+ # Create a vector store and poll until its status is "completed"
54
+ # @param (see LLM::OpenAI::VectorStores#create)
55
+ # @return (see LLM::OpenAI::VectorStores#poll)
56
+ def create_and_poll(...)
57
+ poll(vector: create(...))
58
+ end
59
+
53
60
  ##
54
61
  # Get a vector store
55
62
  # @param [String, #id] vector The ID of the vector store
data/lib/llm/version.rb CHANGED
@@ -1,5 +1,5 @@
1
1
  # frozen_string_literal: true
2
2
 
3
3
  module LLM
4
- VERSION = "0.16.0"
4
+ VERSION = "0.16.1"
5
5
  end
metadata CHANGED
@@ -1,7 +1,7 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: llm.rb
3
3
  version: !ruby/object:Gem::Version
4
- version: 0.16.0
4
+ version: 0.16.1
5
5
  platform: ruby
6
6
  authors:
7
7
  - Antar Azri