llm_memory 0.1.9 → 0.1.11

This diff covers publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in those registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz: 723ea7f6c27fd43b5cb447f476e802af917b678c68e3aa999bbad1807a950b20
-  data.tar.gz: e65bf3f2d0555d0c101f5fe550f436760dcecd28b57e67f54d89a1bcecde3ab3
+  metadata.gz: c72d46a390a534d27ea5ee90bfb2950b75fd16f5320af2a431a314bcee13be30
+  data.tar.gz: 974f529b0158cf5b4a8d4330dca59fe2fd3c2ee37cdca6465d98abee0f84db18
 SHA512:
-  metadata.gz: 975b970c1f2a8c447470755b74b115f3411b072f9bb9e2536c52462cdcedd120c93a342df169dd955ae4cbc9267569393039c5b319fea9487e1c49105a5cbf91
-  data.tar.gz: feb02279f9f41132237ef2649d0f3582a83d85c16240d8a79cbc0e8d85c485b659f2d7d2458367af5446ba08a856992bfd12f9bbd9c97510ccbee99bbf1140c6
+  metadata.gz: 697d7a93bc637ec00004e68cd83dac1077e9705706e6b0123131acfaac201ce7979eeef63772bae35c53fa1b95ea7123f23399e5ee0ea1b454c689b6aa0f6a33
+  data.tar.gz: ab67fd9ec6c1297b5308dfc28b9475bab46e28aa3b4f2af1694a9053ed7203c80e478320c11663a49583e43762814b85e0bdc52d2ea7002274a282609b1db9bd
data/Gemfile.lock CHANGED
@@ -1,7 +1,7 @@
 PATH
   remote: .
   specs:
-    llm_memory (0.1.9)
+    llm_memory (0.1.11)
       redis (~> 4.6.0)
       ruby-openai (~> 3.7.0)
       tokenizers (~> 0.3.3)
@@ -43,6 +43,46 @@ module LlmMemory
       end
     end
 
+    def respond_with_schema(context: {}, schema: {})
+      response_content = respond(context)
+      begin
+        response = client.chat(
+          parameters: {
+            model: "gpt-3.5-turbo-0613", # as of July 3, 2023
+            messages: [
+              {
+                role: "user",
+                content: response_content
+              }
+            ],
+            functions: [
+              {
+                name: "broca",
+                description: "Formating the content with the specified schema",
+                parameters: schema
+              }
+            ]
+          }
+        )
+        LlmMemory.logger.debug(response)
+        message = response.dig("choices", 0, "message")
+        if message["role"] == "assistant" && message["function_call"]
+          function_name = message.dig("function_call", "name")
+          args =
+            JSON.parse(
+              message.dig("function_call", "arguments"),
+              {symbolize_names: true}
+            )
+          if function_name == "broca"
+            args
+          end
+        end
+      rescue => e
+        LlmMemory.logger.info(e.inspect)
+        nil
+      end
+    end
+
     def generate_prompt(args)
       erb = ERB.new(@prompt)
       erb.result_with_hash(args)
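
The new respond_with_schema method routes the plain-text reply from respond back through OpenAI function calling: the caller's JSON Schema is forwarded as the parameters of a single "broca" function definition, so the model returns structured arguments instead of free text, which are then JSON-parsed with symbolized keys. A minimal usage sketch, assuming Broca.new still takes an ERB prompt template via the prompt: keyword as in earlier releases (the schema shape below is only illustrative):

require "llm_memory"

# Assumed constructor signature (prompt: is an ERB template whose
# variables come from the context hash passed to respond).
broca = LlmMemory::Broca.new(prompt: "Tell me about <%= topic %>")

# A JSON Schema, forwarded verbatim as the "broca" function's parameters.
schema = {
  type: "object",
  properties: {
    summary: {type: "string", description: "One-sentence summary"},
    keywords: {type: "array", items: {type: "string"}}
  },
  required: ["summary"]
}

# Returns a symbol-keyed Hash parsed from function_call.arguments,
# or nil when the API call fails or no function call comes back.
result = broca.respond_with_schema(context: {topic: "vector search"}, schema: schema)

Note that the rescue clause swallows API and parse errors and returns nil, so callers should check the result before using it.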
@@ -54,7 +54,7 @@ module LlmMemory
     end
 
     def forget_all
-      @store.drop_index
+      @store.drop_index if @store.index_exists?
     end
 
     def add_vectors(docs)
@@ -15,6 +15,10 @@ module LlmMemory
       raise NotImplementedError, "Each store must implement the 'create_index' method."
     end
 
+    def index_exists?
+      raise NotImplementedError, "Each store must implement the 'index_exists?' method."
+    end
+
     def drop_index
       raise NotImplementedError, "Each store must implement the 'drop_index' method."
     end
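
Together, these two hunks make forget_all safe to call before any index has been created: the store contract gains an abstract index_exists?, and drop_index is only invoked when it returns true. The gem's concrete Redis store is not part of this diff, so the following is only a sketch of how a RediSearch-backed store might satisfy the new contract; the class name, @client, @index_name, and the FT.INFO probe are all assumptions, not the gem's actual code:

require "redis"

module LlmMemory
  class RedisStore # hypothetical store class name, not from this diff
    # FT.INFO raises an error reply for a missing RediSearch index, so a
    # rescued Redis::CommandError signals that the index does not exist.
    def index_exists?
      @client.call("FT.INFO", @index_name) # assumed ivars: redis-rb client, index name
      true
    rescue Redis::CommandError
      false
    end
  end
end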
@@ -1,5 +1,5 @@
 # frozen_string_literal: true
 
 module LlmMemory
-  VERSION = "0.1.9"
+  VERSION = "0.1.11"
 end
metadata CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: llm_memory
 version: !ruby/object:Gem::Version
-  version: 0.1.9
+  version: 0.1.11
 platform: ruby
 authors:
 - Shohei Kameda
 autorequire:
 bindir: exe
 cert_chain: []
-date: 2023-05-18 00:00:00.000000000 Z
+date: 2023-07-02 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: tokenizers