llm_memory 0.1.10 → 0.1.11

Sign up to get free protection for your applications and to get access to all the features.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA256:
3
- metadata.gz: 5117e4a12dc1c9a3848817dc949038d6c80aa3cfb62b2c10709331fd19baaea8
4
- data.tar.gz: 3117e77cd3f0741e94c419cfbc1cb7390aaf84f63aea5df181fdf29df77eb7a4
3
+ metadata.gz: c72d46a390a534d27ea5ee90bfb2950b75fd16f5320af2a431a314bcee13be30
4
+ data.tar.gz: 974f529b0158cf5b4a8d4330dca59fe2fd3c2ee37cdca6465d98abee0f84db18
5
5
  SHA512:
6
- metadata.gz: 05ea60dbd58ea1aabe974e32b22253febe355701940e16f04da685ecefadcdf58323bd8ab28354af6675cc57f17a9a3cc30e64595684deb1be79d3c8251e1eb0
7
- data.tar.gz: da8cf3c76bb436e9665241a135ba1154b45581de08a6cb77a3022066c6c3898ea6beb749d561e7264e19cd2bf900e21c908980c4f03ab0dcdbf52b36036ccb26
6
+ metadata.gz: 697d7a93bc637ec00004e68cd83dac1077e9705706e6b0123131acfaac201ce7979eeef63772bae35c53fa1b95ea7123f23399e5ee0ea1b454c689b6aa0f6a33
7
+ data.tar.gz: ab67fd9ec6c1297b5308dfc28b9475bab46e28aa3b4f2af1694a9053ed7203c80e478320c11663a49583e43762814b85e0bdc52d2ea7002274a282609b1db9bd
data/Gemfile.lock CHANGED
@@ -1,7 +1,7 @@
1
1
  PATH
2
2
  remote: .
3
3
  specs:
4
- llm_memory (0.1.10)
4
+ llm_memory (0.1.11)
5
5
  redis (~> 4.6.0)
6
6
  ruby-openai (~> 3.7.0)
7
7
  tokenizers (~> 0.3.3)
@@ -43,6 +43,46 @@ module LlmMemory
43
43
  end
44
44
  end
45
45
 
46
# Generates a response for the given context, then asks the model to
# reformat that answer into the caller-supplied JSON schema using an
# OpenAI function call ("broca").
#
# @param context [Hash] template variables forwarded to #respond
# @param schema [Hash] JSON-Schema hash describing the desired output shape
# @return [Hash, nil] the function-call arguments parsed with symbolized
#   keys when the model invoked "broca"; nil when the model replied
#   differently or any error occurred (errors are logged, not raised)
def respond_with_schema(context: {}, schema: {})
  response_content = respond(context)
  begin
    response = client.chat(
      parameters: {
        model: "gpt-3.5-turbo-0613", # function-calling model, as of July 3, 2023
        messages: [
          {
            role: "user",
            content: response_content
          }
        ],
        functions: [
          {
            name: "broca",
            # FIX: corrected typo "Formating" -> "Formatting" in the
            # description sent to the API.
            description: "Formatting the content with the specified schema",
            parameters: schema
          }
        ]
      }
    )
    LlmMemory.logger.debug(response)
    message = response.dig("choices", 0, "message")
    if message["role"] == "assistant" && message["function_call"]
      function_name = message.dig("function_call", "name")
      args =
        JSON.parse(
          message.dig("function_call", "arguments"),
          {symbolize_names: true}
        )
      # Only trust arguments addressed to our declared function.
      args if function_name == "broca"
    end
  rescue => e
    # Best-effort contract: log the failure and return nil to the caller
    # instead of raising (covers API errors and malformed JSON alike).
    LlmMemory.logger.info(e.inspect)
    nil
  end
end
85
+
46
86
  def generate_prompt(args)
47
87
  erb = ERB.new(@prompt)
48
88
  erb.result_with_hash(args)
@@ -1,5 +1,5 @@
1
1
  # frozen_string_literal: true
2
2
 
3
3
  module LlmMemory
4
- VERSION = "0.1.10"
4
+ VERSION = "0.1.11"
5
5
  end
metadata CHANGED
@@ -1,14 +1,14 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: llm_memory
3
3
  version: !ruby/object:Gem::Version
4
- version: 0.1.10
4
+ version: 0.1.11
5
5
  platform: ruby
6
6
  authors:
7
7
  - Shohei Kameda
8
8
  autorequire:
9
9
  bindir: exe
10
10
  cert_chain: []
11
- date: 2023-06-02 00:00:00.000000000 Z
11
+ date: 2023-07-02 00:00:00.000000000 Z
12
12
  dependencies:
13
13
  - !ruby/object:Gem::Dependency
14
14
  name: tokenizers