llm_memory 0.1.9 → 0.1.11
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/Gemfile.lock +1 -1
- data/lib/llm_memory/broca.rb +40 -0
- data/lib/llm_memory/hippocampus.rb +1 -1
- data/lib/llm_memory/store.rb +4 -0
- data/lib/llm_memory/version.rb +1 -1
- metadata +2 -2
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
|
|
1
1
|
---
|
2
2
|
SHA256:
|
3
|
-
metadata.gz:
|
4
|
-
data.tar.gz:
|
3
|
+
metadata.gz: c72d46a390a534d27ea5ee90bfb2950b75fd16f5320af2a431a314bcee13be30
|
4
|
+
data.tar.gz: 974f529b0158cf5b4a8d4330dca59fe2fd3c2ee37cdca6465d98abee0f84db18
|
5
5
|
SHA512:
|
6
|
-
metadata.gz:
|
7
|
-
data.tar.gz:
|
6
|
+
metadata.gz: 697d7a93bc637ec00004e68cd83dac1077e9705706e6b0123131acfaac201ce7979eeef63772bae35c53fa1b95ea7123f23399e5ee0ea1b454c689b6aa0f6a33
|
7
|
+
data.tar.gz: ab67fd9ec6c1297b5308dfc28b9475bab46e28aa3b4f2af1694a9053ed7203c80e478320c11663a49583e43762814b85e0bdc52d2ea7002274a282609b1db9bd
|
data/Gemfile.lock
CHANGED
data/lib/llm_memory/broca.rb
CHANGED
@@ -43,6 +43,46 @@ module LlmMemory
|
|
43
43
|
end
|
44
44
|
end
|
45
45
|
|
46
|
+
def respond_with_schema(context: {}, schema: {})
|
47
|
+
response_content = respond(context)
|
48
|
+
begin
|
49
|
+
response = client.chat(
|
50
|
+
parameters: {
|
51
|
+
model: "gpt-3.5-turbo-0613", # as of July 3, 2023
|
52
|
+
messages: [
|
53
|
+
{
|
54
|
+
role: "user",
|
55
|
+
content: response_content
|
56
|
+
}
|
57
|
+
],
|
58
|
+
functions: [
|
59
|
+
{
|
60
|
+
name: "broca",
|
61
|
+
description: "Formating the content with the specified schema",
|
62
|
+
parameters: schema
|
63
|
+
}
|
64
|
+
]
|
65
|
+
}
|
66
|
+
)
|
67
|
+
LlmMemory.logger.debug(response)
|
68
|
+
message = response.dig("choices", 0, "message")
|
69
|
+
if message["role"] == "assistant" && message["function_call"]
|
70
|
+
function_name = message.dig("function_call", "name")
|
71
|
+
args =
|
72
|
+
JSON.parse(
|
73
|
+
message.dig("function_call", "arguments"),
|
74
|
+
{symbolize_names: true}
|
75
|
+
)
|
76
|
+
if function_name == "broca"
|
77
|
+
args
|
78
|
+
end
|
79
|
+
end
|
80
|
+
rescue => e
|
81
|
+
LlmMemory.logger.info(e.inspect)
|
82
|
+
nil
|
83
|
+
end
|
84
|
+
end
|
85
|
+
|
46
86
|
def generate_prompt(args)
|
47
87
|
erb = ERB.new(@prompt)
|
48
88
|
erb.result_with_hash(args)
|
data/lib/llm_memory/store.rb
CHANGED
@@ -15,6 +15,10 @@ module LlmMemory
|
|
15
15
|
raise NotImplementedError, "Each store must implement the 'create_index' method."
|
16
16
|
end
|
17
17
|
|
18
|
+
def index_exists?
|
19
|
+
raise NotImplementedError, "Each store must implement the 'index_exists?' method."
|
20
|
+
end
|
21
|
+
|
18
22
|
def drop_index
|
19
23
|
raise NotImplementedError, "Each store must implement the 'drop_index' method."
|
20
24
|
end
|
data/lib/llm_memory/version.rb
CHANGED
metadata
CHANGED
@@ -1,14 +1,14 @@
|
|
1
1
|
--- !ruby/object:Gem::Specification
|
2
2
|
name: llm_memory
|
3
3
|
version: !ruby/object:Gem::Version
|
4
|
-
version: 0.1.9
|
4
|
+
version: 0.1.11
|
5
5
|
platform: ruby
|
6
6
|
authors:
|
7
7
|
- Shohei Kameda
|
8
8
|
autorequire:
|
9
9
|
bindir: exe
|
10
10
|
cert_chain: []
|
11
|
-
date: 2023-
|
11
|
+
date: 2023-07-02 00:00:00.000000000 Z
|
12
12
|
dependencies:
|
13
13
|
- !ruby/object:Gem::Dependency
|
14
14
|
name: tokenizers
|