llm_memory 0.1.4 → 0.1.6
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/Gemfile +1 -1
- data/Gemfile.lock +5 -5
- data/lib/llm_memory/broca.rb +9 -7
- data/lib/llm_memory/version.rb +1 -1
- data/lib/llm_memory.rb +8 -3
- data/llm_memory.gemspec +1 -1
- metadata +5 -5
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 72e01db010504ae3ec7877d5122ecba01bf7c1f15140e7ed94d9e66c62e2ea40
+  data.tar.gz: 8d170f4f7fda8c1af72fe62bb7873f3758fba9960babb1dc562b048bb4201f09
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: be7ac41f195ae267c60b917f5640be23bf3a458fea0d6fd9de1084367db6b89bc517a6285fc4459ad5f0a36fb82257bf7cb0dc925233910a567f15fb74e1fb7e
+  data.tar.gz: '0888eea1d469818a6ea2272b164218d934b3a1591e7e7179d74d5748e4e3765082a0ea40a3177793a8be60b82f702341f8eb4ac387639c8d219cbb09de263908'
data/Gemfile
CHANGED
data/Gemfile.lock
CHANGED
@@ -1,10 +1,10 @@
 PATH
   remote: .
   specs:
-    llm_memory (0.1.
+    llm_memory (0.1.6)
       redis (~> 4.6.0)
       ruby-openai (~> 3.7.0)
-
+      tokenizers (~> 0.3.3)
 
 GEM
   remote: https://rubygems.org/
@@ -81,8 +81,8 @@ GEM
     standard-performance (1.0.1)
       lint_roller (~> 1.0)
       rubocop-performance (~> 1.16.0)
-
-
+    tokenizers (0.3.3-arm64-darwin)
+    tokenizers (0.3.3-x86_64-linux)
     unicode-display_width (2.4.2)
     vcr (6.1.0)
     webmock (3.18.1)
@@ -103,7 +103,7 @@ DEPENDENCIES
   rspec (~> 3.0)
   ruby-openai
   standard (~> 1.3)
-
+  tokenizers
   vcr (~> 6.1.0)
   webmock (~> 3.18.1)
 
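The lockfile now pins the tokenizers gem (~> 0.3.3), resolved to prebuilt arm64-darwin and x86_64-linux binaries, in place of the dependency removed above (its name is truncated in this diff). A minimal sketch of what the new dependency provides, using only the public tokenizers API; the sample string is illustrative:

require "tokenizers"

# Download the pretrained GPT-2 tokenizer and count tokens in a string.
tokenizer = Tokenizers.from_pretrained("gpt2")
encoded = tokenizer.encode("LLM memory needs a token budget")
puts encoded.tokens.length # number of GPT-2 tokens
puts encoded.ids.inspect   # corresponding token ids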
data/lib/llm_memory/broca.rb
CHANGED
@@ -1,5 +1,5 @@
 require "erb"
-require "
+require "tokenizers"
 
 module LlmMemory
   class Broca
@@ -32,11 +32,12 @@ module LlmMemory
           temperature: @temperature
         }
       )
+      LlmMemory.logger.debug(response)
       response_content = response.dig("choices", 0, "message", "content")
-      @messages.push({role: "system", content: response_content})
+      @messages.push({role: "system", content: response_content}) unless response_content.nil?
       response_content
     rescue => e
-
+      LlmMemory.logger.info(e.inspect)
       # @messages = []
       nil
     end
@@ -51,9 +52,10 @@ module LlmMemory
       count = 0
       new_messages = []
       @messages.reverse_each do |message|
-        encoded = tokenizer.encode(message[:content])
-
-
+        encoded = tokenizer.encode(message[:content], add_special_tokens: true)
+        token_count = encoded.tokens.length
+        count += token_count
+        if count <= @max_token
           new_messages.push(message)
         else
           break
@@ -63,7 +65,7 @@ module LlmMemory
     end
 
     def tokenizer
-      @tokenizer ||=
+      @tokenizer ||= Tokenizers.from_pretrained("gpt2")
     end
   end
 end
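The hunks above route the OpenAI response and any rescued error through the new LlmMemory.logger, guard against nil response content, and rebuild the token budgeting on top of the tokenizers gem: messages are walked newest-first, each message's GPT-2 token count is accumulated, and messages are kept only while the running total stays within @max_token. A standalone sketch of that trimming loop; the helper name trim_messages and the final reverse back to chronological order are assumptions, since the enclosing method is outside the hunk shown:

require "tokenizers"

# Keep the most recent messages whose cumulative GPT-2 token count fits the budget.
def trim_messages(messages, max_token)
  tokenizer = Tokenizers.from_pretrained("gpt2")
  count = 0
  kept = []
  messages.reverse_each do |message|
    encoded = tokenizer.encode(message[:content], add_special_tokens: true)
    count += encoded.tokens.length
    break if count > max_token
    kept.push(message)
  end
  kept.reverse # assumed: restore chronological order for the API call
end

messages = [
  {role: "user", content: "Summarize the release notes."},
  {role: "system", content: "0.1.6 switches token counting to the tokenizers gem."}
]
trim_messages(messages, 4096)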
data/lib/llm_memory/version.rb
CHANGED
data/lib/llm_memory.rb
CHANGED
@@ -1,19 +1,24 @@
 # frozen_string_literal: true
 
+require "logger"
 # config
 require_relative "llm_memory/configuration"
-
 require_relative "llm_memory/hippocampus"
 require_relative "llm_memory/broca"
 require_relative "llm_memory/wernicke"
-
 require_relative "llm_memory/version"
 
 module LlmMemory
   class Error < StandardError; end
 
   class << self
-    attr_accessor :configuration
+    attr_accessor :configuration, :log_level
+
+    def logger
+      @logger ||= Logger.new($stdout).tap do |logger|
+        logger.level = log_level || Logger::INFO
+      end
+    end
   end
 
   def self.configure
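llm_memory.rb now requires the standard library Logger and exposes a memoized LlmMemory.logger writing to $stdout, with its level taken from the new log_level accessor and defaulting to Logger::INFO. A hedged usage sketch from the consumer side; because the logger is memoized with ||=, assigning log_level only takes effect if it happens before the first LlmMemory.logger call:

require "llm_memory"

# Raise verbosity so the debug logging added in broca.rb reaches $stdout.
LlmMemory.log_level = Logger::DEBUG

LlmMemory.logger.debug("chat response payloads will now be logged")
LlmMemory.logger.info("INFO remains the default when log_level is unset")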
data/llm_memory.gemspec
CHANGED
@@ -31,7 +31,7 @@ Gem::Specification.new do |spec|
 
   # Uncomment to register a new dependency of your gem
   # spec.add_dependency "example-gem", "~> 1.0"
-  spec.add_dependency "
+  spec.add_dependency "tokenizers", "~> 0.3.3"
   spec.add_dependency "ruby-openai", "~> 3.7.0"
   spec.add_dependency "redis", "~> 4.6.0"
 
metadata
CHANGED
@@ -1,29 +1,29 @@
 --- !ruby/object:Gem::Specification
 name: llm_memory
 version: !ruby/object:Gem::Version
-  version: 0.1.
+  version: 0.1.6
 platform: ruby
 authors:
 - Shohei Kameda
 autorequire:
 bindir: exe
 cert_chain: []
-date: 2023-05-
+date: 2023-05-11 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
-  name:
+  name: tokenizers
   requirement: !ruby/object:Gem::Requirement
     requirements:
     - - "~>"
       - !ruby/object:Gem::Version
-        version: 0.
+        version: 0.3.3
   type: :runtime
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
     - - "~>"
       - !ruby/object:Gem::Version
-        version: 0.
+        version: 0.3.3
 - !ruby/object:Gem::Dependency
   name: ruby-openai
   requirement: !ruby/object:Gem::Requirement