llm_memory 0.1.5 → 0.1.6

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: aea28ca9fb65d35a8d98964ac433cb7c445d8137a2672bcf542d35fa31935582
- data.tar.gz: 02d4d7a619eb3031df0310c2fa9bef9105035f732bc1d9e5381a3dd2d8ded836
+ metadata.gz: 72e01db010504ae3ec7877d5122ecba01bf7c1f15140e7ed94d9e66c62e2ea40
+ data.tar.gz: 8d170f4f7fda8c1af72fe62bb7873f3758fba9960babb1dc562b048bb4201f09
  SHA512:
- metadata.gz: d30618749a0b4016a2ca9cd2815cb6b7b4971a46c50ea83fa3e5b30d1e0813127053b576882587634f166cff7312f3898c6183f71a32ea724b53deea5d676936
- data.tar.gz: 4957d9857a4a5b05cd725b45e44c44b9ecfb695034e6c2e6c45ed80c9c819d0b581fdc4f9977d4af9180b15af983f544df62507e5d25001f0e508ff90acfa8b6
+ metadata.gz: be7ac41f195ae267c60b917f5640be23bf3a458fea0d6fd9de1084367db6b89bc517a6285fc4459ad5f0a36fb82257bf7cb0dc925233910a567f15fb74e1fb7e
+ data.tar.gz: '0888eea1d469818a6ea2272b164218d934b3a1591e7e7179d74d5748e4e3765082a0ea40a3177793a8be60b82f702341f8eb4ac387639c8d219cbb09de263908'
data/Gemfile.lock CHANGED
@@ -1,7 +1,7 @@
  PATH
  remote: .
  specs:
- llm_memory (0.1.5)
+ llm_memory (0.1.6)
  redis (~> 4.6.0)
  ruby-openai (~> 3.7.0)
  tokenizers (~> 0.3.3)
@@ -32,11 +32,12 @@ module LlmMemory
  temperature: @temperature
  }
  )
+ LlmMemory.logger.debug(response)
  response_content = response.dig("choices", 0, "message", "content")
- @messages.push({role: "system", content: response_content})
+ @messages.push({role: "system", content: response_content}) unless response_content.nil?
  response_content
  rescue => e
- puts e.inspect
+ LlmMemory.logger.info(e.inspect)
  # @messages = []
  nil
  end
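
The new nil guard matters because Hash#dig simply returns nil when the response contains no "choices" key (for example, an error payload), and without the guard a nil-content message would be appended to the history and sent back on the next request. A minimal sketch of that failure shape (the error hash below is hypothetical, not taken from the gem):

    # Hypothetical payload with no "choices" key.
    response = {"error" => {"message" => "rate limit reached"}}

    content = response.dig("choices", 0, "message", "content") # => nil

    messages = []
    messages.push({role: "system", content: content}) unless content.nil?
    messages # => [] -- nothing appended, so later requests stay clean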
@@ -52,8 +53,9 @@ module LlmMemory
  new_messages = []
  @messages.reverse_each do |message|
  encoded = tokenizer.encode(message[:content], add_special_tokens: true)
- if count < @max_token
- count += encoded.tokens.length
+ token_count = encoded.tokens.length
+ count += token_count
+ if count <= @max_token
  new_messages.push(message)
  else
  break
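
This reordering fixes an overshoot in the truncation loop: the old code compared count before adding the current message's tokens, so the last message kept could push the window past @max_token; the new code adds first and keeps a message only while the running total stays within the budget. A standalone sketch of the same pattern (the budget and token counts below are made up for illustration):

    max_token = 10
    token_counts = [4, 5, 3] # newest message first, as with reverse_each

    kept = []
    count = 0
    token_counts.each do |token_count|
      count += token_count
      break unless count <= max_token
      kept << token_count
    end

    kept # => [4, 5]; the old `count < max_token` check would also have kept
         #    the 3-token message and exceeded the 10-token budget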
@@ -1,5 +1,5 @@
  # frozen_string_literal: true

  module LlmMemory
- VERSION = "0.1.5"
+ VERSION = "0.1.6"
  end
data/lib/llm_memory.rb CHANGED
@@ -1,19 +1,24 @@
  # frozen_string_literal: true

+ require "logger"
  # config
  require_relative "llm_memory/configuration"
-
  require_relative "llm_memory/hippocampus"
  require_relative "llm_memory/broca"
  require_relative "llm_memory/wernicke"
-
  require_relative "llm_memory/version"

  module LlmMemory
  class Error < StandardError; end

  class << self
- attr_accessor :configuration
+ attr_accessor :configuration, :log_level
+
+ def logger
+ @logger ||= Logger.new($stdout).tap do |logger|
+ logger.level = log_level || Logger::INFO
+ end
+ end
  end

  def self.configure
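
Because the logger is memoized (@logger ||= ...) and reads log_level only when it is first built, callers who want to see the new debug output (such as the raw response logged above) should set the level before anything touches LlmMemory.logger. A small usage sketch based on this diff:

    require "llm_memory"

    # Set the level before the memoized logger is created; once it exists,
    # the Logger::INFO default is already baked in.
    LlmMemory.log_level = Logger::DEBUG

    LlmMemory.logger.debug("now visible") # written to $stdout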
metadata CHANGED
@@ -1,7 +1,7 @@
  --- !ruby/object:Gem::Specification
  name: llm_memory
  version: !ruby/object:Gem::Version
- version: 0.1.5
+ version: 0.1.6
  platform: ruby
  authors:
  - Shohei Kameda