llm_memory 0.1.1 → 0.1.3

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz: b81b1033f4e0ade103fd1c73317e0ef575244225ccce51fab50f232f2d138e37
-  data.tar.gz: 945bd74694fc40c020df96763dfe96df2b21d402648b2b7f948c526a49532b7c
+  metadata.gz: 85f3330ed767cc28b5a3276a7772678c36726ce04562c184a4e6dcf287d14d1d
+  data.tar.gz: 3e1ae83b2517f7f80bb857de5024b62b886bb6a045428f2d40159e48818cf286
 SHA512:
-  metadata.gz: 357abfa9fcc316129c40e3fc869cab7460812e32ea1005efa2b039b84d537f6a307f6fb80f112920bc0c3e4be2fe02fa0af94e8451b9f1e6280658e79513133b
-  data.tar.gz: 818c51b4c18f0277d99ec85dbf0e80c88d05caea894035cd0cbb90cab9e3853b2f760b4a1ff8690afa00172d1ae7656df1db2a7c24618d73fc5fafa39b855458
+  metadata.gz: dba158d07c4a97a5b2e6bb1a98987173ba427a18a1a06d0a422f366ab2f7335ac168aea0351657b0c7f957352e6ffe747a2d817deb7c8d5454010c2ec535999a
+  data.tar.gz: 1db3ccb501f3b56d6e6dc00997f3f085b74f395521ffbbc1775bc5799bc388410523f6f209cfc185efbbf1437177c07dd5ee1725a49f6a27b207c6fc296067aa
data/Gemfile.lock CHANGED
@@ -1,7 +1,10 @@
 PATH
   remote: .
   specs:
-    llm_memory (0.1.0)
+    llm_memory (0.1.3)
+      redis (~> 4.6.0)
+      ruby-openai (~> 3.7.0)
+      tiktoken_ruby (~> 0.0.4)
 
 GEM
   remote: https://rubygems.org/
@@ -10,23 +13,20 @@ GEM
       public_suffix (>= 2.0.2, < 6.0)
     ast (2.4.2)
     coderay (1.1.3)
-    connection_pool (2.4.0)
     crack (0.4.5)
       rexml
     diff-lcs (1.5.0)
     dotenv (2.8.1)
-    faraday (2.7.4)
-      faraday-net_http (>= 2.0, < 3.1)
-      ruby2_keywords (>= 0.0.4)
-    faraday-multipart (1.0.4)
-      multipart-post (~> 2)
-    faraday-net_http (3.0.2)
     hashdiff (1.0.1)
+    httparty (0.21.0)
+      mini_mime (>= 1.0.0)
+      multi_xml (>= 0.5.2)
     json (2.6.3)
     language_server-protocol (3.17.0.3)
     lint_roller (1.0.0)
     method_source (1.0.0)
-    multipart-post (2.3.0)
+    mini_mime (1.1.2)
+    multi_xml (0.6.0)
     parallel (1.23.0)
     parser (3.2.2.1)
       ast (~> 2.4.1)
@@ -36,10 +36,7 @@ GEM
     public_suffix (5.0.1)
     rainbow (3.1.1)
     rake (13.0.6)
-    redis (5.0.6)
-      redis-client (>= 0.9.0)
-    redis-client (0.14.1)
-      connection_pool
+    redis (4.6.0)
     regexp_parser (2.8.0)
     rexml (3.2.5)
     rspec (3.12.0)
@@ -70,11 +67,9 @@ GEM
     rubocop-performance (1.16.0)
       rubocop (>= 1.7.0, < 2.0)
       rubocop-ast (>= 0.4.0)
-    ruby-openai (4.0.0)
-      faraday (>= 1)
-      faraday-multipart (>= 1)
+    ruby-openai (3.7.0)
+      httparty (>= 0.18.1)
     ruby-progressbar (1.13.0)
-    ruby2_keywords (0.0.5)
     standard (1.28.0)
       language_server-protocol (~> 3.17.0.2)
       lint_roller (~> 1.0)
data/README.md CHANGED
@@ -16,7 +16,6 @@ This enables better integration with systems such as Rails and web services whil
 
 ![image](https://user-images.githubusercontent.com/1880965/236099477-421b2003-79d2-4a7c-8f80-1afac4fd616d.png)
 
-
 1. LlmMemory::Wernicke: Responsible for loading external data (currently from files). More loader types are planned for future development.
 
 > Wernicke's area in brain is involved in the comprehension of written and spoken language
@@ -86,7 +85,7 @@ related_docs = hippocampus.query(query_str, limit: 3)
 #},,,]
 
 # ERB
-template = <<-TEMPLATE
+prompt = <<-TEMPLATE
 Context information is below.
 ---------------------
 <% related_docs.each do |doc| %>
@@ -99,7 +98,7 @@ Given the context information and not prior knowledge,
 answer the question: <%= query_str %>
 TEMPLATE
 
-broca = LlmMemory::Broca.new(prompt: tempate, model: 'gpt-3.5-turbo')
+broca = LlmMemory::Broca.new(prompt: prompt, model: 'gpt-3.5-turbo')
 messages = broca.respond(query_str: query_str, related_docs: related_docs)
 
 ...
@@ -108,6 +107,14 @@ related_docs = hippocampus.query(query_str2, limit: 3)
 message2 = broca.respond(query_str: query_str2, related_docs: related_docs)
 ```
 
+## Plugins
+
+The table below provides a list of plugins utilized by llm_memory. The aim is to keep the core llm_memory lightweight while allowing for easy extensibility through the use of plugins.
+
+| Plugin Name             | Type   | Module   | Link                                                           |
+| ----------------------- | ------ | -------- | -------------------------------------------------------------- |
+| llm_memory_gmail_loader | Loader | Wernicke | [link](https://github.com/shohey1226/llm_memory_gmail_loader) |
+
 ## Development
 
 After checking out the repo, run `bin/setup` to install dependencies. Then, run `rake spec` to run the tests. You can also run `bin/console` for an interactive prompt that will allow you to experiment.
@@ -20,26 +20,31 @@ module LlmMemory
       @max_token = max_token
     end
 
-    def respond(*args)
-      final_prompt = generate_prompt(*args)
+    def respond(args)
+      final_prompt = generate_prompt(args)
       @messages.push({role: "user", content: final_prompt})
       adjust_token_count
-      response = client.chat(
-        parameters: {
-          model: @model,
-          messages: @messages,
-          temperature: @temperature
-        }
-      )
-      response_conent = response.dig("choices", 0, "message", "content")
-      @messages.push({role: "system", content: response_conent})
-      response_conent
+      begin
+        response = client.chat(
+          parameters: {
+            model: @model,
+            messages: @messages,
+            temperature: @temperature
+          }
+        )
+        response_content = response.dig("choices", 0, "message", "content")
+        @messages.push({role: "system", content: response_content})
+        response_content
+      rescue => e
+        puts e.inspect
+        # @messages = []
+        nil
+      end
     end
 
-    def generate_prompt(*args)
-      merged_args = args.reduce(:merge)
+    def generate_prompt(args)
       erb = ERB.new(@prompt)
-      erb.result_with_hash(merged_args)
+      erb.result_with_hash(args)
     end
 
     def adjust_token_count
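
With this change, `respond` and `generate_prompt` take a single hash instead of a splat: keyword arguments at the call site are collected into one positional hash and handed straight to `ERB#result_with_hash`, and a failed OpenAI request is rescued so `respond` returns `nil` instead of raising. A minimal usage sketch of the new call shape, assuming the `prompt` ERB template and `related_docs` from the README example above (the query string here is illustrative):

```ruby
# Sketch only: `prompt` holds the README's ERB template and `related_docs`
# came from hippocampus.query.
broca = LlmMemory::Broca.new(prompt: prompt, model: "gpt-3.5-turbo")

# The keyword arguments are gathered into a single hash, which
# generate_prompt passes directly to ERB#result_with_hash.
answer = broca.respond(query_str: "What is LLM Memory?", related_docs: related_docs)

# respond returns the assistant message content, or nil if the
# OpenAI call raised and was rescued.
puts answer unless answer.nil?
```
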
@@ -1,5 +1,5 @@
 # frozen_string_literal: true
 
 module LlmMemory
-  VERSION = "0.1.1"
+  VERSION = "0.1.3"
 end
data/llm_memory.gemspec CHANGED
@@ -31,6 +31,9 @@ Gem::Specification.new do |spec|
 
   # Uncomment to register a new dependency of your gem
   # spec.add_dependency "example-gem", "~> 1.0"
+  spec.add_dependency "tiktoken_ruby", "~> 0.0.4"
+  spec.add_dependency "ruby-openai", "~> 3.7.0"
+  spec.add_dependency "redis", "~> 4.6.0"
 
   # For more information and examples about making a new gem, check out our
   # guide at: https://bundler.io/guides/creating_gem.html
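
For applications consuming the gem, these new gemspec entries mean that installing 0.1.3 also resolves pinned versions of redis, ruby-openai, and tiktoken_ruby. A minimal Gemfile sketch (the constraint on llm_memory itself is illustrative):

```ruby
# Gemfile sketch: the runtime dependencies below are pulled in transitively
# once llm_memory 0.1.3 is installed.
source "https://rubygems.org"

gem "llm_memory", "~> 0.1.3"
# transitively resolves:
#   redis         (~> 4.6.0)
#   ruby-openai   (~> 3.7.0)
#   tiktoken_ruby (~> 0.0.4)
```
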
metadata CHANGED
@@ -1,15 +1,57 @@
 --- !ruby/object:Gem::Specification
 name: llm_memory
 version: !ruby/object:Gem::Version
-  version: 0.1.1
+  version: 0.1.3
 platform: ruby
 authors:
 - Shohei Kameda
 autorequire:
 bindir: exe
 cert_chain: []
-date: 2023-05-09 00:00:00.000000000 Z
-dependencies: []
+date: 2023-05-10 00:00:00.000000000 Z
+dependencies:
+- !ruby/object:Gem::Dependency
+  name: tiktoken_ruby
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - "~>"
+      - !ruby/object:Gem::Version
+        version: 0.0.4
+  type: :runtime
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - "~>"
+      - !ruby/object:Gem::Version
+        version: 0.0.4
+- !ruby/object:Gem::Dependency
+  name: ruby-openai
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - "~>"
+      - !ruby/object:Gem::Version
+        version: 3.7.0
+  type: :runtime
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - "~>"
+      - !ruby/object:Gem::Version
+        version: 3.7.0
+- !ruby/object:Gem::Dependency
+  name: redis
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - "~>"
+      - !ruby/object:Gem::Version
+        version: 4.6.0
+  type: :runtime
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - "~>"
+      - !ruby/object:Gem::Version
+        version: 4.6.0
 description: LLM Memory is a Ruby gem designed to provide large language models (LLMs)
   like ChatGPT with memory using in-context learning. This enables better integration
   with systems such as Rails and web services while providing a more user-friendly