langchainrb 0.1.4 → 0.2.0

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: 7cdb0b5fcbe9d764f04794320bf23e8cbea4dcd5ba7c504981c10a83cb73f09a
- data.tar.gz: da4a97f7ae0fbc3739ee4a37f879a438773768fc1217e236508dbf370a319ade
+ metadata.gz: c43d156af4f33487e653c0f72d0a27a327ebe07b1ecc7558409b8d877854318c
+ data.tar.gz: dba1509aa4494e1a35a08a40ca872bcb9f0b3ad03b7f17c6e8c3716c7d5ce805
  SHA512:
- metadata.gz: 2be391ab8a2f2a235c4c853b1aec09a1c90fc4f537a6c98860cbbd924b07264b9173d1f28c0372440fd4c6a910c12c7d4fc1068c5ed02e744c18bedac37ed552
- data.tar.gz: d856e73f6e4fd1df1107c3cbffd9bcaf141bfe1c1a028321ab66c8495b120d90be08d6ab982c442eda3ce4481a955b360601b1daf4754db43b0916c0214af0a2
+ metadata.gz: b800cf71cb8c9193082383d9994f1f40f61d7479e1db57ad970a089dd26e749be684f8f17b388a704aaba595909acb5babe9fa4ab37836953f0d11dbc55efa66
+ data.tar.gz: 9fdb6337593f3fa5902af104255a3e9f9a06214965b8dcb026d3b914094804b1d546c7b46c0e923d8327a216639ee4eb64e0febeafa9616c73d3706d74032037
data/CHANGELOG.md CHANGED
@@ -1,5 +1,16 @@
  ## [Unreleased]
 
- ## [0.1.0] - 2023-04-26
+ ## [0.2.0] - 2023-05-09
+
+ - Prompt Templating
+ - Ability to create prompt templates and save them to JSON files
+ - Default `Prompt::FewShotPromptTemplate`
+ - New examples added to `examples/`
+
+ ## [0.1.4] - 2023-05-02
+
+ - Backfilling missing specs
+
+ ## [0.1.3] - 2023-05-01
 
  - Initial release
data/Gemfile.lock CHANGED
@@ -1,7 +1,7 @@
  PATH
  remote: .
  specs:
- langchainrb (0.1.4)
+ langchainrb (0.2.0)
  cohere-ruby (~> 0.9.1)
  milvus (~> 0.9.0)
  pinecone (~> 0.1.6)
data/README.md CHANGED
@@ -114,12 +114,101 @@ cohere.embed(text: "foo bar")
  cohere.complete(prompt: "What is the meaning of life?")
  ```
 
+ ### Using Prompts
+
+ #### Prompt Templates
+
+ Create a prompt with one input variable:
+
+ ```ruby
+ prompt = Prompt::PromptTemplate.new(template: "Tell me a {adjective} joke.", input_variables: ["adjective"])
+ prompt.format(adjective: "funny") # "Tell me a funny joke."
+ ```
+
+ Create a prompt with multiple input variables:
+
+ ```ruby
+ prompt = Prompt::PromptTemplate.new(template: "Tell me a {adjective} joke about {content}.", input_variables: ["adjective", "content"])
+ prompt.format(adjective: "funny", content: "chickens") # "Tell me a funny joke about chickens."
+ ```
+
+ Creating a PromptTemplate using just a prompt and no input_variables:
+
+ ```ruby
+ prompt = Prompt::PromptTemplate.from_template("Tell me a {adjective} joke about {content}.")
+ prompt.input_variables # ["adjective", "content"]
+ prompt.format(adjective: "funny", content: "chickens") # "Tell me a funny joke about chickens."
+ ```
+
+ Save prompt template to JSON file:
+
+ ```ruby
+ prompt.save(file_path: "spec/fixtures/prompt/prompt_template.json")
+ ```
+
+ Loading a new prompt template using a JSON file:
+
+ ```ruby
+ prompt = Prompt.load_from_path(file_path: "spec/fixtures/prompt/prompt_template.json")
+ prompt.input_variables # ["adjective", "content"]
+ ```
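For reference, `save` simply serializes `PromptTemplate#to_h` (defined later in this diff) to JSON, so the saved fixture is a small, hand-editable file. A sketch of what it would contain for the template above:

```ruby
prompt = Prompt::PromptTemplate.from_template("Tell me a {adjective} joke about {content}.")
prompt.to_h
# => {:_type=>"prompt", :input_variables=>["adjective", "content"], :template=>"Tell me a {adjective} joke about {content}."}

# save writes that hash with to_json, so the file holds:
# {"_type":"prompt","input_variables":["adjective","content"],"template":"Tell me a {adjective} joke about {content}."}
```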
+
+ #### Few Shot Prompt Templates
+
+ Create a prompt with a few shot examples:
+
+ ```ruby
+ prompt = Prompt::FewShotPromptTemplate.new(
+   prefix: "Write antonyms for the following words.",
+   suffix: "Input: {adjective}\nOutput:",
+   example_prompt: Prompt::PromptTemplate.new(
+     input_variables: ["input", "output"],
+     template: "Input: {input}\nOutput: {output}"
+   ),
+   examples: [
+     { "input": "happy", "output": "sad" },
+     { "input": "tall", "output": "short" }
+   ],
+   input_variables: ["adjective"]
+ )
+
+ prompt.format(adjective: "good")
+
+ # Write antonyms for the following words.
+ #
+ # Input: happy
+ # Output: sad
+ #
+ # Input: tall
+ # Output: short
+ #
+ # Input: good
+ # Output:
+ ```
+
+ Save prompt template to JSON file:
+
+ ```ruby
+ prompt.save(file_path: "spec/fixtures/prompt/few_shot_prompt_template.json")
+ ```
+
+ Loading a new prompt template using a JSON file:
+
+ ```ruby
+ prompt = Prompt.load_from_path(file_path: "spec/fixtures/prompt/few_shot_prompt_template.json")
+ prompt.prefix # "Write antonyms for the following words."
+ ```
+
  ## Development
 
  After checking out the repo, run `bin/setup` to install dependencies. Then, run `rake spec` to run the tests. You can also run `bin/console` for an interactive prompt that will allow you to experiment.
 
  To install this gem onto your local machine, run `bundle exec rake install`. To release a new version, update the version number in `version.rb`, and then run `bundle exec rake release`, which will create a git tag for the version, push git commits and the created tag, and push the `.gem` file to [rubygems.org](https://rubygems.org).
 
+ ## Honorary Contributors
+ [<img style="border-radius:50%" alt="Andrei Bondarev" src="https://avatars.githubusercontent.com/u/541665?v=4" width="80" height="80" class="avatar">](https://github.com/andreibondarev)
+ [<img style="border-radius:50%" alt="Rafael Figueiredo" src="https://avatars.githubusercontent.com/u/35845775?v=4" width="80" height="80" class="avatar">](https://github.com/rafaelqfigueiredo)
+
  ## Contributing
 
  Bug reports and pull requests are welcome on GitHub at https://github.com/andreibondarev/langchain.
data/examples/create_and_manage_few_shot_prompt_templates.rb ADDED
@@ -0,0 +1,36 @@
+ require "langchain"
+
+ # Create a prompt with a few shot examples
+ prompt = Prompt::FewShotPromptTemplate.new(
+   prefix: "Write antonyms for the following words.",
+   suffix: "Input: {adjective}\nOutput:",
+   example_prompt: Prompt::PromptTemplate.new(
+     input_variables: ["input", "output"],
+     template: "Input: {input}\nOutput: {output}"
+   ),
+   examples: [
+     { "input": "happy", "output": "sad" },
+     { "input": "tall", "output": "short" }
+   ],
+   input_variables: ["adjective"]
+ )
+
+ prompt.format(adjective: "good")
+
+ # Write antonyms for the following words.
+ #
+ # Input: happy
+ # Output: sad
+ #
+ # Input: tall
+ # Output: short
+ #
+ # Input: good
+ # Output:
+
+ # Save prompt template to JSON file
+ prompt.save(file_path: "spec/fixtures/prompt/few_shot_prompt_template.json")
+
+ # Loading a new prompt template using a JSON file
+ prompt = Prompt.load_from_path(file_path: "spec/fixtures/prompt/few_shot_prompt_template.json")
+ prompt.prefix # "Write antonyms for the following words."
data/examples/create_and_manage_prompt_templates.rb ADDED
@@ -0,0 +1,21 @@
+ require "langchain"
+
+ # Create a prompt with one input variable
+ prompt = Prompt::PromptTemplate.new(template: "Tell me a {adjective} joke.", input_variables: ["adjective"])
+ prompt.format(adjective: "funny") # "Tell me a funny joke."
+
+ # Create a prompt with multiple input variables
+ prompt = Prompt::PromptTemplate.new(template: "Tell me a {adjective} joke about {content}.", input_variables: ["adjective", "content"])
+ prompt.format(adjective: "funny", content: "chickens") # "Tell me a funny joke about chickens."
+
+ # Creating a PromptTemplate using just a prompt and no input_variables
+ prompt = Prompt::PromptTemplate.from_template("Tell me a {adjective} joke about {content}.")
+ prompt.input_variables # ["adjective", "content"]
+ prompt.format(adjective: "funny", content: "chickens") # "Tell me a funny joke about chickens."
+
+ # Save prompt template to JSON file
+ prompt.save(file_path: "spec/fixtures/prompt/prompt_template.json")
+
+ # Loading a new prompt template using a JSON file
+ prompt = Prompt.load_from_path(file_path: "spec/fixtures/prompt/prompt_template.json")
+ prompt.input_variables # ["adjective", "content"]
data/lib/langchain.rb CHANGED
@@ -14,4 +14,12 @@ module LLM
  autoload :Base, "llm/base"
  autoload :Cohere, "llm/cohere"
  autoload :OpenAI, "llm/openai"
+ end
+
+ module Prompt
+   require_relative "prompt/loading"
+
+   autoload :Base, "prompt/base"
+   autoload :PromptTemplate, "prompt/prompt_template"
+   autoload :FewShotPromptTemplate, "prompt/few_shot_prompt_template"
  end
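The `Prompt` constants are registered with `autoload`, so nothing under `lib/prompt` is loaded until a constant is first referenced (only `prompt/loading.rb` is pulled in eagerly via `require_relative`). A minimal sketch, using a made-up template:

```ruby
require "langchain"

# First reference to Prompt::PromptTemplate triggers the autoload of
# lib/prompt/prompt_template.rb declared above.
prompt = Prompt::PromptTemplate.from_template("Hi {name}!")
prompt.format(name: "Ruby") # "Hi Ruby!"
```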
data/lib/prompt/base.rb ADDED
@@ -0,0 +1,86 @@
+ # frozen_string_literal: true
+
+ require 'strscan'
+
+ module Prompt
+   class Base
+     def format(**kwargs)
+       raise NotImplementedError
+     end
+
+     def prompt_type
+       raise NotImplementedError
+     end
+
+     def to_h
+       raise NotImplementedError
+     end
+
+     #
+     # Validate the input variables against the template.
+     #
+     # @param template [String] The template to validate against.
+     # @param input_variables [Array<String>] The input variables to validate.
+     #
+     # @raise [ArgumentError] If there are missing or extra variables.
+     #
+     # @return [void]
+     #
+     def validate(template:, input_variables:)
+       input_variables_set = @input_variables.uniq
+       variables_from_template = Prompt::Base.extract_variables_from_template(template)
+
+       missing_variables = variables_from_template - input_variables_set
+       extra_variables = input_variables_set - variables_from_template
+
+       raise ArgumentError, "Missing variables: #{missing_variables}" if missing_variables.any?
+       raise ArgumentError, "Extra variables: #{extra_variables}" if extra_variables.any?
+     end
+
+     #
+     # Save the object to a file in JSON format.
+     #
+     # @param file_path [String, Pathname] The path to the file to save the object to
+     #
+     # @raise [ArgumentError] If file_path doesn't end with .json
+     #
+     # @return [void]
+     #
+     def save(file_path:)
+       save_path = file_path.is_a?(String) ? Pathname.new(file_path) : file_path
+       directory_path = save_path.dirname
+       FileUtils.mkdir_p(directory_path) unless directory_path.directory?
+
+       if save_path.extname == ".json"
+         File.open(file_path, "w") { |f| f.write(to_h.to_json) }
+       else
+         raise ArgumentError, "#{file_path} must be json"
+       end
+     end
+
+     private
+
+     #
+     # Extracts variables from a template string.
+     #
+     # This method takes a template string and returns an array of input variable names
+     # contained within the template. Input variables are defined as text enclosed in
+     # curly braces (e.g. "{variable_name}").
+     #
+     # @param template [String] The template string to extract variables from.
+     #
+     # @return [Array<String>] An array of input variable names.
+     #
+     def self.extract_variables_from_template(template)
+       input_variables = []
+       scanner = StringScanner.new(template)
+
+       while scanner.scan_until(/\{([^{}]*)\}/)
+         variable = scanner[1].strip
+         input_variables << variable unless variable.empty?
+       end
+
+       input_variables
+     end
+   end
+ end
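To illustrate the validation above: `extract_variables_from_template` collects the brace-delimited names from the template, and `validate` raises when they disagree with the declared `input_variables`. A sketch with made-up values:

```ruby
require "langchain"

# "content" appears in the template but not in input_variables, so the
# validate call in PromptTemplate#initialize raises:
Prompt::PromptTemplate.new(
  template: "Tell me a {adjective} joke about {content}.",
  input_variables: ["adjective"]
)
# raises ArgumentError: Missing variables: ["content"]

# Passing validate_template: false would skip the check entirely.
```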
data/lib/prompt/few_shot_prompt_template.rb ADDED
@@ -0,0 +1,73 @@
+ # frozen_string_literal: true
+
+ module Prompt
+   class FewShotPromptTemplate < Base
+     attr_reader :examples, :example_prompt, :input_variables, :prefix, :suffix, :example_separator
+
+     #
+     # Initializes a new instance of the class.
+     #
+     # @param examples [Array<Hash>] Examples to format into the prompt.
+     # @param example_prompt [PromptTemplate] PromptTemplate used to format an individual example.
+     # @param suffix [String] A prompt template string to put after the examples.
+     # @param input_variables [Array<String>] A list of the names of the variables the prompt template expects.
+     # @param example_separator [String] String separator used to join the prefix, the examples, and suffix.
+     # @param prefix [String] A prompt template string to put before the examples.
+     # @param validate_template [Boolean] Whether or not to try validating the template.
+     #
+     def initialize(
+       examples:,
+       example_prompt:,
+       input_variables:,
+       suffix:,
+       prefix: "",
+       example_separator: "\n\n",
+       validate_template: true
+     )
+       @examples = examples
+       @example_prompt = example_prompt
+       @input_variables = input_variables
+       @prefix = prefix
+       @suffix = suffix
+       @example_separator = example_separator
+
+       validate(template: @prefix + @suffix, input_variables: @input_variables) if @validate_template
+     end
+
+     #
+     # Format the prompt with the inputs.
+     #
+     # @param kwargs [Hash] Any arguments to be passed to the prompt template.
+     #
+     # @return [String] A formatted string.
+     #
+     def format(**kwargs)
+       example_string = @examples.map { |example| @example_prompt.format(**example) }
+
+       suffix_string = @suffix
+       kwargs.each { |key, value| suffix_string = suffix_string.gsub(/\{#{key}\}/, value.to_s) }
+
+       [@prefix, *example_string, suffix_string].join(@example_separator)
+     end
+
+     #
+     # Returns the key type of prompt as a string.
+     #
+     # @return [String] the prompt type key
+     #
+     def prompt_type
+       "few_shot"
+     end
+
+     def to_h
+       {
+         _type: prompt_type,
+         input_variables: @input_variables,
+         prefix: @prefix,
+         example_prompt: @example_prompt.to_h,
+         examples: @examples,
+         suffix: @suffix
+       }
+     end
+   end
+ end
data/lib/prompt/loading.rb ADDED
@@ -0,0 +1,87 @@
+ # frozen_string_literal: true
+
+ require 'strscan'
+ require 'pathname'
+
+ module Prompt
+   TYPE_TO_LOADER = {
+     "prompt" => ->(config) { Prompt.load_prompt(config) },
+     "few_shot" => ->(config) { Prompt.load_few_shot_prompt(config) }
+   }
+
+   class << self
+     #
+     # Load prompt from file.
+     #
+     # @param file_path [String, Pathname] The path of the file to read the configuration data from.
+     #
+     # @return [Object] The loaded prompt.
+     #
+     # @raise [ArgumentError] If the file type of the specified file path is not supported.
+     #
+     def load_from_path(file_path:)
+       file_path = file_path.is_a?(String) ? Pathname.new(file_path) : file_path
+
+       if file_path.extname == ".json"
+         config = JSON.parse(File.read(file_path))
+       else
+         raise ArgumentError, "Got unsupported file type #{file_path.extname}"
+       end
+
+       load_from_config(config)
+     end
+
+     #
+     # Loads a prompt template with the given configuration.
+     #
+     # @param config [Hash] A hash containing the configuration for the prompt.
+     #
+     # @return [PromptTemplate] The loaded prompt.
+     #
+     def load_prompt(config)
+       template, input_variables = config.values_at("template", "input_variables")
+       PromptTemplate.new(template: template, input_variables: input_variables)
+     end
+
+     #
+     # Loads a few shot prompt template with the given configuration.
+     #
+     # @param config [Hash] A hash containing the configuration for the prompt.
+     #
+     # @return [FewShotPromptTemplate] The loaded prompt.
+     #
+     def load_few_shot_prompt(config)
+       prefix, suffix, example_prompt, examples, input_variables = config.values_at("prefix", "suffix", "example_prompt", "examples", "input_variables")
+       example_prompt = load_prompt(example_prompt)
+       FewShotPromptTemplate.new(prefix: prefix, suffix: suffix, example_prompt: example_prompt, examples: examples, input_variables: input_variables)
+     end
+
+     private
+
+     #
+     # Loads the prompt from the given configuration hash
+     #
+     # @param config [Hash] the configuration hash to load from
+     #
+     # @return [Object] the loaded prompt
+     #
+     # @raise [ArgumentError] if the prompt type specified in the config is not supported
+     #
+     def load_from_config(config)
+       # If `_type` key is not present in the configuration hash, add it with a default value of `prompt`
+       unless config.key?("_type")
+         puts "[WARN] No `_type` key found, defaulting to `prompt`"
+         config["_type"] = "prompt"
+       end
+
+       # If the prompt type specified in the configuration hash is not supported, raise an exception
+       unless TYPE_TO_LOADER.key?(config["_type"])
+         raise ArgumentError, "Loading #{config["_type"]} prompt not supported"
+       end
+
+       # Load the prompt using the corresponding loader function from the `TYPE_TO_LOADER` hash
+       prompt_loader = TYPE_TO_LOADER[config["_type"]]
+       prompt_loader.call(config)
+     end
+   end
+ end
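Loading dispatches on the `_type` key via `TYPE_TO_LOADER`, defaulting to `"prompt"` (with a warning) when the key is absent. A sketch, with made-up content, of the config shape `load_few_shot_prompt` expects; it mirrors `FewShotPromptTemplate#to_h` above:

```ruby
require "langchain"

config = {
  "_type" => "few_shot",
  "prefix" => "Write antonyms for the following words.",
  "example_prompt" => {
    "_type" => "prompt",
    "input_variables" => ["input", "output"],
    "template" => "Input: {input}\nOutput: {output}"
  },
  "examples" => [{ "input" => "happy", "output" => "sad" }],
  "suffix" => "Input: {adjective}\nOutput:",
  "input_variables" => ["adjective"]
}

# load_few_shot_prompt rebuilds the nested example_prompt first, then the
# surrounding few shot template.
Prompt.load_few_shot_prompt(config) # => a Prompt::FewShotPromptTemplate
```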
data/lib/prompt/prompt_template.rb ADDED
@@ -0,0 +1,62 @@
+ # frozen_string_literal: true
+
+ module Prompt
+   class PromptTemplate < Base
+     attr_reader :template, :input_variables, :validate_template
+
+     #
+     # Initializes a new instance of the class.
+     #
+     # @param template [String] The prompt template.
+     # @param input_variables [Array<String>] A list of the names of the variables the prompt template expects.
+     # @param validate_template [Boolean] Whether or not to try validating the template.
+     #
+     def initialize(template:, input_variables:, validate_template: true)
+       @template = template
+       @input_variables = input_variables
+       @validate_template = validate_template
+
+       validate(template: @template, input_variables: @input_variables) if @validate_template
+     end
+
+     #
+     # Format the prompt with the inputs.
+     #
+     # @param kwargs [Hash] Any arguments to be passed to the prompt template.
+     # @return [String] A formatted string.
+     #
+     def format(**kwargs)
+       result = @template
+       kwargs.each { |key, value| result = result.gsub(/\{#{key}\}/, value.to_s) }
+       result
+     end
+
+     #
+     # Returns the key type of prompt as a string.
+     #
+     # @return [String] the prompt type key
+     #
+     def prompt_type
+       "prompt"
+     end
+
+     def to_h
+       {
+         _type: prompt_type,
+         input_variables: @input_variables,
+         template: @template
+       }
+     end
+
+     #
+     # Creates a new instance of the class using the given template.
+     #
+     # @param template [String] The template to use
+     #
+     # @return [Object] A new instance of the class
+     #
+     def self.from_template(template)
+       new(template: template, input_variables: extract_variables_from_template(template))
+     end
+   end
+ end
data/lib/vectorsearch/base.rb CHANGED
@@ -38,12 +38,21 @@ module Vectorsearch
  :default_dimension
 
  def generate_prompt(question:, context:)
-   "Context:\n" +
-   "#{context}\n" +
-   "---\n" +
-   "Question: #{question}\n" +
-   "---\n" +
-   "Answer:"
+   prompt_template = Prompt::FewShotPromptTemplate.new(
+     prefix: "Context:",
+     suffix: "---\nQuestion: {question}\n---\nAnswer:",
+     example_prompt: Prompt::PromptTemplate.new(
+       template: "{context}",
+       input_variables: ["context"]
+     ),
+     examples: [
+       { context: context }
+     ],
+     input_variables: ["question"],
+     example_separator: "\n"
+   )
+
+   prompt_template.format(question: question)
  end
 
  private
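With `example_separator: "\n"` the refactored `generate_prompt` yields the same shape as the old string concatenation. A sketch that rebuilds the template standalone, with made-up question and context values:

```ruby
require "langchain"

question = "What is the meaning of life?"
context = "Life is the condition that distinguishes animals and plants from inorganic matter."

# Same construction as in generate_prompt above, outside the class for illustration.
prompt_template = Prompt::FewShotPromptTemplate.new(
  prefix: "Context:",
  suffix: "---\nQuestion: {question}\n---\nAnswer:",
  example_prompt: Prompt::PromptTemplate.new(template: "{context}", input_variables: ["context"]),
  examples: [{ context: context }],
  input_variables: ["question"],
  example_separator: "\n"
)

puts prompt_template.format(question: question)
# Context:
# Life is the condition that distinguishes animals and plants from inorganic matter.
# ---
# Question: What is the meaning of life?
# ---
# Answer:
```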
data/lib/version.rb CHANGED
@@ -1,5 +1,5 @@
  # frozen_string_literal: true
 
  module Langchain
-   VERSION = "0.1.4"
+   VERSION = "0.2.0"
  end
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: langchainrb
  version: !ruby/object:Gem::Version
- version: 0.1.4
+ version: 0.2.0
  platform: ruby
  authors:
  - Andrei Bondarev
  autorequire:
  bindir: exe
  cert_chain: []
- date: 2023-05-02 00:00:00.000000000 Z
+ date: 2023-05-09 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  name: pry-byebug
@@ -137,6 +137,8 @@ files:
  - README.md
  - Rakefile
  - examples/.keep
+ - examples/create_and_manage_few_shot_prompt_templates.rb
+ - examples/create_and_manage_prompt_templates.rb
  - examples/store_and_query_with_pinecone.rb
  - examples/store_and_query_with_qdrant.rb
  - examples/store_and_query_with_weaviate.rb
@@ -144,6 +146,10 @@ files:
  - lib/llm/base.rb
  - lib/llm/cohere.rb
  - lib/llm/openai.rb
+ - lib/prompt/base.rb
+ - lib/prompt/few_shot_prompt_template.rb
+ - lib/prompt/loading.rb
+ - lib/prompt/prompt_template.rb
  - lib/vectorsearch/base.rb
  - lib/vectorsearch/milvus.rb
  - lib/vectorsearch/pinecone.rb