active_prompt_rails 0.1.20 → 0.1.22
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/CHANGELOG.md +10 -0
- data/README.md +1 -1
- data/active_prompt.gemspec +1 -2
- data/lib/active_prompt/base.rb +4 -0
- data/lib/active_prompt/version.rb +1 -1
- data/lib/generators/active_prompt/prompt_generator.rb +24 -1
- metadata +2 -3
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: b8ebc1137820c3403f5f5dd1e9f30566df779b5cfa512fdc02561a4b073515ca
+  data.tar.gz: 90f1bab204127f53a1ef82c284d9e086fb25ac39f2b08c07497dd0a596d1ce71
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: eeca007dedff098f36cfb1e7e259c12952f45c9708968114c2556c1d13433c552711da1486ef2b0afb4b0109970624e3b5ba512da5868e140dd23b6d3d3a814f
+  data.tar.gz: bde818476db497761bf66b6f4dd2cebe639142d1e48f15ca2e9b15b3a88282147100acce30afaedc5c6f0d4c037a0c4e18f3753e9b461f176e1a48a790bf0910
data/CHANGELOG.md
CHANGED
@@ -1,5 +1,15 @@
 ## [Unreleased]
 
+## [0.1.22] - 2024-05-16
+
+- Added stub for `run()` method, which enables users to directly prompt the LLM by calling an ActivePrompt::Base object.
+
+- Added example code in comments for how one could implement an OpenAI LLM integration.
+
+## [0.1.21] - 2024-05-15
+
+- Fixed all bugs and prompts are now working. First release.
+
 ## [0.1.3] - 2024-05-15
 
 - Require all the lib files into the gem root file
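To make the 0.1.22 entry above concrete, here is a minimal sketch of what "calling an ActivePrompt::Base object" could look like from application code. The subclass name, the uncommented `variable :name` declaration, and the keyword-argument constructor are illustrative assumptions; this diff only shows the `run` stub and a commented `# variable :name` hint.

```ruby
# Hypothetical prompt subclass and call site. Names, the uncommented
# `variable :name`, and the keyword-argument constructor are assumptions
# made for illustration; they are not taken verbatim from the gem.
class WelcomeEmailPrompt < ApplicationPrompt
  variable :name
end

# Per the 0.1.22 changelog entry, the prompt object can be called directly.
# The generated `run` stub raises NotImplementedError until it is wired to a
# real LLM client (see the generator diff and the sketch further below).
prompt = WelcomeEmailPrompt.new(name: "Ada")
prompt.run(model: "gpt-3.5-turbo-0613", temperature: 0.7)
```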
data/README.md
CHANGED
@@ -7,7 +7,7 @@ ActivePrompt is a gem that helps you structure and manage LLM prompts in your ra
 Add the following line to your Gemfile:
 
 ```Gemfile
-gem "active_prompt_rails", "~> 0.1.
+gem "active_prompt_rails", "~> 0.1.21", require: "active_prompt"
 ```
 
 and run `bundle install`
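For readers following the install step above, a minimal Gemfile sketch is shown below. Only the `active_prompt_rails` line comes from the updated README; the Rails entry is an assumed host-app dependency, not something this diff specifies.

```ruby
# Gemfile (sketch): only the active_prompt_rails line is taken from the README diff above.
source "https://rubygems.org"

gem "rails", "~> 7.1" # assumed host application dependency, not specified by this diff
gem "active_prompt_rails", "~> 0.1.21", require: "active_prompt"
```

After `bundle install`, the generator shown later in this diff creates prompt classes under `app/prompts/` together with `system.liquid` and `user.liquid` templates.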
data/active_prompt.gemspec
CHANGED
@@ -9,8 +9,7 @@ Gem::Specification.new do |spec|
   spec.authors = ["Shane Perreault"]
   spec.email = ["shaneprrlt@gmail.com"]
   spec.summary = "Easily create and manage LLM prompt templates in your rails app."
-  spec.description = "Easily create and manage LLM prompt templates in your rails app."
-    "Use your own custom templating engine like ERB or Shopify Liquid."
+  spec.description = "Easily create and manage LLM prompt templates in your rails app."
   spec.homepage = "https://www.github.com/Shaneprrlt/active_prompt"
   spec.required_ruby_version = ">= 2.6.0"
   spec.metadata["homepage_uri"] = spec.homepage
data/lib/generators/active_prompt/prompt_generator.rb
CHANGED
@@ -9,18 +9,41 @@ module ActivePrompt
       desc "This generator creates a new prompt in the app/prompts directory"
       source_root File.expand_path("templates", __dir__)
 
+      # rubocop:disable Metrics/MethodLength
       def create_prompt_files
+        unless File.exist?("app/prompts/application_prompt.rb")
+          create_file "app/prompts/application_prompt.rb", <<~RUBY
+            # frozen_string_literal: true
+
+            class ApplicationPrompt < ActivePrompt::Base
+              def run(model: 'gpt-3.5-turbo-0613', json_output: false, temperature: 1.0)
+                # client = OpenAI::Client.new
+                # parameters = {
+                #   model: model,
+                #   messages: render_messages,
+                #   temperature: temperature
+                # }
+                # parameters[:response_format] = { type: 'json_object' } if json_output
+                # client.chat(parameters: parameters)
+
+                raise NotImplementedError, 'please implement this function using your chosen LLM library'
+              end
+            end
+          RUBY
+        end
+
         empty_directory "app/prompts/templates/#{name.underscore}_prompt"
         template "system.liquid", "app/prompts/templates/#{name.underscore}_prompt/system.liquid"
         template "user.liquid", "app/prompts/templates/#{name.underscore}_prompt/user.liquid"
         create_file "app/prompts/#{name.underscore}_prompt.rb", <<~RUBY
           # frozen_string_literal: true
 
-          class #{name.camelize}Prompt <
+          class #{name.camelize}Prompt < ApplicationPrompt
             # variable :name
           end
         RUBY
       end
+      # rubocop:enable Metrics/MethodLength
     end
   end
 end
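The commented-out code in the generated `ApplicationPrompt#run` stub suggests one possible OpenAI integration. Below is a minimal sketch of what a filled-in version might look like, assuming the ruby-openai gem is installed and configured with an API key; `render_messages` is taken from the gem's own commented example, while the response handling at the end is an assumption about how a caller might consume the result.

```ruby
# frozen_string_literal: true

# app/prompts/application_prompt.rb (sketch): a possible completion of the
# generated stub, assuming the ruby-openai gem and a configured access token.
class ApplicationPrompt < ActivePrompt::Base
  def run(model: 'gpt-3.5-turbo-0613', json_output: false, temperature: 1.0)
    client = OpenAI::Client.new
    parameters = {
      model: model,
      messages: render_messages, # from the gem's commented example; renders the Liquid templates
      temperature: temperature
    }
    parameters[:response_format] = { type: 'json_object' } if json_output

    response = client.chat(parameters: parameters)
    # Returning the first message body is an assumption about the desired
    # return value; ruby-openai's chat call returns a plain Hash.
    response.dig('choices', 0, 'message', 'content')
  end
end
```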
metadata
CHANGED
@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: active_prompt_rails
 version: !ruby/object:Gem::Version
-  version: 0.1.
+  version: 0.1.22
 platform: ruby
 authors:
 - Shane Perreault
@@ -50,8 +50,7 @@ dependencies:
     - - ">="
       - !ruby/object:Gem::Version
         version: 6.0.0
-description: Easily create and manage LLM prompt templates in your rails app.
-  own custom templating engine like ERB or Shopify Liquid.
+description: Easily create and manage LLM prompt templates in your rails app.
 email:
 - shaneprrlt@gmail.com
 executables: []
|