active_prompt_rails 0.1.21 → 0.1.22

Sign up to get free protection for your applications and access to all of the features.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA256:
3
- metadata.gz: e9c2658bd0d092b2a01c16cb74ea28785c6add022bc740ab5aca3a6f0f85f566
4
- data.tar.gz: a940568ddd601c386746285ce5923a23715efb7a9ddf25c2befe17c6b1f3c0ff
3
+ metadata.gz: b8ebc1137820c3403f5f5dd1e9f30566df779b5cfa512fdc02561a4b073515ca
4
+ data.tar.gz: 90f1bab204127f53a1ef82c284d9e086fb25ac39f2b08c07497dd0a596d1ce71
5
5
  SHA512:
6
- metadata.gz: 2507294a695ca1573613a1fbcbab247cb3164ed76a757e1242d04c991d96571ff885330c7c3c2e5acc2a3d9556a0b70ace6fc6f08cad0bf0780e8168f2430913
7
- data.tar.gz: 13b5cc0a9066b8750d72059eed2e8768690156aeaedda108f3ce99dfb0559071e1a8be66fec2fd0812b92c644d221d3c8cb100dae127d2930cea61cb675a5e0a
6
+ metadata.gz: eeca007dedff098f36cfb1e7e259c12952f45c9708968114c2556c1d13433c552711da1486ef2b0afb4b0109970624e3b5ba512da5868e140dd23b6d3d3a814f
7
+ data.tar.gz: bde818476db497761bf66b6f4dd2cebe639142d1e48f15ca2e9b15b3a88282147100acce30afaedc5c6f0d4c037a0c4e18f3753e9b461f176e1a48a790bf0910
data/CHANGELOG.md CHANGED
@@ -1,5 +1,15 @@
1
1
  ## [Unreleased]
2
2
 
3
+ ## [0.1.22] - 2024-05-16
4
+
5
+ - Added a stub `run()` method, which enables users to prompt the LLM directly by calling `run` on an `ActivePrompt::Base` object.
6
+
7
+ - Added example code in comments for how one could implement an OpenAI LLM integration.
8
+
9
+ ## [0.1.21] - 2024-05-15
10
+
11
+ - Fixed all known bugs; prompts are now working. First functional release.
12
+
3
13
  ## [0.1.3] - 2024-05-15
4
14
 
5
15
  - Require all the lib files into the gem root file
@@ -40,6 +40,10 @@ module ActivePrompt
40
40
  ]
41
41
  end
42
42
 
43
+ def run
44
+ raise NotImplementedError, "Please implement this function using your chosen LLM library"
45
+ end
46
+
43
47
  private
44
48
 
45
49
  def render_template(template_name)
@@ -1,5 +1,5 @@
1
1
  # frozen_string_literal: true
2
2
 
3
3
  module ActivePrompt
4
- VERSION = "0.1.21"
4
+ VERSION = "0.1.22"
5
5
  end
@@ -9,18 +9,41 @@ module ActivePrompt
9
9
  desc "This generator creates a new prompt in the app/prompts directory"
10
10
  source_root File.expand_path("templates", __dir__)
11
11
 
12
+ # rubocop:disable Metrics/MethodLength
12
13
  def create_prompt_files
14
+ unless File.exist?("app/prompts/application_prompt.rb")
15
+ create_file "app/prompts/application_prompt.rb", <<~RUBY
16
+ # frozen_string_literal: true
17
+
18
+ class ApplicationPrompt < ActivePrompt::Base
19
+ def run(model: 'gpt-3.5-turbo-0613', json_output: false, temperature: 1.0)
20
+ # client = OpenAI::Client.new
21
+ # parameters = {
22
+ # model: model,
23
+ # messages: render_messages,
24
+ # temperature: temperature
25
+ # }
26
+ # parameters[:response_format] = { type: 'json_object' } if json_output
27
+ # client.chat(parameters: parameters)
28
+
29
+ raise NotImplementedError, 'please implement this function using your chosen LLM library'
30
+ end
31
+ end
32
+ RUBY
33
+ end
34
+
13
35
  empty_directory "app/prompts/templates/#{name.underscore}_prompt"
14
36
  template "system.liquid", "app/prompts/templates/#{name.underscore}_prompt/system.liquid"
15
37
  template "user.liquid", "app/prompts/templates/#{name.underscore}_prompt/user.liquid"
16
38
  create_file "app/prompts/#{name.underscore}_prompt.rb", <<~RUBY
17
39
  # frozen_string_literal: true
18
40
 
19
- class #{name.camelize}Prompt < ActivePrompt::Base
41
+ class #{name.camelize}Prompt < ApplicationPrompt
20
42
  # variable :name
21
43
  end
22
44
  RUBY
23
45
  end
46
+ # rubocop:enable Metrics/MethodLength
24
47
  end
25
48
  end
26
49
  end
metadata CHANGED
@@ -1,7 +1,7 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: active_prompt_rails
3
3
  version: !ruby/object:Gem::Version
4
- version: 0.1.21
4
+ version: 0.1.22
5
5
  platform: ruby
6
6
  authors:
7
7
  - Shane Perreault