luogu 0.1.6 → 0.1.8

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz: 5dc8912fdf448f5a9cb2f27d32125bf5a82b5231a8c74f36c03c2269d3345a14
-  data.tar.gz: 9039e6a2ebf32390d96625e734e63c49a9fda770945d8240a4a11dc3d8af114b
+  metadata.gz: 1d415e44469be65939709764b50b5283da141d5e68663acf5e3c376044407970
+  data.tar.gz: 18846bbcd392e9581f6d9becff711b2e551983759460464b169824e14f3c05a1
 SHA512:
-  metadata.gz: bfc85f6ebf5dc6970243291b277654a86adc93d3a917e4c571f7e2a7b08bbc74fd7d20ee5fbacd4981ba8b983f9b1da05fe97a60da9b8a3b607a4fe5bf8b6b7b
-  data.tar.gz: e5b8db6cb68a539ec09f706fed396dfafccc352817f1072616b66db77843d95fea9c67374fda515407c75cb71273523d0c8ede5793387a4663ab8c8ca9848480
+  metadata.gz: 5eb13f0a6b90843366879a773f816d55dbcdba55f2cc6f06915f9e7f7dbc4a73ceb1916505d482a670755d8197519792d649f0731c12f53071fa87b131bcd62a
+  data.tar.gz: b25c5d0dfa8a2b4d4df66ec9695d226521551ddb77f674722d122ba8e9252546abcfa916d2d379a794f520fedf32247e1dcbeef575739675bdd84ddd705e78f5
data/Gemfile.lock CHANGED
@@ -1,8 +1,9 @@
 PATH
   remote: .
   specs:
-    luogu (0.1.1)
+    luogu (0.1.7)
       dotenv (~> 2.8, >= 2.8.1)
+      dry-cli (~> 1.0)
       ruby-openai (~> 3.7)
       tty-prompt (~> 0.23.1)
 
@@ -10,6 +11,7 @@ GEM
   remote: https://rubygems.org/
   specs:
     dotenv (2.8.1)
+    dry-cli (1.0.0)
     httparty (0.21.0)
       mini_mime (>= 1.0.0)
       multi_xml (>= 0.5.2)
data/README.md CHANGED
@@ -10,10 +10,14 @@
 - 如果需要在终端显示markdown,需要 [glow](https://github.com/charmbracelet/glow)
 
 ### 使用
-- luogu build <file> 编译成对应的json
-- luogu run <file> 测试prompt
-- luogu gen <prompt.json file> <target.md> 用来逆向生成md文件的命令
-- luogu test <file> <yml> 用来跑自动化测试 yaml 一行一句话
+```Bash
+Commands:
+  luogu build PROMPT_FILE [TARGET_FILE]   # 编译 Prompt.md 成能够提交给 ChatGPT API 的 messages. 默认输出为 <同文件名>.json
+  luogu generate JSON_FILE [PROMPT_FILE]  # 根据 ChatGPT messages JSON 来生成 Prompt.md
+  luogu run PROMPT_FILE                   # 编译 Prompt.md 成能够提交给 ChatGPT API 的 messages. 默认输出为 <同文件名>.json
+  luogu test [PROMPT_FILE] [TEST_FILE]    # 测试 Prompt 文件
+  luogu version                           # 打印版本
+```
 
 你可以在项目目录的.env中设置下面的环境变量,或者直接系统设置
 ```
data/lib/luogu/chatgpt.rb CHANGED
@@ -1,26 +1,50 @@
 module Luogu
   class ChatGPT
-    def initialize(file)
+
+    attr_accessor :template, :limit_history, :prompt, :row_history, :history, :temperature, :model_name
+
+    def initialize(file, history_path='.', plugin_file_path=nil)
+      @plugin_file_path = plugin_file_path || file.sub(File.extname(file), ".plugin.rb")
+
+      if File.exist?(@plugin_file_path)
+        @plugin = Plugin.new(@plugin_file_path).load()
+      else
+        @plugin = Plugin.new(@plugin_file_path)
+      end
+
       @temperature = ENV.fetch('OPENAI_TEMPERATURE', '0.7').to_f
       @limit_history = ENV.fetch('OPENAI_LIMIT_HISTORY', '6').to_i * 2
+      @model_name = "gpt-3.5-turbo"
 
+      @history_path = history_path
+      @prompt_file = file
+
       @prompt = PromptParser.new(file)
       @row_history = []
       @history = HistoryQueue.new @limit_history
+
+      @plugin.setup_proc.call(self) if @plugin.setup_proc
     end
 
     def request(messages)
-      response = client.chat(
-        parameters: {
-            model: "gpt-3.5-turbo",
-            messages: messages,
-            temperature: 0.7,
-        })
+      params = {
+        model: @model_name,
+        messages: messages,
+        temperature: @temperature,
+      }
+
+      params = @plugin.before_request_proc.call(self, params) if @plugin.before_request_proc
+      response = client.chat(parameters: params)
+      @plugin.after_request_proc.call(self, response) if @plugin.after_request_proc
+
       response.dig("choices", 0, "message", "content")
     end
 
     def chat(user_message)
+      user_message = @plugin.before_input_proc.call(self, user_message) if @plugin.before_input_proc
       messages = (@prompt.render + @history.to_a) << {role: "user", content: user_message}
+      @plugin.after_input_proc.call(self, messages) if @plugin.after_input_proc
+
       assistant_message = self.request(messages)
 
       self.push_row_history(user_message, assistant_message)
@@ -28,6 +52,8 @@ module Luogu
       if @prompt.ruby_code
         puts "执行文档中的callback"
         instance_eval @prompt.ruby_code, @prompt.file_path, @prompt.ruby_code_line
+      elsif @plugin.before_save_history_proc
+        @plugin.before_save_history_proc.call(self, user_message, assistant_message)
       else
         puts "执行默认的历史记录"
         self.push_history(user_message, assistant_message)
@@ -44,6 +70,7 @@ module Luogu
     def push_history(user_message, assistant_message)
       @history.enqueue({role: "user", content: user_message})
       @history.enqueue({role: "assistant", content: assistant_message})
+      @plugin.after_save_history_proc.call(self, user_message, assistant_message) if @plugin.after_save_history_proc
     end
 
     def ask(message)
@@ -70,8 +97,9 @@ module Luogu
        # 根据用户输入执行相应的操作
        case input
        when "save"
-         self.class.save @row_history, "./prompt.row_history.md"
-         self.class.save @history.to_a, "./prompt.history.md"
+         file_name = File.basename(@prompt_file, ".*")
+         self.class.save @row_history, File.join(@history_path, "#{file_name}.row_history.md")
+         self.class.save @history.to_a, File.join(@history_path, "#{file_name}.history.md")
        when "row history"
          p @row_history
        when "history"
@@ -91,8 +119,10 @@ module Luogu
        self.puts self.chat(message)
      end
      now = Time.now.to_i
-     self.class.save @row_history, "./prompt.row_history.test-#{now}.md"
-     self.class.save @history.to_a, "./prompt.history.test-#{now}.md"
+     file_name = File.basename(@prompt_file, ".*")
+
+     self.class.save @row_history, File.join(@history_path, "#{file_name}-#{now}.row_history.md")
+     self.class.save @history.to_a, File.join(@history_path, "#{file_name}-#{now}.history.md")
     end
 
     class << self
@@ -105,6 +135,7 @@ module Luogu
          text += item[:content]
          text += "\n\n"
        end
+       FileUtils.mkdir_p(File.dirname(file_path))
        File.open(file_path, 'w') do |f|
          f.write(text)
        end
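In short, `ChatGPT.new` now takes an optional history directory and plugin path, defaults the plugin path to `<prompt>.plugin.rb` next to the prompt file, and saves histories under the prompt's own base name. A minimal caller-side sketch under those assumptions (the file names and the top-level `require` are illustrative, not taken from this diff):

```ruby
require "luogu" # assumes the gem's usual entry file; adjust to how luogu is loaded in your setup

# No explicit plugin path: the constructor looks for "prompts/weather.plugin.rb"
# and loads it only if the file exists.
chatgpt = Luogu::ChatGPT.new("prompts/weather.md", "tmp/histories")

# Interactive session; typing "save" now writes
# tmp/histories/weather.row_history.md and tmp/histories/weather.history.md,
# with FileUtils.mkdir_p creating the directory on demand.
chatgpt.run
```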
data/lib/luogu/cli.rb CHANGED
@@ -1,58 +1,90 @@
 module Luogu
-  module_function
-  def cli
-    options = {}
-    subcommands = {}
-
-    OptionParser.new do |opts|
-      opts.banner = "Usage: luogu [command]"
-
-      opts.on("-h", "--help", "Prints help") do
-        puts """
-        luogu build <file> -> 编译prompt
-        luogu run <file> -> 测试 prompt
-        luogu gen <file> <target> -> 根据 json 生成 prompt 文件
-        luogu test <file> <test_file.yml> -> 根据 yaml 来对 prompt 进行测试
-        """
-        exit
+  module CLI
+    module Commands
+      extend Dry::CLI::Registry
+
+      class Version < Dry::CLI::Command
+        desc "打印版本"
+
+        def call(*)
+          puts Luogu::VERSION
+        end
       end
 
-    end.parse!
+      class Build < Dry::CLI::Command
+
+        desc "编译 Prompt.md 成能够提交给 ChatGPT API 的 messages. 默认输出为 <同文件名>.json"
+        argument :prompt_file, type: :string, required: true, desc: "Prompt文件, 使用markdown书写"
+        argument :target_file, type: :string, required: false, desc: "输出 JSON 文件"
+
+        def call(prompt_file: nil, target_file: nil, **)
+          target_file ||= prompt_file.sub(File.extname(prompt_file), ".json")
+          data = PromptParser.new(prompt_file).to_json
+          File.open(target_file, 'w') do |f|
+            f.write(data)
+          end
+        end
 
-
-    subcommands['build'] = Proc.new do |args|
-      data = PromptParser.new(args.first).to_json
-      target_path = args[1] || "./prompt.json"
-      File.open(target_path, 'w') do |f|
-        f.write(data)
       end
-    end
 
-    subcommands['run'] = Proc.new do |args|
-      chatgpt = ChatGPT.new(args.first)
-      chatgpt.run
-    end
+      class Run < Dry::CLI::Command
 
-    subcommands['gen'] = Proc.new do |args|
-      json = JSON.parse File.read(args.first), symbolize_names: true
-      chatgpt = ChatGPT.save(json, args.last)
-    end
+        desc "编译 Prompt.md 成能够提交给 ChatGPT API 的 messages. 默认输出为 <同文件名>.json"
+        argument :prompt_file, type: :string, required: true, desc: "Prompt文件, 使用markdown书写"
+        option :out, type: :string, default: ".", desc: "保存历史时存放的目录,默认为当前目录"
+        option :plugin, type: :string, desc: "运行的时候载入对应的插件"
 
-    subcommands['test'] = Proc.new do |args|
-      promtpt_file = args.first
-      promtpt_test_file = args.last
+        def call(prompt_file: nil, **options)
+          chatgpt = ChatGPT.new(prompt_file, options.fetch(:out), options.fetch(:plugin, nil))
+          chatgpt.run
+        end
 
-      chatgpt = ChatGPT.new(args.first)
+      end
 
-      messages = YAML.load_file(promtpt_test_file)
-      chatgpt.playload messages
-    end
+      class Generate < Dry::CLI::Command
+
+        desc "根据 ChatGPT messages JSON 来生成 Prompt.md"
+        argument :json_file, type: :string, required: true, desc: "ChatGPT 生成的 messages json 文件"
+        argument :prompt_file, type: :string, required: false, desc: "要输出的Prompt文件路径, 默认生成 <同名>.md"
+
+        def call(json_file: nil, prompt_file: nil, **)
+          json = JSON.parse(File.read(json_file), symbolize_names: true)
+          prompt_file ||= json_file.sub(File.extname(json_file), ".md")
+
+          chatgpt = ChatGPT.save(json, prompt_file)
+        end
+
+      end
+
+      class Test < Dry::CLI::Command
+
+        desc "测试 Prompt 文件"
+        argument :prompt_file, type: :string, required: true, desc: "输出 Prompt 文件"
+        argument :test_file, type: :string, required: false, desc: "测试文件, 使用 YAML 文件, 一个字符串数组。默认为 同名.test.yml"
+        option :out, type: :string, default: ".", desc: "保存测试历史时存放的目录,默认为当前目录"
+        option :plugin, type: :string, desc: "运行的时候载入对应的插件"
+
+        def call(prompt_file: nil, test_file: nil, **options)
+          test_file ||= prompt_file.sub(File.extname(prompt_file), ".test.yml")
+
+          chatgpt = ChatGPT.new(prompt_file, options.fetch(:out), options.fetch(:plugin, nil))
+          messages = YAML.load_file(test_file)
+          chatgpt.playload messages
+        end
+
+      end
+
+      register "version", Version, aliases: ["v", "-v", "--version"]
+      register "build", Build, aliases: ["b"]
+      register "run", Run, aliases: ["r"]
+      register "generate", Generate, aliases: ["g"]
+      register "test", Test, aliases: ["t"]
 
-    if subcommands.key?(ARGV.first)
-      subcommands[ARGV.first].call(ARGV[1..-1])
-    else
-      puts "Invalid command. Use -h or --help for usage information."
     end
+  end
 
+  module_function
+  def cli
+    Dry::CLI.new(Luogu::CLI::Commands).call
   end
-end
+end
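The hand-rolled OptionParser dispatcher is gone; commands are now classes registered with `Dry::CLI::Registry`, and `Luogu.cli` simply hands `Luogu::CLI::Commands` to `Dry::CLI`. The gem's executable is not part of this diff, but after this change it presumably needs little more than the following hypothetical `exe/luogu` sketch:

```ruby
#!/usr/bin/env ruby
# frozen_string_literal: true

# Hypothetical exe/luogu entry point; Dry::CLI reads ARGV itself,
# so the executable only has to invoke the registry defined in cli.rb.
require "luogu"

Luogu.cli
```

From the shell this resolves invocations such as `luogu build prompt.md`, `luogu run prompt.md --out=tmp --plugin=prompt.plugin.rb`, or the registered single-letter aliases (`luogu b`, `luogu r`, and so on).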
data/lib/luogu/init.rb CHANGED
@@ -3,8 +3,10 @@ require 'dotenv/load'
 require "tty-prompt"
 require 'json'
 require 'yaml'
-require 'optparse'
+require "dry/cli"
+require 'fileutils'
 
+require_relative 'plugin'
 require_relative 'history_queue'
 require_relative "prompt_parser"
 require_relative "chatgpt"
data/lib/luogu/plugin.rb ADDED
@@ -0,0 +1,52 @@
+module Luogu
+  class Plugin
+    attr_reader :before_input_proc, :before_save_history_proc, :after_input_proc, :after_save_history_proc,
+                :setup_proc, :before_request_proc, :after_request_proc
+
+    def initialize(plugin_file_path)
+      @plugin_file_path = plugin_file_path
+
+      @before_input_proc = nil
+      @before_save_history_proc = nil
+
+      @after_input_proc = nil
+      @after_save_history_proc = nil
+
+      @setup_proc = nil
+    end
+
+    def before_input(&block)
+      @before_input_proc = block
+    end
+
+    def before_save_history(&block)
+      @before_save_history_proc = block
+    end
+
+    def after_input(&block)
+      @after_input_proc = block
+    end
+
+    def after_save_history(&block)
+      @after_save_history_proc = block
+    end
+
+    def setup(&block)
+      @setup_proc = block
+    end
+
+    def before_request(&block)
+      @before_request_proc = block
+    end
+
+    def after_request(&block)
+      @after_request_proc = block
+    end
+
+    def load()
+      self.instance_eval File.read(@plugin_file_path)
+      self
+    end
+
+  end
+end
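Plugin files are plain Ruby that `Plugin#load` runs through `instance_eval`, so the methods above act as a small DSL: each block registers one hook that `ChatGPT` calls around its request and history cycle. A hypothetical `weather.plugin.rb` might look like the sketch below (the hook bodies, the `max_tokens` tweak, and the file name are illustrative only):

```ruby
# Hypothetical weather.plugin.rb -- evaluated against a Luogu::Plugin
# instance, so each bare call below stores a block in the matching *_proc.

setup do |chatgpt|
  # Runs at the end of ChatGPT#initialize; the new accessors can be tuned here.
  chatgpt.model_name = "gpt-3.5-turbo"
  chatgpt.temperature = 0.2
end

before_input do |chatgpt, user_message|
  # Must return the (possibly rewritten) user message.
  user_message.strip
end

before_request do |chatgpt, params|
  # Must return the params hash that client.chat will receive.
  params.merge(max_tokens: 512)
end

after_request do |chatgpt, response|
  # Receives the raw API response; the return value is ignored.
  warn "finish_reason: #{response.dig("choices", 0, "finish_reason")}"
end

before_save_history do |chatgpt, user_message, assistant_message|
  # When defined (and the prompt has no inline ruby_code), this replaces the
  # default history handling, so push to the queue explicitly if still wanted.
  chatgpt.push_history(user_message, assistant_message)
end
```

Because the constructor derives the plugin path from the prompt file name, dropping this file next to `weather.md` is enough for `luogu run weather.md` to pick it up; the `--plugin` option overrides that default.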
data/lib/luogu/version.rb CHANGED
@@ -1,5 +1,5 @@
 # frozen_string_literal: true
 
 module Luogu
-  VERSION = "0.1.6"
+  VERSION = "0.1.8"
 end
data/luogu.gemspec CHANGED
@@ -35,6 +35,7 @@ Gem::Specification.new do |spec|
   spec.add_dependency 'ruby-openai', '~> 3.7'
   spec.add_dependency 'dotenv', '~> 2.8', '>= 2.8.1'
   spec.add_dependency 'tty-prompt', '~> 0.23.1'
+  spec.add_dependency 'dry-cli', '~> 1.0'
 
   # For more information and examples about making a new gem, check out our
   # guide at: https://bundler.io/guides/creating_gem.html
metadata CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: luogu
 version: !ruby/object:Gem::Version
-  version: 0.1.6
+  version: 0.1.8
 platform: ruby
 authors:
 - MJ
 autorequire:
 bindir: exe
 cert_chain: []
-date: 2023-04-09 00:00:00.000000000 Z
+date: 2023-04-11 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: ruby-openai
@@ -58,6 +58,20 @@ dependencies:
     - - "~>"
       - !ruby/object:Gem::Version
         version: 0.23.1
+- !ruby/object:Gem::Dependency
+  name: dry-cli
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - "~>"
+      - !ruby/object:Gem::Version
+        version: '1.0'
+  type: :runtime
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - "~>"
+      - !ruby/object:Gem::Version
+        version: '1.0'
 description: 使用markdown来快速实现 Prompt工程研发
 email:
 - tywf91@gmail.com
@@ -76,6 +90,7 @@ files:
 - lib/luogu/cli.rb
 - lib/luogu/history_queue.rb
 - lib/luogu/init.rb
+- lib/luogu/plugin.rb
 - lib/luogu/prompt_parser.rb
 - lib/luogu/version.rb
 - luogu.gemspec