smart_prompt 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/LICENSE.txt +21 -0
- data/README.md +36 -0
- data/Rakefile +4 -0
- data/lib/smart_prompt/conversation.rb +48 -0
- data/lib/smart_prompt/engine.rb +42 -0
- data/lib/smart_prompt/llm_adapter.rb +92 -0
- data/lib/smart_prompt/prompt_template.rb +52 -0
- data/lib/smart_prompt/version.rb +3 -0
- data/lib/smart_prompt/worker.rb +57 -0
- data/lib/smart_prompt.rb +19 -0
- data/sig/smart_prompt.rbs +4 -0
- metadata +58 -0
checksums.yaml
ADDED
@@ -0,0 +1,7 @@
|
|
1
|
+
---
|
2
|
+
SHA256:
|
3
|
+
metadata.gz: 426b1977292513d846008ab4152b5dec906ea2bd873aad06cb18ec761e71a409
|
4
|
+
data.tar.gz: 598c08216f9c84c99e15884d2b04efa3c480f27efa9cc54d676ac68a9f866d86
|
5
|
+
SHA512:
|
6
|
+
metadata.gz: 0c25152f944e0bc7cb92825045efb645668b71bf7efa3c4fd5d8fc78fb1c67cbda7f05abe1f2483a04df528bdcf54e651f98d0cf8e6f21e314f26837103a59a9
|
7
|
+
data.tar.gz: 93af52707e43b9da0da55f2f1873bc7ad487350140617a97929e4a867062b2387469c84631f1696081d8f4e5dfeb28d3f017e5235879565b92e9da3355cd71ac
|
data/LICENSE.txt
ADDED
@@ -0,0 +1,21 @@
|
|
1
|
+
The MIT License (MIT)
|
2
|
+
|
3
|
+
Copyright (c) 2024 zhuang biaowei
|
4
|
+
|
5
|
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
6
|
+
of this software and associated documentation files (the "Software"), to deal
|
7
|
+
in the Software without restriction, including without limitation the rights
|
8
|
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
9
|
+
copies of the Software, and to permit persons to whom the Software is
|
10
|
+
furnished to do so, subject to the following conditions:
|
11
|
+
|
12
|
+
The above copyright notice and this permission notice shall be included in
|
13
|
+
all copies or substantial portions of the Software.
|
14
|
+
|
15
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
16
|
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
17
|
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
18
|
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
19
|
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
20
|
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
21
|
+
THE SOFTWARE.
|
data/README.md
ADDED
@@ -0,0 +1,36 @@
|
|
1
|
+
# SmartPrompt
|
2
|
+
|
3
|
+
SmartPrompt 是一个强大的 Ruby gem,提供了一种领域特定语言(DSL),使其他 Ruby 程序能够更加方便、自然地调用各种大型语言模型(LLM)的能力。
|
4
|
+
|
5
|
+
## 主要特性
|
6
|
+
|
7
|
+
- 灵活的任务组合:以特定服务提供商 + 特定 LLM + 特定 prompt 的方式组合各种任务
|
8
|
+
- 子任务嵌套:支持以 DSL 形式组合调用其他子任务
|
9
|
+
- 性能优化:在保证质量的同时,提供性能最优或成本最低的解决方案
|
10
|
+
|
11
|
+
## 安装
|
12
|
+
|
13
|
+
将 gem 安装并添加到应用程序的 Gemfile 中,执行以下命令:
|
14
|
+
|
15
|
+
```
|
16
|
+
$ bundle add smart_prompt
|
17
|
+
```
|
18
|
+
|
19
|
+
如果不使用 bundler 来管理依赖,可以通过执行以下命令来安装 gem:
|
20
|
+
|
21
|
+
```
|
22
|
+
$ gem install smart_prompt
|
23
|
+
```
|
24
|
+
|
25
|
+
## 用法
|
26
|
+
|
27
|
+
以下是一些基本用法示例:
|
28
|
+
|
29
|
+
### 基本使用
|
30
|
+
|
31
|
+
```
|
32
|
+
require 'smart_prompt'
|
33
|
+
engine = SmartPrompt::Engine.new('./config/llm_config.yml')
|
34
|
+
result = engine.call_worker(:daily_report, {location: "Shanghai"})
|
35
|
+
puts result
|
36
|
+
```
|
data/lib/smart_prompt/conversation.rb
ADDED
@@ -0,0 +1,48 @@
|
|
1
|
+
require 'yaml'

module SmartPrompt
  # Accumulates a chat-style message list and sends it through the
  # currently selected LLM adapter. Intended to be driven by the worker
  # DSL, e.g.: use(:openai).model("gpt-4o").prompt(:tpl, params).send_msg
  class Conversation
    attr_reader :messages, :last_response, :config_file

    # engine must expose #adapters, #templates and #current_adapter
    # (see SmartPrompt::Engine).
    def initialize(engine)
      @messages = []
      @engine = engine
      @adapters = engine.adapters
      @templates = engine.templates
      @current_adapter = engine.current_adapter
      @last_response = nil
    end

    # Selects the adapter used by #send_msg. Raises when the name was
    # never configured. Returns self so calls can be chained.
    def use(adapter_name)
      raise "Adapter #{adapter_name} not configured" unless @adapters.key?(adapter_name)
      @current_adapter = adapter_name
      self
    end

    # Remembers the model name that will be passed to the adapter.
    def model(model_name)
      @model_name = model_name
    end

    # Renders the named template with params and appends it as a user
    # message. Template keys are stored as strings, so symbol names are
    # accepted and converted. Returns self for chaining.
    def prompt(template_name, params = {})
      template_name = template_name.to_s
      raise "Template #{template_name} not found" unless @templates.key?(template_name)
      content = @templates[template_name].render(params)
      @messages << { role: 'user', content: content }
      self
    end

    # Records the system message; it is kept so it can be re-applied
    # after #send_msg resets the history. Returns self for chaining.
    def sys_msg(message)
      @sys_msg = message
      @messages << { role: 'system', content: message }
      self
    end

    # Sends the accumulated messages through the selected adapter and
    # returns the adapter's reply. The history is then cleared, keeping
    # only the system message when one was set.
    def send_msg
      raise "No adapter selected" if @current_adapter.nil?
      @last_response = @adapters[@current_adapter].send_request(@messages, @model_name)
      @messages = []
      # Fix: the original re-added the system message unconditionally,
      # leaving a { role: 'system', content: nil } entry when #sys_msg
      # had never been called.
      @messages << { role: 'system', content: @sys_msg } if @sys_msg
      @last_response
    end
  end
end
|
data/lib/smart_prompt/engine.rb
ADDED
@@ -0,0 +1,42 @@
|
|
1
|
+
module SmartPrompt
  # Loads a YAML configuration and wires together adapters, prompt
  # templates and worker definitions. Main entry point of the gem:
  #   engine = SmartPrompt::Engine.new('./config/llm_config.yml')
  #   engine.call_worker(:daily_report, location: "Shanghai")
  class Engine
    attr_reader :config_file, :config, :adapters, :current_adapter, :templates

    def initialize(config_file)
      @config_file = config_file
      @adapters = {}
      @templates = {}
      load_config(config_file)
    end

    # Parses config_file and (re)builds the adapter and template maps.
    # Adapter classes are resolved by convention: an adapter named
    # "openai" maps to SmartPrompt::OpenaiAdapter. Missing config
    # sections are treated as empty instead of raising NoMethodError
    # on nil (the original crashed on partial configs).
    def load_config(config_file)
      @config_file = config_file
      @config = YAML.load_file(config_file)
      (@config['adapters'] || {}).each do |adapter_name, adapter_config|
        adapter_class = SmartPrompt.const_get("#{adapter_name.capitalize}Adapter")
        @adapters[adapter_name] = adapter_class.new(adapter_config)
      end
      @current_adapter = @config['default_adapter'] if @config['default_adapter']
      (@config['templates'] || {}).each do |template_name, template_file|
        @templates[template_name] = PromptTemplate.new(template_file)
      end
      load_workers
    end

    # Requires every *.rb file under the configured worker_path so the
    # SmartPrompt.define_worker blocks inside them register themselves.
    # A missing worker_path is a no-op.
    def load_workers
      worker_path = @config['worker_path']
      return unless worker_path
      Dir.glob(File.join(worker_path, '*.rb')).each do |file|
        require(file)
      end
    end

    # Runs the named worker with params and returns its result.
    def call_worker(worker_name, params = {})
      worker = get_worker(worker_name)
      worker.execute(params)
    end

    private

    # Builds a Worker bound to this engine. (The original assigned the
    # result to an unused local variable.)
    def get_worker(worker_name)
      Worker.new(worker_name, self)
    end
  end
end
|
data/lib/smart_prompt/llm_adapter.rb
ADDED
@@ -0,0 +1,92 @@
|
|
1
|
+
require 'net/http'
require 'json'
require 'uri'
require 'openai'
require 'ollama-ai'

module SmartPrompt
  # Abstract base class for LLM backends. Concrete adapters implement
  # #send_request(messages, model) and return the model's text reply.
  class LLMAdapter
    def initialize(config)
      @config = config
    end

    # messages: array of { role:, content: } hashes.
    # model: optional override of the configured model name.
    # NOTE: Conversation#send_msg always calls this with two arguments,
    # so every subclass must accept (messages, model = nil). The
    # original base/mock signatures took only messages and raised
    # ArgumentError when driven through a Conversation.
    def send_request(messages, model = nil)
      raise NotImplementedError, "Subclasses must implement send_request"
    end
  end

  # Adapter for an OpenAI-compatible chat-completions API.
  class OpenaiAdapter < LLMAdapter
    def initialize(config)
      super
      @client = OpenAI::Client.new(
        access_token: @config['api_key'],
        uri_base: @config['url'],
        request_timeout: 240
      )
    end

    # Returns the assistant message content, or nil if absent.
    def send_request(messages, model = nil)
      model_name = model || @config['model']
      response = @client.chat(
        parameters: {
          model: model_name,
          messages: messages,
          temperature: @config['temperature'] || 0.7
        }
      )
      response.dig("choices", 0, "message", "content")
    end
  end

  # Adapter for a llama.cpp server speaking the OpenAI wire protocol.
  # The server decides which model runs, so the model argument is
  # accepted for interface compatibility but not forwarded.
  class LlamacppAdapter < LLMAdapter
    def initialize(config)
      super
      @client = OpenAI::Client.new(
        uri_base: @config['url']
      )
    end

    def send_request(messages, model = nil)
      response = @client.chat(
        parameters: {
          messages: messages,
          temperature: @config['temperature'] || 0.7
        }
      )
      response.dig("choices", 0, "message", "content")
    end
  end

  # Adapter for a local Ollama server. Uses the generate endpoint with
  # the whole message array serialized into a single prompt string.
  class OllamaAdapter < LLMAdapter
    def initialize(config)
      super
      @client = Ollama.new(credentials: { address: @config['url'] })
    end

    def send_request(messages, model = nil)
      model_name = model || @config['model']
      response = @client.generate(
        {
          model: model_name,
          prompt: messages.to_s,
          stream: false
        }
      )
      response[0]["response"]
    end
  end

  # Test double that performs no network I/O and returns a canned reply.
  class MockAdapter < LLMAdapter
    def send_request(messages, model = nil)
      puts "Mock adapter received #{messages.length} messages"
      "This is a mock response from the LLM adapter."
    end
  end
end
|
data/lib/smart_prompt/prompt_template.rb
ADDED
@@ -0,0 +1,52 @@
|
|
1
|
+
require 'erb'

module SmartPrompt
  # Wraps a single ERB template file and renders it with caller-supplied
  # parameters exposed as local variables inside the template.
  class PromptTemplate
    # Reads the template from disk once; call #reload to pick up edits.
    def initialize(template_file)
      @template_file = template_file
      @template = File.read(template_file)
    end

    # Renders the template. Every params entry becomes a local variable
    # visible to the ERB source. Trim mode '-' lets `<%- -%>` tags
    # swallow surrounding whitespace.
    def render(params = {})
      erb = ERB.new(@template, trim_mode: '-')
      erb.result(binding_with_params(params))
    end

    # Re-reads the template file from disk.
    def reload
      @template = File.read(@template_file)
    end

    private

    # Returns a Binding in which each params entry exists as a local
    # variable, ready to hand to ERB#result.
    def binding_with_params(params)
      scope = binding
      params.each_pair { |name, value| scope.local_variable_set(name, value) }
      scope
    end

    class << self
      # Loads every *.erb file in template_dir into a hash keyed by the
      # file's basename (without extension).
      def load_templates(template_dir)
        Dir.glob(File.join(template_dir, '*.erb')).each_with_object({}) do |path, acc|
          acc[File.basename(path, '.erb')] = new(path)
        end
      end

      # Writes content to "<template_dir>/<name>.erb" and returns a
      # PromptTemplate instance for the new file.
      def create(name, content)
        path = File.join(template_dir, "#{name}.erb")
        File.write(path, content)
        new(path)
      end

      # Directory used by .create; defaults to 'templates'.
      def template_dir
        @template_dir ||= 'templates'
      end

      attr_writer :template_dir
    end
  end
end
|
data/lib/smart_prompt/worker.rb
ADDED
@@ -0,0 +1,57 @@
|
|
1
|
+
module SmartPrompt
  # Executes a named block of DSL code (registered via Worker.define or
  # SmartPrompt.define_worker) against a fresh Conversation.
  class Worker
    attr_reader :name, :config_file

    def initialize(name, engine)
      @name = name
      @engine = engine
      @config = engine.config
      # Looked up at construction time; nil when no such worker exists.
      @code = self.class.workers[name]
    end

    # Runs the worker's block inside a WorkerContext and returns the
    # block's result. Raises a descriptive error when no worker with
    # this name was registered — the original fell through to
    # instance_eval(&nil) and failed with an obscure ArgumentError.
    def execute(params = {})
      raise "Worker #{@name} not defined" if @code.nil?
      conversation = Conversation.new(@engine)
      context = WorkerContext.new(conversation, params, @engine)
      context.instance_eval(&@code)
    end

    class << self
      # Registry mapping worker name => definition block.
      def workers
        @workers ||= {}
      end

      # Registers a worker block under name.
      def define(name, &block)
        workers[name] = block
      end
    end
  end

  # Evaluation context for worker blocks: forwards unknown methods to
  # the underlying Conversation so the DSL can call use/model/prompt/
  # sys_msg/send_msg directly, and exposes params and call_worker.
  class WorkerContext
    def initialize(conversation, params, engine)
      @conversation = conversation
      @params = params
      @engine = engine
    end

    # Delegates DSL calls to the Conversation when it responds to them.
    def method_missing(method, *args, &block)
      if @conversation.respond_to?(method)
        @conversation.send(method, *args, &block)
      else
        super
      end
    end

    def respond_to_missing?(method, include_private = false)
      @conversation.respond_to?(method) || super
    end

    # Parameters the worker was invoked with.
    def params
      @params
    end

    # Runs another worker from inside this one (sub-task nesting).
    def call_worker(worker_name, params = {})
      worker = Worker.new(worker_name, @engine)
      worker.execute(params)
    end
  end
end
|
data/lib/smart_prompt.rb
ADDED
@@ -0,0 +1,19 @@
|
|
1
|
+
require File.expand_path('../smart_prompt/version', __FILE__)
require File.expand_path('../smart_prompt/engine', __FILE__)
require File.expand_path('../smart_prompt/conversation', __FILE__)
require File.expand_path('../smart_prompt/llm_adapter', __FILE__)
require File.expand_path('../smart_prompt/prompt_template', __FILE__)
require File.expand_path('../smart_prompt/worker', __FILE__)

# Top-level namespace and convenience API for the SmartPrompt gem.
module SmartPrompt
  class Error < StandardError; end

  # Registers a worker block under name (see Worker.define).
  def self.define_worker(name, &block)
    Worker.define(name, &block)
  end

  # One-shot helper: builds an Engine from config_file and runs the
  # named worker with params. Fix: the original passed config_file
  # (a String) straight to Worker.new, which expects an engine object
  # (it calls engine.config) and therefore raised NoMethodError.
  def self.run_worker(name, config_file, params = {})
    engine = Engine.new(config_file)
    worker = Worker.new(name, engine)
    worker.execute(params)
  end
end
|
metadata
ADDED
@@ -0,0 +1,58 @@
|
|
1
|
+
--- !ruby/object:Gem::Specification
|
2
|
+
name: smart_prompt
|
3
|
+
version: !ruby/object:Gem::Version
|
4
|
+
version: 0.1.0
|
5
|
+
platform: ruby
|
6
|
+
authors:
|
7
|
+
- zhuang biaowei
|
8
|
+
autorequire:
|
9
|
+
bindir: exe
|
10
|
+
cert_chain: []
|
11
|
+
date: 2024-09-29 00:00:00.000000000 Z
|
12
|
+
dependencies: []
|
13
|
+
description: SmartPrompt provides a flexible DSL for managing prompts, interacting
|
14
|
+
with multiple LLMs, and creating composable task workers.
|
15
|
+
email:
|
16
|
+
- zbw@kaiyuanshe.org
|
17
|
+
executables: []
|
18
|
+
extensions: []
|
19
|
+
extra_rdoc_files: []
|
20
|
+
files:
|
21
|
+
- LICENSE.txt
|
22
|
+
- README.md
|
23
|
+
- Rakefile
|
24
|
+
- lib/smart_prompt.rb
|
25
|
+
- lib/smart_prompt/conversation.rb
|
26
|
+
- lib/smart_prompt/engine.rb
|
27
|
+
- lib/smart_prompt/llm_adapter.rb
|
28
|
+
- lib/smart_prompt/prompt_template.rb
|
29
|
+
- lib/smart_prompt/version.rb
|
30
|
+
- lib/smart_prompt/worker.rb
|
31
|
+
- sig/smart_prompt.rbs
|
32
|
+
homepage: https://github.com/zhuangbiaowei/smart_prompt
|
33
|
+
licenses:
|
34
|
+
- MIT
|
35
|
+
metadata:
|
36
|
+
homepage_uri: https://github.com/zhuangbiaowei/smart_prompt
|
37
|
+
source_code_uri: https://github.com/zhuangbiaowei/smart_prompt
|
38
|
+
changelog_uri: https://github.com/zhuangbiaowei/smart_prompt/blob/master/CHANGELOG.md
|
39
|
+
post_install_message:
|
40
|
+
rdoc_options: []
|
41
|
+
require_paths:
|
42
|
+
- lib
|
43
|
+
required_ruby_version: !ruby/object:Gem::Requirement
|
44
|
+
requirements:
|
45
|
+
- - ">="
|
46
|
+
- !ruby/object:Gem::Version
|
47
|
+
version: 3.0.0
|
48
|
+
required_rubygems_version: !ruby/object:Gem::Requirement
|
49
|
+
requirements:
|
50
|
+
- - ">="
|
51
|
+
- !ruby/object:Gem::Version
|
52
|
+
version: '0'
|
53
|
+
requirements: []
|
54
|
+
rubygems_version: 3.5.18
|
55
|
+
signing_key:
|
56
|
+
specification_version: 4
|
57
|
+
summary: A smart prompt management and LLM interaction gem
|
58
|
+
test_files: []
|