smart_prompt 0.3.4 → 0.3.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/lib/smart_prompt/conversation.rb +0 -32
- data/lib/smart_prompt/engine.rb +0 -4
- data/lib/smart_prompt/version.rb +1 -1
- metadata +2 -16
checksums.yaml
CHANGED
|
@@ -1,7 +1,7 @@
|
|
|
1
1
|
---
|
|
2
2
|
SHA256:
|
|
3
|
-
metadata.gz:
|
|
4
|
-
data.tar.gz:
|
|
3
|
+
metadata.gz: ccf371f6f175e6a300b3b2bbd61e6311d6753a3da00d35f1beb32ceeb0ae3160
|
|
4
|
+
data.tar.gz: 92b8725b6720e4b6f9e71fc9ea5b77f51f5697720dd82b777f8c44b7d5e2629a
|
|
5
5
|
SHA512:
|
|
6
|
-
metadata.gz:
|
|
7
|
-
data.tar.gz:
|
|
6
|
+
metadata.gz: 8a73f2a6d5911348d57d3acc80def2686e6858359744c2bb761af41ccfe7f58eb4f81aa268c0c974219f12f76a9cbfc7283abb63f41042eba6f51635b1cac396
|
|
7
|
+
data.tar.gz: 4e9e0a5dda6830c8c5adb8e58e7bda2b88958085d764271809aaf5502c12c910385de94e73fd7f9973c3b25ffb61e97c8036635d91851fbadcc877c3108e940f
|
|
@@ -31,9 +31,6 @@ module SmartPrompt
|
|
|
31
31
|
|
|
32
32
|
def model(model_name)
|
|
33
33
|
@model_name = model_name
|
|
34
|
-
if @engine.config["better_prompt_db"]
|
|
35
|
-
BetterPrompt.add_model(@current_llm_name, @model_name)
|
|
36
|
-
end
|
|
37
34
|
end
|
|
38
35
|
|
|
39
36
|
def temperature(temperature)
|
|
@@ -58,15 +55,9 @@ module SmartPrompt
|
|
|
58
55
|
raise "Template #{template_name} not found" unless @templates.key?(template_name)
|
|
59
56
|
content = @templates[template_name].render(params)
|
|
60
57
|
add_message({ role: "user", content: content }, with_history)
|
|
61
|
-
if @engine.config["better_prompt_db"]
|
|
62
|
-
BetterPrompt.add_prompt(template_name, "user", content)
|
|
63
|
-
end
|
|
64
58
|
self
|
|
65
59
|
else
|
|
66
60
|
add_message({ role: "user", content: template_name }, with_history)
|
|
67
|
-
if @engine.config["better_prompt_db"]
|
|
68
|
-
BetterPrompt.add_prompt("NULL", "user", template_name)
|
|
69
|
-
end
|
|
70
61
|
self
|
|
71
62
|
end
|
|
72
63
|
end
|
|
@@ -74,9 +65,6 @@ module SmartPrompt
|
|
|
74
65
|
def sys_msg(message, params)
|
|
75
66
|
@sys_msg = message
|
|
76
67
|
add_message({ role: "system", content: message }, params[:with_history])
|
|
77
|
-
if @engine.config["better_prompt_db"]
|
|
78
|
-
BetterPrompt.add_prompt("NULL", "system", message)
|
|
79
|
-
end
|
|
80
68
|
self
|
|
81
69
|
end
|
|
82
70
|
|
|
@@ -91,13 +79,6 @@ module SmartPrompt
|
|
|
91
79
|
def send_msg(params = {})
|
|
92
80
|
Retriable.retriable(RETRY_OPTIONS) do
|
|
93
81
|
raise ConfigurationError, "No LLM selected" if @current_llm.nil?
|
|
94
|
-
if @engine.config["better_prompt_db"]
|
|
95
|
-
if params[:with_history]
|
|
96
|
-
@last_call_id = BetterPrompt.add_model_call(@current_llm_name, @model_name, history_messages, false, @temperature, 0, 0.0, 0, @tools)
|
|
97
|
-
else
|
|
98
|
-
@last_call_id = BetterPrompt.add_model_call(@current_llm_name, @model_name, @messages, false, @temperature, 0, 0.0, 0, @tools)
|
|
99
|
-
end
|
|
100
|
-
end
|
|
101
82
|
if params[:with_history]
|
|
102
83
|
@last_response = @current_llm.send_request(history_messages, @model_name, @temperature, @tools, nil)
|
|
103
84
|
else
|
|
@@ -106,9 +87,6 @@ module SmartPrompt
|
|
|
106
87
|
if @last_response == ""
|
|
107
88
|
@last_response = @current_llm.last_response
|
|
108
89
|
end
|
|
109
|
-
if @engine.config["better_prompt_db"]
|
|
110
|
-
BetterPrompt.add_response(@last_call_id, @last_response, false)
|
|
111
|
-
end
|
|
112
90
|
@messages = []
|
|
113
91
|
@messages << { role: "system", content: @sys_msg }
|
|
114
92
|
@last_response
|
|
@@ -120,21 +98,11 @@ module SmartPrompt
|
|
|
120
98
|
def send_msg_by_stream(params = {}, &proc)
|
|
121
99
|
Retriable.retriable(RETRY_OPTIONS) do
|
|
122
100
|
raise ConfigurationError, "No LLM selected" if @current_llm.nil?
|
|
123
|
-
if @engine.config["better_prompt_db"]
|
|
124
|
-
if params[:with_history]
|
|
125
|
-
@last_call_id = BetterPrompt.add_model_call(@current_llm_name, @model_name, history_messages, true, @temperature, 0, 0.0, 0, @tools)
|
|
126
|
-
else
|
|
127
|
-
@last_call_id = BetterPrompt.add_model_call(@current_llm_name, @model_name, @messages, true, @temperature, 0, 0.0, 0, @tools)
|
|
128
|
-
end
|
|
129
|
-
end
|
|
130
101
|
if params[:with_history]
|
|
131
102
|
@current_llm.send_request(history_messages, @model_name, @temperature, @tools, proc)
|
|
132
103
|
else
|
|
133
104
|
@current_llm.send_request(@messages, @model_name, @temperature, @tools, proc)
|
|
134
105
|
end
|
|
135
|
-
if @engine.config["better_prompt_db"]
|
|
136
|
-
BetterPrompt.add_response(@last_call_id, @engine.stream_response, true)
|
|
137
|
-
end
|
|
138
106
|
@messages = []
|
|
139
107
|
@messages << { role: "system", content: @sys_msg }
|
|
140
108
|
end
|
data/lib/smart_prompt/engine.rb
CHANGED
|
@@ -64,10 +64,6 @@ module SmartPrompt
|
|
|
64
64
|
SmartPrompt.logger = Logger.new(@config["logger_file"])
|
|
65
65
|
end
|
|
66
66
|
SmartPrompt.logger.info "Loading configuration from file: #{config_file}"
|
|
67
|
-
if @config["better_prompt_db"]
|
|
68
|
-
require "better_prompt"
|
|
69
|
-
BetterPrompt.setup(db_path: @config["better_prompt_db"])
|
|
70
|
-
end
|
|
71
67
|
@config["adapters"].each do |adapter_name, adapter_class|
|
|
72
68
|
adapter_class = SmartPrompt.const_get(adapter_class)
|
|
73
69
|
@adapters[adapter_name] = adapter_class
|
data/lib/smart_prompt/version.rb
CHANGED
metadata
CHANGED
|
@@ -1,7 +1,7 @@
|
|
|
1
1
|
--- !ruby/object:Gem::Specification
|
|
2
2
|
name: smart_prompt
|
|
3
3
|
version: !ruby/object:Gem::Version
|
|
4
|
-
version: 0.3.4
|
|
4
|
+
version: 0.3.5
|
|
5
5
|
platform: ruby
|
|
6
6
|
authors:
|
|
7
7
|
- zhuang biaowei
|
|
@@ -93,20 +93,6 @@ dependencies:
|
|
|
93
93
|
- - "~>"
|
|
94
94
|
- !ruby/object:Gem::Version
|
|
95
95
|
version: 0.9.2.1
|
|
96
|
-
- !ruby/object:Gem::Dependency
|
|
97
|
-
name: better_prompt
|
|
98
|
-
requirement: !ruby/object:Gem::Requirement
|
|
99
|
-
requirements:
|
|
100
|
-
- - "~>"
|
|
101
|
-
- !ruby/object:Gem::Version
|
|
102
|
-
version: 0.2.1
|
|
103
|
-
type: :runtime
|
|
104
|
-
prerelease: false
|
|
105
|
-
version_requirements: !ruby/object:Gem::Requirement
|
|
106
|
-
requirements:
|
|
107
|
-
- - "~>"
|
|
108
|
-
- !ruby/object:Gem::Version
|
|
109
|
-
version: 0.2.1
|
|
110
96
|
description: SmartPrompt provides a flexible DSL for managing prompts, interacting
|
|
111
97
|
with multiple LLMs, and creating composable task workers.
|
|
112
98
|
email:
|
|
@@ -151,7 +137,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
|
|
|
151
137
|
- !ruby/object:Gem::Version
|
|
152
138
|
version: '0'
|
|
153
139
|
requirements: []
|
|
154
|
-
rubygems_version:
|
|
140
|
+
rubygems_version: 4.0.6
|
|
155
141
|
specification_version: 4
|
|
156
142
|
summary: A smart prompt management and LLM interaction gem
|
|
157
143
|
test_files: []
|