smart_prompt 0.3.3 → 0.3.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/lib/smart_prompt/conversation.rb +0 -32
- data/lib/smart_prompt/engine.rb +7 -6
- data/lib/smart_prompt/version.rb +1 -1
- metadata +2 -2
checksums.yaml
CHANGED
|
@@ -1,7 +1,7 @@
|
|
|
1
1
|
---
|
|
2
2
|
SHA256:
|
|
3
|
-
metadata.gz:
|
|
4
|
-
data.tar.gz:
|
|
3
|
+
metadata.gz: ccf371f6f175e6a300b3b2bbd61e6311d6753a3da00d35f1beb32ceeb0ae3160
|
|
4
|
+
data.tar.gz: 92b8725b6720e4b6f9e71fc9ea5b77f51f5697720dd82b777f8c44b7d5e2629a
|
|
5
5
|
SHA512:
|
|
6
|
-
metadata.gz:
|
|
7
|
-
data.tar.gz:
|
|
6
|
+
metadata.gz: 8a73f2a6d5911348d57d3acc80def2686e6858359744c2bb761af41ccfe7f58eb4f81aa268c0c974219f12f76a9cbfc7283abb63f41042eba6f51635b1cac396
|
|
7
|
+
data.tar.gz: 4e9e0a5dda6830c8c5adb8e58e7bda2b88958085d764271809aaf5502c12c910385de94e73fd7f9973c3b25ffb61e97c8036635d91851fbadcc877c3108e940f
|
|
@@ -31,9 +31,6 @@ module SmartPrompt
|
|
|
31
31
|
|
|
32
32
|
def model(model_name)
|
|
33
33
|
@model_name = model_name
|
|
34
|
-
if @engine.config["better_prompt_db"]
|
|
35
|
-
BetterPrompt.add_model(@current_llm_name, @model_name)
|
|
36
|
-
end
|
|
37
34
|
end
|
|
38
35
|
|
|
39
36
|
def temperature(temperature)
|
|
@@ -58,15 +55,9 @@ module SmartPrompt
|
|
|
58
55
|
raise "Template #{template_name} not found" unless @templates.key?(template_name)
|
|
59
56
|
content = @templates[template_name].render(params)
|
|
60
57
|
add_message({ role: "user", content: content }, with_history)
|
|
61
|
-
if @engine.config["better_prompt_db"]
|
|
62
|
-
BetterPrompt.add_prompt(template_name, "user", content)
|
|
63
|
-
end
|
|
64
58
|
self
|
|
65
59
|
else
|
|
66
60
|
add_message({ role: "user", content: template_name }, with_history)
|
|
67
|
-
if @engine.config["better_prompt_db"]
|
|
68
|
-
BetterPrompt.add_prompt("NULL", "user", template_name)
|
|
69
|
-
end
|
|
70
61
|
self
|
|
71
62
|
end
|
|
72
63
|
end
|
|
@@ -74,9 +65,6 @@ module SmartPrompt
|
|
|
74
65
|
def sys_msg(message, params)
|
|
75
66
|
@sys_msg = message
|
|
76
67
|
add_message({ role: "system", content: message }, params[:with_history])
|
|
77
|
-
if @engine.config["better_prompt_db"]
|
|
78
|
-
BetterPrompt.add_prompt("NULL", "system", message)
|
|
79
|
-
end
|
|
80
68
|
self
|
|
81
69
|
end
|
|
82
70
|
|
|
@@ -91,13 +79,6 @@ module SmartPrompt
|
|
|
91
79
|
def send_msg(params = {})
|
|
92
80
|
Retriable.retriable(RETRY_OPTIONS) do
|
|
93
81
|
raise ConfigurationError, "No LLM selected" if @current_llm.nil?
|
|
94
|
-
if @engine.config["better_prompt_db"]
|
|
95
|
-
if params[:with_history]
|
|
96
|
-
@last_call_id = BetterPrompt.add_model_call(@current_llm_name, @model_name, history_messages, false, @temperature, 0, 0.0, 0, @tools)
|
|
97
|
-
else
|
|
98
|
-
@last_call_id = BetterPrompt.add_model_call(@current_llm_name, @model_name, @messages, false, @temperature, 0, 0.0, 0, @tools)
|
|
99
|
-
end
|
|
100
|
-
end
|
|
101
82
|
if params[:with_history]
|
|
102
83
|
@last_response = @current_llm.send_request(history_messages, @model_name, @temperature, @tools, nil)
|
|
103
84
|
else
|
|
@@ -106,9 +87,6 @@ module SmartPrompt
|
|
|
106
87
|
if @last_response == ""
|
|
107
88
|
@last_response = @current_llm.last_response
|
|
108
89
|
end
|
|
109
|
-
if @engine.config["better_prompt_db"]
|
|
110
|
-
BetterPrompt.add_response(@last_call_id, @last_response, false)
|
|
111
|
-
end
|
|
112
90
|
@messages = []
|
|
113
91
|
@messages << { role: "system", content: @sys_msg }
|
|
114
92
|
@last_response
|
|
@@ -120,21 +98,11 @@ module SmartPrompt
|
|
|
120
98
|
def send_msg_by_stream(params = {}, &proc)
|
|
121
99
|
Retriable.retriable(RETRY_OPTIONS) do
|
|
122
100
|
raise ConfigurationError, "No LLM selected" if @current_llm.nil?
|
|
123
|
-
if @engine.config["better_prompt_db"]
|
|
124
|
-
if params[:with_history]
|
|
125
|
-
@last_call_id = BetterPrompt.add_model_call(@current_llm_name, @model_name, history_messages, true, @temperature, 0, 0.0, 0, @tools)
|
|
126
|
-
else
|
|
127
|
-
@last_call_id = BetterPrompt.add_model_call(@current_llm_name, @model_name, @messages, true, @temperature, 0, 0.0, 0, @tools)
|
|
128
|
-
end
|
|
129
|
-
end
|
|
130
101
|
if params[:with_history]
|
|
131
102
|
@current_llm.send_request(history_messages, @model_name, @temperature, @tools, proc)
|
|
132
103
|
else
|
|
133
104
|
@current_llm.send_request(@messages, @model_name, @temperature, @tools, proc)
|
|
134
105
|
end
|
|
135
|
-
if @engine.config["better_prompt_db"]
|
|
136
|
-
BetterPrompt.add_response(@last_call_id, @engine.stream_response, true)
|
|
137
|
-
end
|
|
138
106
|
@messages = []
|
|
139
107
|
@messages << { role: "system", content: @sys_msg }
|
|
140
108
|
end
|
data/lib/smart_prompt/engine.rb
CHANGED
|
@@ -12,7 +12,7 @@ module SmartPrompt
|
|
|
12
12
|
@history_messages = []
|
|
13
13
|
load_config(config_file)
|
|
14
14
|
SmartPrompt.logger.info "Started create the SmartPrompt engine."
|
|
15
|
-
@stream_proc =
|
|
15
|
+
@stream_proc = Proc.new do |chunk, _bytesize|
|
|
16
16
|
if @stream_response.empty?
|
|
17
17
|
@stream_response["id"] = chunk["id"]
|
|
18
18
|
@stream_response["object"] = chunk["object"]
|
|
@@ -31,7 +31,7 @@ module SmartPrompt
|
|
|
31
31
|
@stream_response["system_fingerprint"] = chunk["system_fingerprint"]
|
|
32
32
|
end
|
|
33
33
|
if chunk.dig("choices", 0, "delta", "reasoning_content")
|
|
34
|
-
@stream_response["choices"][0]["message"]["reasoning_content"] += chunk.dig("choices", 0, "delta", "reasoning_content")
|
|
34
|
+
@stream_response["choices"][0]["message"]["reasoning_content"] += chunk.dig("choices", 0, "delta", "reasoning_content")
|
|
35
35
|
end
|
|
36
36
|
if chunk.dig("choices", 0, "delta", "content")
|
|
37
37
|
@stream_response["choices"][0]["message"]["content"] += chunk.dig("choices", 0, "delta", "content")
|
|
@@ -64,10 +64,6 @@ module SmartPrompt
|
|
|
64
64
|
SmartPrompt.logger = Logger.new(@config["logger_file"])
|
|
65
65
|
end
|
|
66
66
|
SmartPrompt.logger.info "Loading configuration from file: #{config_file}"
|
|
67
|
-
if @config["better_prompt_db"]
|
|
68
|
-
require "better_prompt"
|
|
69
|
-
BetterPrompt.setup(db_path: @config["better_prompt_db"])
|
|
70
|
-
end
|
|
71
67
|
@config["adapters"].each do |adapter_name, adapter_class|
|
|
72
68
|
adapter_class = SmartPrompt.const_get(adapter_class)
|
|
73
69
|
@adapters[adapter_name] = adapter_class
|
|
@@ -115,6 +111,11 @@ module SmartPrompt
|
|
|
115
111
|
SmartPrompt.logger.info "Calling worker: #{worker_name} with params: #{params}"
|
|
116
112
|
worker = get_worker(worker_name)
|
|
117
113
|
begin
|
|
114
|
+
unless params[:with_history]
|
|
115
|
+
if worker.conversation
|
|
116
|
+
worker.conversation.messages.clear
|
|
117
|
+
end
|
|
118
|
+
end
|
|
118
119
|
result = worker.execute(params)
|
|
119
120
|
SmartPrompt.logger.info "Worker #{worker_name} executed successfully"
|
|
120
121
|
if result.class == String
|
data/lib/smart_prompt/version.rb
CHANGED
metadata
CHANGED
|
@@ -1,7 +1,7 @@
|
|
|
1
1
|
--- !ruby/object:Gem::Specification
|
|
2
2
|
name: smart_prompt
|
|
3
3
|
version: !ruby/object:Gem::Version
|
|
4
|
-
version: 0.3.3
|
|
4
|
+
version: 0.3.5
|
|
5
5
|
platform: ruby
|
|
6
6
|
authors:
|
|
7
7
|
- zhuang biaowei
|
|
@@ -137,7 +137,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
|
|
|
137
137
|
- !ruby/object:Gem::Version
|
|
138
138
|
version: '0'
|
|
139
139
|
requirements: []
|
|
140
|
-
rubygems_version:
|
|
140
|
+
rubygems_version: 4.0.6
|
|
141
141
|
specification_version: 4
|
|
142
142
|
summary: A smart prompt management and LLM interaction gem
|
|
143
143
|
test_files: []
|