smart_prompt 0.2.1 → 0.2.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/lib/smart_prompt/conversation.rb +43 -22
- data/lib/smart_prompt/engine.rb +115 -88
- data/lib/smart_prompt/llm_adapter.rb +1 -0
- data/lib/smart_prompt/openai_adapter.rb +26 -30
- data/lib/smart_prompt/version.rb +1 -1
- data/lib/smart_prompt/worker.rb +12 -12
- metadata +2 -2
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: ac20d9dc79d5ecdaa9993179fbe2276a6b0437b4cf46d7dc0db60b8688916c6a
+  data.tar.gz: c826a6d051fd60f06a72c7b3dd17366754ba19ef999df59f097705045218e9ae
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: da94c5503130c8e32681d2a11e0b9b3aef5ad2e290035fbc3cda90be64578ad6cb8cc80af9de2509c22ef39988dda5ec5f17e9273987f2df7e815a3acf636d01
+  data.tar.gz: d00dbc828ed0c6b74582635aaf0c0527a3fb5ea95149ef808395bd1cd6d7c8e3cfb4022bc0e0a83dceada6e5334b1765d5f32b0ad5e954b29db8e8b1eed52419
data/lib/smart_prompt/conversation.rb
CHANGED
@@ -1,5 +1,5 @@
-require
-require
+require "yaml"
+require "retriable"
 require "numo/narray"
 
 module SmartPrompt
@@ -7,7 +7,7 @@ module SmartPrompt
     include APIHandler
     attr_reader :messages, :last_response, :config_file
 
-    def initialize(engine)
+    def initialize(engine, tools = nil)
       SmartPrompt.logger.info "Create Conversation"
       @messages = []
       @engine = engine
@@ -16,6 +16,7 @@ module SmartPrompt
       @templates = engine.templates
       @current_adapter = engine.current_adapter
       @last_response = nil
+      @tools = tools
     end
 
     def use(llm_name)
@@ -32,52 +33,72 @@ module SmartPrompt
       @temperature = temperature
     end
 
+    def history_messages
+      @engine.history_messages
+    end
+
+    def add_message(msg)
+      history_messages << msg
+      @messages << msg
+    end
+
     def prompt(template_name, params = {})
       if template_name.class == Symbol
         template_name = template_name.to_s
         SmartPrompt.logger.info "Use template #{template_name}"
         raise "Template #{template_name} not found" unless @templates.key?(template_name)
         content = @templates[template_name].render(params)
-
+        add_message({ role: "user", content: content })
         self
       else
-
+        add_message({ role: "user", content: template_name })
         self
       end
     end
 
     def sys_msg(message)
       @sys_msg = message
-
+      add_message({ role: "system", content: message })
       self
     end
 
     def send_msg_once
       raise "No LLM selected" if @current_llm.nil?
       @last_response = @current_llm.send_request(@messages, @model_name, @temperature)
-      @messages=[]
-      @messages << { role:
+      @messages = []
+      @messages << { role: "system", content: @sys_msg }
       @last_response
     end
 
-    def send_msg
+    def send_msg(params = {})
       Retriable.retriable(RETRY_OPTIONS) do
         raise ConfigurationError, "No LLM selected" if @current_llm.nil?
-
-
-
+        if params[:with_history]
+          @last_response = @current_llm.send_request(history_messages, @model_name, @temperature, @tools, nil)
+        else
+          @last_response = @current_llm.send_request(@messages, @model_name, @temperature, @tools, nil)
+        end
+        if @last_response == ""
+          @last_response = @current_llm.last_response
+        end
+        @messages = []
+        @messages << { role: "system", content: @sys_msg }
        @last_response
      end
-    rescue => e
+    rescue => e
      return "Failed to call LLM after #{MAX_RETRIES} attempts: #{e.message}"
    end
 
-    def send_msg_by_stream(&proc)
+    def send_msg_by_stream(params = {}, &proc)
      Retriable.retriable(RETRY_OPTIONS) do
        raise ConfigurationError, "No LLM selected" if @current_llm.nil?
-
-
-
+        if params[:with_history]
+          @current_llm.send_request(history_messages, @model_name, @temperature, @tools, proc)
+        else
+          @current_llm.send_request(@messages, @model_name, @temperature, @tools, proc)
+        end
+        @messages = []
+        @messages << { role: "system", content: @sys_msg }
      end
    rescue => e
      return "Failed to call LLM after #{MAX_RETRIES} attempts: #{e.message}"
@@ -85,7 +106,7 @@ module SmartPrompt
 
     def normalize(x, length)
       if x.length > length
-        x = Numo::NArray.cast(x[0..length-1])
+        x = Numo::NArray.cast(x[0..length - 1])
         norm = Math.sqrt((x * x).sum)
         return (x / norm).to_a
       else
@@ -98,15 +119,15 @@ module SmartPrompt
       raise ConfigurationError, "No LLM selected" if @current_llm.nil?
       text = ""
       @messages.each do |msg|
-        if msg[:role]=="user"
+        if msg[:role] == "user"
          text = msg[:content]
        end
      end
      @last_response = @current_llm.embeddings(text, @model_name)
-      @messages=[]
-      @messages << { role:
+      @messages = []
+      @messages << { role: "system", content: @sys_msg }
      normalize(@last_response, length)
    end
  end
 end
-end
+end
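Taken together, the conversation changes let a caller hand an OpenAI-style tool list to the conversation and replay the engine-wide history on send. A minimal usage sketch follows; the config path, LLM name, and tool definition are illustrative assumptions, not values shipped with the gem.

# Sketch only: "config/llm_config.yml", "deepseek", and get_weather are hypothetical.
engine = SmartPrompt::Engine.new("config/llm_config.yml")
tools = [
  {
    type: "function",
    function: {
      name: "get_weather",
      description: "Look up the current weather for a city",
      parameters: { type: "object", properties: { city: { type: "string" } }, required: ["city"] },
    },
  },
]
conversation = SmartPrompt::Conversation.new(engine, tools)
conversation.use("deepseek")
conversation.sys_msg("You are a helpful assistant.")
conversation.prompt("What's the weather like in Shanghai today?")
# with_history: true sends the shared engine history instead of only this conversation's messages.
puts conversation.send_msg(with_history: true)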
data/lib/smart_prompt/engine.rb
CHANGED
@@ -1,103 +1,130 @@
 module SmartPrompt
-
-
-
+  class Engine
+    attr_reader :config_file, :config, :adapters, :current_adapter, :llms, :templates
+
+    def initialize(config_file)
+      @config_file = config_file
+      @adapters = {}
+      @llms = {}
+      @templates = {}
+      @current_workers = {}
+      @history_messages = []
+      load_config(config_file)
+      SmartPrompt.logger.info "Started create the SmartPrompt engine."
+    end
+
+    def create_dir(filename)
+      path = File::path(filename).to_s
+      parent_dir = File::dirname(path)
+      Dir.mkdir(parent_dir, 0755) unless File.directory?(parent_dir)
+    end
+
+    def load_config(config_file)
+      begin
       @config_file = config_file
-      @
-      @
-
-
-
+        @config = YAML.load_file(config_file)
+        if @config["logger_file"]
+          create_dir(@config["logger_file"])
+          SmartPrompt.logger = Logger.new(@config["logger_file"])
+        end
+        SmartPrompt.logger.info "Loading configuration from file: #{config_file}"
+        @config["adapters"].each do |adapter_name, adapter_class|
+          adapter_class = SmartPrompt.const_get(adapter_class)
+          @adapters[adapter_name] = adapter_class
+        end
+        @config["llms"].each do |llm_name, llm_config|
+          adapter_class = @adapters[llm_config["adapter"]]
+          @llms[llm_name] = adapter_class.new(llm_config)
+        end
+        @current_llm = @config["default_llm"] if @config["default_llm"]
+        Dir.glob(File.join(@config["template_path"], "*.erb")).each do |file|
+          template_name = file.gsub(@config["template_path"] + "/", "").gsub("\.erb", "")
+          @templates[template_name] = PromptTemplate.new(file)
+        end
+        load_workers
+      rescue Psych::SyntaxError => ex
+        SmartPrompt.logger.error "YAML syntax error in config file: #{ex.message}"
+        raise ConfigurationError, "Invalid YAML syntax in config file: #{ex.message}"
+      rescue Errno::ENOENT => ex
+        SmartPrompt.logger.error "Config file not found: #{ex.message}"
+        raise ConfigurationError, "Config file not found: #{ex.message}"
+      rescue StandardError => ex
+        SmartPrompt.logger.error "Error loading configuration: #{ex.message}"
+        raise ConfigurationError, "Error loading configuration: #{ex.message}"
+      ensure
+        SmartPrompt.logger.info "Configuration loaded successfully"
      end
+    end
 
-
-
-
-      Dir.mkdir(parent_dir, 0755) unless File.directory?(parent_dir)
+    def load_workers
+      Dir.glob(File.join(@config["worker_path"], "*.rb")).each do |file|
+        require(file)
      end
+    end
 
-
-
-
-
-
-
-      SmartPrompt.logger = Logger.new(@config['logger_file'])
-      end
-      SmartPrompt.logger.info "Loading configuration from file: #{config_file}"
-      @config['adapters'].each do |adapter_name, adapter_class|
-        adapter_class = SmartPrompt.const_get(adapter_class)
-        @adapters[adapter_name] = adapter_class
-      end
-      @config['llms'].each do |llm_name,llm_config|
-        adapter_class = @adapters[llm_config['adapter']]
-        @llms[llm_name]=adapter_class.new(llm_config)
-      end
-      @current_llm = @config['default_llm'] if @config['default_llm']
-      Dir.glob(File.join(@config['template_path'], '*.erb')).each do |file|
-        template_name = file.gsub(@config['template_path']+"/","").gsub("\.erb","")
-        @templates[template_name] = PromptTemplate.new(file)
-      end
-      load_workers
-    rescue Psych::SyntaxError => ex
-      SmartPrompt.logger.error "YAML syntax error in config file: #{ex.message}"
-      raise ConfigurationError, "Invalid YAML syntax in config file: #{ex.message}"
-    rescue Errno::ENOENT => ex
-      SmartPrompt.logger.error "Config file not found: #{ex.message}"
-      raise ConfigurationError, "Config file not found: #{ex.message}"
-    rescue StandardError => ex
-      SmartPrompt.logger.error "Error loading configuration: #{ex.message}"
-      raise ConfigurationError, "Error loading configuration: #{ex.message}"
-    ensure
-      SmartPrompt.logger.info "Configuration loaded successfully"
-    end
-  end
-
-    def load_workers
-    Dir.glob(File.join(@config['worker_path'], '*.rb')).each do |file|
-      require(file)
-    end
+    def check_worker(worker_name)
+      if SmartPrompt::Worker.workers[worker_name]
+        return true
+      else
+        SmartPrompt.logger.warn "Invalid worker: #{worker_name}"
+        return false
      end
+    end
 
-
-
-
+    def call_worker(worker_name, params = {})
+      SmartPrompt.logger.info "Calling worker: #{worker_name} with params: #{params}"
+      worker = get_worker(worker_name)
+      begin
+        result = worker.execute(params)
+        SmartPrompt.logger.info "Worker #{worker_name} executed successfully"
+        if result.class == String
+          recive_message = {
+            "role": "assistant",
+            "content": result,
+          }
        else
-
-
-
-
-
-    def call_worker(worker_name, params = {})
-      SmartPrompt.logger.info "Calling worker: #{worker_name} with params: #{params}"
-      worker = get_worker(worker_name)
-      begin
-        result = worker.execute(params)
-        SmartPrompt.logger.info "Worker #{worker_name} executed successfully"
-        result
-      rescue => e
-        SmartPrompt.logger.error "Error executing worker #{worker_name}: #{e.message}"
-        SmartPrompt.logger.debug e.backtrace.join("\n")
-        raise
+          recive_message = {
+            "role": result.dig("choices", 0, "message", "role"),
+            "content": result.dig("choices", 0, "message", "content").to_s + result.dig("choices", 0, "message", "tool_calls").to_s,
+          }
        end
+        worker.conversation.add_message(recive_message)
+        result
+      rescue => e
+        SmartPrompt.logger.error "Error executing worker #{worker_name}: #{e.message}"
+        SmartPrompt.logger.debug e.backtrace.join("\n")
+        raise
      end
+    end
 
-
-
-
-
-
-
-
-
-
-
-  end
+    def call_worker_by_stream(worker_name, params = {}, &proc)
+      SmartPrompt.logger.info "Calling worker: #{worker_name} with params: #{params}"
+      worker = get_worker(worker_name)
+      begin
+        worker.execute_by_stream(params, &proc)
+        SmartPrompt.logger.info "Worker #{worker_name} executed(stream) successfully"
+      rescue => e
+        SmartPrompt.logger.error "Error executing worker #{worker_name}: #{e.message}"
+        SmartPrompt.logger.debug e.backtrace.join("\n")
+        raise
      end
-
-
-
-
+    end
+
+    def get_worker(worker_name)
+      SmartPrompt.logger.info "Creating worker instance for: #{worker_name}"
+      unless worker = @current_workers[worker_name]
+        worker = Worker.new(worker_name, self)
+        @current_workers[worker_name] = worker
      end
+      return worker
+    end
+
+    def history_messages
+      @history_messages
+    end
+
+    def clear_history_messages
+      @history_messages = []
    end
-end
+  end
+end
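The rewritten engine caches Worker instances, keeps a shared message history, and appends each worker's reply (including any tool_calls text) back into that worker's conversation. A rough sketch of driving the new entry points; the worker name and params are assumptions, and the streaming chunk shape follows the ruby-openai convention the adapter relies on.

# Sketch only: :summarizer is a hypothetical worker registered under the configured worker_path.
engine = SmartPrompt::Engine.new("config/llm_config.yml")

if engine.check_worker(:summarizer)
  puts engine.call_worker(:summarizer, text: "A long article to condense...")
end

# Streaming variant: the block is forwarded down to the adapter as the stream callback.
engine.call_worker_by_stream(:summarizer, text: "A long article to condense...") do |chunk, _bytesize|
  print chunk.dig("choices", 0, "delta", "content").to_s
end

engine.history_messages        # every message recorded by conversations so far
engine.clear_history_messages  # reset the shared history between independent runs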
data/lib/smart_prompt/openai_adapter.rb
CHANGED
@@ -1,18 +1,18 @@
-require
+require "openai"
 
 module SmartPrompt
   class OpenAIAdapter < LLMAdapter
     def initialize(config)
       super
-      api_key = @config[
-      if api_key.is_a?(String) && api_key.start_with?(
+      api_key = @config["api_key"]
+      if api_key.is_a?(String) && api_key.start_with?("ENV[") && api_key.end_with?("]")
        api_key = eval(api_key)
      end
      begin
        @client = OpenAI::Client.new(
          access_token: api_key,
-          uri_base: @config[
-          request_timeout: 240
+          uri_base: @config["url"],
+          request_timeout: 240,
        )
      rescue OpenAI::ConfigurationError => e
        SmartPrompt.logger.error "Failed to initialize OpenAI client: #{e.message}"
@@ -31,33 +31,28 @@ module SmartPrompt
       end
     end
 
-    def send_request(messages, model=nil, temperature=0.7, proc)
+    def send_request(messages, model = nil, temperature = 0.7, tools = nil, proc = nil)
       SmartPrompt.logger.info "OpenAIAdapter: Sending request to OpenAI"
+      temperature = 0.7 if temperature == nil
       if model
        model_name = model
      else
-        model_name = @config[
+        model_name = @config["model"]
      end
      SmartPrompt.logger.info "OpenAIAdapter: Using model #{model_name}"
      begin
-
-
-
-
-
-
-
-        )
-        else
-          @client.chat(
-            parameters: {
-              model: model_name,
-              messages: messages,
-              temperature: @config['temperature'] || temperature,
-              stream: proc
-            }
-          )
+        parameters = {
+          model: model_name,
+          messages: messages,
+          temperature: @config["temperature"] || temperature,
+        }
+        if proc
+          parameters[:stream] = proc
        end
+        if tools
+          parameters[:tools] = tools
+        end
+        response = @client.chat(parameters: parameters)
      rescue OpenAI::Error => e
        SmartPrompt.logger.error "OpenAI API error: #{e.message}"
        raise LLMAPIError, "OpenAI API error: #{e.message}"
@@ -75,6 +70,7 @@ module SmartPrompt
       end
       SmartPrompt.logger.info "OpenAIAdapter: Received response from OpenAI"
       if proc == nil
+        @last_response = response
        return response.dig("choices", 0, "message", "content")
      end
    end
@@ -84,15 +80,15 @@ module SmartPrompt
       if model
         model_name = model
       else
-        model_name = @config[
+        model_name = @config["model"]
      end
      SmartPrompt.logger.info "OpenAIAdapter: Using model #{model_name}"
      begin
        response = @client.embeddings(
-
-
-
-
+          parameters: {
+            model: model_name,
+            input: text.to_s,
+          },
        )
      rescue => e
        SmartPrompt.logger.error "Unexpected error during Ollama request: #{e.message}"
@@ -103,4 +99,4 @@ module SmartPrompt
       return response.dig("data", 0, "embedding")
     end
   end
-end
+end
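The adapter now builds a single parameters hash and only adds :stream and :tools when they are present, and it caches the raw response in last_response for non-streaming calls. A hedged sketch of calling the new signature directly; the config values below are placeholders, not defaults from the gem.

# Sketch only: the keys mirror what send_request reads from @config; values are placeholders.
adapter = SmartPrompt::OpenAIAdapter.new(
  "api_key" => "ENV['OPENAI_API_KEY']",   # wrapped in ENV[...] so the adapter resolves it via eval
  "url" => "https://api.openai.com/v1/",
  "model" => "gpt-4o-mini",
)

messages = [{ role: "user", content: "Say hello in one word." }]

# Positional arguments: messages, model, temperature, tools, proc.
text = adapter.send_request(messages, nil, 0.7, nil, nil)

# Streaming: pass a proc as the last argument; it becomes the client's :stream callback.
adapter.send_request(messages, nil, 0.7, nil, ->(chunk, _bytesize) {
  print chunk.dig("choices", 0, "delta", "content").to_s
})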
data/lib/smart_prompt/version.rb
CHANGED
data/lib/smart_prompt/worker.rb
CHANGED
@@ -1,6 +1,6 @@
 module SmartPrompt
   class Worker
-    attr_reader :name, :config_file
+    attr_reader :name, :config_file, :conversation
 
     def initialize(name, engine)
       SmartPrompt.logger.info "Create worker's name is #{name}"
@@ -11,14 +11,14 @@ module SmartPrompt
     end
 
     def execute(params = {})
-      conversation = Conversation.new(@engine)
-      context = WorkerContext.new(conversation, params, @engine)
+      @conversation = Conversation.new(@engine, params[:tools]) unless @conversation
+      context = WorkerContext.new(@conversation, params, @engine)
      context.instance_eval(&@code)
    end
 
-    def execute_by_stream(params = {}, &proc)
-      conversation = Conversation.new(@engine)
-      context = WorkerContext.new(conversation, params, @engine, proc)
+    def execute_by_stream(params = {}, &proc)
+      @conversation = Conversation.new(@engine, params[:tools])
+      context = WorkerContext.new(@conversation, params, @engine, proc)
      context.instance_eval(&@code)
    end
 
@@ -34,7 +34,7 @@ module SmartPrompt
     end
 
   class WorkerContext
-    def initialize(conversation, params, engine, proc=nil)
+    def initialize(conversation, params, engine, proc = nil)
      @conversation = conversation
      @params = params
      @engine = engine
@@ -43,11 +43,11 @@ module SmartPrompt
 
     def method_missing(method, *args, &block)
       if @conversation.respond_to?(method)
-        if method
-          if @proc==nil
-            @conversation.send_msg
+        if method == :send_msg
+          if @proc == nil
+            @conversation.send_msg(params)
          else
-            @conversation.send_msg_by_stream(&@proc)
+            @conversation.send_msg_by_stream(params, &@proc)
          end
        else
          @conversation.send(method, *args, &block)
@@ -79,4 +79,4 @@ module SmartPrompt
       worker.execute_by_stream(params, proc)
     end
   end
-end
+end
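Because WorkerContext now forwards params into send_msg and send_msg_by_stream, options such as :tools and :with_history given to call_worker flow through to the conversation, and the worker reuses one Conversation across calls. A sketch of a worker definition under these assumptions; define_worker is assumed to be the gem's registration DSL, :translator is hypothetical, and params is assumed to be readable inside the block.

# Sketch only: assumes SmartPrompt.define_worker registers the block in Worker.workers.
SmartPrompt.define_worker :translator do
  use "deepseek"                      # hypothetical LLM name from the config file
  sys_msg "You are a careful translator."
  prompt "Translate to English: #{params[:text]}"
  send_msg                            # params (e.g. with_history: true) are forwarded here
end

engine.call_worker(:translator, text: "Bonjour tout le monde", with_history: true)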
metadata
CHANGED
@@ -1,13 +1,13 @@
 --- !ruby/object:Gem::Specification
 name: smart_prompt
 version: !ruby/object:Gem::Version
-  version: 0.2.1
+  version: 0.2.3
 platform: ruby
 authors:
 - zhuang biaowei
 bindir: exe
 cert_chain: []
-date: 2025-
+date: 2025-04-06 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: yaml