smart_prompt 0.2.2 → 0.2.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz: 671745a347fa5443d42ce018a0c00870f5c1c13771060d1facea3ecf6cff41d6
-  data.tar.gz: 5a2f1177a1ed5de38daee6422586841c8358c869cd3e9fc71d4b67078a5438e1
+  metadata.gz: 74ae2371cb915bd160c90add5b0b2b771b45b9b2fbed32ce05da3140d6cde814
+  data.tar.gz: 7898daeb6a6dbf9d963b4daabf603140a14c503aa750aa9917e32a2619213c37
 SHA512:
-  metadata.gz: 9fed26cf6880bf7b9bb2e71c2e4013c66a25b9b542bc88e5727025eef57731d5024dc3a01d548ed3d76f404646835666540dc7ea056477f8abd9cc2b28bddeeb
-  data.tar.gz: 1ac2f3ed8fc1b664a623888193d82fe114ef8a8a8f43b6dd8600875132d617af624ccca6d0c6f10814759ae881e51c2351ba6f4bba3b09f89b426381da470fb7
+  metadata.gz: 6a6beb7c8c669c35f7ec3b30bfd67b5d66af9043bcd93beb82c172b7651a907ce0cb40a3835d3871eedb4984e76e6bea8ba6100aec57d50dcd3398fa8027e246
+  data.tar.gz: 81088495756cb62aace3f6154f61a5b7d55b1ec43ec9e927c48e9f78d068348e05f90e34802af584ad732ad1db0f736a00f0abae9d0a1a984c86550136875f4e
lib/smart_prompt/conversation.rb CHANGED
@@ -33,23 +33,32 @@ module SmartPrompt
       @temperature = temperature
     end
 
+    def history_messages
+      @engine.history_messages
+    end
+
+    def add_message(msg)
+      history_messages << msg
+      @messages << msg
+    end
+
     def prompt(template_name, params = {})
       if template_name.class == Symbol
         template_name = template_name.to_s
         SmartPrompt.logger.info "Use template #{template_name}"
         raise "Template #{template_name} not found" unless @templates.key?(template_name)
         content = @templates[template_name].render(params)
-        @messages << { role: "user", content: content }
+        add_message({ role: "user", content: content })
         self
       else
-        @messages << { role: "user", content: template_name }
+        add_message({ role: "user", content: template_name })
         self
       end
     end
 
     def sys_msg(message)
       @sys_msg = message
-      @messages << { role: "system", content: message }
+      add_message({ role: "system", content: message })
       self
     end
 
@@ -61,10 +70,14 @@ module SmartPrompt
       @last_response
     end
 
-    def send_msg
+    def send_msg(params = {})
       Retriable.retriable(RETRY_OPTIONS) do
         raise ConfigurationError, "No LLM selected" if @current_llm.nil?
-        @last_response = @current_llm.send_request(@messages, @model_name, @temperature, @tools, nil)
+        if params[:with_history]
+          @last_response = @current_llm.send_request(history_messages, @model_name, @temperature, @tools, nil)
+        else
+          @last_response = @current_llm.send_request(@messages, @model_name, @temperature, @tools, nil)
+        end
         if @last_response == ""
           @last_response = @current_llm.last_response
         end
@@ -76,10 +89,14 @@ module SmartPrompt
       return "Failed to call LLM after #{MAX_RETRIES} attempts: #{e.message}"
     end
 
-    def send_msg_by_stream(&proc)
+    def send_msg_by_stream(params = {}, &proc)
       Retriable.retriable(RETRY_OPTIONS) do
         raise ConfigurationError, "No LLM selected" if @current_llm.nil?
-        @current_llm.send_request(@messages, @model_name, @temperature, @tools, proc)
+        if params[:with_history]
+          @current_llm.send_request(history_messages, @model_name, @temperature, @tools, proc)
+        else
+          @current_llm.send_request(@messages, @model_name, @temperature, @tools, proc)
+        end
         @messages = []
         @messages << { role: "system", content: @sys_msg }
       end
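
Taken together, the Conversation changes above add an engine-wide history: add_message records each message both in the shared engine history and in the per-conversation @messages list, and send_msg / send_msg_by_stream now take a params hash whose :with_history key selects which list is sent to the LLM. A minimal usage sketch, assuming the two-argument Conversation.new(engine, tools) call seen in worker.rb below; the config path is hypothetical:

  require "smart_prompt"

  engine = SmartPrompt::Engine.new("config/llm_config.yml")  # hypothetical path
  conv = SmartPrompt::Conversation.new(engine, nil)          # nil: no tools

  conv.sys_msg("You are a helpful assistant.")   # recorded in both lists
  conv.prompt("Summarize this changelog.")       # plain-string prompt, no template
  conv.send_msg(with_history: true)              # sends the shared engine-wide history
  # conv.send_msg                                # would send only this conversation's messages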
lib/smart_prompt/engine.rb CHANGED
@@ -1,103 +1,130 @@
 module SmartPrompt
-  class Engine
-    attr_reader :config_file, :config, :adapters, :current_adapter, :llms, :templates
-    def initialize(config_file)
+  class Engine
+    attr_reader :config_file, :config, :adapters, :current_adapter, :llms, :templates
+
+    def initialize(config_file)
+      @config_file = config_file
+      @adapters = {}
+      @llms = {}
+      @templates = {}
+      @current_workers = {}
+      @history_messages = []
+      load_config(config_file)
+      SmartPrompt.logger.info "Started create the SmartPrompt engine."
+    end
+
+    def create_dir(filename)
+      path = File::path(filename).to_s
+      parent_dir = File::dirname(path)
+      Dir.mkdir(parent_dir, 0755) unless File.directory?(parent_dir)
+    end
+
+    def load_config(config_file)
+      begin
       @config_file = config_file
-      @adapters={}
-      @llms={}
-      @templates={}
-      load_config(config_file)
-      SmartPrompt.logger.info "Started create the SmartPrompt engine."
+        @config = YAML.load_file(config_file)
+        if @config["logger_file"]
+          create_dir(@config["logger_file"])
+          SmartPrompt.logger = Logger.new(@config["logger_file"])
+        end
+        SmartPrompt.logger.info "Loading configuration from file: #{config_file}"
+        @config["adapters"].each do |adapter_name, adapter_class|
+          adapter_class = SmartPrompt.const_get(adapter_class)
+          @adapters[adapter_name] = adapter_class
+        end
+        @config["llms"].each do |llm_name, llm_config|
+          adapter_class = @adapters[llm_config["adapter"]]
+          @llms[llm_name] = adapter_class.new(llm_config)
+        end
+        @current_llm = @config["default_llm"] if @config["default_llm"]
+        Dir.glob(File.join(@config["template_path"], "*.erb")).each do |file|
+          template_name = file.gsub(@config["template_path"] + "/", "").gsub("\.erb", "")
+          @templates[template_name] = PromptTemplate.new(file)
+        end
+        load_workers
+      rescue Psych::SyntaxError => ex
+        SmartPrompt.logger.error "YAML syntax error in config file: #{ex.message}"
+        raise ConfigurationError, "Invalid YAML syntax in config file: #{ex.message}"
+      rescue Errno::ENOENT => ex
+        SmartPrompt.logger.error "Config file not found: #{ex.message}"
+        raise ConfigurationError, "Config file not found: #{ex.message}"
+      rescue StandardError => ex
+        SmartPrompt.logger.error "Error loading configuration: #{ex.message}"
+        raise ConfigurationError, "Error loading configuration: #{ex.message}"
+      ensure
+        SmartPrompt.logger.info "Configuration loaded successfully"
       end
+    end
 
-    def create_dir(filename)
-      path = File::path(filename).to_s
-      parent_dir = File::dirname(path)
-      Dir.mkdir(parent_dir, 0755) unless File.directory?(parent_dir)
+    def load_workers
+      Dir.glob(File.join(@config["worker_path"], "*.rb")).each do |file|
+        require(file)
       end
+    end
 
-    def load_config(config_file)
-      begin
-        @config_file = config_file
-        @config = YAML.load_file(config_file)
-        if @config['logger_file']
-          create_dir(@config['logger_file'])
-          SmartPrompt.logger = Logger.new(@config['logger_file'])
-        end
-        SmartPrompt.logger.info "Loading configuration from file: #{config_file}"
-        @config['adapters'].each do |adapter_name, adapter_class|
-          adapter_class = SmartPrompt.const_get(adapter_class)
-          @adapters[adapter_name] = adapter_class
-        end
-        @config['llms'].each do |llm_name,llm_config|
-          adapter_class = @adapters[llm_config['adapter']]
-          @llms[llm_name]=adapter_class.new(llm_config)
-        end
-        @current_llm = @config['default_llm'] if @config['default_llm']
-        Dir.glob(File.join(@config['template_path'], '*.erb')).each do |file|
-          template_name = file.gsub(@config['template_path']+"/","").gsub("\.erb","")
-          @templates[template_name] = PromptTemplate.new(file)
-        end
-        load_workers
-      rescue Psych::SyntaxError => ex
-        SmartPrompt.logger.error "YAML syntax error in config file: #{ex.message}"
-        raise ConfigurationError, "Invalid YAML syntax in config file: #{ex.message}"
-      rescue Errno::ENOENT => ex
-        SmartPrompt.logger.error "Config file not found: #{ex.message}"
-        raise ConfigurationError, "Config file not found: #{ex.message}"
-      rescue StandardError => ex
-        SmartPrompt.logger.error "Error loading configuration: #{ex.message}"
-        raise ConfigurationError, "Error loading configuration: #{ex.message}"
-      ensure
-        SmartPrompt.logger.info "Configuration loaded successfully"
-      end
-    end
-
-    def load_workers
-      Dir.glob(File.join(@config['worker_path'], '*.rb')).each do |file|
-        require(file)
-      end
+    def check_worker(worker_name)
+      if SmartPrompt::Worker.workers[worker_name]
+        return true
+      else
+        SmartPrompt.logger.warn "Invalid worker: #{worker_name}"
+        return false
       end
+    end
 
-    def check_worker(worker_name)
-      if SmartPrompt::Worker.workers[worker_name]
-        return true
+    def call_worker(worker_name, params = {})
+      SmartPrompt.logger.info "Calling worker: #{worker_name} with params: #{params}"
+      worker = get_worker(worker_name)
+      begin
+        result = worker.execute(params)
+        SmartPrompt.logger.info "Worker #{worker_name} executed successfully"
+        if result.class == String
+          recive_message = {
+            "role": "assistant",
+            "content": result,
+          }
       else
-        SmartPrompt.logger.warn "Invalid worker: #{worker_name}"
-        return false
-      end
-    end
-
-    def call_worker(worker_name, params = {})
-      SmartPrompt.logger.info "Calling worker: #{worker_name} with params: #{params}"
-      worker = get_worker(worker_name)
-      begin
-        result = worker.execute(params)
-        SmartPrompt.logger.info "Worker #{worker_name} executed successfully"
-        result
-      rescue => e
-        SmartPrompt.logger.error "Error executing worker #{worker_name}: #{e.message}"
-        SmartPrompt.logger.debug e.backtrace.join("\n")
-        raise
+          recive_message = {
+            "role": result.dig("choices", 0, "message", "role"),
+            "content": result.dig("choices", 0, "message", "content").to_s + result.dig("choices", 0, "message", "tool_calls").to_s,
+          }
       end
+        worker.conversation.add_message(recive_message)
+        result
+      rescue => e
+        SmartPrompt.logger.error "Error executing worker #{worker_name}: #{e.message}"
+        SmartPrompt.logger.debug e.backtrace.join("\n")
+        raise
       end
+    end
 
-    def call_worker_by_stream(worker_name, params = {}, &proc)
-      SmartPrompt.logger.info "Calling worker: #{worker_name} with params: #{params}"
-      worker = get_worker(worker_name)
-      begin
-        worker.execute_by_stream(params, &proc)
-        SmartPrompt.logger.info "Worker #{worker_name} executed(stream) successfully"
-      rescue => e
-        SmartPrompt.logger.error "Error executing worker #{worker_name}: #{e.message}"
-        SmartPrompt.logger.debug e.backtrace.join("\n")
-        raise
-      end
+    def call_worker_by_stream(worker_name, params = {}, &proc)
+      SmartPrompt.logger.info "Calling worker: #{worker_name} with params: #{params}"
+      worker = get_worker(worker_name)
+      begin
+        worker.execute_by_stream(params, &proc)
+        SmartPrompt.logger.info "Worker #{worker_name} executed(stream) successfully"
+      rescue => e
+        SmartPrompt.logger.error "Error executing worker #{worker_name}: #{e.message}"
+        SmartPrompt.logger.debug e.backtrace.join("\n")
+        raise
      end
-    private
-    def get_worker(worker_name)
-      SmartPrompt.logger.info "Creating worker instance for: #{worker_name}"
-      Worker.new(worker_name, self)
+    end
+
+    def get_worker(worker_name)
+      SmartPrompt.logger.info "Creating worker instance for: #{worker_name}"
+      unless worker = @current_workers[worker_name]
+        worker = Worker.new(worker_name, self)
+        @current_workers[worker_name] = worker
       end
+      return worker
+    end
+
+    def history_messages
+      @history_messages
+    end
+
+    def clear_history_messages
+      @history_messages = []
    end
-  end
+  end
+end
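
Note on the Engine rewrite above: beyond the quote-style reformatting, 0.2.4 adds a @current_workers cache and a shared @history_messages list, makes get_worker public and memoizing (one Worker instance per name), and has call_worker append the assistant's reply to the worker's conversation, either as a plain string or via result.dig("choices", 0, "message", ...) for hash responses. A behavior sketch; the config path and worker name are hypothetical:

  engine = SmartPrompt::Engine.new("config/llm_config.yml")

  w1 = engine.get_worker(:summarizer)  # first call builds and caches the Worker
  w2 = engine.get_worker(:summarizer)  # second call returns the cached instance
  w1.equal?(w2)                        # => true

  engine.history_messages              # shared transcript, grows across calls
  engine.clear_history_messages        # reset it between sessions

One consequence of the cache worth noting: workers now keep state between call_worker invocations, which is what allows the shared history above to accumulate.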
lib/smart_prompt/openai_adapter.rb CHANGED
@@ -1,18 +1,18 @@
-require 'openai'
+require "openai"
 
 module SmartPrompt
   class OpenAIAdapter < LLMAdapter
     def initialize(config)
       super
-      api_key = @config['api_key']
-      if api_key.is_a?(String) && api_key.start_with?('ENV[') && api_key.end_with?(']')
+      api_key = @config["api_key"]
+      if api_key.is_a?(String) && api_key.start_with?("ENV[") && api_key.end_with?("]")
         api_key = eval(api_key)
       end
       begin
         @client = OpenAI::Client.new(
           access_token: api_key,
-          uri_base: @config['url'],
-          request_timeout: 240
+          uri_base: @config["url"],
+          request_timeout: 240,
         )
       rescue OpenAI::ConfigurationError => e
         SmartPrompt.logger.error "Failed to initialize OpenAI client: #{e.message}"
@@ -31,25 +31,26 @@ module SmartPrompt
       end
     end
 
-    def send_request(messages, model=nil, temperature=0.7, tools=nil, proc=nil)
+    def send_request(messages, model = nil, temperature = 0.7, tools = nil, proc = nil)
       SmartPrompt.logger.info "OpenAIAdapter: Sending request to OpenAI"
+      temperature = 0.7 if temperature == nil
       if model
         model_name = model
       else
-        model_name = @config['model']
+        model_name = @config["model"]
       end
       SmartPrompt.logger.info "OpenAIAdapter: Using model #{model_name}"
       begin
         parameters = {
           model: model_name,
           messages: messages,
-          temperature: @config['temperature'] || temperature
+          temperature: @config["temperature"] || temperature,
         }
         if proc
-          parameters[:stream]=proc
+          parameters[:stream] = proc
         end
         if tools
-          parameters[:tools]=tools
+          parameters[:tools] = tools
         end
         response = @client.chat(parameters: parameters)
       rescue OpenAI::Error => e
@@ -79,15 +80,15 @@ module SmartPrompt
       if model
         model_name = model
       else
-        model_name = @config['model']
+        model_name = @config["model"]
       end
       SmartPrompt.logger.info "OpenAIAdapter: Using model #{model_name}"
       begin
         response = @client.embeddings(
-          parameters: {
-            model: model_name,
-            input: text.to_s
-          }
+          parameters: {
+            model: model_name,
+            input: text.to_s,
+          },
         )
       rescue => e
         SmartPrompt.logger.error "Unexpected error during Ollama request: #{e.message}"
@@ -98,4 +99,4 @@ module SmartPrompt
       return response.dig("data", 0, "embedding")
     end
   end
-end
+end
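
Note on the adapter changes above: send_request now falls back to 0.7 when the caller passes temperature: nil, although a "temperature" key in the YAML config still overrides whatever the caller supplies (@config["temperature"] || temperature), and the initializer continues to eval api_key strings of the form ENV[...]. A hypothetical config entry matching the keys this adapter reads (api_key, url, model, temperature) and the adapters/llms/default_llm layout that Engine#load_config iterates over:

  adapters:
    openai: OpenAIAdapter
  llms:
    gpt4:
      adapter: openai
      url: https://api.openai.com/v1
      api_key: ENV["OPENAI_API_KEY"]   # eval'd at client setup, keeps the key out of the file
      model: gpt-4o-mini
  default_llm: gpt4

Also visible above: the embeddings rescue still logs "Unexpected error during Ollama request" inside OpenAIAdapter, a copy-paste message that 0.2.4 leaves unchanged.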
lib/smart_prompt/version.rb CHANGED
@@ -1,3 +1,3 @@
 module SmartPrompt
-  VERSION = "0.2.2"
+  VERSION = "0.2.4"
 end
lib/smart_prompt/worker.rb CHANGED
@@ -1,6 +1,6 @@
 module SmartPrompt
   class Worker
-    attr_reader :name, :config_file
+    attr_reader :name, :config_file, :conversation
 
     def initialize(name, engine)
       SmartPrompt.logger.info "Create worker's name is #{name}"
@@ -11,14 +11,14 @@ module SmartPrompt
     end
 
     def execute(params = {})
-      conversation = Conversation.new(@engine, params[:tools])
-      context = WorkerContext.new(conversation, params, @engine)
+      @conversation = Conversation.new(@engine, params[:tools]) unless @conversation
+      context = WorkerContext.new(@conversation, params, @engine)
       context.instance_eval(&@code)
     end
 
-    def execute_by_stream(params = {}, &proc)
-      conversation = Conversation.new(@engine, params[:tools])
-      context = WorkerContext.new(conversation, params, @engine, proc)
+    def execute_by_stream(params = {}, &proc)
+      @conversation = Conversation.new(@engine, params[:tools])
+      context = WorkerContext.new(@conversation, params, @engine, proc)
       context.instance_eval(&@code)
     end
 
@@ -34,7 +34,7 @@ module SmartPrompt
   end
 
   class WorkerContext
-    def initialize(conversation, params, engine, proc=nil)
+    def initialize(conversation, params, engine, proc = nil)
       @conversation = conversation
       @params = params
       @engine = engine
@@ -43,11 +43,11 @@ module SmartPrompt
 
     def method_missing(method, *args, &block)
       if @conversation.respond_to?(method)
-        if method==:send_msg
-          if @proc==nil
-            @conversation.send_msg
+        if method == :send_msg
+          if @proc == nil
+            @conversation.send_msg(params)
           else
-            @conversation.send_msg_by_stream(&@proc)
+            @conversation.send_msg_by_stream(params, &@proc)
           end
         else
           @conversation.send(method, *args, &block)
@@ -79,4 +79,4 @@ module SmartPrompt
       worker.execute_by_stream(params, proc)
     end
   end
-end
+end
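
Note on the worker changes above: Worker now keeps its Conversation in @conversation (created once in execute, reused on later calls, and exposed through attr_reader), and WorkerContext#method_missing forwards the worker's params into send_msg / send_msg_by_stream. The practical effect is that options passed to Engine#call_worker, such as with_history: true, now reach Conversation#send_msg. A sketch; the define_worker DSL name is taken from the gem's README rather than this diff, and the worker name and prompt are illustrative:

  SmartPrompt.define_worker :chat do
    use "gpt4"                         # LLM name from the hypothetical config sketch above
    sys_msg "You are a helpful assistant."
    prompt params[:question]
    send_msg                           # receives the worker's params, so :with_history applies
  end

  engine.call_worker(:chat, question: "What changed in 0.2.4?", with_history: true)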
metadata CHANGED
@@ -1,13 +1,13 @@
 --- !ruby/object:Gem::Specification
 name: smart_prompt
 version: !ruby/object:Gem::Version
-  version: 0.2.2
+  version: 0.2.4
 platform: ruby
 authors:
 - zhuang biaowei
 bindir: exe
 cert_chain: []
-date: 2025-03-28 00:00:00.000000000 Z
+date: 2025-04-06 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: yaml