smart_prompt 0.1.4 → 0.1.5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz: 846477d8ddb503119a9233f9ce85d2094c1755be52adcfc0515b0a7209524a3d
-  data.tar.gz: 4f165cedebc54e9d8ae292e3fd03329ec04150a8e0804fcb90234b7837135e6d
+  metadata.gz: 844c87d47dccd945bedcab69c58377ae144238ac9087b49219041382db539146
+  data.tar.gz: 3e70f88fbe8d26a0a15408e73ae9060b9f711a18e6b44eb561910b90156ca14b
 SHA512:
-  metadata.gz: 7f3521861b4690b233c9b82d6f08123b9859c0fed28e40274fb4eaa014aa93b2df63121aff77451c9b1f5f93f9eeb683c5b044c94cef5e7d11a8e2151c5e97f8
-  data.tar.gz: 9c78f290101fd158403d2feb9e84312d8ffb90a22a32a57e91d96f5c287a38cfd8cb8c318817e860d7fb09bd5584310e6d397fce490deaa29ca52bafb3db7fba
+  metadata.gz: 398854ce070f96f794ae944cc656a10d2f4752268cb35b4f9a857fcd8a722489b3ee112de97e5cdfe1cc1efb75cf92e4cd4a9b36cb338cbd8de6e22f1d6cc7dc
+  data.tar.gz: babac8b7d0479eb376df22b00cd693e6e657293e6b28b654aaa6b2ef4f9fcfdccb9709d6f1dd0b066cb6f22c07eed0173426816121061a80c91fc9afdd62ba06
data/lib/smart_prompt/conversation.rb CHANGED
@@ -5,6 +5,7 @@ module SmartPrompt
     attr_reader :messages, :last_response, :config_file
 
     def initialize(engine)
+      SmartPrompt.logger.info "Create Conversation"
       @messages = []
       @engine = engine
       @adapters = engine.adapters
@@ -26,6 +27,7 @@ module SmartPrompt
 
     def prompt(template_name, params = {})
       template_name = template_name.to_s
+      SmartPrompt.logger.info "Use template #{template_name}"
       raise "Template #{template_name} not found" unless @templates.key?(template_name)
       content = @templates[template_name].render(params)
       @messages << { role: 'user', content: content }
data/lib/smart_prompt/engine.rb CHANGED
@@ -2,6 +2,7 @@ module SmartPrompt
   class Engine
     attr_reader :config_file, :config, :adapters, :current_adapter, :llms, :templates
     def initialize(config_file)
+      SmartPrompt.logger.info "Start create the SmartPrompt engine."
       @config_file = config_file
       @adapters={}
       @llms={}
@@ -10,6 +11,7 @@ module SmartPrompt
     end
 
     def load_config(config_file)
+      SmartPrompt.logger.info "Loading configuration from file: #{config_file}"
       @config_file = config_file
       @config = YAML.load_file(config_file)
       @config['adapters'].each do |adapter_name, adapter_class|
@@ -35,14 +37,25 @@ module SmartPrompt
     end
 
     def call_worker(worker_name, params = {})
+      SmartPrompt.logger.info "Calling worker: #{worker_name} with params: #{params}"
       worker = get_worker(worker_name)
-      worker.execute(params)
+
+      begin
+        result = worker.execute(params)
+        SmartPrompt.logger.info "Worker #{worker_name} executed successfully"
+        result
+      rescue => e
+        SmartPrompt.logger.error "Error executing worker #{worker_name}: #{e.message}"
+        SmartPrompt.logger.debug e.backtrace.join("\n")
+        raise
+      end
     end
 
     private
 
     def get_worker(worker_name)
-      worker = Worker.new(worker_name, self)
+      SmartPrompt.logger.info "Creating worker instance for: #{worker_name}"
+      Worker.new(worker_name, self)
     end
   end
 end
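For orientation, here is a minimal usage sketch of the reworked Engine#call_worker. It is not part of the diff; the config path "config.yml" and the worker name :summarize are hypothetical placeholders. The point it illustrates is that, as of 0.1.5, a failure inside the worker is logged through SmartPrompt.logger and then re-raised to the caller.

# Illustrative sketch only -- not part of the released code.
# "config.yml" and :summarize are hypothetical; any registered worker would do.
require "smart_prompt"

engine = SmartPrompt::Engine.new("config.yml")
begin
  result = engine.call_worker(:summarize, text: "hello world")
  puts result
rescue => e
  # The engine has already logged the error message and backtrace before re-raising.
  warn "worker failed: #{e.message}"
end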
data/lib/smart_prompt/llm_adapter.rb CHANGED
@@ -7,10 +7,12 @@ require 'ollama-ai'
 module SmartPrompt
   class LLMAdapter
     def initialize(config)
+      SmartPrompt.logger.info "Start create the SmartPrompt LLMAdapter."
       @config = config
     end
 
     def send_request(messages)
+      SmartPrompt.logger.error "LLMAdapter: Subclasses must implement send_request"
       raise NotImplementedError, "Subclasses must implement send_request"
     end
   end
@@ -30,11 +32,13 @@ module SmartPrompt
     end
 
     def send_request(messages, model=nil)
+      SmartPrompt.logger.info "OpenAIAdapter: Sending request to OpenAI"
       if model
         model_name = model
       else
         model_name = @config['model']
       end
+      SmartPrompt.logger.info "OpenAIAdapter: Using model #{model_name}"
       response = @client.chat(
         parameters: {
           model: model_name,
@@ -42,6 +46,7 @@ module SmartPrompt
           temperature: @config['temperature'] || 0.7
         }
       )
+      SmartPrompt.logger.info "OpenAIAdapter: Received response from OpenAI"
       response.dig("choices", 0, "message", "content")
     end
   end
@@ -54,12 +59,14 @@ module SmartPrompt
       )
     end
     def send_request(messages, model=nil)
+      SmartPrompt.logger.info "LlamacppAdapter: Sending request to Llamacpp"
       response = @client.chat(
         parameters: {
           messages: messages,
           temperature: @config['temperature'] || 0.7
         }
       )
+      SmartPrompt.logger.info "LlamacppAdapter: Received response from Llamacpp"
       response.dig("choices", 0, "message", "content")
     end
   end
@@ -71,11 +78,13 @@ module SmartPrompt
     end
 
     def send_request(messages, model=nil)
+      SmartPrompt.logger.info "OllamaAdapter: Sending request to Ollama"
       if model
         model_name = model
       else
         model_name = @config['model']
       end
+      SmartPrompt.logger.info "OllamaAdapter: Using model #{model_name}"
       response = @client.generate(
         {
           model: model_name,
@@ -83,6 +92,7 @@ module SmartPrompt
           stream: false
         }
       )
+      SmartPrompt.logger.info "OllamaAdapter: Received response from Ollama"
       return response[0]["response"]
     end
   end
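As context for the LLMAdapter changes above, here is a hedged sketch, not part of the diff, of what a custom adapter looks like against this interface. EchoAdapter is a hypothetical name; the only requirement visible in the released code is that subclasses override send_request, which the base class otherwise logs as an error and raises NotImplementedError for.

# Hypothetical adapter subclass -- illustrative only, not part of the gem.
module SmartPrompt
  class EchoAdapter < LLMAdapter
    # Matches the send_request(messages, model=nil) shape used by the shipped adapters.
    def send_request(messages, model = nil)
      SmartPrompt.logger.info "EchoAdapter: echoing the last user message"
      # Conversation stores messages as { role: 'user', content: content } hashes.
      messages.last[:content]
    end
  end
end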
data/lib/smart_prompt/version.rb CHANGED
@@ -1,3 +1,3 @@
 module SmartPrompt
-  VERSION = "0.1.4"
+  VERSION = "0.1.5"
 end
data/lib/smart_prompt/worker.rb CHANGED
@@ -3,6 +3,7 @@ module SmartPrompt
     attr_reader :name, :config_file
 
     def initialize(name, engine)
+      SmartPrompt.logger.info "Create worker's name is #{name}"
       @name = name
       @engine = engine
       @config = engine.config
data/lib/smart_prompt.rb CHANGED
@@ -7,6 +7,7 @@ require File.expand_path('../smart_prompt/worker', __FILE__)
 
 module SmartPrompt
   class Error < StandardError; end
+  attr_writer :logger
 
   def self.define_worker(name, &block)
     Worker.define(name, &block)
@@ -16,4 +17,10 @@ module SmartPrompt
     worker = Worker.new(name, config_file)
     worker.execute(params)
   end
+
+  def self.logger
+    @logger ||= Logger.new($stdout).tap do |log|
+      log.progname = self.name
+    end
+  end
 end
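For orientation, a small sketch, not part of the diff, of interacting with the module-level logger this release introduces. SmartPrompt.logger memoizes a Logger writing to $stdout with progname "SmartPrompt". Note that the attr_writer :logger added above defines an instance-level writer rather than a SmartPrompt.logger= module method, so the sketch only tunes the default logger rather than assigning a new one.

# Illustrative sketch only -- not part of the released code.
require "logger"
require "smart_prompt"

# Default: a memoized Logger on $stdout whose progname is "SmartPrompt".
SmartPrompt.logger.level = Logger::WARN   # quiet the new info-level messages
SmartPrompt.logger.info  "hidden at WARN level"
SmartPrompt.logger.error "still printed"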
metadata CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: smart_prompt
 version: !ruby/object:Gem::Version
-  version: 0.1.4
+  version: 0.1.5
 platform: ruby
 authors:
 - zhuang biaowei
 autorequire:
 bindir: exe
 cert_chain: []
-date: 2024-10-02 00:00:00.000000000 Z
+date: 2024-10-07 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: yaml