smart_prompt 0.1.6 → 0.1.7

This diff shows the content of publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz: 18a10421c03b8707b2ff6d7cf40a5e58eee621b62a177c7ab3d10c2b5e75b767
-  data.tar.gz: 4f43968f0b7cf8e9dab3ce5959f2a837729b335b0a1fcc533852339f130b7e43
+  metadata.gz: 3bf70c6e843930ff7e1a03c9dd8bfc97b3d097786f84613aab65863e1e43a624
+  data.tar.gz: e9ee9d99378453d90e2ac25b88248f3226f241074c7f80e8bf35104279f05202
 SHA512:
-  metadata.gz: e0fd7f1e1995821f53135e01f92c3d3cdf1296405fec138c588ff23450c9be452d81b8a3dd42fb1e9fdf73748a1cc0119fe09285bac281eb035c2bec4742987c
-  data.tar.gz: 45e702684683ff912f149a2ae2f7eb94f0f4efa183f991923db8858223a1c721e975821972f7183fcb958e902168a65b22d5ea14f84be6cea87a434ced74e447
+  metadata.gz: 02fbfcdb0ae2292994404219eaa2f1218d54de132ceab061ca4466603186cdc9db5826b7e6e1fcfaa8a37f2527f42c3f863154dfd4cfe94b5b7df58969183285
+  data.tar.gz: b0034a25a0fa4bc6c197beee4635528c3b4887f36a4c8981ebb9894b7c5db61a245a0ff823f6ce171d629ddc65a61a960a172d4235d319d7220050225b7a8a48
data/lib/smart_prompt/engine.rb CHANGED
@@ -11,26 +11,39 @@ module SmartPrompt
     end

     def load_config(config_file)
-      @config_file = config_file
-      @config = YAML.load_file(config_file)
-      if @config['logger_file']
-        SmartPrompt.logger = Logger.new(@config['logger_file'])
-      end
-      SmartPrompt.logger.info "Loading configuration from file: #{config_file}"
-      @config['adapters'].each do |adapter_name, adapter_class|
-        adapter_class = SmartPrompt.const_get(adapter_class)
-        @adapters[adapter_name] = adapter_class
-      end
-      @config['llms'].each do |llm_name,llm_config|
-        adapter_class = @adapters[llm_config['adapter']]
-        @llms[llm_name]=adapter_class.new(llm_config)
-      end
-      @current_llm = @config['default_llm'] if @config['default_llm']
-      Dir.glob(File.join(@config['template_path'], '*.erb')).each do |file|
-        template_name = file.gsub(@config['template_path']+"/","").gsub("\.erb","")
-        @templates[template_name] = PromptTemplate.new(file)
-      end
-      load_workers
+      begin
+        @config_file = config_file
+        @config = YAML.load_file(config_file)
+        if @config['logger_file']
+          SmartPrompt.logger = Logger.new(@config['logger_file'])
+        end
+        SmartPrompt.logger.info "Loading configuration from file: #{config_file}"
+        @config['adapters'].each do |adapter_name, adapter_class|
+          adapter_class = SmartPrompt.const_get(adapter_class)
+          @adapters[adapter_name] = adapter_class
+        end
+        @config['llms'].each do |llm_name,llm_config|
+          adapter_class = @adapters[llm_config['adapter']]
+          @llms[llm_name]=adapter_class.new(llm_config)
+        end
+        @current_llm = @config['default_llm'] if @config['default_llm']
+        Dir.glob(File.join(@config['template_path'], '*.erb')).each do |file|
+          template_name = file.gsub(@config['template_path']+"/","").gsub("\.erb","")
+          @templates[template_name] = PromptTemplate.new(file)
+        end
+        load_workers
+      rescue Psych::SyntaxError => ex
+        SmartPrompt.logger.error "YAML syntax error in config file: #{ex.message}"
+        raise ConfigurationError, "Invalid YAML syntax in config file: #{ex.message}"
+      rescue Errno::ENOENT => ex
+        SmartPrompt.logger.error "Config file not found: #{ex.message}"
+        raise ConfigurationError, "Config file not found: #{ex.message}"
+      rescue StandardError => ex
+        SmartPrompt.logger.error "Error loading configuration: #{ex.message}"
+        raise ConfigurationError, "Error loading configuration: #{ex.message}"
+      ensure
+        SmartPrompt.logger.info "Configuration loaded successfully"
+      end
     end

     def load_workers
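For context, load_config consumes a plain YAML file. Below is a minimal sketch of the shape it expects, assembled only from the keys the method reads above ('adapters', 'llms', 'default_llm', 'template_path', 'logger_file'); every name and value is illustrative, not documented gem behavior:

    adapters:
      openai: OpenAIAdapter          # resolved via SmartPrompt.const_get
    llms:
      gpt4:
        adapter: openai
        url: https://api.openai.com/v1/
        model: gpt-4
        temperature: 0.7
    default_llm: gpt4
    template_path: ./templates       # each *.erb file here becomes a named PromptTemplate
    logger_file: ./smart_prompt.log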
data/lib/smart_prompt/llamacpp_adapter.rb ADDED
@@ -0,0 +1,68 @@
+require 'openai'
+
+module SmartPrompt
+  class LlamacppAdapter < LLMAdapter
+    def initialize(config)
+      super
+      begin
+        @client = OpenAI::Client.new(
+          uri_base: @config['url']
+        )
+      rescue OpenAI::ConfigurationError => e
+        SmartPrompt.logger.error "Failed to initialize Llamacpp client: #{e.message}"
+        raise LLMAPIError, "Invalid Llamacpp configuration: #{e.message}"
+      rescue OpenAI::AuthenticationError => e
+        SmartPrompt.logger.error "Failed to initialize Llamacpp client: #{e.message}"
+        raise LLMAPIError, "Llamacpp authentication failed: #{e.message}"
+      rescue SocketError => e
+        SmartPrompt.logger.error "Failed to initialize Llamacpp client: #{e.message}"
+        raise LLMAPIError, "Network error: Unable to connect to Llamacpp API"
+      rescue => e
+        SmartPrompt.logger.error "Failed to initialize Llamacpp client: #{e.message}"
+        raise Error, "Unexpected error initializing Llamacpp client: #{e.message}"
+      ensure
+        SmartPrompt.logger.info "Successful creation an Llamacpp client."
+      end
+    end
+
+    def send_request(messages, model=nil)
+      SmartPrompt.logger.info "LlamacppAdapter: Sending request to Llamacpp"
+      begin
+        response = @client.chat(
+          parameters: {
+            messages: messages,
+            temperature: @config['temperature'] || 0.7
+          }
+        )
+      rescue OpenAI::APIError => e
+        SmartPrompt.logger.error "Llamacpp API error: #{e.message}"
+        raise LLMAPIError, "Llamacpp API error: #{e.message}"
+      rescue OpenAI::APIConnectionError => e
+        SmartPrompt.logger.error "Connection error: Unable to reach Llamacpp API"
+        raise LLMAPIError, "Connection error: Unable to reach Llamacpp API"
+      rescue OpenAI::APITimeoutError => e
+        SmartPrompt.logger.error "Request to Llamacpp API timed out"
+        raise LLMAPIError, "Request to Llamacpp API timed out"
+      rescue OpenAI::InvalidRequestError => e
+        SmartPrompt.logger.error "Invalid request to Llamacpp API: #{e.message}"
+        raise LLMAPIError, "Invalid request to Llamacpp API: #{e.message}"
+      rescue OpenAI::AuthenticationError => e
+        SmartPrompt.logger.error "Authentication error with Llamacpp API: #{e.message}"
+        raise LLMAPIError, "Authentication error with Llamacpp API: #{e.message}"
+      rescue OpenAI::RateLimitError => e
+        SmartPrompt.logger.error "Rate limit exceeded for Llamacpp API"
+        raise LLMAPIError, "Rate limit exceeded for Llamacpp API"
+      rescue JSON::ParserError => e
+        SmartPrompt.logger.error "Failed to parse Llamacpp API response"
+        raise LLMAPIError, "Failed to parse Llamacpp API response"
+      rescue => e
+        SmartPrompt.logger.error "Unexpected error during Llamacpp request: #{e.message}"
+        raise Error, "Unexpected error during Llamacpp request: #{e.message}"
+      ensure
+        SmartPrompt.logger.info "Successful send a message"
+      end
+      SmartPrompt.logger.info "LlamacppAdapter: Received response from Llamacpp"
+      response.dig("choices", 0, "message", "content")
+    end
+  end
+end
data/lib/smart_prompt/llm_adapter.rb CHANGED
@@ -1,8 +1,6 @@
 require 'net/http'
 require 'json'
 require 'uri'
-require 'openai'
-require 'ollama-ai'
 
 module SmartPrompt
   class LLMAdapter
@@ -17,86 +15,6 @@ module SmartPrompt
     end
   end
 
-  class OpenAIAdapter < LLMAdapter
-    def initialize(config)
-      super
-      api_key = @config['api_key']
-      if api_key.is_a?(String) && api_key.start_with?('ENV[') && api_key.end_with?(']')
-        api_key = eval(api_key)
-      end
-      @client = OpenAI::Client.new(
-        access_token: api_key,
-        uri_base: @config['url'],
-        request_timeout: 240
-      )
-    end
-
-    def send_request(messages, model=nil)
-      SmartPrompt.logger.info "OpenAIAdapter: Sending request to OpenAI"
-      if model
-        model_name = model
-      else
-        model_name = @config['model']
-      end
-      SmartPrompt.logger.info "OpenAIAdapter: Using model #{model_name}"
-      response = @client.chat(
-        parameters: {
-          model: model_name,
-          messages: messages,
-          temperature: @config['temperature'] || 0.7
-        }
-      )
-      SmartPrompt.logger.info "OpenAIAdapter: Received response from OpenAI"
-      response.dig("choices", 0, "message", "content")
-    end
-  end
-
-  class LlamacppAdapter < LLMAdapter
-    def initialize(config)
-      super
-      @client = OpenAI::Client.new(
-        uri_base: @config['url']
-      )
-    end
-    def send_request(messages, model=nil)
-      SmartPrompt.logger.info "LlamacppAdapter: Sending request to Llamacpp"
-      response = @client.chat(
-        parameters: {
-          messages: messages,
-          temperature: @config['temperature'] || 0.7
-        }
-      )
-      SmartPrompt.logger.info "LlamacppAdapter: Received response from Llamacpp"
-      response.dig("choices", 0, "message", "content")
-    end
-  end
-
-  class OllamaAdapter < LLMAdapter
-    def initialize(config)
-      super
-      @client = Ollama.new(credentials: { address: @config['url'] })
-    end
-
-    def send_request(messages, model=nil)
-      SmartPrompt.logger.info "OllamaAdapter: Sending request to Ollama"
-      if model
-        model_name = model
-      else
-        model_name = @config['model']
-      end
-      SmartPrompt.logger.info "OllamaAdapter: Using model #{model_name}"
-      response = @client.generate(
-        {
-          model: model_name,
-          prompt: messages.to_s,
-          stream: false
-        }
-      )
-      SmartPrompt.logger.info "OllamaAdapter: Received response from Ollama"
-      return response[0]["response"]
-    end
-  end
-
   class MockAdapter < LLMAdapter
     def send_request(messages)
       puts "Mock adapter received #{messages.length} messages"
data/lib/smart_prompt/ollama_adapter.rb ADDED
@@ -0,0 +1,64 @@
+require 'ollama-ai'
+
+module SmartPrompt
+  class OllamaAdapter < LLMAdapter
+    def initialize(config)
+      super
+      begin
+        @client = Ollama.new(credentials: { address: @config['url'] })
+      rescue Ollama::Error => e
+        SmartPrompt.logger.error "Failed to initialize Ollama client: #{e.message}"
+        raise LLMAPIError, "Invalid Ollama configuration: #{e.message}"
+      rescue SocketError => e
+        SmartPrompt.logger.error "Failed to initialize Ollama client: #{e.message}"
+        raise LLMAPIError, "Network error: Unable to connect to Ollama API"
+      rescue => e
+        SmartPrompt.logger.error "Failed to initialize Ollama client: #{e.message}"
+        raise Error, "Unexpected error initializing Ollama client: #{e.message}"
+      ensure
+        SmartPrompt.logger.info "Successful creation an Ollama client."
+      end
+    end
+
+    def send_request(messages, model=nil)
+      SmartPrompt.logger.info "OllamaAdapter: Sending request to Ollama"
+      if model
+        model_name = model
+      else
+        model_name = @config['model']
+      end
+      SmartPrompt.logger.info "OllamaAdapter: Using model #{model_name}"
+      begin
+        response = @client.generate(
+          {
+            model: model_name,
+            prompt: messages.to_s,
+            stream: false
+          }
+        )
+      rescue Ollama::Error => e
+        SmartPrompt.logger.error "Ollama API error: #{e.message}"
+        raise LLMAPIError, "Ollama API error: #{e.message}"
+      rescue Ollama::ConnectionError => e
+        SmartPrompt.logger.error "Connection error: Unable to reach Ollama API"
+        raise LLMAPIError, "Connection error: Unable to reach Ollama API"
+      rescue Ollama::TimeoutError => e
+        SmartPrompt.logger.error "Request to Ollama API timed out"
+        raise LLMAPIError, "Request to Ollama API timed out"
+      rescue Ollama::InvalidRequestError => e
+        SmartPrompt.logger.error "Invalid request to Ollama API: #{e.message}"
+        raise LLMAPIError, "Invalid request to Ollama API: #{e.message}"
+      rescue JSON::ParserError => e
+        SmartPrompt.logger.error "Failed to parse Ollama API response"
+        raise LLMAPIError, "Failed to parse Ollama API response"
+      rescue => e
+        SmartPrompt.logger.error "Unexpected error during Ollama request: #{e.message}"
+        raise Error, "Unexpected error during Ollama request: #{e.message}"
+      ensure
+        SmartPrompt.logger.info "Successful send a message"
+      end
+      SmartPrompt.logger.info "OllamaAdapter: Received response from Ollama"
+      return response[0]["response"]
+    end
+  end
+end
data/lib/smart_prompt/openai_adapter.rb ADDED
@@ -0,0 +1,81 @@
+require 'openai'
+
+module SmartPrompt
+  class OpenAIAdapter < LLMAdapter
+    def initialize(config)
+      super
+      api_key = @config['api_key']
+      if api_key.is_a?(String) && api_key.start_with?('ENV[') && api_key.end_with?(']')
+        api_key = eval(api_key)
+      end
+      begin
+        @client = OpenAI::Client.new(
+          access_token: api_key,
+          uri_base: @config['url'],
+          request_timeout: 240
+        )
+      rescue OpenAI::ConfigurationError => e
+        SmartPrompt.logger.error "Failed to initialize OpenAI client: #{e.message}"
+        raise LLMAPIError, "Invalid OpenAI configuration: #{e.message}"
+      rescue OpenAI::AuthenticationError => e
+        SmartPrompt.logger.error "Failed to initialize OpenAI client: #{e.message}"
+        raise LLMAPIError, "OpenAI authentication failed: #{e.message}"
+      rescue SocketError => e
+        SmartPrompt.logger.error "Failed to initialize OpenAI client: #{e.message}"
+        raise LLMAPIError, "Network error: Unable to connect to OpenAI API"
+      rescue => e
+        SmartPrompt.logger.error "Failed to initialize OpenAI client: #{e.message}"
+        raise Error, "Unexpected error initializing OpenAI client: #{e.message}"
+      ensure
+        SmartPrompt.logger.info "Successful creation an OpenAI client."
+      end
+    end
+
+    def send_request(messages, model=nil)
+      SmartPrompt.logger.info "OpenAIAdapter: Sending request to OpenAI"
+      if model
+        model_name = model
+      else
+        model_name = @config['model']
+      end
+      SmartPrompt.logger.info "OpenAIAdapter: Using model #{model_name}"
+      begin
+        response = @client.chat(
+          parameters: {
+            model: model_name,
+            messages: messages,
+            temperature: @config['temperature'] || 0.7
+          }
+        )
+      rescue OpenAI::APIError => e
+        SmartPrompt.logger.error "OpenAI API error: #{e.message}"
+        raise LLMAPIError, "OpenAI API error: #{e.message}"
+      rescue OpenAI::APIConnectionError => e
+        SmartPrompt.logger.error "Connection error: Unable to reach OpenAI API"
+        raise LLMAPIError, "Connection error: Unable to reach OpenAI API"
+      rescue OpenAI::APITimeoutError => e
+        SmartPrompt.logger.error "Request to OpenAI API timed out"
+        raise LLMAPIError, "Request to OpenAI API timed out"
+      rescue OpenAI::InvalidRequestError => e
+        SmartPrompt.logger.error "Invalid request to OpenAI API: #{e.message}"
+        raise LLMAPIError, "Invalid request to OpenAI API: #{e.message}"
+      rescue OpenAI::AuthenticationError => e
+        SmartPrompt.logger.error "Authentication error with OpenAI API: #{e.message}"
+        raise LLMAPIError, "Authentication error with OpenAI API: #{e.message}"
+      rescue OpenAI::RateLimitError => e
+        SmartPrompt.logger.error "Rate limit exceeded for OpenAI API"
+        raise LLMAPIError, "Rate limit exceeded for OpenAI API"
+      rescue JSON::ParserError => e
+        SmartPrompt.logger.error "Failed to parse OpenAI API response"
+        raise LLMAPIError, "Failed to parse OpenAI API response"
+      rescue => e
+        SmartPrompt.logger.error "Unexpected error during OpenAI request: #{e.message}"
+        raise Error, "Unexpected error during OpenAI request: #{e.message}"
+      ensure
+        SmartPrompt.logger.info "Successful send a message"
+      end
+      SmartPrompt.logger.info "OpenAIAdapter: Received response from OpenAI"
+      response.dig("choices", 0, "message", "content")
+    end
+  end
+end
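One detail worth noting in the OpenAI adapter above: an api_key config value written as the literal string ENV['OPENAI_API_KEY'] is passed through eval, so the key is read from the environment at load time rather than stored in the file. A hypothetical config entry using that convention (the llm and variable names are illustrative):

    llms:
      gpt4:
        adapter: openai
        api_key: ENV['OPENAI_API_KEY']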
data/lib/smart_prompt/version.rb CHANGED
@@ -1,3 +1,3 @@
 module SmartPrompt
-  VERSION = "0.1.6"
+  VERSION = "0.1.7"
 end
data/lib/smart_prompt.rb CHANGED
@@ -2,11 +2,18 @@ require File.expand_path('../smart_prompt/version', __FILE__)
 require File.expand_path('../smart_prompt/engine', __FILE__)
 require File.expand_path('../smart_prompt/conversation', __FILE__)
 require File.expand_path('../smart_prompt/llm_adapter', __FILE__)
+require File.expand_path('../smart_prompt/openai_adapter', __FILE__)
+require File.expand_path('../smart_prompt/llamacpp_adapter', __FILE__)
+require File.expand_path('../smart_prompt/ollama_adapter', __FILE__)
 require File.expand_path('../smart_prompt/prompt_template', __FILE__)
 require File.expand_path('../smart_prompt/worker', __FILE__)
 
 module SmartPrompt
   class Error < StandardError; end
+  class ConfigurationError < Error; end
+  class LLMAPIError < Error; end
+  class CallWorkerError < Error; end
+
   attr_writer :logger
 
   def self.define_worker(name, &block)
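Because the three new exception classes all inherit from SmartPrompt::Error, callers can rescue a specific failure mode or the whole family at once. A minimal sketch, assuming engine is an already-constructed object exposing the load_config method shown above:

    begin
      engine.load_config('config.yml')
    rescue SmartPrompt::ConfigurationError => e
      warn "Configuration problem: #{e.message}"
    rescue SmartPrompt::LLMAPIError => e
      warn "LLM API failure: #{e.message}"
    rescue SmartPrompt::Error => e
      warn "Other SmartPrompt failure: #{e.message}"
    end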
metadata CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: smart_prompt
 version: !ruby/object:Gem::Version
-  version: 0.1.6
+  version: 0.1.7
 platform: ruby
 authors:
 - zhuang biaowei
 autorequire:
 bindir: exe
 cert_chain: []
-date: 2024-10-08 00:00:00.000000000 Z
+date: 2024-10-17 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: yaml
@@ -95,7 +95,10 @@ files:
 - lib/smart_prompt.rb
 - lib/smart_prompt/conversation.rb
 - lib/smart_prompt/engine.rb
+- lib/smart_prompt/llamacpp_adapter.rb
 - lib/smart_prompt/llm_adapter.rb
+- lib/smart_prompt/ollama_adapter.rb
+- lib/smart_prompt/openai_adapter.rb
 - lib/smart_prompt/prompt_template.rb
 - lib/smart_prompt/version.rb
 - lib/smart_prompt/worker.rb
@@ -122,7 +125,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
 - !ruby/object:Gem::Version
   version: '0'
 requirements: []
-rubygems_version: 3.5.18
+rubygems_version: 3.5.22
 signing_key:
 specification_version: 4
 summary: A smart prompt management and LLM interaction gem