llm_conductor 1.1.2 → 1.3.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
data/lib/llm_conductor/clients/zai_client.rb ADDED
@@ -0,0 +1,76 @@
+# frozen_string_literal: true
+
+require_relative 'concerns/vision_support'
+
+module LlmConductor
+  module Clients
+    # Z.ai client implementation for accessing GLM models including GLM-4.5V
+    # Supports both text-only and multimodal (vision) requests
+    #
+    # Note: Z.ai uses OpenAI-compatible API format but with /v4/ path instead of /v1/
+    # We use Faraday directly instead of the ruby-openai gem to properly handle the API path
+    class ZaiClient < BaseClient
+      include Concerns::VisionSupport
+
+      private
+
+      def generate_content(prompt)
+        content = format_content(prompt)
+
+        # Retry logic for transient errors (similar to OpenRouter)
+        max_retries = 3
+        retry_count = 0
+
+        begin
+          # Make direct HTTP request to Z.ai API since they use /v4/ instead of /v1/
+          response = http_client.post('chat/completions') do |req|
+            req.body = {
+              model:,
+              messages: [{ role: 'user', content: }]
+            }.to_json
+          end
+
+          # Response body is already parsed as Hash by Faraday's JSON middleware
+          response_data = response.body.is_a?(String) ? JSON.parse(response.body) : response.body
+          response_data.dig('choices', 0, 'message', 'content')
+        rescue Faraday::ServerError => e
+          retry_count += 1
+
+          # Log retry attempts if logger is configured
+          configuration.logger&.warn(
+            "Z.ai API error (attempt #{retry_count}/#{max_retries}): #{e.message}"
+          )
+
+          raise unless retry_count < max_retries
+
+          wait_time = 2**retry_count # Exponential backoff: 2, 4, 8 seconds
+          configuration.logger&.info("Retrying in #{wait_time}s...")
+          sleep(wait_time)
+          retry
+        end
+      end
+
+      # HTTP client for making requests to Z.ai API
+      # Z.ai uses /v4/ in their path, not /v1/ like OpenAI, so we use Faraday directly
+      def http_client
+        @http_client ||= begin
+          config = LlmConductor.configuration.provider_config(:zai)
+          base_url = config[:uri_base] || 'https://api.z.ai/api/paas/v4'
+
+          Faraday.new(url: base_url) do |f|
+            f.request :json
+            f.response :json
+            f.headers['Authorization'] = "Bearer #{config[:api_key]}"
+            f.headers['Content-Type'] = 'application/json'
+            f.adapter Faraday.default_adapter
+          end
+        end
+      end
+
+      # Legacy client method for compatibility (not used, but kept for reference)
+      def client
+        http_client
+      end
+    end
+  end
+end
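For orientation: stripped of its retry loop, the request ZaiClient makes reduces to a stock Faraday call. Below is a minimal standalone sketch; the model id 'glm-4.5v' is an illustrative placeholder, not something this diff specifies:

    require 'faraday'

    # Build the connection the way ZaiClient#http_client does: JSON
    # request/response middleware plus a bearer token, rooted at Z.ai's /v4/ path.
    conn = Faraday.new(url: 'https://api.z.ai/api/paas/v4') do |f|
      f.request :json
      f.response :json
      f.headers['Authorization'] = "Bearer #{ENV['ZAI_API_KEY']}"
      f.adapter Faraday.default_adapter
    end

    response = conn.post('chat/completions') do |req|
      # A Hash body is encoded by the :json request middleware; the client's
      # explicit #to_json also works because the middleware passes Strings through.
      req.body = { model: 'glm-4.5v', messages: [{ role: 'user', content: 'Hello' }] }
    end

    # The :json response middleware parses the body into a Hash with string keys.
    puts response.body.dig('choices', 0, 'message', 'content')

One detail worth noting in the retry branch above: with max_retries = 3, only the 2- and 4-second waits are reachable; the third consecutive Faraday::ServerError re-raises before the 8-second wait would occur.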
data/lib/llm_conductor/configuration.rb CHANGED
@@ -72,6 +72,14 @@ module LlmConductor
       }
     end
 
+    # Configure Z.ai provider
+    def zai(api_key: nil, **options)
+      @providers[:zai] = {
+        api_key: api_key || ENV['ZAI_API_KEY'],
+        **options
+      }
+    end
+
     # Get provider configuration
     def provider_config(provider)
       @providers[provider.to_sym] || {}
@@ -126,6 +134,14 @@ module LlmConductor
       groq(api_key: value)
     end
 
+    def zai_api_key
+      provider_config(:zai)[:api_key]
+    end
+
+    def zai_api_key=(value)
+      zai(api_key: value)
+    end
+
     private
 
     def setup_defaults_from_env
@@ -135,6 +151,7 @@ module LlmConductor
       openrouter if ENV['OPENROUTER_API_KEY']
       gemini if ENV['GEMINI_API_KEY']
       groq if ENV['GROQ_API_KEY']
+      zai if ENV['ZAI_API_KEY']
      ollama # Always configure Ollama with default URL
     end
   end
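Taken together, these hunks let Z.ai be configured explicitly or picked up from ZAI_API_KEY at startup. A minimal sketch, assuming the gem's existing LlmConductor.configure block yields this configuration object (the configure entry point itself is outside this diff):

    LlmConductor.configure do |config|
      # Explicit key; extra options are forwarded via **options, e.g. the
      # uri_base that ZaiClient#http_client reads.
      config.zai(api_key: 'your-zai-key', uri_base: 'https://api.z.ai/api/paas/v4')
    end

    # The accessor pair mirrors the other providers:
    LlmConductor.configuration.zai_api_key            # => 'your-zai-key'
    LlmConductor.configuration.zai_api_key = 'other'  # same as zai(api_key: 'other')

Note that the zai_api_key= writer calls zai(api_key: value) with no other options, so it replaces any provider options previously stored under :zai.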
data/lib/llm_conductor/prompt_manager.rb CHANGED
@@ -59,9 +59,7 @@ module LlmConductor
     def validate_prompt_class!(prompt_class)
       raise InvalidPromptClassError, 'Prompt must be a class' unless prompt_class.is_a?(Class)
 
-      unless prompt_class < Prompts::BasePrompt
-        raise InvalidPromptClassError, 'Prompt class must inherit from BasePrompt'
-      end
+      raise InvalidPromptClassError, 'Prompt class must inherit from BasePrompt' unless prompt_class < Prompts::BasePrompt
 
       return if prompt_class.instance_methods(false).include?(:render)
 
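This hunk is a behavior-preserving style change: the multi-line unless collapses into a one-line guard clause, presumably to satisfy a RuboCop style rule. Either form raises identically for a class outside the prompt hierarchy (hypothetical caller; how validate_prompt_class! is reached, e.g. during prompt registration, is not shown in this diff):

    class NotAPrompt; end

    validate_prompt_class!(NotAPrompt)
    # => raises InvalidPromptClassError, 'Prompt class must inherit from BasePrompt'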
data/lib/llm_conductor/version.rb CHANGED
@@ -1,5 +1,5 @@
 # frozen_string_literal: true
 
 module LlmConductor
-  VERSION = '1.1.2'
+  VERSION = '1.3.0'
 end
data/lib/llm_conductor.rb CHANGED
@@ -14,10 +14,11 @@ require_relative 'llm_conductor/clients/groq_client'
 require_relative 'llm_conductor/clients/ollama_client'
 require_relative 'llm_conductor/clients/openrouter_client'
 require_relative 'llm_conductor/clients/gemini_client'
+require_relative 'llm_conductor/clients/zai_client'
 require_relative 'llm_conductor/client_factory'
 
 # LLM Conductor provides a unified interface for multiple Language Model providers
-# including OpenAI GPT, Anthropic Claude, Google Gemini, Groq, OpenRouter, and Ollama
+# including OpenAI GPT, Anthropic Claude, Google Gemini, Groq, OpenRouter, Z.ai, and Ollama
 # with built-in prompt templates, token counting, and extensible client architecture.
 module LlmConductor
   class Error < StandardError; end
@@ -63,16 +64,17 @@ module LlmConductor
       when :ollama then Clients::OllamaClient
       when :gemini, :google then Clients::GeminiClient
       when :groq then Clients::GroqClient
+      when :zai then Clients::ZaiClient
       else
         raise ArgumentError,
               "Unsupported vendor: #{vendor}. " \
-              'Supported vendors: anthropic, openai, openrouter, ollama, gemini, groq'
+              'Supported vendors: anthropic, openai, openrouter, ollama, gemini, groq, zai'
       end
     end
   end
 
   # List of supported vendors
-  SUPPORTED_VENDORS = %i[anthropic openai openrouter ollama gemini groq].freeze
+  SUPPORTED_VENDORS = %i[anthropic openai openrouter ollama gemini groq zai].freeze
 
   # List of supported prompt types
   SUPPORTED_PROMPT_TYPES = %i[
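With the factory case above, vendor :zai now resolves to Clients::ZaiClient. An end-to-end sketch, assuming the gem's top-level generate call takes vendor:/model:/prompt: keywords (its exact signature is not part of this diff) and using the same illustrative model id as before:

    require 'llm_conductor'

    response = LlmConductor.generate(
      vendor: :zai,       # now included in SUPPORTED_VENDORS
      model: 'glm-4.5v',  # illustrative GLM model id
      prompt: 'One sentence on exponential backoff, please.'
    )

    # Unknown vendors still fail fast:
    #   ArgumentError: Unsupported vendor: nope. Supported vendors: anthropic,
    #   openai, openrouter, ollama, gemini, groq, zai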
metadata CHANGED
@@ -1,13 +1,13 @@
 --- !ruby/object:Gem::Specification
 name: llm_conductor
 version: !ruby/object:Gem::Version
-  version: 1.1.2
+  version: 1.3.0
 platform: ruby
 authors:
 - Ben Zheng
 bindir: exe
 cert_chain: []
-date: 2025-10-29 00:00:00.000000000 Z
+date: 2025-11-04 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: activesupport
@@ -154,22 +154,27 @@ files:
 - Rakefile
 - VISION_USAGE.md
 - config/initializers/llm_conductor.rb
+- examples/claude_vision_usage.rb
 - examples/data_builder_usage.rb
 - examples/gemini_usage.rb
+- examples/gpt_vision_usage.rb
 - examples/groq_usage.rb
 - examples/openrouter_vision_usage.rb
 - examples/prompt_registration.rb
 - examples/rag_usage.rb
 - examples/simple_usage.rb
+- examples/zai_usage.rb
 - lib/llm_conductor.rb
 - lib/llm_conductor/client_factory.rb
 - lib/llm_conductor/clients/anthropic_client.rb
 - lib/llm_conductor/clients/base_client.rb
+- lib/llm_conductor/clients/concerns/vision_support.rb
 - lib/llm_conductor/clients/gemini_client.rb
 - lib/llm_conductor/clients/gpt_client.rb
 - lib/llm_conductor/clients/groq_client.rb
 - lib/llm_conductor/clients/ollama_client.rb
 - lib/llm_conductor/clients/openrouter_client.rb
+- lib/llm_conductor/clients/zai_client.rb
 - lib/llm_conductor/configuration.rb
 - lib/llm_conductor/data_builder.rb
 - lib/llm_conductor/prompt_manager.rb