langfuse-rb 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/CHANGELOG.md +60 -0
- data/LICENSE +21 -0
- data/README.md +106 -0
- data/lib/langfuse/api_client.rb +330 -0
- data/lib/langfuse/cache_warmer.rb +219 -0
- data/lib/langfuse/chat_prompt_client.rb +98 -0
- data/lib/langfuse/client.rb +338 -0
- data/lib/langfuse/config.rb +135 -0
- data/lib/langfuse/observations.rb +615 -0
- data/lib/langfuse/otel_attributes.rb +275 -0
- data/lib/langfuse/otel_setup.rb +123 -0
- data/lib/langfuse/prompt_cache.rb +131 -0
- data/lib/langfuse/propagation.rb +471 -0
- data/lib/langfuse/rails_cache_adapter.rb +200 -0
- data/lib/langfuse/score_client.rb +321 -0
- data/lib/langfuse/span_processor.rb +61 -0
- data/lib/langfuse/text_prompt_client.rb +67 -0
- data/lib/langfuse/types.rb +353 -0
- data/lib/langfuse/version.rb +5 -0
- data/lib/langfuse.rb +457 -0
- metadata +177 -0
|
@@ -0,0 +1,219 @@
|
|
|
1
|
+
# frozen_string_literal: true
|
|
2
|
+
|
|
3
|
+
module Langfuse
|
|
4
|
+
# Cache warming utility for pre-loading prompts into cache
|
|
5
|
+
#
|
|
6
|
+
# Useful for deployment scenarios where you want to warm the cache
|
|
7
|
+
# before serving traffic, preventing cold-start API calls.
|
|
8
|
+
#
|
|
9
|
+
# @example Warm cache with specific prompts
|
|
10
|
+
# warmer = Langfuse::CacheWarmer.new
|
|
11
|
+
# results = warmer.warm(['greeting', 'conversation', 'rag-pipeline'])
|
|
12
|
+
# puts "Cached #{results[:success].size} prompts"
|
|
13
|
+
#
|
|
14
|
+
# @example Warm cache with error handling
|
|
15
|
+
# warmer = Langfuse::CacheWarmer.new(client: my_client)
|
|
16
|
+
# results = warmer.warm(['greeting', 'conversation'])
|
|
17
|
+
#
|
|
18
|
+
# results[:failed].each do |failure|
|
|
19
|
+
# logger.warn "Failed to cache #{failure[:name]}: #{failure[:error]}"
|
|
20
|
+
# end
|
|
21
|
+
#
|
|
22
|
+
class CacheWarmer
  attr_reader :client

  # Initialize a new cache warmer
  #
  # @param client [Client, nil] Optional Langfuse client (defaults to global client)
  def initialize(client: nil)
    @client = client || Langfuse.client
  end

  # Warm the cache with specified prompts
  #
  # Fetches each prompt and populates the cache. This is idempotent -
  # safe to call multiple times.
  #
  # @param prompt_names [Array<String>] List of prompt names to cache
  # @param versions [Hash<String, Integer>] Optional version numbers per prompt
  # @param labels [Hash<String, String>] Optional labels per prompt
  # @return [Hash] Results with :success and :failed arrays
  #
  # @example Basic warming
  #   results = warmer.warm(['greeting', 'conversation'])
  #   # => { success: ['greeting', 'conversation'], failed: [] }
  #
  # @example With specific versions
  #   results = warmer.warm(
  #     ['greeting', 'conversation'],
  #     versions: { 'greeting' => 2, 'conversation' => 1 }
  #   )
  #
  # @example With labels
  #   results = warmer.warm(['greeting'], labels: { 'greeting' => 'production' })
  def warm(prompt_names, versions: {}, labels: {})
    results = { success: [], failed: [] }

    prompt_names.each do |name|
      warm_single_prompt(name, results, versions, labels)
    end

    results
  end

  # Warm the cache with all prompts (auto-discovery)
  #
  # Discovers all prompts in the Langfuse project via the list_prompts API
  # and warms the cache with each of them. By default, fetches prompts with
  # the "production" label. When a version is specified for a prompt, no
  # label is applied to it (version takes precedence).
  #
  # @param default_label [String, nil] Label to use for all prompts
  #   (default: "production"); pass nil to fetch latest versions unlabeled
  # @param versions [Hash<String, Integer>] Optional version numbers per prompt
  # @param labels [Hash<String, String>] Optional labels per specific prompts
  #   (override default_label)
  # @return [Hash] Results with :success and :failed arrays
  #
  # @example Auto-discover and warm all prompts with "production" label
  #   results = warmer.warm_all
  #
  # @example Override label for specific prompts
  #   results = warmer.warm_all(
  #     default_label: "production",
  #     labels: { 'greeting' => 'staging' }
  #   )
  def warm_all(default_label: "production", versions: {}, labels: {})
    prompt_list = client.list_prompts
    prompt_names = prompt_list.map { |p| p["name"] }.uniq

    # Apply default_label to every prompt that has no explicit version
    # (version takes precedence over label), then let caller-supplied
    # labels override the default.
    final_labels = {}
    if default_label
      prompt_names.each do |name|
        final_labels[name] = default_label unless versions[name]
      end
    end
    final_labels.merge!(labels)

    warm(prompt_names, versions: versions, labels: final_labels)
  end

  # Warm the cache and raise on any failures
  #
  # Same as #warm but raises an error if any prompts fail to cache.
  # Useful when you want to abort deployment if cache warming fails.
  #
  # @param prompt_names [Array<String>] List of prompt names to cache
  # @param versions [Hash<String, Integer>] Optional version numbers per prompt
  # @param labels [Hash<String, String>] Optional labels per prompt
  # @return [Hash] Results with :success array
  # @raise [CacheWarmingError] if any prompts fail to cache
  #
  # @example
  #   begin
  #     warmer.warm!(['greeting', 'conversation'])
  #   rescue Langfuse::CacheWarmingError => e
  #     abort "Cache warming failed: #{e.message}"
  #   end
  def warm!(prompt_names, versions: {}, labels: {})
    results = warm(prompt_names, versions: versions, labels: labels)

    if results[:failed].any?
      failed_names = results[:failed].map { |f| f[:name] }.join(", ")
      raise CacheWarmingError, "Failed to cache prompts: #{failed_names}"
    end

    results
  end

  # Check if cache warming is enabled
  #
  # Returns false if no cache is configured, the backend does not expose
  # a TTL, or caching is disabled (ttl = 0).
  #
  # @return [Boolean]
  def cache_enabled?
    cache = client.api_client.cache
    return false if cache.nil?
    # Guard like #cache_stats does: not every cache backend exposes #ttl.
    return false unless cache.respond_to?(:ttl)

    cache.ttl&.positive? || false
  end

  # Get cache statistics (if supported by backend)
  #
  # @return [Hash, nil] Cache stats or nil if no cache is configured
  def cache_stats
    cache = client.api_client.cache
    return nil unless cache

    stats = {}
    stats[:backend] = cache.class.name.split("::").last
    stats[:ttl] = cache.ttl if cache.respond_to?(:ttl)
    stats[:size] = cache.size if cache.respond_to?(:size)
    stats[:max_size] = cache.max_size if cache.respond_to?(:max_size)
    stats
  end

  private

  # Warm a single prompt and record the outcome in results
  #
  # @param name [String] Prompt name
  # @param results [Hash] Results hash to update
  # @param versions [Hash] Version numbers per prompt
  # @param labels [Hash] Labels per prompt
  # @return [void]
  def warm_single_prompt(name, results, versions, labels)
    options = build_prompt_options(name, versions, labels)

    client.get_prompt(name, **options)
    results[:success] << name
  rescue NotFoundError
    record_failure(results, name, "Not found")
  rescue UnauthorizedError
    record_failure(results, name, "Unauthorized")
  rescue StandardError => e
    # Covers ApiError and any other unexpected error; warming is
    # best-effort, so failures are recorded rather than raised.
    record_failure(results, name, e.message)
  end

  # Build options hash for get_prompt
  #
  # @param name [String] Prompt name
  # @param versions [Hash] Version numbers per prompt
  # @param labels [Hash] Labels per prompt
  # @return [Hash] Options hash with :version and/or :label when present
  def build_prompt_options(name, versions, labels)
    options = {}
    options[:version] = versions[name] if versions[name]
    options[:label] = labels[name] if labels[name]
    options
  end

  # Record a prompt failure
  #
  # @param results [Hash] Results hash to update
  # @param name [String] Prompt name
  # @param error [String] Error message
  # @return [void]
  def record_failure(results, name, error)
    results[:failed] << { name: name, error: error }
  end
end
|
|
216
|
+
|
|
217
|
+
# Error raised by CacheWarmer#warm! when one or more prompts fail to cache.
CacheWarmingError = Class.new(Error)
|
|
219
|
+
end
|
|
@@ -0,0 +1,98 @@
|
|
|
1
|
+
# frozen_string_literal: true
|
|
2
|
+
|
|
3
|
+
require "mustache"
|
|
4
|
+
|
|
5
|
+
module Langfuse
|
|
6
|
+
# Chat prompt client for compiling chat prompts with variable substitution
|
|
7
|
+
#
|
|
8
|
+
# Handles chat-based prompts from Langfuse, providing Mustache templating
|
|
9
|
+
# for variable substitution in role-based messages.
|
|
10
|
+
#
|
|
11
|
+
# @example Basic usage
|
|
12
|
+
# prompt_data = api_client.get_prompt("support_chat")
|
|
13
|
+
# chat_prompt = Langfuse::ChatPromptClient.new(prompt_data)
|
|
14
|
+
# chat_prompt.compile(variables: { user_name: "Alice", issue: "login" })
|
|
15
|
+
# # => [{ role: "system", content: "You are a support agent..." }, ...]
|
|
16
|
+
#
|
|
17
|
+
# @example Accessing metadata
|
|
18
|
+
# chat_prompt.name # => "support_chat"
|
|
19
|
+
# chat_prompt.version # => 1
|
|
20
|
+
# chat_prompt.labels # => ["production"]
|
|
21
|
+
#
|
|
22
|
+
class ChatPromptClient
|
|
23
|
+
attr_reader :name, :version, :labels, :tags, :config, :prompt
|
|
24
|
+
|
|
25
|
+
# Initialize a new chat prompt client
|
|
26
|
+
#
|
|
27
|
+
# @param prompt_data [Hash] The prompt data from the API
|
|
28
|
+
# @raise [ArgumentError] if prompt data is invalid
|
|
29
|
+
def initialize(prompt_data)
|
|
30
|
+
validate_prompt_data!(prompt_data)
|
|
31
|
+
|
|
32
|
+
@name = prompt_data["name"]
|
|
33
|
+
@version = prompt_data["version"]
|
|
34
|
+
@prompt = prompt_data["prompt"]
|
|
35
|
+
@labels = prompt_data["labels"] || []
|
|
36
|
+
@tags = prompt_data["tags"] || []
|
|
37
|
+
@config = prompt_data["config"] || {}
|
|
38
|
+
end
|
|
39
|
+
|
|
40
|
+
# Compile the chat prompt with variable substitution
|
|
41
|
+
#
|
|
42
|
+
# Returns an array of message hashes with roles and compiled content.
|
|
43
|
+
# Each message in the prompt will have its content compiled with the
|
|
44
|
+
# provided variables using Mustache templating.
|
|
45
|
+
#
|
|
46
|
+
# @param kwargs [Hash] Variables to substitute in message templates (as keyword arguments)
|
|
47
|
+
# @return [Array<Hash>] Array of compiled messages with :role and :content keys
|
|
48
|
+
#
|
|
49
|
+
# @example
|
|
50
|
+
# chat_prompt.compile(name: "Alice", topic: "Ruby")
|
|
51
|
+
# # => [
|
|
52
|
+
# # { role: :system, content: "You are a helpful assistant." },
|
|
53
|
+
# # { role: :user, content: "Hello Alice, let's discuss Ruby!" }
|
|
54
|
+
# # ]
|
|
55
|
+
def compile(**kwargs)
|
|
56
|
+
prompt.map do |message|
|
|
57
|
+
compile_message(message, kwargs)
|
|
58
|
+
end
|
|
59
|
+
end
|
|
60
|
+
|
|
61
|
+
private
|
|
62
|
+
|
|
63
|
+
# Validate prompt data structure
|
|
64
|
+
#
|
|
65
|
+
# @param prompt_data [Hash] The prompt data to validate
|
|
66
|
+
# @raise [ArgumentError] if validation fails
|
|
67
|
+
def validate_prompt_data!(prompt_data)
|
|
68
|
+
raise ArgumentError, "prompt_data must be a Hash" unless prompt_data.is_a?(Hash)
|
|
69
|
+
raise ArgumentError, "prompt_data must include 'prompt' field" unless prompt_data.key?("prompt")
|
|
70
|
+
raise ArgumentError, "prompt_data must include 'name' field" unless prompt_data.key?("name")
|
|
71
|
+
raise ArgumentError, "prompt_data must include 'version' field" unless prompt_data.key?("version")
|
|
72
|
+
raise ArgumentError, "prompt must be an Array" unless prompt_data["prompt"].is_a?(Array)
|
|
73
|
+
end
|
|
74
|
+
|
|
75
|
+
# Compile a single message with variable substitution
|
|
76
|
+
#
|
|
77
|
+
# @param message [Hash] The message with role and content
|
|
78
|
+
# @param variables [Hash] Variables to substitute
|
|
79
|
+
# @return [Hash] Compiled message with :role and :content as symbols
|
|
80
|
+
def compile_message(message, variables)
|
|
81
|
+
content = message["content"] || ""
|
|
82
|
+
compiled_content = variables.empty? ? content : Mustache.render(content, variables)
|
|
83
|
+
|
|
84
|
+
{
|
|
85
|
+
role: normalize_role(message["role"]),
|
|
86
|
+
content: compiled_content
|
|
87
|
+
}
|
|
88
|
+
end
|
|
89
|
+
|
|
90
|
+
# Normalize role to symbol
|
|
91
|
+
#
|
|
92
|
+
# @param role [String, Symbol] The role
|
|
93
|
+
# @return [Symbol] Normalized role as symbol
|
|
94
|
+
def normalize_role(role)
|
|
95
|
+
role.to_s.downcase.to_sym
|
|
96
|
+
end
|
|
97
|
+
end
|
|
98
|
+
end
|
|
@@ -0,0 +1,338 @@
|
|
|
1
|
+
# frozen_string_literal: true
|
|
2
|
+
|
|
3
|
+
module Langfuse
|
|
4
|
+
# Main client for Langfuse SDK
|
|
5
|
+
#
|
|
6
|
+
# Provides a unified interface for interacting with the Langfuse API.
|
|
7
|
+
# Handles prompt fetching and returns the appropriate prompt client
|
|
8
|
+
# (TextPromptClient or ChatPromptClient) based on the prompt type.
|
|
9
|
+
#
|
|
10
|
+
# @example
|
|
11
|
+
# config = Langfuse::Config.new(
|
|
12
|
+
# public_key: "pk_...",
|
|
13
|
+
# secret_key: "sk_...",
|
|
14
|
+
# cache_ttl: 120
|
|
15
|
+
# )
|
|
16
|
+
# client = Langfuse::Client.new(config)
|
|
17
|
+
# prompt = client.get_prompt("greeting")
|
|
18
|
+
# compiled = prompt.compile(name: "Alice")
|
|
19
|
+
#
|
|
20
|
+
class Client
  attr_reader :config, :api_client

  # Initialize a new Langfuse client
  #
  # Validates the configuration, builds the prompt cache (when enabled),
  # the underlying API client, and the score batching client.
  #
  # @param config [Config] Configuration object
  def initialize(config)
    @config = config
    @config.validate!

    # Create cache if enabled (cache_ttl > 0)
    cache = create_cache if cache_enabled?

    # Create API client with cache
    @api_client = ApiClient.new(
      public_key: config.public_key,
      secret_key: config.secret_key,
      base_url: config.base_url,
      timeout: config.timeout,
      logger: config.logger,
      cache: cache
    )

    # Initialize score client for batching score events
    @score_client = ScoreClient.new(api_client: @api_client, config: config)
  end

  # Fetch a prompt and return the appropriate client
  #
  # Fetches the prompt from the Langfuse API and returns either a
  # TextPromptClient or ChatPromptClient based on the prompt type.
  #
  # @param name [String] The name of the prompt
  # @param version [Integer, nil] Optional specific version number
  # @param label [String, nil] Optional label (e.g., "production", "latest")
  # @param fallback [String, Array, nil] Optional fallback prompt to use on error
  # @param type [Symbol, nil] Required when fallback is provided (:text or :chat)
  # @return [TextPromptClient, ChatPromptClient] The prompt client
  # @raise [ArgumentError] if fallback is provided without type
  # @raise [NotFoundError] if the prompt is not found and no fallback provided
  # @raise [UnauthorizedError] if authentication fails and no fallback provided
  # @raise [ApiError] for other API errors when no fallback is provided
  #
  # @example With fallback for graceful degradation
  #   prompt = client.get_prompt("greeting", fallback: "Hello {{name}}!", type: :text)
  def get_prompt(name, version: nil, label: nil, fallback: nil, type: nil)
    # A fallback cannot be built without knowing which client to construct.
    if fallback && !type
      raise ArgumentError, "type parameter is required when fallback is provided (use :text or :chat)"
    end

    # Try to fetch from API
    prompt_data = api_client.get_prompt(name, version: version, label: label)
    build_prompt_client(prompt_data)
  rescue ApiError, NotFoundError, UnauthorizedError => e
    # If no fallback, re-raise the error for the caller to handle
    raise e unless fallback

    # Log warning and degrade gracefully to the fallback prompt
    config.logger.warn("Langfuse API error for prompt '#{name}': #{e.message}. Using fallback.")
    build_fallback_prompt_client(name, fallback, type)
  end

  # List all prompts in the Langfuse project
  #
  # Fetches a list of all prompt names available in your project.
  # Returns metadata only, not full prompt content.
  #
  # @param page [Integer, nil] Optional page number for pagination
  # @param limit [Integer, nil] Optional limit per page
  # @return [Array<Hash>] Array of prompt metadata hashes
  # @raise [UnauthorizedError] if authentication fails
  # @raise [ApiError] for other API errors
  #
  # @example
  #   client.list_prompts.each { |p| puts "#{p['name']} (v#{p['version']})" }
  def list_prompts(page: nil, limit: nil)
    api_client.list_prompts(page: page, limit: limit)
  end

  # Convenience method: fetch and compile a prompt in one call
  #
  # Shorthand for get_prompt followed by compile. Returns the compiled
  # prompt ready to use with your LLM.
  #
  # @param name [String] The name of the prompt
  # @param variables [Hash] Variables to substitute in the prompt
  # @param version [Integer, nil] Optional specific version number
  # @param label [String, nil] Optional label (e.g., "production", "latest")
  # @param fallback [String, Array, nil] Optional fallback prompt to use on error
  # @param type [Symbol, nil] Required when fallback is provided (:text or :chat)
  # @return [String, Array<Hash>] Compiled prompt (String for text, Array for chat)
  # @raise (see #get_prompt)
  #
  # @example Compile a text prompt
  #   client.compile_prompt("greeting", variables: { name: "Alice" })
  #   # => "Hello Alice!"
  #
  # @example With fallback
  #   client.compile_prompt("greeting", variables: { name: "Alice" },
  #                         fallback: "Hello {{name}}!", type: :text)
  def compile_prompt(name, variables: {}, version: nil, label: nil, fallback: nil, type: nil)
    prompt = get_prompt(name, version: version, label: label, fallback: fallback, type: type)
    prompt.compile(**variables)
  end

  # Generate URL for viewing a trace in the Langfuse UI
  #
  # @param trace_id [String] The trace ID (hex-encoded, 32 characters)
  # @return [String] URL to view the trace
  #
  # @example
  #   puts "View trace at: #{client.trace_url('abc123...')}"
  def trace_url(trace_id)
    "#{config.base_url}/traces/#{trace_id}"
  end

  # Create a score event and queue it for batching
  #
  # @param name [String] Score name (required)
  # @param value [Numeric, Integer, String] Score value (type depends on data_type)
  # @param trace_id [String, nil] Trace ID to associate with the score
  # @param observation_id [String, nil] Observation ID to associate with the score
  # @param comment [String, nil] Optional comment
  # @param metadata [Hash, nil] Optional metadata hash
  # @param data_type [Symbol] Data type (:numeric, :boolean, :categorical)
  # @return [void]
  # @raise [ArgumentError] if validation fails
  #
  # @example Numeric score
  #   client.create_score(name: "quality", value: 0.85, trace_id: "abc123")
  #
  # @example Boolean score
  #   client.create_score(name: "passed", value: true, trace_id: "abc123", data_type: :boolean)
  # rubocop:disable Metrics/ParameterLists
  def create_score(name:, value:, trace_id: nil, observation_id: nil, comment: nil, metadata: nil,
                   data_type: :numeric)
    @score_client.create(
      name: name,
      value: value,
      trace_id: trace_id,
      observation_id: observation_id,
      comment: comment,
      metadata: metadata,
      data_type: data_type
    )
  end
  # rubocop:enable Metrics/ParameterLists

  # Create a score for the currently active observation (from OTel span)
  #
  # Extracts observation_id and trace_id from the active OpenTelemetry span.
  #
  # @param name [String] Score name (required)
  # @param value [Numeric, Integer, String] Score value
  # @param comment [String, nil] Optional comment
  # @param metadata [Hash, nil] Optional metadata hash
  # @param data_type [Symbol] Data type (:numeric, :boolean, :categorical)
  # @return [void]
  # @raise [ArgumentError] if no active span or validation fails
  def score_active_observation(name:, value:, comment: nil, metadata: nil, data_type: :numeric)
    @score_client.score_active_observation(
      name: name,
      value: value,
      comment: comment,
      metadata: metadata,
      data_type: data_type
    )
  end

  # Create a score for the currently active trace (from OTel span)
  #
  # Extracts trace_id from the active OpenTelemetry span.
  #
  # @param name [String] Score name (required)
  # @param value [Numeric, Integer, String] Score value
  # @param comment [String, nil] Optional comment
  # @param metadata [Hash, nil] Optional metadata hash
  # @param data_type [Symbol] Data type (:numeric, :boolean, :categorical)
  # @return [void]
  # @raise [ArgumentError] if no active span or validation fails
  def score_active_trace(name:, value:, comment: nil, metadata: nil, data_type: :numeric)
    @score_client.score_active_trace(
      name: name,
      value: value,
      comment: comment,
      metadata: metadata,
      data_type: data_type
    )
  end

  # Force flush all queued score events
  #
  # Sends all queued score events to the API immediately.
  #
  # @return [void]
  def flush_scores
    @score_client.flush
  end

  # Shutdown the client and flush any pending scores
  #
  # @return [void]
  def shutdown
    @score_client.shutdown
  end

  private

  attr_reader :score_client

  # Check if caching is enabled in configuration
  #
  # @return [Boolean] true only when cache_ttl is a positive number
  def cache_enabled?
    # `cache_ttl&.positive?` alone returns nil when cache_ttl is unset;
    # coerce to a strict Boolean to match the documented contract.
    config.cache_ttl&.positive? || false
  end

  # Create a cache instance based on configuration
  #
  # @return [PromptCache, RailsCacheAdapter]
  # @raise [ConfigurationError] if the backend is unknown
  def create_cache
    case config.cache_backend
    when :memory
      PromptCache.new(
        ttl: config.cache_ttl,
        max_size: config.cache_max_size
      )
    when :rails
      RailsCacheAdapter.new(
        ttl: config.cache_ttl,
        lock_timeout: config.cache_lock_timeout
      )
    else
      raise ConfigurationError, "Unknown cache backend: #{config.cache_backend}"
    end
  end

  # Build the appropriate prompt client based on prompt type
  #
  # @param prompt_data [Hash] The prompt data from API
  # @return [TextPromptClient, ChatPromptClient]
  # @raise [ApiError] if prompt type is unknown
  def build_prompt_client(prompt_data)
    type = prompt_data["type"]

    case type
    when "text"
      TextPromptClient.new(prompt_data)
    when "chat"
      ChatPromptClient.new(prompt_data)
    else
      raise ApiError, "Unknown prompt type: #{type}"
    end
  end

  # Build a fallback prompt client from fallback data
  #
  # Version 0 and the "fallback" tag mark the result as synthetic rather
  # than a real API-managed prompt version.
  #
  # @param name [String] The prompt name
  # @param fallback [String, Array] The fallback prompt content
  # @param type [Symbol] The prompt type (:text or :chat)
  # @return [TextPromptClient, ChatPromptClient]
  # @raise [ArgumentError] if type is invalid
  def build_fallback_prompt_client(name, fallback, type)
    # Create minimal prompt data structure
    prompt_data = {
      "name" => name,
      "version" => 0,
      "type" => type.to_s,
      "prompt" => fallback,
      "labels" => [],
      "tags" => ["fallback"],
      "config" => {}
    }

    case type
    when :text
      TextPromptClient.new(prompt_data)
    when :chat
      ChatPromptClient.new(prompt_data)
    else
      raise ArgumentError, "Invalid type: #{type}. Must be :text or :chat"
    end
  end
end
|
|
338
|
+
end
|