langfuse-rb 0.1.0 → 0.3.0
- checksums.yaml +4 -4
- data/CHANGELOG.md +37 -51
- data/README.md +33 -20
- data/lib/langfuse/api_client.rb +142 -24
- data/lib/langfuse/client.rb +169 -10
- data/lib/langfuse/config.rb +69 -0
- data/lib/langfuse/prompt_cache.rb +119 -8
- data/lib/langfuse/propagation.rb +5 -16
- data/lib/langfuse/rails_cache_adapter.rb +115 -55
- data/lib/langfuse/stale_while_revalidate.rb +262 -0
- data/lib/langfuse/version.rb +1 -1
- metadata +37 -10
data/lib/langfuse/rails_cache_adapter.rb CHANGED

@@ -1,5 +1,8 @@
 # frozen_string_literal: true
 
+require_relative "prompt_cache"
+require_relative "stale_while_revalidate"
+
 module Langfuse
   # Rails.cache adapter for distributed caching with Redis
   #
@@ -12,20 +15,30 @@ module Langfuse
   #   adapter.get("greeting:1") # => prompt_data
   #
   class RailsCacheAdapter
-
+    include StaleWhileRevalidate
+
+    attr_reader :ttl, :namespace, :lock_timeout, :stale_ttl, :thread_pool, :logger
 
     # Initialize a new Rails.cache adapter
     #
     # @param ttl [Integer] Time-to-live in seconds (default: 60)
     # @param namespace [String] Cache key namespace (default: "langfuse")
     # @param lock_timeout [Integer] Lock timeout in seconds for stampede protection (default: 10)
+    # @param stale_ttl [Integer] Stale TTL for SWR in seconds (default: 0, SWR disabled).
+    #   Note: :indefinite is normalized to 1000 years by Config before being passed here.
+    # @param refresh_threads [Integer] Number of background refresh threads (default: 5)
+    # @param logger [Logger, nil] Logger instance for error reporting (default: nil, creates new logger)
     # @raise [ConfigurationError] if Rails.cache is not available
-    def initialize(ttl: 60, namespace: "langfuse", lock_timeout: 10)
+    def initialize(ttl: 60, namespace: "langfuse", lock_timeout: 10, stale_ttl: 0, refresh_threads: 5,
+                   logger: default_logger)
       validate_rails_cache!
 
       @ttl = ttl
       @namespace = namespace
       @lock_timeout = lock_timeout
+      @stale_ttl = stale_ttl
+      @logger = logger
+      initialize_swr(refresh_threads: refresh_threads) if swr_enabled?
     end
 
     # Get a value from the cache
@@ -42,14 +55,57 @@ module Langfuse
     # @param value [Object] Value to cache
     # @return [Object] The cached value
     def set(key, value)
-
+      # Calculate expiration: use total_ttl if SWR enabled, otherwise just ttl
+      expires_in = swr_enabled? ? total_ttl : ttl
+      Rails.cache.write(namespaced_key(key), value, expires_in:)
       value
     end
 
-    #
+    # Clear the entire Langfuse cache namespace
+    #
+    # Note: This uses delete_matched which may not be available on all cache stores.
+    # Works with Redis, Memcached, and memory stores. File store support varies.
+    #
+    # @return [void]
+    def clear
+      # Delete all keys matching the namespace pattern
+      Rails.cache.delete_matched("#{namespace}:*")
+    end
+
+    # Get current cache size
+    #
+    # Note: Rails.cache doesn't provide a size method, so we return nil
+    # to indicate this operation is not supported.
+    #
+    # @return [nil]
+    def size
+      nil
+    end
+
+    # Check if cache is empty
+    #
+    # Note: Rails.cache doesn't provide an efficient way to check if empty,
+    # so we return false to indicate this operation is not supported.
+    #
+    # @return [Boolean] Always returns false (unsupported operation)
+    def empty?
+      false
+    end
+
+    # Build a cache key from prompt name and options
+    #
+    # @param name [String] Prompt name
+    # @param version [Integer, nil] Optional version
+    # @param label [String, nil] Optional label
+    # @return [String] Cache key
+    def self.build_key(name, version: nil, label: nil)
+      PromptCache.build_key(name, version: version, label: label)
+    end
+
+    # Fetch a value from cache with lock for stampede protection
     #
     # This method prevents cache stampedes (thundering herd) by ensuring only one
-    # process fetches from the source when the cache is empty.
+    # process/thread fetches from the source when the cache is empty. Others wait
     # for the first one to populate the cache.
     #
     # Uses exponential backoff: 50ms, 100ms, 200ms (3 retries max, ~350ms total).
@@ -60,7 +116,7 @@ module Langfuse
     # @return [Object] Cached or freshly fetched value
     #
     # @example
-    #
+    #   cache.fetch_with_lock("greeting:v1") do
     #     api_client.get_prompt("greeting")
     #   end
     def fetch_with_lock(key)
@@ -68,8 +124,8 @@ module Langfuse
       cached = get(key)
       return cached if cached
 
-      # 2. Cache miss - try to acquire
-      lock_key =
+      # 2. Cache miss - try to acquire lock
+      lock_key = build_lock_key(key)
 
       if acquire_lock(lock_key)
         begin
@@ -92,74 +148,57 @@ module Langfuse
       end
     end
 
-
-    #
-    # Note: This uses delete_matched which may not be available on all cache stores.
-    # Works with Redis, Memcached, and memory stores. File store support varies.
-    #
-    # @return [void]
-    def clear
-      # Delete all keys matching the namespace pattern
-      Rails.cache.delete_matched("#{namespace}:*")
-    end
+    private
 
-    #
-    #
-    # Note: Rails.cache doesn't provide a size method, so we return nil
-    # to indicate this operation is not supported.
-    #
-    # @return [nil]
-    def size
-      nil
-    end
+    # Implementation of StaleWhileRevalidate abstract methods
 
-    #
-    #
-    # Note: Rails.cache doesn't provide an efficient way to check if empty,
-    # so we return false to indicate this operation is not supported.
+    # Get value from cache (SWR interface)
     #
-    # @
-
-
+    # @param key [String] Cache key
+    # @return [Object, nil] Cached value
+    def cache_get(key)
+      get(key)
     end
 
-    #
+    # Set value in cache (SWR interface)
     #
-    # @param
-    # @param
-    # @
-
-
-      PromptCache.build_key(name, version: version, label: label)
+    # @param key [String] Cache key
+    # @param value [Object] Value to cache (expects CacheEntry)
+    # @return [Object] The cached value
+    def cache_set(key, value)
+      set(key, value)
     end
 
-
-
-    # Add namespace prefix to cache key
+    # Build lock key with namespace
     #
-    #
-    #
-
-
+    # Used for both fetch operations (stampede protection) and refresh operations
+    # (preventing duplicate background refreshes).
+    #
+    # @param key [String] Cache key
+    # @return [String] Namespaced lock key
+    def build_lock_key(key)
+      "#{namespaced_key(key)}:lock"
     end
 
-    # Acquire a
+    # Acquire a lock using Rails.cache
    #
-    #
-    #
+    # Used for both fetch operations and refresh operations.
+    # Uses the configured lock_timeout for all locking scenarios.
     #
     # @param lock_key [String] Full lock key (already namespaced)
-    # @return [Boolean] true if lock was acquired, false if already held
+    # @return [Boolean] true if lock was acquired, false if already held
     def acquire_lock(lock_key)
       Rails.cache.write(
         lock_key,
         true,
         unless_exist: true, # Atomic: only write if key doesn't exist
-        expires_in: lock_timeout #
+        expires_in: lock_timeout # Use configured lock timeout
       )
     end
 
-    # Release a
+    # Release a lock
+    #
+    # Used for both fetch and refresh operations.
     #
     # @param lock_key [String] Full lock key (already namespaced)
     # @return [void]
@@ -172,7 +211,7 @@ module Langfuse
     # Uses exponential backoff: 50ms, 100ms, 200ms (3 retries, ~350ms total).
     # This gives the lock holder time to fetch and populate the cache.
     #
-    # @param key [String] Cache key
+    # @param key [String] Cache key
     # @return [Object, nil] Cached value if found, nil if still empty after waiting
     def wait_for_cache(key)
       intervals = [0.05, 0.1, 0.2] # 50ms, 100ms, 200ms (exponential backoff)
@@ -186,6 +225,16 @@ module Langfuse
       nil # Cache still empty after all retries
     end
 
+    # Rails.cache-specific helper methods
+
+    # Add namespace prefix to cache key
+    #
+    # @param key [String] Original cache key
+    # @return [String] Namespaced cache key
+    def namespaced_key(key)
+      "#{namespace}:#{key}"
+    end
+
     # Validate that Rails.cache is available
     #
     # @raise [ConfigurationError] if Rails.cache is not available
@@ -196,5 +245,16 @@ module Langfuse
       raise ConfigurationError,
             "Rails.cache is not available. Rails cache backend requires Rails with a configured cache store."
     end
+
+    # Create a default logger
+    #
+    # @return [Logger]
+    def default_logger
+      if defined?(Rails) && Rails.respond_to?(:logger) && Rails.logger
+        Rails.logger
+      else
+        Logger.new($stdout, level: Logger::WARN)
+      end
+    end
   end
 end
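For orientation, here is a minimal sketch of how the new constructor options above might be wired up. The class name and keyword arguments come from the diff; the initializer file name and the specific TTL values are illustrative assumptions, not from the gem.

```ruby
# config/initializers/langfuse_cache.rb -- illustrative only; file name and values are assumptions
require "langfuse"

# With the default stale_ttl: 0 the adapter behaves as before: entries simply expire after `ttl`.
# A positive stale_ttl enables stale-while-revalidate: entries are written with ttl + stale_ttl
# and refreshed on a background thread pool once they go stale.
LANGFUSE_PROMPT_CACHE = Langfuse::RailsCacheAdapter.new(
  ttl: 60,             # serve as fresh for 60 seconds
  stale_ttl: 300,      # then serve stale for up to 5 minutes while revalidating
  refresh_threads: 5,  # size of the background refresh pool
  namespace: "langfuse",
  lock_timeout: 10
)
```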
data/lib/langfuse/stale_while_revalidate.rb ADDED

@@ -0,0 +1,262 @@
+# frozen_string_literal: true
+
+require "concurrent"
+
+module Langfuse
+  # Stale-While-Revalidate caching pattern module
+  #
+  # Provides SWR functionality for cache implementations. When included,
+  # allows serving stale data immediately while refreshing in the background.
+  #
+  # Including classes must implement:
+  # - cache_get(key) - Read from cache
+  # - cache_set(key, value) - Write to cache
+  # - acquire_lock(lock_key) - Acquire lock for background refresh
+  # - release_lock(lock_key) - Release refresh lock
+  #
+  # @example
+  #   class MyCache
+  #     include Langfuse::StaleWhileRevalidate
+  #
+  #     def initialize(ttl: 60, stale_ttl: 0)
+  #       @ttl = ttl
+  #       @stale_ttl = stale_ttl
+  #       @logger = Logger.new($stdout)
+  #       initialize_swr if stale_ttl.positive?
+  #     end
+  #
+  #     def cache_get(key)
+  #       @storage[key]
+  #     end
+  #
+  #     def cache_set(key, value)
+  #       @storage[key] = value
+  #     end
+  #
+  #     def acquire_lock(lock_key)
+  #       # Implementation-specific lock acquisition
+  #     end
+  #
+  #     def release_lock(lock_key)
+  #       # Implementation-specific lock release
+  #     end
+  #   end
+  module StaleWhileRevalidate
+    # Initialize SWR infrastructure
+    #
+    # Must be called by including class after setting @stale_ttl, @ttl, and @logger.
+    # Typically called in the class's initialize method when stale_ttl is provided.
+    #
+    # @param refresh_threads [Integer] Number of background refresh threads (default: 5)
+    # @return [void]
+    def initialize_swr(refresh_threads: 5)
+      @thread_pool = initialize_thread_pool(refresh_threads)
+    end
+
+    # Fetch a value from cache with Stale-While-Revalidate support
+    #
+    # This method implements SWR caching: serves stale data immediately while
+    # refreshing in the background. Requires SWR to be enabled (stale_ttl must be positive).
+    #
+    # Three cache states:
+    # - FRESH: Return immediately, no action needed
+    # - STALE: Return stale data + trigger background refresh
+    # - EXPIRED: Must fetch fresh data synchronously
+    #
+    # @param key [String] Cache key
+    # @yield Block to execute to fetch fresh data
+    # @return [Object] Cached, stale, or freshly fetched value
+    # @raise [ConfigurationError] if SWR is not enabled (stale_ttl is not positive)
+    #
+    # @example
+    #   cache.fetch_with_stale_while_revalidate("greeting:v1") do
+    #     api_client.get_prompt("greeting")
+    #   end
+    def fetch_with_stale_while_revalidate(key, &)
+      raise ConfigurationError, "fetch_with_stale_while_revalidate requires a positive stale_ttl" unless swr_enabled?
+
+      entry = cache_get(key)
+
+      if entry&.fresh?
+        # FRESH - return immediately
+        logger.debug("CACHE HIT!")
+        entry.data
+      elsif entry&.stale?
+        # REVALIDATE - return stale + refresh in background
+        logger.debug("CACHE STALE!")
+        schedule_refresh(key, &)
+        entry.data # Instant response!
+      else
+        # MISS - must fetch synchronously
+        logger.debug("CACHE MISS!")
+        fetch_and_cache(key, &)
+      end
+    end
+
+    # Check if SWR is enabled
+    #
+    # SWR is enabled when stale_ttl is positive, meaning there's a grace period
+    # where stale data can be served while revalidating in the background.
+    #
+    # @return [Boolean] true if stale_ttl is positive
+    def swr_enabled?
+      stale_ttl.positive?
+    end
+
+    # Shutdown the cache refresh thread pool gracefully
+    #
+    # @return [void]
+    def shutdown
+      return unless @thread_pool
+
+      @thread_pool.shutdown
+      @thread_pool.wait_for_termination(5) # Wait up to 5 seconds
+    end
+
+    private
+
+    # Initialize thread pool for background refresh operations
+    #
+    # @param refresh_threads [Integer] Maximum number of refresh threads
+    # @return [Concurrent::CachedThreadPool]
+    def initialize_thread_pool(refresh_threads)
+      Concurrent::CachedThreadPool.new(
+        max_threads: refresh_threads,
+        min_threads: 0,
+        max_queue: 50,
+        fallback_policy: :discard
+      )
+    end
+
+    # Schedule a background refresh for a cache key
+    #
+    # Prevents duplicate refreshes by using a fetch lock. If another process/thread
+    # is already refreshing this key, this method returns immediately.
+    #
+    # Errors during refresh are caught and logged to prevent thread crashes.
+    #
+    # @param key [String] Cache key
+    # @yield Block to execute to fetch fresh data
+    # @return [void]
+    def schedule_refresh(key, &block)
+      # Prevent duplicate refreshes
+      lock_key = build_lock_key(key)
+      return unless acquire_lock(lock_key)
+
+      @thread_pool.post do
+        value = yield block
+        set_cache_entry(key, value)
+      rescue StandardError => e
+        logger.error("Langfuse cache refresh failed for key '#{key}': #{e.class} - #{e.message}")
+      ensure
+        release_lock(lock_key)
+      end
+    end
+
+    # Fetch data and cache it with SWR metadata
+    #
+    # @param key [String] Cache key
+    # @yield Block to execute to fetch fresh data
+    # @return [Object] Freshly fetched value
+    def fetch_and_cache(key, &block)
+      value = yield block
+      set_cache_entry(key, value)
+    end
+
+    # Set value in cache with SWR metadata (CacheEntry)
+    #
+    # @param key [String] Cache key
+    # @param value [Object] Value to cache
+    # @return [Object] The cached value
+    def set_cache_entry(key, value)
+      now = Time.now
+      fresh_until = now + ttl
+      stale_until = fresh_until + stale_ttl
+      entry = PromptCache::CacheEntry.new(value, fresh_until, stale_until)
+
+      cache_set(key, entry)
+
+      value
+    end
+
+    # Build a lock key for fetch operations
+    #
+    # Can be overridden by including class if custom key format is needed.
+    #
+    # @param key [String] Cache key
+    # @return [String] Lock key
+    def build_lock_key(key)
+      "#{key}:lock"
+    end
+
+    # Calculate total TTL (fresh + stale)
+    #
+    # @return [Integer] Total TTL in seconds
+    def total_ttl
+      ttl + stale_ttl
+    end
+
+    # Abstract methods that must be implemented by including class
+
+    # Get a value from cache
+    #
+    # @param key [String] Cache key
+    # @return [Object, nil] Cached value or nil
+    # @raise [NotImplementedError] if not implemented by including class
+    def cache_get(_key)
+      raise NotImplementedError, "#{self.class} must implement #cache_get"
+    end
+
+    # Set a value in cache
+    #
+    # @param key [String] Cache key
+    # @param value [Object] Value to cache
+    # @return [Object] The cached value
+    # @raise [NotImplementedError] if not implemented by including class
+    def cache_set(_key, _value)
+      raise NotImplementedError, "#{self.class} must implement #cache_set"
+    end
+
+    # Acquire a lock
+    #
+    # @param lock_key [String] Lock key
+    # @return [Boolean] true if lock was acquired
+    # @raise [NotImplementedError] if not implemented by including class
+    def acquire_lock(_lock_key)
+      raise NotImplementedError, "#{self.class} must implement #acquire_lock"
+    end
+
+    # Release a lock
+    #
+    # @param lock_key [String] Lock key
+    # @return [void]
+    # @raise [NotImplementedError] if not implemented by including class
+    def release_lock(_lock_key)
+      raise NotImplementedError, "#{self.class} must implement #release_lock"
+    end
+
+    # Get TTL value
+    #
+    # @return [Integer] TTL in seconds
+    # @raise [NotImplementedError] if not implemented by including class
+    def ttl
+      @ttl || raise(NotImplementedError, "#{self.class} must provide @ttl")
+    end
+
+    # Get stale TTL value
+    #
+    # @return [Integer] Stale TTL in seconds
+    # @raise [NotImplementedError] if not implemented by including class
+    def stale_ttl
+      @stale_ttl || raise(NotImplementedError, "#{self.class} must provide @stale_ttl")
+    end
+
+    # Get logger instance
+    #
+    # @return [Logger] Logger instance
+    # @raise [NotImplementedError] if not implemented by including class
+    def logger
+      @logger || raise(NotImplementedError, "#{self.class} must provide @logger")
+    end
+  end
+end
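A minimal sketch of the stale-while-revalidate flow this new module provides, assuming a cache object whose class includes it (such as the RailsCacheAdapter above) built with a positive stale_ttl; `cache`, `api_client`, and the prompt name are placeholders, not gem constants.

```ruby
# Illustrative usage sketch -- `cache` and `api_client` are assumed objects.
prompt = cache.fetch_with_stale_while_revalidate("greeting:v1") do
  api_client.get_prompt("greeting") # runs synchronously on a miss, or on a refresh thread when stale
end

# FRESH entry  -> returned immediately, block not called
# STALE entry  -> stale data returned immediately, block scheduled on the refresh thread pool
# MISS/EXPIRED -> block runs synchronously and the result is stored with fresh/stale windows

cache.shutdown # drain the background refresh pool, e.g. at process exit
```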
data/lib/langfuse/version.rb CHANGED

metadata CHANGED
@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: langfuse-rb
 version: !ruby/object:Gem::Version
-  version: 0.1.0
+  version: 0.3.0
 platform: ruby
 authors:
 - SimplePractice
@@ -13,30 +13,42 @@ dependencies:
   name: faraday
   requirement: !ruby/object:Gem::Requirement
     requirements:
-    - - "
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: '1.0'
+    - - "<"
       - !ruby/object:Gem::Version
-        version: '
+        version: '3'
   type: :runtime
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
-    - - "
+    - - ">="
       - !ruby/object:Gem::Version
-        version: '
+        version: '1.0'
+    - - "<"
+      - !ruby/object:Gem::Version
+        version: '3'
 - !ruby/object:Gem::Dependency
   name: faraday-retry
   requirement: !ruby/object:Gem::Requirement
     requirements:
-    - - "
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: '1.0'
+    - - "<"
       - !ruby/object:Gem::Version
-        version: '
+        version: '3.0'
   type: :runtime
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
-    - - "
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: '1.0'
+    - - "<"
       - !ruby/object:Gem::Version
-        version: '
+        version: '3.0'
 - !ruby/object:Gem::Dependency
   name: mustache
   requirement: !ruby/object:Gem::Requirement
@@ -51,6 +63,20 @@ dependencies:
     - - "~>"
       - !ruby/object:Gem::Version
         version: '1.1'
+- !ruby/object:Gem::Dependency
+  name: concurrent-ruby
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - "~>"
+      - !ruby/object:Gem::Version
+        version: '1.2'
+  type: :runtime
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - "~>"
+      - !ruby/object:Gem::Version
+        version: '1.2'
 - !ruby/object:Gem::Dependency
   name: opentelemetry-api
   requirement: !ruby/object:Gem::Requirement
@@ -146,6 +172,7 @@ files:
 - lib/langfuse/rails_cache_adapter.rb
 - lib/langfuse/score_client.rb
 - lib/langfuse/span_processor.rb
+- lib/langfuse/stale_while_revalidate.rb
 - lib/langfuse/text_prompt_client.rb
 - lib/langfuse/types.rb
 - lib/langfuse/version.rb
@@ -171,7 +198,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
   - !ruby/object:Gem::Version
     version: '0'
 requirements: []
-rubygems_version:
+rubygems_version: 4.0.3
 specification_version: 4
 summary: Ruby SDK for Langfuse - LLM observability and prompt management
 test_files: []