openlayer 0.5.0 → 0.7.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: 0e4cc5a98379f9ae05ca709024af12cf7858ad34ee8b0f5ea210aa81bd304ef4
- data.tar.gz: 50a1a617e5bfd90d6b1e4de15bed85f96b76a79280ba6b09441db937552f0e66
+ metadata.gz: 676a996fd68cfe29c8abf62e4d4406e93842df91c067b4733deadf5ef7e6d583
+ data.tar.gz: 47d1c0944fa95169bfc003b26b7b12698820fec07503d562f88ee7baeaf192c1
  SHA512:
- metadata.gz: 466925ac2e63aa675d10967906e219c1ded253af1d032ca9b524d0f2e5e9fe32fd859e9b679ad5a90c1d224f2166efc5ab9bb459456cbea3aa0f5df6756c523a
- data.tar.gz: afeb1a7249dde3173191f80f141ce84afc5423882a4749fb75acd82ef6fcf03dccd0b9b233686cd61036337efa6f5890072f0116772735d84efa72111c6d562f
+ metadata.gz: 69fc4228f987067c305b5078f6ce37d1f02a20ccf877cb38c1b578ed70e085ab15429fdfbe41ccdf157a4d42416ae24c28cda425a59b1370c6524186fd70810c
+ data.tar.gz: 10fe98ac7f9ae76e30f30641bf49c0d729a6e2f2718baeb4b5dd33a5a8d2947f82a165cc2fed674cc90b63adaf67d184a90f883a9d83f1ff3b7194bd9fbc5f4f
data/CHANGELOG.md CHANGED
@@ -1,5 +1,26 @@
  # Changelog
 
+ ## 0.7.0 (2025-12-17)
+
+ Full Changelog: [v0.6.0...v0.7.0](https://github.com/openlayer-ai/openlayer-ruby/compare/v0.6.0...v0.7.0)
+
+ ### Features
+
+ * **closes OPEN-8478:** add ConversationalSearchService tracer to Ruby SDK ([a33bc2e](https://github.com/openlayer-ai/openlayer-ruby/commit/a33bc2eab12db4b8e2d95a66435b1f7fbc1d0397))
+
+ ## 0.6.0 (2025-12-17)
+
+ Full Changelog: [v0.5.0...v0.6.0](https://github.com/openlayer-ai/openlayer-ruby/compare/v0.5.0...v0.6.0)
+
+ ### Features
+
+ * **api:** add test evaluation method ([18f2855](https://github.com/openlayer-ai/openlayer-ruby/commit/18f28556f722aad06666f8a247e597328d218b14))
+
+
+ ### Bug Fixes
+
+ * calling `break` out of streams should be instantaneous ([8fe6142](https://github.com/openlayer-ai/openlayer-ruby/commit/8fe6142fdb2c5590bb28678f8fbf4958bdcfb9f9))
+
  ## 0.5.0 (2025-12-16)
 
  Full Changelog: [v0.4.1...v0.5.0](https://github.com/openlayer-ai/openlayer-ruby/compare/v0.4.1...v0.5.0)
data/README.md CHANGED
@@ -17,7 +17,7 @@ To use this gem, install via Bundler by adding the following to your application
  <!-- x-release-please-start-version -->
 
  ```ruby
- gem "openlayer", "~> 0.5.0"
+ gem "openlayer", "~> 0.7.0"
  ```
 
  <!-- x-release-please-end -->
@@ -30,6 +30,9 @@ module Openlayer
  # @return [Openlayer::Resources::Storage]
  attr_reader :storage
 
+ # @return [Openlayer::Resources::Tests]
+ attr_reader :tests
+
  # @api private
  #
  # @return [Hash{String=>String}]
@@ -77,6 +80,7 @@ module Openlayer
  @commits = Openlayer::Resources::Commits.new(client: self)
  @inference_pipelines = Openlayer::Resources::InferencePipelines.new(client: self)
  @storage = Openlayer::Resources::Storage.new(client: self)
+ @tests = Openlayer::Resources::Tests.new(client: self)
  end
  end
  end
@@ -0,0 +1,352 @@
+ # frozen_string_literal: true
+ # typed: false
+
+ require "json"
+ require "time"
+
+ module Openlayer
+ module Integrations
+ # Tracer for Google Cloud DiscoveryEngine ConversationalSearchService
+ #
+ # This class provides integration with Google's ConversationalSearchService
+ # to automatically trace answer_query calls and send them to the Openlayer platform.
+ #
+ # @example Basic usage
+ #   require 'openlayer/integrations/google_conversational_search_tracer'
+ #   require 'google/cloud/discovery_engine/v1'
+ #
+ #   google_client = Google::Cloud::DiscoveryEngine::V1::ConversationalSearchService::Client.new
+ #   openlayer = Openlayer::Client.new(api_key: ENV['OPENLAYER_API_KEY'])
+ #
+ #   Openlayer::Integrations::GoogleConversationalSearchTracer.trace_client(
+ #     google_client,
+ #     openlayer_client: openlayer,
+ #     inference_pipeline_id: 'your-pipeline-id'
+ #   )
+ #
+ #   # Now all answer_query calls are automatically traced
+ #   response = google_client.answer_query(
+ #     serving_config: "projects/.../servingConfigs/default",
+ #     query: { text: "What is the meaning of life?" }
+ #   )
+ class GoogleConversationalSearchTracer
+ # Enable tracing on a Google ConversationalSearchService client
+ #
+ # @param client [Google::Cloud::DiscoveryEngine::V1::ConversationalSearchService::Client]
+ #   The Google client instance to trace
+ # @param openlayer_client [Openlayer::Client]
+ #   The Openlayer client instance for sending traces
+ # @param inference_pipeline_id [String]
+ #   The Openlayer inference pipeline ID to send traces to
+ # @return [void]
+ def self.trace_client(client, openlayer_client:, inference_pipeline_id:)
+ # Store original method reference
+ original_answer_query = client.method(:answer_query)
+
+ # Define traced wrapper method
+ client.define_singleton_method(:answer_query) do |*args, **kwargs, &block|
+ # Capture start time
+ start_time = Time.now
+
+ # Execute the original method
+ response = original_answer_query.call(*args, **kwargs, &block)
+
+ # Capture end time
+ end_time = Time.now
+
+ # Send trace to Openlayer (with error handling)
+ begin
+ GoogleConversationalSearchTracer.send_trace(
+ args: args,
+ kwargs: kwargs,
+ response: response,
+ start_time: start_time,
+ end_time: end_time,
+ openlayer_client: openlayer_client,
+ inference_pipeline_id: inference_pipeline_id
+ )
+ rescue StandardError => e
+ # Never break the user's application due to tracing errors
+ GoogleConversationalSearchTracer.warn_if_debug("[Openlayer] Failed to send trace: #{e.message}")
+ GoogleConversationalSearchTracer.warn_if_debug("[Openlayer] #{e.backtrace.first(3).join("\n")}") if e.backtrace
+ end
+
+ # Always return the original response
+ response
+ end
+
+ nil
+ end
+
+ # Send trace data to Openlayer platform
+ #
+ # @param args [Array] Original method positional arguments
+ # @param kwargs [Hash] Original method keyword arguments
+ # @param response [Google::Cloud::DiscoveryEngine::V1::AnswerQueryResponse] The API response
+ # @param start_time [Time] Request start time
+ # @param end_time [Time] Request end time
+ # @param openlayer_client [Openlayer::Client] Openlayer client instance
+ # @param inference_pipeline_id [String] Pipeline ID
+ # @return [void]
+ def self.send_trace(args:, kwargs:, response:, start_time:, end_time:, openlayer_client:, inference_pipeline_id:)
+ # Calculate latency
+ latency_ms = ((end_time - start_time) * 1000).round(2)
+
+ # Extract query from request
+ query_text = extract_query(args, kwargs)
+
+ # Extract answer and metadata from response
+ answer_data = extract_answer_data(response)
+
+ # Extract additional metadata
+ metadata = extract_metadata(args, kwargs, response, latency_ms)
+
+ # Rough estimate of prompt and completion tokens
+ prompt_tokens = (query_text.length / 4.0).ceil
+ completion_tokens = (answer_data[:answer_text].length / 4.0).ceil
+
+ # Build trace data in Openlayer format
+ trace_data = {
+ config: {
+ inputVariableNames: ["query"],
+ outputColumnName: "answer",
+ latencyColumnName: "latency_ms",
+ timestampColumnName: "timestamp"
+ },
+ rows: [
+ {
+ query: query_text,
+ answer: answer_data[:answer_text],
+ latency_ms: latency_ms,
+ timestamp: start_time.to_i,
+ metadata: metadata,
+ steps: [
+ {
+ name: "Conversational Search answer_query",
+ type: "chat_completion",
+ provider: "Google",
+ startTime: start_time.to_i,
+ endTime: end_time.to_i,
+ latency: latency_ms,
+ metadata: metadata,
+ inputs: {
+ prompt: [
+ {role: "user", content: query_text}
+ ]
+ },
+ output: answer_data[:answer_text],
+ promptTokens: prompt_tokens,
+ completionTokens: completion_tokens,
+ tokens: prompt_tokens + completion_tokens,
+ model: "google-discovery-engine"
+ }
+ ]
+ }
+ ]
+ }
+
+ # Send to Openlayer
+ openlayer_client
+ .inference_pipelines
+ .data
+ .stream(
+ inference_pipeline_id,
+ **trace_data
+ )
+ end
+
+ # Extract query text from request arguments
+ #
+ # Handles both calling styles:
+ # 1. Request object: answer_query(request_object)
+ # 2. Keyword arguments: answer_query(query: {...}, serving_config: ...)
+ #
+ # @param args [Array] Positional arguments
+ # @param kwargs [Hash] Keyword arguments
+ # @return [String, nil] The query text or nil if not found
+ def self.extract_query(args, kwargs)
+ query_obj = nil
+
+ # Try to get query from request object (first positional arg)
+ if args.length.positive? && args[0].respond_to?(:query)
+ query_obj = args[0].query
+ # Try to get query from keyword arguments
+ elsif kwargs[:query]
+ query_obj = kwargs[:query]
+ end
+
+ return nil if query_obj.nil?
+
+ # Extract text from query object
+ if query_obj.respond_to?(:text)
+ query_obj.text
+ elsif query_obj.is_a?(Hash) && query_obj[:text]
+ query_obj[:text]
+ elsif query_obj.is_a?(Hash) && query_obj["text"]
+ query_obj["text"]
+ else
+ query_obj.to_s
+ end
+ rescue StandardError => e
+ warn_if_debug("[Openlayer] Failed to extract query: #{e.message}")
+ nil
+ end
+
+ # Extract answer data from response
+ #
+ # @param response [Google::Cloud::DiscoveryEngine::V1::AnswerQueryResponse] API response
+ # @return [Hash] Hash containing answer_text and other answer fields
+ def self.extract_answer_data(response)
+ return {answer_text: nil} unless response.respond_to?(:answer)
+
+ answer = response.answer
+ return {answer_text: nil} if answer.nil?
+
+ {
+ answer_text: safe_extract(answer, :answer_text),
+ state: safe_extract(answer, :state)&.to_s,
+ grounding_score: safe_extract(answer, :grounding_score),
+ create_time: extract_timestamp(answer, :create_time),
+ complete_time: extract_timestamp(answer, :complete_time),
+ citations_count: safe_count(answer, :citations),
+ references_count: safe_count(answer, :references)
+ }
+ rescue StandardError => e
+ warn_if_debug("[Openlayer] Failed to extract answer data: #{e.message}")
+ {answer_text: nil}
+ end
+
+ # Extract metadata from request and response
+ #
+ # @param args [Array] Positional arguments
+ # @param kwargs [Hash] Keyword arguments
+ # @param response [Google::Cloud::DiscoveryEngine::V1::AnswerQueryResponse] API response
+ # @param latency_ms [Float] Request latency in milliseconds
+ # @return [Hash] Metadata hash
+ def self.extract_metadata(args, kwargs, response, latency_ms)
+ answer_data = extract_answer_data(response)
+
+ metadata = {
+ provider: "google",
+ service: "conversational_search",
+ latency_ms: latency_ms
+ }
+
+ # Add answer metadata
+ metadata[:grounding_score] = answer_data[:grounding_score] if answer_data[:grounding_score]
+ metadata[:state] = answer_data[:state] if answer_data[:state]
+ metadata[:citations_count] = answer_data[:citations_count] if answer_data[:citations_count]
+ metadata[:references_count] = answer_data[:references_count] if answer_data[:references_count]
+
+ # Add request metadata
+ metadata[:serving_config] = extract_serving_config(args, kwargs)
+ metadata[:session] = extract_session(args, kwargs)
+
+ # Add timing metadata
+ if answer_data[:create_time] && answer_data[:complete_time]
+ generation_time_ms = ((answer_data[:complete_time] - answer_data[:create_time]) * 1000).round(2)
+ metadata[:generation_time_ms] = generation_time_ms
+ end
+
+ metadata.compact
+ rescue StandardError => e
+ warn_if_debug("[Openlayer] Failed to extract metadata: #{e.message}")
+ {
+ provider: "google",
+ service: "conversational_search",
+ latency_ms: latency_ms
+ }
+ end
+
+ # Extract serving_config from request
+ #
+ # @param args [Array] Positional arguments
+ # @param kwargs [Hash] Keyword arguments
+ # @return [String, nil] Serving config or nil
+ def self.extract_serving_config(args, kwargs)
+ if args.length.positive? && args[0].respond_to?(:serving_config)
+ args[0].serving_config
+ elsif kwargs[:serving_config]
+ kwargs[:serving_config]
+ end
+ rescue StandardError
+ nil
+ end
+
+ # Extract session from request
+ #
+ # @param args [Array] Positional arguments
+ # @param kwargs [Hash] Keyword arguments
+ # @return [String, nil] Session ID or nil
+ def self.extract_session(args, kwargs)
+ if args.length.positive? && args[0].respond_to?(:session)
+ args[0].session
+ elsif kwargs[:session]
+ kwargs[:session]
+ end
+ rescue StandardError
+ nil
+ end
+
+ # Safely extract a field from an object
+ #
+ # @param obj [Object] Object to extract from
+ # @param field [Symbol] Field name
+ # @return [Object, nil] Field value or nil
+ def self.safe_extract(obj, field)
+ obj.respond_to?(field) ? obj.public_send(field) : nil
+ rescue StandardError
+ nil
+ end
+
+ # Safely count elements in a collection field
+ #
+ # @param obj [Object] Object containing the collection
+ # @param field [Symbol] Field name
+ # @return [Integer, nil] Count or nil
+ def self.safe_count(obj, field)
+ collection = safe_extract(obj, field)
+ collection.respond_to?(:length) ? collection.length : nil
+ rescue StandardError
+ nil
+ end
+
+ # Extract timestamp and convert to Unix timestamp
+ #
+ # @param obj [Object] Object containing timestamp
+ # @param field [Symbol] Field name
+ # @return [Integer, nil] Unix timestamp or nil
+ def self.extract_timestamp(obj, field)
+ timestamp = safe_extract(obj, field)
+ return nil if timestamp.nil?
+
+ if timestamp.respond_to?(:to_time)
+ timestamp.to_time.to_i
+ elsif timestamp.respond_to?(:to_i)
+ timestamp.to_i
+ end
+ rescue StandardError
+ nil
+ end
+
+ # Log warning message if debug mode is enabled
+ #
+ # @param message [String] Warning message
+ # @return [void]
+ def self.warn_if_debug(message)
+ warn(message) if ENV["OPENLAYER_DEBUG"]
+ end
+
+ # send_trace and warn_if_debug need to be public because they're called
+ # from the singleton method context
+ private_class_method :extract_query,
+ :extract_answer_data,
+ :extract_metadata,
+ :extract_serving_config,
+ :extract_session,
+ :safe_extract,
+ :safe_count,
+ :extract_timestamp
+ end
+ end
+ end
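The tracer deliberately swallows its own failures so instrumentation can never break the host application; `warn_if_debug` only writes to stderr when the `OPENLAYER_DEBUG` environment variable is set. A minimal sketch of turning that on while developing, assuming `google_client` and `openlayer` are already constructed as in the class documentation above and the pipeline id is a placeholder:

```ruby
# Sketch: surface tracing problems during development. Trace errors never
# raise into answer_query; they are only printed when OPENLAYER_DEBUG is set.
ENV["OPENLAYER_DEBUG"] = "1"

Openlayer::Integrations::GoogleConversationalSearchTracer.trace_client(
  google_client,                       # assumed: a ConversationalSearchService client
  openlayer_client: openlayer,         # assumed: an Openlayer::Client instance
  inference_pipeline_id: "your-pipeline-id"
)
```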
@@ -153,17 +153,19 @@ module Openlayer
  end
 
  self.class.calibrate_socket_timeout(conn, deadline)
- conn.request(req) do |rsp|
- y << [req, rsp]
- break if finished
-
- rsp.read_body do |bytes|
- y << bytes.force_encoding(Encoding::BINARY)
- break if finished
-
- self.class.calibrate_socket_timeout(conn, deadline)
+ ::Kernel.catch(:jump) do
+ conn.request(req) do |rsp|
+ y << [req, rsp]
+ ::Kernel.throw(:jump) if finished
+
+ rsp.read_body do |bytes|
+ y << bytes.force_encoding(Encoding::BINARY)
+ ::Kernel.throw(:jump) if finished
+
+ self.class.calibrate_socket_timeout(conn, deadline)
+ end
+ eof = true
  end
- eof = true
  end
  end
  ensure
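This hunk is the "calling `break` out of streams should be instantaneous" fix from the 0.6.0 changelog: `break` inside the nested `conn.request`/`rsp.read_body` blocks is replaced with `::Kernel.catch`/`throw`, so that when the consumer stops reading, control unwinds out of both blocks in one jump instead of draining the rest of the body. A standalone plain-Ruby sketch of the same pattern (not Openlayer code, illustration only):

```ruby
# Plain-Ruby illustration of the catch/throw pattern used above: the throw
# unwinds both nested blocks immediately, which is what makes breaking out
# of a streamed response feel instantaneous.
def read_chunks(finished)
  Kernel.catch(:jump) do
    %w[chunk-1 chunk-2].each do |chunk|
      ["#{chunk}-a", "#{chunk}-b"].each do |bytes|
        yield bytes
        Kernel.throw(:jump) if finished.call # exit both loops in one jump
      end
    end
  end
end

stop = false
read_chunks(-> { stop }) { |bytes| puts bytes; stop = true }
# Prints only "chunk-1-a": the first yield sets stop, the throw ends the read.
```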
@@ -0,0 +1,50 @@
+ # frozen_string_literal: true
+
+ module Openlayer
+ module Models
+ # @see Openlayer::Resources::Tests#evaluate
+ class TestEvaluateParams < Openlayer::Internal::Type::BaseModel
+ extend Openlayer::Internal::Type::RequestParameters::Converter
+ include Openlayer::Internal::Type::RequestParameters
+
+ # @!attribute end_timestamp
+ #   End timestamp in seconds (Unix epoch)
+ #
+ #   @return [Integer]
+ required :end_timestamp, Integer, api_name: :endTimestamp
+
+ # @!attribute start_timestamp
+ #   Start timestamp in seconds (Unix epoch)
+ #
+ #   @return [Integer]
+ required :start_timestamp, Integer, api_name: :startTimestamp
+
+ # @!attribute inference_pipeline_id
+ #   ID of the inference pipeline to evaluate. If not provided, all inference
+ #   pipelines the test applies to will be evaluated.
+ #
+ #   @return [String, nil]
+ optional :inference_pipeline_id, String, api_name: :inferencePipelineId
+
+ # @!attribute overwrite_results
+ #   Whether to overwrite existing test results
+ #
+ #   @return [Boolean, nil]
+ optional :overwrite_results, Openlayer::Internal::Type::Boolean, api_name: :overwriteResults
+
+ # @!method initialize(end_timestamp:, start_timestamp:, inference_pipeline_id: nil, overwrite_results: nil, request_options: {})
+ #   Some parameter documentations has been truncated, see
+ #   {Openlayer::Models::TestEvaluateParams} for more details.
+ #
+ #   @param end_timestamp [Integer] End timestamp in seconds (Unix epoch)
+ #
+ #   @param start_timestamp [Integer] Start timestamp in seconds (Unix epoch)
+ #
+ #   @param inference_pipeline_id [String] ID of the inference pipeline to evaluate. If not provided, all inference pipelin
+ #
+ #   @param overwrite_results [Boolean] Whether to overwrite existing test results
+ #
+ #   @param request_options [Openlayer::RequestOptions, Hash{Symbol=>Object}]
+ end
+ end
+ end
@@ -0,0 +1,75 @@
+ # frozen_string_literal: true
+
+ module Openlayer
+ module Models
+ # @see Openlayer::Resources::Tests#evaluate
+ class TestEvaluateResponse < Openlayer::Internal::Type::BaseModel
+ # @!attribute message
+ #
+ #   @return [String]
+ required :message, String
+
+ # @!attribute pipeline_count
+ #   Number of inference pipelines the test was queued for evaluation on
+ #
+ #   @return [Integer]
+ required :pipeline_count, Integer, api_name: :pipelineCount
+
+ # @!attribute requested_end_timestamp
+ #   The end timestamp you requested (in seconds)
+ #
+ #   @return [Integer]
+ required :requested_end_timestamp, Integer, api_name: :requestedEndTimestamp
+
+ # @!attribute requested_start_timestamp
+ #   The start timestamp you requested (in seconds)
+ #
+ #   @return [Integer]
+ required :requested_start_timestamp, Integer, api_name: :requestedStartTimestamp
+
+ # @!attribute tasks
+ #   Array of background task information for each pipeline evaluation
+ #
+ #   @return [Array<Openlayer::Models::TestEvaluateResponse::Task>]
+ required :tasks, -> { Openlayer::Internal::Type::ArrayOf[Openlayer::Models::TestEvaluateResponse::Task] }
+
+ # @!method initialize(message:, pipeline_count:, requested_end_timestamp:, requested_start_timestamp:, tasks:)
+ #   @param message [String]
+ #
+ #   @param pipeline_count [Integer] Number of inference pipelines the test was queued for evaluation on
+ #
+ #   @param requested_end_timestamp [Integer] The end timestamp you requested (in seconds)
+ #
+ #   @param requested_start_timestamp [Integer] The start timestamp you requested (in seconds)
+ #
+ #   @param tasks [Array<Openlayer::Models::TestEvaluateResponse::Task>] Array of background task information for each pipeline evaluation
+
+ class Task < Openlayer::Internal::Type::BaseModel
+ # @!attribute pipeline_id
+ #   ID of the inference pipeline this task is for
+ #
+ #   @return [String]
+ required :pipeline_id, String, api_name: :pipelineId
+
+ # @!attribute task_result_id
+ #   ID of the background task
+ #
+ #   @return [String]
+ required :task_result_id, String, api_name: :taskResultId
+
+ # @!attribute task_result_url
+ #   URL to check the status of this background task
+ #
+ #   @return [String]
+ required :task_result_url, String, api_name: :taskResultUrl
+
+ # @!method initialize(pipeline_id:, task_result_id:, task_result_url:)
+ #   @param pipeline_id [String] ID of the inference pipeline this task is for
+ #
+ #   @param task_result_id [String] ID of the background task
+ #
+ #   @param task_result_url [String] URL to check the status of this background task
+ end
+ end
+ end
+ end
@@ -58,4 +58,6 @@ module Openlayer
  Projects = Openlayer::Models::Projects
 
  Storage = Openlayer::Models::Storage
+
+ TestEvaluateParams = Openlayer::Models::TestEvaluateParams
  end
@@ -0,0 +1,50 @@
+ # frozen_string_literal: true
+
+ module Openlayer
+ module Resources
+ class Tests
+ # Some parameter documentations has been truncated, see
+ # {Openlayer::Models::TestEvaluateParams} for more details.
+ #
+ # Triggers one-off evaluation of a specific monitoring test for a custom timestamp
+ # range. This allows evaluating tests for historical data or custom time periods
+ # outside the regular evaluation window schedule. It also allows overwriting the
+ # existing test results.
+ #
+ # @overload evaluate(test_id, end_timestamp:, start_timestamp:, inference_pipeline_id: nil, overwrite_results: nil, request_options: {})
+ #
+ # @param test_id [String] The test id.
+ #
+ # @param end_timestamp [Integer] End timestamp in seconds (Unix epoch)
+ #
+ # @param start_timestamp [Integer] Start timestamp in seconds (Unix epoch)
+ #
+ # @param inference_pipeline_id [String] ID of the inference pipeline to evaluate. If not provided, all inference pipelin
+ #
+ # @param overwrite_results [Boolean] Whether to overwrite existing test results
+ #
+ # @param request_options [Openlayer::RequestOptions, Hash{Symbol=>Object}, nil]
+ #
+ # @return [Openlayer::Models::TestEvaluateResponse]
+ #
+ # @see Openlayer::Models::TestEvaluateParams
+ def evaluate(test_id, params)
+ parsed, options = Openlayer::TestEvaluateParams.dump_request(params)
+ @client.request(
+ method: :post,
+ path: ["tests/%1$s/evaluate", test_id],
+ body: parsed,
+ model: Openlayer::Models::TestEvaluateResponse,
+ options: options
+ )
+ end
+
+ # @api private
+ #
+ # @param client [Openlayer::Client]
+ def initialize(client:)
+ @client = client
+ end
+ end
+ end
+ end
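A minimal usage sketch for the new `Tests#evaluate` resource, assuming an API key in `OPENLAYER_API_KEY` and placeholder IDs; keyword arguments are passed in the calling style documented by the `@overload` above:

```ruby
require "openlayer"

client = Openlayer::Client.new(api_key: ENV["OPENLAYER_API_KEY"])

# Re-evaluate a monitoring test over the last 24 hours (Unix-second timestamps).
response = client.tests.evaluate(
  "your-test-id",                        # placeholder test id
  start_timestamp: Time.now.to_i - 86_400,
  end_timestamp: Time.now.to_i,
  overwrite_results: true
)

puts response.message
response.tasks.each do |task|
  puts "#{task.pipeline_id}: #{task.task_result_url}"
end
```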
@@ -1,5 +1,5 @@
  # frozen_string_literal: true
 
  module Openlayer
-  VERSION = "0.5.0"
+  VERSION = "0.7.0"
  end
data/lib/openlayer.rb CHANGED
@@ -87,6 +87,8 @@ require_relative "openlayer/models/projects/test_update_params"
  require_relative "openlayer/models/projects/test_update_response"
  require_relative "openlayer/models/storage/presigned_url_create_params"
  require_relative "openlayer/models/storage/presigned_url_create_response"
+ require_relative "openlayer/models/test_evaluate_params"
+ require_relative "openlayer/models/test_evaluate_response"
  require_relative "openlayer/models"
  require_relative "openlayer/resources/commits"
  require_relative "openlayer/resources/commits/test_results"
@@ -100,3 +102,4 @@ require_relative "openlayer/resources/projects/inference_pipelines"
  require_relative "openlayer/resources/projects/tests"
  require_relative "openlayer/resources/storage"
  require_relative "openlayer/resources/storage/presigned_url"
+ require_relative "openlayer/resources/tests"
@@ -0,0 +1,44 @@
+ # typed: strong
+ # frozen_string_literal: true
+
+ # Stub for google-cloud-discovery_engine-v1 gem
+ module Google
+ module Cloud
+ module DiscoveryEngine
+ module V1
+ class Query
+ sig { params(text: String).void }
+ def initialize(
+ text:
+ )
+ end
+ sig { returns(String) }
+ def text; end
+ end
+
+ class ConversationalSearchService
+ class Client
+ sig { void }
+ def initialize; end
+
+ sig do
+ params(
+ serving_config: String,
+ query: Query,
+ session: T.nilable(String),
+ kwargs: T.untyped
+ ).returns(T.untyped)
+ end
+ def answer_query(
+ serving_config:,
+ query:,
+ session: nil,
+ **kwargs
+ )
+ end
+ end
+ end
+ end
+ end
+ end
+ end
@@ -25,6 +25,9 @@ module Openlayer
  sig { returns(Openlayer::Resources::Storage) }
  attr_reader :storage
 
+ sig { returns(Openlayer::Resources::Tests) }
+ attr_reader :tests
+
  # @api private
  sig { override.returns(T::Hash[String, String]) }
  private def auth_headers
@@ -0,0 +1,22 @@
+ # typed: strong
+ # frozen_string_literal: true
+
+ module Openlayer
+ module Integrations
+ class GoogleConversationalSearchTracer
+ sig do
+ params(
+ client: T.untyped,
+ openlayer_client: Openlayer::Client,
+ inference_pipeline_id: String
+ ).void
+ end
+ def self.trace_client(
+ client,
+ openlayer_client:,
+ inference_pipeline_id:
+ )
+ end
+ end
+ end
+ end
@@ -0,0 +1,75 @@
+ # typed: strong
+
+ module Openlayer
+ module Models
+ class TestEvaluateParams < Openlayer::Internal::Type::BaseModel
+ extend Openlayer::Internal::Type::RequestParameters::Converter
+ include Openlayer::Internal::Type::RequestParameters
+
+ OrHash =
+ T.type_alias do
+ T.any(Openlayer::TestEvaluateParams, Openlayer::Internal::AnyHash)
+ end
+
+ # End timestamp in seconds (Unix epoch)
+ sig { returns(Integer) }
+ attr_accessor :end_timestamp
+
+ # Start timestamp in seconds (Unix epoch)
+ sig { returns(Integer) }
+ attr_accessor :start_timestamp
+
+ # ID of the inference pipeline to evaluate. If not provided, all inference
+ # pipelines the test applies to will be evaluated.
+ sig { returns(T.nilable(String)) }
+ attr_reader :inference_pipeline_id
+
+ sig { params(inference_pipeline_id: String).void }
+ attr_writer :inference_pipeline_id
+
+ # Whether to overwrite existing test results
+ sig { returns(T.nilable(T::Boolean)) }
+ attr_reader :overwrite_results
+
+ sig { params(overwrite_results: T::Boolean).void }
+ attr_writer :overwrite_results
+
+ sig do
+ params(
+ end_timestamp: Integer,
+ start_timestamp: Integer,
+ inference_pipeline_id: String,
+ overwrite_results: T::Boolean,
+ request_options: Openlayer::RequestOptions::OrHash
+ ).returns(T.attached_class)
+ end
+ def self.new(
+ # End timestamp in seconds (Unix epoch)
+ end_timestamp:,
+ # Start timestamp in seconds (Unix epoch)
+ start_timestamp:,
+ # ID of the inference pipeline to evaluate. If not provided, all inference
+ # pipelines the test applies to will be evaluated.
+ inference_pipeline_id: nil,
+ # Whether to overwrite existing test results
+ overwrite_results: nil,
+ request_options: {}
+ )
+ end
+
+ sig do
+ override.returns(
+ {
+ end_timestamp: Integer,
+ start_timestamp: Integer,
+ inference_pipeline_id: String,
+ overwrite_results: T::Boolean,
+ request_options: Openlayer::RequestOptions
+ }
+ )
+ end
+ def to_hash
+ end
+ end
+ end
+ end
@@ -0,0 +1,121 @@
+ # typed: strong
+
+ module Openlayer
+ module Models
+ class TestEvaluateResponse < Openlayer::Internal::Type::BaseModel
+ OrHash =
+ T.type_alias do
+ T.any(
+ Openlayer::Models::TestEvaluateResponse,
+ Openlayer::Internal::AnyHash
+ )
+ end
+
+ sig { returns(String) }
+ attr_accessor :message
+
+ # Number of inference pipelines the test was queued for evaluation on
+ sig { returns(Integer) }
+ attr_accessor :pipeline_count
+
+ # The end timestamp you requested (in seconds)
+ sig { returns(Integer) }
+ attr_accessor :requested_end_timestamp
+
+ # The start timestamp you requested (in seconds)
+ sig { returns(Integer) }
+ attr_accessor :requested_start_timestamp
+
+ # Array of background task information for each pipeline evaluation
+ sig { returns(T::Array[Openlayer::Models::TestEvaluateResponse::Task]) }
+ attr_accessor :tasks
+
+ sig do
+ params(
+ message: String,
+ pipeline_count: Integer,
+ requested_end_timestamp: Integer,
+ requested_start_timestamp: Integer,
+ tasks: T::Array[Openlayer::Models::TestEvaluateResponse::Task::OrHash]
+ ).returns(T.attached_class)
+ end
+ def self.new(
+ message:,
+ # Number of inference pipelines the test was queued for evaluation on
+ pipeline_count:,
+ # The end timestamp you requested (in seconds)
+ requested_end_timestamp:,
+ # The start timestamp you requested (in seconds)
+ requested_start_timestamp:,
+ # Array of background task information for each pipeline evaluation
+ tasks:
+ )
+ end
+
+ sig do
+ override.returns(
+ {
+ message: String,
+ pipeline_count: Integer,
+ requested_end_timestamp: Integer,
+ requested_start_timestamp: Integer,
+ tasks: T::Array[Openlayer::Models::TestEvaluateResponse::Task]
+ }
+ )
+ end
+ def to_hash
+ end
+
+ class Task < Openlayer::Internal::Type::BaseModel
+ OrHash =
+ T.type_alias do
+ T.any(
+ Openlayer::Models::TestEvaluateResponse::Task,
+ Openlayer::Internal::AnyHash
+ )
+ end
+
+ # ID of the inference pipeline this task is for
+ sig { returns(String) }
+ attr_accessor :pipeline_id
+
+ # ID of the background task
+ sig { returns(String) }
+ attr_accessor :task_result_id
+
+ # URL to check the status of this background task
+ sig { returns(String) }
+ attr_accessor :task_result_url
+
+ sig do
+ params(
+ pipeline_id: String,
+ task_result_id: String,
+ task_result_url: String
+ ).returns(T.attached_class)
+ end
+ def self.new(
+ # ID of the inference pipeline this task is for
+ pipeline_id:,
+ # ID of the background task
+ task_result_id:,
+ # URL to check the status of this background task
+ task_result_url:
+ )
+ end
+
+ sig do
+ override.returns(
+ {
+ pipeline_id: String,
+ task_result_id: String,
+ task_result_url: String
+ }
+ )
+ end
+ def to_hash
+ end
+ end
+ end
+ end
+ end
@@ -23,4 +23,6 @@ module Openlayer
  Projects = Openlayer::Models::Projects
 
  Storage = Openlayer::Models::Storage
+
+ TestEvaluateParams = Openlayer::Models::TestEvaluateParams
  end
@@ -0,0 +1,42 @@
+ # typed: strong
+
+ module Openlayer
+ module Resources
+ class Tests
+ # Triggers one-off evaluation of a specific monitoring test for a custom timestamp
+ # range. This allows evaluating tests for historical data or custom time periods
+ # outside the regular evaluation window schedule. It also allows overwriting the
+ # existing test results.
+ sig do
+ params(
+ test_id: String,
+ end_timestamp: Integer,
+ start_timestamp: Integer,
+ inference_pipeline_id: String,
+ overwrite_results: T::Boolean,
+ request_options: Openlayer::RequestOptions::OrHash
+ ).returns(Openlayer::Models::TestEvaluateResponse)
+ end
+ def evaluate(
+ # The test id.
+ test_id,
+ # End timestamp in seconds (Unix epoch)
+ end_timestamp:,
+ # Start timestamp in seconds (Unix epoch)
+ start_timestamp:,
+ # ID of the inference pipeline to evaluate. If not provided, all inference
+ # pipelines the test applies to will be evaluated.
+ inference_pipeline_id: nil,
+ # Whether to overwrite existing test results
+ overwrite_results: nil,
+ request_options: {}
+ )
+ end
+
+ # @api private
+ sig { params(client: Openlayer::Client).returns(T.attached_class) }
+ def self.new(client:)
+ end
+ end
+ end
+ end
@@ -18,6 +18,8 @@ module Openlayer
 
  attr_reader storage: Openlayer::Resources::Storage
 
+ attr_reader tests: Openlayer::Resources::Tests
+
  private def auth_headers: -> ::Hash[String, String]
 
  def initialize: (
@@ -0,0 +1,45 @@
+ module Openlayer
+ module Models
+ type test_evaluate_params =
+ {
+ end_timestamp: Integer,
+ start_timestamp: Integer,
+ inference_pipeline_id: String,
+ overwrite_results: bool
+ }
+ & Openlayer::Internal::Type::request_parameters
+
+ class TestEvaluateParams < Openlayer::Internal::Type::BaseModel
+ extend Openlayer::Internal::Type::RequestParameters::Converter
+ include Openlayer::Internal::Type::RequestParameters
+
+ attr_accessor end_timestamp: Integer
+
+ attr_accessor start_timestamp: Integer
+
+ attr_reader inference_pipeline_id: String?
+
+ def inference_pipeline_id=: (String) -> String
+
+ attr_reader overwrite_results: bool?
+
+ def overwrite_results=: (bool) -> bool
+
+ def initialize: (
+ end_timestamp: Integer,
+ start_timestamp: Integer,
+ ?inference_pipeline_id: String,
+ ?overwrite_results: bool,
+ ?request_options: Openlayer::request_opts
+ ) -> void
+
+ def to_hash: -> {
+ end_timestamp: Integer,
+ start_timestamp: Integer,
+ inference_pipeline_id: String,
+ overwrite_results: bool,
+ request_options: Openlayer::RequestOptions
+ }
+ end
+ end
+ end
@@ -0,0 +1,63 @@
+ module Openlayer
+ module Models
+ type test_evaluate_response =
+ {
+ message: String,
+ pipeline_count: Integer,
+ requested_end_timestamp: Integer,
+ requested_start_timestamp: Integer,
+ tasks: ::Array[Openlayer::Models::TestEvaluateResponse::Task]
+ }
+
+ class TestEvaluateResponse < Openlayer::Internal::Type::BaseModel
+ attr_accessor message: String
+
+ attr_accessor pipeline_count: Integer
+
+ attr_accessor requested_end_timestamp: Integer
+
+ attr_accessor requested_start_timestamp: Integer
+
+ attr_accessor tasks: ::Array[Openlayer::Models::TestEvaluateResponse::Task]
+
+ def initialize: (
+ message: String,
+ pipeline_count: Integer,
+ requested_end_timestamp: Integer,
+ requested_start_timestamp: Integer,
+ tasks: ::Array[Openlayer::Models::TestEvaluateResponse::Task]
+ ) -> void
+
+ def to_hash: -> {
+ message: String,
+ pipeline_count: Integer,
+ requested_end_timestamp: Integer,
+ requested_start_timestamp: Integer,
+ tasks: ::Array[Openlayer::Models::TestEvaluateResponse::Task]
+ }
+
+ type task =
+ { pipeline_id: String, task_result_id: String, task_result_url: String }
+
+ class Task < Openlayer::Internal::Type::BaseModel
+ attr_accessor pipeline_id: String
+
+ attr_accessor task_result_id: String
+
+ attr_accessor task_result_url: String
+
+ def initialize: (
+ pipeline_id: String,
+ task_result_id: String,
+ task_result_url: String
+ ) -> void
+
+ def to_hash: -> {
+ pipeline_id: String,
+ task_result_id: String,
+ task_result_url: String
+ }
+ end
+ end
+ end
+ end
@@ -18,4 +18,6 @@ module Openlayer
  module Projects = Openlayer::Models::Projects
 
  module Storage = Openlayer::Models::Storage
+
+ class TestEvaluateParams = Openlayer::Models::TestEvaluateParams
  end
@@ -0,0 +1,16 @@
+ module Openlayer
+ module Resources
+ class Tests
+ def evaluate: (
+ String test_id,
+ end_timestamp: Integer,
+ start_timestamp: Integer,
+ ?inference_pipeline_id: String,
+ ?overwrite_results: bool,
+ ?request_options: Openlayer::request_opts
+ ) -> Openlayer::Models::TestEvaluateResponse
+
+ def initialize: (client: Openlayer::Client) -> void
+ end
+ end
+ end
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: openlayer
  version: !ruby/object:Gem::Version
-  version: 0.5.0
+  version: 0.7.0
  platform: ruby
  authors:
  - Openlayer
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2025-12-16 00:00:00.000000000 Z
+ date: 2025-12-17 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  name: connection_pool
@@ -39,6 +39,7 @@ files:
  - lib/openlayer/client.rb
  - lib/openlayer/errors.rb
  - lib/openlayer/file_part.rb
+ - lib/openlayer/integrations/google_conversational_search_tracer.rb
  - lib/openlayer/internal.rb
  - lib/openlayer/internal/transport/base_client.rb
  - lib/openlayer/internal/transport/pooled_net_requester.rb
@@ -90,6 +91,8 @@ files:
  - lib/openlayer/models/projects/test_update_response.rb
  - lib/openlayer/models/storage/presigned_url_create_params.rb
  - lib/openlayer/models/storage/presigned_url_create_response.rb
+ - lib/openlayer/models/test_evaluate_params.rb
+ - lib/openlayer/models/test_evaluate_response.rb
  - lib/openlayer/request_options.rb
  - lib/openlayer/resources/commits.rb
  - lib/openlayer/resources/commits/test_results.rb
@@ -103,11 +106,14 @@ files:
  - lib/openlayer/resources/projects/tests.rb
  - lib/openlayer/resources/storage.rb
  - lib/openlayer/resources/storage/presigned_url.rb
+ - lib/openlayer/resources/tests.rb
  - lib/openlayer/version.rb
  - manifest.yaml
+ - rbi/google_discovery_engine.rbi
  - rbi/openlayer/client.rbi
  - rbi/openlayer/errors.rbi
  - rbi/openlayer/file_part.rbi
+ - rbi/openlayer/integrations.rbi
  - rbi/openlayer/internal.rbi
  - rbi/openlayer/internal/transport/base_client.rbi
  - rbi/openlayer/internal/transport/pooled_net_requester.rbi
@@ -159,6 +165,8 @@ files:
  - rbi/openlayer/models/projects/test_update_response.rbi
  - rbi/openlayer/models/storage/presigned_url_create_params.rbi
  - rbi/openlayer/models/storage/presigned_url_create_response.rbi
+ - rbi/openlayer/models/test_evaluate_params.rbi
+ - rbi/openlayer/models/test_evaluate_response.rbi
  - rbi/openlayer/request_options.rbi
  - rbi/openlayer/resources/commits.rbi
  - rbi/openlayer/resources/commits/test_results.rbi
@@ -172,6 +180,7 @@ files:
  - rbi/openlayer/resources/projects/tests.rbi
  - rbi/openlayer/resources/storage.rbi
  - rbi/openlayer/resources/storage/presigned_url.rbi
+ - rbi/openlayer/resources/tests.rbi
  - rbi/openlayer/version.rbi
  - sig/openlayer/client.rbs
  - sig/openlayer/errors.rbs
@@ -227,6 +236,8 @@ files:
  - sig/openlayer/models/projects/test_update_response.rbs
  - sig/openlayer/models/storage/presigned_url_create_params.rbs
  - sig/openlayer/models/storage/presigned_url_create_response.rbs
+ - sig/openlayer/models/test_evaluate_params.rbs
+ - sig/openlayer/models/test_evaluate_response.rbs
  - sig/openlayer/request_options.rbs
  - sig/openlayer/resources/commits.rbs
  - sig/openlayer/resources/commits/test_results.rbs
@@ -240,6 +251,7 @@ files:
  - sig/openlayer/resources/projects/tests.rbs
  - sig/openlayer/resources/storage.rbs
  - sig/openlayer/resources/storage/presigned_url.rbs
+ - sig/openlayer/resources/tests.rbs
  - sig/openlayer/version.rbs
  homepage: https://gemdocs.org/gems/openlayer
  licenses: []