openlayer 0.6.0 → 0.7.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/CHANGELOG.md +8 -0
- data/README.md +1 -1
- data/lib/openlayer/integrations/google_conversational_search_tracer.rb +352 -0
- data/lib/openlayer/version.rb +1 -1
- data/rbi/google_discovery_engine.rbi +44 -0
- data/rbi/openlayer/integrations.rbi +22 -0
- metadata +4 -1

checksums.yaml
CHANGED

@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 676a996fd68cfe29c8abf62e4d4406e93842df91c067b4733deadf5ef7e6d583
+  data.tar.gz: 47d1c0944fa95169bfc003b26b7b12698820fec07503d562f88ee7baeaf192c1
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 69fc4228f987067c305b5078f6ce37d1f02a20ccf877cb38c1b578ed70e085ab15429fdfbe41ccdf157a4d42416ae24c28cda425a59b1370c6524186fd70810c
+  data.tar.gz: 10fe98ac7f9ae76e30f30641bf49c0d729a6e2f2718baeb4b5dd33a5a8d2947f82a165cc2fed674cc90b63adaf67d184a90f883a9d83f1ff3b7194bd9fbc5f4f

data/CHANGELOG.md
CHANGED

@@ -1,5 +1,13 @@
 # Changelog
 
+## 0.7.0 (2025-12-17)
+
+Full Changelog: [v0.6.0...v0.7.0](https://github.com/openlayer-ai/openlayer-ruby/compare/v0.6.0...v0.7.0)
+
+### Features
+
+* **closes OPEN-8478:** add ConversationalSearchService tracer to Ruby SDK ([a33bc2e](https://github.com/openlayer-ai/openlayer-ruby/commit/a33bc2eab12db4b8e2d95a66435b1f7fbc1d0397))
+
 ## 0.6.0 (2025-12-17)
 
 Full Changelog: [v0.5.0...v0.6.0](https://github.com/openlayer-ai/openlayer-ruby/compare/v0.5.0...v0.6.0)
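
The new integration is opt-in. Based on the @example documented in the added tracer file below, wiring it up looks roughly like the following sketch; the pipeline ID and serving config are placeholders, and the top-level require "openlayer" for Openlayer::Client is assumed rather than taken from the diff.

# Hedged sketch, not part of the diff: enable tracing, then call answer_query as usual.
require "openlayer"                                                    # assumed entry point for Openlayer::Client
require "openlayer/integrations/google_conversational_search_tracer"
require "google/cloud/discovery_engine/v1"

google_client = Google::Cloud::DiscoveryEngine::V1::ConversationalSearchService::Client.new
openlayer = Openlayer::Client.new(api_key: ENV["OPENLAYER_API_KEY"])

Openlayer::Integrations::GoogleConversationalSearchTracer.trace_client(
  google_client,
  openlayer_client: openlayer,
  inference_pipeline_id: "your-pipeline-id"                            # placeholder
)

# Subsequent calls are traced automatically and return the original response unchanged.
response = google_client.answer_query(
  serving_config: "projects/.../servingConfigs/default",               # placeholder
  query: { text: "What is the meaning of life?" }
)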

data/lib/openlayer/integrations/google_conversational_search_tracer.rb
ADDED

@@ -0,0 +1,352 @@
+# frozen_string_literal: true
+# typed: false
+
+require "json"
+require "time"
+
+module Openlayer
+  module Integrations
+    # Tracer for Google Cloud DiscoveryEngine ConversationalSearchService
+    #
+    # This class provides integration with Google's ConversationalSearchService
+    # to automatically trace answer_query calls and send them to the Openlayer platform.
+    #
+    # @example Basic usage
+    #   require 'openlayer/integrations/google_conversational_search_tracer'
+    #   require 'google/cloud/discovery_engine/v1'
+    #
+    #   google_client = Google::Cloud::DiscoveryEngine::V1::ConversationalSearchService::Client.new
+    #   openlayer = Openlayer::Client.new(api_key: ENV['OPENLAYER_API_KEY'])
+    #
+    #   Openlayer::Integrations::GoogleConversationalSearchTracer.trace_client(
+    #     google_client,
+    #     openlayer_client: openlayer,
+    #     inference_pipeline_id: 'your-pipeline-id'
+    #   )
+    #
+    #   # Now all answer_query calls are automatically traced
+    #   response = google_client.answer_query(
+    #     serving_config: "projects/.../servingConfigs/default",
+    #     query: { text: "What is the meaning of life?" }
+    #   )
+    class GoogleConversationalSearchTracer
+      # Enable tracing on a Google ConversationalSearchService client
+      #
+      # @param client [Google::Cloud::DiscoveryEngine::V1::ConversationalSearchService::Client]
+      #   The Google client instance to trace
+      # @param openlayer_client [Openlayer::Client]
+      #   The Openlayer client instance for sending traces
+      # @param inference_pipeline_id [String]
+      #   The Openlayer inference pipeline ID to send traces to
+      # @return [void]
+      def self.trace_client(client, openlayer_client:, inference_pipeline_id:)
+        # Store original method reference
+        original_answer_query = client.method(:answer_query)
+
+        # Define traced wrapper method
+        client.define_singleton_method(:answer_query) do |*args, **kwargs, &block|
+          # Capture start time
+          start_time = Time.now
+
+          # Execute the original method
+          response = original_answer_query.call(*args, **kwargs, &block)
+
+          # Capture end time
+          end_time = Time.now
+
+          # Send trace to Openlayer (with error handling)
+          begin
+            GoogleConversationalSearchTracer.send_trace(
+              args: args,
+              kwargs: kwargs,
+              response: response,
+              start_time: start_time,
+              end_time: end_time,
+              openlayer_client: openlayer_client,
+              inference_pipeline_id: inference_pipeline_id
+            )
+          rescue StandardError => e
+            # Never break the user's application due to tracing errors
+            GoogleConversationalSearchTracer.warn_if_debug("[Openlayer] Failed to send trace: #{e.message}")
+            GoogleConversationalSearchTracer.warn_if_debug("[Openlayer] #{e.backtrace.first(3).join("\n")}") if e.backtrace
+          end
+
+          # Always return the original response
+          response
+        end
+
+        nil
+      end
+
+      # Send trace data to Openlayer platform
+      #
+      # @param args [Array] Original method positional arguments
+      # @param kwargs [Hash] Original method keyword arguments
+      # @param response [Google::Cloud::DiscoveryEngine::V1::AnswerQueryResponse] The API response
+      # @param start_time [Time] Request start time
+      # @param end_time [Time] Request end time
+      # @param openlayer_client [Openlayer::Client] Openlayer client instance
+      # @param inference_pipeline_id [String] Pipeline ID
+      # @return [void]
+      def self.send_trace(args:, kwargs:, response:, start_time:, end_time:, openlayer_client:, inference_pipeline_id:)
+        # Calculate latency
+        latency_ms = ((end_time - start_time) * 1000).round(2)
+
+        # Extract query from request
+        query_text = extract_query(args, kwargs)
+
+        # Extract answer and metadata from response
+        answer_data = extract_answer_data(response)
+
+        # Extract additional metadata
+        metadata = extract_metadata(args, kwargs, response, latency_ms)
+
+        # Rough estimate of prompt and completion tokens
+        prompt_tokens = (query_text.length / 4.0).ceil
+        completion_tokens = (answer_data[:answer_text].length / 4.0).ceil
+
+        # Build trace data in Openlayer format
+        trace_data = {
+          config: {
+            inputVariableNames: ["query"],
+            outputColumnName: "answer",
+            latencyColumnName: "latency_ms",
+            timestampColumnName: "timestamp"
+          },
+          rows: [
+            {
+              query: query_text,
+              answer: answer_data[:answer_text],
+              latency_ms: latency_ms,
+              timestamp: start_time.to_i,
+              metadata: metadata,
+              steps: [
+                {
+                  name: "Conversational Search answer_query",
+                  type: "chat_completion",
+                  provider: "Google",
+                  startTime: start_time.to_i,
+                  endTime: end_time.to_i,
+                  latency: latency_ms,
+                  metadata: metadata,
+                  inputs: {
+                    prompt: [
+                      {role: "user", content: query_text}
+                    ]
+                  },
+                  output: answer_data[:answer_text],
+                  promptTokens: prompt_tokens,
+                  completionTokens: completion_tokens,
+                  tokens: prompt_tokens + completion_tokens,
+                  model: "google-discovery-engine"
+                }
+              ]
+            }
+          ]
+        }
+
+        # Send to Openlayer
+        openlayer_client
+          .inference_pipelines
+          .data
+          .stream(
+            inference_pipeline_id,
+            **trace_data
+          )
+      end
+
+      # Extract query text from request arguments
+      #
+      # Handles both calling styles:
+      #   1. Request object: answer_query(request_object)
+      #   2. Keyword arguments: answer_query(query: {...}, serving_config: ...)
+      #
+      # @param args [Array] Positional arguments
+      # @param kwargs [Hash] Keyword arguments
+      # @return [String, nil] The query text or nil if not found
+      def self.extract_query(args, kwargs)
+        query_obj = nil
+
+        # Try to get query from request object (first positional arg)
+        if args.length.positive? && args[0].respond_to?(:query)
+          query_obj = args[0].query
+        # Try to get query from keyword arguments
+        elsif kwargs[:query]
+          query_obj = kwargs[:query]
+        end
+
+        return nil if query_obj.nil?
+
+        # Extract text from query object
+        if query_obj.respond_to?(:text)
+          query_obj.text
+        elsif query_obj.is_a?(Hash) && query_obj[:text]
+          query_obj[:text]
+        elsif query_obj.is_a?(Hash) && query_obj["text"]
+          query_obj["text"]
+        else
+          query_obj.to_s
+        end
+      rescue StandardError => e
+        warn_if_debug("[Openlayer] Failed to extract query: #{e.message}")
+        nil
+      end
+
+      # Extract answer data from response
+      #
+      # @param response [Google::Cloud::DiscoveryEngine::V1::AnswerQueryResponse] API response
+      # @return [Hash] Hash containing answer_text and other answer fields
+      def self.extract_answer_data(response)
+        return {answer_text: nil} unless response.respond_to?(:answer)
+
+        answer = response.answer
+        return {answer_text: nil} if answer.nil?
+
+        {
+          answer_text: safe_extract(answer, :answer_text),
+          state: safe_extract(answer, :state)&.to_s,
+          grounding_score: safe_extract(answer, :grounding_score),
+          create_time: extract_timestamp(answer, :create_time),
+          complete_time: extract_timestamp(answer, :complete_time),
+          citations_count: safe_count(answer, :citations),
+          references_count: safe_count(answer, :references)
+        }
+      rescue StandardError => e
+        warn_if_debug("[Openlayer] Failed to extract answer data: #{e.message}")
+        {answer_text: nil}
+      end
+
+      # Extract metadata from request and response
+      #
+      # @param args [Array] Positional arguments
+      # @param kwargs [Hash] Keyword arguments
+      # @param response [Google::Cloud::DiscoveryEngine::V1::AnswerQueryResponse] API response
+      # @param latency_ms [Float] Request latency in milliseconds
+      # @return [Hash] Metadata hash
+      def self.extract_metadata(args, kwargs, response, latency_ms)
+        answer_data = extract_answer_data(response)
+
+        metadata = {
+          provider: "google",
+          service: "conversational_search",
+          latency_ms: latency_ms
+        }
+
+        # Add answer metadata
+        metadata[:grounding_score] = answer_data[:grounding_score] if answer_data[:grounding_score]
+        metadata[:state] = answer_data[:state] if answer_data[:state]
+        metadata[:citations_count] = answer_data[:citations_count] if answer_data[:citations_count]
+        metadata[:references_count] = answer_data[:references_count] if answer_data[:references_count]
+
+        # Add request metadata
+        metadata[:serving_config] = extract_serving_config(args, kwargs)
+        metadata[:session] = extract_session(args, kwargs)
+
+        # Add timing metadata
+        if answer_data[:create_time] && answer_data[:complete_time]
+          generation_time_ms = ((answer_data[:complete_time] - answer_data[:create_time]) * 1000).round(2)
+          metadata[:generation_time_ms] = generation_time_ms
+        end
+
+        metadata.compact
+      rescue StandardError => e
+        warn_if_debug("[Openlayer] Failed to extract metadata: #{e.message}")
+        {
+          provider: "google",
+          service: "conversational_search",
+          latency_ms: latency_ms
+        }
+      end
+
+      # Extract serving_config from request
+      #
+      # @param args [Array] Positional arguments
+      # @param kwargs [Hash] Keyword arguments
+      # @return [String, nil] Serving config or nil
+      def self.extract_serving_config(args, kwargs)
+        if args.length.positive? && args[0].respond_to?(:serving_config)
+          args[0].serving_config
+        elsif kwargs[:serving_config]
+          kwargs[:serving_config]
+        end
+      rescue StandardError
+        nil
+      end
+
+      # Extract session from request
+      #
+      # @param args [Array] Positional arguments
+      # @param kwargs [Hash] Keyword arguments
+      # @return [String, nil] Session ID or nil
+      def self.extract_session(args, kwargs)
+        if args.length.positive? && args[0].respond_to?(:session)
+          args[0].session
+        elsif kwargs[:session]
+          kwargs[:session]
+        end
+      rescue StandardError
+        nil
+      end
+
+      # Safely extract a field from an object
+      #
+      # @param obj [Object] Object to extract from
+      # @param field [Symbol] Field name
+      # @return [Object, nil] Field value or nil
+      def self.safe_extract(obj, field)
+        obj.respond_to?(field) ? obj.public_send(field) : nil
+      rescue StandardError
+        nil
+      end
+
+      # Safely count elements in a collection field
+      #
+      # @param obj [Object] Object containing the collection
+      # @param field [Symbol] Field name
+      # @return [Integer, nil] Count or nil
+      def self.safe_count(obj, field)
+        collection = safe_extract(obj, field)
+        collection.respond_to?(:length) ? collection.length : nil
+      rescue StandardError
+        nil
+      end
+
+      # Extract timestamp and convert to Unix timestamp
+      #
+      # @param obj [Object] Object containing timestamp
+      # @param field [Symbol] Field name
+      # @return [Integer, nil] Unix timestamp or nil
+      def self.extract_timestamp(obj, field)
+        timestamp = safe_extract(obj, field)
+        return nil if timestamp.nil?
+
+        if timestamp.respond_to?(:to_time)
+          timestamp.to_time.to_i
+        elsif timestamp.respond_to?(:to_i)
+          timestamp.to_i
+        end
+      rescue StandardError
+        nil
+      end
+
+      # Log warning message if debug mode is enabled
+      #
+      # @param message [String] Warning message
+      # @return [void]
+      def self.warn_if_debug(message)
+        warn(message) if ENV["OPENLAYER_DEBUG"]
+      end
+
+      # send_trace and warn_if_debug need to be public because they're called
+      # from the singleton method context
+      private_class_method :extract_query,
+                           :extract_answer_data,
+                           :extract_metadata,
+                           :extract_serving_config,
+                           :extract_session,
+                           :safe_extract,
+                           :safe_count,
+                           :extract_timestamp
+    end
+  end
+end
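
For a given traced call, send_trace above streams a single-row payload to inference_pipelines.data.stream. The sketch below shows roughly what that payload looks like, with illustrative values only; the structure mirrors the trace_data hash built in the code, and tracing failures never raise into the application (they are only printed when OPENLAYER_DEBUG is set).

# Hedged sketch of the streamed payload; every value below is made up for illustration.
trace_data = {
  config: {
    inputVariableNames: ["query"],
    outputColumnName: "answer",
    latencyColumnName: "latency_ms",
    timestampColumnName: "timestamp"
  },
  rows: [
    {
      query: "What is the meaning of life?",
      answer: "42.",
      latency_ms: 812.5,
      timestamp: 1_734_451_200,
      metadata: { provider: "google", service: "conversational_search", latency_ms: 812.5 },
      steps: [
        {
          name: "Conversational Search answer_query",
          type: "chat_completion",
          provider: "Google",
          output: "42.",
          model: "google-discovery-engine"
          # plus startTime/endTime, latency, token estimates, and metadata, as built above
        }
      ]
    }
  ]
}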

data/rbi/google_discovery_engine.rbi
ADDED

@@ -0,0 +1,44 @@
+# typed: strong
+# frozen_string_literal: true
+
+# Stub for google-cloud-discovery_engine-v1 gem
+module Google
+  module Cloud
+    module DiscoveryEngine
+      module V1
+        class Query
+          sig { params(text: String).void }
+          def initialize(
+            text:
+          )
+          end
+          sig { returns(String) }
+          def text; end
+        end
+
+        class ConversationalSearchService
+          class Client
+            sig { void }
+            def initialize; end
+
+            sig do
+              params(
+                serving_config: String,
+                query: Query,
+                session: T.nilable(String),
+                kwargs: T.untyped
+              ).returns(T.untyped)
+            end
+            def answer_query(
+              serving_config:,
+              query:,
+              session: nil,
+              **kwargs
+            )
+            end
+          end
+        end
+      end
+    end
+  end
+end
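
The stub above only pins down the surface the tracer touches: Query#text and Client#answer_query with serving_config:, query:, and an optional session:. A call shaped to match that stubbed signature looks roughly like the sketch below; this is illustrative only, the serving config path is a placeholder, and a real client is normally built with its own configuration rather than a bare new.

# Hedged sketch against the stubbed signature; not taken from the gem's documentation.
query = Google::Cloud::DiscoveryEngine::V1::Query.new(text: "What is the meaning of life?")
client = Google::Cloud::DiscoveryEngine::V1::ConversationalSearchService::Client.new

response = client.answer_query(
  serving_config: "projects/.../servingConfigs/default",  # placeholder
  query: query,
  session: nil
)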

data/rbi/openlayer/integrations.rbi
ADDED

@@ -0,0 +1,22 @@
+# typed: strong
+# frozen_string_literal: true
+
+module Openlayer
+  module Integrations
+    class GoogleConversationalSearchTracer
+      sig do
+        params(
+          client: T.untyped,
+          openlayer_client: Openlayer::Client,
+          inference_pipeline_id: String
+        ).void
+      end
+      def self.trace_client(
+        client,
+        openlayer_client:,
+        inference_pipeline_id:
+      )
+      end
+    end
+  end
+end

metadata
CHANGED

@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: openlayer
 version: !ruby/object:Gem::Version
-  version: 0.6.0
+  version: 0.7.0
 platform: ruby
 authors:
 - Openlayer

@@ -39,6 +39,7 @@ files:
 - lib/openlayer/client.rb
 - lib/openlayer/errors.rb
 - lib/openlayer/file_part.rb
+- lib/openlayer/integrations/google_conversational_search_tracer.rb
 - lib/openlayer/internal.rb
 - lib/openlayer/internal/transport/base_client.rb
 - lib/openlayer/internal/transport/pooled_net_requester.rb

@@ -108,9 +109,11 @@ files:
 - lib/openlayer/resources/tests.rb
 - lib/openlayer/version.rb
 - manifest.yaml
+- rbi/google_discovery_engine.rbi
 - rbi/openlayer/client.rbi
 - rbi/openlayer/errors.rbi
 - rbi/openlayer/file_part.rbi
+- rbi/openlayer/integrations.rbi
 - rbi/openlayer/internal.rbi
 - rbi/openlayer/internal/transport/base_client.rbi
 - rbi/openlayer/internal/transport/pooled_net_requester.rbi