exaonruby 1.0.0 → 1.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/CHANGELOG.md +134 -0
- data/README.md +84 -3
- data/exaonruby.gemspec +11 -3
- data/lib/exa/middleware/instrumentation.rb +97 -0
- data/lib/exa/middleware/rate_limiter.rb +72 -0
- data/lib/exa/middleware/request_logger.rb +170 -0
- data/lib/exa/middleware/response_cache.rb +226 -0
- data/lib/exa/rails.rb +157 -0
- data/lib/exa/types.rb +204 -0
- data/lib/exa/utils/parallel.rb +135 -0
- data/lib/exa/utils/sse_client.rb +279 -0
- data/lib/exa/version.rb +1 -1
- data/lib/exa.rb +25 -0
- data/lib/generators/exa/install_generator.rb +94 -0
- metadata +36 -4
|
@@ -0,0 +1,226 @@
|
|
|
1
|
+
# frozen_string_literal: true

# typed: strict

require "digest"
require "json"

module Exa
  module Middleware
    # Response caching middleware for Exa API calls.
    #
    # Caches idempotent endpoints (search, contents, findSimilar)
    # to reduce API costs and improve response times.
    #
    # @example Enable caching with memory store
    #   client = Exa::Client.new(api_key: key) do |config|
    #     config.cache = Exa::Cache::MemoryStore.new
    #     config.cache_ttl = 300 # 5 minutes
    #   end
    #
    # @example With Redis
    #   require 'redis'
    #   client = Exa::Client.new(api_key: key) do |config|
    #     config.cache = Exa::Cache::RedisStore.new(Redis.new)
    #     config.cache_ttl = 3600 # 1 hour
    #   end
    class ResponseCache < Faraday::Middleware
      # Cacheable endpoints (idempotent operations)
      CACHEABLE_PATHS = %w[
        /search
        /contents
        /findSimilar
      ].freeze

      # @param app [Faraday::Middleware] Next middleware
      # @param cache [Object] Cache store (must respond to get/set)
      # @param ttl [Integer] Time to live in seconds
      # @param cacheable_paths [Array<String>] Paths to cache
      def initialize(app, cache:, ttl: 300, cacheable_paths: CACHEABLE_PATHS)
        super(app)
        @cache = cache
        @ttl = ttl
        @cacheable_paths = cacheable_paths
      end

      # Serve a cached response when available; otherwise forward the
      # request and cache successful (HTTP 200) responses.
      #
      # @param env [Faraday::Env] Request environment
      # @return [Faraday::Response]
      def call(env)
        return @app.call(env) unless cacheable?(env)

        cache_key = build_cache_key(env)

        # Try to get from cache
        cached = @cache.get(cache_key)
        return build_cached_response(env, cached) if cached

        # Make request and cache response
        @app.call(env).on_complete do |response_env|
          cache_response(cache_key, response_env) if response_env[:status] == 200
        end
      end

      private

      # A request is cacheable when its path matches a configured endpoint.
      def cacheable?(env)
        path = env[:url].path
        @cacheable_paths.any? { |p| path.include?(p) }
      end

      # Derive a deterministic cache key from the request.
      #
      # BUGFIX: the query string must participate in the key — previously
      # only the path was hashed, so GET requests that differed only in
      # their query parameters shared a single cache entry.
      def build_cache_key(env)
        components = [
          env[:method].to_s,
          env[:url].path,
          env[:url].query.to_s,
          env[:body].to_s
        ]

        digest = Digest::SHA256.hexdigest(components.join("|"))
        "exa:cache:#{digest}"
      end

      # Persist status, headers, and body as JSON under the given key.
      def cache_response(key, env)
        data = {
          status: env[:status],
          headers: env[:response_headers].to_h,
          body: env[:body]
        }

        @cache.set(key, data.to_json, ttl: @ttl)
      end

      # Rehydrate a Faraday::Response from a cached JSON payload.
      def build_cached_response(env, cached_data)
        data = JSON.parse(cached_data, symbolize_names: true)

        env[:status] = data[:status]
        env[:response_headers] = Faraday::Utils::Headers.new(data[:headers])
        env[:body] = data[:body]

        # Mark the response as served from cache
        env[:response_headers]["X-Exa-Cache"] = "HIT"

        Faraday::Response.new(env)
      end
    end

    Faraday::Middleware.register_middleware(exa_cache: ResponseCache)
  end
end
|
|
110
|
+
|
|
111
|
+
module Exa
  module Cache
    # In-memory cache store with TTL support.
    #
    # Thread-safe, suitable for single-process applications.
    # For multi-process or distributed apps, use RedisStore.
    class MemoryStore
      def initialize
        @store = {}
        @expirations = {}
        @mutex = Mutex.new
      end

      # Get value from cache.
      #
      # PERF: checks only the requested key's expiration (O(1)) instead of
      # sweeping every entry on each read as before (O(n) per get). Stale
      # entries for other keys are reclaimed lazily by #stats / their own
      # reads.
      #
      # @param key [String] Cache key
      # @return [String, nil] Cached value or nil
      def get(key)
        @mutex.synchronize do
          expires_at = @expirations[key]
          if expires_at && expires_at < Time.now
            @store.delete(key)
            @expirations.delete(key)
          end
          @store[key]
        end
      end

      # Set value in cache.
      # @param key [String] Cache key
      # @param value [String] Value to cache
      # @param ttl [Integer] Time to live in seconds
      def set(key, value, ttl: 300)
        @mutex.synchronize do
          @store[key] = value
          @expirations[key] = Time.now + ttl
        end
      end

      # Delete from cache.
      # @param key [String] Cache key
      def delete(key)
        @mutex.synchronize do
          @store.delete(key)
          @expirations.delete(key)
        end
      end

      # Clear entire cache.
      def clear
        @mutex.synchronize do
          @store.clear
          @expirations.clear
        end
      end

      # Get cache statistics.
      # Sweeps expired entries first so the reported size is accurate.
      # @return [Hash] Stats: { size: Integer } (live entry count)
      def stats
        @mutex.synchronize do
          cleanup_expired
          { size: @store.size }
        end
      end

      private

      # Drop every entry whose expiration is in the past.
      # Must be called while holding @mutex.
      def cleanup_expired
        now = Time.now
        expired_keys = @expirations.select { |_, exp| exp < now }.keys
        expired_keys.each do |key|
          @store.delete(key)
          @expirations.delete(key)
        end
      end
    end

    # Redis cache store for distributed caching.
    #
    # Requires redis gem to be installed.
    #
    # @example
    #   require 'redis'
    #   cache = Exa::Cache::RedisStore.new(Redis.new)
    class RedisStore
      # @param redis [Redis] Redis client instance
      # @param prefix [String] Key prefix
      def initialize(redis, prefix: "exa")
        @redis = redis
        @prefix = prefix
      end

      # @return [String, nil] Cached value or nil
      def get(key)
        @redis.get(prefixed(key))
      end

      # Store value with a TTL (uses SETEX so expiry is atomic).
      def set(key, value, ttl: 300)
        @redis.setex(prefixed(key), ttl, value)
      end

      def delete(key)
        @redis.del(prefixed(key))
      end

      # Remove every key under the configured prefix.
      def clear
        keys = matching_keys
        @redis.del(*keys) if keys.any?
      end

      # @return [Hash] Stats: { size: Integer }
      def stats
        { size: matching_keys.size }
      end

      private

      # Enumerate prefixed keys.
      #
      # BUGFIX: the Redis KEYS command is O(N) over the whole keyspace and
      # blocks the server — unsafe in production. Prefer the incremental
      # SCAN iterator (redis-rb's scan_each); keep KEYS as a fallback for
      # clients/stubs that don't expose it.
      def matching_keys
        if @redis.respond_to?(:scan_each)
          @redis.scan_each(match: "#{@prefix}:*").to_a
        else
          @redis.keys("#{@prefix}:*")
        end
      end

      # Namespace the key unless it is already prefixed.
      def prefixed(key)
        key.start_with?(@prefix) ? key : "#{@prefix}:#{key}"
      end
    end
  end
end
|
data/lib/exa/rails.rb
ADDED
|
@@ -0,0 +1,157 @@
|
|
|
1
|
+
# frozen_string_literal: true

# typed: strict

module Exa
  # Rails integration for the Exa gem
  #
  # Provides generators, Active Job adapters, and Rails-specific configuration.
  #
  # Install with: rails generate exa:install
  module Rails
    class Railtie < ::Rails::Railtie
      # Initialize Exa with Rails configuration
      initializer "exa.configure" do |app|
        # Load config from Rails credentials or environment.
        Exa.configure do |config|
          # NOTE: initializer blocks are not executed with the Railtie
          # instance as `self`, so the helper must be a class method and
          # be called through the constant (see Railtie.rails_api_key).
          config.api_key = Railtie.rails_api_key(app)
          config.logger = ::Rails.logger if defined?(::Rails.logger)
          config.timeout = ENV.fetch("EXA_TIMEOUT", 60).to_i
        end
      end

      # Add Exa rake tasks
      rake_tasks do
        namespace :exa do
          desc "Verify Exa API connection"
          task verify: :environment do
            begin
              client = Exa::Client.new
              result = client.search("test", num_results: 1)
              puts "✓ Exa API connection successful"
              puts "  Request ID: #{result.request_id}"
            rescue Exa::Error => e
              puts "✗ Exa API error: #{e.message}"
              exit 1
            end
          end

          desc "Show Exa configuration"
          task config: :environment do
            puts "Exa Configuration:"
            puts "  API Key: #{Exa.configuration&.api_key&.slice(0, 8)}..."
            puts "  Base URL: #{Exa.configuration&.base_url}"
            puts "  Timeout: #{Exa.configuration&.timeout}s"
          end
        end
      end

      # Resolve the API key from Rails credentials, falling back to ENV.
      #
      # BUGFIX: this was a *private instance* method, but the initializer
      # block above is not run with the Railtie instance as `self`, so the
      # call raised NoMethodError at boot. It is now a class method. Also
      # guards `credentials.exa` being nil before indexing into it.
      #
      # @param app [Rails::Application] The booting application
      # @return [String, nil] API key, or nil when unconfigured
      def self.rails_api_key(app)
        credentials = app.credentials
        # Try Rails credentials first
        explicit =
          if credentials.respond_to?(:exa)
            credentials.exa&.dig(:api_key) # &. guards a missing :exa section
          elsif credentials.respond_to?(:dig)
            credentials.dig(:exa, :api_key)
          end
        explicit || ENV["EXA_API_KEY"]
      end
    end
  end
end
|
|
60
|
+
|
|
61
|
+
# Active Job adapter for async research tasks
#
# @example Create a research job
#   class ExaResearchJob < ApplicationJob
#     include Exa::Rails::ResearchJob
#
#     def perform(instructions)
#       research(instructions) do |task|
#         # Called when research completes
#         save_results(task.output)
#       end
#     end
#   end
module ResearchJob
  extend ActiveSupport::Concern

  included do
    queue_as :exa_research
    # NOTE(review): `:exponentially_longer` is deprecated in favor of
    # `:polynomially_longer` on Rails >= 7.1 — confirm the supported
    # Rails range before changing.
    retry_on Exa::RateLimitError, wait: :exponentially_longer, attempts: 5
    discard_on Exa::AuthenticationError
  end

  # Start a research task and poll until complete.
  #
  # Blocks the job worker, sleeping `poll_interval` seconds between
  # status checks, until the task reaches a terminal status.
  #
  # @param instructions [String] Research instructions
  # @param model [String] Research model
  # @param poll_interval [Integer] Seconds between status checks
  #
  # @yield [Exa::Resources::ResearchTask] Called when complete
  # @return [Exa::Resources::ResearchTask] the completed task
  # @raise [Exa::Error] when the task ends as "failed" or "canceled"
  #
  # NOTE(review): the loop has no overall deadline — a task stuck in a
  # non-terminal status will poll forever; consider a max-wait bound.
  def research(instructions, model: "exa-research", poll_interval: 5)
    client = Exa::Client.new
    task = client.create_research(instructions: instructions, model: model)

    loop do
      # Refresh task state on each iteration
      task = client.get_research(task.research_id)

      case task.status
      when "completed"
        yield task if block_given?
        return task
      when "failed", "canceled"
        # NOTE(review): Exa::Types::ResearchTaskData exposes `error`, not
        # `error_message` — confirm the resource object defines this reader.
        raise Exa::Error, "Research task #{task.status}: #{task.error_message}"
      else
        # Still pending/running — wait before the next status check
        sleep poll_interval
      end
    end
  end
end
|
|
109
|
+
|
|
110
|
+
module Exa
  module Rails
    # Action Cable channel concern for real-time Exa streaming updates.
    #
    # @example Create a channel
    #   class ExaSearchChannel < ApplicationCable::Channel
    #     include Exa::Rails::StreamingChannel
    #
    #     def search(data)
    #       stream_answer(data["query"])
    #     end
    #   end
    module StreamingChannel
      extend ActiveSupport::Concern

      # Event types forwarded to the client together with their payload.
      RELAYED_WITH_DATA = %i[token citation error].freeze

      # Stream answer tokens to the client.
      #
      # @param query [String] Question to answer
      def stream_answer(query)
        Exa::Utils::SSEClient.stream_answer(
          api_key: ENV["EXA_API_KEY"],
          query: query
        ) do |event|
          kind = event[:type]
          if RELAYED_WITH_DATA.include?(kind)
            transmit({ type: kind.to_s, data: event[:data] })
          elsif kind == :done
            # :done carries no payload
            transmit({ type: "done" })
          end
          # Any other event type is intentionally dropped.
        end
      end

      # Stream research progress to the client.
      #
      # Unlike #stream_answer, every event is relayed verbatim.
      #
      # @param instructions [String] Research instructions
      def stream_research(instructions)
        Exa::Utils::SSEClient.stream_research(
          api_key: ENV["EXA_API_KEY"],
          instructions: instructions
        ) do |event|
          transmit({ type: event[:type].to_s, data: event[:data] })
        end
      end
    end
  end
end
|
data/lib/exa/types.rb
ADDED
|
@@ -0,0 +1,204 @@
|
|
|
1
|
+
# frozen_string_literal: true

# typed: strict

require "sorbet-runtime"

module Exa
  # Sorbet T::Struct definitions for Exa API request/response payloads.
  #
  # These are plain typed data carriers: `const` fields only, no behavior.
  # Fields wrapped in T.nilable are optional in the corresponding API call.
  module Types
    extend T::Sig

    # Typed request structures for Search API
    class SearchParams < T::Struct
      const :query, String
      const :type, T.nilable(String)              # one of SEARCH_TYPES
      const :num_results, T.nilable(Integer)
      const :include_domains, T.nilable(T::Array[String])
      const :exclude_domains, T.nilable(T::Array[String])
      const :start_crawl_date, T.nilable(String)
      const :end_crawl_date, T.nilable(String)
      const :start_published_date, T.nilable(String)
      const :end_published_date, T.nilable(String)
      const :include_text, T.nilable(T::Array[String])
      const :exclude_text, T.nilable(T::Array[String])
      const :category, T.nilable(String)          # one of CATEGORIES
      const :country, T.nilable(String)
      const :text, T.nilable(T::Boolean)
      const :highlights, T.nilable(T::Boolean)
      const :summary, T.nilable(T::Boolean)
      const :livecrawl, T.nilable(String)         # one of LIVECRAWL_OPTIONS
    end

    # Request parameters for the contents endpoint.
    class ContentsParams < T::Struct
      const :ids, T::Array[String]
      const :text, T.nilable(T::Boolean)
      const :highlights, T.nilable(T::Boolean)
      const :summary, T.nilable(T::Boolean)
      const :livecrawl, T.nilable(String)
      const :subpages, T.nilable(Integer)
    end

    # Request parameters for the answer endpoint.
    class AnswerParams < T::Struct
      const :query, String
      const :text, T.nilable(T::Boolean)
      const :stream, T.nilable(T::Boolean)
      const :num_results, T.nilable(Integer)
    end

    # Request parameters for creating a research task.
    class ResearchParams < T::Struct
      const :instructions, String
      const :model, T.nilable(String)             # one of RESEARCH_MODELS
      const :output_schema, T.nilable(T::Hash[Symbol, T.untyped])
    end

    # Typed response structures
    # A single search hit.
    class SearchResultData < T::Struct
      const :id, String
      const :url, String
      const :title, T.nilable(String)
      const :score, T.nilable(Float)
      const :published_date, T.nilable(String)
      const :author, T.nilable(String)
      const :text, T.nilable(String)
      const :highlights, T.nilable(T::Array[String])
      const :summary, T.nilable(String)
      const :image, T.nilable(String)
      const :favicon, T.nilable(String)
    end

    # Per-feature cost itemization (values in dollars).
    class CostBreakdown < T::Struct
      const :neural_search, T.nilable(Float)
      const :deep_search, T.nilable(Float)
      const :content_text, T.nilable(Float)
      const :content_highlight, T.nilable(Float)
      const :content_summary, T.nilable(Float)
    end

    # Cost summary attached to billable responses.
    class CostData < T::Struct
      const :total, Float
      const :search, T.nilable(Float)
      const :contents, T.nilable(Float)
      const :breakdown, T.nilable(CostBreakdown)
    end

    class SearchResponseData < T::Struct
      const :request_id, T.nilable(String)
      const :results, T::Array[SearchResultData]
      const :cost_dollars, T.nilable(CostData)
    end

    class AnswerResponseData < T::Struct
      const :answer, String
      const :citations, T::Array[SearchResultData]
      const :cost_dollars, T.nilable(CostData)
    end

    # State of an asynchronous research task.
    class ResearchTaskData < T::Struct
      const :research_id, String
      const :model, String
      const :instructions, String
      const :status, String
      # NOTE(review): Integer timestamps here, String timestamps on webset
      # types below — presumably epoch seconds vs ISO 8601; confirm.
      const :created_at, T.nilable(Integer)
      const :completed_at, T.nilable(Integer)
      const :output, T.nilable(T.any(String, T::Hash[Symbol, T.untyped]))
      const :error, T.nilable(String)
    end

    # Websets types
    class WebsetData < T::Struct
      const :id, String
      const :object, String
      const :status, String                       # one of WEBSET_STATUSES
      const :external_id, T.nilable(String)
      const :title, T.nilable(String)
      const :metadata, T.nilable(T::Hash[Symbol, T.untyped])
      const :created_at, T.nilable(String)
      const :updated_at, T.nilable(String)
    end

    class WebsetItemData < T::Struct
      const :id, String
      const :object, String
      const :webset_id, String
      const :url, String
      const :type, T.nilable(String)
      const :status, T.nilable(String)
      const :created_at, T.nilable(String)
    end

    class MonitorData < T::Struct
      const :id, String
      const :object, String
      const :status, String                       # one of MONITOR_STATUSES
      const :webset_id, String
      const :cron, T.nilable(String)              # cron schedule expression
      const :timezone, T.nilable(String)
      const :next_run_at, T.nilable(String)
      const :created_at, T.nilable(String)
    end

    class ImportData < T::Struct
      const :id, String
      const :object, String
      const :status, String                       # one of IMPORT_STATUSES
      const :format, String
      const :title, T.nilable(String)
      const :count, T.nilable(Integer)
      const :upload_url, T.nilable(String)
      const :upload_valid_until, T.nilable(String)
      const :created_at, T.nilable(String)
    end

    class WebhookData < T::Struct
      const :id, String
      const :object, String
      const :status, String
      const :url, String
      const :events, T::Array[String]             # subset of WEBHOOK_EVENTS
      const :secret, T.nilable(String)
      const :created_at, T.nilable(String)
    end

    class EventData < T::Struct
      const :id, String
      const :object, String
      const :type, String
      const :data, T::Hash[Symbol, T.untyped]
      const :created_at, T.nilable(String)
    end

    # Pagination types
    class PaginatedResponseData < T::Struct
      const :data, T::Array[T.untyped]
      const :has_more, T.nilable(T::Boolean)
      const :next_cursor, T.nilable(String)       # opaque cursor for the next page
    end

    # Type aliases for common patterns
    SearchType = T.type_alias { T.any(String, Symbol) }
    LivecrawlOption = T.type_alias { T.any(String, Symbol) }
    DateInput = T.type_alias { T.any(String, Time, Date) }

    # Valid enum values
    SEARCH_TYPES = T.let(%w[auto neural fast deep].freeze, T::Array[String])
    LIVECRAWL_OPTIONS = T.let(%w[never fallback preferred always].freeze, T::Array[String])
    CATEGORIES = T.let(%w[
      company person research_paper news pdf github tweet
      personal_site financial_report
    ].freeze, T::Array[String])
    RESEARCH_MODELS = T.let(%w[exa-research-fast exa-research exa-research-pro].freeze, T::Array[String])
    WEBSET_STATUSES = T.let(%w[idle pending running paused].freeze, T::Array[String])
    IMPORT_STATUSES = T.let(%w[pending processing completed failed].freeze, T::Array[String])
    MONITOR_STATUSES = T.let(%w[enabled disabled].freeze, T::Array[String])

    # Event names a webhook may subscribe to.
    WEBHOOK_EVENTS = T.let(%w[
      webset.created webset.deleted webset.paused webset.idle
      webset.search.created webset.search.canceled webset.search.completed webset.search.updated
      import.created import.completed
      webset.item.created webset.item.enriched
      monitor.created monitor.updated monitor.deleted
      monitor.run.created monitor.run.completed
      webset.export.created webset.export.completed
    ].freeze, T::Array[String])
  end
end
|