attio 0.2.0 → 0.3.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
data/lib/attio/client.rb CHANGED
@@ -152,5 +152,55 @@ module Attio
152
152
  def threads
153
153
  @threads ||= Resources::Threads.new(self)
154
154
  end
155
+
156
+ # Access to the Workspace Members API resource.
157
+ #
158
+ # @return [Resources::WorkspaceMembers] Workspace Members resource instance
159
+ # @example
160
+ # members = client.workspace_members.list
161
+ def workspace_members
162
+ @workspace_members ||= Resources::WorkspaceMembers.new(self)
163
+ end
164
+
165
+ # Access to the Deals API resource.
166
+ #
167
+ # @return [Resources::Deals] Deals resource instance
168
+ # @example
169
+ # deals = client.deals.list
170
+ def deals
171
+ @deals ||= Resources::Deals.new(self)
172
+ end
173
+
174
+ # Access to the Meta API resource.
175
+ #
176
+ # @return [Resources::Meta] Meta resource instance
177
+ # @example
178
+ # info = client.meta.identify
179
+ def meta
180
+ @meta ||= Resources::Meta.new(self)
181
+ end
182
+
183
+ # Access to the Bulk Operations API resource.
184
+ #
185
+ # @return [Resources::Bulk] Bulk operations resource instance
186
+ # @example
187
+ # client.bulk.create_records(object: 'companies', records: [...])
188
+ def bulk
189
+ @bulk ||= Resources::Bulk.new(self)
190
+ end
191
+
192
+ # Get or set the rate limiter for this client.
193
+ #
194
+ # @param limiter [RateLimiter] Optional rate limiter to set
195
+ # @return [RateLimiter] The rate limiter instance
196
+ # @example
197
+ # client.rate_limiter = Attio::RateLimiter.new(max_requests: 100)
198
+ def rate_limiter(limiter = nil)
199
+ if limiter
200
+ @rate_limiter = limiter
201
+ else
202
+ @rate_limiter ||= RateLimiter.new
203
+ end
204
+ end
155
205
  end
156
206
  end
data/lib/attio/errors.rb CHANGED
@@ -1,11 +1,39 @@
1
1
  # frozen_string_literal: true
2
2
 
3
3
module Attio
  # Base class for every error raised by the Attio client.
  class Error < StandardError
    attr_reader :response, :code

    # @param message [String, nil] human-readable description
    # @param response [Object, nil] raw API response, if available
    # @param code [Object, nil] API error code, if available
    def initialize(message = nil, response: nil, code: nil)
      @response = response
      @code = code
      super(message)
    end
  end

  # Raised when authentication fails (401).
  class AuthenticationError < Error; end

  # Raised when a resource is not found (404).
  class NotFoundError < Error; end

  # Raised when validation fails (400/422).
  class ValidationError < Error; end

  # Raised when the rate limit is exceeded (429).
  class RateLimitError < Error
    attr_reader :retry_after

    # @param message [String, nil] human-readable description
    # @param retry_after [Numeric, nil] seconds until the limit resets
    # @param options [Hash] forwarded to Error (+response:+, +code:+)
    def initialize(message = nil, retry_after: nil, **options)
      @retry_after = retry_after
      super(message, **options)
    end
  end

  # Raised when a server error occurs (5xx).
  class ServerError < Error; end

  # Raised for generic API errors.
  class APIError < Error; end
end
@@ -57,6 +57,10 @@ module Attio
57
57
  headers: headers.merge("Content-Type" => "application/json"),
58
58
  timeout: timeout,
59
59
  connecttimeout: timeout,
60
+ # SSL/TLS security settings
61
+ ssl_verifypeer: true,
62
+ ssl_verifyhost: 2,
63
+ followlocation: false, # Prevent following redirects for security
60
64
  }.merge(options)
61
65
 
62
66
  request = Typhoeus::Request.new(url, request_options)
@@ -0,0 +1,212 @@
1
+ # frozen_string_literal: true
2
+
3
+ module Attio
4
+ # Rate limiter with intelligent retry and backoff strategies
5
+ #
6
+ # @example Using the rate limiter
7
+ # limiter = Attio::RateLimiter.new(
8
+ # max_requests: 100,
9
+ # window_seconds: 60,
10
+ # max_retries: 3
11
+ # )
12
+ #
13
+ # limiter.execute { client.records.list }
14
+ class RateLimiter
15
+ attr_reader :max_requests, :window_seconds, :max_retries
16
+ attr_accessor :current_limit, :remaining, :reset_at
17
+
18
+ # Initialize a new rate limiter
19
+ #
20
+ # @param max_requests [Integer] Maximum requests per window
21
+ # @param window_seconds [Integer] Time window in seconds
22
+ # @param max_retries [Integer] Maximum retry attempts
23
+ # @param enable_jitter [Boolean] Add jitter to backoff delays
24
+ def initialize(max_requests: 1000, window_seconds: 3600, max_retries: 3, enable_jitter: true)
25
+ @max_requests = max_requests
26
+ @window_seconds = window_seconds
27
+ @max_retries = max_retries
28
+ @enable_jitter = enable_jitter
29
+
30
+ @current_limit = max_requests
31
+ @remaining = max_requests
32
+ @reset_at = Time.now + window_seconds
33
+
34
+ @mutex = Mutex.new
35
+ @request_queue = []
36
+ @request_times = []
37
+ end
38
+
39
+ # Execute a block with rate limiting
40
+ #
41
+ # @yield The block to execute
42
+ # @return The result of the block
43
+ def execute
44
+ raise ArgumentError, "Block required" unless block_given?
45
+
46
+ @mutex.synchronize do
47
+ wait_if_needed
48
+ track_request
49
+ end
50
+
51
+ attempt = 0
52
+ begin
53
+ result = yield
54
+ # Thread-safe header update
55
+ @mutex.synchronize do
56
+ update_from_headers(result) if result.is_a?(Hash) && result["_headers"]
57
+ end
58
+ result
59
+ rescue Attio::RateLimitError => e
60
+ attempt += 1
61
+ raise e unless attempt <= @max_retries
62
+
63
+ wait_time = calculate_backoff(attempt, e)
64
+ sleep(wait_time)
65
+ retry
66
+ end
67
+ end
68
+
69
+ # Check if rate limit is exceeded
70
+ #
71
+ # @return [Boolean] True if rate limit would be exceeded
72
+ def rate_limited?
73
+ @mutex.synchronize do
74
+ cleanup_old_requests
75
+ @request_times.size >= @max_requests
76
+ end
77
+ end
78
+
79
+ # Get current rate limit status
80
+ #
81
+ # @return [Hash] Current status
82
+ def status
83
+ @mutex.synchronize do
84
+ cleanup_old_requests
85
+ {
86
+ limit: @current_limit,
87
+ remaining: [@remaining, @max_requests - @request_times.size].min,
88
+ reset_at: @reset_at,
89
+ reset_in: [@reset_at - Time.now, 0].max.to_i,
90
+ current_usage: @request_times.size,
91
+ }
92
+ end
93
+ end
94
+
95
+ # Update rate limit info from response headers
96
+ # NOTE: This method should be called within a mutex lock
97
+ #
98
+ # @param response [Hash] Response containing headers
99
+ private def update_from_headers(response)
100
+ return unless response.is_a?(Hash)
101
+
102
+ headers = response["_headers"] || {}
103
+
104
+ @current_limit = headers["x-ratelimit-limit"].to_i if headers["x-ratelimit-limit"]
105
+ @remaining = headers["x-ratelimit-remaining"].to_i if headers["x-ratelimit-remaining"]
106
+ @reset_at = Time.at(headers["x-ratelimit-reset"].to_i) if headers["x-ratelimit-reset"]
107
+ end
108
+
109
+ # Reset the rate limiter
110
+ def reset!
111
+ @mutex.synchronize do
112
+ @request_times.clear
113
+ @remaining = @max_requests
114
+ @reset_at = Time.now + @window_seconds
115
+ end
116
+ end
117
+
118
+ # Queue a request for later execution
119
+ #
120
+ # @param priority [Integer] Priority (lower = higher priority)
121
+ # @yield Block to execute
122
+ def queue_request(priority: 5, &block)
123
+ @mutex.synchronize do
124
+ @request_queue << { priority: priority, block: block, queued_at: Time.now }
125
+ @request_queue.sort_by! { |r| [r[:priority], r[:queued_at]] }
126
+ end
127
+ end
128
+
129
+ # Process queued requests
130
+ #
131
+ # @param max_per_batch [Integer] Maximum requests to process
132
+ # @return [Array] Results from processed requests
133
+ def process_queue(max_per_batch: 10)
134
+ results = []
135
+ processed = 0
136
+
137
+ while processed < max_per_batch
138
+ request = @mutex.synchronize { @request_queue.shift }
139
+ break unless request
140
+
141
+ begin
142
+ result = execute(&request[:block])
143
+ results << { success: true, result: result }
144
+ rescue StandardError => e
145
+ results << { success: false, error: e }
146
+ end
147
+
148
+ processed += 1
149
+ end
150
+
151
+ results
152
+ end
153
+
154
+ private def wait_if_needed
155
+ cleanup_old_requests
156
+
157
+ if @request_times.size >= @max_requests
158
+ wait_time = @request_times.first + @window_seconds - Time.now
159
+ if wait_time > 0
160
+ sleep(wait_time)
161
+ cleanup_old_requests
162
+ end
163
+ end
164
+
165
+ return unless @remaining <= 0 && @reset_at > Time.now
166
+
167
+ wait_time = @reset_at - Time.now
168
+ sleep(wait_time) if wait_time > 0
169
+ end
170
+
171
+ private def track_request
172
+ @request_times << Time.now
173
+ @remaining = [@remaining - 1, 0].max
174
+ end
175
+
176
+ private def cleanup_old_requests
177
+ cutoff = Time.now - @window_seconds
178
+ @request_times.reject! { |time| time < cutoff }
179
+ end
180
+
181
+ private def calculate_backoff(attempt, error = nil)
182
+ base_wait = 2**attempt
183
+
184
+ # Use server-provided retry-after if available
185
+ base_wait = error.retry_after if error && error.respond_to?(:retry_after) && error.retry_after
186
+
187
+ # Add jitter to prevent thundering herd
188
+ if @enable_jitter
189
+ jitter = rand * base_wait * 0.1
190
+ base_wait + jitter
191
+ else
192
+ base_wait
193
+ end
194
+ end
195
+ end
196
+
197
+ # Middleware for automatic rate limiting
198
+ class RateLimitMiddleware
199
+ def initialize(app, rate_limiter)
200
+ @app = app
201
+ @rate_limiter = rate_limiter
202
+ end
203
+
204
+ def call(env)
205
+ @rate_limiter.execute do
206
+ response = @app.call(env)
207
+ # Headers are automatically updated within execute block
208
+ response
209
+ end
210
+ end
211
+ end
212
+ end
@@ -54,6 +54,16 @@ module Attio
54
54
  raise ArgumentError, "#{field_name} is required"
55
55
  end
56
56
 
57
+ # Validates that a hash parameter is present
58
+ # @param value [Hash] The hash to validate
59
+ # @param field_name [String] The field name for the error message
60
+ # @raise [ArgumentError] if value is nil or not a hash
61
+ private def validate_required_hash!(value, field_name)
62
+ return if value.is_a?(Hash) && !value.nil?
63
+
64
+ raise ArgumentError, "#{field_name} must be a hash"
65
+ end
66
+
57
67
  # Validates parent object and record ID together
58
68
  # @param parent_object [String] The parent object type
59
69
  # @param parent_record_id [String] The parent record ID
@@ -63,14 +73,15 @@ module Attio
63
73
  validate_required_string!(parent_record_id, "Parent record ID")
64
74
  end
65
75
 
66
- private def request(method, path, params = {})
76
+ private def request(method, path, params = {}, _headers = {})
77
+ # Path is already safely constructed by the resource methods
67
78
  connection = client.connection
68
79
 
69
80
  case method
70
81
  when :get
71
82
  handle_get_request(connection, path, params)
72
83
  when :post
73
- connection.post(path, params)
84
+ handle_post_request(connection, path, params)
74
85
  when :patch
75
86
  connection.patch(path, params)
76
87
  when :put
@@ -86,6 +97,10 @@ module Attio
86
97
  params.empty? ? connection.get(path) : connection.get(path, params)
87
98
  end
88
99
 
100
+ private def handle_post_request(connection, path, params)
101
+ params.empty? ? connection.post(path) : connection.post(path, params)
102
+ end
103
+
89
104
  private def handle_delete_request(connection, path, params)
90
105
  params.empty? ? connection.delete(path) : connection.delete(path, params)
91
106
  end
@@ -0,0 +1,290 @@
1
+ # frozen_string_literal: true
2
+
3
+ module Attio
4
+ module Resources
5
+ # Bulk operations for efficient batch processing
6
+ #
7
+ # @example Bulk create records
8
+ # client.bulk.create_records(
9
+ # object: "companies",
10
+ # records: [
11
+ # { name: "Acme Corp", domain: "acme.com" },
12
+ # { name: "Tech Co", domain: "techco.com" }
13
+ # ]
14
+ # )
15
+ #
16
+ # @example Bulk update records
17
+ # client.bulk.update_records(
18
+ # object: "people",
19
+ # updates: [
20
+ # { id: "person_123", data: { title: "CEO" } },
21
+ # { id: "person_456", data: { title: "CTO" } }
22
+ # ]
23
+ # )
24
+ class Bulk < Base
25
+ # Maximum number of records per bulk operation
26
+ MAX_BATCH_SIZE = 100
27
+
28
+ # Bulk create multiple records
29
+ #
30
+ # @param object [String] The object type (companies, people, etc.)
31
+ # @param records [Array<Hash>] Array of record data to create
32
+ # @param options [Hash] Additional options
33
+ # @option options [Boolean] :partial_success Allow partial success (default: false)
34
+ # @option options [Boolean] :return_records Return created records (default: true)
35
+ # @return [Hash] Results including created records and any errors
36
+ def create_records(object:, records:, options: {})
37
+ validate_required_string!(object, "Object type")
38
+ validate_bulk_records!(records, "create")
39
+
40
+ batches = records.each_slice(MAX_BATCH_SIZE).to_a
41
+ results = []
42
+
43
+ batches.each_with_index do |batch, index|
44
+ body = {
45
+ records: batch.map { |record| { data: record } },
46
+ partial_success: options.fetch(:partial_success, false),
47
+ return_records: options.fetch(:return_records, true),
48
+ }
49
+
50
+ result = request(:post, "objects/#{object}/records/bulk", body)
51
+ results << result.merge("batch" => index + 1)
52
+ end
53
+
54
+ merge_batch_results(results)
55
+ end
56
+
57
+ # Bulk update multiple records
58
+ #
59
+ # @param object [String] The object type
60
+ # @param updates [Array<Hash>] Array of updates with :id and :data keys
61
+ # @param options [Hash] Additional options
62
+ # @option options [Boolean] :partial_success Allow partial success (default: false)
63
+ # @option options [Boolean] :return_records Return updated records (default: true)
64
+ # @return [Hash] Results including updated records and any errors
65
+ def update_records(object:, updates:, options: {})
66
+ validate_required_string!(object, "Object type")
67
+ validate_bulk_updates!(updates)
68
+
69
+ batches = updates.each_slice(MAX_BATCH_SIZE).to_a
70
+ results = []
71
+
72
+ batches.each_with_index do |batch, index|
73
+ body = {
74
+ updates: batch,
75
+ partial_success: options.fetch(:partial_success, false),
76
+ return_records: options.fetch(:return_records, true),
77
+ }
78
+
79
+ result = request(:patch, "objects/#{object}/records/bulk", body)
80
+ results << result.merge("batch" => index + 1)
81
+ end
82
+
83
+ merge_batch_results(results)
84
+ end
85
+
86
+ # Bulk delete multiple records
87
+ #
88
+ # @param object [String] The object type
89
+ # @param ids [Array<String>] Array of record IDs to delete
90
+ # @param options [Hash] Additional options
91
+ # @option options [Boolean] :partial_success Allow partial success (default: false)
92
+ # @return [Hash] Results including deletion confirmations and any errors
93
+ def delete_records(object:, ids:, options: {})
94
+ validate_required_string!(object, "Object type")
95
+ validate_bulk_ids!(ids)
96
+
97
+ batches = ids.each_slice(MAX_BATCH_SIZE).to_a
98
+ results = []
99
+
100
+ batches.each_with_index do |batch, index|
101
+ body = {
102
+ ids: batch,
103
+ partial_success: options.fetch(:partial_success, false),
104
+ }
105
+
106
+ result = request(:delete, "objects/#{object}/records/bulk", body)
107
+ results << result.merge("batch" => index + 1)
108
+ end
109
+
110
+ merge_batch_results(results)
111
+ end
112
+
113
+ # Bulk upsert records (create or update based on matching criteria)
114
+ #
115
+ # @param object [String] The object type
116
+ # @param records [Array<Hash>] Records to upsert
117
+ # @param match_attribute [String] Attribute to match on (e.g., "email", "domain")
118
+ # @param options [Hash] Additional options
119
+ # @return [Hash] Results including created/updated records
120
+ def upsert_records(object:, records:, match_attribute:, options: {})
121
+ validate_required_string!(object, "Object type")
122
+ validate_required_string!(match_attribute, "Match attribute")
123
+ validate_bulk_records!(records, "upsert")
124
+
125
+ batches = records.each_slice(MAX_BATCH_SIZE).to_a
126
+ results = []
127
+
128
+ batches.each_with_index do |batch, index|
129
+ body = {
130
+ records: batch.map { |record| { data: record } },
131
+ match_attribute: match_attribute,
132
+ partial_success: options.fetch(:partial_success, false),
133
+ return_records: options.fetch(:return_records, true),
134
+ }
135
+
136
+ result = request(:put, "objects/#{object}/records/bulk", body)
137
+ results << result.merge("batch" => index + 1)
138
+ end
139
+
140
+ merge_batch_results(results)
141
+ end
142
+
143
+ # Bulk add entries to a list
144
+ #
145
+ # @param list_id [String] The list ID
146
+ # @param entries [Array<Hash>] Array of entries to add
147
+ # @param options [Hash] Additional options
148
+ # @return [Hash] Results including added entries
149
+ def add_list_entries(list_id:, entries:, options: {})
150
+ validate_id!(list_id, "List")
151
+ validate_bulk_records!(entries, "add to list")
152
+
153
+ batches = entries.each_slice(MAX_BATCH_SIZE).to_a
154
+ results = []
155
+
156
+ batches.each_with_index do |batch, index|
157
+ body = {
158
+ entries: batch,
159
+ partial_success: options.fetch(:partial_success, false),
160
+ }
161
+
162
+ result = request(:post, "lists/#{list_id}/entries/bulk", body)
163
+ results << result.merge("batch" => index + 1)
164
+ end
165
+
166
+ merge_batch_results(results)
167
+ end
168
+
169
+ # Bulk remove entries from a list
170
+ #
171
+ # @param list_id [String] The list ID
172
+ # @param entry_ids [Array<String>] Array of entry IDs to remove
173
+ # @param options [Hash] Additional options
174
+ # @return [Hash] Results including removal confirmations
175
+ def remove_list_entries(list_id:, entry_ids:, options: {})
176
+ validate_id!(list_id, "List")
177
+ validate_bulk_ids!(entry_ids)
178
+
179
+ batches = entry_ids.each_slice(MAX_BATCH_SIZE).to_a
180
+ results = []
181
+
182
+ batches.each_with_index do |batch, index|
183
+ body = {
184
+ entry_ids: batch,
185
+ partial_success: options.fetch(:partial_success, false),
186
+ }
187
+
188
+ result = request(:delete, "lists/#{list_id}/entries/bulk", body)
189
+ results << result.merge("batch" => index + 1)
190
+ end
191
+
192
+ merge_batch_results(results)
193
+ end
194
+
195
+ private def validate_bulk_records!(records, operation)
196
+ raise ArgumentError, "Records array is required for bulk #{operation}" if records.nil?
197
+ raise ArgumentError, "Records must be an array for bulk #{operation}" unless records.is_a?(Array)
198
+ raise ArgumentError, "Records array cannot be empty for bulk #{operation}" if records.empty?
199
+ raise ArgumentError, "Too many records (max #{MAX_BATCH_SIZE * 10})" if records.size > MAX_BATCH_SIZE * 10
200
+
201
+ records.each_with_index do |record, index|
202
+ raise ArgumentError, "Record at index #{index} must be a hash" unless record.is_a?(Hash)
203
+ end
204
+ end
205
+
206
+ private def validate_bulk_updates!(updates)
207
+ validate_array!(updates, "Updates", "bulk update")
208
+ validate_max_size!(updates, "updates")
209
+
210
+ updates.each_with_index do |update, index|
211
+ validate_update_item!(update, index)
212
+ end
213
+ end
214
+
215
+ private def validate_update_item!(update, index)
216
+ raise ArgumentError, "Update at index #{index} must be a hash" unless update.is_a?(Hash)
217
+ raise ArgumentError, "Update at index #{index} must have an :id" unless update[:id]
218
+ raise ArgumentError, "Update at index #{index} must have :data" unless update[:data]
219
+ end
220
+
221
+ private def validate_bulk_ids!(ids)
222
+ validate_array!(ids, "IDs", "bulk operation")
223
+ validate_max_size!(ids, "IDs")
224
+
225
+ ids.each_with_index do |id, index|
226
+ validate_id_item!(id, index)
227
+ end
228
+ end
229
+
230
+ private def validate_id_item!(id, index)
231
+ return unless id.nil? || id.to_s.strip.empty?
232
+
233
+ raise ArgumentError, "ID at index #{index} cannot be nil or empty"
234
+ end
235
+
236
+ private def validate_array!(array, name, operation)
237
+ raise ArgumentError, "#{name} array is required for #{operation}" if array.nil?
238
+ raise ArgumentError, "#{name} must be an array for #{operation}" unless array.is_a?(Array)
239
+ raise ArgumentError, "#{name} array cannot be empty for #{operation}" if array.empty?
240
+ end
241
+
242
+ private def validate_max_size!(array, name)
243
+ max = MAX_BATCH_SIZE * 10
244
+ return unless array.size > max
245
+
246
+ raise ArgumentError, "Too many #{name} (max #{max})"
247
+ end
248
+
249
+ private def merge_batch_results(results)
250
+ merged = initialize_merged_result(results.size)
251
+
252
+ results.each do |result|
253
+ merge_single_result!(merged, result)
254
+ end
255
+
256
+ merged
257
+ end
258
+
259
+ private def initialize_merged_result(batch_count)
260
+ {
261
+ "success" => true,
262
+ "total_batches" => batch_count,
263
+ "records" => [],
264
+ "errors" => [],
265
+ "statistics" => {
266
+ "created" => 0,
267
+ "updated" => 0,
268
+ "deleted" => 0,
269
+ "failed" => 0,
270
+ },
271
+ }
272
+ end
273
+
274
+ private def merge_single_result!(merged, result)
275
+ merged["records"].concat(result["records"] || [])
276
+ merged["errors"].concat(result["errors"] || [])
277
+ merge_statistics!(merged["statistics"], result["statistics"])
278
+ merged["success"] &&= result["success"] != false
279
+ end
280
+
281
+ private def merge_statistics!(target, source)
282
+ return unless source
283
+
284
+ %w[created updated deleted failed].each do |key|
285
+ target[key] += source[key] || 0
286
+ end
287
+ end
288
+ end
289
+ end
290
+ end