salesforce_bulk2 0.5.1 → 0.5.2

data/README.md CHANGED
@@ -76,11 +76,11 @@ When adding a job you can specify the following operations for the first argumen
 
  When using the :upsert operation you must specify an external ID field name:
 
-     job = client.add_job(:upsert, :MyObject__c, :external_id_field_name => :MyId__c)
+     job = client.add_job(:upsert, :MyObject__c, :external_id => :MyId__c)
 
  For any operation you can also specify a concurrency mode. The default is Parallel; the other choice is Serial.
 
-     job = client.add_job(:upsert, :MyObject__c, :concurrency_mode => :Serial, :external_id_field_name => :MyId__c)
+     job = client.add_job(:upsert, :MyObject__c, :concurrency_mode => :Serial, :external_id => :MyId__c)
 
  ### Retrieving Info for a Job
 
data/lib/salesforce_bulk2/batch.rb ADDED
@@ -0,0 +1,181 @@
+ module SalesforceBulk2
+   class Batch
+     attr_accessor :session_id
+     attr_accessor :batch_size
+
+     attr_reader :apex_processing_time
+     attr_reader :api_active_processing_time
+     attr_reader :completed_at
+     attr_reader :created_at
+     attr_reader :failed_records
+     attr_reader :id
+     attr_reader :job_id
+     attr_reader :processed_records
+     attr_reader :state
+     attr_reader :total_processing_time
+     attr_reader :data
+
+     @@batch_size = 10000
+
+     def self.batch_size
+       @@batch_size
+     end
+
+     def self.batch_size= batch_size
+       @@batch_size = batch_size
+     end
+
+     def batch_size
+       @batch_size || @@batch_size
+     end
+
+
+     def initialize job, data = nil
+       @job = job
+       @job_id = job.id
+       @client = job.client
+
+       update(data) if data
+     end
+
+     def self.create job, data
+       batch = Batch.new(job)
+       batch.execute(data)
+     end
+
+     def self.find job, batch_id
+       batch = Batch.new(job)
+       batch.id = batch_id
+       batch.refresh
+       batch
+     end
+
+     def execute data
+       raise Exception.new "Already executed" if @data
+
+       @data = data
+       body = data
+
+       if data.is_a?(Array)
+         raise ArgumentError, "Batch data set exceeds #{@@batch_size} record limit by #{data.length - @@batch_size}" if data.length > @@batch_size
+         raise ArgumentError, "Batch data set is empty" if data.length < 1
+
+         keys = data.first.keys
+         body = keys.to_csv
+
+         data.each do |item|
+           item_values = keys.map { |key| item[key] }
+           body += item_values.to_csv
+         end
+       end
+
+       # Despite the content for a query operation batch being plain text we
+       # still have to specify CSV content type per API docs.
+       @client.http_post_xml("job/#{@job_id}/batch", body, "Content-Type" => "text/csv; charset=UTF-8")
+     end
+
+     def get_request
+       response = @client.http_get("job/#{@job_id}/batch/#{@id}/request")
+
+       CSV.parse(response.body, :headers => true)
+     end
+
+     def get_result
+       response = @client.http_get("job/#{@job_id}/batch/#{@id}/result")
+
+       # Query result
+       if response.body =~ /<.*?>/m
+         result = XmlSimple.xml_in(response.body)
+
+         if result['result'].present?
+           results = get_query_result(@id, result['result'].first)
+
+           collection = QueryResultCollection.new(self, @id, result['result'].first, result['result'])
+           collection.replace(results)
+         end
+
+       # Batch result
+       else
+         results = BatchResultCollection.new
+         requests = get_request
+
+         i = 0
+         CSV.parse(response.body, :headers => true) do |row|
+           result = BatchResult.new(row[0], row[1].to_b, row[2].to_b, row[3])
+           result['request'] = requests[i]
+           results << result
+
+           i += 1
+         end
+
+         return results
+       end
+     end
+
+     def get_query_result(batch_id, result_id)
+       headers = {"Content-Type" => "text/csv; charset=UTF-8"}
+       response = @client.http_get("job/#{@job_id}/batch/#{batch_id}/result/#{result_id}", headers)
+
+       lines = response.body.lines.to_a
+       headers = CSV.parse_line(lines.shift).collect { |header| header.to_sym }
+
+       result = []
+
+       # CSV.parse(lines.join, :headers => headers, :converters => [:all, lambda{|s| s.to_b if s.kind_of? String }]) do |row|
+       CSV.parse(lines.join, :headers => headers) do |row|
+         result << Hash[row.headers.zip(row.fields)]
+       end
+
+       result
+     end
+
+     def update(data)
+       @data = data
+
+       @id = data['id']
+       @job_id = data['jobId']
+       @state = data['state']
+       @created_at = DateTime.parse(data['createdDate']) rescue nil
+       @completed_at = DateTime.parse(data['systemModstamp']) rescue nil
+       @processed_records = data['numberRecordsProcessed'].to_i
+       @failed_records = data['numberRecordsFailed'].to_i
+       @total_processing_time = data['totalProcessingTime'].to_i
+       @api_active_processing_time = data['apiActiveProcessingTime'].to_i
+       @apex_processing_time = data['apexProcessingTime'].to_i
+     end
+
+     ### State Information ###
+     def in_progress?
+       state? 'InProgress'
+     end
+
+     def queued?
+       state? 'Queued'
+     end
+
+     def completed?
+       state? 'Completed'
+     end
+
+     def failed?
+       state? 'Failed'
+     end
+
+     def finished?
+       completed? or failed?
+     end
+
+     def state?(value)
+       self.state.present? && self.state.casecmp(value) == 0
+     end
+
+     def errors?
+       @failed_records > 0
+     end
+
+     def refresh
+       xml_data = @client.http_get_xml("job/#{@job_id}/batch/#{@id}")
+       update(xml_data)
+     end
+   end
+ end
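
Note: the CSV body that `Batch#execute` builds from an array of hashes can be reproduced stand-alone. A minimal sketch (the sample records are invented):

    require 'csv'

    # The keys of the first record become the CSV header row, exactly as
    # Batch#execute does above.
    data = [
      { :Name => 'Acme',   :City__c => 'Toronto' },
      { :Name => 'Globex', :City__c => 'Boston' }
    ]

    keys = data.first.keys
    body = keys.to_csv
    data.each { |item| body += keys.map { |key| item[key] }.to_csv }

    puts body
    # Name,City__c
    # Acme,Toronto
    # Globex,Boston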
data/lib/salesforce_bulk2/batch_result.rb ADDED
@@ -0,0 +1,42 @@
+ module SalesforceBulk2
+   class BatchResult < Hash
+     def initialize(id, success, created, error)
+       self['id'] = id
+       self['success'] = success
+       self['created'] = created
+       self['error'] = error
+     end
+
+     def error?
+       error.present?
+     end
+
+     def created?
+       created
+     end
+
+     def successful?
+       success
+     end
+
+     def updated?
+       !created && success
+     end
+
+     def method_missing method, *args, &block
+       if has_key? method.to_s
+         self[method.to_s]
+       else
+         super method, *args, &block
+       end
+     end
+
+     def respond_to? method
+       if has_key? method.to_s
+         return true
+       else
+         super
+       end
+     end
+   end
+ end
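
Note: because `BatchResult` now subclasses `Hash` and dispatches unknown methods through `method_missing`, each field is readable both as a hash key and as a method. A quick illustration (values invented; assumes the gem and ActiveSupport are loaded):

    result = SalesforceBulk2::BatchResult.new('001A000001abcde', true, false, nil)

    result['id']       # => "001A000001abcde"  (plain hash access)
    result.id          # => "001A000001abcde"  (via method_missing)
    result.successful? # => true
    result.updated?    # => true   (success without creation means an update)
    result.error?      # => false  (error is nil)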
data/lib/salesforce_bulk2/batch_result_collection.rb ADDED
@@ -0,0 +1,20 @@
+ module SalesforceBulk2
+   class BatchResultCollection < Array
+
+     def any_failures?
+       self.any? { |result| result.error? }
+     end
+
+     def failed
+       self.select { |result| result.error? }
+     end
+
+     def completed
+       self.select { |result| result.successful? }
+     end
+
+     def created
+       self.select { |result| result.successful? && result.created? }
+     end
+   end
+ end
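
Note: the collection filters compose with the `BatchResult` predicates above. For example (results invented; assumes the gem is loaded):

    results = SalesforceBulk2::BatchResultCollection.new
    results << SalesforceBulk2::BatchResult.new('001', true,  true,  nil)
    results << SalesforceBulk2::BatchResult.new('002', true,  false, nil)
    results << SalesforceBulk2::BatchResult.new('003', false, false, 'DUPLICATE_VALUE')

    results.any_failures?     # => true
    results.failed.map(&:id)  # => ["003"]
    results.completed.size    # => 2   (the successful results)
    results.created.map(&:id) # => ["001"]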
data/lib/salesforce_bulk2/client.rb CHANGED
@@ -1,5 +1,6 @@
- module SalesforceBulk
-   class Connection
+ module SalesforceBulk2
+   # Interface for operating the Salesforce Bulk REST API
+   class Client
      # If true, print API debugging information to stdout. Defaults to false.
      attr_accessor :debugging
 
@@ -18,9 +19,12 @@ module SalesforceBulk
      # The Salesforce username
      attr_reader :username
 
-     # The API version the client is using. Defaults to 24.0.
+     # The API version the client is using
      attr_reader :version
 
+     # List of jobs associated with this client
+     attr_accessor :jobs
+
 
      # Defaults
      @@host = 'login.salesforce.com'
@@ -38,10 +42,12 @@ module SalesforceBulk
 
        @username = options[:username]
        @password = "#{options[:password]}#{options[:token]}"
-       @token = options[:token]
+       @token = options[:token] || ''
        @host = options[:host] || @@host
        @version = options[:version] || @@version
        @debugging = options[:debugging] || @@debugging
+
+       @jobs = []
      end
 
      def connect options = {}
@@ -153,9 +159,66 @@ module SalesforceBulk
        req
      end
 
-     private
      def instance_id(url)
        url.match(/:\/\/([a-zA-Z0-9-]{2,}).salesforce/)[1]
      end
+
+
+     # Job related
+     def new_job options = {}
+       job = Job.create(self, options)
+       @jobs << job
+       job
+     end
+
+     def find_job id
+       job = Job.find(self, id)
+       @jobs << job
+       job
+     end
+
+     def close_jobs
+       @jobs.map(&:close)
+     end
+
+     def abort_jobs
+       @jobs.map(&:abort)
+     end
+
+
+     ## Operations
+     def delete(sobject, data, batch_size = nil)
+       perform_operation(:delete, sobject, data, :batch_size => batch_size)
+     end
+
+     def insert(sobject, data, batch_size = nil)
+       perform_operation(:insert, sobject, data, :batch_size => batch_size)
+     end
+
+     def query(sobject, data, batch_size = nil)
+       perform_operation(:query, sobject, data, :batch_size => batch_size)
+     end
+
+     def update(sobject, data, batch_size = nil)
+       perform_operation(:update, sobject, data, :batch_size => batch_size)
+     end
+
+     def upsert(sobject, data, external_id, batch_size = nil)
+       perform_operation(:upsert, sobject, data, :external_id => external_id, :batch_size => batch_size)
+     end
+
+     def perform_operation(operation, sobject, data, options = {})
+       job = new_job(:operation => operation, :object => sobject, :external_id => options[:external_id])
+
+       job.add_data(data, options[:batch_size])
+       job.close
+
+       until job.finished?
+         job.refresh
+         sleep 2
+       end
+
+       return job.get_results
+     end
    end
- end
+ end
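
Note: with the job tracking added above, the one-call operations read roughly as follows. This is a hedged sketch, not documented usage: credentials and object names are placeholders, a live Salesforce org is required, and `Client.new`'s option hash is inferred from the initializer shown in the diff.

    require 'salesforce_bulk2'

    client = SalesforceBulk2::Client.new(
      :username => 'user@example.com',
      :password => 'password',
      :token    => 'security_token'
    )
    client.connect

    records = [
      { :MyId__c => 'A1', :Name => 'Acme' },
      { :MyId__c => 'A2', :Name => 'Globex' }
    ]

    # Creates a job, batches the data, closes the job, polls every two
    # seconds until the job is finished, then returns the batch results.
    results = client.upsert(:MyObject__c, records, :MyId__c)

    # Any jobs still tracked by the client can be closed in one sweep.
    client.close_jobs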
data/lib/salesforce_bulk2/job.rb ADDED
@@ -0,0 +1,193 @@
+ module SalesforceBulk2
+   class Job
+     attr_reader :client
+
+     attr_reader :concurrency_mode
+     attr_reader :external_id
+     attr_reader :data
+     attr_reader :xml_data
+
+     @@fields = [:id, :operation, :object, :createdById, :state, :createdDate,
+       :systemModstamp, :externalIdFieldName, :concurrencyMode, :contentType,
+       :numberBatchesQueued, :numberBatchesInProgress, :numberBatchesCompleted,
+       :numberBatchesFailed, :totalBatches, :retries, :numberRecordsProcessed,
+       :numberRecordsFailed, :totalProcessingTime, :apiActiveProcessingTime,
+       :apexProcessingTime, :apiVersion]
+
+     @@valid_operations = [:delete, :insert, :update, :upsert, :query]
+     @@valid_concurrency_modes = ['Parallel', 'Serial']
+
+     @@fields.each do |field|
+       attr_reader field.to_s.underscore.to_sym
+     end
+
+     def self.valid_operation? operation
+       @@valid_operations.include?(operation)
+     end
+
+     def self.valid_concurrency_mode? mode
+       @@valid_concurrency_modes.include?(mode)
+     end
+
+     def self.create client, options = {}
+       job = Job.new(client)
+
+       options.assert_valid_keys(:external_id, :concurrency_mode, :object, :operation)
+
+       operation = options[:operation].to_sym.downcase
+       raise ArgumentError.new("Invalid operation: #{operation}") unless Job.valid_operation?(operation)
+
+       external_id = options[:external_id]
+       concurrency_mode = options[:concurrency_mode]
+       object = options[:object]
+
+       if concurrency_mode
+         concurrency_mode = concurrency_mode.capitalize
+         raise ArgumentError.new("Invalid concurrency mode: #{concurrency_mode}") unless Job.valid_concurrency_mode?(concurrency_mode)
+       end
+
+       xml = '<?xml version="1.0" encoding="utf-8"?>'
+       xml += '<jobInfo xmlns="http://www.force.com/2009/06/asyncapi/dataload">'
+       xml += " <operation>#{operation}</operation>"
+       xml += " <object>#{object}</object>" if object
+       xml += " <externalIdFieldName>#{external_id}</externalIdFieldName>" if external_id
+       xml += " <concurrencyMode>#{concurrency_mode}</concurrencyMode>" if concurrency_mode
+       xml += " <contentType>CSV</contentType>"
+       xml += "</jobInfo>"
+
+       job.update(client.http_post_xml("job", xml))
+       job
+     end
+
+     def self.find client, id
+       job = Job.new(client)
+       job.id = id
+       job.refresh
+       job
+     end
+
+     def refresh
+       xml_data = @client.http_get_xml("job/#{@id}")
+       update(xml_data)
+     end
+
+     def initialize client
+       @client = client
+     end
+
+     def new_batch data
+       Batch.create(self, data)
+     end
+
+     def get_batches
+       result = @client.http_get_xml("job/#{@id}/batch")
+
+       if result['batchInfo'].is_a?(Array)
+         result['batchInfo'].collect { |info| Batch.new(self, info) }
+       elsif result['batchInfo']
+         [Batch.new(self, result['batchInfo'])]
+       else
+         []
+       end
+     end
+
+     def abort
+       xml = '<?xml version="1.0" encoding="utf-8"?>'
+       xml += '<jobInfo xmlns="http://www.force.com/2009/06/asyncapi/dataload">'
+       xml += ' <state>Aborted</state>'
+       xml += '</jobInfo>'
+
+       @client.http_post_xml("job/#{@id}", xml)
+     end
+
+     def close
+       xml = '<?xml version="1.0" encoding="utf-8"?>'
+       xml += '<jobInfo xmlns="http://www.force.com/2009/06/asyncapi/dataload">'
+       xml += ' <state>Closed</state>'
+       xml += '</jobInfo>'
+
+       @client.http_post_xml("job/#{@id}", xml)
+     end
+
+     def update xml_data
+       # Assign object
+       @xml_data = xml_data
+
+       # Mass assign the defaults
+       @@fields.each do |field|
+         instance_variable_set(:"@#{field}", xml_data[field.to_s])
+       end
+
+       # Special cases and data formats
+       @created_date = DateTime.parse(xml_data['createdDate'])
+       @system_modstamp = DateTime.parse(xml_data['systemModstamp'])
+
+       @retries = xml_data['retries'].to_i
+       @api_version = xml_data['apiVersion'].to_i
+       @number_batches_queued = xml_data['numberBatchesQueued'].to_i
+       @number_batches_in_progress = xml_data['numberBatchesInProgress'].to_i
+       @number_batches_completed = xml_data['numberBatchesCompleted'].to_i
+       @number_batches_failed = xml_data['numberBatchesFailed'].to_i
+       @total_batches = xml_data['totalBatches'].to_i
+       @number_records_processed = xml_data['numberRecordsProcessed'].to_i
+       @number_records_failed = xml_data['numberRecordsFailed'].to_i
+       @total_processing_time = xml_data['totalProcessingTime'].to_i
+       @api_active_processing_time = xml_data['apiActiveProcessingTime'].to_i
+       @apex_processing_time = xml_data['apexProcessingTime'].to_i
+     end
+
+     def add_data data, batch_size = nil
+       data.each_slice(batch_size || Batch.batch_size) do |records|
+         new_batch(records)
+       end
+     end
+
+     def get_results
+       results = BatchResultCollection.new
+
+       get_batches.each { |batch| results << batch.get_result }
+
+       results.flatten
+     end
+
+     # def get_requests
+     #   results = BatchResultCollection.new
+
+     #   get_batches.each { |batch| results << batch.get_request }
+
+     #   results.flatten
+     # end
+
+     # Statuses
+     def batches_finished?
+       (@number_batches_queued == 0) and
+       (@number_batches_in_progress == 0)
+     end
+
+     def finished?
+       failed? or
+       aborted? or
+       (closed? and batches_finished?)
+     end
+
+     def failed?
+       state? 'Failed'
+     end
+
+     def aborted?
+       state? 'Aborted'
+     end
+
+     def closed?
+       state? 'Closed'
+     end
+
+     def open?
+       state? 'Open'
+     end
+
+     def state?(value)
+       @state.present? && @state.casecmp(value) == 0
+     end
+   end
+ end
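
Note: the `@@fields.each` block above relies on ActiveSupport's `underscore` to turn each camelCase field reported by the API into a snake_case reader. Stand-alone:

    require 'active_support/core_ext/string/inflections'

    # :numberRecordsProcessed gets the reader #number_records_processed
    puts 'numberRecordsProcessed'.underscore   # => number_records_processed
    puts 'apiActiveProcessingTime'.underscore  # => api_active_processing_time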
data/lib/salesforce_bulk2/query_result_collection.rb CHANGED
@@ -1,4 +1,4 @@
- module SalesforceBulk
+ module SalesforceBulk2
    class QueryResultCollection < Array
 
      attr_reader :client
data/lib/salesforce_bulk2/salesforce_error.rb CHANGED
@@ -1,4 +1,4 @@
- module SalesforceBulk
+ module SalesforceBulk2
    # An exception raised when any non successful request is made through the Salesforce Bulk API.
    class SalesforceError < StandardError
      # The Net::HTTPResponse instance from the API call.
data/lib/salesforce_bulk2/version.rb ADDED
@@ -0,0 +1,3 @@
+ module SalesforceBulk2
+   VERSION = "0.5.2"
+ end
data/lib/salesforce_bulk2.rb ADDED
@@ -0,0 +1,19 @@
+ require 'net/https'
+ require 'xmlsimple'
+ require 'csv'
+ require 'active_support'
+ require 'active_support/inflector'
+ require 'active_support/core_ext/object/blank'
+ require 'active_support/core_ext/hash/keys'
+ require 'salesforce_bulk2/version'
+ require 'salesforce_bulk2/core_extensions/string'
+ require 'salesforce_bulk2/salesforce_error'
+ require 'salesforce_bulk2/client'
+ require 'salesforce_bulk2/job'
+ require 'salesforce_bulk2/batch'
+ require 'salesforce_bulk2/batch_result'
+ require 'salesforce_bulk2/batch_result_collection'
+ require 'salesforce_bulk2/query_result_collection'
+
+ module SalesforceBulk2
+ end
data/salesforce_bulk2.gemspec CHANGED
@@ -1,10 +1,10 @@
  # -*- encoding: utf-8 -*-
  $:.push File.expand_path("../lib", __FILE__)
- require "salesforce_bulk/version"
+ require "salesforce_bulk2/version"
 
  Gem::Specification.new do |s|
    s.name        = "salesforce_bulk2"
-   s.version     = SalesforceBulk::VERSION
+   s.version     = SalesforceBulk2::VERSION
    s.platform    = Gem::Platform::RUBY
    s.authors     = ["Adam Kerr", "Jorge Valdivia", "Javier Julio"]
    s.email       = ["ajrkerr@gmail.com", "jorge@valdivia.me", "jjfutbol@gmail.com"]
data/test/lib/test_job.rb CHANGED
@@ -27,7 +27,7 @@ class TestJob < Test::Unit::TestCase
    assert_equal job.created_at, DateTime.parse('2012-05-30T04:08:30.000Z')
    assert_equal job.completed_at, DateTime.parse('2012-05-30T04:08:30.000Z')
    assert_equal job.state, 'Open'
-   assert_equal job.external_id_field_name, 'Id__c'
+   assert_equal job.external_id, 'Id__c'
    assert_equal job.concurrency_mode, 'Parallel'
    assert_equal job.content_type, 'CSV'
    assert_equal job.queued_batches, 0
@@ -87,7 +87,7 @@ class TestJob < Test::Unit::TestCase
      .with(:body => request, :headers => @headers)
      .to_return(:body => response, :status => 200)
 
-   job = @client.add_job(:upsert, :VideoEvent__c, :external_id_field_name => :Id__c)
+   job = @client.add_job(:upsert, :VideoEvent__c, :external_id => :Id__c)
 
    assert_requested :post, "#{api_url(@client)}job", :body => request, :headers => @headers, :times => 1
 
@@ -98,7 +98,7 @@ class TestJob < Test::Unit::TestCase
    assert_equal job.created_at, DateTime.parse('2012-05-29T21:50:47.000Z')
    assert_equal job.completed_at, DateTime.parse('2012-05-29T21:50:47.000Z')
    assert_equal job.state, 'Open'
-   assert_equal job.external_id_field_name, 'Id__c'
+   assert_equal job.external_id, 'Id__c'
    assert_equal job.concurrency_mode, 'Parallel'
    assert_equal job.content_type, 'CSV'
    assert_equal job.queued_batches, 0
@@ -153,7 +153,7 @@ class TestJob < Test::Unit::TestCase
    assert_equal job.created_at, DateTime.parse('2012-05-29T23:51:53.000Z')
    assert_equal job.completed_at, DateTime.parse('2012-05-29T23:51:53.000Z')
    assert_equal job.state, 'Closed'
-   assert_equal job.external_id_field_name, 'Id__c'
+   assert_equal job.external_id, 'Id__c'
    assert_equal job.concurrency_mode, 'Parallel'
    assert_equal job.content_type, 'CSV'
    assert_equal job.queued_batches, 0
@@ -190,7 +190,7 @@ class TestJob < Test::Unit::TestCase
    assert_equal job.created_at, DateTime.parse('2012-05-30T00:16:04.000Z')
    assert_equal job.completed_at, DateTime.parse('2012-05-30T00:16:04.000Z')
    assert_equal job.state, 'Aborted'
-   assert_equal job.external_id_field_name, 'Id__c'
+   assert_equal job.external_id, 'Id__c'
    assert_equal job.concurrency_mode, 'Parallel'
    assert_equal job.content_type, 'CSV'
    assert_equal job.queued_batches, 0
@@ -226,7 +226,7 @@ class TestJob < Test::Unit::TestCase
    assert_equal job.created_at, DateTime.parse('2012-05-30T04:08:30.000Z')
    assert_equal job.completed_at, DateTime.parse('2012-05-30T04:08:30.000Z')
    assert_equal job.state, 'Open'
-   assert_equal job.external_id_field_name, 'Id__c'
+   assert_equal job.external_id, 'Id__c'
    assert_equal job.concurrency_mode, 'Parallel'
    assert_equal job.content_type, 'CSV'
    assert_equal job.queued_batches, 0
@@ -249,7 +249,7 @@ class TestJob < Test::Unit::TestCase
    stub_request(:post, "#{api_url(@client)}job").to_return(:body => response, :status => 500)
 
    assert_raise SalesforceBulk::SalesforceError do
-     job = @client.add_job(:upsert, :SomeNonExistingObject__c, :external_id_field_name => :Id__c)
+     job = @client.add_job(:upsert, :SomeNonExistingObject__c, :external_id => :Id__c)
    end
  end
 
data/test/lib/test_simple_api.rb CHANGED
@@ -19,7 +19,7 @@ class TestSimpleApi < Test::Unit::TestCase
    test "delete" do
      data = [{:Id => '123123'}, {:Id => '234234'}]
 
-     @client.expects(:add_job).once.with(:delete, :VideoEvent__c, :external_id_field_name => nil).returns(@job)
+     @client.expects(:add_job).once.with(:delete, :VideoEvent__c, :external_id => nil).returns(@job)
      @client.expects(:add_batch).once.with(@job.id, data).returns(@batch)
      @client.expects(:close_job).once.with(@job.id).returns(@job)
      @client.expects(:batch_info).at_least_once.returns(@batch)
@@ -31,7 +31,7 @@ class TestSimpleApi < Test::Unit::TestCase
    test "insert" do
      data = [{:Title__c => 'Test Title'}, {:Title__c => 'Test Title'}]
 
-     @client.expects(:add_job).once.with(:insert, :VideoEvent__c, :external_id_field_name => nil).returns(@job)
+     @client.expects(:add_job).once.with(:insert, :VideoEvent__c, :external_id => nil).returns(@job)
      @client.expects(:add_batch).once.with(@job.id, data).returns(@batch)
      @client.expects(:close_job).once.with(@job.id).returns(@job)
      @client.expects(:batch_info).at_least_once.returns(@batch)
@@ -43,7 +43,7 @@ class TestSimpleApi < Test::Unit::TestCase
    test "query" do
      data = 'SELECT Id, Name FROM Account'
 
-     @client.expects(:add_job).once.with(:query, :VideoEvent__c, :external_id_field_name => nil).returns(@job)
+     @client.expects(:add_job).once.with(:query, :VideoEvent__c, :external_id => nil).returns(@job)
      @client.expects(:add_batch).once.with(@job.id, data).returns(@batch)
      @client.expects(:close_job).once.with(@job.id).returns(@job)
      @client.expects(:batch_info).at_least_once.returns(@batch)
@@ -55,7 +55,7 @@ class TestSimpleApi < Test::Unit::TestCase
    test "update" do
      data = [{:Id => '123123', :Title__c => 'Test Title'}, {:Id => '234234', :Title__c => 'A Second Title'}]
 
-     @client.expects(:add_job).once.with(:update, :VideoEvent__c, :external_id_field_name => nil).returns(@job)
+     @client.expects(:add_job).once.with(:update, :VideoEvent__c, :external_id => nil).returns(@job)
      @client.expects(:add_batch).once.with(@job.id, data).returns(@batch)
      @client.expects(:close_job).once.with(@job.id).returns(@job)
      @client.expects(:batch_info).at_least_once.returns(@batch)
@@ -67,7 +67,7 @@ class TestSimpleApi < Test::Unit::TestCase
    test "upsert" do
      data = [{:Id__c => '123123', :Title__c => 'Test Title'}, {:Id__c => '234234', :Title__c => 'A Second Title'}]
 
-     @client.expects(:add_job).once.with(:upsert, :VideoEvent__c, :external_id_field_name => :Id__c).returns(@job)
+     @client.expects(:add_job).once.with(:upsert, :VideoEvent__c, :external_id => :Id__c).returns(@job)
      @client.expects(:add_batch).once.with(@job.id, data).returns(@batch)
      @client.expects(:close_job).once.with(@job.id).returns(@job)
      @client.expects(:batch_info).at_least_once.returns(@batch)
metadata CHANGED
@@ -1,7 +1,7 @@
  --- !ruby/object:Gem::Specification
  name: salesforce_bulk2
  version: !ruby/object:Gem::Version
-   version: 0.5.1
+   version: 0.5.2
  prerelease:
  platform: ruby
  authors:
@@ -11,7 +11,7 @@ authors:
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2012-08-01 00:00:00.000000000 Z
+ date: 2012-08-10 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
    name: activesupport
@@ -123,17 +123,16 @@ files:
  - Gemfile
  - README.md
  - Rakefile
- - lib/salesforce_bulk.rb
- - lib/salesforce_bulk/batch.rb
- - lib/salesforce_bulk/batch_result.rb
- - lib/salesforce_bulk/batch_result_collection.rb
- - lib/salesforce_bulk/client.rb
- - lib/salesforce_bulk/connection.rb
- - lib/salesforce_bulk/core_extensions/string.rb
- - lib/salesforce_bulk/job.rb
- - lib/salesforce_bulk/query_result_collection.rb
- - lib/salesforce_bulk/salesforce_error.rb
- - lib/salesforce_bulk/version.rb
+ - lib/salesforce_bulk2.rb
+ - lib/salesforce_bulk2/batch.rb
+ - lib/salesforce_bulk2/batch_result.rb
+ - lib/salesforce_bulk2/batch_result_collection.rb
+ - lib/salesforce_bulk2/client.rb
+ - lib/salesforce_bulk2/core_extensions/string.rb
+ - lib/salesforce_bulk2/job.rb
+ - lib/salesforce_bulk2/query_result_collection.rb
+ - lib/salesforce_bulk2/salesforce_error.rb
+ - lib/salesforce_bulk2/version.rb
  - salesforce_bulk2.gemspec
  - test/fixtures/batch_create_request.csv
  - test/fixtures/batch_create_response.xml
data/lib/salesforce_bulk/batch.rb DELETED
@@ -1,101 +0,0 @@
- module SalesforceBulk
-   class Batch
-     attr_accessor :session_id
-
-     attr_reader :apex_processing_time
-     attr_reader :api_active_processing_time
-     attr_reader :completed_at
-     attr_reader :created_at
-     attr_reader :failed_records
-     attr_reader :id
-     attr_reader :job_id
-     attr_reader :processed_records
-     attr_reader :state
-     attr_reader :total_processing_time
-     attr_reader :data
-
-     @@batch_size = 10000
-
-     def self.batch_size
-       @@batch_size
-     end
-
-     def self.new_from_xml xml_data, session_id = nil
-       batch = Batch.new
-       batch.update(data)
-       batch.session_id = session_id
-       batch
-     end
-
-     def self.find job_id, batch_id, session_id
-       batch = Batch.new
-       batch.id = batch_id
-       batch.job_id = job_id
-       batch.session_id = session_id
-       batch.refresh
-       batch
-     end
-
-
-     def update(data)
-       @data = data
-
-       @id = data['id']
-       @job_id = data['jobId']
-       @state = data['state']
-       @created_at = DateTime.parse(data['createdDate'])
-       @completed_at = DateTime.parse(data['systemModstamp'])
-       @processed_records = data['numberRecordsProcessed'].to_i
-       @failed_records = data['numberRecordsFailed'].to_i
-       @total_processing_time = data['totalProcessingTime'].to_i
-       @api_active_processing_time = data['apiActiveProcessingTime'].to_i
-       @apex_processing_time = data['apex_processing_time'].to_i
-     end
-
-     def job
-       @job ||= Job.find(@job_id, @session_id) if @session_id
-     end
-
-     ### State Information ###
-     def in_progress?
-       state? 'InProgress'
-     end
-
-     def queued?
-       state? 'Queued'
-     end
-
-     def completed?
-       state? 'Completed'
-     end
-
-     def failed?
-       state? 'Failed'
-     end
-
-     def finished?
-       completed? or finished?
-     end
-
-     def state?(value)
-       self.state.present? && self.state.casecmp(value) == 0
-     end
-
-     def errors?
-       @number_records_failed > 0
-     end
-
-     def result
-       @client.get_batch_result(@job_id, @batch_id)
-     end
-
-     def request
-       @client.get_batch_request(@job_id, @batch_id)
-     end
-
-     def refresh
-       xml_data = @connection.http_get_xml("job/#{jobId}/batch/#{batchId}")
-       update(xml_data)
-     end
-   end
- end
data/lib/salesforce_bulk/batch_result.rb DELETED
@@ -1,39 +0,0 @@
- module SalesforceBulk
-   class BatchResult
-
-     # A boolean indicating if record was created. If updated value is false.
-     attr_reader :created
-
-     # The error message.
-     attr_reader :error
-
-     # The record's unique id.
-     attr_reader :id
-
-     # If record was created successfully. If false then an error message is provided.
-     attr_reader :success
-
-     def initialize(id, success, created, error)
-       @id = id
-       @success = success
-       @created = created
-       @error = error
-     end
-
-     def error?
-       error.present?
-     end
-
-     def created?
-       created
-     end
-
-     def successful?
-       success
-     end
-
-     def updated?
-       !created && success
-     end
-   end
- end
data/lib/salesforce_bulk/batch_result_collection.rb DELETED
@@ -1,29 +0,0 @@
- module SalesforceBulk
-   class BatchResultCollection < Array
-
-     attr_reader :batch_id
-     attr_reader :job_id
-
-     def initialize(job_id, batch_id)
-       @job_id = job_id
-       @batch_id = batch_id
-     end
-
-     def any_failures?
-       self.any? { |result| result.error.length > 0 }
-     end
-
-     def failed
-       self.select { |result| result.error.length > 0 }
-     end
-
-     def completed
-       self.select { |result| result.success }
-     end
-
-     def created
-       self.select { |result| result.success && result.created }
-     end
-
-   end
- end
data/lib/salesforce_bulk/client.rb DELETED
@@ -1,209 +0,0 @@
- module SalesforceBulk
-   # Interface for operating the Salesforce Bulk REST API
-   class Client
-     # The HTTP connection we will be using to connect to Salesforce.com
-     attr_accessor :connection
-
-     def initialize(options={})
-       @connection = Connection.new(options)
-     end
-
-     def connected?
-       @connection.connected?
-     end
-
-     def disconnect
-       @connection.disconnect
-     end
-
-     def connect options = {}
-       @connection.connect(options)
-     end
-
-     def new_job operation, sobject, options = {}
-       Job.new(add_job(operation, sobject, options), self)
-     end
-
-     def add_job operation, sobject, options={}
-       operation = operation.to_sym.downcase
-
-       raise ArgumentError.new("Invalid operation: #{operation}") unless Job.valid_operation?(operation)
-
-       options.assert_valid_keys(:external_id_field_name, :concurrency_mode)
-
-       if options[:concurrency_mode]
-         concurrency_mode = options[:concurrency_mode].capitalize
-         raise ArgumentError.new("Invalid concurrency mode: #{concurrency_mode}") unless Job.valid_concurrency_mode?(concurrency_mode)
-       end
-
-       xml = '<?xml version="1.0" encoding="utf-8"?>'
-       xml += '<jobInfo xmlns="http://www.force.com/2009/06/asyncapi/dataload">'
-       xml += " <operation>#{operation}</operation>"
-       xml += " <object>#{sobject}</object>" if sobject
-       xml += " <externalIdFieldName>#{options[:external_id_field_name]}</externalIdFieldName>" if options[:external_id_field_name]
-       xml += " <concurrencyMode>#{options[:concurrency_mode]}</concurrencyMode>" if options[:concurrency_mode]
-       xml += " <contentType>CSV</contentType>"
-       xml += "</jobInfo>"
-
-       @connection.http_post_xml("job", xml)
-     end
-
-     def abort_job job_id
-       xml = '<?xml version="1.0" encoding="utf-8"?>'
-       xml += '<jobInfo xmlns="http://www.force.com/2009/06/asyncapi/dataload">'
-       xml += ' <state>Aborted</state>'
-       xml += '</jobInfo>'
-
-       @connection.http_post_xml("job/#{job_id}", xml)
-     end
-
-     def close_job job_id
-       xml = '<?xml version="1.0" encoding="utf-8"?>'
-       xml += '<jobInfo xmlns="http://www.force.com/2009/06/asyncapi/dataload">'
-       xml += ' <state>Closed</state>'
-       xml += '</jobInfo>'
-
-       @connection.http_post_xml("job/#{job_id}", xml)
-     end
-
-     def get_job_info job_id
-       @connection.http_get_xml("job/#{job_id}")
-     end
-
-     def get_batch_info job_id, batch_id
-       @connection.http_get_xml("job/#{jobId}/batch/#{batchId}")
-     end
-
-     def find_job job_id
-       Job.new get_job(job_id)
-     end
-
-     def find_batch job_id, batch_id
-       Batch.new get_batch(job_id, batch_id)
-     end
-
-     def create_batch job_id, data
-       Batch.new add_batch(job_id, data)
-     end
-
-     def add_batch job_id, data
-       body = data
-
-       if data.is_a?(Array)
-         raise ArgumentError, "Batch data set exceeds #{Batch.max_records} record limit by #{data.length - Batch.max_records}" if data.length > Batch.max_records
-         raise ArgumentError, "Batch data set is empty" if data.length < 1
-
-         keys = data.first.keys
-         body = keys.to_csv
-
-         data.each do |item|
-           item_values = keys.map { |key| item[key] }
-           body += item_values.to_csv
-         end
-       end
-
-       # Despite the content for a query operation batch being plain text we
-       # still have to specify CSV content type per API docs.
-       @connection.http_post_xml("job/#{job_id}/batch", body, "Content-Type" => "text/csv; charset=UTF-8")
-     end
-
-     def get_batch_list(job_id)
-       result = @connection.http_get_xml("job/#{job_id}/batch")
-
-       if result['batchInfo'].is_a?(Array)
-         result['batchInfo'].collect { |info| Batch.new(info) }
-       else
-         [Batch.new(result['batchInfo'])]
-       end
-     end
-
-     def get_batch_request(job_id, batch_id)
-       response = http_get("job/#{job_id}/batch/#{batch_id}/request")
-
-       CSV.parse(response.body, :headers => true) do |row|
-         result << BatchResult.new(row[0], row[1].to_b, row[2].to_b, row[3])
-       end
-     end
-
-     def get_batch_result(job_id, batch_id)
-       response = http_get("job/#{job_id}/batch/#{batch_id}/result")
-
-       #Query Result
-       if response.body =~ /<.*?>/m
-         result = XmlSimple.xml_in(response.body)
-
-         if result['result'].present?
-           results = get_query_result(job_id, batch_id, result['result'].first)
-
-           collection = QueryResultCollection.new(self, job_id, batch_id, result['result'].first, result['result'])
-           collection.replace(results)
-         end
-
-       #Batch Result
-       else
-         result = BatchResultCollection.new(job_id, batch_id)
-
-         CSV.parse(response.body, :headers => true) do |row|
-           result << BatchResult.new(row[0], row[1].to_b, row[2].to_b, row[3])
-         end
-
-         result
-       end
-     end
-
-     def get_query_result(job_id, batch_id, result_id)
-       headers = {"Content-Type" => "text/csv; charset=UTF-8"}
-       response = http_get("job/#{job_id}/batch/#{batch_id}/result/#{result_id}", headers)
-
-       lines = response.body.lines.to_a
-       headers = CSV.parse_line(lines.shift).collect { |header| header.to_sym }
-
-       result = []
-
-       #CSV.parse(lines.join, :headers => headers, :converters => [:all, lambda{|s| s.to_b if s.kind_of? String }]) do |row|
-       CSV.parse(lines.join, :headers => headers) do |row|
-         result << Hash[row.headers.zip(row.fields)]
-       end
-
-       result
-     end
-
-     ## Operations
-     def delete(sobject, data)
-       perform_operation(:delete, sobject, data)
-     end
-
-     def insert(sobject, data)
-       perform_operation(:insert, sobject, data)
-     end
-
-     def query(sobject, data)
-       perform_operation(:query, sobject, data)
-     end
-
-     def update(sobject, data)
-       perform_operation(:update, sobject, data)
-     end
-
-     def upsert(sobject, external_id, data)
-       perform_operation(:upsert, sobject, data, external_id)
-     end
-
-     def perform_operation(operation, sobject, data, external_id = nil, batch_size = nil)
-       job = new_job(operation, sobject, :external_id_field_name => external_id)
-
-       data.each_slice(batch_size || Batch.batch_size) do |records|
-         job.add_batch(records)
-       end
-
-       job.close
-
-       until job.finished?
-         job.refresh
-         sleep 2
-       end
-
-       job.get_results
-     end
-   end
- end
data/lib/salesforce_bulk/job.rb DELETED
@@ -1,128 +0,0 @@
- module SalesforceBulk
-   class Job
-     attr_accessor :client
-
-     attr_reader :concurrency_mode
-     attr_reader :external_id_field_name
-     attr_reader :data
-
-     @@fields = [:id, :operation, :object, :createdById, :state, :createdDate,
-       :systemModstamp, :externalIdFieldName, :concurrencyMode, :contentType,
-       :numberBatchesQueued, :numberBatchesInProgress, :numberBatchesCompleted,
-       :numberBatchesFailed, :totalBatches, :retries, :numberRecordsProcessed,
-       :numberRecordsFailed, :totalProcessingTime, :apiActiveProcessingTime,
-       :apexProcessingTime, :apiVersion]
-
-     @@valid_operations = [:delete, :insert, :update, :upsert, :query]
-     @@valid_concurrency_modes = ['Parallel', 'Serial']
-
-
-     @@fields.each do |field|
-       attr_reader field.to_s.underscore.to_sym
-     end
-
-     def self.valid_operation? operation
-       @@valid_operations.include?(operation)
-     end
-
-     def self.valid_concurrency_mode? mode
-       @@valid_concurrency_modes.include?(concurrency_mode)
-     end
-
-     def new_from_xml xml_data, client = nil
-       job = Job.new
-       job.update(xml_data)
-       job.client = client
-     end
-
-     def update xml_data
-       #Check fields
-       xml_data.assert_valid_keys(@@fields)
-
-       #Assign object
-       @xml_data = xml_data
-
-       #Mass assign the defaults
-       @@fields.each do |field|
-         instance_variable_set(field, xml_data[field])
-       end
-
-       #Special cases and data formats
-       @created_date = DateTime.parse(xml_data['createdDate'])
-       @system_modstamp = DateTime.parse(xml_data['systemModstamp'])
-
-       @retries = xml_data['retries'].to_i
-       @api_version = xml_data['apiVersion'].to_i
-       @number_batches_queued = xml_data['numberBatchesQueued'].to_i
-       @number_batches_in_progress = xml_data['numberBatchesInProgress'].to_i
-       @number_batches_completed = xml_data['numberBatchesCompleted'].to_i
-       @number_batches_failed = xml_data['numberBatchesFailed'].to_i
-       @total_batches = xml_data['totalBatches'].to_i
-       @number_records_processed = xml_data['numberRecordsProcessed'].to_i
-       @number_records_failed = xml_data['numberRecordsFailed'].to_i
-       @total_processing_time = xml_data['totalProcessingTime'].to_i
-       @api_active_processing_time = xml_data['apiActiveProcessingTime'].to_i
-       @apex_processing_time = xml_data['apexProcessingTime'].to_i
-     end
-
-     def batch_list
-       @client.get_batch_list(@id)
-     end
-
-     def create_batch data
-       @client.create_batch(data)
-     end
-
-     def add_batch data
-       @client.add_batch(data)
-     end
-
-     def close
-       update(@client.close_job(@id))
-     end
-
-     def abort
-       update(@client.abort_job(@id))
-     end
-
-     def refresh
-       update(@client.get_job_info(@id))
-     end
-
-     def get_results
-       batch_list.map(&:result).flatten
-     end
-
-     #Statuses
-     def batches_finished?
-       (@number_batches_queued == 0) and
-       (@number_batches_in_progress == 0)
-     end
-
-     def finished?
-       failed? or
-       aborted? or
-       (closed? and batches_finished?)
-     end
-
-     def failed?
-       state? 'Failed'
-     end
-
-     def aborted?
-       state? 'Aborted'
-     end
-
-     def closed?
-       state? 'Closed'
-     end
-
-     def open?
-       state? 'Open'
-     end
-
-     def state?(value)
-       @state.present? && @state.casecmp(value) == 0
-     end
-   end
- end
data/lib/salesforce_bulk/version.rb DELETED
@@ -1,3 +0,0 @@
- module SalesforceBulk
-   VERSION = "0.5.1"
- end
data/lib/salesforce_bulk.rb DELETED
@@ -1,20 +0,0 @@
- require 'net/https'
- require 'xmlsimple'
- require 'csv'
- require 'active_support'
- require 'active_support/inflector'
- require 'active_support/core_ext/object/blank'
- require 'active_support/core_ext/hash/keys'
- require 'salesforce_bulk/version'
- require 'salesforce_bulk/core_extensions/string'
- require 'salesforce_bulk/salesforce_error'
- require 'salesforce_bulk/connection'
- require 'salesforce_bulk/client'
- require 'salesforce_bulk/job'
- require 'salesforce_bulk/batch'
- require 'salesforce_bulk/batch_result'
- require 'salesforce_bulk/batch_result_collection'
- require 'salesforce_bulk/query_result_collection'
-
- module SalesforceBulk
- end