salesforce_bulk2 0.5.0
Sign up to get free protection for your applications and to get access to all the features.
- data/.gitignore +4 -0
- data/Gemfile +2 -0
- data/README.md +143 -0
- data/Rakefile +22 -0
- data/lib/salesforce_bulk/batch.rb +86 -0
- data/lib/salesforce_bulk/batch_result.rb +39 -0
- data/lib/salesforce_bulk/batch_result_collection.rb +29 -0
- data/lib/salesforce_bulk/client.rb +209 -0
- data/lib/salesforce_bulk/connection.rb +155 -0
- data/lib/salesforce_bulk/core_extensions/string.rb +14 -0
- data/lib/salesforce_bulk/job.rb +126 -0
- data/lib/salesforce_bulk/query_result_collection.rb +48 -0
- data/lib/salesforce_bulk/salesforce_error.rb +30 -0
- data/lib/salesforce_bulk/version.rb +3 -0
- data/lib/salesforce_bulk.rb +17 -0
- data/salesforce_bulk.gemspec +27 -0
- data/test/fixtures/batch_create_request.csv +3 -0
- data/test/fixtures/batch_create_response.xml +13 -0
- data/test/fixtures/batch_info_list_response.xml +27 -0
- data/test/fixtures/batch_info_response.xml +13 -0
- data/test/fixtures/batch_result_list_response.csv +3 -0
- data/test/fixtures/config.yml +7 -0
- data/test/fixtures/invalid_batch_error.xml +5 -0
- data/test/fixtures/invalid_error.xml +5 -0
- data/test/fixtures/invalid_job_error.xml +5 -0
- data/test/fixtures/invalid_session_error.xml +5 -0
- data/test/fixtures/job_abort_request.xml +1 -0
- data/test/fixtures/job_abort_response.xml +25 -0
- data/test/fixtures/job_close_request.xml +1 -0
- data/test/fixtures/job_close_response.xml +25 -0
- data/test/fixtures/job_create_request.xml +1 -0
- data/test/fixtures/job_create_response.xml +25 -0
- data/test/fixtures/job_info_response.xml +25 -0
- data/test/fixtures/login_error.xml +1 -0
- data/test/fixtures/login_request.xml +1 -0
- data/test/fixtures/login_response.xml +39 -0
- data/test/fixtures/query_result_list_response.xml +1 -0
- data/test/fixtures/query_result_response.csv +5 -0
- data/test/lib/test_batch.rb +258 -0
- data/test/lib/test_batch_result.rb +40 -0
- data/test/lib/test_core_extensions.rb +15 -0
- data/test/lib/test_initialization.rb +86 -0
- data/test/lib/test_job.rb +256 -0
- data/test/lib/test_query_result_collection.rb +87 -0
- data/test/lib/test_simple_api.rb +79 -0
- data/test/test_helper.rb +32 -0
- metadata +222 -0
data/.gitignore
ADDED
data/Gemfile
ADDED
data/README.md
ADDED
@@ -0,0 +1,143 @@
|
|
1
|
+
# salesforce_bulk
|
2
|
+
|
3
|
+
## Overview
|
4
|
+
|
5
|
+
Salesforce Bulk is a simple Ruby gem for connecting to and using the [Salesforce Bulk API](http://www.salesforce.com/us/developer/docs/api_asynch/index.htm).
|
6
|
+
|
7
|
+
## Installation
|
8
|
+
|
9
|
+
Install SalesforceBulk from RubyGems:
|
10
|
+
|
11
|
+
gem install salesforce_bulk
|
12
|
+
|
13
|
+
Or include it in your project's `Gemfile` with Bundler:
|
14
|
+
|
15
|
+
gem 'salesforce_bulk'
|
16
|
+
|
17
|
+
## Contribute
|
18
|
+
|
19
|
+
To contribute, fork this repo, create a topic branch, make changes, then send a pull request. Pull requests without accompanying tests will *not* be accepted. To run tests in your fork, just do:
|
20
|
+
|
21
|
+
bundle install
|
22
|
+
rake
|
23
|
+
|
24
|
+
## Configuration and Initialization
|
25
|
+
|
26
|
+
### Basic Configuration
|
27
|
+
|
28
|
+
require 'salesforce_bulk'
|
29
|
+
|
30
|
+
client = SalesforceBulk::Client.new(username: 'MyUsername', password: 'MyPassword', token: 'MySecurityToken')
|
31
|
+
client.authenticate
|
32
|
+
|
33
|
+
Optional keys include host (default: login.salesforce.com), version (default: 24.0) and debugging (default: false).
|
34
|
+
|
35
|
+
### Configuring from a YAML file
|
36
|
+
|
37
|
+
The optional keys mentioned in the Basic Configuration section can also be used here.
|
38
|
+
|
39
|
+
---
|
40
|
+
username: MyUsername
|
41
|
+
password: MyPassword
|
42
|
+
token: MySecurityToken
|
43
|
+
|
44
|
+
Then in a Ruby script:
|
45
|
+
|
46
|
+
require 'salesforce_bulk'
|
47
|
+
|
48
|
+
client = SalesforceBulk::Client.new("config/salesforce_bulk.yml")
|
49
|
+
client.authenticate
|
50
|
+
|
51
|
+
## Usage Examples
|
52
|
+
|
53
|
+
Note for users upgrading from an older version of the gem: each hash in a batch's data array must specify every key, even when a key has no value.
|
54
|
+
|
55
|
+
### Basic Example
|
56
|
+
|
57
|
+
data1 = [{:Name__c => 'Test 1'}, {:Name__c => 'Test 2'}]
|
58
|
+
data2 = [{:Name__c => 'Test 3'}, {:Name__c => 'Test 4'}]
|
59
|
+
|
60
|
+
job = client.add_job(:insert, :MyObject__c)
|
61
|
+
|
62
|
+
# easily add multiple batches to a job
|
63
|
+
batch = client.add_batch(job.id, data1)
|
64
|
+
batch = client.add_batch(job.id, data2)
|
65
|
+
|
66
|
+
job = client.close_job(job.id) # or use the abort_job(id) method
|
67
|
+
|
68
|
+
### Adding a Job
|
69
|
+
|
70
|
+
When adding a job you can specify the following operations for the first argument:
|
71
|
+
- :delete
|
72
|
+
- :insert
|
73
|
+
- :update
|
74
|
+
- :upsert
|
75
|
+
- :query
|
76
|
+
|
77
|
+
When using the :upsert operation you must specify an external ID field name:
|
78
|
+
|
79
|
+
job = client.add_job(:upsert, :MyObject__c, :external_id_field_name => :MyId__c)
|
80
|
+
|
81
|
+
For any operation you should be able to specify a concurrency mode. The default is Parallel. The other choice is Serial.
|
82
|
+
|
83
|
+
job = client.add_job(:upsert, :MyObject__c, :concurrency_mode => :Serial, :external_id_field_name => :MyId__c)
|
84
|
+
|
85
|
+
### Retrieving Info for a Job
|
86
|
+
|
87
|
+
job = client.job_info(jobId) # returns a Job object
|
88
|
+
|
89
|
+
puts "Job #{job.id} is closed." if job.closed? # other: open?, aborted?
|
90
|
+
|
91
|
+
### Retrieving Info for all Batches
|
92
|
+
|
93
|
+
batches = client.batch_info_list(jobId) # returns an Array of Batch objects
|
94
|
+
|
95
|
+
batches.each do |batch|
|
96
|
+
puts "Batch #{batch.id} failed." if batch.failed? # other: completed?, failed?, in_progress?, queued?
|
97
|
+
end
|
98
|
+
|
99
|
+
### Retrieving Info for a single Batch
|
100
|
+
|
101
|
+
batch = client.batch_info(jobId, batchId) # returns a Batch object
|
102
|
+
|
103
|
+
puts "Batch #{batch.id} is in progress." if batch.in_progress?
|
104
|
+
|
105
|
+
### Retrieving Batch Results (for Delete, Insert, Update and Upsert)
|
106
|
+
|
107
|
+
To verify that a batch completed successfully or failed call the `batch_info` or `batch_info_list` methods first, otherwise if you call `batch_result` without verifying and the batch failed the method will raise an error.
|
108
|
+
|
109
|
+
The object returned from the following example only applies to the operations: delete, insert, update and upsert. Query results are handled differently.
|
110
|
+
|
111
|
+
results = client.batch_result(jobId, batchId) # returns an Array of BatchResult objects
|
112
|
+
|
113
|
+
results.each do |result|
|
114
|
+
puts "Item #{result.id} had an error of: #{result.error}" if result.error?
|
115
|
+
end
|
116
|
+
|
117
|
+
### Retrieving Query based Batch Results
|
118
|
+
|
119
|
+
To verify that a batch completed successfully or failed call the `batch_info` or `batch_info_list` methods first, otherwise if you call `batch_result` without verifying and the batch failed the method will raise an error.
|
120
|
+
|
121
|
+
Query results are handled differently as the response will not contain the full result set. You'll have to page through sets if you added multiple batches to a job.
|
122
|
+
|
123
|
+
# returns a QueryResultCollection object (an Array)
|
124
|
+
results = client.batch_result(jobId, batchId)
|
125
|
+
|
126
|
+
while results.any?
|
127
|
+
|
128
|
+
# Assuming query was: SELECT Id, Name, CustomField__c FROM Account
|
129
|
+
results.each do |result|
|
130
|
+
puts result[:Id], result[:Name], result[:CustomField__c]
|
131
|
+
end
|
132
|
+
|
133
|
+
puts "Another set is available." if results.next?
|
134
|
+
|
135
|
+
results.next
|
136
|
+
|
137
|
+
end
|
138
|
+
|
139
|
+
## Copyright
|
140
|
+
|
141
|
+
Copyright (c) 2011 Jorge Valdivia.
|
142
|
+
|
143
|
+
Copyright (c) 2012 Javier Julio.
|
data/Rakefile
ADDED
@@ -0,0 +1,22 @@
|
|
1
|
+
require 'bundler'
Bundler::GemHelper.install_tasks

require 'rake/testtask'

# Run the full test suite; this is also the default task.
Rake::TestTask.new(:test) do |t|
  t.libs << 'lib' << 'test'
  t.pattern = 'test/**/test_*.rb'
  t.verbose = true
end

task :default => :test

namespace :doc do
  require 'rdoc/task'
  require File.expand_path('../lib/salesforce_bulk/version', __FILE__)

  # Generate RDoc documentation into ./rdoc, fronted by the README.
  RDoc::Task.new do |doc|
    doc.rdoc_dir = 'rdoc'
    doc.title = "SalesforceBulk #{SalesforceBulk::VERSION}"
    doc.main = 'README.md'
    doc.rdoc_files.include('README.md', 'LICENSE.md', 'lib/**/*.rb')
  end
end
|
@@ -0,0 +1,86 @@
|
|
1
|
+
module SalesforceBulk
  # Represents one batch within a Bulk API job. Wraps the batchInfo data
  # returned by the API and exposes state predicates plus helpers to fetch
  # the batch's results and original request via the owning client.
  class Batch
    attr_accessor :connection

    attr_reader :apex_processing_time
    attr_reader :api_active_processing_time
    attr_reader :completed_at
    attr_reader :created_at
    attr_reader :failed_records
    attr_reader :id
    attr_reader :job_id
    attr_reader :processed_records
    attr_reader :state
    attr_reader :total_processing_time
    attr_reader :data

    # Maximum number of records allowed in a single batch.
    @@max_records = 10000

    def self.max_records
      @@max_records
    end

    # data   - batchInfo hash parsed from the API's XML response.
    # job_id - kept optional for backward compatibility; callers in Client
    #          pass only the data hash, so a required job_id raised
    #          ArgumentError before this fix. The job id is read from data.
    # client - Client used for result/request/refresh round trips.
    def initialize(data, job_id = nil, client = nil)
      update(data)
      @client = client
    end

    # Refreshes all attributes from a raw batchInfo hash.
    def update(data)
      @data = data

      @id = data['id']
      @job_id = data['jobId']
      @state = data['state']
      # Guard the date fields so a partial batchInfo hash doesn't raise.
      @created_at = DateTime.parse(data['createdDate']) if data['createdDate']
      @completed_at = DateTime.parse(data['systemModstamp']) if data['systemModstamp']
      @processed_records = data['numberRecordsProcessed'].to_i
      @failed_records = data['numberRecordsFailed'].to_i
      @total_processing_time = data['totalProcessingTime'].to_i
      @api_active_processing_time = data['apiActiveProcessingTime'].to_i
      # The API key is camelCase; the previous 'apex_processing_time'
      # lookup always returned nil and therefore 0.
      @apex_processing_time = data['apexProcessingTime'].to_i
    end

    ### State Information ###
    def in_progress?
      state? 'InProgress'
    end

    def queued?
      state? 'Queued'
    end

    def completed?
      state? 'Completed'
    end

    def failed?
      state? 'Failed'
    end

    # True once the batch reached a terminal state. The original called
    # itself (`completed? or finished?`) and recursed infinitely.
    def finished?
      completed? or failed?
    end

    # Case-insensitive comparison against the current state.
    def state?(value)
      self.state.present? && self.state.casecmp(value) == 0
    end

    # True when at least one record in the batch failed. The original read
    # @number_records_failed, which is never assigned (update sets
    # @failed_records), so this always raised/compared against nil.
    def errors?
      @failed_records > 0
    end

    # Fetches this batch's results through the client.
    # (@batch_id was never assigned; the batch's own id is @id.)
    def result
      @client.get_batch_result(@job_id, @id)
    end

    # Fetches the originally submitted request body through the client.
    def request
      @client.get_batch_request(@job_id, @id)
    end

    # Re-reads batchInfo from the API and updates this object in place.
    # The original referenced undefined locals jobId/batchId and called
    # http_get_xml on self; the HTTP layer lives on the client's connection.
    def refresh
      xml_data = @client.connection.http_get_xml("job/#{@job_id}/batch/#{@id}")
      update(xml_data)
    end
  end
end
|
@@ -0,0 +1,39 @@
|
|
1
|
+
module SalesforceBulk
  # The outcome of a single record within a non-query batch
  # (delete, insert, update or upsert).
  class BatchResult

    # True when the record was newly created; false when it was updated.
    attr_reader :created

    # The error message for this record, if any.
    attr_reader :error

    # The record's unique id.
    attr_reader :id

    # True when the operation succeeded; otherwise +error+ explains why.
    attr_reader :success

    def initialize(id, success, created, error)
      @id, @success, @created, @error = id, success, created, error
    end

    # Whether an error message is present for this record.
    def error?
      error.present?
    end

    # Whether the record was created (rather than updated).
    def created?
      created
    end

    # Whether the operation on this record succeeded.
    def successful?
      success
    end

    # Whether an existing record was modified rather than created.
    def updated?
      success && !created
    end
  end
end
|
@@ -0,0 +1,29 @@
|
|
1
|
+
module SalesforceBulk
  # An Array of BatchResult objects for one batch, tagged with the owning
  # job and batch ids and offering filtering helpers over the results.
  class BatchResultCollection < Array

    attr_reader :batch_id
    attr_reader :job_id

    def initialize(job_id, batch_id)
      @job_id = job_id
      @batch_id = batch_id
    end

    # True when at least one result carries a non-empty error message.
    def any_failures?
      any? { |item| item.error.length > 0 }
    end

    # Results that carry a non-empty error message.
    def failed
      select { |item| item.error.length > 0 }
    end

    # Results whose operation completed successfully.
    def completed
      select { |item| item.success }
    end

    # Successful results that created a new record.
    def created
      select { |item| item.success && item.created }
    end

  end
end
|
@@ -0,0 +1,209 @@
|
|
1
|
+
module SalesforceBulk
  # Interface for operating the Salesforce Bulk REST API
  class Client
    # The HTTP connection we will be using to connect to Salesforce.com
    attr_accessor :connection

    def initialize(options={})
      @connection = Connection.new(options)
    end

    def connected?
      @connection.connected?
    end

    def disconnect
      @connection.disconnect
    end

    def connect
      @connection.connect
    end

    # Creates a job and wraps the raw jobInfo response in a Job object.
    def create_job operation, sobject, options = {}
      Job.new add_job(operation, sobject, options), self
    end

    # Creates a job via the API and returns the parsed jobInfo hash.
    #
    # operation - one of :delete, :insert, :update, :upsert, :query
    # sobject   - the Salesforce object name
    # options   - :external_id_field_name (required for :upsert) and
    #             :concurrency_mode (default Parallel; or :Serial)
    #
    # Raises ArgumentError for an invalid operation or concurrency mode.
    def add_job operation, sobject, options={}
      operation = operation.to_sym.downcase

      raise ArgumentError.new("Invalid operation: #{operation}") unless Job.valid_operation?(operation)

      options.assert_valid_keys(:external_id_field_name, :concurrency_mode)

      if options[:concurrency_mode]
        concurrency_mode = options[:concurrency_mode].capitalize
        raise ArgumentError.new("Invalid concurrency mode: #{concurrency_mode}") unless Job.valid_concurrency_mode?(concurrency_mode)
      end

      xml = '<?xml version="1.0" encoding="utf-8"?>'
      xml += '<jobInfo xmlns="http://www.force.com/2009/06/asyncapi/dataload">'
      xml += " <operation>#{operation}</operation>"
      xml += " <object>#{sobject}</object>"
      xml += " <externalIdFieldName>#{options[:external_id_field_name]}</externalIdFieldName>" if options[:external_id_field_name]
      xml += " <concurrencyMode>#{options[:concurrency_mode]}</concurrencyMode>" if options[:concurrency_mode]
      xml += " <contentType>CSV</contentType>"
      xml += "</jobInfo>"

      @connection.http_post_xml("job", xml)
    end

    # Aborts the job and returns the updated jobInfo hash.
    def abort_job job_id
      xml = '<?xml version="1.0" encoding="utf-8"?>'
      xml += '<jobInfo xmlns="http://www.force.com/2009/06/asyncapi/dataload">'
      xml += ' <state>Aborted</state>'
      xml += '</jobInfo>'

      @connection.http_post_xml("job/#{job_id}", xml)
    end

    # Closes the job (no further batches accepted) and returns jobInfo.
    def close_job job_id
      xml = '<?xml version="1.0" encoding="utf-8"?>'
      xml += '<jobInfo xmlns="http://www.force.com/2009/06/asyncapi/dataload">'
      xml += ' <state>Closed</state>'
      xml += '</jobInfo>'

      @connection.http_post_xml("job/#{job_id}", xml)
    end

    # Returns the raw jobInfo hash for a job.
    def get_job job_id
      @connection.http_get_xml("job/#{job_id}")
    end

    # Returns the raw batchInfo hash for a batch.
    # Fixed: previously interpolated undefined locals jobId/batchId.
    def get_batch job_id, batch_id
      @connection.http_get_xml("job/#{job_id}/batch/#{batch_id}")
    end

    def find_job job_id
      Job.new get_job(job_id)
    end

    def find_batch job_id, batch_id
      Batch.new get_batch(job_id, batch_id)
    end

    def create_batch job_id, data
      Batch.new add_batch(job_id, data)
    end

    # Adds a batch to a job. data may be a CSV/SOQL string, or an Array of
    # hashes that is serialized to CSV (every hash must carry every key).
    def add_batch job_id, data
      body = data

      if data.is_a?(Array)
        raise ArgumentError, "Batch data set exceeds #{Batch.max_records} record limit by #{data.length - Batch.max_records}" if data.length > Batch.max_records
        raise ArgumentError, "Batch data set is empty" if data.length < 1

        keys = data.first.keys
        body = keys.to_csv

        data.each do |item|
          item_values = keys.map { |key| item[key] }
          body += item_values.to_csv
        end
      end

      # Despite the content for a query operation batch being plain text we
      # still have to specify CSV content type per API docs.
      @connection.http_post_xml("job/#{job_id}/batch", body, "Content-Type" => "text/csv; charset=UTF-8")
    end

    # Returns an Array of Batch objects, one per batch in the job.
    def get_batch_list(job_id)
      result = @connection.http_get_xml("job/#{job_id}/batch")

      # A single-batch job parses to a Hash rather than an Array.
      if result['batchInfo'].is_a?(Array)
        result['batchInfo'].collect { |info| Batch.new(info) }
      else
        [Batch.new(result['batchInfo'])]
      end
    end

    # Retrieves the originally submitted request body for a batch.
    # Fixed: `result` was never initialized (NameError) and never returned;
    # http_get is defined on the connection, not on Client.
    # NOTE(review): the request body is the submitted CSV, not results —
    # parsing rows into BatchResult mirrors the original intent; verify.
    def get_batch_request(job_id, batch_id)
      response = @connection.http_get("job/#{job_id}/batch/#{batch_id}/request")

      result = []
      CSV.parse(response.body, :headers => true) do |row|
        result << BatchResult.new(row[0], row[1].to_b, row[2].to_b, row[3])
      end
      result
    end

    # Retrieves results for a batch. Query batches return a
    # QueryResultCollection; other operations return a BatchResultCollection.
    def get_batch_result(job_id, batch_id)
      response = @connection.http_get("job/#{job_id}/batch/#{batch_id}/result")

      # Query Result: the body is XML listing result ids to fetch.
      if response.body =~ /<.*?>/m
        result = XmlSimple.xml_in(response.body)

        if result['result'].present?
          results = get_query_result(job_id, batch_id, result['result'].first)

          collection = QueryResultCollection.new(self, job_id, batch_id, result['result'].first, result['result'])
          collection.replace(results)
          # Explicitly return the populated collection.
          collection
        end

      # Batch Result: the body is CSV of per-record outcomes.
      else
        result = BatchResultCollection.new(job_id, batch_id)

        CSV.parse(response.body, :headers => true) do |row|
          result << BatchResult.new(row[0], row[1].to_b, row[2].to_b, row[3])
        end

        result
      end
    end

    # Fetches one page of query results and returns an Array of Hashes
    # keyed by symbolized CSV headers.
    def get_query_result(job_id, batch_id, result_id)
      headers = {"Content-Type" => "text/csv; charset=UTF-8"}
      response = @connection.http_get("job/#{job_id}/batch/#{batch_id}/result/#{result_id}", headers)

      lines = response.body.lines.to_a
      headers = CSV.parse_line(lines.shift).collect { |header| header.to_sym }

      result = []

      #CSV.parse(lines.join, :headers => headers, :converters => [:all, lambda{|s| s.to_b if s.kind_of? String }]) do |row|
      CSV.parse(lines.join, :headers => headers) do |row|
        result << Hash[row.headers.zip(row.fields)]
      end

      result
    end

    ## Operations
    def delete(sobject, data)
      perform_operation(:delete, sobject, data)
    end

    def insert(sobject, data)
      perform_operation(:insert, sobject, data)
    end

    def query(sobject, data)
      perform_operation(:query, sobject, data)
    end

    def update(sobject, data)
      perform_operation(:update, sobject, data)
    end

    def upsert(sobject, external_id, data)
      perform_operation(:upsert, sobject, data, external_id)
    end

    # Runs a complete job lifecycle: create, batch, close, poll, results.
    def perform_operation(operation, sobject, data, external_id = nil, batch_size = nil)
      job = create_job(operation, sobject, :external_id_field_name => external_id)

      data.each_slice(batch_size || Batch.max_records) do |records|
        job.add_batch(records)
      end

      job.close

      # Poll until the job reaches a terminal state.
      until job.finished?
        job.refresh
        sleep 2
      end

      job.get_results
    end
  end
end
|
@@ -0,0 +1,155 @@
|
|
1
|
+
module SalesforceBulk
  # Handles authentication (SOAP login/logout) and HTTPS transport for the
  # Bulk API, tracking the session id and instance host between calls.
  class Connection
    # If true, print API debugging information to stdout. Defaults to false.
    attr_accessor :debugging

    # The host to use for authentication. Defaults to login.salesforce.com.
    attr_reader :host

    # The instance host to use for API calls. Determined from login response.
    attr_reader :instance_host

    # The Salesforce password
    attr_reader :password

    # The Salesforce security token
    attr_reader :token

    # The Salesforce username
    attr_reader :username

    # The API version the client is using. Defaults to 24.0.
    attr_reader :version


    # Defaults
    @@host = 'login.salesforce.com'
    @@version = 24.0
    @@debugging = false
    @@api_path_prefix = "/services/async/"

    # options - a Hash of credentials/settings, or a path to a YAML file
    # containing them. Valid keys: :username, :password, :token,
    # :debugging, :host, :version.
    def initialize options
      if options.is_a?(String)
        options = YAML.load_file(options)
        options.symbolize_keys!
      end

      options.assert_valid_keys(:username, :password, :token, :debugging, :host, :version)

      @username = options[:username]
      # Salesforce requires the security token appended to the password.
      @password = "#{options[:password]}#{options[:token]}"
      @token = options[:token]
      @host = options[:host] || @@host
      @version = options[:version] || @@version
      @debugging = options[:debugging] || @@debugging
    end

    # Logs in via the SOAP partner API and captures the session id and
    # instance host for subsequent Bulk API calls. Returns the login result.
    # NOTE(review): username/password are interpolated into XML unescaped;
    # credentials containing &, < or > would produce invalid XML — consider
    # escaping before interpolation.
    def connect
      xml = '<?xml version="1.0" encoding="utf-8"?>'
      xml += '<env:Envelope xmlns:xsd="http://www.w3.org/2001/XMLSchema"'
      xml += ' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"'
      xml += ' xmlns:env="http://schemas.xmlsoap.org/soap/envelope/">'
      xml += ' <env:Body>'
      xml += ' <n1:login xmlns:n1="urn:partner.soap.sforce.com">'
      xml += " <n1:username>#{@username}</n1:username>"
      xml += " <n1:password>#{@password}</n1:password>"
      xml += " </n1:login>"
      xml += " </env:Body>"
      xml += "</env:Envelope>"

      data = http_post_xml("/services/Soap/u/#{@version}", xml, 'Content-Type' => 'text/xml', 'SOAPAction' => 'login')
      result = data['Body']['loginResponse']['result']

      @session_id = result['sessionId']
      @server_url = result['serverUrl']
      @instance_id = instance_id(@server_url)
      @instance_host = "#{@instance_id}.salesforce.com"

      # @@api_path_prefix already ends with '/'; the original interpolation
      # inserted a second slash ("/services/async//24.0/").
      @api_path_prefix = "#{@@api_path_prefix}#{@version}/"

      result
    end

    # Logs out via the SOAP partner API and clears all session state.
    def disconnect
      xml = '<?xml version="1.0" encoding="utf-8"?>'
      xml += '<env:Envelope xmlns:xsd="http://www.w3.org/2001/XMLSchema"'
      xml += ' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"'
      xml += ' xmlns:env="http://schemas.xmlsoap.org/soap/envelope/">'
      xml += ' <env:Body>'
      xml += ' <n1:logout xmlns:n1="urn:partner.soap.sforce.com" />'
      xml += ' </env:Body>'
      xml += '</env:Envelope>'

      result = http_post_xml("/services/Soap/u/#{@version}", xml, 'Content-Type' => 'text/xml', 'SOAPAction' => 'logout')

      @session_id = nil
      @server_url = nil
      @instance_id = nil
      @instance_host = nil
      @api_path_prefix = nil

      result
    end

    # True once a session id has been captured by #connect.
    def connected?
      !!@session_id
    end

    # POSTs to the API. Before login, requests go to the auth host with the
    # path untouched; after login, to the instance host under the
    # version-specific async API prefix with the session header set.
    def http_post(path, body, headers={})
      headers = {'Content-Type' => 'application/xml'}.merge(headers)

      if connected?
        headers['X-SFDC-Session'] = @session_id
        host = @instance_host
        path = "#{@api_path_prefix}#{path}"
      else
        host = @host
      end

      response = https_request(host).post(path, body, headers)

      if response.is_a?(Net::HTTPSuccess)
        response
      else
        raise SalesforceError.new(response)
      end
    end

    # GETs from the instance host under the async API prefix.
    def http_get(path, headers={})
      path = "#{@api_path_prefix}#{path}"

      headers = {'Content-Type' => 'application/xml'}.merge(headers)

      headers['X-SFDC-Session'] = @session_id if @session_id

      response = https_request(@instance_host).get(path, headers)

      if response.is_a?(Net::HTTPSuccess)
        response
      else
        raise SalesforceError.new(response)
      end
    end

    # POSTs and parses the XML response into a Hash.
    def http_post_xml(path, body, headers = {})
      XmlSimple.xml_in(http_post(path, body, headers).body, :ForceArray => false)
    end

    # GETs and parses the XML response into a Hash.
    def http_get_xml(path, headers = {})
      XmlSimple.xml_in(http_get(path, headers).body, :ForceArray => false)
    end

    # Builds an HTTPS client for the given host.
    # Security fix: the original used OpenSSL::SSL::VERIFY_NONE, which
    # disables certificate validation and allows man-in-the-middle attacks
    # on the login credentials and session.
    def https_request(host)
      req = Net::HTTP.new(host, 443)
      req.use_ssl = true
      req.verify_mode = OpenSSL::SSL::VERIFY_PEER
      req
    end

    private
    # Extracts the instance subdomain (e.g. "na1") from the server URL
    # returned at login. ('.' is escaped; the original regex treated it as
    # a wildcard.)
    def instance_id(url)
      url.match(/:\/\/([a-zA-Z0-9-]{2,})\.salesforce/)[1]
    end
  end
end
|