salesforce_bulk_api 0.0.11 → 0.0.12

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA1:
- metadata.gz: 99c3f558cf30c25184d78f67c158fac104d3deca
- data.tar.gz: 4298337ac2a86b50e959eee7c9707a8de423778d
+ metadata.gz: ec07d0cc7d5f0c5795459e444948d45ed308dde1
+ data.tar.gz: 0faae2875e7ec75bf9a8bb8dfe54025b607cedb6
  SHA512:
- metadata.gz: 035074901c3cbc3aeb06cb47b008c094e1e02b62b0c784021d9b434e8608c8d9bd89472580bf993397420c5849122821c3df3c540ac1d420f97d57178c5b74a4
- data.tar.gz: 6b804db79f5ca0b6aa8be4233f72d842c08ab0e1465edfbc606c25615d33a3a5233419cae594b990fcbcefc7c4660afef43aca6b94417f907f7748e18dbe5274
+ metadata.gz: c68f01bc8d22feb69025bed933e52049e7227e70a8df2e4496ac89f224e6a7571199510ac2a5c61e9764e8f828a6d776a8b05bd78d88560f8baf1bde5ccc18a2
+ data.tar.gz: d91276c381a0f188f105af7442a95ef5c4e75f399f4d55a37906b3e5c1bfa0f6bcaecc6c2d8fde967a10ac8ea15e01913b71a426f7e71a216e18023d9e7d7ecb
data/.gitignore CHANGED
@@ -5,3 +5,4 @@ Gemfile.lock
  .ruby-version
  pkg/*
  auth_credentials.yml
+ *.swp
data/README.md CHANGED
@@ -50,7 +50,7 @@ OR
  salesforce = SalesforceBulkApi::Api.new(client)


- Sample operations:
+ ### Sample operations:

  # Insert/Create
  # Add as many fields per record as needed.
@@ -82,21 +82,26 @@ Sample operations:
  # Query
  res = salesforce.query("Account", "select id, name, createddate from Account limit 3") # We just need to pass the sobject name and the query string

- Helpful methods:
+ ### Helpful methods:

  # Check status of a job via #job_from_id
  job = salesforce.job_from_id('a00A0001009zA2m') # Returns a SalesforceBulkApi::Job instance
  puts "status is: #{job.check_job_status.inspect}"

-
- Listening to events:
+ ### Listening to events:

  # A job is created
  # Useful when you need to store the job_id before any work begins, then if you fail during a complex load scenario, you can wait for your
  # previous job(s) to finish.
- salesforce.on_job_created do |job|
- puts "Job #{job.job_id} created!"
- end
+ salesforce.on_job_created do |job|
+ puts "Job #{job.job_id} created!"
+ end
+
+ ### Throttling API calls:
+
+ # By default, this gem (and maybe your app driving it) will query job/batch statuses at an unbounded rate. We
+ # can fix that, e.g.:
+ salesforce.connection.set_status_throttle(30) # only check status of individual jobs/batches every 30 seconds

  ## Installation

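The new README sections boil down to a short flow. A minimal sketch, assuming `client` is an already-authenticated Salesforce client (e.g. the Restforce client built in the updated specs further down):

```ruby
salesforce = SalesforceBulkApi::Api.new(client)

# Record job IDs as soon as jobs are created, so an interrupted load can be resumed later.
salesforce.on_job_created do |job|
  puts "Job #{job.job_id} created!"
end

# Check the status of individual jobs/batches at most once every 30 seconds.
salesforce.connection.set_status_throttle(30)

result = salesforce.query('Account', 'select id, name, createddate from Account limit 3')
puts result['batches'].inspect
```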
data/example_auth_credentials.yml CHANGED
@@ -3,4 +3,5 @@ salesforce:
  client_secret: client_secret_here
  user: sf_user@example.com
  passwordandtoken: passandtokenhere
- test_account_id: 0013000000ymMBh
+ test_account_id: 0013000000ymMBh
+ host: 'login.salesforce.com' # use test.salesforce.com if it is a sandbox
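The new host key lets the same credentials file target either production or a sandbox. A rough sketch of consuming it, mirroring the Restforce setup in the spec changes further down (names are illustrative):

```ruby
require 'yaml'
require 'restforce'

sfdc = YAML.load_file('auth_credentials.yml')['salesforce']

client = Restforce.new(
  username:      sfdc['user'],
  password:      sfdc['passwordandtoken'],
  client_id:     sfdc['client_id'],
  client_secret: sfdc['client_secret'],
  host:          sfdc['host'])  # 'login.salesforce.com', or 'test.salesforce.com' for a sandbox
client.authenticate!
```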
data/lib/salesforce_bulk_api.rb CHANGED
@@ -1,44 +1,59 @@
  require 'rubygems'
  require 'bundler'
  Bundler.require()
- require "salesforce_bulk_api/version"
+ require 'salesforce_bulk_api/version'
  require 'net/https'
  require 'xmlsimple'
  require 'csv'
+ require 'salesforce_bulk_api/concerns/throttling'
  require 'salesforce_bulk_api/job'
  require 'salesforce_bulk_api/connection'

  module SalesforceBulkApi

  class Api
+ attr_reader :connection

  @@SALESFORCE_API_VERSION = '32.0'

  def initialize(client)
- @connection = SalesforceBulkApi::Connection.new(@@SALESFORCE_API_VERSION,client)
+ @connection = SalesforceBulkApi::Connection.new(@@SALESFORCE_API_VERSION, client)
  @listeners = { job_created: [] }
  end

  def upsert(sobject, records, external_field, get_response = false, send_nulls = false, no_null_list = [], batch_size = 10000, timeout = 1500)
- self.do_operation('upsert', sobject, records, external_field, get_response, timeout, batch_size, send_nulls, no_null_list)
+ do_operation('upsert', sobject, records, external_field, get_response, timeout, batch_size, send_nulls, no_null_list)
  end

  def update(sobject, records, get_response = false, send_nulls = false, no_null_list = [], batch_size = 10000, timeout = 1500)
- self.do_operation('update', sobject, records, nil, get_response, timeout, batch_size, send_nulls, no_null_list)
+ do_operation('update', sobject, records, nil, get_response, timeout, batch_size, send_nulls, no_null_list)
  end

  def create(sobject, records, get_response = false, send_nulls = false, batch_size = 10000, timeout = 1500)
- self.do_operation('insert', sobject, records, nil, get_response, timeout, batch_size, send_nulls)
+ do_operation('insert', sobject, records, nil, get_response, timeout, batch_size, send_nulls)
  end

  def delete(sobject, records, get_response = false, batch_size = 10000, timeout = 1500)
- self.do_operation('delete', sobject, records, nil, get_response, timeout, batch_size)
+ do_operation('delete', sobject, records, nil, get_response, timeout, batch_size)
  end

  def query(sobject, query, batch_size = 10000, timeout = 1500)
- self.do_operation('query', sobject, query, nil, true, timeout, batch_size)
+ do_operation('query', sobject, query, nil, true, timeout, batch_size)
  end

+ def counters
+ {
+ http_get: @connection.counters[:get],
+ http_post: @connection.counters[:post],
+ upsert: get_counters[:upsert],
+ update: get_counters[:update],
+ create: get_counters[:create],
+ delete: get_counters[:delete],
+ query: get_counters[:query]
+ }
+ end
+
+
  ##
  # Allows you to attach a listener that accepts the created job (which has a useful #job_id field). This is useful
  # for recording a job ID persistently before you begin batch work (i.e. start modifying the salesforce database),
@@ -52,9 +67,9 @@ module SalesforceBulkApi
  SalesforceBulkApi::Job.new(job_id: job_id, connection: @connection)
  end

- #private
-
  def do_operation(operation, sobject, records, external_field, get_response, timeout, batch_size, send_nulls = false, no_null_list = [])
+ count operation.to_sym
+

  job = SalesforceBulkApi::Job.new(operation: operation, sobject: sobject, records: records, external_field: external_field, connection: @connection)

@@ -64,5 +79,15 @@ module SalesforceBulkApi
  response.merge!({'batches' => job.get_job_result(get_response, timeout)}) if get_response == true
  response
  end
+
+ private
+ def get_counters
+ @counters ||= Hash.new(0)
+ end
+
+ def count(name)
+ get_counters[name] += 1
+ end
+
  end
  end
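The new `counters` method rolls the per-operation counts kept in `do_operation` together with the HTTP counts kept by the connection. A usage sketch, assuming an authenticated `client` (the shape mirrors the new specs at the end of this diff):

```ruby
api = SalesforceBulkApi::Api.new(client)

api.counters
# => {http_get: 0, http_post: 0, upsert: 0, update: 0, create: 0, delete: 0, query: 0}

api.query('Account', "SELECT Id FROM Account LIMIT 1")

api.counters[:query]     # incremented once per #query call (via `count operation.to_sym`)
api.counters[:http_get]  # incremented by the connection for each HTTP GET it performs
```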
data/lib/salesforce_bulk_api/concerns/throttling.rb ADDED
@@ -0,0 +1,60 @@
+ module SalesforceBulkApi::Concerns
+   module Throttling
+
+     def throttles
+       @throttles.dup
+     end
+
+     def add_throttle(&throttling_callback)
+       @throttles ||= []
+       @throttles << throttling_callback
+     end
+
+     def set_status_throttle(limit_seconds)
+       set_throttle_limit_in_seconds(limit_seconds, [:http_method, :path], ->(details) { details[:path] == :get })
+     end
+
+     def set_throttle_limit_in_seconds(limit_seconds, throttle_by_keys, only_if)
+       add_throttle do |details|
+         limit_log = get_limit_log(Time.now - limit_seconds)
+         key = extract_constraint_key_from(details, throttle_by_keys)
+         last_request = limit_log[key]
+
+         if !last_request.nil? && only_if.call(details)
+           seconds_since_last_request = Time.now.to_f - last_request.to_f
+           need_to_wait_seconds = limit_seconds - seconds_since_last_request
+           sleep(need_to_wait_seconds) if need_to_wait_seconds > 0
+         end
+
+         limit_log[key] = Time.now
+       end
+     end
+
+     private
+
+     def extract_constraint_key_from(details, throttle_by_keys)
+       hash = {}
+       throttle_by_keys.each { |k| hash[k] = details[k] }
+       hash
+     end
+
+     def get_limit_log(prune_older_than)
+       @limits ||= Hash.new(0)
+
+       @limits.delete_if do |k, v|
+         v < prune_older_than
+       end
+
+       @limits
+     end
+
+     def throttle(details={})
+       (@throttles || []).each do |callback|
+         args = [details]
+         args = args[0..callback.arity]
+         callback.call(*args)
+       end
+     end
+
+   end
+ end
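`set_status_throttle` is just one canned callback; `add_throttle` accepts arbitrary blocks, each invoked with the details hash (`:http_method`, `:path`) that the connection passes to `throttle` before every request. A hypothetical logging hook, for illustration only (assumes an authenticated `client` as above):

```ruby
salesforce = SalesforceBulkApi::Api.new(client)

# Log every Bulk API request the connection is about to make.
salesforce.connection.add_throttle do |details|
  puts "[bulk] #{details[:http_method].to_s.upcase} #{details[:path]}"
end

# The README's helper, built on set_throttle_limit_in_seconds above.
salesforce.connection.set_status_throttle(30)
```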
data/lib/salesforce_bulk_api/connection.rb CHANGED
@@ -2,6 +2,7 @@ module SalesforceBulkApi
  require 'timeout'

  class Connection
+ include Concerns::Throttling

  @@XML_HEADER = '<?xml version="1.0" encoding="utf-8" ?>'
  @@API_VERSION = nil
@@ -20,8 +21,6 @@ require 'timeout'
  login()
  end

- #private
-
  def login()
  client_type = @client.class.to_s
  case client_type
@@ -39,11 +38,13 @@ require 'timeout'
  def post_xml(host, path, xml, headers)
  host = host || @@INSTANCE_HOST
  if host != @@LOGIN_HOST # Not login, need to add session id to header
- headers['X-SFDC-Session'] = @session_id;
+ headers['X-SFDC-Session'] = @session_id
  path = "#{@@PATH_PREFIX}#{path}"
  end
  i = 0
  begin
+ count :post
+ throttle(http_method: :post, path: path)
  https(host).post(path, xml, headers).body
  rescue
  i += 1
@@ -63,6 +64,9 @@ require 'timeout'
  if host != @@LOGIN_HOST # Not login, need to add session id to header
  headers['X-SFDC-Session'] = @session_id;
  end
+
+ count :get
+ throttle(http_method: :get, path: path)
  https(host).get(path, headers).body
  end

@@ -74,11 +78,28 @@ require 'timeout'
  end

  def parse_instance()
- @instance=@server_url.match(/https:\/\/[a-z]{2}[0-9]{1,2}/).to_s.gsub("https://","")
+ @instance = @server_url.match(/https:\/\/[a-z]{2}[0-9]{1,2}/).to_s.gsub("https://","")
  @instance = @server_url.split(".salesforce.com")[0].split("://")[1] if @instance.nil? || @instance.empty?
  return @instance
  end

+ def counters
+ {
+ get: get_counters[:get],
+ post: get_counters[:post]
+ }
+ end
+
+ private
+
+ def get_counters
+ @counters ||= Hash.new(0)
+ end
+
+ def count(http_method)
+ get_counters[http_method] += 1
+ end
+
  end

  end
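`Api#counters` (above) reads these raw HTTP counts through the new `Connection#counters`; they can also be inspected directly. A small sketch, again assuming an authenticated `client`:

```ruby
api = SalesforceBulkApi::Api.new(client)

api.connection.counters        # => {get: 0, post: 0}
api.create('Account', [{'Name' => 'Test Account'}])
api.connection.counters[:post] # bumped by `count :post` before each HTTP POST
api.connection.counters[:get]  # bumped by `count :get` whenever status or results are fetched
```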
data/lib/salesforce_bulk_api/job.rb CHANGED
@@ -5,13 +5,13 @@ module SalesforceBulkApi

  class SalesforceException < StandardError; end

- def initialize(operation: nil, sobject: nil, records: nil, external_field: nil, connection: nil, job_id: nil)
- @job_id = job_id
- @operation = operation
- @sobject = sobject
- @external_field = external_field
- @records = records
- @connection = connection
+ def initialize(args)
+ @job_id = args[:job_id]
+ @operation = args[:operation]
+ @sobject = args[:sobject]
+ @external_field = args[:external_field]
+ @records = args[:records]
+ @connection = args[:connection]
  @batch_ids = []
  @XML_HEADER = '<?xml version="1.0" encoding="utf-8" ?>'
  end
@@ -69,7 +69,7 @@ module SalesforceBulkApi

  def add_batches
  raise 'Records must be an array of hashes.' unless @records.is_a? Array
- keys = @records.reduce({}) {|h,pairs| pairs.each {|k,v| (h[k] ||= []) << v}; h}.keys
+ keys = @records.reduce({}) {|h, pairs| pairs.each {|k, v| (h[k] ||= []) << v}; h}.keys
  @records_dup = @records.clone


@@ -103,7 +103,6 @@ module SalesforceBulkApi
  if k.is_a?(Hash)
  xml += build_sobject(k)
  elsif data[k] != :type
- #xml += "<type>#{data[:type]}</type>"
  xml += "<#{k}>#{data[k]}</#{k}>"
  end
  end
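The constructor change from keyword arguments to a single options hash is transparent to existing callers, since `Job.new(job_id: ..., connection: ...)` passes one hash either way. For example, the `job_from_id` path from the README still works as before:

```ruby
job = SalesforceBulkApi::Job.new(
  job_id:     'a00A0001009zA2m',       # placeholder ID from the README example
  connection: salesforce.connection)

puts job.check_job_status.inspect
```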
data/lib/salesforce_bulk_api/version.rb CHANGED
@@ -1,3 +1,3 @@
  module SalesforceBulkApi
- VERSION = '0.0.11'
+ VERSION = '0.0.12'
  end
data/salesforce_bulk_api.gemspec CHANGED
@@ -14,15 +14,14 @@ Gem::Specification.new do |s|

  s.rubyforge_project = 'salesforce_bulk_api'

- s.add_dependency(%q<json>, ['>= 0'])
- s.add_dependency(%q<xml-simple>, ['>= 0'])
-
+ s.add_dependency('json', ['>= 0'])
+ s.add_dependency('xml-simple', ['>= 0'])
+
  s.add_development_dependency 'rspec'
- s.add_development_dependency('webmock', ['~> 1.13'])
- s.add_development_dependency('vcr', ['~> 2.5'])
- s.add_development_dependency 'databasedotcom'
-
-
+ s.add_development_dependency 'restforce', '~> 1.5.1'
+ s.add_development_dependency 'rake', '~> 10.4.2'
+ s.add_development_dependency 'pry'
+
  s.files = `git ls-files`.split("\n")
  s.test_files = `git ls-files -- {test,spec,features}/*`.split("\n")
  s.executables = `git ls-files -- bin/*`.split("\n").map{ |f| File.basename(f) }
data/spec/salesforce_bulk_api_spec.rb CHANGED
@@ -1,14 +1,20 @@
  require 'spec_helper'
  require 'yaml'
- require 'databasedotcom'
+ require 'restforce'

  describe SalesforceBulkApi do

  before :each do
- auth_hash = YAML.load(File.read('auth_credentials.yml'))
- @sf_client = Databasedotcom::Client.new(:client_id => auth_hash['salesforce']['client_id'],
- :client_secret => auth_hash['salesforce']['client_secret'])
- @sf_client.authenticate(:username => auth_hash['salesforce']['user'], :password => auth_hash['salesforce']['passwordandtoken'])
+ auth_hash = YAML.load_file('auth_credentials.yml')
+ sfdc_auth_hash = auth_hash['salesforce']
+
+ @sf_client = Restforce.new(
+ username: sfdc_auth_hash['user'],
+ password: sfdc_auth_hash['passwordandtoken'],
+ client_id: sfdc_auth_hash['client_id'],
+ client_secret: sfdc_auth_hash['client_secret'],
+ host: sfdc_auth_hash['host'])
+ @sf_client.authenticate!

  @account_id = auth_hash['salesforce']['test_account_id']

@@ -22,7 +28,7 @@ describe SalesforceBulkApi do
  describe 'upsert' do

  context 'when not passed get_result' do
- it "doesn't return the batches array" do
+ it "doesn't return the batches array" do
  res = @api.upsert('Account', [{:Id => @account_id, :Website => 'www.test.com'}], 'Id')
  res['batches'].should be_nil
  end
@@ -139,13 +145,10 @@ describe SalesforceBulkApi do
  res['batches'][0]['response'].length.should > 1
  res['batches'][0]['response'][0]['Id'].should_not be_nil
  end
+
  context 'and there are multiple batches' do
- it 'returns the query results in a merged hash' do
- pending 'need dev to create > 10k records in dev organization'
- res = @api.query('Account', "SELECT id, Name From Account WHERE Name LIKE 'Test%'")
- res['batches'][0]['response'].length.should > 1
- res['batches'][0]['response'][0]['Id'].should_not be_nil
- end
+ # need dev to create > 10k records in dev organization
+ it 'returns the query results in a merged hash'
  end
  end

@@ -165,4 +168,26 @@ describe SalesforceBulkApi do

  end

+ describe 'counters' do
+ context 'when read operations are called' do
+ it 'increments operation count and http GET count' do
+ @api.counters[:http_get].should eq 0
+ @api.counters[:query].should eq 0
+ @api.query('Account', "SELECT Website, Phone From Account WHERE Id = '#{@account_id}'")
+ @api.counters[:http_get].should eq 1
+ @api.counters[:query].should eq 1
+ end
+ end
+
+ context 'when update operations are called' do
+ it 'increments operation count and http POST count' do
+ @api.counters[:http_post].should eq 0
+ @api.counters[:update].should eq 0
+ @api.update('Account', [{:Id => @account_id, :Website => 'abc123', :Phone => '5678'}], true)
+ @api.counters[:http_post].should eq 1
+ @api.counters[:update].should eq 1
+ end
+ end
+ end
+
  end
data/spec/spec_helper.rb CHANGED
@@ -1,16 +1,8 @@
  require 'rubygems'
  require 'bundler/setup'
- #require 'webmock/rspec'
- #require 'vcr'
  require 'salesforce_bulk_api'

  RSpec.configure do |c|
  c.filter_run :focus => true
  c.run_all_when_everything_filtered = true
  end
-
- # enable this and record the test requests using a SF developer org.
- # VCR.configure do |c|
- # c.cassette_library_dir = 'spec/cassettes'
- # c.hook_into :webmock
- # end
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: salesforce_bulk_api
  version: !ruby/object:Gem::Version
- version: 0.0.11
+ version: 0.0.12
  platform: ruby
  authors:
  - Yatish Mehta
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2015-01-10 00:00:00.000000000 Z
+ date: 2015-03-31 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  name: json
@@ -53,35 +53,35 @@ dependencies:
  - !ruby/object:Gem::Version
  version: '0'
  - !ruby/object:Gem::Dependency
- name: webmock
+ name: restforce
  requirement: !ruby/object:Gem::Requirement
  requirements:
  - - "~>"
  - !ruby/object:Gem::Version
- version: '1.13'
+ version: 1.5.1
  type: :development
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
  requirements:
  - - "~>"
  - !ruby/object:Gem::Version
- version: '1.13'
+ version: 1.5.1
  - !ruby/object:Gem::Dependency
- name: vcr
+ name: rake
  requirement: !ruby/object:Gem::Requirement
  requirements:
  - - "~>"
  - !ruby/object:Gem::Version
- version: '2.5'
+ version: 10.4.2
  type: :development
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
  requirements:
  - - "~>"
  - !ruby/object:Gem::Version
- version: '2.5'
+ version: 10.4.2
  - !ruby/object:Gem::Dependency
- name: databasedotcom
+ name: pry
  requirement: !ruby/object:Gem::Requirement
  requirements:
  - - ">="
@@ -108,6 +108,7 @@ files:
  - Rakefile
  - example_auth_credentials.yml
  - lib/salesforce_bulk_api.rb
+ - lib/salesforce_bulk_api/concerns/throttling.rb
  - lib/salesforce_bulk_api/connection.rb
  - lib/salesforce_bulk_api/job.rb
  - lib/salesforce_bulk_api/version.rb