dataloaderb 0.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- data/.gitignore +5 -0
- data/Gemfile +8 -0
- data/Gemfile.lock +24 -0
- data/README.md +60 -0
- data/Rakefile +9 -0
- data/dataloaderb.gemspec +21 -0
- data/lib/dataloaderb.rb +2 -0
- data/lib/dataloaderb/conf_creator.rb +44 -0
- data/lib/dataloaderb/process_definition.rb +42 -0
- data/lib/dataloaderb/process_runner.rb +70 -0
- data/lib/dataloaderb/support.rb +8 -0
- data/lib/dataloaderb/templates/process-conf.xml.erb +18 -0
- data/lib/dataloaderb/version.rb +5 -0
- data/spec/dataloaderb/conf_creator_spec.rb +41 -0
- data/spec/dataloaderb/process_definition_spec.rb +49 -0
- data/spec/dataloaderb/process_runner_spec.rb +70 -0
- data/spec/dataloaderb/support_spec.rb +17 -0
- data/spec/fixtures/bin/test.sh +2 -0
- data/spec/fixtures/processes/full_sample_proc_one.yml +26 -0
- data/spec/fixtures/processes/full_sample_proc_two.yml +26 -0
- data/spec/fixtures/processes/partial_sample_process_one.yml +13 -0
- data/spec/fixtures/processes/partial_sample_process_two.yml +13 -0
- data/spec/fixtures/processes/partial_sample_shared.yml +16 -0
- data/spec/spec_helper.rb +8 -0
- metadata +98 -0
data/Gemfile
ADDED
data/Gemfile.lock
ADDED
@@ -0,0 +1,24 @@
+PATH
+  remote: .
+  specs:
+    dataloaderb (0.0.1)
+
+GEM
+  remote: http://rubygems.org/
+  specs:
+    diff-lcs (1.1.2)
+    rspec (2.2.0)
+      rspec-core (~> 2.2)
+      rspec-expectations (~> 2.2)
+      rspec-mocks (~> 2.2)
+    rspec-core (2.2.1)
+    rspec-expectations (2.2.0)
+      diff-lcs (~> 1.1.2)
+    rspec-mocks (2.2.0)
+
+PLATFORMS
+  ruby
+
+DEPENDENCIES
+  dataloaderb!
+  rspec
data/README.md
ADDED
@@ -0,0 +1,60 @@
+dataloaderb: Create and Run Apex Data Loader Processes on Windows
+=================================================================
+
+dataloaderb is a library designed to help create and run Apex Data Loader processes without messing with tedious XML configuration.
+
+Specify your processes via clean Yaml files and point the `ProcessRunner` at them. The runner will create the XML on the fly and pass the appropriate options to the Apex Data Loader.
+
+Extend the default `ProcessRunner` to do additional logging, reporting, or cleanup!
+
+More info coming soon.
+
+Example
+-------
+
+This is just a loose spec of what the code might could should look like; things may change!
+
+`runner.rb`:
+
+    # run several processes via a block
+    ProcessRunner.new("C:/salesforce/dataloader/bin") do |runner|
+      runner.run "processes/firstUpsert.yml"
+      runner.run "processes/secondUpsert.yml"
+      runner.run "processes/thirdUpsert.yml"
+    end
+
+    # or run without a block
+    runner = ProcessRunner.new("C:/salesforce/dataloader/bin")
+    ['firstUpsert.yml', 'secondUpsert.yml', 'thirdUpsert.yml'].each do |process|
+      runner.run "processes/#{process}"
+    end
+
+`processes/firstUpsert.yml`:
+
+    id: 'firstUpsert'
+    description: 'Upsert of some data somewhere'
+    properties:
+      # endpoint config
+      sfdc.endpoint: 'https://www.salesforce.com'
+      sfdc.username: 'xxxxxxxxxx@xxxxxxxxxx.xxx'
+      sfdc.password: 'xxxxxxxxxxxxxxxxxxxxxxxxx'
+      process.encryptionKeyFile: 'C:/salesforce/dataloader/enc_pass.key'
+
+      # operation config
+      sfdc.timeoutSecs: '600'
+      sfdc.loadBatchSize: '100'
+      sfdc.externalIdField: 'Custom_Field__c'
+      sfdc.entity: 'Account'
+      process.operation: 'upsert'
+      process.mappingFile: '//shared/salesforce/upserts/first.Mapping.sdl'
+      dataAccess.name: '//shared/salesforce/upserts/first.csv'
+      dataAccess.type: 'csvRead'
+
+      # logging config
+      sfdc.debugMessages: 'true'
+      process.statusOutputDirectory: '//shared/salesforce/upserts/first/lastrun'
+
+      # misc config
+      process.enableLastRunOutput: 'false'
+      process.initialLastRunDate: '2010-01-01T00:00:00.000-0800'
+
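The README's block form is aspirational: the released `ProcessRunner#initialize` never yields, and the class actually lives under the `Dataloaderb` namespace. A minimal sketch of the equivalent calls against the code in this release, using the `:merge` and `:tmp_dir` options the sources below read (the paths here are placeholders):

    require 'dataloaderb/process_runner'

    # :merge points at a shared YAML of common properties (see the
    # partial_sample_shared.yml fixture further down); :tmp_dir controls where
    # the generated process-conf.xml is written before process.bat runs.
    runner = Dataloaderb::ProcessRunner.new(
      "C:/salesforce/dataloader/bin",
      :merge   => "processes/shared.yml",
      :tmp_dir => "C:/salesforce/tmp"
    )
    runner.run "processes/firstUpsert.yml", "processes/secondUpsert.yml"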
data/Rakefile
ADDED
@@ -0,0 +1,9 @@
+require 'bundler'
+require 'rspec/core/rake_task'
+
+Bundler::GemHelper.install_tasks
+RSpec::Core::RakeTask.new(:spec) do |t|
+  t.rspec_opts = ['--color', '-f progress', '-r ./spec/spec_helper.rb']
+  t.pattern = 'spec/**/*_spec.rb'
+  t.fail_on_error = false
+end
data/dataloaderb.gemspec
ADDED
@@ -0,0 +1,21 @@
+# -*- encoding: utf-8 -*-
+$:.push File.expand_path("../lib", __FILE__)
+require "dataloaderb/version"
+
+Gem::Specification.new do |s|
+  s.name = "dataloaderb"
+  s.version = Dataloaderb::Version::STRING
+  s.platform = Gem::Platform::RUBY
+  s.license = 'MIT'
+  s.authors = ["Brandon Tilley"]
+  s.email = ["brandon.tilley@fresno.edu"]
+  s.homepage = "https://github.com/FPU/dataloaderb"
+  s.summary = %q{Easily create, run, and extend Apex Data Loader processes on Windows via Ruby}
+  s.description = %q{Easily create, run, and extend Apex Data Loader processes on Windows via Ruby}
+  s.requirements << 'Ruby on Windows (since this is the only supported platform for the Apex Data Loader)'
+
+  s.files = `git ls-files`.split("\n")
+  s.test_files = `git ls-files -- {test,spec,features}/*`.split("\n")
+  s.executables = `git ls-files -- bin/*`.split("\n").map{ |f| File.basename(f) }
+  s.require_paths = ["lib"]
+end
data/lib/dataloaderb.rb
ADDED
data/lib/dataloaderb/conf_creator.rb
ADDED
@@ -0,0 +1,44 @@
+require 'dataloaderb/process_definition'
+require 'dataloaderb/support'
+require 'erb'
+require 'fileutils'
+require 'tmpdir'
+require 'yaml'
+
+module Dataloaderb
+  class ConfCreator
+    attr_reader :processes
+
+    # Create a new instance of a ConfCreator
+    def initialize(yamls, opts = {})
+      @processes = {}
+      @opts = opts
+      build_process_definitions(yamls)
+    end
+
+    def build_process_definitions(yamls)
+      yamls.each do |yaml|
+        if @opts[:merge].nil? || @opts[:merge].empty?
+          proc_def = Dataloaderb::ProcessDefinition.new(yaml)
+        else
+          proc_def = Dataloaderb::ProcessDefinition.new(yaml, @opts[:merge])
+        end
+        process_name = proc_def.id
+        @processes[process_name] = proc_def
+      end
+    end
+
+    # Return the text for an Apex Data Loader process-conf.xml file
+    def to_xml
+      # TODO: Don't unindent <%%
+      erb = ERB.new File.new(File.expand_path('templates/process-conf.xml.erb', File.dirname(__FILE__))).readlines.join.gsub(/^\s+<%/, "<%"), nil, '<>'
+      erb.result(get_binding)
+    end
+
+    private
+
+    def get_binding
+      binding
+    end
+  end
+end
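Taken on its own, `ConfCreator` is the piece that turns one or more process YAML files into the `process-conf.xml` text the Data Loader consumes. A minimal sketch, assuming it is run from the gem's root directory and the fixture files from `spec/fixtures/processes` (reproduced later in this diff) exist:

    # Put the gem's lib/ on the load path and build a configuration from two
    # process definitions; keys of the processes hash come from each YAML's id.
    $LOAD_PATH.unshift 'lib'
    require 'dataloaderb/conf_creator'

    creator = Dataloaderb::ConfCreator.new([
      'spec/fixtures/processes/full_sample_proc_one.yml',
      'spec/fixtures/processes/full_sample_proc_two.yml'
    ])

    creator.processes.keys                           # => ["firstUpsert", "secondUpsert"]
    File.write('process-conf.xml', creator.to_xml)   # Spring-beans XML consumed by process.bat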
data/lib/dataloaderb/process_definition.rb
ADDED
@@ -0,0 +1,42 @@
+module Dataloaderb
+  class ProcessDefinition
+
+    attr_reader :id
+    attr_reader :description
+    attr_reader :entries
+
+    # Create a new instance of a ConfCreator
+    def initialize(yaml, merge = nil)
+      @id = ''
+      @description = ''
+      @entries = {}
+      load_yaml(yaml, merge)
+    end
+
+    def set(key, value)
+      @entries[key] = value
+    end
+
+    def get(key)
+      @entries[key]
+    end
+
+    # Load a process definition from a Yaml file
+    def load_yaml(yaml_file, merge = nil)
+      raise ArgumentError, "Cannot find file #{yaml_file}" unless File.exist?(yaml_file)
+      raise ArgumentError, "Cannot find file #{merge}" unless merge.nil? || File.exist?(merge)
+      unless merge.nil?
+        merge_data = YAML.load_file(merge)
+        merge_data.each do |key, value|
+          self.set(key, value)
+        end
+      end
+      proc = YAML.load_file(yaml_file)
+      @id = proc['id']
+      @description = proc['description']
+      proc['properties'].each do |key, value|
+        self.set(key, value)
+      end
+    end
+  end
+end
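The optional second argument loads a shared YAML of top-level properties first; the per-process file (whose keys live under `properties:`) is then applied over it. A sketch of that merge behaviour against the partial fixtures shown later in this diff:

    # Illustrative only; assumes the partial_* fixtures are present.
    require 'yaml'   # ProcessDefinition calls YAML.load_file but does not require 'yaml' itself
    require 'dataloaderb/process_definition'

    defn = Dataloaderb::ProcessDefinition.new(
      'spec/fixtures/processes/partial_sample_process_one.yml',   # per-process keys, under properties:
      'spec/fixtures/processes/partial_sample_shared.yml'         # shared top-level keys, loaded first
    )

    defn.get('sfdc.timeoutSecs')            # => "600"     (from the shared file)
    defn.get('process.operation')           # => "upsert"  (from the per-process file)
    defn.set('sfdc.loadBatchSize', '200')   # properties can also be overridden at runtime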
data/lib/dataloaderb/process_runner.rb
ADDED
@@ -0,0 +1,70 @@
+require 'dataloaderb/conf_creator'
+
+module Dataloaderb
+  class ProcessRunner
+    # Create the process runner and specify the path to
+    # the Apex Data Loader executable (batch) files.
+    def initialize(bin_path, opts = {})
+      @bin_path = bin_path
+      @conf_path = nil
+      @opts = opts
+    end
+
+    # Run one or more processes. Specify the processes to run by passing
+    # one or more paths to process Yaml definitions.
+    def run(*yamls)
+      if yamls.empty? || yamls.flatten.empty?
+        raise ArgumentError, "You must pass at least one argument to Dataloaderb::ProcessRunner#run"
+      end
+
+      creator = Dataloaderb::ConfCreator.new(yamls, @opts)
+      # We now have a Hash of ProcessDefinitions in creator#processes.
+      # We can also access the full XML for the entire set of processes via
+      # creator#to_xml.
+      # We can access a specific process via creator#processes['processName'].
+      begin
+        create_configuration(creator.to_xml)
+        creator.processes.each do |name, definition|
+          execute_process(name)
+        end
+      ensure
+        remove_configuration
+      end
+    end
+
+    def execute_process(process_name)
+      # @bin_path and @conf_path are full paths at this point
+      `#{get_process_execute_command @bin_path, @conf_path, process_name}`
+    end
+
+    # Given the path to the Apex Data Loader bin directory, the
+    # path to the folder with the process-conf.xml file, and the
+    # name of a process defined in the XML to run, return the
+    # command that the operating system needs to run to execute
+    # the process.
+    def get_process_execute_command(bin_path, conf_path, process_name)
+      "#{get_process_bat_path(bin_path)} #{conf_path} #{process_name}"
+    end
+
+    # Given the path to the Apex Data Loader bin directory, return
+    # the expanded path of the process.bat file to be executed.
+    def get_process_bat_path(bin_path)
+      File.expand_path "#{bin_path}/process.bat"
+    end
+
+    protected
+
+    def create_configuration(xml)
+      base_tmpdir = @opts[:tmp_dir] || Dir.tmpdir
+      @conf_path = Dir.mktmpdir(['', Dataloaderb::Support.unique_id], base_tmpdir)
+      conf_file_path = "#{File.expand_path(@conf_path)}/process-conf.xml"
+      File.open(conf_file_path, "w+") do |file|
+        file.write(xml)
+      end
+    end
+
+    def remove_configuration
+      FileUtils.remove_entry_secure(@conf_path) unless @conf_path.nil?
+    end
+  end
+end
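The README's suggestion to extend the runner maps naturally onto overriding `execute_process`, which returns the output of `process.bat`. A hedged sketch (the subclass name is invented here, and actually running it needs the Apex Data Loader installed at the given path):

    require 'dataloaderb/process_runner'

    class LoggingProcessRunner < Dataloaderb::ProcessRunner
      def execute_process(process_name)
        puts "[#{Time.now}] running #{process_name}"
        output = super                                 # shell out to process.bat
        puts "[#{Time.now}] finished #{process_name}"
        output                                          # pass the bat output back to the caller
      end
    end

    LoggingProcessRunner.new("C:/salesforce/dataloader/bin").run("processes/firstUpsert.yml")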
data/lib/dataloaderb/templates/process-conf.xml.erb
ADDED
@@ -0,0 +1,18 @@
+<!DOCTYPE beans PUBLIC "-//SPRING//DTD BEAN//EN" "http://www.springframework.org/dtd/spring-beans.dtd">
+<beans>
+  <% @processes.each do |name, proc| %>
+  <bean id="<%= proc.id %>"
+        class="com.salesforce.dataloader.process.ProcessRunner"
+        singleton="false">
+    <description><%= proc.description %></description>
+    <property name="name" value="<%= proc.id %>"/>
+    <property name="configOverrideMap">
+      <map>
+        <% proc.entries.each do |key, value| %>
+        <entry key="<%= key %>" value="<%= value %>"/>
+        <% end %>
+      </map>
+    </property>
+  </bean>
+  <% end %>
+</beans>
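`ConfCreator#to_xml` reads this template, strips the indentation in front of each ERB tag, and evaluates it with the `'<>'` trim mode so the tag-only lines leave no blank lines in the output. A rough stand-alone rendering sketch (the gem itself passes the trim mode positionally, in the Ruby 1.8-era style; the keyword form below is the modern spelling):

    require 'erb'

    template = File.read('lib/dataloaderb/templates/process-conf.xml.erb')
    @processes = {}   # normally a Hash of Dataloaderb::ProcessDefinition objects keyed by id
    xml = ERB.new(template.gsub(/^\s+<%/, '<%'), trim_mode: '<>').result(binding)
    puts xml          # with no processes this is just the empty <beans> wrapper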
data/spec/dataloaderb/conf_creator_spec.rb
ADDED
@@ -0,0 +1,41 @@
+require 'rspec'
+require 'dataloaderb/conf_creator'
+require 'tmpdir'
+
+describe Dataloaderb::ConfCreator do
+  before :each do
+    @yamls = [
+      FIXTURE_PROCESSES[:full_process_one]
+    ]
+    # ConfCreator#new expects multiple arguments, not an array, thus we splat
+    @creator = Dataloaderb::ConfCreator.new(@yamls)
+  end
+
+  describe "#build_process_definitions" do
+    it "should build a process definition for each yaml file" do
+      @creator.processes.count.should == 1
+      @creator = Dataloaderb::ConfCreator.new([FIXTURE_PROCESSES[:full_process_one], FIXTURE_PROCESSES[:full_process_two]])
+      @creator.processes.count.should == 2
+    end
+
+    it "should save processes in a hash based on the name of the process" do
+      @creator = Dataloaderb::ConfCreator.new([FIXTURE_PROCESSES[:full_process_one], FIXTURE_PROCESSES[:full_process_two]])
+      @creator.processes['firstUpsert'].class.should == Dataloaderb::ProcessDefinition
+      @creator.processes['secondUpsert'].class.should == Dataloaderb::ProcessDefinition
+    end
+  end
+
+  describe "#to_xml" do
+    it "should create XML with the correct values" do
+      @creator.to_xml.include?('<entry key="sfdc.endpoint" value="https://www.salesforce.com"/>').should be_true
+      @creator.to_xml.include?('<bean id="firstUpsert"').should be_true
+      @creator.to_xml.include?('<description>First sample upsert</description>').should be_true
+    end
+  end
+
+  describe "#get_binding" do
+    it "should get an object of class Binding" do
+      @creator.send(:get_binding).class.should == Binding
+    end
+  end
+end
data/spec/dataloaderb/process_definition_spec.rb
ADDED
@@ -0,0 +1,49 @@
+require 'rspec'
+require 'dataloaderb/process_definition'
+require 'tmpdir'
+
+describe Dataloaderb::ProcessDefinition do
+  before :each do
+    @conf = Dataloaderb::ProcessDefinition.new(FIXTURE_PROCESSES[:full_process_one])
+  end
+
+  describe "#set/#get" do
+    it "should allow setting process conf variables via the XML entry key as a string" do
+      @conf.set('sfdc.endpoint', 'https://test.salesforce.com')
+      @conf.get('sfdc.endpoint').should == 'https://test.salesforce.com'
+    end
+
+    it "should allow setting process conf variables via the XML entry key as a symbol" do
+      @conf.set(:testsymbol, 'https://test.salesforce.com')
+      @conf.get(:testsymbol).should == 'https://test.salesforce.com'
+    end
+  end
+
+  describe "#load_yaml" do
+    it "should raise an ArgumentError if the file doesn't exist" do
+      lambda {
+        @conf.load_yaml('fakefile.yaml')
+      }.should raise_error ArgumentError
+    end
+
+    it "should raise an ArgumentError if the merge file doesn't exist" do
+      lambda {
+        @conf.load_yaml(FIXTURE_PROCESSES[:full_process_one], "fake_merge.yml")
+      }.should raise_error ArgumentError
+    end
+
+    it "should set the appropriate values via set()" do
+      @conf.get('sfdc.timeoutSecs').should == '600'
+      @conf.get('sfdc.debugMessages').should == 'true'
+      @conf.get('process.initialLastRunDate').should == '2010-01-01T00:00:00.000-0800'
+    end
+  end
+
+  it "should correctly merge any partial files" do
+    @conf = Dataloaderb::ProcessDefinition.new(FIXTURE_PROCESSES[:partial_process_one], FIXTURE_PROCESSES[:partial_shared])
+    @conf.get('sfdc.timeoutSecs').should == '600'
+    @conf.get('sfdc.debugMessages').should == 'true'
+    @conf.get('process.initialLastRunDate').should == '2010-01-01T00:00:00.000-0800'
+    @conf.get('process.encryptionKeyFile').should == 'C:/salesforce/dataloader/enc_pass.key'
+  end
+end
data/spec/dataloaderb/process_runner_spec.rb
ADDED
@@ -0,0 +1,70 @@
+require 'rspec'
+require 'dataloaderb/process_runner'
+
+describe Dataloaderb::ProcessRunner do
+  before :each do
+    @runner = Dataloaderb::ProcessRunner.new('spec/fixtures/bin')
+  end
+
+  describe "#get_process_bat_path" do
+    it "should return the right process.bat path given a relative path" do
+      path = @runner.send(:get_process_bat_path, 'sf/bin')
+      path.should == "#{Dir.getwd}/sf/bin/process.bat"
+    end
+
+    it "should return the right process.bat path given a relative path with a trailing slash" do
+      path = @runner.send(:get_process_bat_path, 'sf/bin/')
+      path.should == "#{Dir.getwd}/sf/bin/process.bat"
+    end
+
+    it "should return the right process.bat path given an absolute path" do
+      path = @runner.send(:get_process_bat_path, '/sf/bin')
+      path.should == "/sf/bin/process.bat"
+    end
+
+    it "should return the right process.bat path given an absolute path with a trailing slash" do
+      path = @runner.send(:get_process_bat_path, '/sf/bin/')
+      path.should == "/sf/bin/process.bat"
+    end
+  end
+
+  describe "#create_configuration" do
+    it "should create a process-conf.xml file with the configuration in a temp directory" do
+      @runner.instance_variable_set(:@opts, { :tmp_dir => './tmp/' })
+      @runner.send(:create_configuration, 'fake xml data')
+      tmp_dir = @runner.instance_variable_get(:@conf_path)
+      IO.readlines("#{tmp_dir}/process-conf.xml")[0].should == "fake xml data"
+      FileUtils.remove_entry_secure(tmp_dir)
+    end
+  end
+
+  describe "#get_process_execute_command" do
+    it "should return the correct execution command" do
+      path = @runner.send(:get_process_execute_command, "sf/bin", "myconf", "someUpserts")
+      ex_path = @runner.send(:get_process_bat_path, "sf/bin")
+      path.should == "#{ex_path} myconf someUpserts"
+    end
+  end
+
+  describe "#run" do
+    it "should raise an exception if no arguments are passed" do
+      lambda {
+        @runner.run
+      }.should raise_error ArgumentError
+    end
+
+    it "should raise an exception if an empty array is passed" do
+      lambda {
+        @runner.run([])
+      }.should raise_error ArgumentError
+    end
+  end
+
+  describe "#execute_process" do
+    it "should return the result of the executable" do
+      @runner.stub!(:get_process_execute_command).and_return("./spec/fixtures/bin/test.sh")
+      result = @runner.execute_process 'fixutres/processes/sample_proc.yml'
+      result.strip.should == "result of process"
+    end
+  end
+end
data/spec/dataloaderb/support_spec.rb
ADDED
@@ -0,0 +1,17 @@
+require 'rspec'
+require 'dataloaderb/support'
+
+describe Dataloaderb::Support do
+  before :each do
+    @value = Dataloaderb::Support.unique_id
+  end
+
+  it "should generate a non-nil, non-empty value" do
+    @value.should_not be_empty
+    @value.should_not be_nil
+  end
+
+  it "should contain only numeric characters" do
+    @value.sub(/^[\d]*$/, '').should be_empty
+  end
+end
data/spec/fixtures/processes/full_sample_proc_one.yml
ADDED
@@ -0,0 +1,26 @@
+id: 'firstUpsert'
+description: 'First sample upsert'
+properties:
+  # endpoint config
+  sfdc.endpoint: 'https://www.salesforce.com'
+  sfdc.username: 'xxxxxxxxxx@xxxxxxxxxx.xxx'
+  sfdc.password: 'xxxxxxxxxxxxxxxxxxxxxxxxx'
+  process.encryptionKeyFile: 'C:/salesforce/dataloader/enc_pass.key'
+
+  # operation config
+  sfdc.timeoutSecs: '600'
+  sfdc.loadBatchSize: '100'
+  sfdc.externalIdField: 'Custom_Field__c'
+  sfdc.entity: 'Account'
+  process.operation: 'upsert'
+  process.mappingFile: '//shared/salesforce/upserts/first.Mapping.sdl'
+  dataAccess.name: '//shared/salesforce/upserts/first.csv'
+  dataAccess.type: 'csvRead'
+
+  # logging config
+  sfdc.debugMessages: 'true'
+  process.statusOutputDirectory: '//shared/salesforce/upserts/first/lastrun'
+
+  # misc config
+  process.enableLastRunOutput: 'false'
+  process.initialLastRunDate: '2010-01-01T00:00:00.000-0800'
data/spec/fixtures/processes/full_sample_proc_two.yml
ADDED
@@ -0,0 +1,26 @@
+id: 'secondUpsert'
+description: 'Second sample upsert'
+properties:
+  # endpoint config
+  sfdc.endpoint: 'https://www.salesforce.com'
+  sfdc.username: 'xxxxxxxxxx@xxxxxxxxxx.xxx'
+  sfdc.password: 'xxxxxxxxxxxxxxxxxxxxxxxxx'
+  process.encryptionKeyFile: 'C:/salesforce/dataloader/enc_pass.key'
+
+  # operation config
+  sfdc.timeoutSecs: '600'
+  sfdc.loadBatchSize: '100'
+  sfdc.externalIdField: 'Custom_Field__c'
+  sfdc.entity: 'Account'
+  process.operation: 'upsert'
+  process.mappingFile: '//shared/salesforce/upserts/second.Mapping.sdl'
+  dataAccess.name: '//shared/salesforce/upserts/second.csv'
+  dataAccess.type: 'csvRead'
+
+  # logging config
+  sfdc.debugMessages: 'true'
+  process.statusOutputDirectory: '//shared/salesforce/upserts/second/lastrun'
+
+  # misc config
+  process.enableLastRunOutput: 'false'
+  process.initialLastRunDate: '2010-01-01T00:00:00.000-0800'
data/spec/fixtures/processes/partial_sample_process_one.yml
ADDED
@@ -0,0 +1,13 @@
+id: 'firstUpsert'
+description: 'First sample upsert'
+properties:
+  # operation config
+  sfdc.externalIdField: 'Custom_Field__c'
+  sfdc.entity: 'Account'
+  process.operation: 'upsert'
+  process.mappingFile: '//shared/salesforce/upserts/first.Mapping.sdl'
+  dataAccess.name: '//shared/salesforce/upserts/first.csv'
+  dataAccess.type: 'csvRead'
+
+  # logging config
+  process.statusOutputDirectory: '//shared/salesforce/upserts/first/lastrun'
data/spec/fixtures/processes/partial_sample_process_two.yml
ADDED
@@ -0,0 +1,13 @@
+id: 'secondUpsert'
+description: 'Second sample upsert'
+properties:
+  # operation config
+  sfdc.externalIdField: 'Custom_Field__c'
+  sfdc.entity: 'Account'
+  process.operation: 'upsert'
+  process.mappingFile: '//shared/salesforce/upserts/second.Mapping.sdl'
+  dataAccess.name: '//shared/salesforce/upserts/second.csv'
+  dataAccess.type: 'csvRead'
+
+  # logging config
+  process.statusOutputDirectory: '//shared/salesforce/upserts/second/lastrun'
data/spec/fixtures/processes/partial_sample_shared.yml
ADDED
@@ -0,0 +1,16 @@
+# endpoint config
+sfdc.endpoint: 'https://www.salesforce.com'
+sfdc.username: 'xxxxxxxxxx@xxxxxxxxxx.xxx'
+sfdc.password: 'xxxxxxxxxxxxxxxxxxxxxxxxx'
+process.encryptionKeyFile: 'C:/salesforce/dataloader/enc_pass.key'
+
+# operation config
+sfdc.timeoutSecs: '600'
+sfdc.loadBatchSize: '100'
+
+# logging config
+sfdc.debugMessages: 'true'
+
+# misc config
+process.enableLastRunOutput: 'false'
+process.initialLastRunDate: '2010-01-01T00:00:00.000-0800'
data/spec/spec_helper.rb
ADDED
@@ -0,0 +1,8 @@
+# spec helpers
+FIXTURE_PROCESSES = {
+  :full_process_one => "spec/fixtures/processes/full_sample_proc_one.yml",
+  :full_process_two => "spec/fixtures/processes/full_sample_proc_two.yml",
+  :partial_process_one => "spec/fixtures/processes/partial_sample_process_one.yml",
+  :partial_process_two => "spec/fixtures/processes/partial_sample_process_two.yml",
+  :partial_shared => "spec/fixtures/processes/partial_sample_shared.yml",
+}
metadata
ADDED
@@ -0,0 +1,98 @@
+--- !ruby/object:Gem::Specification
+name: dataloaderb
+version: !ruby/object:Gem::Version
+  prerelease: false
+  segments:
+  - 0
+  - 0
+  - 1
+  version: 0.0.1
+platform: ruby
+authors:
+- Brandon Tilley
+autorequire:
+bindir: bin
+cert_chain: []
+
+date: 2011-01-07 00:00:00 -08:00
+default_executable:
+dependencies: []
+
+description: Easily create, run, and extend Apex Data Loader processes on Windows via Ruby
+email:
+- brandon.tilley@fresno.edu
+executables: []
+
+extensions: []
+
+extra_rdoc_files: []
+
+files:
+- .gitignore
+- Gemfile
+- Gemfile.lock
+- README.md
+- Rakefile
+- dataloaderb.gemspec
+- lib/dataloaderb.rb
+- lib/dataloaderb/conf_creator.rb
+- lib/dataloaderb/process_definition.rb
+- lib/dataloaderb/process_runner.rb
+- lib/dataloaderb/support.rb
+- lib/dataloaderb/templates/process-conf.xml.erb
+- lib/dataloaderb/version.rb
+- spec/dataloaderb/conf_creator_spec.rb
+- spec/dataloaderb/process_definition_spec.rb
+- spec/dataloaderb/process_runner_spec.rb
+- spec/dataloaderb/support_spec.rb
+- spec/fixtures/bin/test.sh
+- spec/fixtures/processes/full_sample_proc_one.yml
+- spec/fixtures/processes/full_sample_proc_two.yml
+- spec/fixtures/processes/partial_sample_process_one.yml
+- spec/fixtures/processes/partial_sample_process_two.yml
+- spec/fixtures/processes/partial_sample_shared.yml
+- spec/spec_helper.rb
+has_rdoc: true
+homepage: https://github.com/FPU/dataloaderb
+licenses:
+- MIT
+post_install_message:
+rdoc_options: []
+
+require_paths:
+- lib
+required_ruby_version: !ruby/object:Gem::Requirement
+  none: false
+  requirements:
+  - - ">="
+    - !ruby/object:Gem::Version
+      segments:
+      - 0
+      version: "0"
+required_rubygems_version: !ruby/object:Gem::Requirement
+  none: false
+  requirements:
+  - - ">="
+    - !ruby/object:Gem::Version
+      segments:
+      - 0
+      version: "0"
+requirements:
+- Ruby on Windows (since this is the only supported platform for the Apex Data Loader)
+rubyforge_project:
+rubygems_version: 1.3.7
+signing_key:
+specification_version: 3
+summary: Easily create, run, and extend Apex Data Loader processes on Windows via Ruby
+test_files:
+- spec/dataloaderb/conf_creator_spec.rb
+- spec/dataloaderb/process_definition_spec.rb
+- spec/dataloaderb/process_runner_spec.rb
+- spec/dataloaderb/support_spec.rb
+- spec/fixtures/bin/test.sh
+- spec/fixtures/processes/full_sample_proc_one.yml
+- spec/fixtures/processes/full_sample_proc_two.yml
+- spec/fixtures/processes/partial_sample_process_one.yml
+- spec/fixtures/processes/partial_sample_process_two.yml
+- spec/fixtures/processes/partial_sample_shared.yml
+- spec/spec_helper.rb