remi 0.3.1 → 0.3.2

Sign up to get free protection for your applications and to get access to all the features.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA1:
3
- metadata.gz: ea2d2971479e9e0dfcc5de4dd01ac13f5274a6f8
4
- data.tar.gz: ea5e3a3280613d00ae29f5265c342740973ea57c
3
+ metadata.gz: 0de4e8f2de3129e2e4b93c3d22dc5f718a05b56a
4
+ data.tar.gz: d963548c553f1918b33bd391038bc3481ce4a5d8
5
5
  SHA512:
6
- metadata.gz: a09f8f926a99891356bcd6363ef4729b3a27bcf9cbfacce2a6bc8a8b60a262cfe83fe40d1e0f1f0c79945693aacb0fb1099b5bad299e022511846730e98642fe
7
- data.tar.gz: d160e9840162558b1d9e203825f8e724ce1626e9f8a6922b86f894249b78c5c6680b37fef7e24574d794aac41db025bd887c707df35f6485a3540914552a3293
6
+ metadata.gz: d01e67e38c2a76784e65a22536d2d9cba7c9f56dc3686e8d0d23ea1e5176cb8495cd06977ba85357d336cbf7fd91641f79795d68df69542ce9e94b39bc85c6ec
7
+ data.tar.gz: 07cec77fc7c40299207081f5ea7390cdd3cf863ac49295e9de166e4736ff95ae5d57e642de2a10ea74d1d812aeeff2ed4ffdb6a93c232658efc7552332a9f1e9
data/Gemfile CHANGED
@@ -3,7 +3,8 @@ source 'https://rubygems.org'
3
3
 
4
4
  gemspec
5
5
  gem 'google-api-client', '~> 0.9'
6
- gem 'daru', '0.1.4.1', git: 'git@github.com:inside-track/daru.git', branch: '0.1.4.1-Remi'
6
+ gem 'daru', '0.1.4.1', git: 'git@github.com:inside-track/daru.git', branch: '0.1.4.1.2-Remi'
7
7
  gem 'restforce', '~> 2.1'
8
8
  gem 'salesforce_bulk_api', git: 'git@github.com:inside-track/salesforce_bulk_api.git', branch: 'master'
9
+ gem 'soapforce', '~> 0.5'
9
10
  gem 'aws-sdk', '~> 2.3'
@@ -1,7 +1,7 @@
1
1
  GIT
2
2
  remote: git@github.com:inside-track/daru.git
3
- revision: e38e98890c99dee03ceedaca3a5faf40859042ab
4
- branch: 0.1.4.1-Remi
3
+ revision: c8d407ee55b8d5f3143b9a030fe36c38cf3537d1
4
+ branch: 0.1.4.1.2-Remi
5
5
  specs:
6
6
  daru (0.1.4.1)
7
7
  backports
@@ -18,7 +18,7 @@ GIT
18
18
  PATH
19
19
  remote: .
20
20
  specs:
21
- remi (0.3.1)
21
+ remi (0.3.2)
22
22
  activesupport (~> 4.2)
23
23
  bond (~> 0.5)
24
24
  cucumber (~> 2.1)
@@ -40,6 +40,9 @@ GEM
40
40
  thread_safe (~> 0.3, >= 0.3.4)
41
41
  tzinfo (~> 1.1)
42
42
  addressable (2.4.0)
43
+ akami (1.3.1)
44
+ gyoku (>= 0.4.0)
45
+ nokogiri
43
46
  aws-sdk (2.3.5)
44
47
  aws-sdk-resources (= 2.3.5)
45
48
  aws-sdk-core (2.3.5)
@@ -87,8 +90,13 @@ GEM
87
90
  multi_json (~> 1.11)
88
91
  os (~> 0.9)
89
92
  signet (~> 0.7)
93
+ gyoku (1.3.1)
94
+ builder (>= 2.1.2)
90
95
  hashie (3.4.3)
91
96
  httpclient (2.8.2.4)
97
+ httpi (2.4.2)
98
+ rack
99
+ socksify
92
100
  hurley (0.2)
93
101
  i18n (0.7.0)
94
102
  iruby (0.2.7)
@@ -110,6 +118,7 @@ GEM
110
118
  mime-types-data (~> 3.2015)
111
119
  mime-types-data (3.2016.0521)
112
120
  mimemagic (0.3.1)
121
+ mini_portile2 (2.1.0)
113
122
  minitest (5.8.4)
114
123
  multi_json (1.11.2)
115
124
  multi_test (0.1.2)
@@ -117,8 +126,12 @@ GEM
117
126
  net-sftp (2.1.2)
118
127
  net-ssh (>= 2.6.5)
119
128
  net-ssh (3.1.1)
129
+ nokogiri (1.7.0.1)
130
+ mini_portile2 (~> 2.1.0)
131
+ nori (2.6.0)
120
132
  os (0.9.6)
121
133
  pg (0.18.4)
134
+ rack (2.0.1)
122
135
  rbczmq (1.7.9)
123
136
  redcarpet (3.3.4)
124
137
  regex_sieve (0.1.0)
@@ -144,15 +157,29 @@ GEM
144
157
  diff-lcs (>= 1.2.0, < 2.0)
145
158
  rspec-support (~> 3.4.0)
146
159
  rspec-support (3.4.1)
160
+ savon (2.11.1)
161
+ akami (~> 1.2)
162
+ builder (>= 2.1.2)
163
+ gyoku (~> 1.2)
164
+ httpi (~> 2.3)
165
+ nokogiri (>= 1.4.0)
166
+ nori (~> 2.4)
167
+ wasabi (~> 3.4)
147
168
  signet (0.7.3)
148
169
  addressable (~> 2.3)
149
170
  faraday (~> 0.9)
150
171
  jwt (~> 1.5)
151
172
  multi_json (~> 1.10)
173
+ soapforce (0.5.0)
174
+ savon (>= 2.3.0, < 3.0.0)
175
+ socksify (1.7.0)
152
176
  thread_safe (0.3.5)
153
177
  tzinfo (1.2.2)
154
178
  thread_safe (~> 0.1)
155
179
  uber (0.0.15)
180
+ wasabi (3.5.0)
181
+ httpi (~> 2.0)
182
+ nokogiri (>= 1.4.2)
156
183
  xml-simple (1.1.5)
157
184
  yard (0.9.0)
158
185
 
@@ -169,7 +196,8 @@ DEPENDENCIES
169
196
  remi!
170
197
  restforce (~> 2.1)
171
198
  salesforce_bulk_api!
199
+ soapforce (~> 0.5)
172
200
  yard (~> 0.9)
173
201
 
174
202
  BUNDLED WITH
175
- 1.13.5
203
+ 1.14.3
@@ -8,6 +8,28 @@ Feature: This tests the creation of example records.
8
8
  And the source 'Source Data'
9
9
  And the target 'Target Data'
10
10
 
11
+ Scenario: Handling date formulas in the example data with minute units.
12
+
13
+ Given the following example record for 'Source Data':
14
+ | 1MinuteAgo | 15MinutesAgo | OneMinuteAgo | 1MinuteFromNow | 15MinutesFromNow |
15
+ | *1 minute ago* | *15 minutes ago* | *1 minute ago* | *1 minute from now* | *15 minutes from now* |
16
+ Then the target field '1MinuteAgo' is the time 1 minute ago
17
+ And the target field '15MinutesAgo' is the time 15 minutes ago
18
+ And the target field 'OneMinuteAgo' is the time 1 minute ago
19
+ And the target field '1MinuteFromNow' is the time 1 minute from now
20
+ And the target field '15MinutesFromNow' is the time 15 minutes from now
21
+
22
+ Scenario: Handling date formulas in the example data with hour units.
23
+
24
+ Given the following example record for 'Source Data':
25
+ | 1HourAgo | 2HoursAgo | OneHourAgo | 1HourFromNow | 2HoursFromNow |
26
+ | *1 hour ago* | *2 hours ago* | *1 hour ago* | *1 hour from now* | *2 hours from now* |
27
+ Then the target field '1HourAgo' is the time 1 hour ago
28
+ And the target field '2HoursAgo' is the time 2 hours ago
29
+ And the target field 'OneHourAgo' is the time 1 hour ago
30
+ And the target field '1HourFromNow' is the time 1 hour from now
31
+ And the target field '2HoursFromNow' is the time 2 hours from now
32
+
11
33
  Scenario: Handling date formulas in the example data with day units.
12
34
 
13
35
  Given the following example record for 'Source Data':
@@ -124,7 +124,6 @@ Given /^the (source|target) file contains all of the following headers in this o
124
124
  expect(@brt.send(st.to_sym).data_subject.df.vectors.to_a).to eq @brt.send(st.to_sym).fields.field_names
125
125
  end
126
126
 
127
-
128
127
  ### Source
129
128
 
130
129
  Given /^the source '([[:alnum:]\s\-_]+)'$/ do |arg|
@@ -219,6 +218,20 @@ Given /^the source field is not a valid email address$/ do
219
218
  @brt.source.field.value = 'invalid!example.com'
220
219
  end
221
220
 
221
+ Given /^the source field '([^']+)' is a valid email address$/ do |source_field|
222
+ step "the source field '#{source_field}'"
223
+
224
+ source_name, source_field_name = @brt.sources.parse_full_field(source_field)
225
+ @brt.sources[source_name].fields[source_field_name].value = 'valid@example.com'
226
+ end
227
+
228
+ Given /^the source field '([^']+)' is not a valid email address$/ do |source_field|
229
+ step "the source field '#{source_field}'"
230
+
231
+ source_name, source_field_name = @brt.sources.parse_full_field(source_field)
232
+ @brt.sources[source_name].fields[source_field_name].value = 'invalid!example.com'
233
+ end
234
+
222
235
  ### Target
223
236
 
224
237
  Given /^the target '([[:alnum:]\s\-_]+)'$/ do |arg|
@@ -242,6 +255,13 @@ Then /^the target field '([^']+)' is copied from the source field '([^']+)'$/ do
242
255
  end
243
256
  end
244
257
 
258
+ Then /^the target field '([^']+)' has the label '([^']+)'$/ do |target_field, label|
259
+ step "the target field '#{target_field}'"
260
+ data_field = @brt.targets.fields.next
261
+ expect(data_field.metadata[:label]).to eq label
262
+ expect(data_field.name).to eq target_field
263
+ end
264
+
245
265
  Then /^the target field '([^']+)' is copied from the source field$/ do |target_field|
246
266
  @brt.sources.fields.each do |source_field|
247
267
  step "the target field '#{target_field}' is copied from the source field '#{source_field.full_name}'"
@@ -483,9 +503,10 @@ Then /^the target field '([^']+)' is populated from the source field '([^']+)' u
483
503
 
484
504
  source_name, source_field_name = @brt.sources.parse_full_field(source_field)
485
505
  target_names, target_field_name = @brt.targets.parse_full_field(target_field, multi: true)
506
+ inferred_type = target_format =~ /(%H|%M|%S)/ ? :datetime : :date
486
507
 
487
508
  source_format = @brt.sources[source_name].fields[source_field_name].metadata[:in_format]
488
- source_reformatted = Remi::Transform::FormatDate.new(in_format: source_format, out_format: target_format).to_proc
509
+ source_reformatted = Remi::Transform::FormatDate.new(in_format: source_format, out_format: target_format, type: inferred_type).to_proc
489
510
  .call(@brt.sources[source_name].fields[source_field_name].value)
490
511
 
491
512
  @brt.run_transforms
@@ -74,8 +74,6 @@ require 'remi/data_frame/daru'
74
74
 
75
75
  require 'remi/transform'
76
76
 
77
- require 'remi/monkeys/daru'
78
-
79
77
  # Remi is Ruby Extract Modify and Integrate, a framework for writing ETL job in Ruby.
80
78
  module Remi
81
79
  end
@@ -78,6 +78,12 @@ module Remi
78
78
  processed_filename = preprocess(filename)
79
79
  csv_df = Daru::DataFrame.from_csv processed_filename, @csv_options
80
80
 
81
+ # Daru 0.1.4 doesn't add vectors if it's a headers-only file
82
+ if csv_df.vectors.size == 0
83
+ headers_df = Daru::DataFrame.from_csv processed_filename, @csv_options.merge(return_headers: true)
84
+ csv_df = Daru::DataFrame.new([], order: headers_df.vectors.to_a)
85
+ end
86
+
81
87
  csv_df[@filename_field] = Daru::Vector.new([filename] * csv_df.size, index: csv_df.index) if @filename_field
82
88
  if idx == 0
83
89
  result_df = csv_df
@@ -153,15 +159,24 @@ module Remi
153
159
  attr_reader :csv_options
154
160
 
155
161
  # Converts the dataframe to a CSV file stored in the local work directory.
162
+ # If labels are present write the CSV file with those headers but maintain
163
+ # the structure of the original dataframe
156
164
  #
157
165
  # @param dataframe [Remi::DataFrame] The dataframe to be encoded
158
166
  # @return [Object] The path to the file
159
167
  def encode(dataframe)
160
168
  logger.info "Writing CSV file to temporary location #{@working_file}"
169
+
170
+ label_columns = self.fields.reduce({}) { |h, (k, v)|
171
+ if v[:label]
172
+ h[k] = v[:label].to_sym
173
+ end
174
+ h
175
+ }
176
+ dataframe.rename_vectors label_columns
161
177
  dataframe.write_csv @working_file, @csv_options
162
178
  @working_file
163
179
  end
164
-
165
180
  private
166
181
  def init_csv_file_encoder(*args, work_path: Settings.work_dir, csv_options: {}, **kargs, &block)
167
182
  @working_file = File.join(work_path, SecureRandom.uuid)
@@ -46,8 +46,8 @@ module Remi
46
46
  service.list_files(q: "'#{folder_id}' in parents", page_size: 10, order_by: 'createdTime desc', fields: 'nextPageToken, files(id, name, createdTime, mimeType)')
47
47
  end
48
48
 
49
- def get_spreadsheet_vals(service, spreadsheet_id)
50
- service.get_spreadsheet_values(spreadsheet_id, 'Sheet1')
49
+ def get_spreadsheet_vals(service, spreadsheet_id, sheet_name = 'Sheet1')
50
+ service.get_spreadsheet_values(spreadsheet_id, sheet_name)
51
51
  end
52
52
 
53
53
  def extract
@@ -57,7 +57,8 @@ module Remi
57
57
  @data = []
58
58
 
59
59
  entries.each do |file|
60
- response = get_spreadsheet_vals(service, file.raw)
60
+ logger.info "Extracting Google Sheet data from #{file.pathname}, with sheet name : #{@sheet_name}"
61
+ response = get_spreadsheet_vals(service, file.raw, @sheet_name)
61
62
  data.push(response)
62
63
  end
63
64
 
@@ -85,8 +86,9 @@ module Remi
85
86
 
86
87
  private
87
88
 
88
- def init_gsheet_extractor(*args, credentials:, folder_id:, **kargs)
89
+ def init_gsheet_extractor(*args, credentials:, folder_id:, sheet_name: 'Sheet1', **kargs)
89
90
  @default_folder_id = folder_id
91
+ @sheet_name = sheet_name
90
92
  @oob_uri = 'urn:ietf:wg:oauth:2.0:oob'
91
93
  @application_name = credentials.fetch(:application_name)
92
94
 
@@ -111,25 +113,23 @@ module Remi
111
113
  class Parser::Gsheet < Parser
112
114
 
113
115
  def parse(gs_extract)
114
- google_vals = gs_extract.data
115
116
  return_hash = nil
116
- google_vals.each do |google_val|
117
+ gs_extract.data.each do |gs_data|
117
118
 
118
119
  if return_hash.nil?
119
120
  return_hash = Hash.new
120
- google_val.values[0].each do |header|
121
+ gs_data.values[0].each do |header|
121
122
  return_hash[field_symbolizer.call(header)] = []
122
123
  end
123
124
  end
124
125
 
125
- keys_temp = return_hash.keys
126
+ headers = return_hash.keys
127
+ header_idx = headers.each_with_index.to_h
126
128
 
127
- google_val.values[1..-1].each do |rows|
128
- col_num = 0
129
-
130
- rows.each do |value|
131
- return_hash[keys_temp[col_num]] << value
132
- col_num +=1
129
+ gs_data.values[1..-1].each do |row|
130
+ headers.each do |header|
131
+ idx = header_idx[header]
132
+ return_hash[header] << (idx < row.size ? row[idx] : nil)
133
133
  end
134
134
  end
135
135
  end
@@ -168,7 +168,7 @@ module Remi
168
168
  # @option credentials [String] :password Salesforce password
169
169
  # @option credentials [String] :security_token Salesforce security token
170
170
  # @param object [Symbol] Salesforce object to extract
171
- # @param operation [Symbol] Salesforce operation to perform (`:update`, `:create`, `:upsert`)
171
+ # @param operation [Symbol] Salesforce operation to perform (`:update`, `:create`, `:upsert`, `:delete`)
172
172
  # @param batch_size [Integer] Size of batch to use for updates (1-10000)
173
173
  # @param external_id [Symbol, String] Field to use as an external id for upsert operations
174
174
  # @param api [Symbol] Salesforce API to use (only option supported is `:bulk`)
@@ -188,6 +188,8 @@ module Remi
188
188
  Remi::SfBulkHelper::SfBulkCreate.create(restforce_client, @sfo, data, batch_size: @batch_size, logger: logger)
189
189
  elsif @operation == :upsert
190
190
  Remi::SfBulkHelper::SfBulkUpsert.upsert(restforce_client, @sfo, data, batch_size: @batch_size, external_id: @external_id, logger: logger)
191
+ elsif @operation == :delete
192
+ Remi::SfBulkHelper::SfBulkDelete.delete(restforce_client, @sfo, data, batch_size: @batch_size, logger: logger)
191
193
  else
192
194
  raise ArgumentError, "Unknown operation: #{@operation}"
193
195
  end
@@ -0,0 +1,98 @@
1
+ require 'soapforce'
2
+
3
+ module Remi
4
+ module DataSubject::SalesforceSoap
5
+ def soapforce_client
6
+ @soapforce_client ||= begin
7
+ client = Soapforce::Client.new(host: @credentials[:host], logger: logger)
8
+ client.authenticate(
9
+ username: @credentials[:username],
10
+ password: "#{@credentials[:password]}#{@credentials[:security_token]}"
11
+ )
12
+ client
13
+ end
14
+ end
15
+ end
16
+
17
+ # Salesforce SOAP encoder
18
+ class Encoder::SalesforceSoap < Encoder
19
+ # Converts the dataframe to an array of hashes, which can be used
20
+ # by the salesforce soap api.
21
+ #
22
+ # @param dataframe [Remi::DataFrame] The dataframe to be encoded
23
+ # @return [Object] The encoded data to be loaded into the target
24
+ def encode(dataframe)
25
+ dataframe.to_a[0]
26
+ end
27
+ end
28
+
29
+ # Salesforce SOAP loader
30
+ # The Salesforce SOAP loader can be used to merge salesforce objects (for those
31
+ # objects that support the merge operation). To do so, each row of the dataframe must
32
+ # contain a field called `:Id` that references the master record that survives the
33
+ # merge operation. It must also contain a `:Merge_Id` field that specifies the
34
+ # salesforce Id of the record that is to be merged into the master. Other fields
35
+ # may also be specified that will be used to update the master record.
36
+ #
37
+ # @example
38
+ # class MyJob < Remi::Job
39
+ # target :merge_contacts do
40
+ # encoder Remi::Encoder::SalesforceSoap.new
41
+ # loader Remi::Loader::SalesforceSoap.new(
42
+ # credentials: { },
43
+ # object: :Contact,
44
+ # operation: :merge,
45
+ # merge_id_field: :Merge_Id
46
+ # )
47
+ # end
48
+ # end
49
+ #
50
+ # job = MyJob.new
51
+ # job.merge_contacts.df = Remi::DataFrame::Daru.new({ Id: ['003g000001IX4HcAAL'], Note__c: ['Cheeseburger in Paradise'], Merge_Id: ['003g000001LE7dXAAT']})
52
+ # job.merge_contacts.load
53
+ #
54
+ class Loader::SalesforceSoap < Loader
55
+ include Remi::DataSubject::SalesforceSoap
56
+
57
+ # @param credentials [Hash] Used to authenticate with salesforce
58
+ # @option credentials [String] :host Salesforce host (e.g., login.salesforce.com)
59
+ # @option credentials [String] :username Salesforce username
60
+ # @option credentials [String] :password Salesforce password
61
+ # @option credentials [String] :security_token Salesforce security token
62
+ # @param object [Symbol] Salesforce object to extract
63
+ # @param operation [Symbol] Salesforce operation to perform (`:merge`) <- Merge is the only operation currently supported
64
+ # @param merge_id_field [Symbol] For merge operations, this is the name of the field containing the id of the record to be merged (default: :Merge_Id)
65
+ def initialize(*args, **kargs, &block)
66
+ super
67
+ init_salesforce_loader(*args, **kargs, &block)
68
+ end
69
+
70
+ # @param data [Encoder::Salesforce] Data that has been encoded appropriately to be loaded into the target
71
+ # @return [true] On success
72
+ def load(data)
73
+ logger.info "Performing Salesforce Soap #{@operation} on object #{@sfo}"
74
+ if @operation == :merge
75
+ # The Soapforce gem only supports one slow-ass merge at a time :(
76
+ data.each do |row|
77
+ unless row.include?(@merge_id_field)
78
+ raise KeyError, "Merge id field not found: #{@merge_id_field}"
79
+ end
80
+
81
+ merge_id = Array(row.delete(@merge_id_field))
82
+ soapforce_client.merge(@sfo, row, merge_id)
83
+ end
84
+ else
85
+ raise ArgumentError, "Unknown soap operation: #{@operation}"
86
+ end
87
+ end
88
+
89
+ private
90
+
91
+ def init_salesforce_loader(*args, object:, credentials:, operation:, merge_id_field: :Merge_Id, **kargs, &block)
92
+ @sfo = object
93
+ @credentials = credentials
94
+ @operation = operation
95
+ @merge_id_field = merge_id_field
96
+ end
97
+ end
98
+ end
@@ -81,6 +81,7 @@ module Remi
81
81
  next unless batch['response']
82
82
 
83
83
  batch['response'].each do |record|
84
+ @logger.error "Salesforce error: #{record}" if record['success'] && record['success'][0] == 'false'
84
85
  @result << record.inject({}) { |h, (k,v)| h[k] = v.first unless ['xsi:type','type'].include? k; h }
85
86
  end
86
87
 
@@ -245,6 +246,24 @@ module Remi
245
246
  end
246
247
  end
247
248
 
249
+ # Public: Class used to execute SF Bulk Delete operations (see SfBulkOperation class for
250
+ # more details).
251
+ class SfBulkDelete < SfBulkOperation
252
+ def self.delete(*args, **kargs)
253
+ SfBulkDelete.new(*args, **kargs).tap { |sf| sf.send(:execute) }
254
+ end
255
+
256
+ def operation
257
+ :delete
258
+ end
259
+
260
+ private
261
+
262
+ def send_bulk_operation
263
+ sf_bulk.send(operation, @object, @data, true, @batch_size)
264
+ end
265
+ end
266
+
248
267
  # Public: Class used to execute SF Bulk Query operations (see SfBulkOperation class for
249
268
  # more details).
250
269
  class SfBulkQuery < SfBulkOperation
@@ -30,6 +30,7 @@ module Remi::Testing::BusinessRules
30
30
  def formulas
31
31
  @formulas ||= RegexSieve.new({
32
32
  /\*now(|:[^*]+)\*/i => [:time_reference, :match_now],
33
+ /\*(\d+)\s(hour|hours|minute|minutes) (ago|from now)(|:[^*]+)\*/i => [:time_reference, :match_time],
33
34
  /\*(today|yesterday|tomorrow)(|:[^*]+)\*/i => [:date_reference, :match_single_day],
34
35
  /\*(this|last|previous|next) (day|month|year|week)(|:[^*]+)\*/i => [:date_reference, :match_single_unit],
35
36
  /\*(\d+)\s(day|days|month|months|year|years|week|weeks) (ago|from now)(|:[^*]+)\*/i => [:date_reference, :match_multiple]
@@ -44,12 +45,12 @@ module Remi::Testing::BusinessRules
44
45
 
45
46
  to_replace = form.match(base_regex)[0]
46
47
  replace_with = if form_opt[:value][0] == :date_reference
47
- date_reference(form_opt[:value][1], form_opt[:match])
48
- elsif form_opt[:value][0] == :time_reference
49
- time_reference(form_opt[:value][1], form_opt[:match])
50
- else
51
- to_replace
52
- end
48
+ date_reference(form_opt[:value][1], form_opt[:match])
49
+ elsif form_opt[:value][0] == :time_reference
50
+ time_reference(form_opt[:value][1], form_opt[:match])
51
+ else
52
+ to_replace
53
+ end
53
54
 
54
55
  form.gsub(to_replace, replace_with)
55
56
  end
@@ -62,6 +63,7 @@ module Remi::Testing::BusinessRules
62
63
  def date_reference(formula, captured)
63
64
  parsed = self.send("date_reference_#{formula}", *captured)
64
65
  Date.current.send("#{parsed[:unit]}_#{parsed[:direction]}", parsed[:quantity]).strftime(parsed[:format])
66
+
65
67
  end
66
68
 
67
69
  def parse_colon_date_format(str)
@@ -80,6 +82,21 @@ module Remi::Testing::BusinessRules
80
82
  format: parse_colon_time_format(format)
81
83
  }
82
84
  end
85
+ def time_reference_match_time(form, quantity, unit, direction, format=nil)
86
+ divisor = 1.0
87
+ if unit.downcase.pluralize == 'hours'
88
+ divisor = 24.0
89
+ elsif unit.downcase.pluralize == 'minutes'
90
+ divisor = 24.0*60.0
91
+ end
92
+
93
+ {
94
+ quantity: quantity.to_i/divisor,
95
+ unit: 'days',
96
+ direction: { 'ago' => 'ago', 'from now' => 'since' }[direction.downcase],
97
+ format: parse_colon_time_format(format)
98
+ }
99
+ end
83
100
 
84
101
  def date_reference_match_single_day(form, direction, format=nil)
85
102
  {
@@ -481,10 +498,10 @@ module Remi::Testing::BusinessRules
481
498
 
482
499
  def value=(arg)
483
500
  typed_arg = if metadata[:type] == :json
484
- JSON.parse(arg)
485
- else
486
- arg
487
- end
501
+ JSON.parse(arg)
502
+ else
503
+ arg
504
+ end
488
505
 
489
506
  vector.recode! { |_v| typed_arg }
490
507
  end
@@ -524,10 +541,10 @@ module Remi::Testing::BusinessRules
524
541
  def parse_formula(value)
525
542
  parsed_value = ParseFormula.parse(value)
526
543
  case parsed_value
527
- when '\nil'
528
- nil
529
- else
530
- parsed_value
544
+ when '\nil'
545
+ nil
546
+ else
547
+ parsed_value
531
548
  end
532
549
  end
533
550
 
@@ -1,3 +1,3 @@
1
1
  module Remi
2
- VERSION = '0.3.1'
2
+ VERSION = '0.3.2'
3
3
  end
@@ -65,5 +65,60 @@ describe Parser::CsvFile do
65
65
 
66
66
  expect(csv.parse(two_files).to_a).to eq expected_df.to_a
67
67
  end
68
+ it 'returns empty vectors if the csv contains headers only' do
69
+ csv = Parser::CsvFile.new
70
+
71
+ expected_df = Remi::DataFrame::Daru.new(
72
+ {
73
+ column_a: [],
74
+ column_b: []
75
+ }
76
+ )
77
+
78
+ expect(csv.parse('spec/fixtures/empty.csv').to_h).to eq expected_df.to_h
79
+ end
80
+ end
68
81
 
82
+ describe Encoder::CsvFile do
83
+ let(:basic_dataframe) do
84
+ Remi::DataFrame::Daru.new(
85
+ {
86
+ column_a: ['value 1A', 'value 2A'],
87
+ column_b: ['value 1B', 'value 2B']
88
+ }
89
+ )
90
+ end
91
+ it 'creates a csv from a provided dataframe' do
92
+ encoder = Encoder::CsvFile.new
93
+ parser = Parser::CsvFile.new
94
+ provided_df = Remi::DataFrame::Daru.new(
95
+ {
96
+ column_a: ['value 1A', 'value 2A', 'value 1A', 'value 2A'],
97
+ column_b: ['value 1B', 'value 2B', nil, nil],
98
+ column_c: [nil, nil, 'value 1C', 'value 2C']
99
+ }
100
+ )
101
+ expected_contents = "column_a,column_b,column_c\nvalue 1A,value 1B,\nvalue 2A,value 2B,\nvalue 1A,,value 1C\nvalue 2A,,value 2C\n"
102
+ file_name = encoder.encode(provided_df)
103
+ expect(File.read(file_name)).to eq expected_contents
104
+ end
105
+ it 'uses label headers when provided' do
106
+ provided_df = Remi::DataFrame::Daru.new(
107
+ {
108
+ column_a: ['value 1A', 'value 2A', 'value 1A', 'value 2A'],
109
+ column_b: ['value 1B', 'value 2B', nil, nil],
110
+ column_c: [nil, nil, 'value 1C', 'value 2C']
111
+ }
112
+ )
113
+ expected_contents = "Column A,Column B,Column C\nvalue 1A,value 1B,\nvalue 2A,value 2B,\nvalue 1A,,value 1C\nvalue 2A,,value 2C\n"
114
+ column_fields = Remi::Fields.new({
115
+ :column_a => { label: 'Column A' },
116
+ :column_b => { label: 'Column B' },
117
+ :column_c => { label: 'Column C' }
118
+ })
119
+ encoder = Encoder::CsvFile.new(fields: column_fields)
120
+ file_name = encoder.encode(provided_df)
121
+ expect(File.read(file_name)).to eq expected_contents
122
+ end
69
123
  end
124
+
@@ -21,7 +21,8 @@ describe Extractor::Gsheet do
21
21
  {
22
22
  credentials: credentials,
23
23
  folder_id: 'some_google_folder_id',
24
- remote_path: remote_path
24
+ remote_path: remote_path,
25
+ sheet_name: 'some_google_sheet_name'
25
26
  }
26
27
  }
27
28
 
@@ -109,7 +110,9 @@ describe Parser::Gsheet do
109
110
  let(:gs_extract) { double('gs_extract') }
110
111
  let(:example_data) do
111
112
  [{"headers" => ["header_1", "header_2", "header_3"],
112
- "row 1" => ["value 1", "value 2", "value 3"]
113
+ "row 1" => ["value 11", "value 12", "value 13"],
114
+ "row 2" => ["value 21", "value 22", "value 23"],
115
+ "row 3" => ["value 31", "value 32", "value 33"],
113
116
  }]
114
117
  end
115
118
 
@@ -123,11 +126,21 @@ describe Parser::Gsheet do
123
126
 
124
127
  it 'converted data into the correct dataframe' do
125
128
  expected_df = Daru::DataFrame.new(
126
- :header_1 => ['value 1'],
127
- :header_2 => ['value 2'],
128
- :header_3 => ['value 3'],
129
+ :header_1 => ['value 11', 'value 21', 'value 31'],
130
+ :header_2 => ['value 12', 'value 22', 'value 32'],
131
+ :header_3 => ['value 13', 'value 23', 'value 33']
129
132
  )
130
133
  expect(parser.parse(gs_extract).to_a).to eq expected_df.to_a
131
134
  end
132
135
 
136
+ it 'works when the last column contains blanks' do
137
+ # Google API only returns an array of dimensions up to the last non-blank column
138
+ example_data[0]['row 2'].pop
139
+ expected_df = Daru::DataFrame.new(
140
+ :header_1 => ['value 11', 'value 21', 'value 31'],
141
+ :header_2 => ['value 12', 'value 22', 'value 32'],
142
+ :header_3 => ['value 13', nil, 'value 33']
143
+ )
144
+ expect(parser.parse(gs_extract).to_a).to eq expected_df.to_a
145
+ end
133
146
  end
@@ -0,0 +1,80 @@
1
+ require_relative '../remi_spec'
2
+ require 'remi/data_subjects/salesforce_soap.rb'
3
+
4
+
5
+
6
+ describe Encoder::SalesforceSoap do
7
+ let(:encoder) { Encoder::SalesforceSoap.new }
8
+ let(:dataframe) do
9
+ Daru::DataFrame.new(
10
+ :Id => ['003G000001cKYaUIA4', '003G000001cKYbXIA4'],
11
+ :Student_ID__c => ['FJD385628', nil],
12
+ :Merge_Id__c => ['003g000001LE7dXAAT','003g000001IX4HcAAL']
13
+ )
14
+ end
15
+
16
+ it 'converts the dataframe into an array of hashes' do
17
+ expected_result = [
18
+ { :Id => '003G000001cKYaUIA4', :Student_ID__c => 'FJD385628', :Merge_Id__c => '003g000001LE7dXAAT' },
19
+ { :Id => '003G000001cKYbXIA4', :Student_ID__c => nil, :Merge_Id__c => '003g000001IX4HcAAL' },
20
+ ]
21
+ expect(encoder.encode dataframe).to eq expected_result
22
+ end
23
+ end
24
+
25
+
26
+ describe Loader::SalesforceSoap do
27
+ let(:loader) { Loader::SalesforceSoap.new(object: :Contact, credentials: {}, operation: :merge) }
28
+ let(:soapforce_client) { double('soapforce_client') }
29
+
30
+ before do
31
+ allow(loader).to receive(:soapforce_client) { soapforce_client }
32
+ end
33
+
34
+ it 'raises an error if an unknown operation is requested' do
35
+ data = [
36
+ { Id: '1234', Custom__c: 'something', Merge_Id: '5678' }
37
+ ]
38
+
39
+ loader = Loader::SalesforceSoap.new(object: :Contact, credentials: {}, operation: :not_defined)
40
+ expect { loader.load(data) }.to raise_error ArgumentError
41
+ end
42
+
43
+ it 'submits the right merge command' do
44
+ data = [
45
+ { Id: '1234', Custom__c: 'something', Merge_Id: '5678' }
46
+ ]
47
+
48
+ expect(soapforce_client).to receive(:merge) do
49
+ [
50
+ :Contact,
51
+ {
52
+ Id: '1234',
53
+ Custom__c: 'something'
54
+ },
55
+ ['5678']
56
+ ]
57
+ end
58
+
59
+ loader.load(data)
60
+ end
61
+
62
+ it 'submits a merge command for each row of data' do
63
+ data = [
64
+ { Id: '1', Custom__c: 'something', Merge_Id: '10' },
65
+ { Id: '2', Custom__c: 'something', Merge_Id: '20' }
66
+ ]
67
+
68
+ expect(soapforce_client).to receive(:merge).twice
69
+ loader.load(data)
70
+ end
71
+
72
+ it 'raises an error if the merge id field is not found' do
73
+ data = [
74
+ { Id: '1234', Custom__c: 'something', Alt_Merge_Id: '5678' }
75
+ ]
76
+
77
+ expect { loader.load(data) }.to raise_error KeyError
78
+ end
79
+
80
+ end
@@ -0,0 +1 @@
1
+ column A,column B
@@ -17,6 +17,123 @@ module Remi::SfBulkHelperStubs
17
17
  EOT
18
18
  end
19
19
 
20
+ def delete_raw_result
21
+ {
22
+ "xmlns" => "http://www.force.com/2009/06/asyncapi/dataload",
23
+ "id" => [
24
+ "750g0000004iys2AAA"
25
+ ],
26
+ "operation" => [
27
+ "delete"
28
+ ],
29
+ "object" => [
30
+ "Contact"
31
+ ],
32
+ "createdById" => [
33
+ "005A0000000eJ57IAE"
34
+ ],
35
+ "createdDate" => [
36
+ "2017-01-25T20:06:30.000Z"
37
+ ],
38
+ "systemModstamp" => [
39
+ "2017-01-25T20:06:30.000Z"
40
+ ],
41
+ "state" => [
42
+ "Closed"
43
+ ],
44
+ "concurrencyMode" => [
45
+ "Parallel"
46
+ ],
47
+ "contentType" => [
48
+ "XML"
49
+ ],
50
+ "numberBatchesQueued" => [
51
+ "1"
52
+ ],
53
+ "numberBatchesInProgress" => [
54
+ "0"
55
+ ],
56
+ "numberBatchesCompleted" => [
57
+ "0"
58
+ ],
59
+ "numberBatchesFailed" => [
60
+ "0"
61
+ ],
62
+ "numberBatchesTotal" => [
63
+ "1"
64
+ ],
65
+ "numberRecordsProcessed" => [
66
+ "0"
67
+ ],
68
+ "numberRetries" => [
69
+ "0"
70
+ ],
71
+ "apiVersion" => [
72
+ "32.0"
73
+ ],
74
+ "numberRecordsFailed" => [
75
+ "0"
76
+ ],
77
+ "totalProcessingTime" => [
78
+ "0"
79
+ ],
80
+ "apiActiveProcessingTime" => [
81
+ "0"
82
+ ],
83
+ "apexProcessingTime" => [
84
+ "0"
85
+ ],
86
+ "batches" => [
87
+ {
88
+ "xmlns" => "http://www.force.com/2009/06/asyncapi/dataload",
89
+ "id" => [
90
+ "751g0000002ozU5AAI"
91
+ ],
92
+ "jobId" => [
93
+ "750g0000004iys2AAA"
94
+ ],
95
+ "state" => [
96
+ "Completed"
97
+ ],
98
+ "createdDate" => [
99
+ "2017-01-25T20:06:31.000Z"
100
+ ],
101
+ "systemModstamp" => [
102
+ "2017-01-25T20:07:19.000Z"
103
+ ],
104
+ "numberRecordsProcessed" => [
105
+ "1"
106
+ ],
107
+ "numberRecordsFailed" => [
108
+ "0"
109
+ ],
110
+ "totalProcessingTime" => [
111
+ "684"
112
+ ],
113
+ "apiActiveProcessingTime" => [
114
+ "459"
115
+ ],
116
+ "apexProcessingTime" => [
117
+ "74"
118
+ ],
119
+ "response" => [
120
+ {
121
+ "id" => [
122
+ "003g000001LVMx3AAH"
123
+ ],
124
+ "success" => [
125
+ "true"
126
+ ],
127
+ "created" => [
128
+ "false"
129
+ ]
130
+ }
131
+ ]
132
+ }
133
+ ]
134
+ }
135
+ end
136
+
20
137
  def contact_query_raw_result
21
138
  {
22
139
  "xmlns" => "http://www.force.com/2009/06/asyncapi/dataload",
@@ -114,4 +114,19 @@ describe Remi::SfBulkHelper do
114
114
  end
115
115
  end
116
116
  end
117
+
118
+ describe SfBulkHelper::SfBulkDelete do
119
+ let(:sf_delete) { SfBulkHelper::SfBulkDelete.new({}, 'Contact', [{ 'Id' => '1234' }]) }
120
+ let(:sf_bulk) { double('sf_bulk') }
121
+
122
+ before do
123
+ allow(sf_delete).to receive(:sf_bulk) { sf_bulk }
124
+ allow(sf_bulk).to receive(:delete) { SfBulkHelperStubs.delete_raw_result }
125
+ end
126
+
127
+ it 'sends a delete request to the salesforce bulk api' do
128
+ expect(sf_bulk).to receive(:delete) { SfBulkHelperStubs.delete_raw_result }
129
+ sf_delete.send(:execute)
130
+ end
131
+ end
117
132
  end
metadata CHANGED
@@ -1,14 +1,14 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: remi
3
3
  version: !ruby/object:Gem::Version
4
- version: 0.3.1
4
+ version: 0.3.2
5
5
  platform: ruby
6
6
  authors:
7
7
  - Sterling Paramore
8
8
  autorequire:
9
9
  bindir: bin
10
10
  cert_chain: []
11
- date: 2016-10-25 00:00:00.000000000 Z
11
+ date: 2017-01-25 00:00:00.000000000 Z
12
12
  dependencies:
13
13
  - !ruby/object:Gem::Dependency
14
14
  name: bond
@@ -283,6 +283,7 @@ files:
283
283
  - lib/remi/data_subjects/postgres.rb
284
284
  - lib/remi/data_subjects/s3_file.rb
285
285
  - lib/remi/data_subjects/salesforce.rb
286
+ - lib/remi/data_subjects/salesforce_soap.rb
286
287
  - lib/remi/data_subjects/sftp_file.rb
287
288
  - lib/remi/data_subjects/sub_job.rb
288
289
  - lib/remi/dsl.rb
@@ -295,7 +296,6 @@ files:
295
296
  - lib/remi/job/sub_job.rb
296
297
  - lib/remi/job/transform.rb
297
298
  - lib/remi/loader.rb
298
- - lib/remi/monkeys/daru.rb
299
299
  - lib/remi/parser.rb
300
300
  - lib/remi/refinements/symbolizer.rb
301
301
  - lib/remi/settings.rb
@@ -317,6 +317,7 @@ files:
317
317
  - spec/data_subjects/none_spec.rb
318
318
  - spec/data_subjects/postgres_spec.rb
319
319
  - spec/data_subjects/s3_file_spec.rb
320
+ - spec/data_subjects/salesforce_soap_spec.rb
320
321
  - spec/data_subjects/salesforce_spec.rb
321
322
  - spec/data_subjects/sftp_file_spec.rb
322
323
  - spec/data_subjects/sub_job_spec.rb
@@ -325,6 +326,7 @@ files:
325
326
  - spec/fields_spec.rb
326
327
  - spec/fixtures/basic.csv
327
328
  - spec/fixtures/basic2.csv
329
+ - spec/fixtures/empty.csv
328
330
  - spec/fixtures/sf_bulk_helper_stubs.rb
329
331
  - spec/fixtures/unsupported_escape.csv
330
332
  - spec/job/transform_spec.rb
@@ -396,6 +398,7 @@ test_files:
396
398
  - spec/data_subjects/none_spec.rb
397
399
  - spec/data_subjects/postgres_spec.rb
398
400
  - spec/data_subjects/s3_file_spec.rb
401
+ - spec/data_subjects/salesforce_soap_spec.rb
399
402
  - spec/data_subjects/salesforce_spec.rb
400
403
  - spec/data_subjects/sftp_file_spec.rb
401
404
  - spec/data_subjects/sub_job_spec.rb
@@ -404,6 +407,7 @@ test_files:
404
407
  - spec/fields_spec.rb
405
408
  - spec/fixtures/basic.csv
406
409
  - spec/fixtures/basic2.csv
410
+ - spec/fixtures/empty.csv
407
411
  - spec/fixtures/sf_bulk_helper_stubs.rb
408
412
  - spec/fixtures/unsupported_escape.csv
409
413
  - spec/job/transform_spec.rb
@@ -1,4 +0,0 @@
1
- # Needed to fix issue in Daru 0.1.4.1
2
- class Daru::DataFrame
3
- remove_method :to_hash
4
- end