gooddata_eloqua 0.1.3 → 0.1.4
- checksums.yaml +4 -4
- data/lib/gooddata_eloqua/client.rb +9 -0
- data/lib/gooddata_eloqua/helpers/request.rb +193 -130
- metadata +2 -2
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA1:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 294f5ae81c518f15f02473db8460e6acf16bb07d
+  data.tar.gz: 9e830b5735f25b8c0f5734410fafe5eb7bf024a3
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 1c4fbb45c71de382b763669b2f24fef92857d942313b2576d8a989527eafd65aebccb5728666759f019441e6dcaa7579b8fecc0e9ed222915befbf9cde46093a
+  data.tar.gz: bda53300869551d6ed5a2fefd12a733e251c7ecd4cacc6c1f8910abd349433d3eb38e0d23bc0280281894b7f68e66143bba31927b5191514e0a7684715d54196
data/lib/gooddata_eloqua/client.rb
CHANGED
@@ -11,11 +11,20 @@ require 'rest_client'
 class GoodDataEloqua::Client

   def initialize(config={})
+
     $global_id = SecureRandom.hex
     $client = config[:eloqua] || config[:client]

+    if config[:threading]
+      $threading = config[:threading]
+    else
+      $threading = true
+    end
+
     self.system_check

+    puts "#{Time.now} => Initialized job with session id: #{$global_id}."
+
   end

   def campaigns(config={})
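The initializer now accepts a :threading key and publishes it through the $threading global that the request helpers branch on, alongside the existing :eloqua/:client session. A minimal usage sketch (the session construction is hypothetical, since the eloqua_api side of things is not part of this diff):

    require 'gooddata_eloqua'

    # Hypothetical helper standing in for however you build the eloqua_api session.
    eloqua_session = build_eloqua_session

    # :eloqua (or :client) supplies the session; :threading selects the pmap or each code paths below.
    client = GoodDataEloqua::Client.new(eloqua: eloqua_session, threading: true)

Note that, as written, the guard is "if config[:threading] ... else $threading = true", so every input (including threading: false) leaves $threading truthy; something like config.fetch(:threading, true) would be needed for the flag to actually disable the parallel paths.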
data/lib/gooddata_eloqua/helpers/request.rb
CHANGED
@@ -65,19 +65,37 @@ class GoodDataEloqua::Request
     puts "#{Time.now} => #{iterations.length} pages queued for download."
     count = iterations.length
     mutex = Mutex.new
-    iterations.pmap { |page|

-
-      print "\r#{Time.now} => Extracting IDs - Remaining Pages: #{count}\s\s\s"
+    if $threading

-
-
-      response
-
+      iterations.pmap { |page|
+
+        response = self.get_all_by_page(page)
+        print "\r#{Time.now} => Extracting IDs - Remaining Pages: #{count}\s\s\s"
+
+        mutex.synchronize {
+          count -= 1
+          response['elements'].each { |element|
+            csv << [element['id']]
+          }
         }
+
       }
+    else
+      iterations.each { |page|

-
+        response = self.get_all_by_page(page)
+        print "\r#{Time.now} => Extracting IDs - Remaining Pages: #{count}\s\s\s"
+
+        mutex.synchronize {
+          count -= 1
+          response['elements'].each { |element|
+            csv << [element['id']]
+          }
+        }
+
+      }
+    end

     csv.flush
     puts "#{Time.now} => #{count} IDs extracted."
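Both branches above do the same work; only the iterator differs (pmap when $threading is set, each otherwise), and the Mutex serializes writes to the shared csv handle and counter. A compact sketch of the same pattern with the body factored into one lambda so the two paths cannot drift apart (pmap is not core Ruby, so the parallel path is approximated with plain threads, and get_all_by_page is stubbed for illustration):

    require 'csv'

    # Stub standing in for the gem's Eloqua page request, which returns a hash
    # with an 'elements' array of id records.
    def get_all_by_page(page)
      { 'elements' => [{ 'id' => "id-#{page}" }] }
    end

    def extract_ids(pages, csv, threaded: true)
      mutex = Mutex.new

      work = lambda do |page|
        response = get_all_by_page(page)      # network call: safe to run concurrently
        mutex.synchronize do                  # csv handle is shared: serialize writes
          response['elements'].each { |element| csv << [element['id']] }
        end
      end

      if threaded
        pages.map { |page| Thread.new { work.call(page) } }.each(&:join)
      else
        pages.each { |page| work.call(page) }
      end

      csv.flush
    end

    extract_ids((1..5).to_a, CSV.new($stdout))

One thread per page is only the simplest stand-in for pmap; a run over many pages would want a bounded pool along the lines of the Pool object used later in this file.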
@@ -103,25 +121,118 @@ class GoodDataEloqua::Request
     write_fd = IO.sysopen(file_name, 'w+')
     @csv = CSV.new(IO.new(write_fd))

+    read_csv = CSV.open(fd)

-
+    ids = read_csv.map { |row|
+      row[0]
+    }

-
-      row[0]
-    }
+    return if ids.empty?

-
+    cache = []
+    @headers = nil
+    set_headers = []
+    have_not_set_headers = false
+    count = 0

-
-
-    count = 0
-    mutex = Mutex.new
+    if $threading
+      ids.pmap { |id| # <- Change this back to parallel

+        unless set_headers.empty?
+          puts "\nWAS ABLE TO SET HEADER\n"
+          @csv << set_headers.pop
+        end

-
+        if cache.length > 2000
+          batch = cache.slice!(1..500)
+          batch.map { |row| @csv << row }
+        end

-
-
+        count += 1
+        response = self.get_one_by_id(id)
+
+        case response['type']
+        when "Contact"
+          keys = 'fieldValues'
+        when "Campaign"
+          keys = 'elements'
+        when "Email"
+          keys = 'htmlContent'
+        when "Form"
+          keys = 'elements'
+        else
+          keys = 'elements'
+        end
+
+        if response.empty?
+
+          payload = [[id]]
+
+        else
+
+          if @headers == nil
+
+            begin
+              unless response[keys].empty?
+
+                element_keys = response[keys][0].keys.map { |key|
+                  "element_#{key}"
+                }
+
+              else
+                element_keys = []
+              end
+
+              @headers = response.keys + element_keys
+              set_headers << response.keys + element_keys
+
+            rescue NoMethodError
+              headers = [id]
+            end
+
+          end
+
+          if response[keys].is_a? Array
+            if response[keys].empty?
+              payload = [response.values]
+            else
+              payload = []
+
+              response[keys].each { |element|
+                payload << response.values + element.values
+              }
+            end
+          else
+            payload = [response]
+          end
+
+        end
+
+        ######### EDITING PAYLOAD TO REMOVE KEY ROW VALUES #####
+
+        payload = payload[0].map { |key_value|
+          if key_value.is_a? Array or key_value.is_a? Hash
+            key_value.to_json
+          else
+            key_value
+          end
+        }
+
+        cache << payload
+
+        print "\r#{Time.now} => Extracting Profiles - IDs Remaining: #{ids.length-count} Cache: #{cache.length}\s\s\s"
+
+      }
+    else
+      ids.each { |id| # <- Change this back to parallel
+
+        unless set_headers.empty?
+          puts "\nWAS ABLE TO SET HEADER\n"
+          @csv << set_headers.pop
+        end
+
+        if cache.length > 2000
+          batch = cache.slice!(1..500)
           batch.map { |row| @csv << row }
         end

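The per-record branch above derives the CSV header once from response.keys plus the nested element keys (prefixed with element_), then turns each record into a flat row, JSON-encoding any Array or Hash cell. An illustrative helper showing that flattening in isolation (flatten_record is a hypothetical name, not a method in the gem):

    require 'json'

    def flatten_record(response, nested_key)
      elements = response[nested_key].is_a?(Array) ? response[nested_key] : []

      # Header: top-level keys, then the nested element keys prefixed with "element_".
      header = response.keys +
               (elements.first ? elements.first.keys.map { |k| "element_#{k}" } : [])

      # One row per nested element, or just the top-level values when there are none.
      rows = elements.empty? ? [response.values] : elements.map { |el| response.values + el.values }

      # JSON-encode nested structures so every CSV cell is a scalar.
      rows = rows.map { |row| row.map { |v| (v.is_a?(Array) || v.is_a?(Hash)) ? v.to_json : v } }

      [header, rows]
    end

    header, rows = flatten_record(
      { 'id' => '42', 'type' => 'Contact',
        'fieldValues' => [{ 'id' => '1', 'value' => 'a@b.com' }] },
      'fieldValues'
    )
    # header => ["id", "type", "fieldValues", "element_id", "element_value"]

One difference worth noting: the diff keeps only payload[0] before caching, so a record that expands to several element rows contributes a single row to the output.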
@@ -147,7 +258,7 @@ class GoodDataEloqua::Request

         else

-          if headers == nil
+          if @headers == nil

             begin
               unless response[keys].empty?
@@ -160,7 +271,8 @@ class GoodDataEloqua::Request
                 element_keys = []
               end

-              headers = response.keys + element_keys
+              @headers = response.keys + element_keys
+              set_headers << response.keys + element_keys

             rescue NoMethodError
               headers = [id]
@@ -184,106 +296,57 @@ class GoodDataEloqua::Request

         end

+        ######### EDITING PAYLOAD TO REMOVE KEY ROW VALUES #####
+
+        payload = payload[0].map { |key_value|
+          if key_value.is_a? Array or key_value.is_a? Hash
+            key_value.to_json
+          else
+            key_value
+          end
+        }
+
         cache << payload

         print "\r#{Time.now} => Extracting Profiles - IDs Remaining: #{ids.length-count} Cache: #{cache.length}\s\s\s"

       }

-
-
+    end
+
+    puts "\n#{Time.now} => Threads complete!\n"
+    count = cache.length
+
+    if $threading
+
       cache.pmap { |row|
         print "\r#{Time.now} => Dumping pool to CSV #{count}\s\s\s\s "
         count -= 1
         @csv << row
-
+
       }

-
+    else
+
+      cache.each { |row|
+        print "\r#{Time.now} => Dumping pool to CSV #{count}\s\s\s\s "
+        count -= 1
+        @csv << row
+
+      }

     end

+
+    @csv.flush
+
+    puts "\n#{Time.now} => Flushing IO to \"#{file_name}\"\n"
+
     puts "\n#{Time.now} => Complete: #{file_name}\n\n"

     file_name

-
-    #
-    # CSV.open(fd) do |read_csv|
-    #
-    #
-    #
-    #   puts "#{Time.now} => Extracted from CSV: #{ids.length}\n"
-    #
-    #   elapsed_times = []
-    #
-    #   loop do
-    #
-    #     batch = ids.slice!(1..20)
-    #
-    #     started_batch = Time.now
-    #
-    #     if elapsed_times.length > 5
-    #       estimated_time = elapsed_times.inject{ |sum, el| sum + el }.to_f / elapsed_times.size
-    #     else
-    #       estimated_time = 0
-    #     end
-    #
-    #     if (estimated_time/60) > 60 && estimated_time != 0
-    #       time_readable = "#{((estimated_time/60)/60).round(2)} hours"
-    #
-    #     elsif (estimated_time/60) < 60 && (estimated_time/60) > 1 && estimated_time != 0
-    #       time_readable = "#{(estimated_time/60).round(2)} minutes"
-    #
-    #     elsif (estimated_time/60) > 0 && estimated_time != 0
-    #       time_readable = "#{(estimated_time).round(2)} seconds"
-    #
-    #     else
-    #       time_readable = 'Estimating...'
-    #
-    #     end
-    #
-    #     break unless batch.length > 0 && pool.list.length
-    #
-    #     #
-    #     batch.each { |i|
-    #       pool.thread {
-    #         response = self.get_one_by_id(i)
-    #         csv << response.values
-    #         csv.flush
-    #         print "\r#{Time.now} => IDs Remaing: #{ids.length} - Complete: #{time_readable} - Thread Pool: #{pool.active_threads} \s\s\s\s"
-    #       }
-    #     }
-    #
-    #     ended_batch = Time.now
-    #
-    #     elapsed_time = ended_batch - started_batch
-    #     number_of_estimated_chunks = ids.length/20
-    #     estimated_batch_time = (elapsed_time*number_of_estimated_chunks).round
-    #
-    #     if elapsed_times.length < 10
-    #       elapsed_times << estimated_batch_time
-    #     elsif elapsed_times.length > 50
-    #       40.times do
-    #         elapsed_times.pop
-    #       end
-    #     end
-    #
-    #
-    #     if pool.active_threads > 21
-    #       sleep 3
-    #     end
-    #
-    #     break if ids.length == 0 && pool.list.length
-    #
-    #
-    #   end
-    #
-    #
-    #
-    #   puts "#{Time.now} => Complete. CSV written to \"./downloads/#{@session_id}_complete.csv\""
-    #
-    # end
+


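The final stage drains the in-memory cache into the CSV (via pmap or each, mirroring the extraction stage) and then calls @csv.flush so the buffered rows reach disk before the file is reported complete. A small self-contained sketch of that output path, with illustrative file name and rows:

    require 'csv'

    file_name = 'example_complete.csv'
    write_fd  = IO.sysopen(file_name, 'w+')
    csv       = CSV.new(IO.new(write_fd, 'w+'))

    cache = [%w[id type], %w[42 Contact]]   # stand-in for the rows built above

    count = cache.length
    cache.each do |row|
      print "\r#{Time.now} => Dumping pool to CSV #{count}   "
      count -= 1
      csv << row
    end

    csv.flush   # push buffered rows to disk before announcing completion
    puts "\n#{Time.now} => Complete: #{file_name}"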
@@ -312,33 +375,33 @@ class GoodDataEloqua::Request

     csv.flush

-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+    pool = Pool.new
+
+    while pool.running do
+
+      batch = iterations.slice!(1..20)
+
+      break unless batch
+
+      batch.each { |i|
+        pool.thread {
+
+
+          print "\r#{Time.now} => Pages #{iterations.length} of #{num_iterations} - Workers: #{pool.active_threads}\s"
+        }
+      }
+
+      if pool.active_threads > 20
+        sleep 2
+      end
+
+      if iterations.length == 0 && pool.active_threads == 0
+        pool.running = false
+      else
+        next
+      end
+
+    end

   end

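The last hunk reworks the page-download loop around a Pool object with a running flag, a thread method, and an active_threads count. That class is not shown in this diff, so the sketch below is only a minimal stand-in with the same surface, to make the loop's termination condition concrete; it is not the gem's actual implementation:

    # Minimal stand-in for the Pool used above (hypothetical implementation).
    class Pool
      attr_accessor :running

      def initialize
        @running = true
        @threads = []
      end

      # Start a worker for the given block and track it.
      def thread(&block)
        @threads << Thread.new(&block)
      end

      # Count workers that are still running, pruning finished ones.
      def active_threads
        @threads.delete_if { |t| !t.alive? }
        @threads.size
      end
    end

With a pool like this, the "while pool.running" loop exits once iterations has been consumed and active_threads reaches zero, because that branch sets pool.running = false; until then the else/next keeps polling.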
metadata
CHANGED
@@ -1,14 +1,14 @@
|
|
1
1
|
--- !ruby/object:Gem::Specification
|
2
2
|
name: gooddata_eloqua
|
3
3
|
version: !ruby/object:Gem::Version
|
4
|
-
version: 0.1.
|
4
|
+
version: 0.1.4
|
5
5
|
platform: ruby
|
6
6
|
authors:
|
7
7
|
- Patrick McConlogue
|
8
8
|
autorequire:
|
9
9
|
bindir: bin
|
10
10
|
cert_chain: []
|
11
|
-
date: 2015-02-
|
11
|
+
date: 2015-02-23 00:00:00.000000000 Z
|
12
12
|
dependencies:
|
13
13
|
- !ruby/object:Gem::Dependency
|
14
14
|
name: eloqua_api
|