allplayers_imports 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- data/README.md +22 -0
- data/allplayers_imports.gemspec +19 -0
- data/lib/allplayers_imports.rb +839 -0
- metadata +133 -0
data/README.md
ADDED
|
@@ -0,0 +1,22 @@
|
|
|
1
|
+
allplayers_imports.rb
|
|
2
|
+
=====================
|
|
3
|
+
|
|
4
|
+
A Ruby tool that parses data and imports it into AllPlayers.com via the AllPlayers public API.
|
|
5
|
+
|
|
6
|
+
Currently works with gdata (Google Spreadsheet API) but can also accept a parsed csv as input.
|
|
7
|
+
|
|
8
|
+
To install, run <code>gem install allplayers_imports</code>
|
|
9
|
+
|
|
10
|
+
Extend your allplayers object to include AllPlayersImports
|
|
11
|
+
|
|
12
|
+
Example:
|
|
13
|
+
```
|
|
14
|
+
require 'allplayers'
|
|
15
|
+
require 'allplayers_imports'
|
|
16
|
+
|
|
17
|
+
allplayers_session = AllPlayers::Client.new(nil, 'www.allplayers.com')
|
|
18
|
+
allplayers_session.add_headers({:Authorization => 'Basic ' + Base64.encode64(user + ':' + pass)})
|
|
19
|
+
allplayers_session.extend AllPlayersImports
|
|
20
|
+
|
|
21
|
+
allplayers_session.import_sheet(spreadsheet, 'Groups or Participant Information')
|
|
22
|
+
```
|
|
@@ -0,0 +1,19 @@
|
|
|
1
|
+
# encoding: utf-8
# Gem specification for allplayers_imports: a spreadsheet-to-AllPlayers
# import tool layered on top of the 'allplayers' API client gem.
Gem::Specification.new do |spec|
  # Runtime dependencies (fastercsv provides the FasterCSV parser used
  # by the importer; highline drives the colored console output).
  spec.add_dependency 'ci_reporter', ['~> 1.7.0']
  spec.add_dependency 'fastercsv', ['~> 1.5.3']
  spec.add_dependency 'highline', ['~> 1.6.11']
  spec.add_dependency 'allplayers', ['~> 0.1.0']
  spec.authors = ["AllPlayers.com"]
  spec.description = %q{A Ruby tool to handle import spreadsheets into AllPlayers API.}
  spec.email = ['support@allplayers.com']
  # Ship the README, this gemspec, and everything under lib/.
  spec.files = %w(README.md allplayers_imports.gemspec)
  spec.files += Dir.glob("lib/**/*.rb")
  spec.homepage = 'http://www.allplayers.com/'
  spec.licenses = ['MIT']
  spec.name = 'allplayers_imports'
  spec.require_paths = ['lib']
  spec.required_rubygems_version = Gem::Requirement.new('>= 1.3.6')
  spec.summary = spec.description
  spec.version = '0.1.0'
end
|
|
@@ -0,0 +1,839 @@
|
|
|
1
|
+
# Provides ImportActions using AllPlayers API.
|
|
2
|
+
|
|
3
|
+
require 'rubygems'
|
|
4
|
+
require 'highline/import'
|
|
5
|
+
require 'active_support'
|
|
6
|
+
require 'active_support/core_ext/time/conversions.rb'
|
|
7
|
+
require 'thread'
|
|
8
|
+
require 'logger'
|
|
9
|
+
require 'resolv'
|
|
10
|
+
require 'date'
|
|
11
|
+
require 'faster_csv'
|
|
12
|
+
|
|
13
|
+
# Stop EOF errors in Highline
|
|
14
|
+
HighLine.track_eof = false
|
|
15
|
+
|
|
16
|
+
# Error type rescued around user_get_email / email_to_uuid calls when an
# email lookup matches more than one existing account.
class DuplicateUserExists < StandardError
end
|
|
18
|
+
|
|
19
|
+
# Add some tools to Array to make parsing spreadsheet rows easier.
|
|
20
|
+
class Array
  # Build a Hash that uses +other+ as the keys and self as the values,
  # pairing elements positionally. Elements of self beyond other's
  # length are dropped; missing ones come through as nil.
  # NOTE: spreadsheet rows use this to key cells by column label.
  def to_hash(other)
    hsh = {}
    other.each_index { |ix| hsh[other[ix]] = self[ix] }
    hsh
  end

  # For each item keep only the text before the first split on
  # +pattern+, with leading/trailing whitespace removed. Returns a new
  # array.
  def split_first(pattern)
    map { |item| item.split(pattern)[0].strip }
  end

  # Lower-case every item, returning a new array.
  def downcase
    map { |item| item.downcase }
  end

  # Apply String#gsub to every item, returning a new array.
  def gsub(pattern, replacement)
    map { |item| item.gsub(pattern, replacement) }
  end
end
|
|
49
|
+
|
|
50
|
+
class Hash
  # Return a new hash containing only the entries whose key matches
  # +pattern+, with the first match in each key substituted by
  # +replacement+ (stripped entirely by default). Used to pull prefixed
  # column groups (e.g. 'parent_1_*') out of a spreadsheet row.
  def key_filter(pattern, replacement = '')
    result = {}
    each do |key, value|
      next unless key.match(pattern)
      result[key.sub(pattern, replacement)] = value
    end
    result
  end
end
|
|
58
|
+
|
|
59
|
+
class Date
  # Whole years elapsed between this date and today (UTC wall clock).
  def to_age
    today = Time.now.utc.to_date
    years = today.year - year
    # One year less if this year's anniversary hasn't arrived yet.
    anniversary_reached = today.month > month ||
      (today.month == month && today.day >= day)
    anniversary_reached ? years : years - 1
  end
end
|
|
65
|
+
|
|
66
|
+
# valid_email_address port from Drupal
|
|
67
|
+
class String
  # True when this string is a syntactically valid email address,
  # per Drupal's valid_email_address regex (supports dotted domains and
  # bracketed IPv4/IPv6 literal hosts).
  # NOTE(review): anchored with ^/$, which match per-line in Ruby;
  # \A/\z would be stricter against embedded newlines — confirm inputs
  # are single-line before relying on this for validation.
  def valid_email_address?
    return !self.match(/^[a-zA-Z0-9_\-\.\+\^!#\$%&*+\/\=\?\`\|\{\}~\']+@((?:(?:[a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9\-]*[a-zA-Z0-9])\.?)+|(\[([0-9]{1,3}(\.[0-9]{1,3}){3}|[0-9a-fA-F]{1,4}(\:[0-9a-fA-F]{1,4}){7})\]))$/).nil?
  end

  # True when the domain part (everything after '@') has at least one
  # MX or A record in DNS, i.e. mail could plausibly be delivered.
  # Performs a live DNS query. Raises NoMethodError if the string
  # contains no '@' (the match returns nil).
  def active_email_domain?
    domain = self.match(/\@(.+)/)[1]
    Resolv::DNS.open do |dns|
      @mx = dns.getresources(domain, Resolv::DNS::Resource::IN::MX)
      @a = dns.getresources(domain, Resolv::DNS::Resource::IN::A)
    end
    @mx.size > 0 || @a.size > 0
  end
end
|
|
80
|
+
|
|
81
|
+
# Write the correct header for the csv log
|
|
82
|
+
class ApciLogDevice < Logger::LogDevice
  private

  # Overrides Logger::LogDevice's private hook so newly created log
  # files begin with a CSV header row matching the columns emitted by
  # ApciFormatter#call.
  def add_log_header(file)
    file.write(
      "\"Severity\",\"Date\",\"Severity (Full)\",\"Row\",\"Info\"\n"
    )
  end
end
|
|
90
|
+
|
|
91
|
+
# Build a Logger::Formatter subclass.
|
|
92
|
+
# Logger formatter that returns CSV rows (matching ApciLogDevice's
# header) while also echoing a colorized copy of each message to the
# console via HighLine's say.
class ApciFormatter < Logger::Formatter
  # CSV row: short severity, "[datetime#pid]", padded severity,
  # row number (program_name), message.
  Format = "\"%s\",\"[%s#%d]\",\"%5s\",\"%s\",\"%s\"\n"
  def initialize
    @highline = HighLine.new
    super
  end
  # Provide a call() method that returns the formatted message.
  def call(severity, time, program_name, message)
    # Console copy: errors red+bold, warnings bold, otherwise plain.
    message_color = severity == 'ERROR' ? @highline.color(message, :red, :bold) : message
    message_color = severity == 'WARN' ? @highline.color(message, :bold) : message_color
    if program_name == program_name.to_i.to_s
      # Abuse program_name as row # — alternate cyan/magenta by parity
      # so adjacent rows are visually distinct.
      if program_name.to_i.even?
        say @highline.color('Row ' + program_name + ': ', :cyan, :bold) + message_color
      else
        say @highline.color('Row ' + program_name + ': ', :magenta, :bold) + message_color
      end
    else
      say message_color
    end
    # Keep the CSV parseable by swapping double quotes for single.
    # NOTE(review): gsub! mutates the caller's string in place.
    message.gsub!('"', "'")
    Format % [severity[0..0], format_datetime(time), $$, severity, program_name,
      msg2str(message)]
  end
end
|
|
117
|
+
|
|
118
|
+
# Functions to aid importing any type of spreadsheet to Allplayers.com.
|
|
119
|
+
module AllPlayersImports
|
|
120
|
+
# Mutexes serializing multi-threaded access to the shared worksheet,
# the stats hash, and the user cache, respectively.
@@sheet_mutex = Mutex.new
@@stats_mutex = Mutex.new
@@user_mutex = Mutex.new
# Per-email (and per-parent-set) locks, created on demand by
# email_to_uuid and import_user.
@@email_mutexes = {}

# Cache of email address => user UUID (populated by email_to_uuid).
@@uuid_map = {}
# Statistics about operations performed, keyed by stat name
# (see increment_stat).
@@stats = {}
|
|
129
|
+
|
|
130
|
+
# Cache and honor locks on email to UID req's.
|
|
131
|
+
# Resolve an email address to a user UUID via self.user_get_email,
# caching results in @@uuid_map so each address is only looked up once
# across all importer threads.
#
# email  - the address to resolve.
# action - when :lock, also returns the per-email lock so the caller
#          can hold it while creating an account for that address.
#
# Returns the UUID (nil if the lookup had no 'uuid'), or
# [uuid, lock] when action == :lock.
def email_to_uuid(email, action = nil)
  @@user_mutex.synchronize do
    # If we've cached it, short circuit.
    return @@uuid_map[email] if @@uuid_map.has_key?(email)
    # Haven't cached it, create a targeted Mutex for it.
    # Changed to using monitor, see http://www.velocityreviews.com/forums/t857319-thread-and-mutex-in-ruby-1-8-7-a.html
    # NOTE(review): Monitor lives in the 'monitor' stdlib, which is not
    # required at the top of this file — confirm it is loaded elsewhere.
    @@email_mutexes[email] = Monitor.new unless @@email_mutexes.has_key?(email)
  end

  user = nil
  # Try to get a targeted lock.
  @@email_mutexes[email].synchronize {
    # Got the lock, short circuit if another thread found our UID.
    return @@uuid_map[email] if @@uuid_map.has_key?(email)
    user = self.user_get_email(email)
    @@uuid_map[email] = user['uuid'] if user.include?('uuid')
  }
  # Caller wants the lock while it tries to generate a user.
  return user['uuid'], @@email_mutexes[email] if action == :lock
  user['uuid']
end
|
|
152
|
+
|
|
153
|
+
# Look through the existing children of parent_1/parent_2 (when the row
# carries their UUID lookups) for a child whose first/last name matches
# the row, to avoid creating duplicate child accounts. When a match is
# found, the child is also linked to the other parent if that parent
# isn't already matched.
#
# row          - prepared spreadsheet row (hash of column => value).
# description  - label used in log messages (e.g. 'Participant').
# matched_uuid - a child UUID already known to match, if any.
#
# Returns {'mail' => ..., 'uuid' => ...} for a matched child, or nil.
def verify_children(row, description = 'User', matched_uuid = nil)
  # Fields to match
  import = row.reject {|k,v| k != 'first_name' && k != 'last_name'}
  prefixes = ['parent_1_', 'parent_2_']
  matched_parents = []
  ret = nil
  prefixes.each {|prefix|
    # e.g. 'parent_1_' => 'Parent 1'
    parent_description = prefix.split('_').join(' ').strip.capitalize
    if row.has_key?(prefix + 'uuid')
      # row[prefix + 'uuid'] is a lookup response; dig out the raw UUID.
      children = self.user_children_list(row[prefix + 'uuid']['item'].first['uuid'])
      next if children.nil? || children.length <= 0
      children['item'].each do |child|
        kid = self.user_get(child['uuid'])
        next if kid['firstname'].nil?
        if (matched_uuid.nil? || matched_uuid != child['uuid'])
          # Compare case-insensitively on first/last name only.
          system = {}
          system['first_name'] = kid['firstname'].downcase if kid.has_key?('firstname')
          system['last_name'] = kid['lastname'].downcase if kid.has_key?('lastname')
          import['first_name'] = import['first_name'].downcase
          import['last_name'] = import['last_name'].downcase
          if (system != import)
            # Keep looking
            next
          end
          # Found it
          @logger.info(get_row_count.to_s) {parent_description + ' has matching child: ' + description + ' ' + row['first_name'] + ' ' + row['last_name']} if ret.nil?
          if matched_uuid.nil?
            matched_uuid = child['uuid']
          end
          if !child.nil?
            ret = {'mail' => kid['email'], 'uuid' => matched_uuid }
          end
          matched_parents.push(prefix)
          break
        end
      end
    end
  }
  # Add existing child to other parent if needed.
  unless matched_uuid.nil?
    prefixes.each {|prefix|
      parent_description = prefix.split('_').join(' ').strip.capitalize
      if row.has_key?(prefix + 'uuid') && !matched_parents.include?(prefix)
        @logger.info(get_row_count.to_s) {'Adding existing child, ' + description + ' ' + row['first_name'] + ' ' + row['last_name'] + ' has matching child : ' + parent_description}
        # Empty name/birthdate/gender: only :child_uuid linkage is sent.
        self.user_create_child(row[prefix + 'uuid']['item'].first['uuid'], '', '', '', '', {:child_uuid => matched_uuid})
      end
    }
  end

  return ret
end
|
|
204
|
+
|
|
205
|
+
# Load the group-name => UUID map of previously imported groups from
# the local imported_groups.csv log (columns: row, name, uuid).
# Returns an empty hash when no log file exists.
def get_group_names_from_file
  mapping = {}
  return mapping unless FileTest.exist?("imported_groups.csv")
  FasterCSV.foreach("imported_groups.csv") do |csv_row|
    mapping[csv_row[1]] = csv_row[2]
  end
  mapping
end
|
|
214
|
+
|
|
215
|
+
# Load the row-number => UUID map of previously imported groups from
# the local imported_groups.csv log (columns: row, name, uuid).
# Returns an empty hash when no log file exists.
def get_group_rows_from_file
  mapping = {}
  if FileTest.exist?("imported_groups.csv")
    FasterCSV.foreach("imported_groups.csv") do |csv_row|
      mapping[csv_row[0]] = csv_row[2]
    end
  end
  mapping
end
|
|
224
|
+
|
|
225
|
+
# Turn a raw spreadsheet row (array of cells) into a hash keyed by
# column label, updating the per-thread row counter as a side effect.
#
# row_array   - the cell values.
# column_defs - the sanitized column labels (keys).
# row_count   - explicit row number; when omitted, the thread counter
#               is simply advanced by one.
#
# Returns the hash with values stringified/stripped and empties removed.
def prepare_row(row_array, column_defs, row_count = nil)
  row_count ? set_row_count(row_count) : increment_row_count
  record = row_array.to_hash(column_defs)
  # Normalize every cell to a stripped string.
  record.each_key { |key| record[key] = record[key].to_s.strip }
  # Drop blank cells entirely.
  record.delete_if { |key, value| value.empty? }
end
|
|
237
|
+
|
|
238
|
+
# Current row number for this worker thread (thread-local), defaulting
# to 0 on first access.
def get_row_count
  Thread.current['row_count'] ||= 0
end
|
|
242
|
+
|
|
243
|
+
# Advance this thread's row counter by one.
def increment_row_count
  next_count = get_row_count + 1
  set_row_count(next_count)
end
|
|
246
|
+
|
|
247
|
+
# Store the row counter for this worker thread (thread-local storage).
# Returns the stored count.
def set_row_count(count)
  Thread.current['row_count'] = count
  count
end
|
|
250
|
+
|
|
251
|
+
# Bump the named counter in the shared @@stats hash (starting it at 1
# on first use), guarded by @@stats_mutex for thread safety.
def increment_stat(type)
  @@stats_mutex.synchronize do
    @@stats[type] = @@stats.fetch(type, 0) + 1
  end
end
|
|
260
|
+
|
|
261
|
+
# Import one spreadsheet worksheet into AllPlayers, dispatching on the
# worksheet name.
#
# sheet         - array-like of rows; row 1 holds field descriptions,
#                 row 2 the column labels, data follows.
# name          - worksheet name; selects the import strategy
#                 ('Participant Information', 'Users', 'Groups',
#                 'Group Information', 'Duplicates', 'Events',
#                 'Users in Groups').
# g, wuri       - optional spreadsheet client + worksheet URI, used to
#                 write created event node IDs back to 'Events' sheets.
# run_character - when set (or overridden by $run_character), only rows
#                 whose 'run_character' column matches are processed.
# skip_emails   - when non-nil, adds headers that suppress notification
#                 emails during the import; nil removes them.
#
# Honors globals: $run_character, $skip_rows, $thread_count.
def import_sheet(sheet, name, g = nil, wuri = nil, run_character = nil, skip_emails = nil)
  if skip_emails.nil?
    self.remove_headers({:NOTIFICATION_BYPASS => nil, :API_USER_AGENT => nil})
  else
    self.add_headers({:NOTIFICATION_BYPASS => 1, :API_USER_AGENT => 'AllPlayers-Import-Client'})
  end

  # Command-line global takes precedence over the argument.
  run_char = run_character
  run_char = $run_character unless $run_character.nil?
  rerun_sheet = []
  rerun_row_count = {}
  start_time = Time.now
  @logger.debug('import') {'Started ' + start_time.to_s}

  set_row_count(0)
  increment_row_count
  # Pull the first row and chunk it, it's just extended field descriptions.
  @logger.info(get_row_count.to_s) {"Skipping Descriptions"}
  sheet.shift

  # Pull the second row and use it to define columns.
  increment_row_count
  @logger.info(get_row_count.to_s) {"Parsing column labels"}
  begin
    # Keep only text before a newline, squash non-alphanumerics to '_',
    # lower-case: 'First Name' => 'first_name'.
    column_defs = sheet.shift.split_first("\n").gsub(/[^0-9a-z]/i, '_').downcase
  rescue
    @logger.info(get_row_count.to_s) {"Error parsing column labels"}
    return
  end

  if $skip_rows
    @logger.info(get_row_count.to_s) {'Skipping ' + $skip_rows.to_s + ' rows'}
    while get_row_count < $skip_rows do
      sheet.shift
      increment_row_count
    end
    @logger.debug(get_row_count.to_s) {'Skipped ' + $skip_rows.to_s + ' rows'}
  end

  # Shared by the worker threads below; guarded by @@sheet_mutex.
  row_count = get_row_count
  # TODO - Detect sheet type / sanity check by searching column_defs
  if (name == 'Participant Information')
    # mixed sheet... FUN!
    @logger.info(get_row_count.to_s) {"Importing Participants, Parents and Group assignments\n"}
    # Multi-thread
    threads = []
    # Set default thread_count to 7, accept global to change it.
    # NOTE: 0..thread_count actually spawns thread_count + 1 workers.
    thread_count = $thread_count.nil? ? 7 : $thread_count

    for i in 0..thread_count do
      threads << Thread.new {
        # sheet never becomes nil; workers exit via break on empty shift.
        until sheet.nil?
          row = nil
          @@sheet_mutex.synchronize do
            row = sheet.shift
            row_count+=1
          end
          unless row.nil?
            formatted_row = self.prepare_row(row, column_defs, row_count)
            if run_char.nil?
              self.import_mixed_user(formatted_row)
            else
              if formatted_row['run_character'].to_s == run_char.to_s
                self.import_mixed_user(formatted_row)
              else
                @logger.info(get_row_count.to_s) {'Skipping row ' + row_count.to_s}
              end
            end
          else
            break
          end
        end
      }
    end
    threads.each_index {|i|
      threads[i].join
      puts 'Thread ' + i.to_s + ' exited.'
    }
  elsif (name == 'Users')
    #if (2 <= (column_defs & ['First Name', 'Last Name']).length)
    @logger.info(get_row_count.to_s) {"Importing Users\n"}
    sheet.each {|row| self.import_user(self.prepare_row(row, column_defs))}
  elsif (name == 'Groups' || name == 'Group Information' || name == 'Duplicates')
    #elsif (2 <= (column_defs & ['Group Name', 'Category']).length)
    @logger.info(get_row_count.to_s) {"Importing Groups\n"}

    # Multi-thread
    threads = []
    # Set default thread_count to 5, accept global to change it.
    thread_count = $thread_count.nil? ? 5 : $thread_count
    for i in 0..thread_count do
      threads << Thread.new {
        until sheet.nil?
          row = nil
          @@sheet_mutex.synchronize do
            row = sheet.shift
            row_count+=1
          end
          unless row.nil?
            formatted_row = self.prepare_row(row, column_defs, row_count)
            if run_char.nil?
              self.import_group(formatted_row)
            else
              if formatted_row['run_character'].to_s == run_char.to_s
                title = self.import_group(formatted_row)
                # Group above not found yet: queue the row for a rerun
                # after all threads finish.
                if title == formatted_row['group_name']
                  rerun_sheet.push(row) if title == formatted_row['group_name']
                  rerun_row_count = rerun_row_count.merge(title => get_row_count)
                end
              else
                @logger.info(get_row_count.to_s) {'Skipping row ' + row_count.to_s}
              end
            end
          else
            break
          end
        end
      }
    end
    threads.each_index {|i|
      threads[i].join
      puts 'Thread ' + i.to_s + ' exited.'
    }
    # Retrying rows that didn't find group above.
    rerun_sheet.each {|row|
      formatted_row = self.prepare_row(row, column_defs)
      set_row_count(rerun_row_count[formatted_row['group_name']])
      self.import_group(formatted_row)
    }
  elsif (name == 'Events')
    #elsif (2 <= (column_defs & ['Title', 'Groups Involved', 'Duration (in minutes)']).length)
    @logger.info(get_row_count.to_s) {"Importing Events\n"}
    sheet.each {|row|
      row_count+=1
      response = self.import_event(self.prepare_row(row, column_defs))
      unless g.nil? || wuri.nil?
        # Write the created node ID back to column 6 of the sheet.
        g.put_cell_content(wuri.to_s+'/R'+row_count.to_s+'C6', response['nid'], row_count, 6) if response != 'update'
      end
    }
  elsif (name == 'Users in Groups')
    #elsif (2 <= (column_defs & ['Group Name', 'User email', 'Role (Admin, Coach, Player, etc)']).length)
    @logger.info(get_row_count.to_s) {"Importing Users in Groups\n"}
    sheet.each {|row| self.import_user_group_role(self.prepare_row(row, column_defs))}
  else
    @logger.info(get_row_count.to_s) {"Don't know what to do with sheet " + name + "\n"}
    # Nothing imported, so skip the stats output below.
    # (Was 'next', which is invalid outside a block/loop.)
    return
  end
  # Output stats
  seconds = (Time.now - start_time).to_i
  @logger.debug('import') {' stopped ' + Time.now.to_s}
  stats_array = []
  @@stats.each { |key,value| stats_array.push(key.to_s + ': ' + value.to_s) unless value.nil? or value == 0}
  puts
  puts
  @logger.info('import') {'Imported ' + stats_array.sort.join(', ')}
  @logger.info('import') {' in ' + (seconds / 60).to_s + ' minutes ' + (seconds % 60).to_s + ' seconds.'}
  puts
  # End stats
end
|
|
420
|
+
|
|
421
|
+
# Import one mixed 'Participant Information' row: create parent 1,
# parent 2 and then the participant, and finally assign the participant
# to every group_N_* group present in the row.
def import_mixed_user(row)
  @logger.info(get_row_count.to_s) {'Processing...'}
  # Import Users (Make sure parents come first).
  responses = {}
  ['parent_1_', 'parent_2_', 'participant_'].each {|prefix|
    user = row.key_filter(prefix)
    # Add in Parent email addresses if this is the participant.
    user.merge!(row.reject {|key, value| !key.include?('email_address')}) if prefix == 'participant_'
    description = prefix.split('_').join(' ').strip.capitalize

    responses[prefix] = import_user(user, description) unless user.empty?
    # Normalize non-hash results (nil/skipped) to an empty hash so the
    # has_key? checks below are always safe.
    if !responses[prefix].respond_to?(:has_key?)
      responses[prefix] = {}
    end
  }

  if responses.has_key?('participant_') && !responses['participant_'].nil?
    # Update participant with responses. We're done with parents.
    row['participant_uuid'] = responses['participant_']['uuid'] if responses['participant_'].has_key?('uuid')
    row['participant_email_address'] = responses['participant_']['mail'] if responses['participant_'].has_key?('mail')

    # Find the max number of groups being imported
    # (largest N found in any 'group_N_...' column name).
    group_list = row.reject {|key, value| key.match('group_').nil?}
    number_of_groups = 0
    key_int_value = 0
    group_list.each {|key, value|
      key_parts = key.split('_')
      key_parts.each {|part|
        key_int_value = part.to_i
        if (key_int_value > number_of_groups)
          number_of_groups = key_int_value
        end
      }
    }

    # Create the list of group names to iterate through
    group_names = []
    for i in 1..number_of_groups
      group_names.push('group_' + i.to_s + '_')
    end

    # Group Assignment + Participant
    group_names.each {|prefix|
      # Rewrite 'group_N_*' keys to 'group_*' and merge with the
      # participant columns for the role assignment call.
      group = row.key_filter(prefix, 'group_')
      user = row.key_filter('participant_')
      responses[prefix] = import_user_group_role(user.merge(group)) unless group.empty?
    }
  end
end
|
|
470
|
+
|
|
471
|
+
# Create a single user account from a prepared spreadsheet row, with
# extensive validation and duplicate prevention.
#
# row         - prepared row; relevant keys include 'first_name',
#               'last_name', 'gender', 'birthdate', 'email_address',
#               'parent_1_email_address', 'parent_2_email_address'.
# description - label used in log messages and stats.
#
# Returns a hash with 'uuid'/'mail' for a created or matched user,
# {} on any validation failure, or nil when a RestClient::Exception
# was rescued by the method-level rescue at the bottom.
def import_user(row, description = 'User')
  more_params = {}

  # Birth date is mandatory — it drives the under-14 parent rules.
  if row['birthdate'].nil?
    @logger.error(get_row_count.to_s) {'No Birth Date Listed. Failed to import ' + description + '.'}
    return {}
  else
    begin
      birthdate = Date.parse(row['birthdate'])
    rescue ArgumentError => err
      @logger.error(get_row_count.to_s) {'Invalid Birth Date. Failed to import ' + description + '.'}
      @logger.error(get_row_count.to_s) {err.message.to_s}
      return {}
    end
  end

  # Resolve parent email addresses to account lookups.
  ('1'..'2').each { |i|
    key = 'parent_' + i + '_email_address'
    if row.has_key?(key)
      parent_uuid = nil
      begin
        parent_uuid = self.user_get_email(row[key])
      rescue DuplicateUserExists => dup_e
        @logger.error(get_row_count.to_s) {'Parent ' + i + ' ' + dup_e.message.to_s}
      end
      if parent_uuid.nil?
        @logger.warn(get_row_count.to_s) {"Can't find account for Parent " + i + ": " + row[key]}
      else
        row['parent_' + i + '_uuid'] = parent_uuid
      end
    end
  }

  # If 13 or under, verify parent, request allplayers.net email if needed.
  if birthdate.to_age < 14
    # If 13 or under, no email & has parent, request allplayers.net email.
    if !(row.has_key?('parent_1_uuid') || row.has_key?('parent_2_uuid'))
      @logger.error(get_row_count.to_s) {'Missing parents for '+ description +' age 13 or less.'}
      return {}
    end
  end

  lock = nil
  # Request allplayers.net email if needed.
  if !row.has_key?('email_address')
    # If 13 or under, no email & has parent, request allplayers.net email.
    if row.has_key?('parent_1_uuid') || row.has_key?('parent_2_uuid')
      # Request allplayers.net email
      more_params['email_alternative'] = {:value => 1}
      # TODO - Consider how to send welcome email to parent. (Queue allplayers.net emails in Drupal for cron playback)
      # Create a lock for these parents
      # (keyed by the sorted parent UUIDs so the same family always
      # maps to the same lock).
      @@user_mutex.synchronize do
        parent_uuids = []
        parent_uuids.push(row['parent_1_uuid']['item'].first['uuid']) if row.has_key?('parent_1_uuid')
        parent_uuids.push(row['parent_2_uuid']['item'].first['uuid']) if row.has_key?('parent_2_uuid')
        parents_key = parent_uuids.sort.join('_')
        # Haven't cached it, create a targeted Mutex for it.
        @@email_mutexes[parents_key] = Mutex.new unless @@email_mutexes.has_key?(parents_key)
        lock = @@email_mutexes[parents_key]
      end
    else
      @logger.error(get_row_count.to_s) {'Missing parents for '+ description +' without email address.'}
      return {}
    end
  else
    # Check if user already
    begin
      uuid, lock = email_to_uuid(row['email_address'], :lock)
    rescue DuplicateUserExists => dup_e
      @logger.error(get_row_count.to_s) {description + ' ' + dup_e.message.to_s}
      return {}
    end

    unless uuid.nil?
      # Account already exists; still run child verification so the
      # other parent gets linked when appropriate.
      @logger.warn(get_row_count.to_s) {description + ' already exists: ' + row['email_address'] + ' at UUID: ' + uuid + '. Participant will still be added to groups.'}
      self.verify_children(row, description, uuid)
      return {'mail' => row['email_address'], 'uuid' => uuid}
    else
      if !row['email_address'].valid_email_address?
        @logger.error(get_row_count.to_s) {description + ' has an invalid email address: ' + row['email_address'] + '. Skipping.'}
        return {}
      end
      # Live DNS check on the address's domain.
      if !row['email_address'].active_email_domain?
        @logger.error(get_row_count.to_s) {description + ' has an email address with an invalid or inactive domain: ' + row['email_address'] + '. Skipping.'}
        return {}
      end
    end
  end

  # Check required fields
  missing_fields = ['first_name', 'last_name', 'gender', 'birthdate'].reject {
    |field| row.has_key?(field) && !row[field].nil? && !row[field].empty?
  }
  if !missing_fields.empty?
    @logger.error(get_row_count.to_s) {'Missing required fields for '+ description +': ' + missing_fields.join(', ')}
    return {}
  end

  @logger.info(get_row_count.to_s) {'Importing ' + description +': ' + row['first_name'] + ' ' + row['last_name']}

  response = {}

  # Lock down this email address.
  lock.synchronize {
    # Last minute checks.
    if !row['email_address'].nil? && @@uuid_map.has_key?(row['email_address'])
      @logger.warn(get_row_count.to_s) {description + ' already exists: ' + row['email_address'] + ' at UUID: ' + @@uuid_map[row['email_address']] + '. Participant will still be added to groups.'}
      return {'mail' => row['email_address'], 'uuid' => @@uuid_map[row['email_address']] }
    end

    # Avoid creating duplicate children.
    existing_child = self.verify_children(row, description)
    return existing_child unless existing_child.nil?
    if row.has_key?('email_address') && row.has_key?('parent_1_uuid')
      more_params['email'] = row['email_address']
    end
    # NOTE(review): argument order differs between the two client calls
    # below — user_create_child(..., birthdate, gender, ...) vs
    # user_create(..., gender, birthdate, ...). Confirm against the
    # allplayers client API.
    if row.has_key?('parent_1_uuid')
      response = self.user_create_child(
        row['parent_1_uuid']['item'].first['uuid'],
        row['first_name'],
        row['last_name'],
        birthdate,
        row['gender'],
        more_params
      )
    else
      response = self.user_create(
        row['email_address'],
        row['first_name'],
        row['last_name'],
        row['gender'],
        birthdate,
        more_params
      )
    end

    if !response.nil? && response.has_key?('uuid')
      # Cache the new users UID while we have the lock.
      @@user_mutex.synchronize { @@uuid_map[response['email']] = response['uuid'] }
    end
  }

  if !response.nil? && response.has_key?('uuid')
    increment_stat('Users')
    increment_stat(description + 's') if description != 'User'

    # Don't add parent 1, already added with user_create_child.
    response['parenting_2_response'] = self.user_create_child(row['parent_2_uuid']['item'].first['uuid'], '', '', '', '', {:child_uuid => response['uuid']}) if row.has_key?('parent_2_uuid')
  end

  return response
rescue RestClient::Exception => e
  @logger.error(get_row_count.to_s) {'Failed to import ' + description + ': ' + e.message}
end
|
|
625
|
+
|
|
626
|
+
def import_group(row)
|
|
627
|
+
@groups_map = get_group_names_from_file unless defined? @groups_map
|
|
628
|
+
@group_rows = get_group_rows_from_file unless defined? @group_rows
|
|
629
|
+
# Checking name duplication, if duplicate add identifier by type division, league, etc..
|
|
630
|
+
if @group_rows.has_key?(get_row_count.to_s)
|
|
631
|
+
row['uuid'] = @group_rows[get_row_count.to_s]
|
|
632
|
+
end
|
|
633
|
+
begin
|
|
634
|
+
if row['delete']
|
|
635
|
+
begin
|
|
636
|
+
# Make sure registration settings are turned off by making group inactive.
|
|
637
|
+
self.group_update(row['uuid'], {'active' => 0})
|
|
638
|
+
self.group_delete(row['uuid'])
|
|
639
|
+
rescue RestClient::Exception => e
|
|
640
|
+
puts 'There was a problem deleting group:' + row['uuid']
|
|
641
|
+
@logger.info(get_row_count.to_s) {'There was a problem deleting group:' + row['uuid'] + ' ' + e.message}
|
|
642
|
+
else
|
|
643
|
+
@logger.info(get_row_count.to_s) {'Deleting group:' + row['uuid']}
|
|
644
|
+
puts 'Deleting group:' + row['uuid']
|
|
645
|
+
end
|
|
646
|
+
return
|
|
647
|
+
end
|
|
648
|
+
if row.has_key?('group_clone') && row.has_key?('uuid') && !row['group_clone'].empty? && !row['uuid'].empty?
|
|
649
|
+
begin
|
|
650
|
+
self.group_get(row['uuid'])
|
|
651
|
+
self.group_get(row['group_clone'])
|
|
652
|
+
rescue RestClient::Exception => e
|
|
653
|
+
puts 'The group you are trying to clone from can not be found, moving on to creating the group.'
|
|
654
|
+
else
|
|
655
|
+
@logger.info(get_row_count.to_s) {'Cloning settings from group: ' + row['group_clone']}
|
|
656
|
+
self.group_clone(row['uuid'], row['group_clone'])
|
|
657
|
+
return
|
|
658
|
+
end
|
|
659
|
+
elsif row.has_key?('uuid')
|
|
660
|
+
puts 'Group already imported.'
|
|
661
|
+
@logger.info(get_row_count.to_s) {'Group already imported.'}
|
|
662
|
+
return
|
|
663
|
+
end
|
|
664
|
+
if row['owner_uuid']
|
|
665
|
+
begin
|
|
666
|
+
owner = self.user_get(row['owner_uuid'])
|
|
667
|
+
raise if !owner.has_key?('uuid')
|
|
668
|
+
rescue
|
|
669
|
+
puts "Couldn't get group owner from UUID: " + row['owner_uuid'].to_s
|
|
670
|
+
return {}
|
|
671
|
+
end
|
|
672
|
+
else
|
|
673
|
+
puts 'Group import requires group owner'
|
|
674
|
+
return {}
|
|
675
|
+
end
|
|
676
|
+
location = row.key_filter('address_')
|
|
677
|
+
if location['zip'].nil?
|
|
678
|
+
@logger.error(get_row_count.to_s) {'Location ZIP required for group import.'}
|
|
679
|
+
return {}
|
|
680
|
+
end
|
|
681
|
+
|
|
682
|
+
categories = row['group_categories'].split(',') unless row['group_categories'].nil?
|
|
683
|
+
if categories.nil?
|
|
684
|
+
@logger.error(get_row_count.to_s) {'Group Type required for group import.'}
|
|
685
|
+
return {}
|
|
686
|
+
end
|
|
687
|
+
more_params = {}
|
|
688
|
+
more_params['group_type'] = row['group_type'] unless row['group_type'].nil?
|
|
689
|
+
|
|
690
|
+
# Checking name duplication, if duplicate add identifier by type division, league, etc..
|
|
691
|
+
# Only one level deep.
|
|
692
|
+
# Checking name duplication, if duplicate add identifier by type (Club, Team).
# Only one level deep: a duplicate Team is nested under a generated Club parent.
if @groups_map.has_key?(row['group_name'])
  if row['group_type'] == 'Club'
    # Duplicate Club: append ' Club', or ' Club 1' if that is also taken.
    if @groups_map.has_key?(row['group_name'] + ' Club')
      row['group_name'] = row['group_name'] + ' Club 1'
    else
      row['group_name'] = row['group_name'] + ' Club'
    end
  elsif row['group_type'] == 'Team'
    if @groups_map.has_key?(row['group_name'] + ' Team')
      # BUG FIX: was `has_key(` (no such method on Hash — raised NoMethodError
      # whenever this branch was reached). The predicate is `has_key?`.
      if @groups_map.has_key?(row['group_name'] + ' 1')
        row['group_above'] = row['group_name'] + ' Club 1'
        row['group_name'] = row['group_name'] + ' 2'
      else
        row['group_above'] = row['group_name'] + ' Club'
        row['group_name'] = row['group_name'] + ' 1'
      end
    else
      row['group_name'] = row['group_name'] + ' Team'
    end
  end
end
|
|
713
|
+
|
|
714
|
+
if row.has_key?('group_uuid') && !row['group_uuid'].empty?
|
|
715
|
+
more_params['groups_above'] = {'0' => row['group_uuid']}
|
|
716
|
+
elsif row.has_key?('group_above') && !row['group_above'].empty?
|
|
717
|
+
if @groups_map.has_key?(row['group_above'])
|
|
718
|
+
@logger.info(get_row_count.to_s) {'Found group above: ' + row['group_above'] + ' at UUID ' + @groups_map[row['group_above']]}
|
|
719
|
+
more_params['groups_above'] = {@groups_map[row['group_above']] => @groups_map[row['group_above']]}
|
|
720
|
+
else
|
|
721
|
+
response = self.group_search({:title => row['group_above']})
|
|
722
|
+
if response.kind_of?(Array)
|
|
723
|
+
response.each { |group|
|
|
724
|
+
if group['title'] == row['group_above']
|
|
725
|
+
row['group_name'] = row['group_name'] + ' ' + row['group_type'] if group['title'] == row['group_name']
|
|
726
|
+
more_params['groups_above'] = {group['uuid'] => group['uuid']}
|
|
727
|
+
end
|
|
728
|
+
}
|
|
729
|
+
if more_params['groups_above'].nil?
|
|
730
|
+
puts 'Row ' + get_row_count.to_s + "Couldn't find group above: " + row['group_above']
|
|
731
|
+
@logger.error(get_row_count.to_s) {"Couldn't find group above: " + row['group_above']}
|
|
732
|
+
return row['group_name']
|
|
733
|
+
end
|
|
734
|
+
else
|
|
735
|
+
puts 'Row ' + get_row_count.to_s + "Couldn't find group above: " + row['group_above']
|
|
736
|
+
@logger.error(get_row_count.to_s) {"Couldn't find group above: " + row['group_above']}
|
|
737
|
+
return row['group_name']
|
|
738
|
+
end
|
|
739
|
+
end
|
|
740
|
+
end
|
|
741
|
+
|
|
742
|
+
@logger.info(get_row_count.to_s) {'Importing group: ' + row['group_name']}
|
|
743
|
+
response = self.group_create(
|
|
744
|
+
row['group_name'], # Title
|
|
745
|
+
row['group_description'], # Description field
|
|
746
|
+
location,
|
|
747
|
+
categories.last,
|
|
748
|
+
more_params
|
|
749
|
+
)
|
|
750
|
+
@logger.info(get_row_count.to_s) {'Group UUID: ' + response['uuid']}
|
|
751
|
+
rescue RestClient::Exception => e
|
|
752
|
+
@logger.error(get_row_count.to_s) {'Failed to import group: ' + e.message}
|
|
753
|
+
else
|
|
754
|
+
if (response && response.has_key?('uuid'))
|
|
755
|
+
increment_stat('Groups')
|
|
756
|
+
# Writing data into a csv file
|
|
757
|
+
@groups_map[row['group_name']] = response['uuid']
|
|
758
|
+
FasterCSV.open("imported_groups.csv", "a") do |csv|
|
|
759
|
+
csv << [get_row_count, row['group_name'],response['uuid']]
|
|
760
|
+
end
|
|
761
|
+
if row.has_key?('group_clone') && !row['group_clone'].empty?
|
|
762
|
+
@logger.info(get_row_count.to_s) {'Cloning settings from group: ' + row['group_clone']}
|
|
763
|
+
response = self.group_clone(
|
|
764
|
+
response['uuid'],
|
|
765
|
+
row['group_clone'],
|
|
766
|
+
nil
|
|
767
|
+
)
|
|
768
|
+
end
|
|
769
|
+
end
|
|
770
|
+
end
|
|
771
|
+
end
|
|
772
|
+
|
|
773
|
+
# Adds a user to a group, optionally assigning roles, payment options and
# webform registrations.
#
# row - Hash of spreadsheet columns. Keys read here:
#       'uuid'             - user UUID for direct lookup.
#       'email_address'    - user email, used when no 'uuid' is given.
#       'group_uuid'       - target group UUID (required).
#       'group_role'       - comma separated list of roles to assign.
#       'group_fee'        - 'full' or 'plan' to flag the join as paid.
#       'group_webform_id' - comma separated webform ids.
#
# Returns the API response Hash from the last join call, an empty Hash when
# the join raised, or nil when user/group lookup failed (errors are logged).
def import_user_group_role(row)
  # Check User.
  if row.has_key?('uuid')
    user = self.user_get(row['uuid'])
  # BUG FIX: was `!row['email_address'].respond_to?(:to_s)` — every Ruby
  # object responds to #to_s, so the negated check was always false and this
  # email-lookup branch was unreachable. Use the non-empty check the rest of
  # the importer uses for optional columns.
  elsif row.has_key?('email_address') && !row['email_address'].empty?
    begin
      user = self.user_get_email(row['email_address'])
    rescue
      @logger.error(get_row_count.to_s) {"User " + row['email_address'] + " doesn't exist to add to group"}
      return
    end
  else
    @logger.error(get_row_count.to_s) {"User can't be added to group without email address."}
    return
  end

  # Check Group
  if row.has_key?('group_uuid')
    group_uuid = row['group_uuid']
  else
    # .to_s guards against a nil email when the user was resolved by UUID
    # (String + nil would raise TypeError inside the log block).
    @logger.error(get_row_count.to_s) {'User ' + row['email_address'].to_s + " can't be added to group without group uuid."}
    return
  end

  response = {}
  # Join user to group.
  begin
    if row.has_key?('group_role')
      # Break up any comma separated list of roles into individual roles.
      group_roles = row['group_role'].split(',')
      options = {}
      if row.has_key?('group_fee')
        options = case row['group_fee']
        when 'full' then {:should_pay => 1, :payment_method => :full}
        when 'plan' then {:should_pay => 1, :payment_method => :plan}
        else {}
        end
      end
      group_roles.each {|group_role|
        # Remove whitespace around each role name.
        group_role = group_role.strip
        if row.has_key?('group_webform_id')
          webform_ids = row['group_webform_id'].split(',')
          response = self.user_join_group(group_uuid, user['uuid'], group_role, options, webform_ids)
        else
          response = self.user_join_group(group_uuid, user['uuid'], group_role, options)
        end
      }
    else
      # No roles requested: plain membership join.
      response = self.user_join_group(group_uuid, user['uuid'])
    end
  rescue RestClient::Exception => e
    @logger.error(get_row_count.to_s) {'User ' + user['uuid'] + " failed to join group " + group_uuid.to_s + ': ' + e.message}
  else
    if row.has_key?('group_role')
      @logger.info(get_row_count.to_s) {'User ' + user['uuid'] + " joined group " + group_uuid.to_s + ' with role(s) ' + row['group_role']}
    else
      @logger.info(get_row_count.to_s) {'User ' + user['uuid'] + " joined group " + group_uuid.to_s}
    end
  end

  response
end
|
|
838
|
+
|
|
839
|
+
end
|
metadata
ADDED
|
@@ -0,0 +1,133 @@
|
|
|
1
|
+
--- !ruby/object:Gem::Specification
|
|
2
|
+
name: allplayers_imports
|
|
3
|
+
version: !ruby/object:Gem::Version
|
|
4
|
+
hash: 27
|
|
5
|
+
prerelease:
|
|
6
|
+
segments:
|
|
7
|
+
- 0
|
|
8
|
+
- 1
|
|
9
|
+
- 0
|
|
10
|
+
version: 0.1.0
|
|
11
|
+
platform: ruby
|
|
12
|
+
authors:
|
|
13
|
+
- AllPlayers.com
|
|
14
|
+
autorequire:
|
|
15
|
+
bindir: bin
|
|
16
|
+
cert_chain: []
|
|
17
|
+
|
|
18
|
+
date: 2013-01-16 00:00:00 Z
|
|
19
|
+
dependencies:
|
|
20
|
+
- !ruby/object:Gem::Dependency
|
|
21
|
+
name: ci_reporter
|
|
22
|
+
prerelease: false
|
|
23
|
+
requirement: &id001 !ruby/object:Gem::Requirement
|
|
24
|
+
none: false
|
|
25
|
+
requirements:
|
|
26
|
+
- - ~>
|
|
27
|
+
- !ruby/object:Gem::Version
|
|
28
|
+
hash: 11
|
|
29
|
+
segments:
|
|
30
|
+
- 1
|
|
31
|
+
- 7
|
|
32
|
+
- 0
|
|
33
|
+
version: 1.7.0
|
|
34
|
+
type: :runtime
|
|
35
|
+
version_requirements: *id001
|
|
36
|
+
- !ruby/object:Gem::Dependency
|
|
37
|
+
name: fastercsv
|
|
38
|
+
prerelease: false
|
|
39
|
+
requirement: &id002 !ruby/object:Gem::Requirement
|
|
40
|
+
none: false
|
|
41
|
+
requirements:
|
|
42
|
+
- - ~>
|
|
43
|
+
- !ruby/object:Gem::Version
|
|
44
|
+
hash: 5
|
|
45
|
+
segments:
|
|
46
|
+
- 1
|
|
47
|
+
- 5
|
|
48
|
+
- 3
|
|
49
|
+
version: 1.5.3
|
|
50
|
+
type: :runtime
|
|
51
|
+
version_requirements: *id002
|
|
52
|
+
- !ruby/object:Gem::Dependency
|
|
53
|
+
name: highline
|
|
54
|
+
prerelease: false
|
|
55
|
+
requirement: &id003 !ruby/object:Gem::Requirement
|
|
56
|
+
none: false
|
|
57
|
+
requirements:
|
|
58
|
+
- - ~>
|
|
59
|
+
- !ruby/object:Gem::Version
|
|
60
|
+
hash: 25
|
|
61
|
+
segments:
|
|
62
|
+
- 1
|
|
63
|
+
- 6
|
|
64
|
+
- 11
|
|
65
|
+
version: 1.6.11
|
|
66
|
+
type: :runtime
|
|
67
|
+
version_requirements: *id003
|
|
68
|
+
- !ruby/object:Gem::Dependency
|
|
69
|
+
name: allplayers
|
|
70
|
+
prerelease: false
|
|
71
|
+
requirement: &id004 !ruby/object:Gem::Requirement
|
|
72
|
+
none: false
|
|
73
|
+
requirements:
|
|
74
|
+
- - ~>
|
|
75
|
+
- !ruby/object:Gem::Version
|
|
76
|
+
hash: 27
|
|
77
|
+
segments:
|
|
78
|
+
- 0
|
|
79
|
+
- 1
|
|
80
|
+
- 0
|
|
81
|
+
version: 0.1.0
|
|
82
|
+
type: :runtime
|
|
83
|
+
version_requirements: *id004
|
|
84
|
+
description: A Ruby tool to handle import spreadsheets into AllPlayers API.
|
|
85
|
+
email:
|
|
86
|
+
- support@allplayers.com
|
|
87
|
+
executables: []
|
|
88
|
+
|
|
89
|
+
extensions: []
|
|
90
|
+
|
|
91
|
+
extra_rdoc_files: []
|
|
92
|
+
|
|
93
|
+
files:
|
|
94
|
+
- README.md
|
|
95
|
+
- allplayers_imports.gemspec
|
|
96
|
+
- lib/allplayers_imports.rb
|
|
97
|
+
homepage: http://www.allplayers.com/
|
|
98
|
+
licenses:
|
|
99
|
+
- MIT
|
|
100
|
+
post_install_message:
|
|
101
|
+
rdoc_options: []
|
|
102
|
+
|
|
103
|
+
require_paths:
|
|
104
|
+
- lib
|
|
105
|
+
required_ruby_version: !ruby/object:Gem::Requirement
|
|
106
|
+
none: false
|
|
107
|
+
requirements:
|
|
108
|
+
- - ">="
|
|
109
|
+
- !ruby/object:Gem::Version
|
|
110
|
+
hash: 3
|
|
111
|
+
segments:
|
|
112
|
+
- 0
|
|
113
|
+
version: "0"
|
|
114
|
+
required_rubygems_version: !ruby/object:Gem::Requirement
|
|
115
|
+
none: false
|
|
116
|
+
requirements:
|
|
117
|
+
- - ">="
|
|
118
|
+
- !ruby/object:Gem::Version
|
|
119
|
+
hash: 23
|
|
120
|
+
segments:
|
|
121
|
+
- 1
|
|
122
|
+
- 3
|
|
123
|
+
- 6
|
|
124
|
+
version: 1.3.6
|
|
125
|
+
requirements: []
|
|
126
|
+
|
|
127
|
+
rubyforge_project:
|
|
128
|
+
rubygems_version: 1.8.24
|
|
129
|
+
signing_key:
|
|
130
|
+
specification_version: 3
|
|
131
|
+
summary: A Ruby tool to handle import spreadsheets into AllPlayers API.
|
|
132
|
+
test_files: []
|
|
133
|
+
|