table_importer 0.1.1 → 0.2.1

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA1:
3
- metadata.gz: c222799798d8ddd74dd9122e21210fdf195eb454
4
- data.tar.gz: 742761c4969e4515560024965c34df055b792f8c
3
+ metadata.gz: 4235b6be7a6e1a540674326e18fd5ff08734672e
4
+ data.tar.gz: f0085296dac6201c7d47fe4120fec5eb70120154
5
5
  SHA512:
6
- metadata.gz: 0649b415d943e1967ff3618794bfc3290dd56fc50b795319e988b4ebbc51308c86a4451d92215fe1ba50b4761d236941fa776b2688c4852c1d9be3f7b4bdb845
7
- data.tar.gz: 02610c56bba2bde7681fcf22556ab3c46714f115c89bb25f7de3b27d3de982d87e5f1c5de3e31a630c976d3b08f3552935ba0151ff493f46c771589ee5e8b579
6
+ metadata.gz: fd616c41592477e07e9ab682c6f4b4e40c264317a8169aa5f01cb0909bfe6516363957f73bb92153ebcadce4084138c1824c6a55ed0c42d9e7890d35eba50023
7
+ data.tar.gz: cdc2f3dc97445c32229fe413a196ed109d887be20e32ac28814a9a5aa984d697c4ecdfca096df2ff6af784e94186b9c28e384712a7410c7a1a718e232898a730
data/README.md CHANGED
@@ -4,7 +4,7 @@
4
4
  Table Importer
5
5
  ==============
6
6
 
7
- Given a file (or a string) containing a container, along with options, it will return a hash of those values. Great for importing poorly formatted CSV files.
7
+ Given a file (or a string) containing tabular data, along with options, it will return a hash of those values. Great for importing poorly formatted CSV files. It can handle CSV, Excel (xls and xlsx), Google Drive spreadsheets, and copy-and-pasted text.
8
8
 
9
9
  Only works for ruby versions >= 1.9.3.
10
10
 
@@ -19,3 +19,104 @@ Then, you'll need to install bundler and the gem dependencies:
19
19
  You should now be able to run the local tests:
20
20
 
21
21
  `bundle exec rake`
22
+
23
+ Interact with table_importer by creating a TableImporter::Source instance and then calling methods on that instance.
24
+
25
+ `importer = TableImporter::Source.new({options})`
26
+
27
+ The options you pass in are:
28
+
29
+ ```
30
+ # The type of the spreadsheet/input you want to import
31
+ :type => "google" # Google Drive spreadsheet
32
+ => "csv" # CSV file
33
+ => "xls" # Excel spreadsheet
34
+ => "copy_and_paste" # Copy and pasted input
35
+
36
+ # The content to import. Either a file, a string, or a Google access token plus spreadsheet id.
37
+ :content => File.open("path/to/file") # for types csv, xls
38
+ => "Name, Email, Phone Number
39
+ Nick, nick@example.com, 6412345678" # For type copy_and_paste
40
+ => "google_access_token, spreadsheet_id" # For type google
41
+
42
+ # Whether the first row of input contains column headers
43
+ :headers_present => true # First row of input is headers
44
+ => false # First row of input is not headers
45
+
46
+ # Optionally, you can provide a mapping for the columns (it can be incomplete).
47
+ :user_headers => {
48
+ "email"=>"0",
49
+ "organization"=>"4",
50
+ "url"=>"9"
51
+ }
52
+ # Used to separate columns. Pass in 'nil' if using a Google spreadsheet or Excel, or if you don't know.
53
+ :column_separator => :comma # ','
54
+ => :space # ' '
55
+ => :tab # '\t'
56
+ => :semicolon # ';'
57
+
58
+ # Used to separate rows. Pass in 'nil' if using a Google spreadsheet or Excel, or if you don't know.
59
+ :record_separator => :newline_mac # '\n'
60
+ => :newline_windows # '\r\n'
61
+ => :old_newline_mac # '\r' (from the Mac OS 9 days)
62
+
63
+ # A hash of compulsory headers. At the moment only "email" is supported.
64
+ :compulsory_headers => {
65
+ :email => true # or false. Does each record require an email address to be valid?
66
+ }
67
+
68
+ ```
69
+
70
+ There are a few ways to interact with the table importer:
71
+
72
+ ```
73
+ importer = TableImporter::Source.new({options})
74
+
75
+ # get the type
76
+ puts importer.get_type
77
+ => "csv"
78
+
79
+ # get the column separator
80
+ puts importer.get_column_separator
81
+ => "semicolon"
82
+
83
+ # get the row separator
84
+ puts importer.get_record_separator
85
+ => "newline_mac"
86
+
87
+ # Get the headers (either the first row if headers are present, or else default headers)
88
+ puts importer.get_headers
89
+ => "column_1, column_2, column_3"
90
+
91
+ # Get the first 8 lines (useful for letting the user map their own headers, like MailChimp's contact import).
92
+ puts importer.get_preview_lines
93
+ => [{:column_1 => "r1c1", :column_2 => "r1c2", :column_3 => "r1c3"}, {:column_1 => "r2c1", :column_2 => "r2c2", :column_3 => "r2c3"} etc]
94
+
95
+ # Get the input in chunks of a given size (defaults to 50)
96
+ puts importer.get_chunks
97
+ => All input chunked into 50-line blocks.
98
+
99
+ puts importer.get_chunks(25)
100
+ => All input chunked into 25-line blocks.
101
+
102
+ # Unlike get_preview_lines, the returned chunks are not a simple array of hashes
103
+ puts importer.get_chunks(2)
104
+ => [{:lines => [{:column_1 => "r1c1", :column_2 => "r1c2", :column_3 => "r1c3"}, {:column_1 => "r2c1", :column_2 => "r2c2", :column_3 => "r2c3"}], :errors => []}, {:lines => [{:column_1 => "r3c1", :column_2 => "r3c2", :column_3 => "r3c3"}, {:column_1 => "r4c1", :column_2 => "r4c2", :column_3 => "r4c3"}], :errors => []}]
105
+
106
+ # The errors array holds lines that are missing a compulsory header, are blank/empty, or contain no alphanumeric characters at all.
107
+
108
+ # Get lines of input as an array of hashes (doesn't work for CSV yet)
109
+ # Pass in start and end points
110
+ puts importer.get_lines(0, 1)
111
+ => [{:column_1 => "r1c1", :column_2 => "r1c2", :column_3 => "r1c3"}]
112
+
113
+ # Or let it default to getting all lines
114
+ puts importer.get_lines
115
+ => All of the lines
116
+
117
+ puts importer.get_lines(5, 25)
118
+ => Line 5 up to line 25
119
+
120
+ puts importer.get_lines(5, -1)
121
+ => Line 5 to the end of the input.
122
+
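Tying the README examples above together, here is a minimal, hedged end-to-end sketch of a copy-and-paste import. The sample data, header mapping, and chunk size are illustrative, not taken from the gem's own docs:

```ruby
require "table_importer"

importer = TableImporter::Source.new({
  :type               => "copy_and_paste",
  :content            => "Nick, nick@example.com\nJane, jane@example.com",
  :headers_present    => false,
  :user_headers       => {"first_name" => "0", "email" => "1"},  # illustrative mapping
  :column_separator   => :comma,
  :record_separator   => :newline_mac,
  :compulsory_headers => {:email => true}
})

importer.get_chunks(25).each do |chunk|
  chunk[:lines].each  { |line| puts line[:email] }   # rows that passed the compulsory-header check
  chunk[:errors].each { |line| warn line.inspect }   # blank rows or rows without an email
end
```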
@@ -5,7 +5,8 @@ module TableImporter
5
5
  def initialize(data)
6
6
  @data = assign_data(data[:content])
7
7
  @column_separator, @record_separator = assign_separators(data[:column_separator], data[:record_separator])
8
- @headers, @headers_present = assign_headers(data[:headers], data[:headers_present])
8
+ @headers, @headers_present = assign_headers(data[:headers_present])
9
+ @mapping = data[:user_headers]
9
10
  @compulsory_headers = data[:compulsory_headers]
10
11
  @delete_empty_columns = @data.length < 50000
11
12
  end
@@ -39,8 +40,8 @@ module TableImporter
39
40
  return col_sep, rec_sep
40
41
  end
41
42
 
42
- def assign_headers(headers, headers_present)
43
- headers = headers_present ? get_first_line : get_headers if headers.blank?
43
+ def assign_headers(headers_present)
44
+ headers = headers_present ? get_first_line : get_headers
44
45
  return headers, headers_present
45
46
  end
46
47
 
@@ -90,7 +91,7 @@ module TableImporter
90
91
  end
91
92
 
92
93
  def get_chunks(chunk_size)
93
- @headers = convert_headers(get_first_line, @headers, @headers_present)
94
+ @headers = convert_headers(get_first_line, @mapping.present? ? @mapping : @headers, @headers_present)
94
95
  lines = get_lines(0, -1).in_groups_of(chunk_size, false)
95
96
  clean_chunks(lines, @compulsory_headers)
96
97
  end
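The get_chunks change above makes an explicit user mapping take precedence over the detected or default headers. A small sketch of that precedence rule in isolation (assuming ActiveSupport's `present?`, which the surrounding code already relies on; the values are illustrative):

```ruby
require "active_support/core_ext/object/blank"  # provides present?, as the code above assumes

mapping = {"email" => "0", "organization" => "4"}  # an illustrative :user_headers value
headers = [:column_1, :column_2, :column_3]        # detected or default headers

# Mirrors the ternary in get_chunks: a supplied mapping wins, otherwise fall back.
chosen = mapping.present? ? mapping : headers
# => {"email"=>"0", "organization"=>"4"}
```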
@@ -6,7 +6,6 @@ module TableImporter
6
6
 
7
7
  def initialize(data)
8
8
  @headers_present = data[:headers_present] # user has indicated headers are provided
9
- @headers = data[:headers]
10
9
  @column_separator, @record_separator = initialize_separators(data[:column_separator], data[:record_separator])
11
10
  @compulsory_headers = data[:compulsory_headers]
12
11
  @file = data[:content]
@@ -18,9 +17,10 @@ module TableImporter
18
17
  end
19
18
  get_column_separator(first_line)
20
19
  raise TableImporter::EmptyFileImportError.new unless file_has_content
21
- @headers = @headers_present ? first_line.split(@column_separator) : default_headers(100) if @headers.blank?
20
+ @headers = @headers_present ? first_line.split(@column_separator) : default_headers(100)
22
21
  rescue ArgumentError
23
22
  @file = clean_file(@file)
23
+ @column_separator = get_column_separator
24
24
  retry
25
25
  end
26
26
  end
@@ -35,9 +35,9 @@ module TableImporter
35
35
  begin
36
36
  SmarterCSV.process(@file.path, default_options({:col_sep => @column_separator.present? ? @column_separator : "\n", :row_sep => @record_separator != nil ? @record_separator : "\n", :chunk_size => 2})) do |chunk|
37
37
  if @headers_present
38
- return chunk.first.keys[0].to_s
38
+ return line_count(chunk.first.keys)
39
39
  else
40
- return chunk.first.values[0].to_s
40
+ return line_count(chunk.first.values)
41
41
  end
42
42
  end
43
43
  rescue EOFError
@@ -45,6 +45,10 @@ module TableImporter
45
45
  end
46
46
  end
47
47
 
48
+ def line_count(vals)
49
+ vals.count == 1 ? vals[0].to_s : vals.join(@column_separator)
50
+ end
51
+
48
52
  def file_has_content
49
53
  begin
50
54
  lines = get_preview_lines
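The new line_count helper above returns the single cell as a string when there is only one, and otherwise re-joins the cells with the column separator. A standalone sketch of the same logic (the separator is passed as an argument here only so the snippet is self-contained; the real method uses @column_separator):

```ruby
# Sketch of the helper shown in the diff, with the separator made explicit.
def line_count(vals, column_separator)
  vals.count == 1 ? vals[0].to_s : vals.join(column_separator)
end

line_count([:email], ",")                       # => "email"
line_count(["nick", "nick@example.com"], ",")   # => "nick,nick@example.com"
```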
@@ -104,6 +108,7 @@ module TableImporter
104
108
  clean_chunks(chunks, @compulsory_headers, @delete_empty_columns)
105
109
  rescue ArgumentError
106
110
  @file = clean_file(@file)
111
+ @column_separator = get_column_separator
107
112
  retry
108
113
  end
109
114
  end
@@ -146,7 +151,7 @@ module TableImporter
146
151
  def clean_file(file)
147
152
  contents = file.read
148
153
  import = Tempfile.new(["import", ".xls"], :encoding => "UTF-8")
149
- utf8_content = contents.force_encoding('UTF-8').encode('UTF-16', :invalid => :replace, :replace => '?').encode('UTF-8').gsub!(/\r\n|\r/, "\n").squeeze("\n")
154
+ utf8_content = contents.force_encoding('UTF-8').encode('UTF-16', :invalid => :replace, :replace => '?').encode('UTF-8').gsub(/\r\n|\r/, "\n").squeeze("\n")
150
155
  clean_contents = utf8_content[0] == "\n" ? utf8_content[1..-1] : utf8_content
151
156
  import.write(clean_contents)
152
157
  import.close
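The switch from gsub! to gsub above matters because String#gsub! returns nil when no substitution is made, which would break the chained squeeze call; gsub always returns a string:

```ruby
contents = "already unix line endings\n"

contents.gsub(/\r\n|\r/, "\n").squeeze("\n")   # => "already unix line endings\n"
contents.gsub!(/\r\n|\r/, "\n")                # => nil (nothing matched)
# ...so chaining .squeeze("\n") onto gsub! would raise NoMethodError here.
```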
@@ -5,29 +5,24 @@ module TableImporter
5
5
  def initialize(data)
6
6
  begin
7
7
  @type = File.extname(data[:content]) == ".xls" ? "xls" : "xlsx"
8
- @file_path = data[:content].path
9
8
  @headers_present = data[:headers_present]
10
- @file = get_file
9
+ @file = get_file(data[:content].path)
11
10
  @compulsory_headers = data[:compulsory_headers]
12
- @delete_empty_columns = (File.size(@file_path) < 100000)
13
- @mapping = !data[:user_headers].blank? ? data[:user_headers] : data[:headers]
11
+ @delete_empty_columns = (File.size(data[:content].path) < 100000)
12
+ @mapping = data[:user_headers]
14
13
  raise TableImporter::EmptyFileImportError.new if !@file.first_row
15
- if !data[:headers].nil?
16
- @headers = data[:headers]
17
- else
18
- @headers = @headers_present ? @file.row(1).map.with_index { |header, index| header.present? ? header.to_sym : "column_#{index}"} : default_headers
19
- end
14
+ @headers = @headers_present ? @file.row(1).map.with_index { |header, index| header.present? ? header.to_sym : "column_#{index}"} : default_headers
20
15
  rescue NoMethodError
21
16
  raise TableImporter::HeaderMismatchError.new
22
17
  end
23
18
  end
24
19
 
25
- def get_file
20
+ def get_file(path)
26
21
  begin
27
22
  if @type == "xls"
28
- Roo::Excel.new(@file_path).sheet(0)
23
+ Roo::Excel.new(path).sheet(0)
29
24
  elsif @type == "xlsx"
30
- Roo::Excelx.new(@file_path).sheet(0)
25
+ Roo::Excelx.new(path).sheet(0)
31
26
  end
32
27
  rescue TypeError
33
28
  raise TableImporter::IncorrectFileError.new
@@ -8,13 +8,9 @@ module TableImporter
8
8
  @file = get_file(data[:content].split(", ")[1], data[:content].split(", ")[0])
9
9
  @compulsory_headers = data[:compulsory_headers]
10
10
  @delete_empty_columns = false
11
- @mapping = !data[:user_headers].blank? ? data[:user_headers] : data[:headers]
11
+ @mapping = data[:user_headers] if data[:user_headers].present?
12
12
  raise TableImporter::EmptyFileImportError.new if !@file.first_row
13
- if !data[:headers].nil?
14
- @headers = data[:headers]
15
- else
16
- @headers = @headers_present ? @file.row(1).map.with_index { |header, index| header.present? ? header.to_sym : "column_#{index}"} : default_headers
17
- end
13
+ @headers = @headers_present ? @file.row(1).map.with_index { |header, index| header.present? ? header.to_sym : "column_#{index}"} : default_headers
18
14
  rescue NoMethodError
19
15
  raise TableImporter::HeaderMismatchError.new
20
16
  end
@@ -25,7 +25,7 @@ module TableImporter
25
25
  finish = [@last_row, start + number_of_lines].min
26
26
  mapped_lines = []
27
27
  (start...finish).each do |row_number|
28
- mapped_lines << Hash[@headers.zip(@file.row(row_number))]
28
+ mapped_lines << Hash[@headers.zip(@file.row(row_number + 1))]
29
29
  end
30
30
  mapped_lines
31
31
  end
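The row_number + 1 fix above accounts for the loop index being 0-based while the sheet rows in this codebase are addressed starting at 1. A hedged illustration with a stubbed sheet (the data and headers are made up for the example):

```ruby
# Stub standing in for the spreadsheet object: row numbers start at 1.
sheet_rows = {
  1 => ["nick", "nick@example.com"],
  2 => ["jane", "jane@example.com"]
}
headers = [:first_name, :email]  # illustrative header set

lines = (0...2).map do |row_number|
  # The loop index is 0-based, so + 1 maps it onto the 1-based sheet rows.
  Hash[headers.zip(sheet_rows[row_number + 1])]
end
# => [{:first_name=>"nick", :email=>"nick@example.com"},
#     {:first_name=>"jane", :email=>"jane@example.com"}]
```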
@@ -1,3 +1,3 @@
1
1
  module TableImporter
2
- VERSION = "0.1.1"
2
+ VERSION = "0.2.1"
3
3
  end
@@ -0,0 +1,229 @@
1
+ internet@example.com
2
+ radio@example.com
3
+ redactie@example.com
4
+ mvdlaan@example.com
5
+ redactie@example.com
6
+ nieuws@example.com
7
+ info@example.com
8
+ heleen@example.com
9
+ gezondheid@example.com
10
+ mensennatuur@example.com
11
+ wilma@example.com
12
+ editor@example.com
13
+ info@example.com
14
+ m.aandebrugh@example.com
15
+ daphne.van.paassen@example.com
16
+ redactienosop3@example.com
17
+ redactie@example.com
18
+ redactie@example.com
19
+ l.nieber@example.com
20
+ t.vansoest@example.com
21
+ wvhengel@example.com
22
+ hdboer@example.com
23
+ editienl@example.com
24
+ ad@example.com
25
+ radio@example.com
26
+ ct@example.com
27
+ e.kreulen@example.com
28
+ kunststof@example.com
29
+ redactie@example.com
30
+ webred@example.com
31
+ pers@example.com
32
+ nieuwsredactie@example.com
33
+ rsteenhorst@example.com
34
+ wester@example.com
35
+ foto@example.com
36
+ webredactie@example.com
37
+ c.paulussen@example.com
38
+ bnrredactie@example.com
39
+ karlijnmarchildon@example.com
40
+ leon@example.com
41
+ a.gelder@example.com
42
+ barbara.van.gool@example.com
43
+ redactie@example.com
44
+ ps@example.com
45
+ next@example.com
46
+ ahilten@example.com
47
+ weekend@example.com
48
+ redactie@example.com
49
+ info@example.com
50
+ e.van.der.velden@example.com
51
+ merel.izaks@example.com
52
+ deochtend@example.com
53
+ webredactie@example.com
54
+ beleef@example.com
55
+ info@example.com
56
+ h.vanhouwelingen@example.com
57
+ redactie@example.com
58
+ redactie-i@example.com
59
+ sv@example.com
60
+ j.bas@example.com
61
+ albertdelouw@example.com
62
+ saskia.haitsma@example.com
63
+ cosmopolitan@example.com
64
+ bladredactie@example.com
65
+ m.ham@example.com
66
+ stephanie.brandes@example.com
67
+ rtlboulevard@example.com
68
+ redactiesecretariaat@example.com
69
+ nieuwsdienst@example.com
70
+ erik.feenstra@example.com
71
+ j.vdoetelaar@example.com
72
+ a.karimi@example.com
73
+ redactie@example.com
74
+ redactie-flow@example.com
75
+ redactie@example.com
76
+ redactie@example.com
77
+ redactie@example.com
78
+ redactie@example.com
79
+ multimedia@example.com
80
+ pauw@example.com
81
+ deborah.blekkenhorst@example.com
82
+ redactie@example.com
83
+ wetenschap@example.com
84
+ fogteloo@example.com
85
+ info@example.com
86
+ redactie@example.com
87
+ pat.boon@example.com
88
+ patrick.smit@example.com
89
+ mensenleven@example.com
90
+ h.salm@example.com
91
+ cielke@example.com
92
+ m.t.hart@example.com
93
+ r.boxsem@example.com
94
+ nieuwsdienst@example.com
95
+ a.engbers@example.com
96
+ jeugdjournaal@example.com
97
+ b.vandeweijer@example.com
98
+ josefin.hoenders@example.com
99
+ redactie@example.com
100
+ floor.ligtvoet@example.com
101
+ j.koelewijn@example.com
102
+ planning.show@example.com
103
+ noordhuis@example.com
104
+ redactie@example.com
105
+ nathalie.groeneveld@example.com
106
+ redactie@example.com
107
+ pjansen@example.com
108
+ info@example.com
109
+ coleta@example.com
110
+ wart.krol@example.com
111
+ info@example.com
112
+ pers@example.com
113
+ verslaggeverij@example.com
114
+ martijn.verburg@example.com
115
+ mavdmarel@example.com
116
+ redactie@example.com
117
+ e.devisser@example.com
118
+ info@example.com
119
+ info@example.com
120
+ nieuws@example.com
121
+ t.voermans@example.com
122
+ r.brouwer@example.com
123
+ marieke.de.witte@example.com
124
+ redactie@example.com
125
+ binnenland@example.com
126
+ redactie@example.com
127
+ koen.van.huijgevoort@example.com
128
+ redactie@example.com
129
+ msienot@example.com
130
+ kunststoftv@example.com
131
+ opinie@example.com
132
+ vrouwmagazine@example.com
133
+ linda@example.com
134
+ dwdd@example.com
135
+ nrc@example.com
136
+ anna.pruis@example.com
137
+ m.kranenburg@example.com
138
+ info@example.com
139
+ redactie@example.com
140
+ wmeteren@example.com
141
+ amsterdam@example.com
142
+ noud.broekhof@example.com
143
+ nieuwsdienst@example.com
144
+ jildou@example.com
145
+ arjan.poggenklaas@example.com
146
+ marjan.vandenberg@example.com
147
+ binnenland@example.com
148
+ ditisdedag@example.com
149
+ vrij@example.com
150
+ info@example.com
151
+ l.verhoeven@example.com
152
+ m.verburg@example.com
153
+ leven@example.com
154
+ redactiegiel@example.com
155
+ nieuwsdienst@example.com
156
+ foto@example.com
157
+ brittekoppel@example.com
158
+ g.vanteeffelen@example.com
159
+ redactie@example.com
160
+ c.muis@example.com
161
+ servaas.van.der.laan@example.com
162
+ nieuwsdienst@example.com
163
+ erwin@example.com
164
+ show@example.com
165
+ info@example.com
166
+ magazine@example.com
167
+ hart@example.com
168
+ nosbinnenland@example.com
169
+ richt.kooistra@example.com
170
+ almar@example.com
171
+ k.ullah@example.com
172
+ a.crielaard@example.com
173
+ s.lautenbach@example.com
174
+ nosop3@example.com
175
+ redactie@example.com
176
+ autoshow@example.com
177
+ pers@example.com
178
+ boven@example.com
179
+ redactie@example.com
180
+ gezondheid@example.com
181
+ laura.vanbaars@example.com
182
+ esther.monsanto@example.com
183
+ internet@example.com
184
+ frank.thies@example.com
185
+ redactie@example.com
186
+ hvdberge@example.com
187
+ info@example.com
188
+ akerkum@example.com
189
+ redactie@example.com
190
+ saskiavanommen@example.com
191
+ redactie@example.com
192
+ m.kerres@example.com
193
+ s.heijne@example.com
194
+ stad@example.com
195
+ koffietijd@example.com
196
+ redactie@example.com
197
+ c.vanduin@example.com
198
+ webredactie@example.com
199
+ tatiana.pijnenburg@example.com
200
+ bert.heuvelman@example.com
201
+ judith.van.de.hulsbeek@example.com
202
+ dick.van.bolhuis@example.com
203
+ ugamedia@example.com
204
+ lambert.teuwissen@example.com
205
+ kiki.duren@example.com
206
+ redactie@example.com
207
+ rtlnieuws@example.com
208
+ hgillissen@example.com
209
+ blog@example.com
210
+ sjaak@example.com
211
+ jolanda.van.duyvenbode@example.com
212
+ redactie@example.com
213
+ internet@example.com
214
+ t.staal@example.com
215
+ margreet.botter@example.com
216
+ roodshow@example.com
217
+ marijn.lansbergen@example.com
218
+ redactie@example.com
219
+ red@example.com
220
+ redactie@example.com
221
+ nieuws@example.com
222
+ info@example.com
223
+ redactie-i@example.com
224
+ info@example.com
225
+ info@example.com
226
+ info@example.com
227
+ info@example.com
228
+ communicatie@example.com
229
+ advertising@example.com
Binary file
Binary file
@@ -8,32 +8,32 @@ describe TableImporter::Source do
8
8
  before(:each) do
9
9
  @source = TableImporter::Source.new({
10
10
  :content => "nick@pr.co\ndennis@pr.co\nlorenzo@pr.co",
11
- :headers_present => false, :headers => nil, :user_headers => nil, :type => "copy_and_paste", :column_separator => "", :record_separator => "", :compulsory_headers => {:email => true}})
11
+ :headers_present => false, :user_headers => nil, :type => "copy_and_paste", :column_separator => "", :record_separator => "", :compulsory_headers => {:email => true}})
12
12
  end
13
13
 
14
14
  it "creates a source object" do
15
- TableImporter::Source.new({:content => "nick@pr.co, dennis@pr.co, lorenzo@pr.co", :headers_present => false, :headers => nil, :user_headers => nil, :type => "copy_and_paste", :column_separator => "", :record_separator => "", :compulsory_headers => {:email => true}})
15
+ TableImporter::Source.new({:content => "nick@pr.co, dennis@pr.co, lorenzo@pr.co", :headers_present => false, :user_headers => nil, :type => "copy_and_paste", :column_separator => "", :record_separator => "", :compulsory_headers => {:email => true}})
16
16
  end
17
17
 
18
18
  it "gets the correct copy and paste chunks" do
19
19
  source = TableImporter::Source.new({
20
20
  :content => "nick@pr.co, dennis@pr.co, lorenzo@pr.co",
21
- :headers_present => false, :headers => {"first_name"=>"", "last_name"=>"", "salutation"=>"", "tag_list"=>"", "email"=>"0", "organization"=>"", "url"=>"", "phone"=>"", "job_title"=>"", "second_url"=>"", "notes"=>"", "twitter_username"=>"", "skype_username"=>"", "pinterest_username"=>"", "instagram_username"=>"", "facebook_username"=>"", "last_name_prefix"=>"", "second_email"=>"", "phone_mobile"=>"", "street"=>"", "street_number"=>"", "zipcode"=>"", "city"=>"", "country"=>""}, :user_headers => nil, :type => "copy_and_paste", :column_separator => :space, :record_separator => :comma, :compulsory_headers => {:email => true}})
21
+ :headers_present => false, :user_headers => {"first_name"=>"", "last_name"=>"", "salutation"=>"", "tag_list"=>"", "email"=>"0", "organization"=>"", "url"=>"", "phone"=>"", "job_title"=>"", "second_url"=>"", "notes"=>"", "twitter_username"=>"", "skype_username"=>"", "pinterest_username"=>"", "instagram_username"=>"", "facebook_username"=>"", "last_name_prefix"=>"", "second_email"=>"", "phone_mobile"=>"", "street"=>"", "street_number"=>"", "zipcode"=>"", "city"=>"", "country"=>""}, :type => "copy_and_paste", :column_separator => :space, :record_separator => :comma, :compulsory_headers => {:email => true}})
22
22
  source.get_chunks.first[:lines].first[:email].should eql("nick@pr.co")
23
23
  end
24
24
 
25
25
  it "has the correct number of lines" do
26
- source = TableImporter::Source.new({:content => "nick@pr.co, dennis@pr.co, lorenzo@pr.co", :headers_present => false, :headers => {"first_name"=>"", "last_name"=>"", "salutation"=>"", "tag_list"=>"", "email"=>"0", "organization"=>"", "url"=>"", "phone"=>"", "job_title"=>"", "second_url"=>"", "notes"=>"", "twitter_username"=>"", "skype_username"=>"", "pinterest_username"=>"", "instagram_username"=>"", "facebook_username"=>"", "last_name_prefix"=>"", "second_email"=>"", "phone_mobile"=>"", "street"=>"", "street_number"=>"", "zipcode"=>"", "city"=>"", "country"=>""}, :user_headers => nil, :type => "copy_and_paste", :column_separator => :space, :record_separator => :comma, :compulsory_headers => {:email => true}})
26
+ source = TableImporter::Source.new({:content => "nick@pr.co, dennis@pr.co, lorenzo@pr.co", :headers_present => false, :user_headers => {"first_name"=>"", "last_name"=>"", "salutation"=>"", "tag_list"=>"", "email"=>"0", "organization"=>"", "url"=>"", "phone"=>"", "job_title"=>"", "second_url"=>"", "notes"=>"", "twitter_username"=>"", "skype_username"=>"", "pinterest_username"=>"", "instagram_username"=>"", "facebook_username"=>"", "last_name_prefix"=>"", "second_email"=>"", "phone_mobile"=>"", "street"=>"", "street_number"=>"", "zipcode"=>"", "city"=>"", "country"=>""}, :type => "copy_and_paste", :column_separator => :space, :record_separator => :comma, :compulsory_headers => {:email => true}})
27
27
  source.get_chunks(1).count.should eql(3)
28
28
  end
29
29
 
30
30
  it "has the correct number of chunks" do
31
- source = TableImporter::Source.new({:content => "nick@pr.co, dennis@pr.co, lorenzo@pr.co", :headers_present => false, :headers => {"first_name"=>"", "last_name"=>"", "salutation"=>"", "tag_list"=>"", "email"=>"0", "organization"=>"", "url"=>"", "phone"=>"", "job_title"=>"", "second_url"=>"", "notes"=>"", "twitter_username"=>"", "skype_username"=>"", "pinterest_username"=>"", "instagram_username"=>"", "facebook_username"=>"", "last_name_prefix"=>"", "second_email"=>"", "phone_mobile"=>"", "street"=>"", "street_number"=>"", "zipcode"=>"", "city"=>"", "country"=>""}, :user_headers => nil, :type => "copy_and_paste", :column_separator => :space, :record_separator => :comma, :compulsory_headers => {:email => true}})
31
+ source = TableImporter::Source.new({:content => "nick@pr.co, dennis@pr.co, lorenzo@pr.co", :headers_present => false, :user_headers => {"first_name"=>"", "last_name"=>"", "salutation"=>"", "tag_list"=>"", "email"=>"0", "organization"=>"", "url"=>"", "phone"=>"", "job_title"=>"", "second_url"=>"", "notes"=>"", "twitter_username"=>"", "skype_username"=>"", "pinterest_username"=>"", "instagram_username"=>"", "facebook_username"=>"", "last_name_prefix"=>"", "second_email"=>"", "phone_mobile"=>"", "street"=>"", "street_number"=>"", "zipcode"=>"", "city"=>"", "country"=>""}, :type => "copy_and_paste", :column_separator => :space, :record_separator => :comma, :compulsory_headers => {:email => true}})
32
32
  source.get_chunks(2).count.should eql(2)
33
33
  end
34
34
 
35
35
  it "does not have extra spaces in the final chunk" do
36
- source = TableImporter::Source.new({:content => "nick@pr.co, dennis@pr.co, lorenzo@pr.co", :headers_present => false, :headers => {"first_name"=>"", "last_name"=>"", "salutation"=>"", "tag_list"=>"", "email"=>"0", "organization"=>"", "url"=>"", "phone"=>"", "job_title"=>"", "second_url"=>"", "notes"=>"", "twitter_username"=>"", "skype_username"=>"", "pinterest_username"=>"", "instagram_username"=>"", "facebook_username"=>"", "last_name_prefix"=>"", "second_email"=>"", "phone_mobile"=>"", "street"=>"", "street_number"=>"", "zipcode"=>"", "city"=>"", "country"=>""}, :user_headers => nil, :type => "copy_and_paste", :column_separator => :space, :record_separator => :comma, :compulsory_headers => {:email => true}})
36
+ source = TableImporter::Source.new({:content => "nick@pr.co, dennis@pr.co, lorenzo@pr.co", :headers_present => false, :user_headers => {"first_name"=>"", "last_name"=>"", "salutation"=>"", "tag_list"=>"", "email"=>"0", "organization"=>"", "url"=>"", "phone"=>"", "job_title"=>"", "second_url"=>"", "notes"=>"", "twitter_username"=>"", "skype_username"=>"", "pinterest_username"=>"", "instagram_username"=>"", "facebook_username"=>"", "last_name_prefix"=>"", "second_email"=>"", "phone_mobile"=>"", "street"=>"", "street_number"=>"", "zipcode"=>"", "city"=>"", "country"=>""}, :type => "copy_and_paste", :column_separator => :space, :record_separator => :comma, :compulsory_headers => {:email => true}})
37
37
  last_chunk = source.get_chunks(2).last
38
38
  (last_chunk[:lines].count + last_chunk[:errors].count).should eql(1)
39
39
  end
@@ -62,7 +62,7 @@ describe TableImporter::Source do
62
62
  context 'when source is a different string' do
63
63
 
64
64
  before(:each) do
65
- @source = TableImporter::Source.new({:content => "Nick Dowse <nick@pr.co>, Dennis van der Vliet <dennis@pr.co>, Jeroen Bos <jeroen@pr.co>", :headers_present => false, :headers => {"first_name"=>"0", "last_name"=>"", "salutation"=>"", "tag_list"=>"", "email"=>"1", "organization"=>"", "url"=>"", "phone"=>"", "job_title"=>"", "second_url"=>"", "notes"=>"", "twitter_username"=>"", "skype_username"=>"", "pinterest_username"=>"", "instagram_username"=>"", "facebook_username"=>"", "last_name_prefix"=>"", "second_email"=>"", "phone_mobile"=>"", "street"=>"", "street_number"=>"", "zipcode"=>"", "city"=>"", "country"=>""}, :user_headers => nil, :type => "copy_and_paste", :column_separator => "", :record_separator => "", :compulsory_headers => {:email => true}})
65
+ @source = TableImporter::Source.new({:content => "Nick Dowse <nick@pr.co>, Dennis van der Vliet <dennis@pr.co>, Jeroen Bos <jeroen@pr.co>", :headers_present => false, :user_headers => {"first_name"=>"0", "last_name"=>"", "salutation"=>"", "tag_list"=>"", "email"=>"1", "organization"=>"", "url"=>"", "phone"=>"", "job_title"=>"", "second_url"=>"", "notes"=>"", "twitter_username"=>"", "skype_username"=>"", "pinterest_username"=>"", "instagram_username"=>"", "facebook_username"=>"", "last_name_prefix"=>"", "second_email"=>"", "phone_mobile"=>"", "street"=>"", "street_number"=>"", "zipcode"=>"", "city"=>"", "country"=>""}, :type => "copy_and_paste", :column_separator => "", :record_separator => "", :compulsory_headers => {:email => true}})
66
66
  end
67
67
 
68
68
  it "gets the correct chunks" do
@@ -107,7 +107,7 @@ describe TableImporter::Source do
107
107
 
108
108
  lorenzo,\"lorenzo@pr.co\"
109
109
  HÐ, “nick¯â@test”, ¾,€",
110
- :headers_present => false, :headers => nil, :user_headers => nil, :type => "copy_and_paste", :column_separator => :comma, :record_separator => :newline_mac, :compulsory_headers => {:email => true}})
110
+ :headers_present => false, :user_headers => nil, :type => "copy_and_paste", :column_separator => :comma, :record_separator => :newline_mac, :compulsory_headers => {:email => true}})
111
111
  end
112
112
 
113
113
  it "has the correct number of lines" do
@@ -131,7 +131,7 @@ describe TableImporter::Source do
131
131
  context 'when string is empty' do
132
132
  it 'raises an error when creating a source object' do
133
133
  expect{
134
- TableImporter::Source.new({:content => "", :headers_present => false, :headers => nil, :user_headers => nil, :type => "copy_and_paste", :column_separator => :comma, :record_separator => :newline_mac, :compulsory_headers => {:email => true}})
134
+ TableImporter::Source.new({:content => "", :headers_present => false, :user_headers => nil, :type => "copy_and_paste", :column_separator => :comma, :record_separator => :newline_mac, :compulsory_headers => {:email => true}})
135
135
  }.to raise_error(TableImporter::EmptyFileImportError)
136
136
  end
137
137
  end
@@ -6,7 +6,7 @@ describe TableImporter::Source do
6
6
 
7
7
  context 'when source is a csv file with headers' do
8
8
  before(:each) do
9
- @source = TableImporter::Source.new({:content => File.open([Dir.pwd, "/spec/files/csv/with_headers.csv"].join), :headers_present => true, :headers => nil, :user_headers => nil, :type => "csv", :column_separator => "", :record_separator => "", :compulsory_headers => {:email => true}})
9
+ @source = TableImporter::Source.new({:content => File.open([Dir.pwd, "/spec/files/csv/with_headers.csv"].join), :headers_present => true, :user_headers => nil, :type => "csv", :column_separator => "", :record_separator => "", :compulsory_headers => {:email => true}})
10
10
  end
11
11
 
12
12
  it "has the correct headers" do
@@ -42,20 +42,20 @@ describe TableImporter::Source do
42
42
  context 'when source is a csv file without headers it' do
43
43
  before(:each) do
44
44
  @source_headers = "false"
45
- @source = TableImporter::Source.new({:content => File.open([Dir.pwd, "/spec/files/csv/without_headers.csv"].join), :headers_present => false, :headers => nil, :user_headers => nil, :type => "csv", :column_separator => "", :record_separator => "", :compulsory_headers => {:email => true}})
45
+ @source = TableImporter::Source.new({:content => File.open([Dir.pwd, "/spec/files/csv/without_headers.csv"].join), :headers_present => false, :user_headers => nil, :type => "csv", :column_separator => "", :record_separator => "", :compulsory_headers => {:email => true}})
46
46
  end
47
47
 
48
48
  it "creates a source object" do
49
- TableImporter::Source.new({:content => File.open([Dir.pwd, "/spec/files/csv/without_headers.csv"].join), :headers_present => false, :headers => nil, :user_headers => nil, :type => "csv", :column_separator => "", :record_separator => "", :compulsory_headers => {:email => true}})
49
+ TableImporter::Source.new({:content => File.open([Dir.pwd, "/spec/files/csv/without_headers.csv"].join), :headers_present => false, :user_headers => nil, :type => "csv", :column_separator => "", :record_separator => "", :compulsory_headers => {:email => true}})
50
50
  end
51
51
 
52
52
  it "has the correct number of chunks" do
53
- source = TableImporter::Source.new({:content => File.open([Dir.pwd, "/spec/files/csv/without_headers.csv"].join), :headers_present => false, :headers => {"first_name"=>"", "last_name"=>"", "salutation"=>"", "tag_list"=>"", "email"=>"5", "organization"=>"", "url"=>"", "phone"=>"", "job_title"=>"", "second_url"=>"", "notes"=>"", "twitter_username"=>"", "skype_username"=>"", "pinterest_username"=>"", "instagram_username"=>"", "facebook_username"=>"", "last_name_prefix"=>"", "second_email"=>"", "phone_mobile"=>"", "street"=>"", "street_number"=>"", "zipcode"=>"", "city"=>"", "country"=>""}, :user_headers => nil, :type => "csv", :column_separator => :semicolon, :record_separator => :newline_mac, :compulsory_headers => {:email => true}})
53
+ source = TableImporter::Source.new({:content => File.open([Dir.pwd, "/spec/files/csv/without_headers.csv"].join), :headers_present => false, :user_headers => {"first_name"=>"", "last_name"=>"", "salutation"=>"", "tag_list"=>"", "email"=>"5", "organization"=>"", "url"=>"", "phone"=>"", "job_title"=>"", "second_url"=>"", "notes"=>"", "twitter_username"=>"", "skype_username"=>"", "pinterest_username"=>"", "instagram_username"=>"", "facebook_username"=>"", "last_name_prefix"=>"", "second_email"=>"", "phone_mobile"=>"", "street"=>"", "street_number"=>"", "zipcode"=>"", "city"=>"", "country"=>""}, :type => "csv", :column_separator => :semicolon, :record_separator => :newline_mac, :compulsory_headers => {:email => true}})
54
54
  source.get_chunks(4).count.should eql(3)
55
55
  end
56
56
 
57
57
  it "does not have extra spaces in the final chunk" do
58
- source = TableImporter::Source.new({:content => File.open([Dir.pwd, "/spec/files/csv/without_headers.csv"].join), :headers_present => false, :headers => {"first_name"=>"", "last_name"=>"", "salutation"=>"", "tag_list"=>"", "email"=>"5", "organization"=>"", "url"=>"", "phone"=>"", "job_title"=>"", "second_url"=>"", "notes"=>"", "twitter_username"=>"", "skype_username"=>"", "pinterest_username"=>"", "instagram_username"=>"", "facebook_username"=>"", "last_name_prefix"=>"", "second_email"=>"", "phone_mobile"=>"", "street"=>"", "street_number"=>"", "zipcode"=>"", "city"=>"", "country"=>""}, :user_headers => nil, :type => "csv", :column_separator => :semicolon, :record_separator => :newline_mac, :compulsory_headers => {:email => true}})
58
+ source = TableImporter::Source.new({:content => File.open([Dir.pwd, "/spec/files/csv/without_headers.csv"].join), :headers_present => false, :user_headers => {"first_name"=>"", "last_name"=>"", "salutation"=>"", "tag_list"=>"", "email"=>"5", "organization"=>"", "url"=>"", "phone"=>"", "job_title"=>"", "second_url"=>"", "notes"=>"", "twitter_username"=>"", "skype_username"=>"", "pinterest_username"=>"", "instagram_username"=>"", "facebook_username"=>"", "last_name_prefix"=>"", "second_email"=>"", "phone_mobile"=>"", "street"=>"", "street_number"=>"", "zipcode"=>"", "city"=>"", "country"=>""}, :type => "csv", :column_separator => :semicolon, :record_separator => :newline_mac, :compulsory_headers => {:email => true}})
59
59
  source.get_chunks(4).last[:lines].count.should eql(1)
60
60
  end
61
61
 
@@ -67,15 +67,15 @@ describe TableImporter::Source do
67
67
  context 'when source is an edge-case csv file without headers' do
68
68
  before(:each) do
69
69
  @source_headers = "false"
70
- @source = TableImporter::Source.new({:content => File.open([Dir.pwd, "/spec/files/csv/edge_cases.csv"].join), :headers_present => false, :headers => nil, :user_headers => nil, :type => "csv", :column_separator => "", :record_separator => "", :compulsory_headers => {:email => true}})
70
+ @source = TableImporter::Source.new({:content => File.open([Dir.pwd, "/spec/files/csv/edge_cases.csv"].join), :headers_present => false, :user_headers => nil, :type => "csv", :column_separator => "", :record_separator => "", :compulsory_headers => {:email => true}})
71
71
  end
72
72
 
73
73
  it "creates a source object" do
74
- TableImporter::Source.new({:content => File.open([Dir.pwd, "/spec/files/csv/edge_cases.csv"].join), :headers_present => false, :headers => nil, :user_headers => nil, :type => "csv", :column_separator => "", :record_separator => "", :compulsory_headers => {:email => true}})
74
+ TableImporter::Source.new({:content => File.open([Dir.pwd, "/spec/files/csv/edge_cases.csv"].join), :headers_present => false, :user_headers => nil, :type => "csv", :column_separator => "", :record_separator => "", :compulsory_headers => {:email => true}})
75
75
  end
76
76
 
77
77
  it "has the correct number of chunks" do
78
- source = TableImporter::Source.new({:content => File.open([Dir.pwd, "/spec/files/csv/edge_cases.csv"].join), :headers_present => false, :headers => {"first_name"=>"", "last_name"=>"", "salutation"=>"", "tag_list"=>"", "email"=>"1", "organization"=>"", "url"=>"", "phone"=>"", "job_title"=>"", "second_url"=>"", "notes"=>"", "twitter_username"=>"", "skype_username"=>"", "pinterest_username"=>"", "instagram_username"=>"", "facebook_username"=>"", "last_name_prefix"=>"", "second_email"=>"", "phone_mobile"=>"", "street"=>"", "street_number"=>"", "zipcode"=>"", "city"=>"", "country"=>""}, :user_headers => nil, :type => "csv", :column_separator => "", :record_separator => "", :compulsory_headers => {:email => true}})
78
+ source = TableImporter::Source.new({:content => File.open([Dir.pwd, "/spec/files/csv/edge_cases.csv"].join), :headers_present => false, :user_headers => {"first_name"=>"", "last_name"=>"", "salutation"=>"", "tag_list"=>"", "email"=>"1", "organization"=>"", "url"=>"", "phone"=>"", "job_title"=>"", "second_url"=>"", "notes"=>"", "twitter_username"=>"", "skype_username"=>"", "pinterest_username"=>"", "instagram_username"=>"", "facebook_username"=>"", "last_name_prefix"=>"", "second_email"=>"", "phone_mobile"=>"", "street"=>"", "street_number"=>"", "zipcode"=>"", "city"=>"", "country"=>""}, :type => "csv", :column_separator => "", :record_separator => "", :compulsory_headers => {:email => true}})
79
79
  source.get_chunks(4).count.should eql(3)
80
80
  end
81
81
 
@@ -86,7 +86,7 @@ describe TableImporter::Source do
86
86
 
87
87
  context 'when source is a badly encoded file' do
88
88
  it 'can still get the correct chunks' do
89
- source = TableImporter::Source.new({:content => File.open([Dir.pwd, "/spec/files/csv/mexico2013_pressdoc.csv"].join), :headers_present => true, :headers => nil, :user_headers => nil, :type => "csv", :column_separator => "", :record_separator => "", :compulsory_headers => {:email => true}})
89
+ source = TableImporter::Source.new({:content => File.open([Dir.pwd, "/spec/files/csv/mexico2013_pressdoc.csv"].join), :headers_present => true, :user_headers => nil, :type => "csv", :column_separator => "", :record_separator => "", :compulsory_headers => {:email => true}})
90
90
  source.get_chunks.first[:lines].count.should eql(49)
91
91
  end
92
92
  end
@@ -95,7 +95,7 @@ describe TableImporter::Source do
95
95
 
96
96
  it 'raises an error when creating a source object' do
97
97
  begin
98
- TableImporter::Source.new({:content => File.open([Dir.pwd, "/spec/files/csv/no_content.csv"].join), :headers_present => true, :headers => nil, :user_headers => nil, :type => "csv", :column_separator => "", :record_separator => "", :compulsory_headers => {:email => true}})
98
+ TableImporter::Source.new({:content => File.open([Dir.pwd, "/spec/files/csv/no_content.csv"].join), :headers_present => true, :user_headers => nil, :type => "csv", :column_separator => "", :record_separator => "", :compulsory_headers => {:email => true}})
99
99
  rescue TableImporter::EmptyFileImportError => e
100
100
  e.message
101
101
  end
@@ -105,7 +105,7 @@ describe TableImporter::Source do
105
105
  context 'when source has empty lines at start' do
106
106
 
107
107
  before(:each) do
108
- @source = TableImporter::Source.new({:content => File.open([Dir.pwd, "/spec/files/csv/empty_lines_at_start.csv"].join), :headers_present => true, :headers => nil, :user_headers => nil, :type => "csv", :column_separator => "", :record_separator => "", :compulsory_headers => {:email => true}})
108
+ @source = TableImporter::Source.new({:content => File.open([Dir.pwd, "/spec/files/csv/empty_lines_at_start.csv"].join), :headers_present => true, :user_headers => nil, :type => "csv", :column_separator => "", :record_separator => "", :compulsory_headers => {:email => true}})
109
109
  end
110
110
 
111
111
  it "Gets the preview lines without error" do
@@ -116,4 +116,19 @@ describe TableImporter::Source do
116
116
  @source = nil
117
117
  end
118
118
  end
119
+
120
+ context 'when source is badly encoded partway through the file' do
121
+
122
+ before(:each) do
123
+ @source = TableImporter::Source.new({:content => File.open([Dir.pwd, "/spec/files/csv/partway.csv"].join), :headers_present => false, :user_headers => nil, :type => "csv", :column_separator => "", :record_separator => "", :compulsory_headers => {:email => true}})
124
+ end
125
+
126
+ it "Gets the first chunk without error" do
127
+ @source.get_chunks[0][:lines].count.should eql(50)
128
+ end
129
+
130
+ after(:each) do
131
+ @source = nil
132
+ end
133
+ end
119
134
  end
@@ -8,11 +8,11 @@ describe TableImporter::Source do
8
8
  context 'when mapping has not been set' do
9
9
 
10
10
  before(:each) do
11
- @source = TableImporter::Source.new({:content => File.open([Dir.pwd, "/spec/files/excel/with_headers.xls"].join), :headers_present => true, :user_headers => nil, :user_headers => nil, :type => "xls", :column_separator => "", :record_separator => "", :compulsory_headers => {:email => true}})
11
+ @source = TableImporter::Source.new({:content => File.open([Dir.pwd, "/spec/files/excel/with_headers.xls"].join), :headers_present => true, :user_headers => nil, :type => "xls", :column_separator => "", :record_separator => "", :compulsory_headers => {:email => true}})
12
12
  end
13
13
 
14
14
  it "gets the preview lines" do
15
- @source.get_preview_lines.count.should eql(5)
15
+ @source.get_preview_lines.count.should eql(6)
16
16
  end
17
17
 
18
18
  it "has the correct type" do
@@ -27,7 +27,7 @@ describe TableImporter::Source do
27
27
  context 'when mapping has been set' do
28
28
 
29
29
  before(:each) do
30
- @source = TableImporter::Source.new({:content => File.open([Dir.pwd, "/spec/files/excel/with_headers.xls"].join), :headers_present => true, :headers_present => {"first_name"=>"", "last_name"=>"", "salutation"=>"", "tag_list"=>"", "email"=>"0", "organization"=>"", "url"=>"", "phone"=>"", "job_title"=>"", "second_url"=>"", "notes"=>"", "twitter_username"=>"", "skype_username"=>"", "pinterest_username"=>"", "instagram_username"=>"", "facebook_username"=>"", "last_name_prefix"=>"", "second_email"=>"", "phone_mobile"=>"", "street"=>"", "street_number"=>"", "zipcode"=>"", "city"=>"", "country"=>""}, :user_headers => nil, :type => "xls", :column_separator => "", :record_separator => "", :compulsory_headers => {:email => true}})
30
+ @source = TableImporter::Source.new({:content => File.open([Dir.pwd, "/spec/files/excel/with_headers.xls"].join), :headers_present => true, :user_headers => {"first_name"=>"", "last_name"=>"", "salutation"=>"", "tag_list"=>"", "email"=>"0", "organization"=>"", "url"=>"", "phone"=>"", "job_title"=>"", "second_url"=>"", "notes"=>"", "twitter_username"=>"", "skype_username"=>"", "pinterest_username"=>"", "instagram_username"=>"", "facebook_username"=>"", "last_name_prefix"=>"", "second_email"=>"", "phone_mobile"=>"", "street"=>"", "street_number"=>"", "zipcode"=>"", "city"=>"", "country"=>""}, :type => "xls", :column_separator => "", :record_separator => "", :compulsory_headers => {:email => true}})
31
31
  end
32
32
 
33
33
  it "has the correct headers" do
@@ -53,12 +53,10 @@ describe TableImporter::Source do
53
53
  end
54
54
  end
55
55
 
56
-
57
-
58
56
  context 'when source is an xls file without headers' do
59
57
  context 'when mapping has not been set' do
60
58
  before(:each) do
61
- @source = TableImporter::Source.new({:content => File.open([Dir.pwd, "/spec/files/excel/without_headers.xls"].join), :headers_present => false, :user_headers => nil, :user_headers => nil, :type => "xls", :column_separator => "", :record_separator => "", :compulsory_headers => {:email => true}})
59
+ @source = TableImporter::Source.new({:content => File.open([Dir.pwd, "/spec/files/excel/without_headers.xls"].join), :headers_present => false, :user_headers => nil, :type => "xls", :column_separator => "", :record_separator => "", :compulsory_headers => {:email => true}})
62
60
  end
63
61
 
64
62
  it "has the correct number of columns" do
@@ -73,7 +71,7 @@ describe TableImporter::Source do
73
71
  context 'when mapping has been set' do
74
72
 
75
73
  before(:each) do
76
- @source = TableImporter::Source.new({:content => File.open([Dir.pwd, "/spec/files/excel/without_headers.xls"].join), :headers_present => false, :user_headers => {"first_name"=>"", "last_name"=>"", "salutation"=>"", "tag_list"=>"", "email"=>"0", "organization"=>"", "url"=>"", "phone"=>"", "job_title"=>"", "second_url"=>"", "notes"=>"", "twitter_username"=>"", "skype_username"=>"", "pinterest_username"=>"", "instagram_username"=>"", "facebook_username"=>"", "last_name_prefix"=>"", "second_email"=>"", "phone_mobile"=>"", "street"=>"", "street_number"=>"", "zipcode"=>"", "city"=>"", "country"=>""}, :user_headers => nil, :type => "xls", :column_separator => "", :record_separator => "", :compulsory_headers => {:email => true}})
74
+ @source = TableImporter::Source.new({:content => File.open([Dir.pwd, "/spec/files/excel/without_headers.xls"].join), :headers_present => false, :user_headers => {"first_name"=>"", "last_name"=>"", "salutation"=>"", "tag_list"=>"", "email"=>"0", "organization"=>"", "url"=>"", "phone"=>"", "job_title"=>"", "second_url"=>"", "notes"=>"", "twitter_username"=>"", "skype_username"=>"", "pinterest_username"=>"", "instagram_username"=>"", "facebook_username"=>"", "last_name_prefix"=>"", "second_email"=>"", "phone_mobile"=>"", "street"=>"", "street_number"=>"", "zipcode"=>"", "city"=>"", "country"=>""}, :type => "xls", :column_separator => "", :record_separator => "", :compulsory_headers => {:email => true}})
77
75
  end
78
76
 
79
77
  it "has the correct number of lines" do
@@ -121,7 +119,7 @@ describe TableImporter::Source do
121
119
  context 'when source has empty lines' do
122
120
 
123
121
  before(:each) do
124
- @source = TableImporter::Source.new({:content => File.open([Dir.pwd, "/spec/files/excel/empty_lines.xlsx"].join), :headers_present => false, :user_headers => nil, :user_headers => nil, :type => "xls", :column_separator => "", :record_separator => "", :compulsory_headers => {:email => true}})
122
+ @source = TableImporter::Source.new({:content => File.open([Dir.pwd, "/spec/files/excel/empty_lines.xlsx"].join), :headers_present => false, :user_headers => nil, :type => "xls", :column_separator => "", :record_separator => "", :compulsory_headers => {:email => true}})
125
123
  end
126
124
 
127
125
  it "does not throw an error" do
@@ -136,7 +134,7 @@ describe TableImporter::Source do
136
134
  context 'when source has 20 empty lines at the beginning' do
137
135
 
138
136
  before(:each) do
139
- @source = TableImporter::Source.new({:content => File.open([Dir.pwd, "/spec/files/excel/empty_lines_at_start.xlsx"].join), :headers_present => true, :user_headers => nil, :user_headers => nil, :type => "xls", :column_separator => "", :record_separator => "", :compulsory_headers => {:email => true}})
137
+ @source = TableImporter::Source.new({:content => File.open([Dir.pwd, "/spec/files/excel/empty_lines_at_start.xlsx"].join), :headers_present => true, :user_headers => nil, :type => "xls", :column_separator => "", :record_separator => "", :compulsory_headers => {:email => true}})
140
138
  end
141
139
 
142
140
  it "does not throw an error" do
@@ -159,16 +157,40 @@ describe TableImporter::Source do
159
157
  end
160
158
  end
161
159
 
162
- context 'mediaprofiler' do
160
+ context 'premapped_1' do
163
161
 
164
162
  before(:each) do
165
- @source = TableImporter::Source.new({:content => File.open([Dir.pwd, "/spec/files/excel/mediaprofiler.xls"].join), :headers_present => "true", :type => "xls", :column_separator => "", :record_separator => "",
166
- :user_headers => {:first_name=>0, :last_name_prefix=>1, :last_name=>2, :organization=>3, :email=>5, :second_email=>6, :phone=>7, :phone_mobile=>8, :twitter_username=>9, :url=>10, :street=>11, :street_number=>12, :zipcode=>13, :country=>18}
163
+ @source = TableImporter::Source.new({:content => File.open([Dir.pwd, "/spec/files/excel/premapped_1.xls"].join), :headers_present => "true", :type => "xls", :column_separator => "", :record_separator => "",
164
+ :user_headers => {:first_name=>0, :last_name_prefix=>1, :last_name=>2, :organization=>3, :second_email=>5, :email=>6, :phone=>7, :phone_mobile=>8, :twitter_username=>9, :url=>10, :street=>11, :street_number=>12, :zipcode=>13, :country=>18},
165
+ :compulsory_headers => {:email => true}
167
166
  })
168
167
  end
169
168
 
170
169
  it "has correct mapping" do
171
- @source.get_preview_lines.first.keys.first.should == :first_name
170
+ expect(@source.get_preview_lines.first.keys.first).to eql(:first_name)
171
+ end
172
+
173
+ after(:each) do
174
+ @source = nil
175
+ end
176
+ end
177
+
178
+ context 'premapped_2' do
179
+
180
+ before(:each) do
181
+ @source = TableImporter::Source.new({:content => File.open([Dir.pwd, "/spec/files/excel/premapped_2.xls"].join), :headers_present => "true", :type => "xls", :column_separator => "", :record_separator => "",
182
+ :user_headers => {:organization=>0, :salutation=>2, :first_name=>3, :last_name_prefix=>4, :last_name=>5, :street=>6, :zipcode=>9, :city=>10, :country=>11,
183
+ :url=>12, :email=>13, :phone=>14, :notes=>18, :secondary_tags=>19, cached_tag_list: 24},
184
+ :compulsory_headers => {:email => true}
185
+ })
186
+ end
187
+
188
+ it "has correct mapping" do
189
+ expect(@source.get_preview_lines.first.keys.first).to eql(:organization)
190
+ end
191
+
192
+ it "gets the correct number of preview lines" do
193
+ expect(@source.get_preview_lines.count).to eql(1)
172
194
  end
173
195
 
174
196
  after(:each) do
@@ -21,7 +21,7 @@ Gem::Specification.new do |spec|
21
21
  spec.add_dependency "spreadsheet", "0.9.1"
22
22
  spec.add_dependency 'roo'
23
23
  spec.add_dependency 'google_drive'
24
- spec.add_dependency 'smarter_csv'
24
+ spec.add_dependency 'smarter_csv', '1.0.17'
25
25
 
26
26
  spec.add_development_dependency "bundler", "~> 1.3"
27
27
  spec.add_development_dependency "rake"
metadata CHANGED
@@ -1,14 +1,14 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: table_importer
3
3
  version: !ruby/object:Gem::Version
4
- version: 0.1.1
4
+ version: 0.2.1
5
5
  platform: ruby
6
6
  authors:
7
7
  - Nick Dowse
8
8
  autorequire:
9
9
  bindir: bin
10
10
  cert_chain: []
11
- date: 2014-09-26 00:00:00.000000000 Z
11
+ date: 2014-11-20 00:00:00.000000000 Z
12
12
  dependencies:
13
13
  - !ruby/object:Gem::Dependency
14
14
  name: spreadsheet
@@ -56,16 +56,16 @@ dependencies:
56
56
  name: smarter_csv
57
57
  requirement: !ruby/object:Gem::Requirement
58
58
  requirements:
59
- - - '>='
59
+ - - '='
60
60
  - !ruby/object:Gem::Version
61
- version: '0'
61
+ version: 1.0.17
62
62
  type: :runtime
63
63
  prerelease: false
64
64
  version_requirements: !ruby/object:Gem::Requirement
65
65
  requirements:
66
- - - '>='
66
+ - - '='
67
67
  - !ruby/object:Gem::Version
68
- version: '0'
68
+ version: 1.0.17
69
69
  - !ruby/object:Gem::Dependency
70
70
  name: bundler
71
71
  requirement: !ruby/object:Gem::Requirement
@@ -208,13 +208,15 @@ files:
208
208
  - spec/files/csv/empty_lines_at_start.csv
209
209
  - spec/files/csv/mexico2013_pressdoc.csv
210
210
  - spec/files/csv/no_content.csv
211
+ - spec/files/csv/partway.csv
211
212
  - spec/files/csv/with_headers.csv
212
213
  - spec/files/csv/without_headers.csv
213
214
  - spec/files/excel/edge_cases.xls
214
215
  - spec/files/excel/empty_lines.xlsx
215
216
  - spec/files/excel/empty_lines_at_start.xlsx
216
- - spec/files/excel/mediaprofiler.xls
217
217
  - spec/files/excel/no_content.xlsx
218
+ - spec/files/excel/premapped_1.xls
219
+ - spec/files/excel/premapped_2.xls
218
220
  - spec/files/excel/with_headers.xls
219
221
  - spec/files/excel/without_headers.xls
220
222
  - spec/sources/copy_and_paste_spec.rb
@@ -256,13 +258,15 @@ test_files:
256
258
  - spec/files/csv/empty_lines_at_start.csv
257
259
  - spec/files/csv/mexico2013_pressdoc.csv
258
260
  - spec/files/csv/no_content.csv
261
+ - spec/files/csv/partway.csv
259
262
  - spec/files/csv/with_headers.csv
260
263
  - spec/files/csv/without_headers.csv
261
264
  - spec/files/excel/edge_cases.xls
262
265
  - spec/files/excel/empty_lines.xlsx
263
266
  - spec/files/excel/empty_lines_at_start.xlsx
264
- - spec/files/excel/mediaprofiler.xls
265
267
  - spec/files/excel/no_content.xlsx
268
+ - spec/files/excel/premapped_1.xls
269
+ - spec/files/excel/premapped_2.xls
266
270
  - spec/files/excel/with_headers.xls
267
271
  - spec/files/excel/without_headers.xls
268
272
  - spec/sources/copy_and_paste_spec.rb
Binary file