table_importer 0.1.0 → 0.1.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/lib/table_importer/excel.rb +1 -54
- data/lib/table_importer/google.rb +2 -55
- data/lib/table_importer/roo_spreadsheet_source.rb +58 -0
- data/lib/table_importer/source.rb +1 -0
- data/lib/table_importer/version.rb +1 -1
- data/spec/sources/google_spec.rb +26 -1
- data/spec/spec_helper.rb +1 -0
- data/spec/vcr_cassettes/google_authentication.yml +754 -0
- data/spec/vcr_setup.rb +6 -0
- data/table_importer.gemspec +2 -0
- metadata +35 -2
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA1:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: c222799798d8ddd74dd9122e21210fdf195eb454
+  data.tar.gz: 742761c4969e4515560024965c34df055b792f8c
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 0649b415d943e1967ff3618794bfc3290dd56fc50b795319e988b4ebbc51308c86a4451d92215fe1ba50b4761d236941fa776b2688c4852c1d9be3f7b4bdb845
+  data.tar.gz: 02610c56bba2bde7681fcf22556ab3c46714f115c89bb25f7de3b27d3de982d87e5f1c5de3e31a630c976d3b08f3552935ba0151ff493f46c771589ee5e8b579
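The two hash families above are the standard RubyGems checksums for the gem's metadata.gz and data.tar.gz archives (the 0.1.0 values are not shown). A minimal sketch of recomputing them with Ruby's standard Digest library, assuming the two archives have been extracted from the downloaded table_importer-0.1.1.gem into the current directory:

require 'digest'

# Hypothetical local copies of the archives packed inside the .gem file.
%w[metadata.gz data.tar.gz].each do |name|
  bytes = File.binread(name)
  puts "SHA1   #{name}: #{Digest::SHA1.hexdigest(bytes)}"
  puts "SHA512 #{name}: #{Digest::SHA512.hexdigest(bytes)}"
end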
data/lib/table_importer/excel.rb
CHANGED
@@ -1,6 +1,6 @@
 module TableImporter
 
-  class Excel <
+  class Excel < RooSpreadsheetSource
 
     def initialize(data)
       begin
@@ -22,10 +22,6 @@ module TableImporter
       end
     end
 
-    def get_headers
-      @headers
-    end
-
     def get_file
       begin
         if @type == "xls"
@@ -41,54 +37,5 @@ module TableImporter
     def get_type
       "xls"
     end
-
-    def get_preview_lines(start_point = 0, end_point = 10)
-      begin
-        @headers = @mapping.present? && @mapping != false ? convert_headers : @headers
-        lines = clean_chunks([get_lines(start_point, end_point)], @compulsory_headers)[0][:lines]
-        if lines.first.nil?
-          get_preview_lines(start_point+10, end_point+10)
-        else
-          lines[0..8]
-        end
-      rescue SystemStackError, NoMethodError
-        raise TableImporter::EmptyFileImportError.new
-      end
-    end
-
-    def get_lines(start, number_of_lines)
-      @last_row ||= @file.last_row
-      finish = [@last_row, start + number_of_lines].min
-      mapped_lines = []
-      (start...finish).each do |row_number|
-        mapped_lines << Hash[@headers.zip(@file.row(row_number))]
-      end
-      mapped_lines
-    end
-
-    def convert_headers
-      new_headers = @headers_present ? @file.row(1) : default_headers
-      new_headers = default_headers(new_headers.count)
-      return new_headers unless @mapping
-      @mapping.each do |key, value|
-        if value.to_i.to_s == value.to_s
-          new_headers[value.to_i] = key.to_sym
-        end
-      end
-      new_headers
-    end
-
-    def get_chunks(chunk_size)
-      @headers = convert_headers
-      @last_row ||= @file.last_row
-      chunks = []
-      start_point = @headers_present ? 2 : 1
-      while chunks.count <= @last_row/chunk_size
-        chunks << get_lines(start_point, chunk_size)
-        start_point += chunk_size
-      end
-      chunks.last << Hash[@headers.zip(@file.row(@last_row))]
-      clean_chunks(chunks, @compulsory_headers)
-    end
   end
 end
data/lib/table_importer/google.rb
CHANGED
@@ -1,13 +1,13 @@
 module TableImporter
 
-  class Google <
+  class Google < RooSpreadsheetSource
 
     def initialize(data)
       begin
         @headers_present = data[:headers_present]
         @file = get_file(data[:content].split(", ")[1], data[:content].split(", ")[0])
         @compulsory_headers = data[:compulsory_headers]
-        @delete_empty_columns = false
+        @delete_empty_columns = false
         @mapping = !data[:user_headers].blank? ? data[:user_headers] : data[:headers]
         raise TableImporter::EmptyFileImportError.new if !@file.first_row
         if !data[:headers].nil?
@@ -20,10 +20,6 @@ module TableImporter
       end
     end
 
-    def get_headers
-      @headers
-    end
-
     def get_file(file_key, access_token)
       begin
         Roo::Google.new(file_key, {:access_token => access_token})
@@ -35,54 +31,5 @@ module TableImporter
     def get_type
       "google"
     end
-
-    def get_preview_lines(start_point = 0, end_point = 10)
-      begin
-        @headers = @mapping.present? && @mapping != false ? convert_headers : @headers
-        lines = clean_chunks([get_lines(start_point, end_point)], @compulsory_headers)[0][:lines]
-        if lines.first.nil?
-          get_preview_lines(start_point+10, end_point+10)
-        else
-          lines[0..8]
-        end
-      rescue SystemStackError, NoMethodError
-        raise TableImporter::EmptyFileImportError.new
-      end
-    end
-
-    def get_lines(start, number_of_lines)
-      @last_row ||= @file.last_row
-      finish = [@last_row, start + number_of_lines].min
-      mapped_lines = []
-      (start...finish).each do |row_number|
-        mapped_lines << Hash[@headers.zip(@file.row(row_number))]
-      end
-      mapped_lines
-    end
-
-    def convert_headers
-      new_headers = @headers_present ? @file.row(1) : default_headers
-      new_headers = default_headers(new_headers.count)
-      return new_headers unless @mapping
-      @mapping.each do |key, value|
-        if value.to_i.to_s == value.to_s
-          new_headers[value.to_i] = key.to_sym
-        end
-      end
-      new_headers
-    end
-
-    def get_chunks(chunk_size)
-      @headers = convert_headers
-      @last_row ||= @file.last_row
-      chunks = []
-      start_point = @headers_present ? 2 : 1
-      while chunks.count <= @last_row/chunk_size
-        chunks << get_lines(start_point, chunk_size)
-        start_point += chunk_size
-      end
-      chunks.last << Hash[@headers.zip(@file.row(@last_row))]
-      clean_chunks(chunks, @compulsory_headers)
-    end
   end
 end
data/lib/table_importer/roo_spreadsheet_source.rb
ADDED
@@ -0,0 +1,58 @@
+module TableImporter
+
+  class RooSpreadsheetSource < Source
+
+    def get_headers
+      @headers
+    end
+
+    def get_preview_lines(start_point = 0, end_point = 10)
+      begin
+        @headers = @mapping.present? && @mapping != false ? convert_headers : @headers
+        lines = clean_chunks([get_lines(start_point, end_point)], @compulsory_headers)[0][:lines]
+        if lines.first.nil?
+          get_preview_lines(start_point+10, end_point+10)
+        else
+          lines[0..8]
+        end
+      rescue SystemStackError, NoMethodError
+        raise TableImporter::EmptyFileImportError.new
+      end
+    end
+
+    def get_lines(start, number_of_lines)
+      @last_row ||= @file.last_row
+      finish = [@last_row, start + number_of_lines].min
+      mapped_lines = []
+      (start...finish).each do |row_number|
+        mapped_lines << Hash[@headers.zip(@file.row(row_number))]
+      end
+      mapped_lines
+    end
+
+    def convert_headers
+      new_headers = @headers_present ? @file.row(1) : default_headers
+      new_headers = default_headers(new_headers.count)
+      return new_headers unless @mapping
+      @mapping.each do |key, value|
+        if value.to_i.to_s == value.to_s
+          new_headers[value.to_i] = key.to_sym
+        end
+      end
+      new_headers
+    end
+
+    def get_chunks(chunk_size)
+      @headers = convert_headers
+      @last_row ||= @file.last_row
+      chunks = []
+      start_point = @headers_present ? 2 : 1
+      while chunks.count <= @last_row/chunk_size
+        chunks << get_lines(start_point, chunk_size)
+        start_point += chunk_size
+      end
+      chunks.last << Hash[@headers.zip(@file.row(@last_row))]
+      clean_chunks(chunks, @compulsory_headers)
+    end
+  end
+end
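Taken together, the Excel and Google changes and this new file are an extract-superclass refactoring: the Roo-backed helpers (get_headers, get_preview_lines, get_lines, convert_headers and get_chunks) that were duplicated in both importer classes now live once in RooSpreadsheetSource, and both classes inherit them. A minimal sketch of the resulting 0.1.1 class hierarchy, with method bodies elided and only names taken from the diff above:

module TableImporter
  # Existing base class shared by all importers (lib/table_importer/source.rb).
  class Source
  end

  # New in 0.1.1: holds the Roo spreadsheet helpers formerly copied into
  # both Excel and Google.
  class RooSpreadsheetSource < Source
    def get_headers
      @headers
    end
    # get_preview_lines, get_lines, convert_headers, get_chunks ...
  end

  class Excel < RooSpreadsheetSource
  end

  class Google < RooSpreadsheetSource
  end
end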
data/spec/sources/google_spec.rb
CHANGED
@@ -1,10 +1,35 @@
 # encoding: UTF-8
+require 'vcr_setup'
 require 'spec_helper'
 require 'roo'
 require 'google_drive'
 
 describe TableImporter::Source do
+  context 'when source is a google file' do
+    before(:each) do
+      VCR.use_cassette('google_authentication', :record => :new_episodes) do
+        @source = TableImporter::Source.new({
+          :content => 'CLIENT_ID, ACCESS_TOKEN',
+          :headers_present => true,
+          :user_headers => nil,
+          :user_headers => nil,
+          :type => "google",
+          :column_separator => "",
+          :record_separator => "",
+          :compulsory_headers =>
+            {:email => true}
+        })
+      end
+    end
 
-
+    it "gets the correct type" do
+      VCR.use_cassette('google_authentication') do
+        @source.get_type.should eql("google")
+      end
+    end
 
+    after(:each) do
+      @source = nil
+    end
+  end
 end
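The rewritten spec wraps every Google Drive call in a VCR cassette and requires a new spec/vcr_setup.rb (listed above as +6 lines but not shown in this section). Its exact contents are not visible here; a minimal sketch of a setup file of that size, assuming the conventional VCR plus WebMock pairing and the cassette directory implied by data/spec/vcr_cassettes/google_authentication.yml:

require 'vcr'

VCR.configure do |c|
  # Directory that holds google_authentication.yml and future cassettes.
  c.cassette_library_dir = 'spec/vcr_cassettes'
  # Intercept HTTP traffic from the google_drive / roo clients during specs.
  c.hook_into :webmock
end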