active_admin_csv_import 1.1.1 → 1.1.2

This diff shows the content of the publicly released package versions as published to their registry and is provided for informational purposes only.
@@ -1,3 +1,3 @@
 module ActiveAdminCsvImport
-  VERSION = "1.1.1"
+  VERSION = "1.1.2"
 end
@@ -2,53 +2,79 @@ this.recline = this.recline || {};
 this.recline.Backend = this.recline.Backend || {};
 this.recline.Backend.CSV = this.recline.Backend.CSV || {};
 
+// Note that provision of jQuery is optional (it is **only** needed if you use fetch on a remote file)
 (function(my) {
+  "use strict";
+  my.__type__ = 'csv';
+
+  // use either jQuery or Underscore Deferred depending on what is available
+  var Deferred = (typeof jQuery !== "undefined" && jQuery.Deferred) || _.Deferred;
+
   // ## fetch
   //
-  // 3 options
+  // fetch supports 3 options depending on the attribute provided on the dataset argument
   //
-  // 1. CSV local fileobject -> HTML5 file object + CSV parser
-  // 2. Already have CSV string (in data) attribute -> CSV parser
-  // 2. online CSV file that is ajax-able -> ajax + csv parser
+  // 1. `dataset.file`: `file` is an HTML5 file object. This is opened and parsed with the CSV parser.
+  // 2. `dataset.data`: `data` is a string in CSV format. This is passed directly to the CSV parser
+  // 3. `dataset.url`: a url to an online CSV file that is ajax accessible (note this usually requires either local or on a server that is CORS enabled). The file is then loaded using jQuery.ajax and parsed using the CSV parser (NB: this requires jQuery)
   //
-  // All options generates similar data and give a memory store outcome
+  // All options generates similar data and use the memory store outcome, that is they return something like:
+  //
+  // <pre>
+  // {
+  //   records: [ [...], [...], ... ],
+  //   metadata: { may be some metadata e.g. file name }
+  //   useMemoryStore: true
+  // }
+  // </pre>
   my.fetch = function(dataset) {
-    var dfd = $.Deferred();
+    var dfd = new Deferred();
     if (dataset.file) {
       var reader = new FileReader();
       var encoding = dataset.encoding || 'UTF-8';
       reader.onload = function(e) {
-        var rows = my.parseCSV(e.target.result, dataset);
-        dfd.resolve({
-          records: rows,
-          metadata: {
-            filename: dataset.file.name
-          },
-          useMemoryStore: true
-        });
+        var out = my.extractFields(my.parseCSV(e.target.result, dataset), dataset);
+        out.useMemoryStore = true;
+        out.metadata = {
+          filename: dataset.file.name
+        }
+        dfd.resolve(out);
       };
       reader.onerror = function(e) {
         alert('Failed to load file. Code: ' + e.target.error.code);
       };
       reader.readAsText(dataset.file, encoding);
     } else if (dataset.data) {
-      var rows = my.parseCSV(dataset.data, dataset);
-      dfd.resolve({
-        records: rows,
-        useMemoryStore: true
-      });
+      var out = my.extractFields(my.parseCSV(dataset.data, dataset), dataset);
+      out.useMemoryStore = true;
+      dfd.resolve(out);
     } else if (dataset.url) {
-      $.get(dataset.url).done(function(data) {
-        var rows = my.parseCSV(data, dataset);
-        dfd.resolve({
-          records: rows,
-          useMemoryStore: true
-        });
+      jQuery.get(dataset.url).done(function(data) {
+        var out = my.extractFields(my.parseCSV(data, dataset), dataset);
+        out.useMemoryStore = true;
+        dfd.resolve(out);
       });
     }
     return dfd.promise();
   };
 
+  // Convert array of rows in { records: [ ...] , fields: [ ... ] }
+  // @param {Boolean} noHeaderRow If true assume that first row is not a header (i.e. list of fields but is data.
+  my.extractFields = function(rows, noFields) {
+    if (noFields.noHeaderRow !== true && rows.length > 0) {
+      return {
+        fields: rows[0],
+        records: rows.slice(1)
+      }
+    } else {
+      return {
+        records: rows
+      }
+    }
+  };
+
+  // ## parseCSV
+  //
   // Converts a Comma Separated Values string into an array of arrays.
   // Each line in the CSV becomes an array.
   //
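
The practical effect of the fetch rewrite above is that callers no longer receive raw parsed rows: the output of parseCSV is passed through the new extractFields, so the resolved object carries fields (the header row, unless noHeaderRow is set on the dataset) and records separately, plus useMemoryStore. A minimal usage sketch, not part of the package, assuming recline.js and Underscore are loaded (jQuery is only needed for the dataset.url case):

    // Hypothetical caller of the updated fetch, using an inline CSV string
    var dataset = {
      data: 'name,age\n"Smith, Jo",34\nAlice,29'
    };
    recline.Backend.CSV.fetch(dataset).done(function(result) {
      // extractFields has split the header row off the parsed rows
      console.log(result.fields);         // first row, e.g. ['name', 'age']
      console.log(result.records);        // remaining rows as arrays
      console.log(result.useMemoryStore); // true
    });

Setting noHeaderRow: true on the dataset would instead leave every parsed row in records and omit fields.
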
@@ -56,11 +82,19 @@ this.recline.Backend.CSV = this.recline.Backend.CSV || {};
   //
   // @return The CSV parsed as an array
   // @type Array
-  //
+  //
   // @param {String} s The string to convert
   // @param {Object} options Options for loading CSV including
-  // @param {Boolean} [trim=false] If set to True leading and trailing whitespace is stripped off of each non-quoted field as it is imported
-  // @param {String} [separator=','] Separator for CSV file
+  // @param {Boolean} [trim=false] If set to True leading and trailing
+  //     whitespace is stripped off of each non-quoted field as it is imported
+  // @param {String} [delimiter=','] A one-character string used to separate
+  //     fields. It defaults to ','
+  // @param {String} [quotechar='"'] A one-character string used to quote
+  //     fields containing special characters, such as the delimiter or
+  //     quotechar, or which contain new-line characters. It defaults to '"'
+  //
+  // @param {Integer} skipInitialRows A integer number of rows to skip (default 0)
+  //
   // Heavily based on uselesscode's JS CSV parser (MIT Licensed):
   // http://www.uselesscode.org/javascript/csv/
   my.parseCSV = function(s, options) {
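
As the documentation above (and the variable renames in the hunks that follow) indicate, this release renames the parser options: the old separator option becomes delimiter, the old delimiter option (which actually held the quote character) becomes quotechar, and skipInitialRows is new. A hedged sketch of calling the parser directly with the new names, using made-up input:

    // Illustrative only: semicolon-delimited data with a junk first line
    var rows = recline.Backend.CSV.parseCSV(
      "# a comment line to skip\nid;label\n1;'semi;colon'",
      {
        delimiter: ';',     // field separator (formerly `separator`)
        quotechar: "'",     // quote character (formerly, confusingly, `delimiter`)
        skipInitialRows: 1  // drop the leading comment line from the result
      }
    );
    // rows is an array of arrays, one per remaining line of the input
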
@@ -69,8 +103,9 @@ this.recline.Backend.CSV = this.recline.Backend.CSV || {};
 
     var options = options || {};
     var trm = (options.trim === false) ? false : true;
-    var separator = options.separator || ',';
-    var delimiter = options.delimiter || '"';
+    var delimiter = options.delimiter || ',';
+    var quotechar = options.quotechar || '"';
+
     var cur = '', // The character we are currently processing.
       inQuote = false,
       fieldQuoted = false,
@@ -104,7 +139,7 @@ this.recline.Backend.CSV = this.recline.Backend.CSV || {};
       cur = s.charAt(i);
 
       // If we are at a EOF or EOR
-      if (inQuote === false && (cur === separator || cur === "\n")) {
+      if (inQuote === false && (cur === delimiter || cur === "\n")) {
         field = processField(field);
         // Add the current field to the current row
         row.push(field);
@@ -117,8 +152,8 @@ this.recline.Backend.CSV = this.recline.Backend.CSV || {};
         field = '';
         fieldQuoted = false;
       } else {
-        // If it's not a delimiter, add it to the field buffer
-        if (cur !== delimiter) {
+        // If it's not a quotechar, add it to the field buffer
+        if (cur !== quotechar) {
           field += cur;
         } else {
           if (!inQuote) {
@@ -126,9 +161,9 @@ this.recline.Backend.CSV = this.recline.Backend.CSV || {};
             inQuote = true;
             fieldQuoted = true;
           } else {
-            // Next char is delimiter, this is an escaped delimiter
-            if (s.charAt(i + 1) === delimiter) {
-              field += delimiter;
+            // Next char is quotechar, this is an escaped quotechar
+            if (s.charAt(i + 1) === quotechar) {
+              field += quotechar;
               // Skip the next char
               i += 1;
             } else {
@@ -145,6 +180,97 @@ this.recline.Backend.CSV = this.recline.Backend.CSV || {};
     row.push(field);
     out.push(row);
 
+    // Expose the ability to discard initial rows
+    if (options.skipInitialRows) out = out.slice(options.skipInitialRows);
+
+    return out;
+  };
+
+  // ## serializeCSV
+  //
+  // Convert an Object or a simple array of arrays into a Comma
+  // Separated Values string.
+  //
+  // Nulls are converted to empty fields and integers or floats are converted to non-quoted numbers.
+  //
+  // @return The array serialized as a CSV
+  // @type String
+  //
+  // @param {Object or Array} dataToSerialize The Object or array of arrays to convert. Object structure must be as follows:
+  //
+  //     {
+  //       fields: [ {id: .., ...}, {id: ...,
+  //       records: [ { record }, { record }, ... ]
+  //       ... // more attributes we do not care about
+  //     }
+  //
+  // @param {object} options Options for serializing the CSV file including
+  // delimiter and quotechar (see parseCSV options parameter above for
+  // details on these).
+  //
+  // Heavily based on uselesscode's JS CSV serializer (MIT Licensed):
+  // http://www.uselesscode.org/javascript/csv/
+  my.serializeCSV = function(dataToSerialize, options) {
+    var a = null;
+    if (dataToSerialize instanceof Array) {
+      a = dataToSerialize;
+    } else {
+      a = [];
+      var fieldNames = _.pluck(dataToSerialize.fields, 'id');
+      a.push(fieldNames);
+      _.each(dataToSerialize.records, function(record, index) {
+        var tmp = _.map(fieldNames, function(fn) {
+          return record[fn];
+        });
+        a.push(tmp);
+      });
+    }
+    var options = options || {};
+    var delimiter = options.delimiter || ',';
+    var quotechar = options.quotechar || '"';
+
+    var cur = '', // The character we are currently processing.
+      field = '', // Buffer for building up the current field
+      row = '',
+      out = '',
+      i,
+      j,
+      processField;
+
+    processField = function(field) {
+      if (field === null) {
+        // If field is null set to empty string
+        field = '';
+      } else if (typeof field === "string" && rxNeedsQuoting.test(field)) {
+        // Convert string to delimited string
+        field = quotechar + field + quotechar;
+      } else if (typeof field === "number") {
+        // Convert number to string
+        field = field.toString(10);
+      }
+
+      return field;
+    };
+
+    for (i = 0; i < a.length; i += 1) {
+      cur = a[i];
+
+      for (j = 0; j < cur.length; j += 1) {
+        field = processField(cur[j]);
+        // If this is EOR append row to output and flush row
+        if (j === (cur.length - 1)) {
+          row += field;
+          out += row + "\n";
+          row = '';
+        } else {
+          // Add the current field to the current row
+          row += field + delimiter;
+        }
+        // Flush the field buffer
+        field = '';
+      }
+    }
+
     return out;
   };
 
@@ -178,4 +304,4 @@ this.recline.Backend.CSV = this.recline.Backend.CSV || {};
   }
 
 
-}(this.recline.Backend.CSV));
+}(this.recline.Backend.CSV));
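
The bulk of the new code in this file is serializeCSV, the inverse of parseCSV. A short sketch, with invented field ids and records, of how it could be called against the dataset shape its comment describes:

    // Hypothetical round-trip through the new serializeCSV
    var csv = recline.Backend.CSV.serializeCSV(
      {
        fields: [{id: 'city'}, {id: 'population'}],
        records: [
          {city: 'Berlin', population: 3500000},
          {city: 'Wellington, NZ', population: 400000}
        ]
      },
      {delimiter: ',', quotechar: '"'}
    );
    // Numbers stay unquoted and fields that need quoting (here, the value
    // containing a comma) are wrapped in the quotechar, giving roughly:
    //   city,population
    //   Berlin,3500000
    //   "Wellington, NZ",400000

Passing a plain array of arrays instead of the object skips the _.pluck over fields and serializes the rows exactly as given.
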
metadata CHANGED
@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: active_admin_csv_import
 version: !ruby/object:Gem::Version
-  version: 1.1.1
+  version: 1.1.2
 prerelease:
 platform: ruby
 authors:
@@ -9,11 +9,11 @@ authors:
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2013-12-09 00:00:00.000000000 Z
+date: 2014-01-04 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: rails
-  requirement: &70147043295120 !ruby/object:Gem::Requirement
+  requirement: &70178438708240 !ruby/object:Gem::Requirement
     none: false
     requirements:
     - - ! '>='
@@ -21,10 +21,10 @@ dependencies:
         version: '3.1'
   type: :runtime
   prerelease: false
-  version_requirements: *70147043295120
+  version_requirements: *70178438708240
 - !ruby/object:Gem::Dependency
   name: railties
-  requirement: &70147043307680 !ruby/object:Gem::Requirement
+  requirement: &70178438707680 !ruby/object:Gem::Requirement
     none: false
     requirements:
     - - ! '>='
@@ -32,7 +32,7 @@ dependencies:
         version: '3.1'
   type: :runtime
   prerelease: false
-  version_requirements: *70147043307680
+  version_requirements: *70178438707680
 description: CSV import for Active Admin capable of handling CSV files too large to
   import via direct file upload to Heroku
 email: