@loaders.gl/shapefile 3.1.3 → 4.0.0-alpha.5
This diff reflects the changes between these two published versions of the package as they appear in their public registry; it is provided for informational purposes only.
- package/dist/bundle.js +2 -2
- package/dist/bundle.js.map +1 -0
- package/dist/dbf-loader.js +20 -29
- package/dist/dbf-loader.js.map +1 -0
- package/dist/dbf-worker.js +1 -1
- package/dist/index.js +4 -11
- package/dist/index.js.map +1 -0
- package/dist/lib/parsers/parse-dbf.js +264 -300
- package/dist/lib/parsers/parse-dbf.js.map +1 -0
- package/dist/lib/parsers/parse-shapefile.js +209 -231
- package/dist/lib/parsers/parse-shapefile.js.map +1 -0
- package/dist/lib/parsers/parse-shp-geometry.js +212 -265
- package/dist/lib/parsers/parse-shp-geometry.js.map +1 -0
- package/dist/lib/parsers/parse-shp-header.js +27 -38
- package/dist/lib/parsers/parse-shp-header.js.map +1 -0
- package/dist/lib/parsers/parse-shp.js +136 -152
- package/dist/lib/parsers/parse-shp.js.map +1 -0
- package/dist/lib/parsers/parse-shx.js +19 -25
- package/dist/lib/parsers/parse-shx.js.map +1 -0
- package/dist/lib/streaming/binary-chunk-reader.js +128 -152
- package/dist/lib/streaming/binary-chunk-reader.js.map +1 -0
- package/dist/lib/streaming/binary-reader.js +33 -50
- package/dist/lib/streaming/binary-reader.js.map +1 -0
- package/dist/lib/streaming/zip-batch-iterators.js +48 -57
- package/dist/lib/streaming/zip-batch-iterators.js.map +1 -0
- package/dist/shapefile-loader.js +22 -30
- package/dist/shapefile-loader.js.map +1 -0
- package/dist/shp-loader.js +22 -32
- package/dist/shp-loader.js.map +1 -0
- package/dist/shp-worker.js +1 -1
- package/dist/workers/dbf-worker.js +4 -5
- package/dist/workers/dbf-worker.js.map +1 -0
- package/dist/workers/shp-worker.js +4 -5
- package/dist/workers/shp-worker.js.map +1 -0
- package/package.json +9 -9
- package/dist/es5/bundle.js +0 -7
- package/dist/es5/bundle.js.map +0 -1
- package/dist/es5/dbf-loader.js +0 -68
- package/dist/es5/dbf-loader.js.map +0 -1
- package/dist/es5/index.js +0 -42
- package/dist/es5/index.js.map +0 -1
- package/dist/es5/lib/parsers/parse-dbf.js +0 -454
- package/dist/es5/lib/parsers/parse-dbf.js.map +0 -1
- package/dist/es5/lib/parsers/parse-shapefile.js +0 -497
- package/dist/es5/lib/parsers/parse-shapefile.js.map +0 -1
- package/dist/es5/lib/parsers/parse-shp-geometry.js +0 -288
- package/dist/es5/lib/parsers/parse-shp-geometry.js.map +0 -1
- package/dist/es5/lib/parsers/parse-shp-header.js +0 -39
- package/dist/es5/lib/parsers/parse-shp-header.js.map +0 -1
- package/dist/es5/lib/parsers/parse-shp.js +0 -283
- package/dist/es5/lib/parsers/parse-shp.js.map +0 -1
- package/dist/es5/lib/parsers/parse-shx.js +0 -31
- package/dist/es5/lib/parsers/parse-shx.js.map +0 -1
- package/dist/es5/lib/streaming/binary-chunk-reader.js +0 -218
- package/dist/es5/lib/streaming/binary-chunk-reader.js.map +0 -1
- package/dist/es5/lib/streaming/binary-reader.js +0 -56
- package/dist/es5/lib/streaming/binary-reader.js.map +0 -1
- package/dist/es5/lib/streaming/zip-batch-iterators.js +0 -118
- package/dist/es5/lib/streaming/zip-batch-iterators.js.map +0 -1
- package/dist/es5/shapefile-loader.js +0 -34
- package/dist/es5/shapefile-loader.js.map +0 -1
- package/dist/es5/shp-loader.js +0 -71
- package/dist/es5/shp-loader.js.map +0 -1
- package/dist/es5/workers/dbf-worker.js +0 -8
- package/dist/es5/workers/dbf-worker.js.map +0 -1
- package/dist/es5/workers/shp-worker.js +0 -8
- package/dist/es5/workers/shp-worker.js.map +0 -1
- package/dist/esm/bundle.js +0 -5
- package/dist/esm/bundle.js.map +0 -1
- package/dist/esm/dbf-loader.js +0 -23
- package/dist/esm/dbf-loader.js.map +0 -1
- package/dist/esm/index.js +0 -4
- package/dist/esm/index.js.map +0 -1
- package/dist/esm/lib/parsers/parse-dbf.js +0 -299
- package/dist/esm/lib/parsers/parse-dbf.js.map +0 -1
- package/dist/esm/lib/parsers/parse-shapefile.js +0 -226
- package/dist/esm/lib/parsers/parse-shapefile.js.map +0 -1
- package/dist/esm/lib/parsers/parse-shp-geometry.js +0 -234
- package/dist/esm/lib/parsers/parse-shp-geometry.js.map +0 -1
- package/dist/esm/lib/parsers/parse-shp-header.js +0 -32
- package/dist/esm/lib/parsers/parse-shp-header.js.map +0 -1
- package/dist/esm/lib/parsers/parse-shp.js +0 -154
- package/dist/esm/lib/parsers/parse-shp.js.map +0 -1
- package/dist/esm/lib/parsers/parse-shx.js +0 -22
- package/dist/esm/lib/parsers/parse-shx.js.map +0 -1
- package/dist/esm/lib/streaming/binary-chunk-reader.js +0 -137
- package/dist/esm/lib/streaming/binary-chunk-reader.js.map +0 -1
- package/dist/esm/lib/streaming/binary-reader.js +0 -35
- package/dist/esm/lib/streaming/binary-reader.js.map +0 -1
- package/dist/esm/lib/streaming/zip-batch-iterators.js +0 -52
- package/dist/esm/lib/streaming/zip-batch-iterators.js.map +0 -1
- package/dist/esm/shapefile-loader.js +0 -23
- package/dist/esm/shapefile-loader.js.map +0 -1
- package/dist/esm/shp-loader.js +0 -25
- package/dist/esm/shp-loader.js.map +0 -1
- package/dist/esm/workers/dbf-worker.js +0 -4
- package/dist/esm/workers/dbf-worker.js.map +0 -1
- package/dist/esm/workers/shp-worker.js +0 -4
- package/dist/esm/workers/shp-worker.js.map +0 -1
package/dist/shp-loader.js.map
ADDED
@@ -0,0 +1 @@
{"version":3,"sources":["../src/shp-loader.ts"],"names":["parseSHP","parseSHPInBatches","VERSION","SHP_MAGIC_NUMBER","SHPWorkerLoader","name","id","module","version","worker","category","extensions","mimeTypes","tests","Uint8Array","buffer","options","shp","_maxDimensions","SHPLoader","parse","arrayBuffer","parseSync","parseInBatches"],"mappings":"AACA,SAAQA,QAAR,EAAkBC,iBAAlB,QAA0C,yBAA1C;AAIA,MAAMC,OAAO,GAAG,2BAAuB,WAAvB,qBAAmD,QAAnE;AAEA,OAAO,MAAMC,gBAAgB,GAAG,CAAC,IAAD,EAAO,IAAP,EAAa,IAAb,EAAmB,IAAnB,CAAzB;AAKP,OAAO,MAAMC,eAAuB,GAAG;AACrCC,EAAAA,IAAI,EAAE,KAD+B;AAErCC,EAAAA,EAAE,EAAE,KAFiC;AAGrCC,EAAAA,MAAM,EAAE,WAH6B;AAIrCC,EAAAA,OAAO,EAAEN,OAJ4B;AAKrCO,EAAAA,MAAM,EAAE,IAL6B;AAMrCC,EAAAA,QAAQ,EAAE,UAN2B;AAOrCC,EAAAA,UAAU,EAAE,CAAC,KAAD,CAPyB;AAQrCC,EAAAA,SAAS,EAAE,CAAC,0BAAD,CAR0B;AAUrCC,EAAAA,KAAK,EAAE,CAAC,IAAIC,UAAJ,CAAeX,gBAAf,EAAiCY,MAAlC,CAV8B;AAWrCC,EAAAA,OAAO,EAAE;AACPC,IAAAA,GAAG,EAAE;AACHC,MAAAA,cAAc,EAAE;AADb;AADE;AAX4B,CAAhC;AAmBP,OAAO,MAAMC,SAA2B,GAAG,EACzC,GAAGf,eADsC;AAEzCgB,EAAAA,KAAK,EAAE,OAAOC,WAAP,EAAoBL,OAApB,KAAiChB,QAAQ,CAACqB,WAAD,EAAcL,OAAd,CAFP;AAGzCM,EAAAA,SAAS,EAAEtB,QAH8B;AAIzCuB,EAAAA,cAAc,EAAEtB;AAJyB,CAApC","sourcesContent":["import type {Loader, LoaderWithParser} from '@loaders.gl/loader-utils';\nimport {parseSHP, parseSHPInBatches} from './lib/parsers/parse-shp';\n\n// __VERSION__ is injected by babel-plugin-version-inline\n// @ts-ignore TS2304: Cannot find name '__VERSION__'.\nconst VERSION = typeof __VERSION__ !== 'undefined' ? __VERSION__ : 'latest';\n\nexport const SHP_MAGIC_NUMBER = [0x00, 0x00, 0x27, 0x0a];\n\n/**\n * SHP file loader\n */\nexport const SHPWorkerLoader: Loader = {\n name: 'SHP',\n id: 'shp',\n module: 'shapefile',\n version: VERSION,\n worker: true,\n category: 'geometry',\n extensions: ['shp'],\n mimeTypes: ['application/octet-stream'],\n // ISSUE: This also identifies SHX files, which are identical to SHP for the first 100 bytes...\n tests: [new Uint8Array(SHP_MAGIC_NUMBER).buffer],\n options: {\n shp: {\n _maxDimensions: 4\n }\n }\n};\n\n/** SHP file loader */\nexport const SHPLoader: LoaderWithParser = {\n ...SHPWorkerLoader,\n parse: async (arrayBuffer, options?) => parseSHP(arrayBuffer, options),\n parseSync: parseSHP,\n parseInBatches: parseSHPInBatches\n};\n"],"file":"shp-loader.js"}
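The added source map embeds the original src/shp-loader.ts, which defines both SHPWorkerLoader (the worker-capable descriptor) and SHPLoader (which adds parse/parseSync/parseInBatches backed by parseSHP). As a rough usage sketch, assuming the standard parse API from @loaders.gl/core and a placeholder URL (neither is shown by this diff itself):

  import {parse} from '@loaders.gl/core';
  import {SHPLoader} from '@loaders.gl/shapefile';

  // Parse a .shp file into geometries; shp._maxDimensions (default 4) caps the XYZM dimensions read.
  const geometries = await parse(fetch('data/example.shp'), SHPLoader, {shp: {_maxDimensions: 4}});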
package/dist/shp-worker.js
CHANGED
@@ -424,7 +424,7 @@
 }

 // src/shp-loader.ts
-var VERSION =
+var VERSION = true ? "4.0.0-alpha.5" : "latest";
 var SHP_MAGIC_NUMBER2 = [0, 0, 39, 10];
 var SHPWorkerLoader = {
 name: "SHP",
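The only change in this worker bundle is the version constant. The 4.0.0-alpha.5 build scripts (see package.json below) pass --define:__VERSION__ to esbuild, so the __VERSION__ identifier is replaced by a string literal and the typeof guard in src/shp-loader.ts constant-folds, which is why the bundled line now reads "var VERSION = true ? "4.0.0-alpha.5" : "latest";". A simplified before/after of that substitution (illustrative, not copied from the bundle):

  // source:  const VERSION = typeof __VERSION__ !== 'undefined' ? __VERSION__ : 'latest';
  // defined: const VERSION = typeof "4.0.0-alpha.5" !== 'undefined' ? "4.0.0-alpha.5" : 'latest';
  // folded:  var VERSION = true ? "4.0.0-alpha.5" : "latest";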
package/dist/workers/dbf-worker.js
CHANGED
@@ -1,5 +1,4 @@
-
-
-
-
-(0, loader_utils_1.createLoaderWorker)(dbf_loader_1.DBFLoader);
+import { DBFLoader } from '../dbf-loader';
+import { createLoaderWorker } from '@loaders.gl/loader-utils';
+createLoaderWorker(DBFLoader);
+//# sourceMappingURL=dbf-worker.js.map
package/dist/workers/dbf-worker.js.map
ADDED
@@ -0,0 +1 @@
{"version":3,"sources":["../../src/workers/dbf-worker.ts"],"names":["DBFLoader","createLoaderWorker"],"mappings":"AAAA,SAAQA,SAAR,QAAwB,eAAxB;AACA,SAAQC,kBAAR,QAAiC,0BAAjC;AAEAA,kBAAkB,CAACD,SAAD,CAAlB","sourcesContent":["import {DBFLoader} from '../dbf-loader';\nimport {createLoaderWorker} from '@loaders.gl/loader-utils';\n\ncreateLoaderWorker(DBFLoader);\n"],"file":"dbf-worker.js"}
package/dist/workers/shp-worker.js
CHANGED
@@ -1,5 +1,4 @@
-
-
-
-
-(0, loader_utils_1.createLoaderWorker)(shp_loader_1.SHPLoader);
+import { SHPLoader } from '../shp-loader';
+import { createLoaderWorker } from '@loaders.gl/loader-utils';
+createLoaderWorker(SHPLoader);
+//# sourceMappingURL=shp-worker.js.map
package/dist/workers/shp-worker.js.map
ADDED
@@ -0,0 +1 @@
{"version":3,"sources":["../../src/workers/shp-worker.ts"],"names":["SHPLoader","createLoaderWorker"],"mappings":"AAAA,SAAQA,SAAR,QAAwB,eAAxB;AACA,SAAQC,kBAAR,QAAiC,0BAAjC;AAEAA,kBAAkB,CAACD,SAAD,CAAlB","sourcesContent":["import {SHPLoader} from '../shp-loader';\nimport {createLoaderWorker} from '@loaders.gl/loader-utils';\n\ncreateLoaderWorker(SHPLoader);\n"],"file":"shp-worker.js"}
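Both worker entry points move from the transpiled CommonJS form to plain ESM; the pattern is identical for DBF and SHP: import the loader object and hand it to createLoaderWorker from @loaders.gl/loader-utils, which wires up the message handling that lets the main thread delegate parsing to this bundle. A custom loader worker would follow the same two-line shape (MyLoader and its path are hypothetical; createLoaderWorker is the real export used above):

  import {createLoaderWorker} from '@loaders.gl/loader-utils';
  import {MyLoader} from '../my-loader'; // hypothetical loader object

  // Registers the message handler that runs MyLoader's parser off the main thread.
  createLoaderWorker(MyLoader);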
package/package.json
CHANGED
@@ -1,7 +1,7 @@
 {
   "name": "@loaders.gl/shapefile",
   "description": "Loader for the Shapefile Format",
-  "version": "
+  "version": "4.0.0-alpha.5",
   "license": "MIT",
   "publishConfig": {
     "access": "public"
@@ -18,8 +18,8 @@
     "shp"
   ],
   "types": "dist/index.d.ts",
-  "main": "dist/
-  "module": "dist/
+  "main": "dist/index.js",
+  "module": "dist/index.js",
   "sideEffects": false,
   "browser": {
     "./src/lib/filesystems/node-filesystem.js": false,
@@ -33,14 +33,14 @@
   "scripts": {
     "pre-build": "npm run build-worker-shp && npm run build-worker-dbf && npm run build-bundle",
     "build-bundle": "esbuild src/bundle.ts --bundle --outfile=dist/dist.min.js",
-    "build-worker-shp": "esbuild src/workers/shp-worker.ts --bundle --outfile=dist/shp-worker.js",
-    "build-worker-dbf": "esbuild src/workers/dbf-worker.ts --bundle --outfile=dist/dbf-worker.js"
+    "build-worker-shp": "esbuild src/workers/shp-worker.ts --bundle --outfile=dist/shp-worker.js --define:__VERSION__=\\\"$npm_package_version\\\"",
+    "build-worker-dbf": "esbuild src/workers/dbf-worker.ts --bundle --outfile=dist/dbf-worker.js --define:__VERSION__=\\\"$npm_package_version\\\""
   },
   "dependencies": {
-    "@loaders.gl/gis": "
-    "@loaders.gl/loader-utils": "
-    "@loaders.gl/schema": "
+    "@loaders.gl/gis": "4.0.0-alpha.5",
+    "@loaders.gl/loader-utils": "4.0.0-alpha.5",
+    "@loaders.gl/schema": "4.0.0-alpha.5",
     "@math.gl/proj4": "^3.5.1"
   },
-  "gitHead": "
+  "gitHead": "7a71a54bdf1ddf985cc3af3db90b82e7fa97d025"
 }
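Besides the version bump and the workspace dependencies pinned to 4.0.0-alpha.5, the notable changes are the entry points and the worker build scripts: "main" and "module" now both point at dist/index.js (the old values are truncated in this view, though the deleted dist/es5 and dist/esm files in the list above suggest they pointed into those per-format builds), and the esbuild worker bundles now inject __VERSION__ from $npm_package_version. Consumers keep importing the same names from the package entry, for example:

  import {ShapefileLoader, DBFLoader, DBFWorkerLoader, SHPLoader, SHPWorkerLoader} from '@loaders.gl/shapefile';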
package/dist/es5/bundle.js
DELETED
package/dist/es5/bundle.js.map
DELETED
@@ -1 +0,0 @@
{"version":3,"sources":["../../src/bundle.ts"],"names":["moduleExports","require","globalThis","loaders","module","exports","Object","assign"],"mappings":";;AACA,IAAMA,aAAa,GAAGC,OAAO,CAAC,SAAD,CAA7B;;AACAC,UAAU,CAACC,OAAX,GAAqBD,UAAU,CAACC,OAAX,IAAsB,EAA3C;AACAC,MAAM,CAACC,OAAP,GAAiBC,MAAM,CAACC,MAAP,CAAcL,UAAU,CAACC,OAAzB,EAAkCH,aAAlC,CAAjB","sourcesContent":["// @ts-nocheck\nconst moduleExports = require('./index');\nglobalThis.loaders = globalThis.loaders || {};\nmodule.exports = Object.assign(globalThis.loaders, moduleExports);\n"],"file":"bundle.js"}
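No hunk is shown for the deleted es5/bundle.js itself, but its deleted source map still embeds the original src/bundle.ts, which shows what the standalone bundle does: it merges the package exports onto a shared globalThis.loaders object. Quoting the embedded source for reference:

  // @ts-nocheck
  const moduleExports = require('./index');
  globalThis.loaders = globalThis.loaders || {};
  module.exports = Object.assign(globalThis.loaders, moduleExports);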
package/dist/es5/dbf-loader.js
DELETED
@@ -1,68 +0,0 @@
-"use strict";
-
-var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault");
-
-Object.defineProperty(exports, "__esModule", {
-value: true
-});
-exports.DBFLoader = exports.DBFWorkerLoader = void 0;
-
-var _regenerator = _interopRequireDefault(require("@babel/runtime/regenerator"));
-
-var _asyncToGenerator2 = _interopRequireDefault(require("@babel/runtime/helpers/asyncToGenerator"));
-
-var _defineProperty2 = _interopRequireDefault(require("@babel/runtime/helpers/defineProperty"));
-
-var _parseDbf = require("./lib/parsers/parse-dbf");
-
-function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); if (enumerableOnly) { symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; }); } keys.push.apply(keys, symbols); } return keys; }
-
-function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i] != null ? arguments[i] : {}; if (i % 2) { ownKeys(Object(source), true).forEach(function (key) { (0, _defineProperty2.default)(target, key, source[key]); }); } else if (Object.getOwnPropertyDescriptors) { Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)); } else { ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } } return target; }
-
-var VERSION = typeof "3.1.3" !== 'undefined' ? "3.1.3" : 'latest';
-var DBFWorkerLoader = {
-name: 'DBF',
-id: 'dbf',
-module: 'shapefile',
-version: VERSION,
-worker: true,
-category: 'table',
-extensions: ['dbf'],
-mimeTypes: ['application/x-dbf'],
-options: {
-dbf: {
-encoding: 'latin1'
-}
-}
-};
-exports.DBFWorkerLoader = DBFWorkerLoader;
-
-var DBFLoader = _objectSpread(_objectSpread({}, DBFWorkerLoader), {}, {
-parse: function () {
-var _parse = (0, _asyncToGenerator2.default)(_regenerator.default.mark(function _callee(arrayBuffer, options) {
-return _regenerator.default.wrap(function _callee$(_context) {
-while (1) {
-switch (_context.prev = _context.next) {
-case 0:
-return _context.abrupt("return", (0, _parseDbf.parseDBF)(arrayBuffer, options));
-
-case 1:
-case "end":
-return _context.stop();
-}
-}
-}, _callee);
-}));
-
-function parse(_x, _x2) {
-return _parse.apply(this, arguments);
-}
-
-return parse;
-}(),
-parseSync: _parseDbf.parseDBF,
-parseInBatches: _parseDbf.parseDBFInBatches
-});
-
-exports.DBFLoader = DBFLoader;
-//# sourceMappingURL=dbf-loader.js.map
package/dist/es5/dbf-loader.js.map
DELETED
@@ -1 +0,0 @@
{"version":3,"sources":["../../src/dbf-loader.ts"],"names":["VERSION","DBFWorkerLoader","name","id","module","version","worker","category","extensions","mimeTypes","options","dbf","encoding","DBFLoader","parse","arrayBuffer","parseSync","parseDBF","parseInBatches","parseDBFInBatches"],"mappings":";;;;;;;;;;;;;;;AACA;;;;;;AAIA,IAAMA,OAAO,GAAG,mBAAuB,WAAvB,aAAmD,QAAnE;AAKO,IAAMC,eAAuB,GAAG;AACrCC,EAAAA,IAAI,EAAE,KAD+B;AAErCC,EAAAA,EAAE,EAAE,KAFiC;AAGrCC,EAAAA,MAAM,EAAE,WAH6B;AAIrCC,EAAAA,OAAO,EAAEL,OAJ4B;AAKrCM,EAAAA,MAAM,EAAE,IAL6B;AAMrCC,EAAAA,QAAQ,EAAE,OAN2B;AAOrCC,EAAAA,UAAU,EAAE,CAAC,KAAD,CAPyB;AAQrCC,EAAAA,SAAS,EAAE,CAAC,mBAAD,CAR0B;AASrCC,EAAAA,OAAO,EAAE;AACPC,IAAAA,GAAG,EAAE;AACHC,MAAAA,QAAQ,EAAE;AADP;AADE;AAT4B,CAAhC;;;AAiBA,IAAMC,SAA2B,mCACnCZ,eADmC;AAEtCa,EAAAA,KAAK;AAAA,2EAAE,iBAAOC,WAAP,EAAoBL,OAApB;AAAA;AAAA;AAAA;AAAA;AAAA,+CAAgC,wBAASK,WAAT,EAAsBL,OAAtB,CAAhC;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,KAAF;;AAAA;AAAA;AAAA;;AAAA;AAAA,KAFiC;AAGtCM,EAAAA,SAAS,EAAEC,kBAH2B;AAItCC,EAAAA,cAAc,EAAEC;AAJsB,EAAjC","sourcesContent":["import type {Loader, LoaderWithParser} from '@loaders.gl/loader-utils';\nimport {parseDBF, parseDBFInBatches} from './lib/parsers/parse-dbf';\n\n// __VERSION__ is injected by babel-plugin-version-inline\n// @ts-ignore TS2304: Cannot find name '__VERSION__'.\nconst VERSION = typeof __VERSION__ !== 'undefined' ? __VERSION__ : 'latest';\n\n/**\n * DBFLoader - DBF files are used to contain non-geometry columns in Shapefiles\n */\nexport const DBFWorkerLoader: Loader = {\n name: 'DBF',\n id: 'dbf',\n module: 'shapefile',\n version: VERSION,\n worker: true,\n category: 'table',\n extensions: ['dbf'],\n mimeTypes: ['application/x-dbf'],\n options: {\n dbf: {\n encoding: 'latin1'\n }\n }\n};\n\n/** DBF file loader */\nexport const DBFLoader: LoaderWithParser = {\n ...DBFWorkerLoader,\n parse: async (arrayBuffer, options) => parseDBF(arrayBuffer, options),\n parseSync: parseDBF,\n parseInBatches: parseDBFInBatches\n};\n"],"file":"dbf-loader.js"}
package/dist/es5/index.js
DELETED
@@ -1,42 +0,0 @@
-"use strict";
-
-Object.defineProperty(exports, "__esModule", {
-value: true
-});
-Object.defineProperty(exports, "ShapefileLoader", {
-enumerable: true,
-get: function get() {
-return _shapefileLoader.ShapefileLoader;
-}
-});
-Object.defineProperty(exports, "DBFLoader", {
-enumerable: true,
-get: function get() {
-return _dbfLoader.DBFLoader;
-}
-});
-Object.defineProperty(exports, "DBFWorkerLoader", {
-enumerable: true,
-get: function get() {
-return _dbfLoader.DBFWorkerLoader;
-}
-});
-Object.defineProperty(exports, "SHPLoader", {
-enumerable: true,
-get: function get() {
-return _shpLoader.SHPLoader;
-}
-});
-Object.defineProperty(exports, "SHPWorkerLoader", {
-enumerable: true,
-get: function get() {
-return _shpLoader.SHPWorkerLoader;
-}
-});
-
-var _shapefileLoader = require("./shapefile-loader");
-
-var _dbfLoader = require("./dbf-loader");
-
-var _shpLoader = require("./shp-loader");
-//# sourceMappingURL=index.js.map
package/dist/es5/index.js.map
DELETED
@@ -1 +0,0 @@
{"version":3,"sources":["../../src/index.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;;AACA;;AACA","sourcesContent":["export {ShapefileLoader} from './shapefile-loader';\nexport {DBFLoader, DBFWorkerLoader} from './dbf-loader';\nexport {SHPLoader, SHPWorkerLoader} from './shp-loader';\n"],"file":"index.js"}
package/dist/es5/lib/parsers/parse-dbf.js
DELETED
@@ -1,454 +0,0 @@
-"use strict";
-
-var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault");
-
-Object.defineProperty(exports, "__esModule", {
-value: true
-});
-exports.parseDBF = parseDBF;
-exports.parseDBFInBatches = parseDBFInBatches;
-
-var _regenerator = _interopRequireDefault(require("@babel/runtime/regenerator"));
-
-var _classCallCheck2 = _interopRequireDefault(require("@babel/runtime/helpers/classCallCheck"));
-
-var _createClass2 = _interopRequireDefault(require("@babel/runtime/helpers/createClass"));
-
-var _defineProperty2 = _interopRequireDefault(require("@babel/runtime/helpers/defineProperty"));
-
-var _awaitAsyncGenerator2 = _interopRequireDefault(require("@babel/runtime/helpers/awaitAsyncGenerator"));
-
-var _wrapAsyncGenerator2 = _interopRequireDefault(require("@babel/runtime/helpers/wrapAsyncGenerator"));
-
-var _asyncIterator2 = _interopRequireDefault(require("@babel/runtime/helpers/asyncIterator"));
-
-var _schema = require("@loaders.gl/schema");
-
-var _binaryChunkReader = _interopRequireDefault(require("../streaming/binary-chunk-reader"));
-
-function _createForOfIteratorHelper(o, allowArrayLike) { var it = typeof Symbol !== "undefined" && o[Symbol.iterator] || o["@@iterator"]; if (!it) { if (Array.isArray(o) || (it = _unsupportedIterableToArray(o)) || allowArrayLike && o && typeof o.length === "number") { if (it) o = it; var i = 0; var F = function F() {}; return { s: F, n: function n() { if (i >= o.length) return { done: true }; return { done: false, value: o[i++] }; }, e: function e(_e) { throw _e; }, f: F }; } throw new TypeError("Invalid attempt to iterate non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method."); } var normalCompletion = true, didErr = false, err; return { s: function s() { it = it.call(o); }, n: function n() { var step = it.next(); normalCompletion = step.done; return step; }, e: function e(_e2) { didErr = true; err = _e2; }, f: function f() { try { if (!normalCompletion && it.return != null) it.return(); } finally { if (didErr) throw err; } } }; }
-
-function _unsupportedIterableToArray(o, minLen) { if (!o) return; if (typeof o === "string") return _arrayLikeToArray(o, minLen); var n = Object.prototype.toString.call(o).slice(8, -1); if (n === "Object" && o.constructor) n = o.constructor.name; if (n === "Map" || n === "Set") return Array.from(o); if (n === "Arguments" || /^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n)) return _arrayLikeToArray(o, minLen); }
-
-function _arrayLikeToArray(arr, len) { if (len == null || len > arr.length) len = arr.length; for (var i = 0, arr2 = new Array(len); i < len; i++) { arr2[i] = arr[i]; } return arr2; }
-
-var LITTLE_ENDIAN = true;
-var DBF_HEADER_SIZE = 32;
-var STATE;
-
-(function (STATE) {
-STATE[STATE["START"] = 0] = "START";
-STATE[STATE["FIELD_DESCRIPTORS"] = 1] = "FIELD_DESCRIPTORS";
-STATE[STATE["FIELD_PROPERTIES"] = 2] = "FIELD_PROPERTIES";
-STATE[STATE["END"] = 3] = "END";
-STATE[STATE["ERROR"] = 4] = "ERROR";
-})(STATE || (STATE = {}));
-
-var DBFParser = function () {
-function DBFParser(options) {
-(0, _classCallCheck2.default)(this, DBFParser);
-(0, _defineProperty2.default)(this, "binaryReader", new _binaryChunkReader.default());
-(0, _defineProperty2.default)(this, "textDecoder", void 0);
-(0, _defineProperty2.default)(this, "state", STATE.START);
-(0, _defineProperty2.default)(this, "result", {
-data: []
-});
-this.textDecoder = new TextDecoder(options.encoding);
-}
-
-(0, _createClass2.default)(DBFParser, [{
-key: "write",
-value: function write(arrayBuffer) {
-this.binaryReader.write(arrayBuffer);
-this.state = parseState(this.state, this.result, this.binaryReader, this.textDecoder);
-}
-}, {
-key: "end",
-value: function end() {
-this.binaryReader.end();
-this.state = parseState(this.state, this.result, this.binaryReader, this.textDecoder);
-
-if (this.state !== STATE.END) {
-this.state = STATE.ERROR;
-this.result.error = 'DBF incomplete file';
-}
-}
-}]);
-return DBFParser;
-}();
-
-function parseDBF(arrayBuffer) {
-var options = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
-var loaderOptions = options.dbf || {};
-var encoding = loaderOptions.encoding;
-var dbfParser = new DBFParser({
-encoding: encoding
-});
-dbfParser.write(arrayBuffer);
-dbfParser.end();
-var _dbfParser$result = dbfParser.result,
-data = _dbfParser$result.data,
-schema = _dbfParser$result.schema;
-
-switch (options.tables && options.tables.format) {
-case 'table':
-return {
-schema: schema,
-rows: data
-};
-
-case 'rows':
-default:
-return data;
-}
-}
-
-function parseDBFInBatches(_x) {
-return _parseDBFInBatches.apply(this, arguments);
-}
-
-function _parseDBFInBatches() {
-_parseDBFInBatches = (0, _wrapAsyncGenerator2.default)(_regenerator.default.mark(function _callee(asyncIterator) {
-var options,
-loaderOptions,
-encoding,
-parser,
-headerReturned,
-_iteratorNormalCompletion,
-_didIteratorError,
-_iteratorError,
-_iterator,
-_step,
-_value,
-arrayBuffer,
-_args = arguments;
-
-return _regenerator.default.wrap(function _callee$(_context) {
-while (1) {
-switch (_context.prev = _context.next) {
-case 0:
-options = _args.length > 1 && _args[1] !== undefined ? _args[1] : {};
-loaderOptions = options.dbf || {};
-encoding = loaderOptions.encoding;
-parser = new DBFParser({
-encoding: encoding
-});
-headerReturned = false;
-_iteratorNormalCompletion = true;
-_didIteratorError = false;
-_context.prev = 7;
-_iterator = (0, _asyncIterator2.default)(asyncIterator);
-
-case 9:
-_context.next = 11;
-return (0, _awaitAsyncGenerator2.default)(_iterator.next());
-
-case 11:
-_step = _context.sent;
-_iteratorNormalCompletion = _step.done;
-_context.next = 15;
-return (0, _awaitAsyncGenerator2.default)(_step.value);
-
-case 15:
-_value = _context.sent;
-
-if (_iteratorNormalCompletion) {
-_context.next = 30;
-break;
-}
-
-arrayBuffer = _value;
-parser.write(arrayBuffer);
-
-if (!(!headerReturned && parser.result.dbfHeader)) {
-_context.next = 23;
-break;
-}
-
-headerReturned = true;
-_context.next = 23;
-return parser.result.dbfHeader;
-
-case 23:
-if (!(parser.result.data.length > 0)) {
-_context.next = 27;
-break;
-}
-
-_context.next = 26;
-return parser.result.data;
-
-case 26:
-parser.result.data = [];
-
-case 27:
-_iteratorNormalCompletion = true;
-_context.next = 9;
-break;
-
-case 30:
-_context.next = 36;
-break;
-
-case 32:
-_context.prev = 32;
-_context.t0 = _context["catch"](7);
-_didIteratorError = true;
-_iteratorError = _context.t0;
-
-case 36:
-_context.prev = 36;
-_context.prev = 37;
-
-if (!(!_iteratorNormalCompletion && _iterator.return != null)) {
-_context.next = 41;
-break;
-}
-
-_context.next = 41;
-return (0, _awaitAsyncGenerator2.default)(_iterator.return());
-
-case 41:
-_context.prev = 41;
-
-if (!_didIteratorError) {
-_context.next = 44;
-break;
-}
-
-throw _iteratorError;
-
-case 44:
-return _context.finish(41);
-
-case 45:
-return _context.finish(36);
-
-case 46:
-parser.end();
-
-if (!(parser.result.data.length > 0)) {
-_context.next = 50;
-break;
-}
-
-_context.next = 50;
-return parser.result.data;
-
-case 50:
-case "end":
-return _context.stop();
-}
-}
-}, _callee, null, [[7, 32, 36, 46], [37,, 41, 45]]);
-}));
-return _parseDBFInBatches.apply(this, arguments);
-}
-
-function parseState(state, result, binaryReader, textDecoder) {
-while (true) {
-try {
-switch (state) {
-case STATE.ERROR:
-case STATE.END:
-return state;
-
-case STATE.START:
-var dataView = binaryReader.getDataView(DBF_HEADER_SIZE, 'DBF header');
-
-if (!dataView) {
-return state;
-}
-
-result.dbfHeader = parseDBFHeader(dataView);
-result.progress = {
-bytesUsed: 0,
-rowsTotal: result.dbfHeader.nRecords,
-rows: 0
-};
-state = STATE.FIELD_DESCRIPTORS;
-break;
-
-case STATE.FIELD_DESCRIPTORS:
-var fieldDescriptorView = binaryReader.getDataView(result.dbfHeader.headerLength - DBF_HEADER_SIZE, 'DBF field descriptors');
-
-if (!fieldDescriptorView) {
-return state;
-}
-
-result.dbfFields = parseFieldDescriptors(fieldDescriptorView, textDecoder);
-result.schema = new _schema.Schema(result.dbfFields.map(function (dbfField) {
-return makeField(dbfField);
-}));
-state = STATE.FIELD_PROPERTIES;
-binaryReader.skip(1);
-break;
-
-case STATE.FIELD_PROPERTIES:
-var _ref = (result === null || result === void 0 ? void 0 : result.dbfHeader) || {},
-_ref$recordLength = _ref.recordLength,
-recordLength = _ref$recordLength === void 0 ? 0 : _ref$recordLength,
-_ref$nRecords = _ref.nRecords,
-nRecords = _ref$nRecords === void 0 ? 0 : _ref$nRecords;
-
-while (result.data.length < nRecords) {
-var recordView = binaryReader.getDataView(recordLength - 1);
-
-if (!recordView) {
-return state;
-}
-
-binaryReader.skip(1);
-var row = parseRow(recordView, result.dbfFields, textDecoder);
-result.data.push(row);
-result.progress.rows = result.data.length;
-}
-
-state = STATE.END;
-break;
-
-default:
-state = STATE.ERROR;
-result.error = "illegal parser state ".concat(state);
-return state;
-}
-} catch (error) {
-state = STATE.ERROR;
-result.error = "DBF parsing failed: ".concat(error.message);
-return state;
-}
-}
-}
-
-function parseDBFHeader(headerView) {
-return {
-year: headerView.getUint8(1) + 1900,
-month: headerView.getUint8(2),
-day: headerView.getUint8(3),
-nRecords: headerView.getUint32(4, LITTLE_ENDIAN),
-headerLength: headerView.getUint16(8, LITTLE_ENDIAN),
-recordLength: headerView.getUint16(10, LITTLE_ENDIAN),
-languageDriver: headerView.getUint8(29)
-};
-}
-
-function parseFieldDescriptors(view, textDecoder) {
-var nFields = (view.byteLength - 1) / 32;
-var fields = [];
-var offset = 0;
-
-for (var i = 0; i < nFields; i++) {
-var name = textDecoder.decode(new Uint8Array(view.buffer, view.byteOffset + offset, 11)).replace(/\u0000/g, '');
-fields.push({
-name: name,
-dataType: String.fromCharCode(view.getUint8(offset + 11)),
-fieldLength: view.getUint8(offset + 16),
-decimal: view.getUint8(offset + 17)
-});
-offset += 32;
-}
-
-return fields;
-}
-
-function parseRow(view, fields, textDecoder) {
-var out = {};
-var offset = 0;
-
-var _iterator2 = _createForOfIteratorHelper(fields),
-_step2;
-
-try {
-for (_iterator2.s(); !(_step2 = _iterator2.n()).done;) {
-var field = _step2.value;
-var text = textDecoder.decode(new Uint8Array(view.buffer, view.byteOffset + offset, field.fieldLength));
-out[field.name] = parseField(text, field.dataType);
-offset += field.fieldLength;
-}
-} catch (err) {
-_iterator2.e(err);
-} finally {
-_iterator2.f();
-}
-
-return out;
-}
-
-function parseField(text, dataType) {
-switch (dataType) {
-case 'B':
-return parseNumber(text);
-
-case 'C':
-return parseCharacter(text);
-
-case 'F':
-return parseNumber(text);
-
-case 'N':
-return parseNumber(text);
-
-case 'O':
-return parseNumber(text);
-
-case 'D':
-return parseDate(text);
-
-case 'L':
-return parseBoolean(text);
-
-default:
-throw new Error('Unsupported data type');
-}
-}
-
-function parseDate(str) {
-return Date.UTC(str.slice(0, 4), parseInt(str.slice(4, 6), 10) - 1, str.slice(6, 8));
-}
-
-function parseBoolean(value) {
-return /^[nf]$/i.test(value) ? false : /^[yt]$/i.test(value) ? true : null;
-}
-
-function parseNumber(text) {
-var number = parseFloat(text);
-return isNaN(number) ? null : number;
-}
-
-function parseCharacter(text) {
-return text.trim() || null;
-}
-
-function makeField(_ref2) {
-var name = _ref2.name,
-dataType = _ref2.dataType,
-fieldLength = _ref2.fieldLength,
-decimal = _ref2.decimal;
-
-switch (dataType) {
-case 'B':
-return new _schema.Field(name, new _schema.Float64(), true);
-
-case 'C':
-return new _schema.Field(name, new _schema.Utf8(), true);
-
-case 'F':
-return new _schema.Field(name, new _schema.Float64(), true);
-
-case 'N':
-return new _schema.Field(name, new _schema.Float64(), true);
-
-case 'O':
-return new _schema.Field(name, new _schema.Float64(), true);
-
-case 'D':
-return new _schema.Field(name, new _schema.TimestampMillisecond(), true);
-
-case 'L':
-return new _schema.Field(name, new _schema.Bool(), true);
-
-default:
-throw new Error('Unsupported data type');
-}
-}
-//# sourceMappingURL=parse-dbf.js.map