@loaders.gl/parquet 3.3.2 → 3.4.0-alpha.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/dist.min.js +16 -16
- package/dist/dist.min.js.map +3 -3
- package/dist/es5/index.js +46 -5
- package/dist/es5/index.js.map +1 -1
- package/dist/es5/lib/arrow/convert-columns-to-row-group.js +2 -0
- package/dist/es5/lib/arrow/convert-columns-to-row-group.js.map +1 -0
- package/dist/es5/lib/arrow/convert-row-group-to-columns.js +20 -0
- package/dist/es5/lib/arrow/convert-row-group-to-columns.js.map +1 -0
- package/dist/es5/lib/arrow/convert-schema-from-parquet.js +98 -0
- package/dist/es5/lib/arrow/convert-schema-from-parquet.js.map +1 -0
- package/dist/es5/lib/{convert-schema.js → arrow/convert-schema-to-parquet.js} +5 -31
- package/dist/es5/lib/arrow/convert-schema-to-parquet.js.map +1 -0
- package/dist/es5/lib/geo/decode-geo-metadata.js +82 -0
- package/dist/es5/lib/geo/decode-geo-metadata.js.map +1 -0
- package/dist/es5/lib/geo/geoparquet-schema.js +83 -0
- package/dist/es5/lib/geo/geoparquet-schema.js.map +1 -0
- package/dist/es5/lib/parsers/parse-parquet-to-columns.js +177 -0
- package/dist/es5/lib/parsers/parse-parquet-to-columns.js.map +1 -0
- package/dist/es5/lib/{parse-parquet.js → parsers/parse-parquet-to-rows.js} +2 -2
- package/dist/es5/lib/parsers/parse-parquet-to-rows.js.map +1 -0
- package/dist/es5/lib/wip/convert-schema-deep.rs.disabled +976 -0
- package/dist/es5/parquet-loader.js +3 -2
- package/dist/es5/parquet-loader.js.map +1 -1
- package/dist/es5/parquet-wasm-loader.js +1 -1
- package/dist/es5/parquet-wasm-loader.js.map +1 -1
- package/dist/es5/parquet-wasm-writer.js +1 -1
- package/dist/es5/parquet-wasm-writer.js.map +1 -1
- package/dist/es5/parquet-writer.js +1 -1
- package/dist/es5/parquet-writer.js.map +1 -1
- package/dist/esm/index.js +12 -2
- package/dist/esm/index.js.map +1 -1
- package/dist/esm/lib/arrow/convert-columns-to-row-group.js +2 -0
- package/dist/esm/lib/arrow/convert-columns-to-row-group.js.map +1 -0
- package/dist/esm/lib/arrow/convert-row-group-to-columns.js +10 -0
- package/dist/esm/lib/arrow/convert-row-group-to-columns.js.map +1 -0
- package/dist/esm/lib/{convert-schema.js → arrow/convert-schema-from-parquet.js} +32 -16
- package/dist/esm/lib/arrow/convert-schema-from-parquet.js.map +1 -0
- package/dist/esm/lib/arrow/convert-schema-to-parquet.js +40 -0
- package/dist/esm/lib/arrow/convert-schema-to-parquet.js.map +1 -0
- package/dist/esm/lib/geo/decode-geo-metadata.js +64 -0
- package/dist/esm/lib/geo/decode-geo-metadata.js.map +1 -0
- package/dist/esm/lib/geo/geoparquet-schema.js +78 -0
- package/dist/esm/lib/geo/geoparquet-schema.js.map +1 -0
- package/dist/esm/lib/parsers/parse-parquet-to-columns.js +37 -0
- package/dist/esm/lib/parsers/parse-parquet-to-columns.js.map +1 -0
- package/dist/esm/lib/{parse-parquet.js → parsers/parse-parquet-to-rows.js} +2 -2
- package/dist/esm/lib/parsers/parse-parquet-to-rows.js.map +1 -0
- package/dist/esm/lib/wip/convert-schema-deep.rs.disabled +976 -0
- package/dist/esm/parquet-loader.js +3 -2
- package/dist/esm/parquet-loader.js.map +1 -1
- package/dist/esm/parquet-wasm-loader.js +1 -1
- package/dist/esm/parquet-wasm-loader.js.map +1 -1
- package/dist/esm/parquet-wasm-writer.js +1 -1
- package/dist/esm/parquet-wasm-writer.js.map +1 -1
- package/dist/esm/parquet-writer.js +1 -1
- package/dist/esm/parquet-writer.js.map +1 -1
- package/dist/index.d.ts +23 -3
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +24 -6
- package/dist/lib/arrow/convert-columns-to-row-group.d.ts +1 -0
- package/dist/lib/arrow/convert-columns-to-row-group.d.ts.map +1 -0
- package/dist/lib/arrow/convert-columns-to-row-group.js +1 -0
- package/dist/lib/arrow/convert-row-group-to-columns.d.ts +4 -0
- package/dist/lib/arrow/convert-row-group-to-columns.d.ts.map +1 -0
- package/dist/lib/arrow/convert-row-group-to-columns.js +12 -0
- package/dist/lib/arrow/convert-schema-from-parquet.d.ts +9 -0
- package/dist/lib/arrow/convert-schema-from-parquet.d.ts.map +1 -0
- package/dist/lib/{convert-schema.js → arrow/convert-schema-from-parquet.js} +30 -18
- package/dist/lib/arrow/convert-schema-to-parquet.d.ts +7 -0
- package/dist/lib/arrow/convert-schema-to-parquet.d.ts.map +1 -0
- package/dist/lib/arrow/convert-schema-to-parquet.js +72 -0
- package/dist/lib/geo/decode-geo-metadata.d.ts +31 -0
- package/dist/lib/geo/decode-geo-metadata.d.ts.map +1 -0
- package/dist/lib/geo/decode-geo-metadata.js +73 -0
- package/dist/lib/geo/geoparquet-schema.d.ts +80 -0
- package/dist/lib/geo/geoparquet-schema.d.ts.map +1 -0
- package/dist/lib/geo/geoparquet-schema.js +69 -0
- package/dist/lib/parsers/parse-parquet-to-columns.d.ts +5 -0
- package/dist/lib/parsers/parse-parquet-to-columns.d.ts.map +1 -0
- package/dist/lib/parsers/parse-parquet-to-columns.js +40 -0
- package/dist/lib/parsers/parse-parquet-to-rows.d.ts +4 -0
- package/dist/lib/parsers/parse-parquet-to-rows.d.ts.map +1 -0
- package/dist/lib/{parse-parquet.js → parsers/parse-parquet-to-rows.js} +1 -1
- package/dist/parquet-loader.d.ts +1 -0
- package/dist/parquet-loader.d.ts.map +1 -1
- package/dist/parquet-loader.js +2 -1
- package/dist/parquet-worker.js +19 -19
- package/dist/parquet-worker.js.map +3 -3
- package/package.json +5 -5
- package/src/index.ts +22 -2
- package/src/lib/arrow/convert-columns-to-row-group.ts +0 -0
- package/src/lib/arrow/convert-row-group-to-columns.ts +15 -0
- package/src/lib/{convert-schema.ts → arrow/convert-schema-from-parquet.ts} +41 -22
- package/src/lib/arrow/convert-schema-to-parquet.ts +102 -0
- package/src/lib/geo/decode-geo-metadata.ts +99 -0
- package/src/lib/geo/geoparquet-schema.ts +69 -0
- package/src/lib/parsers/parse-parquet-to-columns.ts +49 -0
- package/src/lib/{parse-parquet.ts → parsers/parse-parquet-to-rows.ts} +2 -2
- package/src/lib/wip/convert-schema-deep.rs.disabled +976 -0
- package/src/parquet-loader.ts +3 -1
- package/dist/es5/lib/convert-schema.js.map +0 -1
- package/dist/es5/lib/parse-parquet.js.map +0 -1
- package/dist/es5/lib/read-array-buffer.js +0 -43
- package/dist/es5/lib/read-array-buffer.js.map +0 -1
- package/dist/esm/lib/convert-schema.js.map +0 -1
- package/dist/esm/lib/parse-parquet.js.map +0 -1
- package/dist/esm/lib/read-array-buffer.js +0 -10
- package/dist/esm/lib/read-array-buffer.js.map +0 -1
- package/dist/lib/convert-schema.d.ts +0 -8
- package/dist/lib/convert-schema.d.ts.map +0 -1
- package/dist/lib/parse-parquet.d.ts +0 -4
- package/dist/lib/parse-parquet.d.ts.map +0 -1
- package/dist/lib/read-array-buffer.d.ts +0 -19
- package/dist/lib/read-array-buffer.d.ts.map +0 -1
- package/dist/lib/read-array-buffer.js +0 -29
- package/src/lib/read-array-buffer.ts +0 -31
- /package/dist/es5/lib/{convert-schema-deep.ts.disabled → wip/convert-schema-deep.java.disabled} +0 -0
- /package/dist/esm/lib/{convert-schema-deep.ts.disabled → wip/convert-schema-deep.java.disabled} +0 -0
- /package/src/lib/{convert-schema-deep.ts.disabled → wip/convert-schema-deep.java.disabled} +0 -0
package/src/parquet-loader.ts
CHANGED
@@ -9,6 +9,7 @@ export type ParquetLoaderOptions = LoaderOptions & {
     type?: 'object-row-table';
     url?: string;
     columnList?: string[] | string[][];
+    geoparquet?: boolean;
   };
 };
 
@@ -16,7 +17,8 @@ const DEFAULT_PARQUET_LOADER_OPTIONS: ParquetLoaderOptions = {
   parquet: {
     type: 'object-row-table',
     url: undefined,
-    columnList: []
+    columnList: [],
+    geoparquet: true
   }
 };
 
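The only change rendered for this file is the new `geoparquet` option, which defaults to `true`. A minimal usage sketch, assuming the standard loaders.gl `load()` entry point; the file name is a placeholder and the exact effect of toggling the option is not shown in this diff:

import {load} from '@loaders.gl/core';
import {ParquetLoader} from '@loaders.gl/parquet';

// Load a (Geo)Parquet file as an object-row table.
// `geoparquet` is new in 3.4.0-alpha and defaults to true per the diff above;
// setting it to false would presumably skip GeoParquet metadata handling.
const table = await load('data/example.parquet', ParquetLoader, {
  parquet: {
    geoparquet: true
  }
});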
package/dist/es5/lib/convert-schema.js.map
DELETED
(single-line generated source map)
package/dist/es5/lib/parse-parquet.js.map
DELETED
(single-line generated source map)
package/dist/es5/lib/read-array-buffer.js
DELETED
@@ -1,43 +0,0 @@
-"use strict";
-
-var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault");
-Object.defineProperty(exports, "__esModule", {
-  value: true
-});
-exports.readArrayBuffer = readArrayBuffer;
-var _regenerator = _interopRequireDefault(require("@babel/runtime/regenerator"));
-var _asyncToGenerator2 = _interopRequireDefault(require("@babel/runtime/helpers/asyncToGenerator"));
-function readArrayBuffer(_x, _x2, _x3) {
-  return _readArrayBuffer.apply(this, arguments);
-}
-function _readArrayBuffer() {
-  _readArrayBuffer = (0, _asyncToGenerator2.default)(_regenerator.default.mark(function _callee(file, start, length) {
-    var slice;
-    return _regenerator.default.wrap(function _callee$(_context) {
-      while (1) {
-        switch (_context.prev = _context.next) {
-          case 0:
-            if (!(file instanceof Blob)) {
-              _context.next = 5;
-              break;
-            }
-            slice = file.slice(start, start + length);
-            _context.next = 4;
-            return slice.arrayBuffer();
-          case 4:
-            return _context.abrupt("return", _context.sent);
-          case 5:
-            _context.next = 7;
-            return file.read(start, start + length);
-          case 7:
-            return _context.abrupt("return", _context.sent);
-          case 8:
-          case "end":
-            return _context.stop();
-        }
-      }
-    }, _callee);
-  }));
-  return _readArrayBuffer.apply(this, arguments);
-}
-//# sourceMappingURL=read-array-buffer.js.map
package/dist/es5/lib/read-array-buffer.js.map
DELETED
(single-line generated source map)
package/dist/esm/lib/convert-schema.js.map
DELETED
(single-line generated source map)
package/dist/esm/lib/parse-parquet.js.map
DELETED
(single-line generated source map)
package/dist/esm/lib/read-array-buffer.js
DELETED
@@ -1,10 +0,0 @@
-
-
-export async function readArrayBuffer(file, start, length) {
-  if (file instanceof Blob) {
-    const slice = file.slice(start, start + length);
-    return await slice.arrayBuffer();
-  }
-  return await file.read(start, start + length);
-}
-//# sourceMappingURL=read-array-buffer.js.map
package/dist/esm/lib/read-array-buffer.js.map
DELETED
(single-line generated source map)
package/dist/lib/convert-schema.d.ts
DELETED
@@ -1,8 +0,0 @@
-import type { ParquetSchema } from '../parquetjs/schema/schema';
-import type { ParquetType } from '../parquetjs/schema/declare';
-import { Schema, DataType } from '@loaders.gl/schema';
-export declare const PARQUET_TYPE_MAPPING: {
-    [type in ParquetType]: typeof DataType;
-};
-export declare function convertParquetToArrowSchema(parquetSchema: ParquetSchema): Schema;
-//# sourceMappingURL=convert-schema.d.ts.map
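These removed declarations belonged to the old lib/convert-schema module; per the file list it has been split into lib/arrow/convert-schema-from-parquet and lib/arrow/convert-schema-to-parquet. For context, a condensed, hypothetical sketch of what the removed mapping did (only a few representative type entries are shown, and the flat field-definition parameter is a simplification of the real ParquetSchema input):

import {Schema, Field, Bool, Int32, Float32, Float64, Utf8, Binary} from '@loaders.gl/schema';

// Parquet physical/converted types mapped to loaders.gl schema type constructors (abbreviated).
const PARQUET_TYPE_MAPPING: Record<string, any> = {
  BOOLEAN: Bool,
  INT32: Int32,
  FLOAT: Float32,
  DOUBLE: Float64,
  UTF8: Utf8,
  BYTE_ARRAY: Binary
};

// Build an Arrow-style Schema by mapping each Parquet field to a typed Field.
function convertParquetToArrowSchema(
  parquetFields: Record<string, {type: string; optional?: boolean}>
): Schema {
  const fields = Object.keys(parquetFields).map((name) => {
    const field = parquetFields[name];
    const FieldType = PARQUET_TYPE_MAPPING[field.type];
    return new Field(name, new FieldType(), field.optional);
  });
  return new Schema(fields);
}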
package/dist/lib/convert-schema.d.ts.map
DELETED
(single-line generated source map)
package/dist/lib/parse-parquet.d.ts
DELETED
@@ -1,4 +0,0 @@
-import type { ParquetLoaderOptions } from '../parquet-loader';
-export declare function parseParquet(arrayBuffer: ArrayBuffer, options?: ParquetLoaderOptions): Promise<import("../parquetjs/schema/declare").ParquetRecord[] | null>;
-export declare function parseParquetFileInBatches(blob: Blob, options?: ParquetLoaderOptions): AsyncGenerator<import("../parquetjs/schema/declare").ParquetRecord[], void, unknown>;
-//# sourceMappingURL=parse-parquet.d.ts.map
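Per the file list, lib/parse-parquet.ts was renamed to lib/parsers/parse-parquet-to-rows.ts rather than dropped, so row-oriented parsing survives under a new path. For reference, the 3.3.2 implementation behind these removed declarations was roughly the following (reconstructed from the deleted build output; a sketch of the old behavior, not the new 3.4.0-alpha code):

import {makeReadableFile} from '@loaders.gl/loader-utils';
import type {ParquetLoaderOptions} from '../parquet-loader';
import {ParquetReader} from '../parquetjs/parser/parquet-reader';

// Parse an entire ArrayBuffer by wrapping it in a Blob and returning the first row batch.
export async function parseParquet(arrayBuffer: ArrayBuffer, options?: ParquetLoaderOptions) {
  const blob = new Blob([arrayBuffer]);
  for await (const batch of parseParquetFileInBatches(blob, options)) {
    return batch;
  }
  return null;
}

// Stream row batches from a Blob via the bundled parquetjs reader.
export async function* parseParquetFileInBatches(blob: Blob, options?: ParquetLoaderOptions) {
  const file = makeReadableFile(blob);
  const reader = new ParquetReader(file);
  for await (const rows of reader.rowBatchIterator(options?.parquet)) {
    yield rows;
  }
}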
package/dist/lib/parse-parquet.d.ts.map
DELETED
(single-line generated source map)
package/dist/lib/read-array-buffer.d.ts
DELETED
@@ -1,19 +0,0 @@
-export declare function readArrayBuffer(file: Blob | ArrayBuffer | any, start: number, length: number): Promise<ArrayBuffer>;
-/**
- * Read a slice of a Blob or File, without loading the entire file into memory
- * The trick when reading File objects is to read successive "slices" of the File
- * Per spec https://w3c.github.io/FileAPI/, slicing a File only updates the start and end fields
- * Actually reading from file happens in `readAsArrayBuffer`
- * @param blob to read
-export async function readBlob(blob: Blob): Promise<ArrayBuffer> {
-  return await new Promise((resolve, reject) => {
-    const fileReader = new FileReader();
-    fileReader.onload = (event: ProgressEvent<FileReader>) =>
-      resolve(event?.target?.result as ArrayBuffer);
-    // TODO - reject with a proper Error
-    fileReader.onerror = (error: ProgressEvent<FileReader>) => reject(error);
-    fileReader.readAsArrayBuffer(blob);
-  });
-}
-*/
-//# sourceMappingURL=read-array-buffer.d.ts.map
package/dist/lib/read-array-buffer.d.ts.map
DELETED
(single-line generated source map)
package/dist/lib/read-array-buffer.js
DELETED
@@ -1,29 +0,0 @@
-"use strict";
-// Random-Access read
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.readArrayBuffer = void 0;
-async function readArrayBuffer(file, start, length) {
-    if (file instanceof Blob) {
-        const slice = file.slice(start, start + length);
-        return await slice.arrayBuffer();
-    }
-    return await file.read(start, start + length);
-}
-exports.readArrayBuffer = readArrayBuffer;
-/**
- * Read a slice of a Blob or File, without loading the entire file into memory
- * The trick when reading File objects is to read successive "slices" of the File
- * Per spec https://w3c.github.io/FileAPI/, slicing a File only updates the start and end fields
- * Actually reading from file happens in `readAsArrayBuffer`
- * @param blob to read
-export async function readBlob(blob: Blob): Promise<ArrayBuffer> {
-  return await new Promise((resolve, reject) => {
-    const fileReader = new FileReader();
-    fileReader.onload = (event: ProgressEvent<FileReader>) =>
-      resolve(event?.target?.result as ArrayBuffer);
-    // TODO - reject with a proper Error
-    fileReader.onerror = (error: ProgressEvent<FileReader>) => reject(error);
-    fileReader.readAsArrayBuffer(blob);
-  });
-}
-*/
package/src/lib/read-array-buffer.ts
DELETED
@@ -1,31 +0,0 @@
-// Random-Access read
-
-export async function readArrayBuffer(
-  file: Blob | ArrayBuffer | any,
-  start: number,
-  length: number
-): Promise<ArrayBuffer> {
-  if (file instanceof Blob) {
-    const slice = file.slice(start, start + length);
-    return await slice.arrayBuffer();
-  }
-  return await file.read(start, start + length);
-}
-
-/**
- * Read a slice of a Blob or File, without loading the entire file into memory
- * The trick when reading File objects is to read successive "slices" of the File
- * Per spec https://w3c.github.io/FileAPI/, slicing a File only updates the start and end fields
- * Actually reading from file happens in `readAsArrayBuffer`
- * @param blob to read
- export async function readBlob(blob: Blob): Promise<ArrayBuffer> {
-  return await new Promise((resolve, reject) => {
-    const fileReader = new FileReader();
-    fileReader.onload = (event: ProgressEvent<FileReader>) =>
-      resolve(event?.target?.result as ArrayBuffer);
-    // TODO - reject with a proper Error
-    fileReader.onerror = (error: ProgressEvent<FileReader>) => reject(error);
-    fileReader.readAsArrayBuffer(blob);
-  });
-}
-*/
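Unlike parse-parquet and convert-schema, read-array-buffer has no renamed counterpart in the file list; the helper is simply gone in 3.4.0-alpha. A minimal, hypothetical usage sketch of the removed function, for readers comparing against code that still imports it:

// Read only the first 4 bytes of a large File without loading the whole file into memory.
// `file` is a hypothetical File/Blob input; Parquet files start with the "PAR1" magic bytes.
const magic: ArrayBuffer = await readArrayBuffer(file, 0, 4);
console.log(new TextDecoder().decode(magic)); // "PAR1" for a valid Parquet file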
/package/dist/es5/lib/{convert-schema-deep.ts.disabled → wip/convert-schema-deep.java.disabled}
RENAMED
File without changes

/package/dist/esm/lib/{convert-schema-deep.ts.disabled → wip/convert-schema-deep.java.disabled}
RENAMED
File without changes

/package/src/lib/{convert-schema-deep.ts.disabled → wip/convert-schema-deep.java.disabled}
RENAMED
File without changes