@loaders.gl/flatgeobuf 4.2.0-alpha.3 → 4.2.0-alpha.5

This diff compares the contents of two publicly released versions of the package as published to a supported public registry. It is provided for informational purposes only and reflects the package versions exactly as they appear in that registry.
Files changed (136)
  1. package/dist/dist.dev.js +286 -241
  2. package/dist/dist.min.js +30 -0
  3. package/dist/flatgeobuf/3.27.2/column-meta.d.ts +1 -1
  4. package/dist/flatgeobuf/3.27.2/column-meta.d.ts.map +1 -1
  5. package/dist/flatgeobuf/3.27.2/column-meta.js +1 -2
  6. package/dist/flatgeobuf/3.27.2/config.js +16 -13
  7. package/dist/flatgeobuf/3.27.2/constants.js +3 -2
  8. package/dist/flatgeobuf/3.27.2/crs-meta.js +1 -2
  9. package/dist/flatgeobuf/3.27.2/dumptree.js +28 -23
  10. package/dist/flatgeobuf/3.27.2/feature.d.ts +1 -1
  11. package/dist/flatgeobuf/3.27.2/feature.d.ts.map +1 -1
  12. package/dist/flatgeobuf/3.27.2/feature.js +2 -3
  13. package/dist/flatgeobuf/3.27.2/flat-geobuf/column-type.js +19 -19
  14. package/dist/flatgeobuf/3.27.2/flat-geobuf/column.d.ts +1 -1
  15. package/dist/flatgeobuf/3.27.2/flat-geobuf/column.d.ts.map +1 -1
  16. package/dist/flatgeobuf/3.27.2/flat-geobuf/column.js +117 -117
  17. package/dist/flatgeobuf/3.27.2/flat-geobuf/crs.js +76 -76
  18. package/dist/flatgeobuf/3.27.2/flat-geobuf/feature.d.ts +2 -2
  19. package/dist/flatgeobuf/3.27.2/flat-geobuf/feature.d.ts.map +1 -1
  20. package/dist/flatgeobuf/3.27.2/flat-geobuf/feature.js +96 -90
  21. package/dist/flatgeobuf/3.27.2/flat-geobuf/geometry-type.js +22 -22
  22. package/dist/flatgeobuf/3.27.2/flat-geobuf/geometry.d.ts +1 -1
  23. package/dist/flatgeobuf/3.27.2/flat-geobuf/geometry.d.ts.map +1 -1
  24. package/dist/flatgeobuf/3.27.2/flat-geobuf/geometry.js +224 -210
  25. package/dist/flatgeobuf/3.27.2/flat-geobuf/header.d.ts +3 -3
  26. package/dist/flatgeobuf/3.27.2/flat-geobuf/header.d.ts.map +1 -1
  27. package/dist/flatgeobuf/3.27.2/flat-geobuf/header.js +166 -160
  28. package/dist/flatgeobuf/3.27.2/flat-geobuf.d.ts +5 -5
  29. package/dist/flatgeobuf/3.27.2/flat-geobuf.d.ts.map +1 -1
  30. package/dist/flatgeobuf/3.27.2/flat-geobuf.js +1 -1
  31. package/dist/flatgeobuf/3.27.2/generic/feature.d.ts +4 -4
  32. package/dist/flatgeobuf/3.27.2/generic/feature.d.ts.map +1 -1
  33. package/dist/flatgeobuf/3.27.2/generic/feature.js +195 -205
  34. package/dist/flatgeobuf/3.27.2/generic/featurecollection.d.ts +6 -6
  35. package/dist/flatgeobuf/3.27.2/generic/featurecollection.d.ts.map +1 -1
  36. package/dist/flatgeobuf/3.27.2/generic/featurecollection.js +150 -128
  37. package/dist/flatgeobuf/3.27.2/generic/geometry.d.ts +2 -2
  38. package/dist/flatgeobuf/3.27.2/generic/geometry.d.ts.map +1 -1
  39. package/dist/flatgeobuf/3.27.2/generic/geometry.js +78 -66
  40. package/dist/flatgeobuf/3.27.2/generic/header.d.ts +3 -3
  41. package/dist/flatgeobuf/3.27.2/generic/header.d.ts.map +1 -1
  42. package/dist/flatgeobuf/3.27.2/generic/header.js +18 -18
  43. package/dist/flatgeobuf/3.27.2/generic/index.d.ts +4 -4
  44. package/dist/flatgeobuf/3.27.2/generic/index.d.ts.map +1 -1
  45. package/dist/flatgeobuf/3.27.2/generic/index.js +2 -1
  46. package/dist/flatgeobuf/3.27.2/generic.d.ts +7 -7
  47. package/dist/flatgeobuf/3.27.2/generic.d.ts.map +1 -1
  48. package/dist/flatgeobuf/3.27.2/generic.js +11 -3
  49. package/dist/flatgeobuf/3.27.2/geojson/feature.d.ts +3 -3
  50. package/dist/flatgeobuf/3.27.2/geojson/feature.d.ts.map +1 -1
  51. package/dist/flatgeobuf/3.27.2/geojson/feature.js +8 -9
  52. package/dist/flatgeobuf/3.27.2/geojson/featurecollection.d.ts +2 -2
  53. package/dist/flatgeobuf/3.27.2/geojson/featurecollection.d.ts.map +1 -1
  54. package/dist/flatgeobuf/3.27.2/geojson/featurecollection.js +40 -38
  55. package/dist/flatgeobuf/3.27.2/geojson/geometry.d.ts +3 -3
  56. package/dist/flatgeobuf/3.27.2/geojson/geometry.d.ts.map +1 -1
  57. package/dist/flatgeobuf/3.27.2/geojson/geometry.js +107 -103
  58. package/dist/flatgeobuf/3.27.2/geojson/index.d.ts +3 -3
  59. package/dist/flatgeobuf/3.27.2/geojson/index.d.ts.map +1 -1
  60. package/dist/flatgeobuf/3.27.2/geojson/index.js +2 -1
  61. package/dist/flatgeobuf/3.27.2/geojson.d.ts +3 -3
  62. package/dist/flatgeobuf/3.27.2/geojson.d.ts.map +1 -1
  63. package/dist/flatgeobuf/3.27.2/geojson.js +17 -5
  64. package/dist/flatgeobuf/3.27.2/header-meta.d.ts +3 -3
  65. package/dist/flatgeobuf/3.27.2/header-meta.d.ts.map +1 -1
  66. package/dist/flatgeobuf/3.27.2/header-meta.js +46 -43
  67. package/dist/flatgeobuf/3.27.2/header.d.ts +1 -1
  68. package/dist/flatgeobuf/3.27.2/header.d.ts.map +1 -1
  69. package/dist/flatgeobuf/3.27.2/header.js +2 -3
  70. package/dist/flatgeobuf/3.27.2/http-reader.d.ts +3 -3
  71. package/dist/flatgeobuf/3.27.2/http-reader.d.ts.map +1 -1
  72. package/dist/flatgeobuf/3.27.2/http-reader.js +235 -171
  73. package/dist/flatgeobuf/3.27.2/index.d.ts +13 -13
  74. package/dist/flatgeobuf/3.27.2/index.d.ts.map +1 -1
  75. package/dist/flatgeobuf/3.27.2/index.js +5 -13
  76. package/dist/flatgeobuf/3.27.2/logger.js +37 -56
  77. package/dist/flatgeobuf/3.27.2/packedrtree.js +174 -123
  78. package/dist/flatgeobuf/3.27.2/streams/utils.js +59 -58
  79. package/dist/flatgeobuf-loader.js +29 -23
  80. package/dist/flatgeobuf-worker.js +1 -1
  81. package/dist/index.cjs +175 -199
  82. package/dist/index.cjs.map +7 -0
  83. package/dist/index.d.ts +2 -2
  84. package/dist/index.d.ts.map +1 -1
  85. package/dist/index.js +3 -1
  86. package/dist/lib/binary-geometries.js +115 -117
  87. package/dist/lib/get-schema-from-fgb-header.js +110 -85
  88. package/dist/lib/parse-flatgeobuf.d.ts +1 -1
  89. package/dist/lib/parse-flatgeobuf.d.ts.map +1 -1
  90. package/dist/lib/parse-flatgeobuf.js +125 -109
  91. package/dist/workers/flatgeobuf-worker.js +3 -1
  92. package/package.json +10 -6
  93. package/src/flatgeobuf/3.27.2/.DS_Store +0 -0
  94. package/dist/LICENSE +0 -9
  95. package/dist/flatgeobuf/3.27.2/LICENSE +0 -29
  96. package/dist/flatgeobuf/3.27.2/README.md +0 -45
  97. package/dist/flatgeobuf/3.27.2/column-meta.js.map +0 -1
  98. package/dist/flatgeobuf/3.27.2/config.js.map +0 -1
  99. package/dist/flatgeobuf/3.27.2/constants.js.map +0 -1
  100. package/dist/flatgeobuf/3.27.2/crs-meta.js.map +0 -1
  101. package/dist/flatgeobuf/3.27.2/dumptree.js.map +0 -1
  102. package/dist/flatgeobuf/3.27.2/feature.js.map +0 -1
  103. package/dist/flatgeobuf/3.27.2/flat-geobuf/column-type.js.map +0 -1
  104. package/dist/flatgeobuf/3.27.2/flat-geobuf/column.js.map +0 -1
  105. package/dist/flatgeobuf/3.27.2/flat-geobuf/crs.js.map +0 -1
  106. package/dist/flatgeobuf/3.27.2/flat-geobuf/feature.js.map +0 -1
  107. package/dist/flatgeobuf/3.27.2/flat-geobuf/geometry-type.js.map +0 -1
  108. package/dist/flatgeobuf/3.27.2/flat-geobuf/geometry.js.map +0 -1
  109. package/dist/flatgeobuf/3.27.2/flat-geobuf/header.js.map +0 -1
  110. package/dist/flatgeobuf/3.27.2/flat-geobuf.js.map +0 -1
  111. package/dist/flatgeobuf/3.27.2/generic/feature.js.map +0 -1
  112. package/dist/flatgeobuf/3.27.2/generic/featurecollection.js.map +0 -1
  113. package/dist/flatgeobuf/3.27.2/generic/geometry.js.map +0 -1
  114. package/dist/flatgeobuf/3.27.2/generic/header.js.map +0 -1
  115. package/dist/flatgeobuf/3.27.2/generic/index.js.map +0 -1
  116. package/dist/flatgeobuf/3.27.2/generic.js.map +0 -1
  117. package/dist/flatgeobuf/3.27.2/geojson/feature.js.map +0 -1
  118. package/dist/flatgeobuf/3.27.2/geojson/featurecollection.js.map +0 -1
  119. package/dist/flatgeobuf/3.27.2/geojson/geometry.js.map +0 -1
  120. package/dist/flatgeobuf/3.27.2/geojson/index.js.map +0 -1
  121. package/dist/flatgeobuf/3.27.2/geojson.js.map +0 -1
  122. package/dist/flatgeobuf/3.27.2/header-meta.js.map +0 -1
  123. package/dist/flatgeobuf/3.27.2/header.js.map +0 -1
  124. package/dist/flatgeobuf/3.27.2/http-reader.js.map +0 -1
  125. package/dist/flatgeobuf/3.27.2/index.js.map +0 -1
  126. package/dist/flatgeobuf/3.27.2/logger.js.map +0 -1
  127. package/dist/flatgeobuf/3.27.2/packedrtree.js.map +0 -1
  128. package/dist/flatgeobuf/3.27.2/streams/utils.js.map +0 -1
  129. package/dist/flatgeobuf/3.27.2/types/jsts.d.ts +0 -23
  130. package/dist/flatgeobuf/3.27.2/types/slice.d.ts +0 -16
  131. package/dist/flatgeobuf-loader.js.map +0 -1
  132. package/dist/index.js.map +0 -1
  133. package/dist/lib/binary-geometries.js.map +0 -1
  134. package/dist/lib/get-schema-from-fgb-header.js.map +0 -1
  135. package/dist/lib/parse-flatgeobuf.js.map +0 -1
  136. package/dist/workers/flatgeobuf-worker.js.map +0 -1
package/dist/flatgeobuf/3.27.2/http-reader.js
@@ -7,187 +7,251 @@ import Logger from "./logger.js";
  import { fromByteBuffer } from "./header-meta.js";
  import { Feature } from "./flat-geobuf/feature.js";
  export class HttpReader {
- constructor(headerClient, header, headerLength, indexLength) {
- this.headerClient = void 0;
- this.header = void 0;
- this.headerLength = void 0;
- this.indexLength = void 0;
- this.headerClient = headerClient;
- this.header = header;
- this.headerLength = headerLength;
- this.indexLength = indexLength;
- }
- static async open(url) {
- const assumedHeaderLength = 2024;
- const headerClient = new BufferedHttpRangeClient(url);
- const assumedIndexLength = (() => {
- const assumedBranchingFactor = DEFAULT_NODE_SIZE;
- const prefetchedLayers = 3;
- let result = 0;
- let i;
- for (i = 0; i < prefetchedLayers; i++) {
- const layer_width = assumedBranchingFactor ** i * NODE_ITEM_BYTE_LEN;
- result += layer_width;
- }
- return result;
- })();
- const minReqLength = assumedHeaderLength + assumedIndexLength;
- Logger.debug(`fetching header. minReqLength: ${minReqLength} (assumedHeaderLength: ${assumedHeaderLength}, assumedIndexLength: ${assumedIndexLength})`);
- {
- const bytes = new Uint8Array(await headerClient.getRange(0, 8, minReqLength, 'header'));
- if (!bytes.subarray(0, 3).every((v, i) => magicbytes[i] === v)) {
- Logger.error(`bytes: ${bytes} != ${magicbytes}`);
- throw new Error('Not a FlatGeobuf file');
- }
- Logger.debug('magic bytes look good');
+ constructor(headerClient, header, headerLength, indexLength) {
+ this.headerClient = headerClient;
+ this.header = header;
+ this.headerLength = headerLength;
+ this.indexLength = indexLength;
  }
- let headerLength;
- {
- const bytes = await headerClient.getRange(8, 4, minReqLength, 'header');
- headerLength = new DataView(bytes).getUint32(0, true);
- const HEADER_MAX_BUFFER_SIZE = 1048576 * 10;
- if (headerLength > HEADER_MAX_BUFFER_SIZE || headerLength < 8) {
- throw new Error('Invalid header size');
- }
- Logger.debug(`headerLength: ${headerLength}`);
+ // Fetch the header, preparing the reader to read Feature data.
+ //
+ // and potentially some opportunistic fetching of the index.
+ static async open(url) {
+ // In reality, the header is probably less than half this size, but
+ // better to overshoot and fetch an extra kb rather than have to issue
+ // a second request.
+ const assumedHeaderLength = 2024;
+ const headerClient = new BufferedHttpRangeClient(url);
+ // Immediately following the header is the optional spatial index, we deliberately fetch
+ // a small part of that to skip subsequent requests.
+ const assumedIndexLength = (() => {
+ // The actual branching factor will be in the header, but since we
+ // don't have the header yet, we just guess. The consequence of
+ // getting this wrong isn't terminal, it only means we may be
+ // fetching slightly more than we need or that we need to make an
+ // extra request later.
+ const assumedBranchingFactor = DEFAULT_NODE_SIZE;
+ // NOTE: each layer is exponentially larger
+ const prefetchedLayers = 3;
+ let result = 0;
+ let i;
+ for (i = 0; i < prefetchedLayers; i++) {
+ const layer_width = assumedBranchingFactor ** i * NODE_ITEM_BYTE_LEN;
+ result += layer_width;
+ }
+ return result;
+ })();
+ const minReqLength = assumedHeaderLength + assumedIndexLength;
+ Logger.debug(`fetching header. minReqLength: ${minReqLength} (assumedHeaderLength: ${assumedHeaderLength}, assumedIndexLength: ${assumedIndexLength})`);
+ {
+ const bytes = new Uint8Array(await headerClient.getRange(0, 8, minReqLength, 'header'));
+ if (!bytes.subarray(0, 3).every((v, i) => magicbytes[i] === v)) {
+ Logger.error(`bytes: ${bytes} != ${magicbytes}`);
+ throw new Error('Not a FlatGeobuf file');
+ }
+ Logger.debug('magic bytes look good');
+ }
+ let headerLength;
+ {
+ const bytes = await headerClient.getRange(8, 4, minReqLength, 'header');
+ headerLength = new DataView(bytes).getUint32(0, true);
+ const HEADER_MAX_BUFFER_SIZE = 1048576 * 10;
+ if (headerLength > HEADER_MAX_BUFFER_SIZE || headerLength < 8) {
+ // minimum size check avoids panic in FlatBuffers header decoding
+ throw new Error('Invalid header size');
+ }
+ Logger.debug(`headerLength: ${headerLength}`);
+ }
+ const bytes = await headerClient.getRange(12, headerLength, minReqLength, 'header');
+ const bb = new flatbuffers.ByteBuffer(new Uint8Array(bytes));
+ const header = fromByteBuffer(bb);
+ const indexLength = calcTreeSize(header.featuresCount, header.indexNodeSize);
+ Logger.debug('completed: opening http reader');
+ return new HttpReader(headerClient, header, headerLength, indexLength);
  }
- const bytes = await headerClient.getRange(12, headerLength, minReqLength, 'header');
- const bb = new flatbuffers.ByteBuffer(new Uint8Array(bytes));
- const header = fromByteBuffer(bb);
- const indexLength = calcTreeSize(header.featuresCount, header.indexNodeSize);
- Logger.debug('completed: opening http reader');
- return new HttpReader(headerClient, header, headerLength, indexLength);
- }
- async *selectBbox(rect) {
- const lengthBeforeTree = this.lengthBeforeTree();
- const bufferedClient = this.headerClient;
- const readNode = async function (offsetIntoTree, size) {
- const minReqLength = 0;
- return bufferedClient.getRange(lengthBeforeTree + offsetIntoTree, size, minReqLength, 'index');
- };
- const batches = [];
- let currentBatch = [];
- for await (const searchResult of streamSearch(this.header.featuresCount, this.header.indexNodeSize, rect, readNode)) {
- const [featureOffset,,] = searchResult;
- let [,, featureLength] = searchResult;
- if (!featureLength) {
- Logger.info('final feature');
- featureLength = 4;
- }
- if (currentBatch.length == 0) {
- currentBatch.push([featureOffset, featureLength]);
- continue;
- }
- const prevFeature = currentBatch[currentBatch.length - 1];
- const gap = featureOffset - (prevFeature[0] + prevFeature[1]);
- if (gap > Config.global.extraRequestThreshold()) {
- Logger.info(`Pushing new feature batch, since gap ${gap} was too large`);
- batches.push(currentBatch);
- currentBatch = [];
- }
- currentBatch.push([featureOffset, featureLength]);
+ async *selectBbox(rect) {
+ // Read R-Tree index and build filter for features within bbox
+ const lengthBeforeTree = this.lengthBeforeTree();
+ const bufferedClient = this.headerClient;
+ const readNode = async function (offsetIntoTree, size) {
+ const minReqLength = 0;
+ return bufferedClient.getRange(lengthBeforeTree + offsetIntoTree, size, minReqLength, 'index');
+ };
+ const batches = [];
+ let currentBatch = [];
+ for await (const searchResult of streamSearch(this.header.featuresCount, this.header.indexNodeSize, rect, readNode)) {
+ const [featureOffset, ,] = searchResult;
+ let [, , featureLength] = searchResult;
+ if (!featureLength) {
+ Logger.info('final feature');
+ // Normally we get the feature length by subtracting between
+ // adjacent nodes from the index, which we can't do for the
+ // _very_ last feature in a dataset.
+ //
+ // We could *guess* the size, but we'd risk overshooting the length,
+ // which will cause some webservers to return HTTP 416: Unsatisfiable range
+ //
+ // So instead we fetch only the final features byte length, stored in the
+ // first 4 bytes.
+ featureLength = 4;
+ }
+ if (currentBatch.length == 0) {
+ currentBatch.push([featureOffset, featureLength]);
+ continue;
+ }
+ const prevFeature = currentBatch[currentBatch.length - 1];
+ const gap = featureOffset - (prevFeature[0] + prevFeature[1]);
+ if (gap > Config.global.extraRequestThreshold()) {
+ Logger.info(`Pushing new feature batch, since gap ${gap} was too large`);
+ batches.push(currentBatch);
+ currentBatch = [];
+ }
+ currentBatch.push([featureOffset, featureLength]);
+ }
+ this.headerClient.logUsage('header+index');
+ if (currentBatch.length > 0) {
+ batches.push(currentBatch);
+ }
+ const promises = batches.flatMap((batch) => this.readFeatureBatch(batch));
+ // Fetch all batches concurrently, yielding features as they become
+ // available, meaning the results may be intermixed.
+ yield* Repeater.merge(promises);
  }
- this.headerClient.logUsage('header+index');
- if (currentBatch.length > 0) {
- batches.push(currentBatch);
+ lengthBeforeTree() {
+ // FGB Layout is: [magicbytes (fixed), headerLength (i32), header (variable), Tree (variable), Features (variable)]
+ return magicbytes.length + SIZE_PREFIX_LEN + this.headerLength;
  }
- const promises = batches.flatMap(batch => this.readFeatureBatch(batch));
- yield* Repeater.merge(promises);
- }
- lengthBeforeTree() {
- return magicbytes.length + SIZE_PREFIX_LEN + this.headerLength;
- }
- lengthBeforeFeatures() {
- return this.lengthBeforeTree() + this.indexLength;
- }
- buildFeatureClient() {
- return new BufferedHttpRangeClient(this.headerClient.httpClient);
- }
- async *readFeatureBatch(batch) {
- const [firstFeatureOffset] = batch[0];
- const [lastFeatureOffset, lastFeatureLength] = batch[batch.length - 1];
- const batchStart = firstFeatureOffset;
- const batchEnd = lastFeatureOffset + lastFeatureLength;
- const batchSize = batchEnd - batchStart;
- const featureClient = this.buildFeatureClient();
- let minFeatureReqLength = batchSize;
- for (const [featureOffset] of batch) {
- yield await this.readFeature(featureClient, featureOffset, minFeatureReqLength);
- minFeatureReqLength = 0;
+ lengthBeforeFeatures() {
+ return this.lengthBeforeTree() + this.indexLength;
  }
- featureClient.logUsage('feature');
- }
- async readFeature(featureClient, featureOffset, minFeatureReqLength) {
- const offset = featureOffset + this.lengthBeforeFeatures();
- let featureLength;
- {
- const bytes = await featureClient.getRange(offset, 4, minFeatureReqLength, 'feature length');
- featureLength = new DataView(bytes).getUint32(0, true);
+ buildFeatureClient() {
+ return new BufferedHttpRangeClient(this.headerClient.httpClient);
+ }
+ /**
+ * Fetch a batch of features in a single request, yielding each Feature
+ *
+ * `batch`: [offset, length] of features in the batch
+ */
+ async *readFeatureBatch(batch) {
+ const [firstFeatureOffset] = batch[0];
+ const [lastFeatureOffset, lastFeatureLength] = batch[batch.length - 1];
+ const batchStart = firstFeatureOffset;
+ const batchEnd = lastFeatureOffset + lastFeatureLength;
+ const batchSize = batchEnd - batchStart;
+ // A new feature client is needed for each batch to own the underlying buffer as features are yielded.
+ const featureClient = this.buildFeatureClient();
+ let minFeatureReqLength = batchSize;
+ for (const [featureOffset] of batch) {
+ yield await this.readFeature(featureClient, featureOffset, minFeatureReqLength);
+ // Only set minFeatureReqLength for the first request.
+ //
+ // This should only affect a batch that contains the final feature, otherwise
+ // we've calculated `batchSize` to get all the data we need for the batch.
+ // For the very final feature in a dataset, we don't know it's length, so we
+ // will end up executing an extra request for that batch.
+ minFeatureReqLength = 0;
+ }
+ featureClient.logUsage('feature');
+ }
+ async readFeature(featureClient, featureOffset, minFeatureReqLength) {
+ const offset = featureOffset + this.lengthBeforeFeatures();
+ let featureLength;
+ {
+ const bytes = await featureClient.getRange(offset, 4, minFeatureReqLength, 'feature length');
+ featureLength = new DataView(bytes).getUint32(0, true);
+ }
+ const byteBuffer = await featureClient.getRange(offset + 4, featureLength, minFeatureReqLength, 'feature data');
+ const bytes = new Uint8Array(byteBuffer);
+ const bytesAligned = new Uint8Array(featureLength + SIZE_PREFIX_LEN);
+ bytesAligned.set(bytes, SIZE_PREFIX_LEN);
+ const bb = new flatbuffers.ByteBuffer(bytesAligned);
+ bb.setPosition(SIZE_PREFIX_LEN);
+ return Feature.getRootAsFeature(bb);
  }
- const byteBuffer = await featureClient.getRange(offset + 4, featureLength, minFeatureReqLength, 'feature data');
- const bytes = new Uint8Array(byteBuffer);
- const bytesAligned = new Uint8Array(featureLength + SIZE_PREFIX_LEN);
- bytesAligned.set(bytes, SIZE_PREFIX_LEN);
- const bb = new flatbuffers.ByteBuffer(bytesAligned);
- bb.setPosition(SIZE_PREFIX_LEN);
- return Feature.getRootAsFeature(bb);
- }
  }
  class BufferedHttpRangeClient {
- constructor(source) {
- this.httpClient = void 0;
- this.bytesEverUsed = 0;
- this.bytesEverFetched = 0;
- this.buffer = new ArrayBuffer(0);
- this.head = 0;
- if (typeof source === 'string') {
- this.httpClient = new HttpRangeClient(source);
- } else if (source instanceof HttpRangeClient) {
- this.httpClient = source;
- } else {
- throw new Error('Unknown source ');
+ constructor(source) {
+ this.bytesEverUsed = 0;
+ this.bytesEverFetched = 0;
+ this.buffer = new ArrayBuffer(0);
+ // Byte offset of `buffer` with respect to the beginning of the file being
+ // buffered
+ this.head = 0;
+ if (typeof source === 'string') {
+ this.httpClient = new HttpRangeClient(source);
+ }
+ else if (source instanceof HttpRangeClient) {
+ this.httpClient = source;
+ }
+ else {
+ throw new Error('Unknown source ');
+ }
+ }
+ async getRange(start, length, minReqLength, purpose) {
+ this.bytesEverUsed += length;
+ const start_i = start - this.head;
+ const end_i = start_i + length;
+ if (start_i >= 0 && end_i <= this.buffer.byteLength) {
+ return this.buffer.slice(start_i, end_i);
+ }
+ const lengthToFetch = Math.max(length, minReqLength);
+ this.bytesEverFetched += lengthToFetch;
+ Logger.debug(`requesting for new Range: ${start}-${start + lengthToFetch - 1}`);
+ this.buffer = await this.httpClient.getRange(start, lengthToFetch, purpose);
+ this.head = start;
+ return this.buffer.slice(0, length);
  }
- }
- async getRange(start, length, minReqLength, purpose) {
- this.bytesEverUsed += length;
- const start_i = start - this.head;
- const end_i = start_i + length;
- if (start_i >= 0 && end_i <= this.buffer.byteLength) {
- return this.buffer.slice(start_i, end_i);
+ logUsage(purpose) {
+ const category = purpose.split(' ')[0];
+ const used = this.bytesEverUsed;
+ const requested = this.bytesEverFetched;
+ const efficiency = ((100.0 * used) / requested).toFixed(2);
+ Logger.info(`${category} bytes used/requested: ${used} / ${requested} = ${efficiency}%`);
  }
- const lengthToFetch = Math.max(length, minReqLength);
- this.bytesEverFetched += lengthToFetch;
- Logger.debug(`requesting for new Range: ${start}-${start + lengthToFetch - 1}`);
- this.buffer = await this.httpClient.getRange(start, lengthToFetch, purpose);
- this.head = start;
- return this.buffer.slice(0, length);
- }
- logUsage(purpose) {
- const category = purpose.split(' ')[0];
- const used = this.bytesEverUsed;
- const requested = this.bytesEverFetched;
- const efficiency = (100.0 * used / requested).toFixed(2);
- Logger.info(`${category} bytes used/requested: ${used} / ${requested} = ${efficiency}%`);
- }
  }
  class HttpRangeClient {
- constructor(url) {
- this.url = void 0;
- this.requestsEverMade = 0;
- this.bytesEverRequested = 0;
- this.url = url;
- }
- async getRange(begin, length, purpose) {
- this.requestsEverMade += 1;
- this.bytesEverRequested += length;
- const range = `bytes=${begin}-${begin + length - 1}`;
- Logger.info(`request: #${this.requestsEverMade}, purpose: ${purpose}), bytes: (this_request: ${length}, ever: ${this.bytesEverRequested}), Range: ${range}`);
- const response = await fetch(this.url, {
- headers: {
- Range: range
- }
- });
- return response.arrayBuffer();
- }
+ constructor(url) {
+ this.requestsEverMade = 0;
+ this.bytesEverRequested = 0;
+ this.url = url;
+ }
+ async getRange(begin, length, purpose) {
+ this.requestsEverMade += 1;
+ this.bytesEverRequested += length;
+ const range = `bytes=${begin}-${begin + length - 1}`;
+ Logger.info(`request: #${this.requestsEverMade}, purpose: ${purpose}), bytes: (this_request: ${length}, ever: ${this.bytesEverRequested}), Range: ${range}`);
+ const response = await fetch(this.url, {
+ headers: {
+ Range: range
+ // TODO: better parallelize requests on Chrome
+ //
+ // Safari and Firefox have no issue performing Range requests
+ // for a resource in parallel, but Chrome will stall a
+ // subsequent request to the resource until it's received the
+ // response headers of the prior request. So, it still allows
+ // some limited parallelization, but it's not ideal.
+ //
+ // This is seemingly an artifact of how Chrome manages caching
+ // and it might differ between platforms. We could work around it
+ // by setting the request header:
+ //
+ // 'Cache-Control': 'no-cache, no-store'
+ //
+ // This allows requests to be fully parallelized in Chrome, but
+ // then Chrome won't cache the response, so it seems not a
+ // great trade-off.
+ //
+ // Another work around would be to make each Range request for
+ // a separate URL by appending something like
+ // `?cache_buster=<range>` to the URL, but then Chrome will
+ // require an additional CORS preflight OPTIONS requests per
+ // Range, which is also not a great trade-off.
+ //
+ // See:
+ // https://bugs.chromium.org/p/chromium/issues/detail?id=969828&q=concurrent%20range%20requests&can=2
+ // https://stackoverflow.com/questions/27513994/chrome-stalls-when-making-multiple-requests-to-same-resource
+ }
+ });
+ return response.arrayBuffer();
+ }
  }
- //# sourceMappingURL=http-reader.js.map
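
For orientation, HttpReader above is the vendored flatgeobuf class that drives HTTP Range requests against a remote .fgb file: open() fetches the header plus a prefix of the spatial index in one buffered request, and selectBbox() groups nearby index hits into shared Range requests. A minimal sketch of driving it directly, assuming the internal import path shown in this diff and a {minX, minY, maxX, maxY} rect shape; this is internal code, not part of the package's public API:

  // Sketch only: count features intersecting a bounding box in a remote FlatGeobuf file.
  // The import path into the vendored copy is an assumption for illustration.
  import { HttpReader } from './flatgeobuf/3.27.2/http-reader.js';

  async function countFeaturesInBbox(url, rect) {
    // One buffered Range request covers the header and the first index levels.
    const reader = await HttpReader.open(url);
    let count = 0;
    // selectBbox() yields raw FlatBuffers Feature objects as each batch of Range requests resolves.
    for await (const feature of reader.selectBbox(rect)) {
      count += 1;
    }
    return count;
  }

  // e.g. await countFeaturesInBbox('https://example.com/data.fgb',
  //   {minX: -122.5, minY: 37.7, maxX: -122.3, maxY: 37.9});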
package/dist/flatgeobuf/3.27.2/index.d.ts
@@ -1,14 +1,14 @@
- export * as generic from './generic';
- export * as geojson from './geojson';
- export { Column } from './flat-geobuf/column';
- export { Geometry } from './flat-geobuf/geometry';
- export { Feature } from './flat-geobuf/feature';
- export { ISimpleGeometry } from './generic/geometry';
- export { IFeature } from './generic/feature';
- export { FromFeatureFn } from './generic/featurecollection';
- export { IGeoJsonFeature } from './geojson/feature';
- export { default as HeaderMeta } from './header-meta';
- export { default as ColumnMeta } from './column-meta';
- export { default as CrsMeta } from './crs-meta';
- export { Rect } from './packedrtree';
+ export * as generic from "./generic.js";
+ export * as geojson from "./geojson.js";
+ export { Column } from "./flat-geobuf/column.js";
+ export { Geometry } from "./flat-geobuf/geometry.js";
+ export { Feature } from "./flat-geobuf/feature.js";
+ export { ISimpleGeometry } from "./generic/geometry.js";
+ export { IFeature } from "./generic/feature.js";
+ export { FromFeatureFn } from "./generic/featurecollection.js";
+ export { IGeoJsonFeature } from "./geojson/feature.js";
+ export { default as HeaderMeta } from "./header-meta.js";
+ export { default as ColumnMeta } from "./column-meta.js";
+ export { default as CrsMeta } from "./crs-meta.js";
+ export { Rect } from "./packedrtree.js";
  //# sourceMappingURL=index.d.ts.map
package/dist/flatgeobuf/3.27.2/index.d.ts.map
@@ -1 +1 @@
- {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/flatgeobuf/3.27.2/index.ts"],"names":[],"mappings":"AAGA,OAAO,KAAK,OAAO,MAAM,WAAW,CAAC;AACrC,OAAO,KAAK,OAAO,MAAM,WAAW,CAAC;AAGrC,OAAO,EAAC,MAAM,EAAC,MAAM,sBAAsB,CAAC;AAC5C,OAAO,EAAC,QAAQ,EAAC,MAAM,wBAAwB,CAAC;AAChD,OAAO,EAAC,OAAO,EAAC,MAAM,uBAAuB,CAAC;AAE9C,OAAO,EAAC,eAAe,EAAC,MAAM,oBAAoB,CAAC;AACnD,OAAO,EAAC,QAAQ,EAAC,MAAM,mBAAmB,CAAC;AAC3C,OAAO,EAAC,aAAa,EAAC,MAAM,6BAA6B,CAAC;AAE1D,OAAO,EAAC,eAAe,EAAC,MAAM,mBAAmB,CAAC;AAElD,OAAO,EAAC,OAAO,IAAI,UAAU,EAAC,MAAM,eAAe,CAAC;AACpD,OAAO,EAAC,OAAO,IAAI,UAAU,EAAC,MAAM,eAAe,CAAC;AACpD,OAAO,EAAC,OAAO,IAAI,OAAO,EAAC,MAAM,YAAY,CAAC;AAE9C,OAAO,EAAC,IAAI,EAAC,MAAM,eAAe,CAAC"}
+ {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/flatgeobuf/3.27.2/index.ts"],"names":[],"mappings":"AAGA,OAAO,KAAK,OAAO,qBAAkB;AACrC,OAAO,KAAK,OAAO,qBAAkB;AAGrC,OAAO,EAAC,MAAM,EAAC,gCAA6B;AAC5C,OAAO,EAAC,QAAQ,EAAC,kCAA+B;AAChD,OAAO,EAAC,OAAO,EAAC,iCAA8B;AAE9C,OAAO,EAAC,eAAe,EAAC,8BAA2B;AACnD,OAAO,EAAC,QAAQ,EAAC,6BAA0B;AAC3C,OAAO,EAAC,aAAa,EAAC,uCAAoC;AAE1D,OAAO,EAAC,eAAe,EAAC,6BAA0B;AAElD,OAAO,EAAC,OAAO,IAAI,UAAU,EAAC,yBAAsB;AACpD,OAAO,EAAC,OAAO,IAAI,UAAU,EAAC,yBAAsB;AACpD,OAAO,EAAC,OAAO,IAAI,OAAO,EAAC,sBAAmB;AAE9C,OAAO,EAAC,IAAI,EAAC,yBAAsB"}
package/dist/flatgeobuf/3.27.2/index.js
@@ -1,16 +1,8 @@
- import * as _generic from "./generic/index.js";
- export { _generic as generic };
- import * as _geojson from "./geojson/index.js";
- export { _geojson as geojson };
+ // SPDX-License-Identifier: BSD-2-Clause AND ISC
+ // Copyright (c) 2018, Björn Harrtell
+ export * as generic from "./generic.js";
+ export * as geojson from "./geojson.js";
+ // export * as ol from './ol';
  export { Column } from "./flat-geobuf/column.js";
  export { Geometry } from "./flat-geobuf/geometry.js";
  export { Feature } from "./flat-geobuf/feature.js";
- export { ISimpleGeometry } from "./generic/geometry.js";
- export { IFeature } from "./generic/feature.js";
- export { FromFeatureFn } from "./generic/featurecollection.js";
- export { IGeoJsonFeature } from "./geojson/feature.js";
- export { default as HeaderMeta } from "./header-meta.js";
- export { default as ColumnMeta } from "./column-meta.js";
- export { default as CrsMeta } from "./crs-meta.js";
- export { Rect } from "./packedrtree.js";
- //# sourceMappingURL=index.js.map
package/dist/flatgeobuf/3.27.2/logger.js
@@ -1,65 +1,46 @@
- export let LogLevel = function (LogLevel) {
- LogLevel[LogLevel["Debug"] = 0] = "Debug";
- LogLevel[LogLevel["Info"] = 1] = "Info";
- LogLevel[LogLevel["Warn"] = 2] = "Warn";
- LogLevel[LogLevel["Error"] = 3] = "Error";
- return LogLevel;
- }({});
- export default class Logger {
- static debug() {
- for (var _len = arguments.length, args = new Array(_len), _key = 0; _key < _len; _key++) {
- args[_key] = arguments[_key];
+ export var LogLevel;
+ (function (LogLevel) {
+ LogLevel[LogLevel["Debug"] = 0] = "Debug";
+ LogLevel[LogLevel["Info"] = 1] = "Info";
+ LogLevel[LogLevel["Warn"] = 2] = "Warn";
+ LogLevel[LogLevel["Error"] = 3] = "Error";
+ })(LogLevel || (LogLevel = {}));
+ class Logger {
+ static debug(...args) {
+ this.log(LogLevel.Debug, ...args);
  }
- this.log(LogLevel.Debug, ...args);
- }
- static info() {
- for (var _len2 = arguments.length, args = new Array(_len2), _key2 = 0; _key2 < _len2; _key2++) {
- args[_key2] = arguments[_key2];
+ static info(...args) {
+ this.log(LogLevel.Info, ...args);
  }
- this.log(LogLevel.Info, ...args);
- }
- static warn() {
- for (var _len3 = arguments.length, args = new Array(_len3), _key3 = 0; _key3 < _len3; _key3++) {
- args[_key3] = arguments[_key3];
+ static warn(...args) {
+ this.log(LogLevel.Warn, ...args);
  }
- this.log(LogLevel.Warn, ...args);
- }
- static error() {
- for (var _len4 = arguments.length, args = new Array(_len4), _key4 = 0; _key4 < _len4; _key4++) {
- args[_key4] = arguments[_key4];
+ static error(...args) {
+ this.log(LogLevel.Error, ...args);
  }
- this.log(LogLevel.Error, ...args);
- }
- static log(level) {
- if (this.logLevel > level) {
- return;
- }
- for (var _len5 = arguments.length, args = new Array(_len5 > 1 ? _len5 - 1 : 0), _key5 = 1; _key5 < _len5; _key5++) {
- args[_key5 - 1] = arguments[_key5];
- }
- switch (level) {
- case LogLevel.Debug:
- {
- console.debug(...args);
- break;
- }
- case LogLevel.Info:
- {
- console.info(...args);
- break;
- }
- case LogLevel.Warn:
- {
- console.warn(...args);
- break;
+ static log(level, ...args) {
+ if (this.logLevel > level) {
+ return;
  }
- case LogLevel.Error:
- {
- console.error(...args);
- break;
+ switch (level) {
+ case LogLevel.Debug: {
+ console.debug(...args);
+ break;
+ }
+ case LogLevel.Info: {
+ console.info(...args);
+ break;
+ }
+ case LogLevel.Warn: {
+ console.warn(...args);
+ break;
+ }
+ case LogLevel.Error: {
+ console.error(...args);
+ break;
+ }
  }
  }
- }
  }
  Logger.logLevel = LogLevel.Warn;
- //# sourceMappingURL=logger.js.map
+ export default Logger;
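
For context, this package is normally consumed through loaders.gl's load() API rather than through the vendored flatgeobuf modules shown above. A minimal usage sketch, assuming the standard loaders.gl pattern and the FlatGeobufLoader export; parse options are omitted since they are version-specific:

  // Sketch only: load a remote FlatGeobuf file through loaders.gl.
  import { load } from '@loaders.gl/core';
  import { FlatGeobufLoader } from '@loaders.gl/flatgeobuf';

  async function loadFgb(url) {
    // The vendored flatgeobuf 3.27.2 reader in this diff performs the low-level decoding.
    return await load(url, FlatGeobufLoader);
  }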