@loaders.gl/loader-utils 3.1.0-alpha.2 → 3.1.0-beta.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/es5/index.js +323 -0
- package/dist/es5/index.js.map +1 -0
- package/dist/es5/json-loader.js +29 -0
- package/dist/es5/json-loader.js.map +1 -0
- package/dist/es5/lib/binary-utils/array-buffer-utils.js +102 -0
- package/dist/es5/lib/binary-utils/array-buffer-utils.js.map +1 -0
- package/dist/es5/lib/binary-utils/binary-copy-utils.js +35 -0
- package/dist/es5/lib/binary-utils/binary-copy-utils.js.map +1 -0
- package/dist/es5/lib/binary-utils/buffer-utils.js +32 -0
- package/dist/es5/lib/binary-utils/buffer-utils.js.map +1 -0
- package/dist/es5/lib/binary-utils/encode-utils.js +42 -0
- package/dist/es5/lib/binary-utils/encode-utils.js.map +1 -0
- package/dist/es5/lib/binary-utils/get-first-characters.js +36 -0
- package/dist/es5/lib/binary-utils/get-first-characters.js.map +1 -0
- package/dist/es5/lib/binary-utils/memory-copy-utils.js +39 -0
- package/dist/es5/lib/binary-utils/memory-copy-utils.js.map +1 -0
- package/dist/es5/lib/env-utils/assert.js +13 -0
- package/dist/es5/lib/env-utils/assert.js.map +1 -0
- package/dist/es5/lib/env-utils/globals.js +28 -0
- package/dist/es5/lib/env-utils/globals.js.map +1 -0
- package/dist/es5/lib/filesystems/node-filesystem.js +74 -0
- package/dist/es5/lib/filesystems/node-filesystem.js.map +1 -0
- package/dist/es5/lib/iterators/async-iteration.js +51 -0
- package/dist/es5/lib/iterators/async-iteration.js.map +1 -0
- package/dist/es5/lib/iterators/text-iterators.js +59 -0
- package/dist/es5/lib/iterators/text-iterators.js.map +1 -0
- package/dist/es5/lib/node/buffer.js +38 -0
- package/dist/es5/lib/node/buffer.js.map +1 -0
- package/dist/es5/lib/node/fs.js +52 -0
- package/dist/es5/lib/node/fs.js.map +1 -0
- package/dist/es5/lib/node/util.js +16 -0
- package/dist/es5/lib/node/util.js.map +1 -0
- package/dist/es5/lib/parser-utils/parse-json.js +17 -0
- package/dist/es5/lib/parser-utils/parse-json.js.map +1 -0
- package/dist/es5/lib/path-utils/file-aliases.js +39 -0
- package/dist/es5/lib/path-utils/file-aliases.js.map +1 -0
- package/dist/es5/lib/path-utils/path.js +35 -0
- package/dist/es5/lib/path-utils/path.js.map +1 -0
- package/dist/es5/lib/request-utils/request-scheduler.js +154 -0
- package/dist/es5/lib/request-utils/request-scheduler.js.map +1 -0
- package/dist/es5/lib/worker-loader-utils/create-loader-worker.js +117 -0
- package/dist/es5/lib/worker-loader-utils/create-loader-worker.js.map +1 -0
- package/dist/es5/lib/worker-loader-utils/parse-with-worker.js +76 -0
- package/dist/es5/lib/worker-loader-utils/parse-with-worker.js.map +1 -0
- package/dist/es5/types.js +2 -0
- package/dist/{types.js.map → es5/types.js.map} +0 -0
- package/dist/es5/workers/json-worker.js +8 -0
- package/dist/es5/workers/json-worker.js.map +1 -0
- package/dist/esm/index.js +26 -0
- package/dist/esm/index.js.map +1 -0
- package/dist/esm/json-loader.js +21 -0
- package/dist/esm/json-loader.js.map +1 -0
- package/dist/esm/lib/binary-utils/array-buffer-utils.js +86 -0
- package/dist/esm/lib/binary-utils/array-buffer-utils.js.map +1 -0
- package/dist/esm/lib/binary-utils/binary-copy-utils.js +25 -0
- package/dist/esm/lib/binary-utils/binary-copy-utils.js.map +1 -0
- package/dist/esm/lib/binary-utils/buffer-utils.js +16 -0
- package/dist/esm/lib/binary-utils/buffer-utils.js.map +1 -0
- package/dist/esm/lib/binary-utils/encode-utils.js +31 -0
- package/dist/esm/lib/binary-utils/encode-utils.js.map +1 -0
- package/dist/esm/lib/binary-utils/get-first-characters.js +27 -0
- package/dist/esm/lib/binary-utils/get-first-characters.js.map +1 -0
- package/dist/esm/lib/binary-utils/memory-copy-utils.js +27 -0
- package/dist/esm/lib/binary-utils/memory-copy-utils.js.map +1 -0
- package/dist/esm/lib/env-utils/assert.js +6 -0
- package/dist/esm/lib/env-utils/assert.js.map +1 -0
- package/dist/esm/lib/env-utils/globals.js +16 -0
- package/dist/esm/lib/env-utils/globals.js.map +1 -0
- package/dist/esm/lib/filesystems/node-filesystem.js +60 -0
- package/dist/esm/lib/filesystems/node-filesystem.js.map +1 -0
- package/dist/esm/lib/iterators/async-iteration.js +39 -0
- package/dist/esm/lib/iterators/async-iteration.js.map +1 -0
- package/dist/esm/lib/iterators/text-iterators.js +46 -0
- package/dist/esm/lib/iterators/text-iterators.js.map +1 -0
- package/dist/{lib/node/buffer-utils.node.js → esm/lib/node/buffer.js} +1 -1
- package/dist/esm/lib/node/buffer.js.map +1 -0
- package/dist/esm/lib/node/fs.js +27 -0
- package/dist/esm/lib/node/fs.js.map +1 -0
- package/dist/esm/lib/node/util.js +3 -0
- package/dist/esm/lib/node/util.js.map +1 -0
- package/dist/esm/lib/parser-utils/parse-json.js +9 -0
- package/dist/esm/lib/parser-utils/parse-json.js.map +1 -0
- package/dist/esm/lib/path-utils/file-aliases.js +26 -0
- package/dist/esm/lib/path-utils/file-aliases.js.map +1 -0
- package/dist/esm/lib/path-utils/path.js +24 -0
- package/dist/esm/lib/path-utils/path.js.map +1 -0
- package/dist/esm/lib/request-utils/request-scheduler.js +146 -0
- package/dist/esm/lib/request-utils/request-scheduler.js.map +1 -0
- package/dist/esm/lib/worker-loader-utils/create-loader-worker.js +102 -0
- package/dist/esm/lib/worker-loader-utils/create-loader-worker.js.map +1 -0
- package/dist/esm/lib/worker-loader-utils/parse-with-worker.js +64 -0
- package/dist/esm/lib/worker-loader-utils/parse-with-worker.js.map +1 -0
- package/dist/esm/types.js +2 -0
- package/dist/esm/types.js.map +1 -0
- package/dist/esm/workers/json-worker.js +4 -0
- package/dist/esm/workers/json-worker.js.map +1 -0
- package/dist/index.d.ts +27 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +111 -22
- package/dist/json-loader.d.ts +22 -0
- package/dist/json-loader.d.ts.map +1 -0
- package/dist/json-loader.js +25 -18
- package/dist/lib/binary-utils/array-buffer-utils.d.ts +31 -0
- package/dist/lib/binary-utils/array-buffer-utils.d.ts.map +1 -0
- package/dist/lib/binary-utils/array-buffer-utils.js +108 -79
- package/dist/lib/binary-utils/binary-copy-utils.d.ts +24 -0
- package/dist/lib/binary-utils/binary-copy-utils.d.ts.map +1 -0
- package/dist/lib/binary-utils/binary-copy-utils.js +48 -22
- package/dist/lib/binary-utils/buffer-utils.d.ts +16 -0
- package/dist/lib/binary-utils/buffer-utils.d.ts.map +1 -0
- package/dist/lib/binary-utils/buffer-utils.js +47 -13
- package/dist/lib/binary-utils/encode-utils.d.ts +4 -0
- package/dist/lib/binary-utils/encode-utils.d.ts.map +1 -0
- package/dist/lib/binary-utils/encode-utils.js +30 -26
- package/dist/lib/binary-utils/get-first-characters.d.ts +3 -0
- package/dist/lib/binary-utils/get-first-characters.d.ts.map +1 -0
- package/dist/lib/binary-utils/get-first-characters.js +28 -25
- package/dist/lib/binary-utils/memory-copy-utils.d.ts +25 -0
- package/dist/lib/binary-utils/memory-copy-utils.d.ts.map +1 -0
- package/dist/lib/binary-utils/memory-copy-utils.js +58 -24
- package/dist/lib/env-utils/assert.d.ts +6 -0
- package/dist/lib/env-utils/assert.d.ts.map +1 -0
- package/dist/lib/env-utils/assert.js +12 -5
- package/dist/lib/env-utils/globals.d.ts +15 -0
- package/dist/lib/env-utils/globals.d.ts.map +1 -0
- package/dist/lib/env-utils/globals.js +23 -9
- package/dist/lib/filesystems/node-filesystem.d.ts +38 -0
- package/dist/lib/filesystems/node-filesystem.d.ts.map +1 -0
- package/dist/lib/filesystems/node-filesystem.js +69 -0
- package/dist/lib/iterators/async-iteration.d.ts +20 -0
- package/dist/lib/iterators/async-iteration.d.ts.map +1 -0
- package/dist/lib/iterators/async-iteration.js +49 -35
- package/dist/lib/iterators/text-iterators.d.ts +19 -0
- package/dist/lib/iterators/text-iterators.d.ts.map +1 -0
- package/dist/lib/iterators/text-iterators.js +56 -41
- package/dist/lib/node/buffer.d.ts +10 -0
- package/dist/lib/node/buffer.d.ts.map +1 -0
- package/dist/lib/node/buffer.js +36 -0
- package/dist/lib/node/fs.d.ts +26 -0
- package/dist/lib/node/fs.d.ts.map +1 -0
- package/dist/lib/node/fs.js +40 -27
- package/dist/lib/node/util.d.ts +5 -0
- package/dist/lib/node/util.d.ts.map +1 -0
- package/dist/lib/node/util.js +25 -0
- package/dist/lib/parser-utils/parse-json.d.ts +5 -0
- package/dist/lib/parser-utils/parse-json.d.ts.map +1 -0
- package/dist/lib/parser-utils/parse-json.js +15 -8
- package/dist/lib/path-utils/file-aliases.d.ts +17 -0
- package/dist/lib/path-utils/file-aliases.d.ts.map +1 -0
- package/dist/lib/path-utils/file-aliases.js +40 -19
- package/dist/lib/path-utils/path.d.ts +16 -0
- package/dist/lib/path-utils/path.d.ts.map +1 -0
- package/dist/lib/path-utils/path.js +38 -18
- package/dist/lib/request-utils/request-scheduler.d.ts +62 -0
- package/dist/lib/request-utils/request-scheduler.d.ts.map +1 -0
- package/dist/lib/request-utils/request-scheduler.js +127 -131
- package/dist/lib/worker-loader-utils/create-loader-worker.d.ts +7 -0
- package/dist/lib/worker-loader-utils/create-loader-worker.d.ts.map +1 -0
- package/dist/lib/worker-loader-utils/create-loader-worker.js +91 -96
- package/dist/lib/worker-loader-utils/parse-with-worker.d.ts +15 -0
- package/dist/lib/worker-loader-utils/parse-with-worker.d.ts.map +1 -0
- package/dist/lib/worker-loader-utils/parse-with-worker.js +69 -60
- package/dist/types.d.ts +207 -0
- package/dist/types.d.ts.map +1 -0
- package/dist/types.js +3 -2
- package/dist/workers/json-worker.d.ts +2 -0
- package/dist/workers/json-worker.d.ts.map +1 -0
- package/dist/workers/json-worker.js +5 -4
- package/package.json +15 -14
- package/src/index.ts +26 -8
- package/src/lib/binary-utils/buffer-utils.ts +1 -1
- package/src/lib/env-utils/globals.ts +1 -1
- package/src/lib/filesystems/node-filesystem.ts +79 -0
- package/src/lib/node/{buffer-utils.node.ts → buffer.ts} +0 -0
- package/src/lib/node/fs.ts +29 -13
- package/src/lib/node/util.ts +4 -0
- package/src/lib/path-utils/path.ts +9 -0
- package/src/lib/worker-loader-utils/parse-with-worker.ts +6 -5
- package/dist/index.js.map +0 -1
- package/dist/json-loader.js.map +0 -1
- package/dist/lib/binary-utils/array-buffer-utils.js.map +0 -1
- package/dist/lib/binary-utils/binary-copy-utils.js.map +0 -1
- package/dist/lib/binary-utils/buffer-utils.js.map +0 -1
- package/dist/lib/binary-utils/encode-utils.js.map +0 -1
- package/dist/lib/binary-utils/get-first-characters.js.map +0 -1
- package/dist/lib/binary-utils/memory-copy-utils.js.map +0 -1
- package/dist/lib/env-utils/assert.js.map +0 -1
- package/dist/lib/env-utils/globals.js.map +0 -1
- package/dist/lib/iterators/async-iteration.js.map +0 -1
- package/dist/lib/iterators/text-iterators.js.map +0 -1
- package/dist/lib/node/buffer-utils.node.js.map +0 -1
- package/dist/lib/node/fs.js.map +0 -1
- package/dist/lib/parser-utils/parse-json.js.map +0 -1
- package/dist/lib/path-utils/file-aliases.js.map +0 -1
- package/dist/lib/path-utils/path.js.map +0 -1
- package/dist/lib/request-utils/request-scheduler.js.map +0 -1
- package/dist/lib/worker-loader-utils/create-loader-worker.js.map +0 -1
- package/dist/lib/worker-loader-utils/parse-with-worker.js.map +0 -1
- package/dist/workers/json-worker.js.map +0 -1

package/dist/es5/lib/request-utils/request-scheduler.js
@@ -0,0 +1,154 @@
+ "use strict";
+
+ var _interopRequireDefault = require("@babel/runtime/helpers/interopRequireDefault");
+
+ Object.defineProperty(exports, "__esModule", {
+   value: true
+ });
+ exports.default = void 0;
+
+ var _defineProperty2 = _interopRequireDefault(require("@babel/runtime/helpers/defineProperty"));
+
+ var _stats = require("@probe.gl/stats");
+
+ const STAT_QUEUED_REQUESTS = 'Queued Requests';
+ const STAT_ACTIVE_REQUESTS = 'Active Requests';
+ const STAT_CANCELLED_REQUESTS = 'Cancelled Requests';
+ const STAT_QUEUED_REQUESTS_EVER = 'Queued Requests Ever';
+ const STAT_ACTIVE_REQUESTS_EVER = 'Active Requests Ever';
+ const DEFAULT_PROPS = {
+   id: 'request-scheduler',
+   throttleRequests: true,
+   maxRequests: 6
+ };
+
+ class RequestScheduler {
+   constructor(props = {}) {
+     (0, _defineProperty2.default)(this, "props", void 0);
+     (0, _defineProperty2.default)(this, "stats", void 0);
+     (0, _defineProperty2.default)(this, "activeRequestCount", 0);
+     (0, _defineProperty2.default)(this, "requestQueue", []);
+     (0, _defineProperty2.default)(this, "requestMap", new Map());
+     (0, _defineProperty2.default)(this, "deferredUpdate", null);
+     this.props = { ...DEFAULT_PROPS,
+       ...props
+     };
+     this.stats = new _stats.Stats({
+       id: this.props.id
+     });
+     this.stats.get(STAT_QUEUED_REQUESTS);
+     this.stats.get(STAT_ACTIVE_REQUESTS);
+     this.stats.get(STAT_CANCELLED_REQUESTS);
+     this.stats.get(STAT_QUEUED_REQUESTS_EVER);
+     this.stats.get(STAT_ACTIVE_REQUESTS_EVER);
+   }
+
+   scheduleRequest(handle, getPriority = () => 0) {
+     if (!this.props.throttleRequests) {
+       return Promise.resolve({
+         done: () => {}
+       });
+     }
+
+     if (this.requestMap.has(handle)) {
+       return this.requestMap.get(handle);
+     }
+
+     const request = {
+       handle,
+       priority: 0,
+       getPriority
+     };
+     const promise = new Promise(resolve => {
+       request.resolve = resolve;
+       return request;
+     });
+     this.requestQueue.push(request);
+     this.requestMap.set(handle, promise);
+
+     this._issueNewRequests();
+
+     return promise;
+   }
+
+   _issueRequest(request) {
+     const {
+       handle,
+       resolve
+     } = request;
+     let isDone = false;
+
+     const done = () => {
+       if (!isDone) {
+         isDone = true;
+         this.requestMap.delete(handle);
+         this.activeRequestCount--;
+
+         this._issueNewRequests();
+       }
+     };
+
+     this.activeRequestCount++;
+     return resolve ? resolve({
+       done
+     }) : Promise.resolve({
+       done
+     });
+   }
+
+   _issueNewRequests() {
+     if (!this.deferredUpdate) {
+       this.deferredUpdate = setTimeout(() => this._issueNewRequestsAsync(), 0);
+     }
+   }
+
+   _issueNewRequestsAsync() {
+     this.deferredUpdate = null;
+     const freeSlots = Math.max(this.props.maxRequests - this.activeRequestCount, 0);
+
+     if (freeSlots === 0) {
+       return;
+     }
+
+     this._updateAllRequests();
+
+     for (let i = 0; i < freeSlots; ++i) {
+       const request = this.requestQueue.shift();
+
+       if (request) {
+         this._issueRequest(request);
+       }
+     }
+   }
+
+   _updateAllRequests() {
+     const requestQueue = this.requestQueue;
+
+     for (let i = 0; i < requestQueue.length; ++i) {
+       const request = requestQueue[i];
+
+       if (!this._updateRequest(request)) {
+         requestQueue.splice(i, 1);
+         this.requestMap.delete(request.handle);
+         i--;
+       }
+     }
+
+     requestQueue.sort((a, b) => a.priority - b.priority);
+   }
+
+   _updateRequest(request) {
+     request.priority = request.getPriority(request.handle);
+
+     if (request.priority < 0) {
+       request.resolve(null);
+       return false;
+     }
+
+     return true;
+   }
+
+ }
+
+ exports.default = RequestScheduler;
+ //# sourceMappingURL=request-scheduler.js.map

package/dist/es5/lib/request-utils/request-scheduler.js.map
@@ -0,0 +1 @@
+ (generated source map: single-line JSON with version 3 mappings and embedded sourcesContent for src/lib/request-utils/request-scheduler.ts; omitted here)
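
For orientation, a minimal usage sketch of the RequestScheduler added above. This is illustrative only and not part of the package: it assumes the package-root RequestScheduler export shown later in dist/esm/index.js, and the URL, fetch call and priority callback are made up.

// TypeScript sketch (assumptions noted above)
import {RequestScheduler} from '@loaders.gl/loader-utils';

const scheduler = new RequestScheduler({maxRequests: 6});

async function throttledFetch(url: string): Promise<Response | null> {
  // scheduleRequest resolves to {done} once a request slot is free,
  // or to null if the getPriority callback returned a negative value (cancelled).
  const permit = await scheduler.scheduleRequest(url, () => 0);
  if (!permit) {
    return null;
  }
  try {
    return await fetch(url);
  } finally {
    // The application must call done() so the scheduler can issue queued requests.
    permit.done();
  }
}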

package/dist/es5/lib/worker-loader-utils/create-loader-worker.js
@@ -0,0 +1,117 @@
+ "use strict";
+
+ Object.defineProperty(exports, "__esModule", {
+   value: true
+ });
+ exports.createLoaderWorker = createLoaderWorker;
+
+ var _workerUtils = require("@loaders.gl/worker-utils");
+
+ let requestId = 0;
+
+ function createLoaderWorker(loader) {
+   if (typeof self === 'undefined') {
+     return;
+   }
+
+   _workerUtils.WorkerBody.onmessage = async (type, payload) => {
+     switch (type) {
+       case 'process':
+         try {
+           const {
+             input,
+             options = {}
+           } = payload;
+           const result = await parseData({
+             loader,
+             arrayBuffer: input,
+             options,
+             context: {
+               parse: parseOnMainThread
+             }
+           });
+
+           _workerUtils.WorkerBody.postMessage('done', {
+             result
+           });
+         } catch (error) {
+           const message = error instanceof Error ? error.message : '';
+
+           _workerUtils.WorkerBody.postMessage('error', {
+             error: message
+           });
+         }
+
+         break;
+
+       default:
+     }
+   };
+ }
+
+ function parseOnMainThread(arrayBuffer, options) {
+   return new Promise((resolve, reject) => {
+     const id = requestId++;
+
+     const onMessage = (type, payload) => {
+       if (payload.id !== id) {
+         return;
+       }
+
+       switch (type) {
+         case 'done':
+           _workerUtils.WorkerBody.removeEventListener(onMessage);
+
+           resolve(payload.result);
+           break;
+
+         case 'error':
+           _workerUtils.WorkerBody.removeEventListener(onMessage);
+
+           reject(payload.error);
+           break;
+
+         default:
+       }
+     };
+
+     _workerUtils.WorkerBody.addEventListener(onMessage);
+
+     const payload = {
+       id,
+       input: arrayBuffer,
+       options
+     };
+
+     _workerUtils.WorkerBody.postMessage('process', payload);
+   });
+ }
+
+ async function parseData({
+   loader,
+   arrayBuffer,
+   options,
+   context
+ }) {
+   let data;
+   let parser;
+
+   if (loader.parseSync || loader.parse) {
+     data = arrayBuffer;
+     parser = loader.parseSync || loader.parse;
+   } else if (loader.parseTextSync) {
+     const textDecoder = new TextDecoder();
+     data = textDecoder.decode(arrayBuffer);
+     parser = loader.parseTextSync;
+   } else {
+     throw new Error("Could not load data with ".concat(loader.name, " loader"));
+   }
+
+   options = { ...options,
+     modules: loader && loader.options && loader.options.modules || {},
+     worker: false
+   };
+   return await parser(data, { ...options
+   }, context, loader);
+ }
+ //# sourceMappingURL=create-loader-worker.js.map

package/dist/es5/lib/worker-loader-utils/create-loader-worker.js.map
@@ -0,0 +1 @@
+ (generated source map: single-line JSON with version 3 mappings and embedded sourcesContent for src/lib/worker-loader-utils/create-loader-worker.ts; omitted here)
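
A loader worker bundle is just an entry point that calls createLoaderWorker with a loader object. The sketch below is hypothetical (MyLoader and its module path are placeholders); the real workers/json-worker.js added in this release does exactly this with JSONLoader.

// TypeScript sketch (hypothetical loader)
import {createLoaderWorker} from '@loaders.gl/loader-utils';
import {MyLoader} from './my-loader';

// Wires up WorkerBody so the main thread can post 'process' messages
// and receive 'done'/'error' responses for this loader.
createLoaderWorker(MyLoader);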

package/dist/es5/lib/worker-loader-utils/parse-with-worker.js
@@ -0,0 +1,76 @@
+ "use strict";
+
+ Object.defineProperty(exports, "__esModule", {
+   value: true
+ });
+ exports.canParseWithWorker = canParseWithWorker;
+ exports.parseWithWorker = parseWithWorker;
+
+ var _workerUtils = require("@loaders.gl/worker-utils");
+
+ function canParseWithWorker(loader, options) {
+   if (!_workerUtils.WorkerFarm.isSupported()) {
+     return false;
+   }
+
+   return loader.worker && (options === null || options === void 0 ? void 0 : options.worker);
+ }
+
+ async function parseWithWorker(loader, data, options, context, parseOnMainThread) {
+   const name = loader.id;
+   const url = (0, _workerUtils.getWorkerURL)(loader, options);
+
+   const workerFarm = _workerUtils.WorkerFarm.getWorkerFarm(options);
+
+   const workerPool = workerFarm.getWorkerPool({
+     name,
+     url
+   });
+   options = JSON.parse(JSON.stringify(options));
+   const job = await workerPool.startJob('process-on-worker', onMessage.bind(null, parseOnMainThread));
+   job.postMessage('process', {
+     input: data,
+     options
+   });
+   const result = await job.result;
+   return await result.result;
+ }
+
+ async function onMessage(parseOnMainThread, job, type, payload) {
+   switch (type) {
+     case 'done':
+       job.done(payload);
+       break;
+
+     case 'error':
+       job.error(new Error(payload.error));
+       break;
+
+     case 'process':
+       const {
+         id,
+         input,
+         options
+       } = payload;
+
+       try {
+         const result = await parseOnMainThread(input, options);
+         job.postMessage('done', {
+           id,
+           result
+         });
+       } catch (error) {
+         const message = error instanceof Error ? error.message : 'unknown error';
+         job.postMessage('error', {
+           id,
+           error: message
+         });
+       }
+
+       break;
+
+     default:
+       console.warn("parse-with-worker unknown message ".concat(type));
+   }
+ }
+ //# sourceMappingURL=parse-with-worker.js.map

package/dist/es5/lib/worker-loader-utils/parse-with-worker.js.map
@@ -0,0 +1 @@
+ (generated source map: single-line JSON with version 3 mappings and embedded sourcesContent for src/lib/worker-loader-utils/parse-with-worker.ts; omitted here)
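
The two exports above are the main-thread side of worker parsing. The following sketch shows the intended division of labor; it is not the actual @loaders.gl/core implementation, and the loader/options shapes are loosely assumed from the types in this package.

// TypeScript sketch (assumptions noted above)
import {canParseWithWorker, parseWithWorker} from '@loaders.gl/loader-utils';

async function parseData(loader: any, arrayBuffer: ArrayBuffer, options?: any) {
  // Worker parsing requires WorkerFarm support plus loader.worker and options.worker
  if (canParseWithWorker(loader, options)) {
    return await parseWithWorker(loader, arrayBuffer, options);
  }
  // Otherwise parse on the calling thread
  return await loader.parse(arrayBuffer, options);
}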

package/dist/{types.js.map → es5/types.js.map}
File without changes

package/dist/es5/workers/json-worker.js
@@ -0,0 +1,8 @@
+ "use strict";
+
+ var _createLoaderWorker = require("../lib/worker-loader-utils/create-loader-worker");
+
+ var _jsonLoader = require("../json-loader");
+
+ (0, _createLoaderWorker.createLoaderWorker)(_jsonLoader.JSONLoader);
+ //# sourceMappingURL=json-worker.js.map

package/dist/es5/workers/json-worker.js.map
@@ -0,0 +1 @@
+ (generated source map: single-line JSON with version 3 mappings and embedded sourcesContent for src/workers/json-worker.ts; omitted here)

package/dist/esm/index.js
@@ -0,0 +1,26 @@
+ export { assert } from './lib/env-utils/assert';
+ export { isBrowser, isWorker, nodeVersion, self, window, global, document } from './lib/env-utils/globals';
+ export { createLoaderWorker } from './lib/worker-loader-utils/create-loader-worker';
+ export { parseWithWorker, canParseWithWorker } from './lib/worker-loader-utils/parse-with-worker';
+ export { parseJSON } from './lib/parser-utils/parse-json';
+ export { toArrayBuffer, sliceArrayBuffer, concatenateArrayBuffers, concatenateTypedArrays, compareArrayBuffers } from './lib/binary-utils/array-buffer-utils';
+ export { padToNBytes, copyToArray, copyArrayBuffer } from './lib/binary-utils/memory-copy-utils';
+ export { copyPaddedArrayBufferToDataView, copyPaddedStringToDataView } from './lib/binary-utils/binary-copy-utils';
+ export { padStringToByteAlignment, copyStringToDataView, copyBinaryToDataView } from './lib/binary-utils/encode-utils';
+ export { getFirstCharacters, getMagicString } from './lib/binary-utils/get-first-characters';
+ export { makeTextEncoderIterator, makeTextDecoderIterator, makeLineIterator, makeNumberedLineIterator } from './lib/iterators/text-iterators';
+ export { forEach, concatenateArrayBuffersAsync } from './lib/iterators/async-iteration';
+ export { default as RequestScheduler } from './lib/request-utils/request-scheduler';
+ export { setPathPrefix, getPathPrefix, resolvePath } from './lib/path-utils/file-aliases';
+ export { addAliases as _addAliases } from './lib/path-utils/file-aliases';
+ export { JSONLoader } from './json-loader';
+ import * as path from './lib/path-utils/path';
+ export { path };
+ export { isBuffer, toBuffer, bufferToArrayBuffer } from './lib/binary-utils/buffer-utils';
+ import * as util from './lib/node/util';
+ export { util };
+ export { promisify } from './lib/node/util';
+ import * as fs from './lib/node/fs';
+ export { fs };
+ export { default as _NodeFileSystem } from './lib/filesystems/node-filesystem';
+ //# sourceMappingURL=index.js.map

package/dist/esm/index.js.map
@@ -0,0 +1 @@
+ (generated source map: single-line JSON with version 3 mappings and embedded sourcesContent for src/index.ts; omitted here)

package/dist/esm/json-loader.js
@@ -0,0 +1,21 @@
+ const VERSION = typeof "3.1.0-beta.1" !== 'undefined' ? "3.1.0-beta.1" : 'latest';
+ export const JSONLoader = {
+   name: 'JSON',
+   id: 'json',
+   module: 'json',
+   version: VERSION,
+   extensions: ['json', 'geojson'],
+   mimeTypes: ['application/json'],
+   category: 'json',
+   text: true,
+   parseTextSync,
+   parse: async arrayBuffer => parseTextSync(new TextDecoder().decode(arrayBuffer)),
+   options: {}
+ };
+
+ function parseTextSync(text) {
+   return JSON.parse(text);
+ }
+
+ export const _typecheckJSONLoader = JSONLoader;
+ //# sourceMappingURL=json-loader.js.map

package/dist/esm/json-loader.js.map
@@ -0,0 +1 @@
+ (generated source map: single-line JSON with version 3 mappings and embedded sourcesContent for src/json-loader.ts; omitted here)
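
The JSON micro-loader can be exercised directly through its parse function, which is exactly what the diff above adds; in an application it would normally be handed to @loaders.gl/core's parse() instead. A small sketch (the input JSON is made up; run in an ES module context for top-level await):

// TypeScript sketch
import {JSONLoader} from '@loaders.gl/loader-utils';

const arrayBuffer = new TextEncoder().encode('{"answer": 42}').buffer;
const data = await JSONLoader.parse(arrayBuffer); // -> {answer: 42}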

package/dist/esm/lib/binary-utils/array-buffer-utils.js
@@ -0,0 +1,86 @@
+ import { isBuffer, bufferToArrayBuffer } from './buffer-utils';
+ export function toArrayBuffer(data) {
+   if (isBuffer(data)) {
+     return bufferToArrayBuffer(data);
+   }
+
+   if (data instanceof ArrayBuffer) {
+     return data;
+   }
+
+   if (ArrayBuffer.isView(data)) {
+     if (data.byteOffset === 0 && data.byteLength === data.buffer.byteLength) {
+       return data.buffer;
+     }
+
+     return data.buffer.slice(data.byteOffset, data.byteOffset + data.byteLength);
+   }
+
+   if (typeof data === 'string') {
+     const text = data;
+     const uint8Array = new TextEncoder().encode(text);
+     return uint8Array.buffer;
+   }
+
+   if (data && typeof data === 'object' && data._toArrayBuffer) {
+     return data._toArrayBuffer();
+   }
+
+   throw new Error('toArrayBuffer');
+ }
+ export function compareArrayBuffers(arrayBuffer1, arrayBuffer2, byteLength) {
+   byteLength = byteLength || arrayBuffer1.byteLength;
+
+   if (arrayBuffer1.byteLength < byteLength || arrayBuffer2.byteLength < byteLength) {
+     return false;
+   }
+
+   const array1 = new Uint8Array(arrayBuffer1);
+   const array2 = new Uint8Array(arrayBuffer2);
+
+   for (let i = 0; i < array1.length; ++i) {
+     if (array1[i] !== array2[i]) {
+       return false;
+     }
+   }
+
+   return true;
+ }
+ export function concatenateArrayBuffers(...sources) {
+   const sourceArrays = sources.map(source2 => source2 instanceof ArrayBuffer ? new Uint8Array(source2) : source2);
+   const byteLength = sourceArrays.reduce((length, typedArray) => length + typedArray.byteLength, 0);
+   const result = new Uint8Array(byteLength);
+   let offset = 0;
+
+   for (const sourceArray of sourceArrays) {
+     result.set(sourceArray, offset);
+     offset += sourceArray.byteLength;
+   }
+
+   return result.buffer;
+ }
+ export function concatenateTypedArrays(...typedArrays) {
+   const arrays = typedArrays;
+   const TypedArrayConstructor = arrays && arrays.length > 1 && arrays[0].constructor || null;
+
+   if (!TypedArrayConstructor) {
+     throw new Error('"concatenateTypedArrays" - incorrect quantity of arguments or arguments have incompatible data types');
+   }
+
+   const sumLength = arrays.reduce((acc, value) => acc + value.length, 0);
+   const result = new TypedArrayConstructor(sumLength);
+   let offset = 0;
+
+   for (const array of arrays) {
+     result.set(array, offset);
+     offset += array.length;
+   }
+
+   return result;
+ }
+ export function sliceArrayBuffer(arrayBuffer, byteOffset, byteLength) {
+   const subArray = byteLength !== undefined ? new Uint8Array(arrayBuffer).subarray(byteOffset, byteOffset + byteLength) : new Uint8Array(arrayBuffer).subarray(byteOffset);
+   const arrayCopy = new Uint8Array(subArray);
+   return arrayCopy.buffer;
+ }
+ //# sourceMappingURL=array-buffer-utils.js.map
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../../../../src/lib/binary-utils/array-buffer-utils.ts"],"names":["isBuffer","bufferToArrayBuffer","toArrayBuffer","data","ArrayBuffer","isView","byteOffset","byteLength","buffer","slice","text","uint8Array","TextEncoder","encode","_toArrayBuffer","Error","compareArrayBuffers","arrayBuffer1","arrayBuffer2","array1","Uint8Array","array2","i","length","concatenateArrayBuffers","sources","sourceArrays","map","source2","reduce","typedArray","result","offset","sourceArray","set","concatenateTypedArrays","typedArrays","arrays","TypedArrayConstructor","constructor","sumLength","acc","value","array","sliceArrayBuffer","arrayBuffer","subArray","undefined","subarray","arrayCopy"],"mappings":"AACA,SAAQA,QAAR,EAAkBC,mBAAlB,QAA4C,gBAA5C;AAKA,OAAO,SAASC,aAAT,CAAuBC,IAAvB,EAA+C;AAEpD,MAAIH,QAAQ,CAACG,IAAD,CAAZ,EAAoB;AAClB,WAAOF,mBAAmB,CAACE,IAAD,CAA1B;AACD;;AAED,MAAIA,IAAI,YAAYC,WAApB,EAAiC;AAC/B,WAAOD,IAAP;AACD;;AAGD,MAAIC,WAAW,CAACC,MAAZ,CAAmBF,IAAnB,CAAJ,EAA8B;AAC5B,QAAIA,IAAI,CAACG,UAAL,KAAoB,CAApB,IAAyBH,IAAI,CAACI,UAAL,KAAoBJ,IAAI,CAACK,MAAL,CAAYD,UAA7D,EAAyE;AACvE,aAAOJ,IAAI,CAACK,MAAZ;AACD;;AACD,WAAOL,IAAI,CAACK,MAAL,CAAYC,KAAZ,CAAkBN,IAAI,CAACG,UAAvB,EAAmCH,IAAI,CAACG,UAAL,GAAkBH,IAAI,CAACI,UAA1D,CAAP;AACD;;AAED,MAAI,OAAOJ,IAAP,KAAgB,QAApB,EAA8B;AAC5B,UAAMO,IAAI,GAAGP,IAAb;AACA,UAAMQ,UAAU,GAAG,IAAIC,WAAJ,GAAkBC,MAAlB,CAAyBH,IAAzB,CAAnB;AACA,WAAOC,UAAU,CAACH,MAAlB;AACD;;AAGD,MAAIL,IAAI,IAAI,OAAOA,IAAP,KAAgB,QAAxB,IAAoCA,IAAI,CAACW,cAA7C,EAA6D;AAC3D,WAAOX,IAAI,CAACW,cAAL,EAAP;AACD;;AAED,QAAM,IAAIC,KAAJ,CAAU,eAAV,CAAN;AACD;AAQD,OAAO,SAASC,mBAAT,CACLC,YADK,EAELC,YAFK,EAGLX,UAHK,EAII;AACTA,EAAAA,UAAU,GAAGA,UAAU,IAAIU,YAAY,CAACV,UAAxC;;AACA,MAAIU,YAAY,CAACV,UAAb,GAA0BA,UAA1B,IAAwCW,YAAY,CAACX,UAAb,GAA0BA,UAAtE,EAAkF;AAChF,WAAO,KAAP;AACD;;AACD,QAAMY,MAAM,GAAG,IAAIC,UAAJ,CAAeH,YAAf,CAAf;AACA,QAAMI,MAAM,GAAG,IAAID,UAAJ,CAAeF,YAAf,CAAf;;AACA,OAAK,IAAII,CAAC,GAAG,CAAb,EAAgBA,CAAC,GAAGH,MAAM,CAACI,MAA3B,EAAmC,EAAED,CAArC,EAAwC;AACtC,QAAIH,MAAM,CAACG,CAAD,CAAN,KAAcD,MAAM,CAACC,CAAD,CAAxB,EAA6B;AAC3B,aAAO,KAAP;AACD;AACF;;AACD,SAAO,IAAP;AACD;AAMD,OAAO,SAASE,uBAAT,CAAiC,GAAGC,OAApC,EAAwF;AAE7F,QAAMC,YAAY,GAAGD,OAAO,CAACE,GAAR,CAAaC,OAAD,IAC/BA,OAAO,YAAYxB,WAAnB,GAAiC,IAAIgB,UAAJ,CAAeQ,OAAf,CAAjC,GAA2DA,OADxC,CAArB;AAKA,QAAMrB,UAAU,GAAGmB,YAAY,CAACG,MAAb,CAAoB,CAACN,MAAD,EAASO,UAAT,KAAwBP,MAAM,GAAGO,UAAU,CAACvB,UAAhE,EAA4E,CAA5E,CAAnB;AAGA,QAAMwB,MAAM,GAAG,IAAIX,UAAJ,CAAeb,UAAf,CAAf;AAGA,MAAIyB,MAAM,GAAG,CAAb;;AACA,OAAK,MAAMC,WAAX,IAA0BP,YAA1B,EAAwC;AACtCK,IAAAA,MAAM,CAACG,GAAP,CAAWD,WAAX,EAAwBD,MAAxB;AACAA,IAAAA,MAAM,IAAIC,WAAW,CAAC1B,UAAtB;AACD;;AAGD,SAAOwB,MAAM,CAACvB,MAAd;AACD;AAQD,OAAO,SAAS2B,sBAAT,CAAmC,GAAGC,WAAtC,EAA2D;AAEhE,QAAMC,MAAM,GAAGD,WAAf;AAEA,QAAME,qBAAqB,GAAID,MAAM,IAAIA,MAAM,CAACd,MAAP,GAAgB,CAA1B,IAA+Bc,MAAM,CAAC,CAAD,CAAN,CAAUE,WAA1C,IAA0D,IAAxF;;AACA,MAAI,CAACD,qBAAL,EAA4B;AAC1B,UAAM,IAAIvB,KAAJ,CACJ,sGADI,CAAN;AAGD;;AAED,QAAMyB,SAAS,GAAGH,MAAM,CAACR,MAAP,CAAc,CAACY,GAAD,EAAMC,KAAN,KAAgBD,GAAG,GAAGC,KAAK,CAACnB,MAA1C,EAAkD,CAAlD,CAAlB;AAEA,QAAMQ,MAAM,GAAG,IAAIO,qBAAJ,CAA0BE,SAA1B,CAAf;AACA,MAAIR,MAAM,GAAG,CAAb;;AACA,OAAK,MAAMW,KAAX,IAAoBN,MAApB,EAA4B;AAC1BN,IAAAA,MAAM,CAACG,GAAP,CAAWS,KAAX,EAAkBX,MAAlB;AACAA,IAAAA,MAAM,IAAIW,KAAK,CAACpB,MAAhB;AACD;;AACD,SAAOQ,MAAP;AACD;AAQD,OAAO,SAASa,gBAAT,CACLC,WADK,EAELvC,UAFK,EAGLC,UAHK,EAIQ;AACb,QAAMuC,QAAQ,GACZvC,UAAU,KAAKwC,SAAf,GACI,IAAI3B,UAAJ,CAAeyB,WAAf,EAA4BG,QAA5B,CAAqC1C,UAArC,EAAiDA,UAAU,GAAGC,UAA9D,CADJ,GAEI,IAAIa,UAAJ,CAAeyB,WAAf,EAA4BG,QAA5B,CAAqC1C,UAArC,CAHN;AAIA,QAAM2C,SAAS,GAAG,IAAI7B,UAAJ,CAAe0B,QAAf,CAAlB;AACA
,SAAOG,SAAS,CAACzC,MAAjB;AACD","sourcesContent":["import {TypedArray} from '../../types';\nimport {isBuffer, bufferToArrayBuffer} from './buffer-utils';\n\n/**\n * Convert an object to an array buffer\n */\nexport function toArrayBuffer(data: any): ArrayBuffer {\n // Note: Should be called first, Buffers can trigger other detections below\n if (isBuffer(data)) {\n return bufferToArrayBuffer(data);\n }\n\n if (data instanceof ArrayBuffer) {\n return data;\n }\n\n // Careful - Node Buffers look like Uint8Arrays (keep after isBuffer)\n if (ArrayBuffer.isView(data)) {\n if (data.byteOffset === 0 && data.byteLength === data.buffer.byteLength) {\n return data.buffer;\n }\n return data.buffer.slice(data.byteOffset, data.byteOffset + data.byteLength);\n }\n\n if (typeof data === 'string') {\n const text = data;\n const uint8Array = new TextEncoder().encode(text);\n return uint8Array.buffer;\n }\n\n // HACK to support Blob polyfill\n if (data && typeof data === 'object' && data._toArrayBuffer) {\n return data._toArrayBuffer();\n }\n\n throw new Error('toArrayBuffer');\n}\n\n/**\n * compare two binary arrays for equality\n * @param {ArrayBuffer} a\n * @param {ArrayBuffer} b\n * @param {number} byteLength\n */\nexport function compareArrayBuffers(\n arrayBuffer1: ArrayBuffer,\n arrayBuffer2: ArrayBuffer,\n byteLength?: number\n): boolean {\n byteLength = byteLength || arrayBuffer1.byteLength;\n if (arrayBuffer1.byteLength < byteLength || arrayBuffer2.byteLength < byteLength) {\n return false;\n }\n const array1 = new Uint8Array(arrayBuffer1);\n const array2 = new Uint8Array(arrayBuffer2);\n for (let i = 0; i < array1.length; ++i) {\n if (array1[i] !== array2[i]) {\n return false;\n }\n }\n return true;\n}\n\n/**\n * Concatenate a sequence of ArrayBuffers\n * @return A concatenated ArrayBuffer\n */\nexport function concatenateArrayBuffers(...sources: (ArrayBuffer | Uint8Array)[]): ArrayBuffer {\n // Make sure all inputs are wrapped in typed arrays\n const sourceArrays = sources.map((source2) =>\n source2 instanceof ArrayBuffer ? new Uint8Array(source2) : source2\n );\n\n // Get length of all inputs\n const byteLength = sourceArrays.reduce((length, typedArray) => length + typedArray.byteLength, 0);\n\n // Allocate array with space for all inputs\n const result = new Uint8Array(byteLength);\n\n // Copy the subarrays\n let offset = 0;\n for (const sourceArray of sourceArrays) {\n result.set(sourceArray, offset);\n offset += sourceArray.byteLength;\n }\n\n // We work with ArrayBuffers, discard the typed array wrapper\n return result.buffer;\n}\n\n/**\n * Concatenate arbitrary count of typed arrays\n * https://developer.mozilla.org/en-US/docs/Web/JavaScript/Typed_arrays\n * @param {...*} arrays - list of arrays. 
All arrays should be the same type\n * @return A concatenated TypedArray\n */\nexport function concatenateTypedArrays<T>(...typedArrays: T[]): T {\n // @ts-ignore\n const arrays = typedArrays as TypedArray[];\n // @ts-ignore\n const TypedArrayConstructor = (arrays && arrays.length > 1 && arrays[0].constructor) || null;\n if (!TypedArrayConstructor) {\n throw new Error(\n '\"concatenateTypedArrays\" - incorrect quantity of arguments or arguments have incompatible data types'\n );\n }\n\n const sumLength = arrays.reduce((acc, value) => acc + value.length, 0);\n // @ts-ignore typescript does not like dynamic constructors\n const result = new TypedArrayConstructor(sumLength);\n let offset = 0;\n for (const array of arrays) {\n result.set(array, offset);\n offset += array.length;\n }\n return result;\n}\n\n/**\n * Copy a view of an ArrayBuffer into new ArrayBuffer with byteOffset = 0\n * @param arrayBuffer\n * @param byteOffset\n * @param byteLength\n */\nexport function sliceArrayBuffer(\n arrayBuffer: ArrayBuffer,\n byteOffset: number,\n byteLength?: number\n): ArrayBuffer {\n const subArray =\n byteLength !== undefined\n ? new Uint8Array(arrayBuffer).subarray(byteOffset, byteOffset + byteLength)\n : new Uint8Array(arrayBuffer).subarray(byteOffset);\n const arrayCopy = new Uint8Array(subArray);\n return arrayCopy.buffer;\n}\n"],"file":"array-buffer-utils.js"}
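The source map above carries the TypeScript source for the new binary array helpers (toArrayBuffer, compareArrayBuffers, concatenateArrayBuffers, concatenateTypedArrays, sliceArrayBuffer). As a rough orientation aid, the sketch below shows how those helpers compose; it assumes they are re-exported from the @loaders.gl/loader-utils entry point, which this hunk does not itself show.

// Usage sketch (not part of the package diff). Assumes the helpers above are
// re-exported from the package entry point.
import {
  toArrayBuffer,
  concatenateArrayBuffers,
  sliceArrayBuffer,
  compareArrayBuffers
} from '@loaders.gl/loader-utils';

// Normalize heterogeneous inputs (string, typed array, Node Buffer, Blob polyfill) to ArrayBuffer
const header = toArrayBuffer('HEAD');
const payload = toArrayBuffer(new Uint8Array([1, 2, 3, 4]));

// Concatenate into one contiguous ArrayBuffer, then copy the payload portion
// back out into a fresh buffer with byteOffset 0
const combined = concatenateArrayBuffers(header, payload);
const tail = sliceArrayBuffer(combined, header.byteLength);

// Byte-wise equality check (both buffers are 4 bytes here)
console.log(compareArrayBuffers(tail, payload, payload.byteLength)); // true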
@@ -0,0 +1,25 @@
+import { padToNBytes } from './memory-copy-utils';
+export function copyPaddedArrayBufferToDataView(dataView, byteOffset, sourceBuffer, padding) {
+  const paddedLength = padToNBytes(sourceBuffer.byteLength, padding);
+  const padLength = paddedLength - sourceBuffer.byteLength;
+
+  if (dataView) {
+    const targetArray = new Uint8Array(dataView.buffer, dataView.byteOffset + byteOffset, sourceBuffer.byteLength);
+    const sourceArray = new Uint8Array(sourceBuffer);
+    targetArray.set(sourceArray);
+
+    for (let i = 0; i < padLength; ++i) {
+      dataView.setUint8(byteOffset + sourceBuffer.byteLength + i, 0x20);
+    }
+  }
+
+  byteOffset += paddedLength;
+  return byteOffset;
+}
+export function copyPaddedStringToDataView(dataView, byteOffset, string, padding) {
+  const textEncoder = new TextEncoder();
+  const stringBuffer = textEncoder.encode(string);
+  byteOffset = copyPaddedArrayBufferToDataView(dataView, byteOffset, stringBuffer, padding);
+  return byteOffset;
+}
+//# sourceMappingURL=binary-copy-utils.js.map
@@ -0,0 +1 @@
{"version":3,"sources":["../../../../src/lib/binary-utils/binary-copy-utils.ts"],"names":["padToNBytes","copyPaddedArrayBufferToDataView","dataView","byteOffset","sourceBuffer","padding","paddedLength","byteLength","padLength","targetArray","Uint8Array","buffer","sourceArray","set","i","setUint8","copyPaddedStringToDataView","string","textEncoder","TextEncoder","stringBuffer","encode"],"mappings":"AACA,SAAQA,WAAR,QAA0B,qBAA1B;AAYA,OAAO,SAASC,+BAAT,CACLC,QADK,EAELC,UAFK,EAGLC,YAHK,EAILC,OAJK,EAKL;AACA,QAAMC,YAAY,GAAGN,WAAW,CAACI,YAAY,CAACG,UAAd,EAA0BF,OAA1B,CAAhC;AACA,QAAMG,SAAS,GAAGF,YAAY,GAAGF,YAAY,CAACG,UAA9C;;AAEA,MAAIL,QAAJ,EAAc;AAEZ,UAAMO,WAAW,GAAG,IAAIC,UAAJ,CAClBR,QAAQ,CAACS,MADS,EAElBT,QAAQ,CAACC,UAAT,GAAsBA,UAFJ,EAGlBC,YAAY,CAACG,UAHK,CAApB;AAKA,UAAMK,WAAW,GAAG,IAAIF,UAAJ,CAAeN,YAAf,CAApB;AACAK,IAAAA,WAAW,CAACI,GAAZ,CAAgBD,WAAhB;;AAGA,SAAK,IAAIE,CAAC,GAAG,CAAb,EAAgBA,CAAC,GAAGN,SAApB,EAA+B,EAAEM,CAAjC,EAAoC;AAElCZ,MAAAA,QAAQ,CAACa,QAAT,CAAkBZ,UAAU,GAAGC,YAAY,CAACG,UAA1B,GAAuCO,CAAzD,EAA4D,IAA5D;AACD;AACF;;AACDX,EAAAA,UAAU,IAAIG,YAAd;AACA,SAAOH,UAAP;AACD;AAYD,OAAO,SAASa,0BAAT,CACLd,QADK,EAELC,UAFK,EAGLc,MAHK,EAILZ,OAJK,EAKG;AACR,QAAMa,WAAW,GAAG,IAAIC,WAAJ,EAApB;AAGA,QAAMC,YAAY,GAAGF,WAAW,CAACG,MAAZ,CAAmBJ,MAAnB,CAArB;AAEAd,EAAAA,UAAU,GAAGF,+BAA+B,CAACC,QAAD,EAAWC,UAAX,EAAuBiB,YAAvB,EAAqCf,OAArC,CAA5C;AAEA,SAAOF,UAAP;AACD","sourcesContent":["import {TypedArray} from '../../types';\nimport {padToNBytes} from './memory-copy-utils';\n\n/**\n * Copy sourceBuffer to dataView with some padding\n *\n * @param {DataView | null} dataView - destination data container. If null - only new offset is calculated\n * @param {number} byteOffset - destination byte offset to copy to\n * @param {Array | TypedArray} sourceBuffer - source data buffer\n * @param {number} padding - pad the resulting array to multiple of \"padding\" bytes. Additional bytes are filled with 0x20 (ASCII space)\n *\n * @return new byteOffset of resulting dataView\n */\nexport function copyPaddedArrayBufferToDataView(\n dataView: DataView | null,\n byteOffset: number,\n sourceBuffer: TypedArray,\n padding: number\n) {\n const paddedLength = padToNBytes(sourceBuffer.byteLength, padding);\n const padLength = paddedLength - sourceBuffer.byteLength;\n\n if (dataView) {\n // Copy array\n const targetArray = new Uint8Array(\n dataView.buffer,\n dataView.byteOffset + byteOffset,\n sourceBuffer.byteLength\n );\n const sourceArray = new Uint8Array(sourceBuffer);\n targetArray.set(sourceArray);\n\n // Add PADDING\n for (let i = 0; i < padLength; ++i) {\n // json chunk is padded with spaces (ASCII 0x20)\n dataView.setUint8(byteOffset + sourceBuffer.byteLength + i, 0x20);\n }\n }\n byteOffset += paddedLength;\n return byteOffset;\n}\n\n/**\n * Copy string to dataView with some padding\n *\n * @param {DataView | null} dataView - destination data container. If null - only new offset is calculated\n * @param {number} byteOffset - destination byte offset to copy to\n * @param {string} string - source string\n * @param {number} padding - pad the resulting array to multiple of \"padding\" bytes. 
Additional bytes are filled with 0x20 (ASCII space)\n *\n * @return new byteOffset of resulting dataView\n */\nexport function copyPaddedStringToDataView(\n dataView: DataView | null,\n byteOffset: number,\n string: string,\n padding: number\n): number {\n const textEncoder = new TextEncoder();\n // PERFORMANCE IDEA: We encode twice, once to get size and once to store\n // PERFORMANCE IDEA: Use TextEncoder.encodeInto() to avoid temporary copy\n const stringBuffer = textEncoder.encode(string);\n\n byteOffset = copyPaddedArrayBufferToDataView(dataView, byteOffset, stringBuffer, padding);\n\n return byteOffset;\n}\n"],"file":"binary-copy-utils.js"}
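Per the JSDoc embedded in the map above, these copy helpers pad each chunk to a multiple of "padding" bytes with 0x20 (ASCII space) and accept a null DataView to run a measurement-only pass. The sketch below illustrates that two-pass pattern; the import path and the 4-byte padding value are assumptions for illustration, not taken from this diff.

// Usage sketch (not part of the package diff). Assumes copyPaddedStringToDataView
// is re-exported from the package entry point; 4-byte padding is just an example value.
import {copyPaddedStringToDataView} from '@loaders.gl/loader-utils';

const json = JSON.stringify({version: 2, generator: 'example'});

// Pass 1: a null DataView skips the copy, so only the new (padded) byte offset is computed
const byteLength = copyPaddedStringToDataView(null, 0, json, 4);

// Pass 2: allocate exactly that many bytes and copy for real;
// the tail up to the next 4-byte boundary is filled with 0x20 (spaces)
const dataView = new DataView(new ArrayBuffer(byteLength));
copyPaddedStringToDataView(dataView, 0, json, 4);

console.log(byteLength % 4); // 0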
@@ -0,0 +1,16 @@
+import * as node from '../node/buffer';
+export function isBuffer(value) {
+  return value && typeof value === 'object' && value.isBuffer;
+}
+export function toBuffer(data) {
+  return node.toBuffer ? node.toBuffer(data) : data;
+}
+export function bufferToArrayBuffer(buffer) {
+  if (isBuffer(buffer)) {
+    const typedArray = new Uint8Array(buffer.buffer, buffer.byteOffset, buffer.length);
+    return typedArray.slice().buffer;
+  }
+
+  return buffer;
+}
+//# sourceMappingURL=buffer-utils.js.map
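buffer-utils bridges Node Buffers and ArrayBuffers: isBuffer duck-types on a truthy isBuffer member, toBuffer defers to the optional Node shim in ../node/buffer (returning the input unchanged when the shim is unavailable), and bufferToArrayBuffer copies a detected Buffer's bytes into a standalone ArrayBuffer, passing anything else through as-is. A small sketch, again assuming these helpers are re-exported from the package entry point:

// Usage sketch (not part of the package diff). Assumes toBuffer and
// bufferToArrayBuffer are re-exported from the package entry point.
import {toBuffer, bufferToArrayBuffer} from '@loaders.gl/loader-utils';

const source = new Uint8Array([0x4c, 0x4f, 0x41, 0x44]).buffer; // "LOAD"

// ArrayBuffer -> Buffer under Node; unchanged where the Node shim is absent
const maybeBuffer = toBuffer(source);

// Buffer-like values are copied into a fresh ArrayBuffer; anything else is returned as-is
const roundTripped = bufferToArrayBuffer(maybeBuffer);
console.log(new Uint8Array(roundTripped)); // the same four bytes: 76, 79, 65, 68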