@dra2020/baseclient 1.0.11 → 1.0.14
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +16 -5
- package/dist/all/all.d.ts +18 -2
- package/dist/all/allclient.d.ts +18 -0
- package/dist/base.js +33010 -0
- package/dist/base.js.map +1 -0
- package/dist/baseclient.js +166 -1279
- package/dist/baseclient.js.map +1 -1
- package/dist/dbabstract/all.d.ts +1 -0
- package/dist/dbabstract/db.d.ts +83 -0
- package/dist/dbdynamo/all.d.ts +1 -0
- package/dist/dbdynamo/dbdynamo.d.ts +190 -0
- package/dist/filterexpr/filterexpr.d.ts +0 -3
- package/dist/fsm/fsm.d.ts +0 -1
- package/dist/fsmfile/all.d.ts +1 -0
- package/dist/fsmfile/fsmfile.d.ts +47 -0
- package/dist/jsonstream/all.d.ts +1 -0
- package/dist/jsonstream/jsonstream.d.ts +130 -0
- package/dist/lambda/all.d.ts +1 -0
- package/dist/lambda/env.d.ts +10 -0
- package/dist/lambda/lambda.d.ts +18 -0
- package/dist/logserver/all.d.ts +5 -0
- package/dist/logserver/log.d.ts +11 -0
- package/dist/logserver/logaccum.d.ts +154 -0
- package/dist/logserver/logblob.d.ts +24 -0
- package/dist/logserver/logconcat.d.ts +55 -0
- package/dist/logserver/logkey.d.ts +28 -0
- package/dist/memsqs/all.d.ts +4 -0
- package/dist/memsqs/client.d.ts +13 -0
- package/dist/memsqs/loopback.d.ts +11 -0
- package/dist/memsqs/orderedlist.d.ts +19 -0
- package/dist/memsqs/queue.d.ts +84 -0
- package/dist/memsqs/server.d.ts +37 -0
- package/dist/ot-js/otsession.d.ts +0 -3
- package/dist/poly/union.d.ts +0 -1
- package/dist/storage/all.d.ts +4 -0
- package/dist/storage/datablob.d.ts +9 -0
- package/dist/storage/env.d.ts +10 -0
- package/dist/storage/splitsblob.d.ts +13 -0
- package/dist/storage/storage.d.ts +166 -0
- package/dist/storages3/all.d.ts +1 -0
- package/dist/storages3/s3.d.ts +62 -0
- package/docs/dbabstract.md +2 -0
- package/docs/dbdynamo.md +2 -0
- package/docs/fsmfile.md +2 -0
- package/docs/jsonstream.md +44 -0
- package/docs/lambda.md +2 -0
- package/docs/logserver.md +2 -0
- package/docs/storage.md +2 -0
- package/docs/storages3.md +2 -0
- package/lib/all/all.ts +22 -2
- package/lib/all/allclient.ts +19 -0
- package/lib/dbabstract/all.ts +1 -0
- package/lib/dbabstract/db.ts +246 -0
- package/lib/dbdynamo/all.ts +1 -0
- package/lib/dbdynamo/dbdynamo.ts +1551 -0
- package/lib/filterexpr/filterexpr.ts +5 -79
- package/lib/fsm/fsm.ts +2 -12
- package/lib/fsmfile/all.ts +1 -0
- package/lib/fsmfile/fsmfile.ts +236 -0
- package/lib/jsonstream/all.ts +1 -0
- package/lib/jsonstream/jsonstream.ts +940 -0
- package/lib/lambda/all.ts +1 -0
- package/lib/lambda/env.ts +13 -0
- package/lib/lambda/lambda.ts +120 -0
- package/lib/logserver/all.ts +5 -0
- package/lib/logserver/log.ts +565 -0
- package/lib/logserver/logaccum.ts +1445 -0
- package/lib/logserver/logblob.ts +84 -0
- package/lib/logserver/logconcat.ts +313 -0
- package/lib/logserver/logkey.ts +125 -0
- package/lib/memsqs/all.ts +4 -0
- package/lib/memsqs/client.ts +268 -0
- package/lib/memsqs/loopback.ts +64 -0
- package/lib/memsqs/orderedlist.ts +74 -0
- package/lib/memsqs/queue.ts +395 -0
- package/lib/memsqs/server.ts +262 -0
- package/lib/ot-js/otsession.ts +1 -4
- package/lib/poly/hash.ts +1 -1
- package/lib/poly/topo.ts +41 -15
- package/lib/poly/union.ts +0 -17
- package/lib/storage/all.ts +4 -0
- package/lib/storage/datablob.ts +36 -0
- package/lib/storage/env.ts +14 -0
- package/lib/storage/splitsblob.ts +63 -0
- package/lib/storage/storage.ts +604 -0
- package/lib/storages3/all.ts +1 -0
- package/lib/storages3/s3.ts +576 -0
- package/package.json +10 -9
- package/dist/geo/all.d.ts +0 -2
- package/dist/geo/geo.d.ts +0 -67
- package/dist/geo/vfeature.d.ts +0 -4
- package/docs/filterexpr.md +0 -22
- package/lib/geo/all.ts +0 -2
- package/lib/geo/geo.ts +0 -452
- package/lib/geo/vfeature.ts +0 -34
|
@@ -0,0 +1,576 @@
|
|
|
1
|
+
// Node libraries
|
|
2
|
+
import * as fs from 'fs';
|
|
3
|
+
import * as stream from 'stream';
|
|
4
|
+
import * as zlib from 'zlib';
|
|
5
|
+
|
|
6
|
+
// Public libraries
|
|
7
|
+
import * as S3 from 'aws-sdk/clients/s3';
|
|
8
|
+
|
|
9
|
+
// Shared libraries
|
|
10
|
+
import * as Context from '../context/all';
|
|
11
|
+
import * as Storage from '../storage/all';
|
|
12
|
+
import * as LogAbstract from '../logabstract/all';
|
|
13
|
+
import * as FSM from '../fsm/all';
|
|
14
|
+
|
|
15
|
+
// Aggregates the cross-cutting services the S3 storage classes in this
// module require. Callers supply one Environment that is threaded through
// StorageManager, FsmStreamLoader, and FsmTransferUrl.
export interface Environment
{
  context: Context.IContext;                // configuration lookup (e.g. aws_access_key_id — see StorageManager constructor)
  log: LogAbstract.ILog;                    // event/error logging used throughout this module
  fsmManager: FSM.FsmManager;               // drives FSM.Fsm instances such as FsmStreamLoader
  storageManager: Storage.StorageManager;   // abstract storage layer that StorageManager below implements
}
|
|
22
|
+
|
|
23
|
+
class S3Request implements Storage.BlobRequest
|
|
24
|
+
{
|
|
25
|
+
blob: Storage.StorageBlob;
|
|
26
|
+
req: any;
|
|
27
|
+
res: any;
|
|
28
|
+
data: any;
|
|
29
|
+
err: any;
|
|
30
|
+
|
|
31
|
+
constructor(blob: Storage.StorageBlob)
|
|
32
|
+
{
|
|
33
|
+
this.blob = blob;
|
|
34
|
+
this.req = null;
|
|
35
|
+
this.res = null;
|
|
36
|
+
this.data = null;
|
|
37
|
+
this.err = null;
|
|
38
|
+
}
|
|
39
|
+
|
|
40
|
+
continuationToken(): string
|
|
41
|
+
{
|
|
42
|
+
if (this.data && this.data.NextContinuationToken)
|
|
43
|
+
return this.data.NextContinuationToken;
|
|
44
|
+
|
|
45
|
+
return undefined;
|
|
46
|
+
}
|
|
47
|
+
|
|
48
|
+
result(): number
|
|
49
|
+
{
|
|
50
|
+
if (this.data == null && this.blob.toLoadStream() == null && this.err == null)
|
|
51
|
+
return Storage.EPending;
|
|
52
|
+
else if (this.err != null)
|
|
53
|
+
{
|
|
54
|
+
if (this.err.statusCode && this.err.statusCode == 404)
|
|
55
|
+
return Storage.ENotFound;
|
|
56
|
+
else if (this.err.statusCode === 999)
|
|
57
|
+
return Storage.EBadFormat;
|
|
58
|
+
else
|
|
59
|
+
return Storage.EFail;
|
|
60
|
+
}
|
|
61
|
+
else
|
|
62
|
+
return Storage.ESuccess;
|
|
63
|
+
}
|
|
64
|
+
|
|
65
|
+
decode(): void
|
|
66
|
+
{
|
|
67
|
+
if (this.err == null && this.data && this.data.Body && this.data.ContentEncoding === 'gzip')
|
|
68
|
+
{
|
|
69
|
+
try
|
|
70
|
+
{
|
|
71
|
+
this.data.Body = zlib.gunzipSync(this.data.Body);
|
|
72
|
+
}
|
|
73
|
+
catch (err)
|
|
74
|
+
{
|
|
75
|
+
this.err = { statusCode: 999, message: 'Content not in gzip format.' };
|
|
76
|
+
}
|
|
77
|
+
}
|
|
78
|
+
}
|
|
79
|
+
|
|
80
|
+
asString(): string
|
|
81
|
+
{
|
|
82
|
+
if (this.err || this.res == null || this.data == null || this.data.Body == null)
|
|
83
|
+
return undefined;
|
|
84
|
+
let body: Buffer = this.data.Body;
|
|
85
|
+
return body.toString('utf-8');
|
|
86
|
+
}
|
|
87
|
+
|
|
88
|
+
// Uncompress as necessary
|
|
89
|
+
asBuffer(): Buffer
|
|
90
|
+
{
|
|
91
|
+
if (this.err || this.res == null || this.data == null || this.data.Body == null)
|
|
92
|
+
return undefined;
|
|
93
|
+
let body: Buffer = this.data.Body;
|
|
94
|
+
return body;
|
|
95
|
+
}
|
|
96
|
+
|
|
97
|
+
_dataToProps(data: any): Storage.BlobProperties
|
|
98
|
+
{
|
|
99
|
+
let props: Storage.BlobProperties = {};
|
|
100
|
+
props.ContentLength = (data.Size !== undefined) ? data.Size : 0;
|
|
101
|
+
props.Key = data.Key;
|
|
102
|
+
props.ETag = data.ETag;
|
|
103
|
+
props.LastModified = data.LastModified;
|
|
104
|
+
props.ContentEncoding = data.ContentEncoding;
|
|
105
|
+
return props;
|
|
106
|
+
}
|
|
107
|
+
|
|
108
|
+
asProps(): Storage.BlobProperties[]
|
|
109
|
+
{
|
|
110
|
+
let a: Storage.BlobProperties[] = [];
|
|
111
|
+
|
|
112
|
+
if (this.data && Array.isArray(this.data.Contents))
|
|
113
|
+
{
|
|
114
|
+
for (let i: number = 0; i < this.data.Contents.length; i++)
|
|
115
|
+
a.push(this._dataToProps(this.data.Contents[i]));
|
|
116
|
+
}
|
|
117
|
+
else
|
|
118
|
+
a.push(this._dataToProps(this.data));
|
|
119
|
+
|
|
120
|
+
return a;
|
|
121
|
+
}
|
|
122
|
+
|
|
123
|
+
asError(): string
|
|
124
|
+
{
|
|
125
|
+
if (this.err)
|
|
126
|
+
return this.err.message ? this.err.message : JSON.stringify(this.err);
|
|
127
|
+
return undefined;
|
|
128
|
+
}
|
|
129
|
+
}
|
|
130
|
+
|
|
131
|
+
// Size in bytes (~4MB) of each ranged GET issued by FsmStreamLoader below.
const ChunkSize = 4000000;
|
|
132
|
+
|
|
133
|
+
// Loads an S3 object as a stream by issuing sequential ranged GETs of
// ChunkSize bytes. Raw chunks are written into readStream; the decoded
// output (piped through gunzip when the first response reports
// ContentEncoding 'gzip') is exposed to the blob via passThrough.
export class FsmStreamLoader extends FSM.Fsm
{
  sm: StorageManager;
  blob: Storage.StorageBlob;
  param: any;          // getObject params; Range is rewritten before each chunk
  err: any;            // AWS error, or synthetic { statusCode: 999, ... } on bad gzip
  contentLength: number; // total object size; undefined until first ContentRange parsed
  contentPos: number;    // next byte offset to request
  readStream: Storage.MultiBufferPassThrough;   // raw bytes as received from S3
  passThrough: Storage.MultiBufferPassThrough;  // decoded bytes handed to the blob

  constructor(env: Environment, sm: StorageManager, blob: Storage.StorageBlob)
  {
    super(env);
    this.sm = sm;
    this.blob = blob;
    this.contentPos = 0;
    this.param = { Bucket: sm.blobBucket(blob), Key: blob.params.id };

    // We use passthrough stream because we want to make the load stream available
    // immediately but we don't actually know whether we are going to have to pipe
    // through gunzip or not until we get the first ContentEncoding header back.
    this.readStream = new Storage.MultiBufferPassThrough();
    this.passThrough = new Storage.MultiBufferPassThrough();
    this.blob.setLoadStream(this.passThrough);
  }

  get env(): Environment { return this._env as Environment; }

  // Terminate the output stream and fail the FSM with a synthetic 999
  // (bad format) error — invoked when gunzip or the read stream errors.
  setStreamError(): void
  {
    this.passThrough._done();
    this.setState(FSM.FSM_ERROR);
    this.err = { statusCode: 999, message: 'Content not in gzip format.' };
  }

  // One FSM step: request the next byte range, feed the chunk into the
  // stream plumbing, and either re-arm (FSM_STARTING) for the next chunk
  // or finish. FSM_DONE is set by the stream 'end' handlers, not here.
  tick(): void
  {
    if (this.ready)
    {
      // Figure out next chunk
      if (this.contentLength === undefined)
        this.param.Range = `bytes=0-${ChunkSize-1}`;
      else
        this.param.Range = `bytes=${this.contentPos}-${Math.min(this.contentPos+ChunkSize-1, this.contentLength-1)}`;

      switch (this.state)
      {

        case FSM.FSM_STARTING:
          this.sm.s3.getObject(this.param, (err: any, data: any) => {
              if (err == null)
              {
                // On first chunk, figure out if we need to pipe through gunzip
                if (this.contentLength === undefined)
                {
                  if (data.ContentEncoding && data.ContentEncoding === 'gzip')
                  {
                    let unzip = zlib.createGunzip({});
                    unzip.on('end', () => { this.passThrough._done(); this.setState(FSM.FSM_DONE); } );
                    unzip.on('error', () => { this.setStreamError() } );
                    this.readStream.pipe(unzip).pipe(this.passThrough);
                  }
                  else
                  {
                    this.readStream.on('end', () => { this.passThrough._done(); this.setState(FSM.FSM_DONE); } );
                    this.readStream.on('error', () => { this.setStreamError() } );
                    this.readStream.pipe(this.passThrough);
                  }
                }

                // Handle this data
                if (data.Body)
                  this.readStream.write(data.Body);

                // Update content range and content length for next time through, or noticing finish
                if (data.ContentRange)
                {
                  let re = /bytes (\d+)-(\d+)\/(\d+)/
                  let s: string = data.ContentRange; // "bytes start-end/total"
                  let matched = re.exec(s);
                  if (matched && matched.length === 4)
                  {
                    this.contentPos = Number(matched[2]) + 1;
                    this.contentLength = Number(matched[3]);
                  }
                }
              }

              // Error or done reading
              // NOTE(review): if S3 omits ContentRange, contentPos never
              // reaches contentLength and this loops requesting bytes=0-…;
              // presumably ranged GETs always return it — confirm.
              if (err || this.contentPos === this.contentLength)
              {
                this.err = err;
                this.readStream._done();
                if (err)
                {
                  this.passThrough._done();
                  this.setState(FSM.FSM_ERROR);
                }
              }
              else
                this.setState(FSM.FSM_STARTING);
            });
          break;
      }
    }
  }
}
|
|
241
|
+
|
|
242
|
+
export class FsmTransferUrl extends Storage.FsmTransferUrl
|
|
243
|
+
{
|
|
244
|
+
storageManager: StorageManager;
|
|
245
|
+
|
|
246
|
+
constructor(env: Environment, bucket: string, params: Storage.TransferParams)
|
|
247
|
+
{
|
|
248
|
+
super(env, bucket, params);
|
|
249
|
+
}
|
|
250
|
+
}
|
|
251
|
+
|
|
252
|
+
// S3-backed implementation of the abstract Storage.StorageManager.
// Each operation (load/head/save/del/ls) registers an S3Request in the
// corresponding *BlobIndex under a unique id, drives the blob's state
// machine (setLoading/setLoaded etc.), and emits an event on completion.
export class StorageManager extends Storage.StorageManager
{
  s3: any;        // AWS SDK S3 client
  count: number;  // monotonically increasing suffix used to build unique request ids

  constructor(env: Environment, bucketMap?: Storage.BucketMap)
  {
    super(env, bucketMap);

    // Hard exit when AWS credentials are not configured — this manager is
    // unusable without them and failing fast surfaces the misconfiguration.
    if (this.env.context.xstring('aws_access_key_id') === undefined
        || this.env.context.xstring('aws_secret_access_key') === undefined)
    {
      this.env.log.error('S3: not configured: exiting');
      this.env.log.dump();
      process.exit(1);
    }

    this.s3 = new S3({apiVersion: '2006-03-01', region: 'us-west-2'});
    this.count = 0;
  }

  get env(): Environment { return this._env as Environment; }

  // Follow bucket aliases in bucketMap until reaching a real bucket name.
  // NOTE(review): assumes the map contains no alias cycles — a cycle here
  // would loop forever; confirm against how bucketMap is populated.
  lookupBucket(s: string): string
  {
    while (this.bucketMap[s] !== undefined)
      s = this.bucketMap[s];
    return s;
  }

  // Resolve the concrete S3 bucket for a blob.
  blobBucket(blob: Storage.StorageBlob): string
  {
    return this.lookupBucket(blob.params.bucket);
  }

  // Start loading a blob. Streaming loads (loadToType === 'stream') go
  // through FsmStreamLoader; everything else is a single getObject whose
  // payload is decoded in _finishLoad.
  load(blob: Storage.StorageBlob): void
  {
    if (blob.params.id == '')
    {
      this.env.log.error('S3: blob load called with empty key');
      return;
    }
    let id: string = `load+${blob.params.id}+${this.count++}`;

    this.env.log.event('S3: load start', 1);
    let trace = new LogAbstract.AsyncTimer(this.env.log, 'S3: load', 1);
    let params = { Bucket: this.blobBucket(blob), Key: blob.params.id };
    let rq = new S3Request(blob);
    this.loadBlobIndex[id] = rq;
    blob.setLoading();
    if (blob.params.loadToType === 'stream')
    {
      let fsm = new FsmStreamLoader(this.env, this, blob);
      rq.req = fsm;
      new FSM.FsmOnDone(this.env, fsm, (f: FSM.Fsm) => {
          this._finishLoad(blob, id, rq, fsm.err, undefined);
          trace.log();
        });
    }
    else
    {
      rq.req = this.s3.getObject(params, (err: any, data: any) => {
          this._finishLoad(blob, id, rq, err, data);
          trace.log();
        });
    }
  }

  // Common completion path for both load flavors: record result on the
  // request, gunzip the payload if needed, advance the blob state, notify
  // listeners, and drop the request from the index.
  _finishLoad(blob: Storage.StorageBlob, id: string, rq: S3Request, err: any, data: any)
  {
    rq.res = this;
    if (err)
      rq.err = err;
    else
      rq.data = data;

    rq.decode();
    blob.setLoaded(rq.result());
    blob.endLoad(rq);
    this.emit('load', blob);

    delete this.loadBlobIndex[id];

    this.env.log.event('S3: load end', 1);
  }

  // Fetch object metadata (headObject) without the body.
  head(blob: Storage.StorageBlob): void
  {
    if (blob.params.id == '')
    {
      this.env.log.error('S3: blob head called with empty key');
      return;
    }
    let id: string = `head+${blob.params.id}+${this.count++}`;

    this.env.log.event('S3: head start', 1);
    let trace = new LogAbstract.AsyncTimer(this.env.log, 'S3: head', 1);
    let params = { Bucket: this.blobBucket(blob), Key: blob.params.id };
    let rq = new S3Request(blob);
    this.headBlobIndex[id] = rq;
    blob.setLoading();
    rq.req = this.s3.headObject(params, (err: any, data: any) => {
        rq.res = this;
        if (err)
          rq.err = err;
        else
          rq.data = data;

        blob.setLoaded(rq.result());
        blob.endHead(rq);
        this.emit('head', blob);

        delete this.headBlobIndex[id];

        this.env.log.event('S3: head end', 1);
        trace.log();
      });
  }

  // Open the blob's saveFrom path for upload. Gzip-encoded saves must be
  // read fully into a Buffer (gzipSync can't wrap a stream); otherwise a
  // read stream is returned. On open failure the save is failed
  // asynchronously (nextTick) so callers see a consistent async contract,
  // and null is returned to tell save() to bail out.
  safeSaveFromPath(blob: Storage.StorageBlob, rq: S3Request, id: string, trace: LogAbstract.AsyncTimer): any
  {
    try
    {
      // We can't gzip the stream, so read as buffer (more size limited than stream) if required
      if (blob.params.ContentEncoding === 'gzip')
        return fs.readFileSync(blob.params.saveFrom);
      else
        return fs.createReadStream(blob.params.saveFrom);
    }
    catch (err)
    {
      rq.err = err;
      process.nextTick(() => {
          blob.setSaved(rq.result());
          blob.endSave(rq);
          this.emit('save', blob);
          delete this.saveBlobIndex[id];
          this.env.log.error('S3: failed to open blob path file');
          trace.log();
        });
    }
    return null;
  }

  // Upload a blob. The body is materialized from saveFromType (object/
  // string/buffer/stream/filepath), gzipped when requested and possible,
  // then putObject is issued; completion mirrors _finishLoad.
  save(blob: Storage.StorageBlob): void
  {
    if (blob.params.id == '')
    {
      this.env.log.error('S3: blob save called with empty key');
      return;
    }
    let id: string = `save+${blob.params.id}+${this.count++}`;

    this.env.log.event('S3: save start', 1);

    let trace = new LogAbstract.AsyncTimer(this.env.log, 'S3: save', 1);
    let params: any = { Bucket: this.blobBucket(blob), Key: blob.params.id };
    if (blob.params.ContentEncoding)
      params['ContentEncoding'] = blob.params.ContentEncoding;
    if (blob.params.ContentType)
      params['ContentType'] = blob.params.ContentType;
    if (blob.params.CacheControl)
      params['CacheControl'] = blob.params.CacheControl;
    let rq = new S3Request(blob);
    this.saveBlobIndex[id] = rq;
    blob.setSaving();

    let body: any;
    let bodyStream: stream.Readable;
    switch (blob.params.saveFromType)
    {
      case 'object':
        body = Buffer.from(JSON.stringify(blob.params.saveFrom));
        break;
      case 'string':
        body = Buffer.from(blob.params.saveFrom);
        break;
      case 'buffer':
        body = blob.params.saveFrom;
        break;
      case 'stream':
        body = blob.params.saveFrom;
        bodyStream = body as stream.Readable;
        break;
      case 'filepath':
        body = this.safeSaveFromPath(blob, rq, id, trace);
        if (body && !Buffer.isBuffer(body)) bodyStream = body as stream.Readable;
        if (body == null) return;  // open failed; safeSaveFromPath already scheduled the failure
        break;
    }
    // Streams can't be gzipped here — only buffer bodies are compressed.
    if (blob.params.ContentEncoding === 'gzip' && Buffer.isBuffer(body))
      body = zlib.gzipSync(body);

    params.Body = body;
    rq.req = this.s3.putObject(params, (err: any, data: any) => {
        if (err)
          rq.err = err;
        else
          rq.data = data;
        rq.res = this;

        blob.setSaved(rq.result());
        blob.endSave(rq);
        this.emit('save', blob);

        delete this.saveBlobIndex[id];

        this.env.log.event('S3: save done', 1);
        trace.log();

        // Release the source stream (stream/filepath cases) once uploaded.
        if (bodyStream)
          bodyStream.destroy();
      });
  }

  // Delete a blob (deleteObject); completion mirrors the other operations.
  del(blob: Storage.StorageBlob): void
  {
    if (blob.params.id == '')
    {
      this.env.log.error('S3: blob delete called with empty key');
      return;
    }
    let id: string = `delete+${blob.params.id}+${this.count++}`;

    this.env.log.event(`S3: del start`, 1);

    let trace = new LogAbstract.AsyncTimer(this.env.log, 'S3: del', 1);
    let params = { Bucket: this.blobBucket(blob), Key: blob.params.id };
    let rq = new S3Request(blob);
    this.delBlobIndex[id] = rq;
    blob.setDeleting();
    rq.req = this.s3.deleteObject(params, (err: any, data: any) => {
        if (err)
          rq.err = err;
        else
          rq.data = data;
        rq.res = this;

        blob.setDeleted(rq.result());
        blob.endDelete(rq);
        this.emit('del', blob);

        delete this.delBlobIndex[id];

        trace.log();
        this.env.log.event(`S3: del done`, 1);
      });
  }

  // List the blob's bucket (listObjectsV2). Pass the continuationToken
  // from a prior S3Request.continuationToken() to fetch the next page.
  ls(blob: Storage.StorageBlob, continuationToken?: string): void
  {
    let b = this.blobBucket(blob);
    if (b == '')
    {
      this.env.log.error('S3: blob ls called with empty bucket');
      return;
    }
    let id: string = `ls+${b}+${this.count++}`;

    this.env.log.event(`S3: ls start`, 1);

    let trace = new LogAbstract.AsyncTimer(this.env.log, 'S3: ls', 1);
    let params: any = { Bucket: b };
    if (continuationToken)
      params.ContinuationToken = continuationToken;
    let rq = new S3Request(blob);
    this.lsBlobIndex[id] = rq;
    blob.setListing();
    rq.req = this.s3.listObjectsV2(params, (err: any, data: any) => {
        if (err)
          rq.err = err;
        else
          rq.data = data;
        rq.res = this;

        blob.setListed();
        blob.endList(rq);
        this.emit('ls', blob);

        delete this.lsBlobIndex[id];

        trace.log();
        this.env.log.event(`S3: ls done`, 1);
      });
  }

  // Create an FSM that resolves to a presigned transfer URL in the
  // 'transfers' bucket.
  createTransferUrl(params: Storage.TransferParams): Storage.FsmTransferUrl
  {
    let fsm = new FsmTransferUrl(this.env, this.lookupBucket('transfers'), params);
    // NOTE(review): `new` never returns null, so this branch is dead and
    // the createPresignedPost path is unreachable — every call takes the
    // getSignedUrl path below. The condition presumably should test
    // something else (e.g. the requested operation); confirm intent.
    if (fsm === null)
    {
      // NOTE(review): this `params` shadows the method parameter above.
      let params: any = { Bucket: fsm.bucket, Fields: { key: fsm.key } };
      this.s3.createPresignedPost(params, (err: any, url: string) => {
          if (err)
          {
            this.env.log.error(`S3: createPresignedPost failed: ${err}`);
            fsm.setState(FSM.FSM_ERROR);
          }
          else
          {
            fsm.url = url;
            fsm.setState(FSM.FSM_DONE);
          }
        });
    }
    else
    {
      let s3params: any = { Bucket: fsm.bucket, Key: fsm.key };
      // putObject URLs must pin the content type the uploader will send.
      if (params.op === 'putObject') s3params.ContentType = fsm.params.contentType;
      this.s3.getSignedUrl(params.op, s3params, (err: any, url: string) => {
          if (err)
          {
            this.env.log.error(`S3: getSignedUrl failed: ${err}`);
            fsm.setState(FSM.FSM_ERROR);
          }
          else
          {
            fsm.url = url;
            fsm.setState(FSM.FSM_DONE);
          }
        });
    }
    return fsm;
  }
}
|
package/package.json
CHANGED
|
@@ -1,9 +1,9 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@dra2020/baseclient",
|
|
3
|
-
"version": "1.0.
|
|
3
|
+
"version": "1.0.14",
|
|
4
4
|
"description": "Utility functions for Javascript projects.",
|
|
5
5
|
"main": "dist/baseclient.js",
|
|
6
|
-
"types": "
|
|
6
|
+
"types": "dist/all/allclient.d.ts",
|
|
7
7
|
"scripts": {
|
|
8
8
|
"test": "echo \"No test defined.\"",
|
|
9
9
|
"build": "webpack",
|
|
@@ -16,7 +16,7 @@
|
|
|
16
16
|
],
|
|
17
17
|
"repository": {
|
|
18
18
|
"type": "git",
|
|
19
|
-
"url": "git+https://github.com/dra2020/
|
|
19
|
+
"url": "git+https://github.com/dra2020/base.git"
|
|
20
20
|
},
|
|
21
21
|
"keywords": [
|
|
22
22
|
"Typescript"
|
|
@@ -24,25 +24,26 @@
|
|
|
24
24
|
"author": "Terry Crowley",
|
|
25
25
|
"license": "MIT",
|
|
26
26
|
"bugs": {
|
|
27
|
-
"url": "https://github.com/dra2020/
|
|
27
|
+
"url": "https://github.com/dra2020/base/issues"
|
|
28
28
|
},
|
|
29
|
-
"homepage": "https://github.com/dra2020/
|
|
29
|
+
"homepage": "https://github.com/dra2020/base#readme",
|
|
30
30
|
"devDependencies": {
|
|
31
31
|
"@types/diff-match-patch": "^1.0.32",
|
|
32
32
|
"@types/geojson": "^7946.0.7",
|
|
33
33
|
"@types/node": "12.7.2",
|
|
34
34
|
"@types/object-hash": "^1.3.4",
|
|
35
|
-
"source-map-loader": "^
|
|
36
|
-
"ts-loader": "^8.0.
|
|
35
|
+
"source-map-loader": "^1.1.3",
|
|
36
|
+
"ts-loader": "^8.0.12",
|
|
37
37
|
"tsify": "^5.0.2",
|
|
38
38
|
"typescript": "^4.1.3",
|
|
39
|
-
"webpack": "^5.
|
|
40
|
-
"webpack-cli": "^4.
|
|
39
|
+
"webpack": "^5.10.1",
|
|
40
|
+
"webpack-cli": "^4.2.0"
|
|
41
41
|
},
|
|
42
42
|
"dependencies": {
|
|
43
43
|
"@dra2020/topojson-client": "^3.2.7",
|
|
44
44
|
"@dra2020/topojson-server": "^3.0.103",
|
|
45
45
|
"@dra2020/topojson-simplify": "^3.0.102",
|
|
46
|
+
"aws-sdk": "^2.825.0",
|
|
46
47
|
"diff-match-patch": "^1.0.5",
|
|
47
48
|
"geojson": "^0.5.0",
|
|
48
49
|
"object-hash": "^2.1.1",
|
package/dist/geo/all.d.ts
DELETED
package/dist/geo/geo.d.ts
DELETED
|
@@ -1,67 +0,0 @@
|
|
|
1
|
-
import * as geojson from 'geojson';
|
|
2
|
-
import * as Poly from '../poly/all';
|
|
3
|
-
export declare type GeoProperties = geojson.GeoJsonProperties;
|
|
4
|
-
export declare type GeoFeature = geojson.Feature;
|
|
5
|
-
export declare type GeoFeatureArray = GeoFeature[];
|
|
6
|
-
export declare type GeoFeatureCollection = geojson.FeatureCollection;
|
|
7
|
-
export declare function geoCollectionToMap(col: GeoFeatureCollection): GeoFeatureMap;
|
|
8
|
-
export declare function geoMapToCollection(map: GeoFeatureMap): GeoFeatureCollection;
|
|
9
|
-
export declare function geoCollectionToTopo(col: GeoFeatureCollection): Poly.Topo;
|
|
10
|
-
export declare function geoTopoToCollection(topo: Poly.Topo): GeoFeatureCollection;
|
|
11
|
-
export interface GeoFeatureMap {
|
|
12
|
-
[id: string]: GeoFeature;
|
|
13
|
-
}
|
|
14
|
-
export declare type FeatureFunc = (f: GeoFeature) => void;
|
|
15
|
-
interface GeoEntry {
|
|
16
|
-
tag: string;
|
|
17
|
-
col?: GeoFeatureCollection;
|
|
18
|
-
map?: GeoFeatureMap;
|
|
19
|
-
topo?: Poly.Topo;
|
|
20
|
-
}
|
|
21
|
-
declare type GeoEntryMap = {
|
|
22
|
-
[tag: string]: GeoEntry;
|
|
23
|
-
};
|
|
24
|
-
export declare function geoEqual(m1: GeoMultiCollection, m2: GeoMultiCollection): boolean;
|
|
25
|
-
export declare function geoMapEqual(m1: GeoFeatureMap, m2: GeoFeatureMap): boolean;
|
|
26
|
-
export declare class GeoMultiCollection {
|
|
27
|
-
entries: GeoEntryMap;
|
|
28
|
-
all: GeoEntry;
|
|
29
|
-
hidden: any;
|
|
30
|
-
stamp: number;
|
|
31
|
-
constructor(tag?: string, topo?: Poly.Topo, col?: GeoFeatureCollection, map?: GeoFeatureMap);
|
|
32
|
-
empty(): void;
|
|
33
|
-
get nEntries(): number;
|
|
34
|
-
nthEntry(n: number): GeoEntry;
|
|
35
|
-
add(tag: string, topo: Poly.Topo, col: GeoFeatureCollection, map: GeoFeatureMap): void;
|
|
36
|
-
addMulti(multi: GeoMultiCollection): void;
|
|
37
|
-
remove(tag: string): void;
|
|
38
|
-
_onChange(): void;
|
|
39
|
-
_col(e: GeoEntry): GeoFeatureCollection;
|
|
40
|
-
_map(e: GeoEntry): GeoFeatureMap;
|
|
41
|
-
_topo(e: GeoEntry): Poly.Topo;
|
|
42
|
-
colOf(tag: string): GeoFeatureCollection;
|
|
43
|
-
mapOf(tag: string): GeoFeatureMap;
|
|
44
|
-
topoOf(tag: string): Poly.Topo;
|
|
45
|
-
forEachEntry(cb: (e: GeoEntry) => void): void;
|
|
46
|
-
allCol(): GeoFeatureCollection;
|
|
47
|
-
allMap(): GeoFeatureMap;
|
|
48
|
-
allTopo(): Poly.Topo;
|
|
49
|
-
hide(id: any): void;
|
|
50
|
-
show(id: any): void;
|
|
51
|
-
showAll(): void;
|
|
52
|
-
get length(): number;
|
|
53
|
-
nthFeature(n: number): GeoFeature;
|
|
54
|
-
nthFilteredFeature(n: number, cb: (f: GeoFeature) => boolean): GeoFeature;
|
|
55
|
-
forEach(cb: FeatureFunc): void;
|
|
56
|
-
map(cb: (f: GeoFeature) => GeoFeature): GeoFeature[];
|
|
57
|
-
isHidden(id: string): boolean;
|
|
58
|
-
find(id: string): GeoFeature;
|
|
59
|
-
filter(test: (f: GeoFeature) => boolean): GeoMultiCollection;
|
|
60
|
-
}
|
|
61
|
-
export declare enum geoIntersectOptions {
|
|
62
|
-
Intersects = 0,
|
|
63
|
-
Bounds = 1,
|
|
64
|
-
BoundsCenter = 2
|
|
65
|
-
}
|
|
66
|
-
export declare function geoIntersect(multi: GeoMultiCollection, bbox: Poly.BoundBox, opt: geoIntersectOptions): GeoMultiCollection;
|
|
67
|
-
export {};
|
package/dist/geo/vfeature.d.ts
DELETED