@nats-io/obj 3.0.0-1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +187 -0
- package/build/src/internal_mod.ts +13 -0
- package/build/src/mod.ts +13 -0
- package/build/src/objectstore.ts +922 -0
- package/build/src/types.ts +340 -0
- package/lib/internal_mod.d.ts +2 -0
- package/lib/internal_mod.js +6 -0
- package/lib/internal_mod.js.map +1 -0
- package/lib/mod.d.ts +2 -0
- package/lib/mod.js +6 -0
- package/lib/mod.js.map +1 -0
- package/lib/objectstore.d.ts +101 -0
- package/lib/objectstore.js +730 -0
- package/lib/objectstore.js.map +1 -0
- package/lib/types.d.ts +289 -0
- package/lib/types.js +3 -0
- package/lib/types.js.map +1 -0
- package/package.json +38 -0
|
@@ -0,0 +1,730 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
/*
|
|
3
|
+
* Copyright 2022-2024 The NATS Authors
|
|
4
|
+
* Licensed under the Apache License, Version 2.0 (the "License");
|
|
5
|
+
* you may not use this file except in compliance with the License.
|
|
6
|
+
* You may obtain a copy of the License at
|
|
7
|
+
*
|
|
8
|
+
* http://www.apache.org/licenses/LICENSE-2.0
|
|
9
|
+
*
|
|
10
|
+
* Unless required by applicable law or agreed to in writing, software
|
|
11
|
+
* distributed under the License is distributed on an "AS IS" BASIS,
|
|
12
|
+
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
13
|
+
* See the License for the specific language governing permissions and
|
|
14
|
+
* limitations under the License.
|
|
15
|
+
*/
|
|
16
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
17
|
+
exports.ObjectStoreImpl = exports.validateBucket = exports.ObjectStoreStatusImpl = exports.Objm = exports.objectStoreBucketName = exports.objectStoreStreamName = exports.digestType = exports.osPrefix = void 0;
|
|
18
|
+
const internal_1 = require("@nats-io/nats-core/internal");
|
|
19
|
+
const internal_2 = require("@nats-io/jetstream/internal");
|
|
20
|
+
// Prefix applied to a bucket name to derive the backing JetStream stream name
// (e.g. bucket "files" is stored in stream "OBJ_files").
exports.osPrefix = "OBJ_";
// Algorithm tag prepended to the base64 digest recorded in object metadata
// and compared when reading objects back.
exports.digestType = "SHA-256=";
|
|
22
|
+
/**
 * Derives the backing JetStream stream name for an object-store bucket by
 * applying the object-store prefix.
 * @param bucket - the bucket name
 * @returns the stream name (`OBJ_<bucket>`)
 * @throws Error if the bucket name contains invalid characters
 */
function objectStoreStreamName(bucket) {
    // reject invalid bucket names up front, before the name reaches a subject
    validateBucket(bucket);
    return exports.osPrefix + bucket;
}
exports.objectStoreStreamName = objectStoreStreamName;
|
|
27
|
+
/**
 * Maps a backing stream name to its object-store bucket name by stripping the
 * object-store prefix. Names without the prefix are returned unchanged.
 * @param stream - the stream name
 * @returns the bucket name
 */
function objectStoreBucketName(stream) {
    if (stream.startsWith(exports.osPrefix)) {
        // strip by the actual prefix length instead of the magic number 4,
        // so this cannot drift if `osPrefix` ever changes
        return stream.substring(exports.osPrefix.length);
    }
    return stream;
}
exports.objectStoreBucketName = objectStoreBucketName;
|
|
34
|
+
/**
|
|
35
|
+
* The entry point to creating and managing new ObjectStore instances.
|
|
36
|
+
*/
|
|
37
|
+
/**
 * The entry point to creating and managing new ObjectStore instances.
 */
class Objm {
    js;
    /**
     * Creates an instance of the Objm that allows you to create and access ObjectStore.
     * Note that if the argument is a NatsConnection, default JetStream Options are
     * used. If you want to set some options, please provide a JetStreamClient instead.
     * @param nc
     */
    constructor(nc) {
        this.js = (0, internal_2.toJetStreamClient)(nc);
    }
    /**
     * Creates and opens the specified ObjectStore. If the Object already exists, it opens the existing ObjectStore.
     * @param name
     * @param opts
     */
    create(name, opts = {}) {
        return this.#maybeCreate(name, opts);
    }
    // Validates runtime/server prerequisites before delegating creation to
    // ObjectStoreImpl.create.
    #maybeCreate(name, opts = {}) {
        // WebCrypto SHA-256 is required to compute object digests
        if (typeof crypto?.subtle?.digest !== "function") {
            return Promise.reject(new Error("objectstore: unable to calculate hashes - crypto.subtle.digest with sha256 support is required"));
        }
        const { ok, min } = this.js.nc.features.get(internal_1.Feature.JS_OBJECTSTORE);
        if (!ok) {
            return Promise.reject(new Error(`objectstore is only supported on servers ${min} or better`));
        }
        return ObjectStoreImpl.create(this.js, name, opts);
    }
    /**
     * Returns a list of ObjectStoreInfo for all streams that are identified as
     * being a ObjectStore (that is having names that have the prefix `OBJ_`)
     */
    list() {
        const filter = (v) => {
            const slr = v;
            // keep only streams whose name marks them as object stores
            const matched = slr.streams.filter((si) => si.config.name.startsWith(exports.osPrefix));
            // normalize optional boolean flags the server may omit
            for (const si of matched) {
                si.config.sealed = si.config.sealed || false;
                si.config.deny_delete = si.config.deny_delete || false;
                si.config.deny_purge = si.config.deny_purge || false;
                si.config.allow_rollup_hdrs = si.config.allow_rollup_hdrs || false;
            }
            return matched.map((si) => new ObjectStoreStatusImpl(si));
        };
        return new internal_2.ListerImpl(`${this.js.prefix}.STREAM.LIST`, filter, this.js);
    }
}
exports.Objm = Objm;
|
|
91
|
+
/**
 * Read-only view of an object store's status, backed by the StreamInfo of
 * the underlying JetStream stream.
 */
class ObjectStoreStatusImpl {
    si;
    backingStore;
    /** @param si - the StreamInfo of the backing stream */
    constructor(si) {
        this.si = si;
        this.backingStore = "JetStream";
    }
    /** The bucket name (stream name with the `OBJ_` prefix removed). */
    get bucket() {
        return objectStoreBucketName(this.si.config.name);
    }
    /** User-provided description, or the empty string when unset. */
    get description() {
        return this.si.config.description ?? "";
    }
    /** Maximum age of entries (the stream's `max_age`). */
    get ttl() {
        return this.si.config.max_age;
    }
    /** Storage backend of the stream (file/memory). */
    get storage() {
        return this.si.config.storage;
    }
    /** Number of stream replicas. */
    get replicas() {
        return this.si.config.num_replicas;
    }
    /** True when the backing stream has been sealed. */
    get sealed() {
        return this.si.config.sealed;
    }
    /** Total bytes held by the stream. */
    get size() {
        return this.si.state.bytes;
    }
    /** The raw StreamInfo backing this status. */
    get streamInfo() {
        return this.si;
    }
    /** Stream metadata record. */
    get metadata() {
        return this.si.config.metadata;
    }
    /** True when the stream has compression enabled. */
    get compression() {
        const c = this.si.config.compression;
        return c ? c !== internal_2.StoreCompression.None : false;
    }
}
|
|
132
|
+
exports.ObjectStoreStatusImpl = ObjectStoreStatusImpl;
|
|
133
|
+
/**
 * Validates an object-store bucket name; only word characters and dashes
 * are permitted, and the name must be non-empty.
 * @param name - the bucket name to check
 * @throws Error when the name is invalid
 */
function validateBucket(name) {
    // bucket names become part of NATS subjects, so restrict to [-\w]+
    if (/^[-\w]+$/.test(name) === false) {
        throw new Error(`invalid bucket name: ${name}`);
    }
}
|
|
139
|
+
exports.validateBucket = validateBucket;
|
|
140
|
+
/**
 * Read-only view of a stored object's metadata, with lazily-parsed headers
 * and sensible defaults for optional fields.
 */
class ObjectInfoImpl {
    info;
    hdrs;
    /** @param oi - the raw server-side object info record */
    constructor(oi) {
        this.info = oi;
    }
    /** Object name. */
    get name() {
        return this.info.name;
    }
    /** Description, or the empty string when unset. */
    get description() {
        return this.info.description ?? "";
    }
    /** Parsed headers; built once from the raw record and cached. */
    get headers() {
        this.hdrs ??= internal_1.MsgHdrsImpl.fromRecord(this.info.headers || {});
        return this.hdrs;
    }
    /** Object options (chunking, link, ...). */
    get options() {
        return this.info.options;
    }
    /** Bucket that holds the object. */
    get bucket() {
        return this.info.bucket;
    }
    /** Number of chunks the object was split into. */
    get chunks() {
        return this.info.chunks;
    }
    /** True when the object has been deleted; defaults to false. */
    get deleted() {
        return this.info.deleted ?? false;
    }
    /** Recorded digest string (e.g. `SHA-256=<base64>`). */
    get digest() {
        return this.info.digest;
    }
    /** Modification timestamp. */
    get mtime() {
        return this.info.mtime;
    }
    /** Unique id used in the chunk subjects for this object. */
    get nuid() {
        return this.info.nuid;
    }
    /** Object size in bytes. */
    get size() {
        return this.info.size;
    }
    /** Stream sequence of the metadata entry. */
    get revision() {
        return this.info.revision;
    }
    /** User metadata record; defaults to an empty record. */
    get metadata() {
        return this.info.metadata || {};
    }
    /** True when this entry is a link to another object or bucket. */
    isLink() {
        // equivalent to (link !== undefined && link !== null)
        return this.info.options?.link != null;
    }
}
|
|
193
|
+
/**
 * Converts client-side object metadata into the record shape sent to the
 * server, flattening the headers object into a plain record when present.
 * @param meta - the client-provided metadata
 * @returns the server-side metadata record
 */
function toServerObjectStoreMeta(meta) {
    const sm = {
        name: meta.name,
        description: meta.description ?? "",
        options: meta.options,
        metadata: meta.metadata,
    };
    const hdrs = meta.headers;
    if (hdrs) {
        // headers are carried as a plain record on the wire
        sm.headers = hdrs.toRecord();
    }
    return sm;
}
|
|
206
|
+
/**
 * Builds a ReadableStream representing an empty object: it yields a single
 * zero-length Uint8Array and then closes.
 * @returns the empty stream
 */
function emptyReadableStream() {
    return new ReadableStream({
        pull(controller) {
            controller.enqueue(new Uint8Array(0));
            controller.close();
        },
    });
}
|
|
214
|
+
/**
 * ObjectStore implementation backed by a single JetStream stream.
 * Chunks are published on `$O.<bucket>.C.<nuid>` subjects and per-object
 * metadata on `$O.<bucket>.M.<base64url(name)>` subjects; metadata writes
 * use rollup headers so only the latest entry per subject is retained.
 */
class ObjectStoreImpl {
    jsm;
    js;
    stream;
    name;
    /**
     * @param name - the bucket name
     * @param jsm - JetStreamManager used for stream admin operations
     * @param js - JetStream client used for publish/subscribe
     */
    constructor(name, jsm, js) {
        this.name = name;
        this.jsm = jsm;
        this.js = js;
    }
    // Returns { name } or { name, error } when the name is empty/falsy;
    // callers reject with the error rather than throwing.
    _checkNotEmpty(name) {
        if (!name || name.length === 0) {
            return { name, error: new Error("name cannot be empty") };
        }
        return { name };
    }
    /**
     * Returns the ObjectInfo for the named object, or null when not found.
     */
    async info(name) {
        const info = await this.rawInfo(name);
        return info ? new ObjectInfoImpl(info) : null;
    }
    /**
     * Lists all non-deleted objects in the bucket by draining a history
     * watch until its initialization marker (null) is seen.
     */
    async list() {
        const buf = [];
        const iter = await this.watch({
            ignoreDeletes: true,
            includeHistory: true,
        });
        for await (const info of iter) {
            // watch will give a null when it has initialized
            // for us that is the hint we are done
            if (info === null) {
                break;
            }
            buf.push(info);
        }
        return Promise.resolve(buf);
    }
    /**
     * Fetches the raw (server-shape) object info from the last message on
     * the object's metadata subject. Returns null on a 404; rejects on
     * other errors or an empty name.
     */
    async rawInfo(name) {
        const { name: obj, error } = this._checkNotEmpty(name);
        if (error) {
            return Promise.reject(error);
        }
        const meta = this._metaSubject(obj);
        try {
            const m = await this.jsm.streams.getMessage(this.stream, {
                last_by_subj: meta,
            });
            const jc = (0, internal_1.JSONCodec)();
            const soi = jc.decode(m.data);
            // revision is the stream sequence of the metadata message
            soi.revision = m.seq;
            return soi;
        }
        catch (err) {
            if (err.code === "404") {
                return null;
            }
            return Promise.reject(err);
        }
    }
    // StreamInfo for the backing stream, or null when the stream is gone.
    async _si(opts) {
        try {
            return await this.jsm.streams.info(this.stream, opts);
        }
        catch (err) {
            const nerr = err;
            if (nerr.code === "404") {
                return null;
            }
            return Promise.reject(err);
        }
    }
    /**
     * Seals the backing stream, preventing further modifications, and
     * returns the updated status.
     */
    async seal() {
        let info = await this._si();
        if (info === null) {
            return Promise.reject(new Error("object store not found"));
        }
        info.config.sealed = true;
        info = await this.jsm.streams.update(this.stream, info.config);
        return Promise.resolve(new ObjectStoreStatusImpl(info));
    }
    /**
     * Returns the status of the object store.
     */
    async status(opts) {
        const info = await this._si(opts);
        if (info === null) {
            return Promise.reject(new Error("object store not found"));
        }
        return Promise.resolve(new ObjectStoreStatusImpl(info));
    }
    /**
     * Deletes the backing stream and all data in the bucket.
     */
    destroy() {
        return this.jsm.streams.delete(this.stream);
    }
    /**
     * Core write path: reads the ReadableStream, publishing fixed-size
     * chunks under a fresh nuid, then publishes a rollup metadata entry.
     * On success, chunks of a previous revision are purged (best effort);
     * on failure, any partial chunks are purged and the deferred rejects.
     * NOTE(review): `opts` and `meta` are mutated in place — callers in this
     * file pass throwaway objects, but external callers should be aware.
     */
    async _put(meta, rs, opts) {
        const jsopts = this.js.getOptions();
        opts = opts || { timeout: jsopts.timeout };
        opts.timeout = opts.timeout || jsopts.timeout;
        opts.previousRevision = opts.previousRevision ?? undefined;
        const { timeout, previousRevision } = opts;
        const si = this.js.nc.info;
        // chunk size is capped by the server's max payload
        const maxPayload = si?.max_payload || 1024;
        meta = meta || {};
        meta.options = meta.options || {};
        let maxChunk = meta.options?.max_chunk_size || 128 * 1024;
        maxChunk = maxChunk > maxPayload ? maxPayload : maxChunk;
        meta.options.max_chunk_size = maxChunk;
        // capture any previous revision so its chunks can be purged later
        const old = await this.info(meta.name);
        const { name: n, error } = this._checkNotEmpty(meta.name);
        if (error) {
            return Promise.reject(error);
        }
        const id = internal_1.nuid.next();
        const chunkSubj = this._chunkSubject(id);
        const metaSubj = this._metaSubject(n);
        const info = Object.assign({
            bucket: this.name,
            nuid: id,
            size: 0,
            chunks: 0,
        }, toServerObjectStoreMeta(meta));
        const d = (0, internal_1.deferred)();
        const proms = [];
        const db = new internal_1.DataBuffer();
        try {
            // a null stream (used by linkStore) means "no payload"
            const reader = rs ? rs.getReader() : null;
            const sha = new internal_1.SHA256();
            while (true) {
                const { done, value } = reader
                    ? await reader.read()
                    : { done: true, value: undefined };
                if (done) {
                    // put any partial chunk in
                    if (db.size() > 0) {
                        const payload = db.drain();
                        sha.update(payload);
                        info.chunks++;
                        info.size += payload.length;
                        proms.push(this.js.publish(chunkSubj, payload, { timeout }));
                    }
                    // wait for all the chunks to write
                    await Promise.all(proms);
                    proms.length = 0;
                    // prepare the metadata
                    info.mtime = new Date().toISOString();
                    const digest = sha.digest("base64");
                    // pad the base64 digest to match the Go client's format
                    const pad = digest.length % 3;
                    const padding = pad > 0 ? "=".repeat(pad) : "";
                    info.digest = `${exports.digestType}${digest}${padding}`;
                    info.deleted = false;
                    // trailing md for the object
                    const h = (0, internal_1.headers)();
                    if (typeof previousRevision === "number") {
                        // optimistic concurrency: fail if the meta subject moved
                        h.set(internal_2.PubHeaders.ExpectedLastSubjectSequenceHdr, `${previousRevision}`);
                    }
                    h.set(internal_2.JsHeaders.RollupHdr, internal_2.JsHeaders.RollupValueSubject);
                    // try to update the metadata
                    const pa = await this.js.publish(metaSubj, (0, internal_1.JSONCodec)().encode(info), {
                        headers: h,
                        timeout,
                    });
                    // update the revision to point to the sequence where we inserted
                    info.revision = pa.seq;
                    // if we are here, the new entry is live
                    if (old) {
                        try {
                            await this.jsm.streams.purge(this.stream, {
                                filter: `$O.${this.name}.C.${old.nuid}`,
                            });
                        }
                        catch (_err) {
                            // rejecting here, would mean send the wrong signal
                            // the update succeeded, but cleanup of old chunks failed.
                        }
                    }
                    // resolve the ObjectInfo
                    d.resolve(new ObjectInfoImpl(info));
                    // stop
                    break;
                }
                if (value) {
                    db.fill(value);
                    // flush complete chunks as soon as enough data accumulates
                    while (db.size() > maxChunk) {
                        info.chunks++;
                        info.size += maxChunk;
                        const payload = db.drain(meta.options.max_chunk_size);
                        sha.update(payload);
                        proms.push(this.js.publish(chunkSubj, payload, { timeout }));
                    }
                }
            }
        }
        catch (err) {
            // we failed, remove any partials
            await this.jsm.streams.purge(this.stream, { filter: chunkSubj });
            d.reject(err);
        }
        return d;
    }
    /**
     * Stores a complete in-memory payload as an object; a null payload is
     * treated as an empty object.
     */
    putBlob(meta, data, opts) {
        // wrap the buffer as a one-shot ReadableStream for the _put path
        function readableStreamFrom(data) {
            return new ReadableStream({
                pull(controller) {
                    controller.enqueue(data);
                    controller.close();
                },
            });
        }
        if (data === null) {
            data = new Uint8Array(0);
        }
        return this.put(meta, readableStreamFrom(data), opts);
    }
    /**
     * Stores the contents of a ReadableStream as an object. Links must be
     * created via link()/linkStore(), not put().
     */
    put(meta, rs, opts) {
        if (meta?.options?.link) {
            return Promise.reject(new Error("link cannot be set when putting the object in bucket"));
        }
        return this._put(meta, rs, opts);
    }
    /**
     * Reads the named object fully into memory. Returns null when the
     * object does not exist (or is deleted); rejects if the download
     * errored (e.g. digest mismatch).
     */
    async getBlob(name) {
        // collect the whole stream into a single Uint8Array
        async function fromReadableStream(rs) {
            const buf = new internal_1.DataBuffer();
            const reader = rs.getReader();
            while (true) {
                const { done, value } = await reader.read();
                if (done) {
                    return buf.drain();
                }
                if (value && value.length) {
                    buf.fill(value);
                }
            }
        }
        const r = await this.get(name);
        if (r === null) {
            return Promise.resolve(null);
        }
        // r.error settles when the download finishes; check it before
        // trusting the collected data
        const vs = await Promise.all([r.error, fromReadableStream(r.data)]);
        if (vs[0]) {
            return Promise.reject(vs[0]);
        }
        else {
            return Promise.resolve(vs[1]);
        }
    }
    /**
     * Retrieves the named object as an ObjectResult whose data is a
     * ReadableStream fed by an ordered consumer over the chunk subject.
     * Links are followed (possibly into another bucket). The digest is
     * recomputed during download and verified against the stored digest.
     */
    async get(name) {
        const info = await this.rawInfo(name);
        if (info === null) {
            return Promise.resolve(null);
        }
        if (info.deleted) {
            return Promise.resolve(null);
        }
        if (info.options && info.options.link) {
            const ln = info.options.link.name || "";
            if (ln === "") {
                throw new Error("link is a bucket");
            }
            // follow the link, opening the other bucket if necessary
            const os = info.options.link.bucket !== this.name
                ? await ObjectStoreImpl.create(this.js, info.options.link.bucket)
                : this;
            return os.get(ln);
        }
        const d = (0, internal_1.deferred)();
        const r = {
            info: new ObjectInfoImpl(info),
            error: d,
        };
        // empty objects short-circuit: no consumer needed
        if (info.size === 0) {
            r.data = emptyReadableStream();
            d.resolve(null);
            return Promise.resolve(r);
        }
        let controller;
        const oc = (0, internal_2.consumerOpts)();
        oc.orderedConsumer();
        const sha = new internal_1.SHA256();
        const subj = `$O.${this.name}.C.${info.nuid}`;
        const sub = await this.js.subscribe(subj, oc);
        // pump chunk messages into the ReadableStream controller; when the
        // consumer reports no pending messages the object is complete
        (async () => {
            for await (const jm of sub) {
                if (jm.data.length > 0) {
                    sha.update(jm.data);
                    controller.enqueue(jm.data);
                }
                if (jm.info.pending === 0) {
                    const hash = sha.digest("base64");
                    // go pads the hash - which should be multiple of 3 - otherwise pads with '='
                    const pad = hash.length % 3;
                    const padding = pad > 0 ? "=".repeat(pad) : "";
                    const digest = `${exports.digestType}${hash}${padding}`;
                    if (digest !== info.digest) {
                        controller.error(new Error(`received a corrupt object, digests do not match received: ${info.digest} calculated ${digest}`));
                    }
                    else {
                        controller.close();
                    }
                    sub.unsubscribe();
                }
            }
        })()
            .then(() => {
            d.resolve();
        })
            .catch((err) => {
            controller.error(err);
            d.reject(err);
        });
        r.data = new ReadableStream({
            start(c) {
                // capture the controller so the pump above can feed it
                controller = c;
            },
            cancel() {
                sub.unsubscribe();
            },
        });
        return r;
    }
    /**
     * Creates a link entry pointing at another bucket (a bucket link has
     * no object name).
     */
    linkStore(name, bucket) {
        if (!(bucket instanceof ObjectStoreImpl)) {
            return Promise.reject("bucket required");
        }
        const osi = bucket;
        const { name: n, error } = this._checkNotEmpty(name);
        if (error) {
            return Promise.reject(error);
        }
        const meta = {
            name: n,
            options: { link: { bucket: osi.name } },
        };
        // null stream: a link entry carries no chunk data
        return this._put(meta, null);
    }
    /**
     * Creates a link entry pointing at an existing object. Rejects when the
     * source is deleted, the source is itself a link, or a live object
     * already exists under the new name.
     */
    async link(name, info) {
        const { name: n, error } = this._checkNotEmpty(name);
        if (error) {
            return Promise.reject(error);
        }
        if (info.deleted) {
            return Promise.reject(new Error("src object is deleted"));
        }
        if (info.isLink()) {
            return Promise.reject(new Error("src object is a link"));
        }
        const dest = await this.rawInfo(name);
        if (dest !== null && !dest.deleted) {
            return Promise.reject(new Error("an object already exists with that name"));
        }
        const link = { bucket: info.bucket, name: info.name };
        const mm = {
            name: n,
            bucket: info.bucket,
            options: { link: link },
        };
        await this.js.publish(this._metaSubject(name), JSON.stringify(mm));
        const i = await this.info(name);
        return Promise.resolve(i);
    }
    /**
     * Soft-deletes the named object: writes a rollup metadata entry marked
     * deleted (zero size/chunks/digest) and purges the chunk messages.
     * Resolves with { purged: 0, success: false } if the object is unknown.
     */
    async delete(name) {
        const info = await this.rawInfo(name);
        if (info === null) {
            return Promise.resolve({ purged: 0, success: false });
        }
        info.deleted = true;
        info.size = 0;
        info.chunks = 0;
        info.digest = "";
        const jc = (0, internal_1.JSONCodec)();
        const h = (0, internal_1.headers)();
        h.set(internal_2.JsHeaders.RollupHdr, internal_2.JsHeaders.RollupValueSubject);
        await this.js.publish(this._metaSubject(info.name), jc.encode(info), {
            headers: h,
        });
        return this.jsm.streams.purge(this.stream, {
            filter: this._chunkSubject(info.nuid),
        });
    }
    /**
     * Updates the metadata (possibly including the name) of an existing,
     * non-deleted object. When renaming, rejects if a live object already
     * uses the target name, and purges the old metadata subject after the
     * new entry is published.
     */
    async update(name, meta = {}) {
        const info = await this.rawInfo(name);
        if (info === null) {
            return Promise.reject(new Error("object not found"));
        }
        if (info.deleted) {
            return Promise.reject(new Error("cannot update meta for a deleted object"));
        }
        meta.name = meta.name ?? info.name;
        const { name: n, error } = this._checkNotEmpty(meta.name);
        if (error) {
            return Promise.reject(error);
        }
        if (name !== meta.name) {
            const i = await this.info(meta.name);
            if (i && !i.deleted) {
                return Promise.reject(new Error("an object already exists with that name"));
            }
        }
        meta.name = n;
        const ii = Object.assign({}, info, toServerObjectStoreMeta(meta));
        // if the name changed, delete the old meta
        const ack = await this.js.publish(this._metaSubject(ii.name), JSON.stringify(ii));
        if (name !== meta.name) {
            await this.jsm.streams.purge(this.stream, {
                filter: this._metaSubject(name),
            });
        }
        return Promise.resolve(ack);
    }
    /**
     * Watches the bucket's metadata subjects, yielding ObjectInfo entries.
     * A null is pushed once the watch has delivered existing state (or
     * immediately when there is no history / history is not requested).
     * NOTE(review): `opts` is mutated in place with defaulted flags.
     */
    async watch(opts = {}) {
        opts.includeHistory = opts.includeHistory ?? false;
        opts.ignoreDeletes = opts.ignoreDeletes ?? false;
        let initialized = false;
        const qi = new internal_1.QueuedIteratorImpl();
        const subj = this._metaSubjectAll();
        try {
            // probe for any existing metadata; 404 means the bucket is empty
            await this.jsm.streams.getMessage(this.stream, { last_by_subj: subj });
        }
        catch (err) {
            if (err.code === "404") {
                qi.push(null);
                initialized = true;
            }
            else {
                qi.stop(err);
            }
        }
        const jc = (0, internal_1.JSONCodec)();
        const copts = (0, internal_2.consumerOpts)();
        copts.orderedConsumer();
        if (opts.includeHistory) {
            copts.deliverLastPerSubject();
        }
        else {
            // FIXME: Go's implementation doesn't seem correct - if history is not desired
            // the watch should only be giving notifications on new entries
            initialized = true;
            copts.deliverNew();
        }
        copts.callback((err, jm) => {
            if (err) {
                qi.stop(err);
                return;
            }
            if (jm !== null) {
                const oi = jc.decode(jm.data);
                if (oi.deleted && opts.ignoreDeletes === true) {
                    // do nothing
                }
                else {
                    qi.push(oi);
                }
                // pending === 0 marks the end of the initial snapshot
                if (jm.info?.pending === 0 && !initialized) {
                    initialized = true;
                    qi.push(null);
                }
            }
        });
        const sub = await this.js.subscribe(subj, copts);
        qi._data = sub;
        // tear down the subscription when the iterator closes, and vice versa
        qi.iterClosed.then(() => {
            sub.unsubscribe();
        });
        sub.closed.then(() => {
            qi.stop();
        }).catch((err) => {
            qi.stop(err);
        });
        return qi;
    }
    // Subject carrying the chunk messages for the object with the given nuid.
    _chunkSubject(id) {
        return `$O.${this.name}.C.${id}`;
    }
    // Subject carrying the metadata for the named object; the name is
    // base64url-encoded so arbitrary names are subject-safe.
    _metaSubject(n) {
        return `$O.${this.name}.M.${internal_1.Base64UrlPaddedCodec.encode(n)}`;
    }
    // Wildcard subject matching all metadata entries in the bucket.
    _metaSubjectAll() {
        return `$O.${this.name}.M.>`;
    }
    /**
     * Creates the backing stream if it does not exist, configured for
     * object-store semantics (direct gets, rollup headers, discard-new).
     * NOTE(review): `opts.ttl` is removed from the caller's object.
     */
    async init(opts = {}) {
        try {
            this.stream = objectStoreStreamName(this.name);
        }
        catch (err) {
            return Promise.reject(err);
        }
        const max_age = opts?.ttl || 0;
        delete opts.ttl;
        // pacify the tsc compiler downstream
        const sc = Object.assign({ max_age }, opts);
        sc.name = this.stream;
        sc.allow_direct = true;
        sc.allow_rollup_hdrs = true;
        sc.discard = internal_2.DiscardPolicy.New;
        sc.subjects = [`$O.${this.name}.C.>`, `$O.${this.name}.M.>`];
        if (opts.placement) {
            sc.placement = opts.placement;
        }
        if (opts.metadata) {
            sc.metadata = opts.metadata;
        }
        if (typeof opts.compression === "boolean") {
            sc.compression = opts.compression
                ? internal_2.StoreCompression.S2
                : internal_2.StoreCompression.None;
        }
        try {
            await this.jsm.streams.info(sc.name);
        }
        catch (err) {
            // only create the stream when the lookup says it is missing
            if (err.message === "stream not found") {
                await this.jsm.streams.add(sc);
            }
        }
    }
    /**
     * Factory: builds an ObjectStoreImpl bound to the given JetStream
     * client and ensures the backing stream exists.
     */
    static async create(js, name, opts = {}) {
        const jsm = await js.jetstreamManager();
        const os = new ObjectStoreImpl(name, jsm, js);
        await os.init(opts);
        return Promise.resolve(os);
    }
}
|
|
729
|
+
exports.ObjectStoreImpl = ObjectStoreImpl;
|
|
730
|
+
//# sourceMappingURL=objectstore.js.map
|