hydrousdb 2.0.1 → 3.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +1 -1
- package/README.md +911 -515
- package/dist/index.cjs +1647 -0
- package/dist/index.cjs.map +1 -0
- package/dist/index.d.cts +1540 -0
- package/dist/index.d.ts +1339 -510
- package/dist/index.js +1427 -1100
- package/dist/index.js.map +1 -1
- package/package.json +31 -14
- package/dist/index.d.mts +0 -711
- package/dist/index.mjs +0 -1291
- package/dist/index.mjs.map +0 -1
package/dist/index.mjs
DELETED
|
@@ -1,1291 +0,0 @@
|
|
|
1
|
-
// src/utils/errors.ts
|
|
2
|
-
var HydrousSDKError = class extends Error {
|
|
3
|
-
constructor(message, code = "SDK_ERROR", status) {
|
|
4
|
-
super(message);
|
|
5
|
-
this.name = "HydrousSDKError";
|
|
6
|
-
this.code = code;
|
|
7
|
-
this.status = status;
|
|
8
|
-
}
|
|
9
|
-
};
|
|
10
|
-
function toHydrousError(err) {
|
|
11
|
-
if (err instanceof HydrousSDKError) {
|
|
12
|
-
return { message: err.message, code: err.code, status: err.status };
|
|
13
|
-
}
|
|
14
|
-
if (err instanceof Error) {
|
|
15
|
-
return { message: err.message, code: "UNKNOWN_ERROR" };
|
|
16
|
-
}
|
|
17
|
-
return { message: String(err), code: "UNKNOWN_ERROR" };
|
|
18
|
-
}
|
|
19
|
-
function isHydrousError(err) {
|
|
20
|
-
return err instanceof HydrousSDKError;
|
|
21
|
-
}
|
|
22
|
-
|
|
23
|
-
// src/utils/http.ts
|
|
24
|
-
async function parseResponse(res) {
|
|
25
|
-
let body;
|
|
26
|
-
try {
|
|
27
|
-
body = await res.json();
|
|
28
|
-
} catch (e) {
|
|
29
|
-
if (!res.ok) {
|
|
30
|
-
throw new HydrousSDKError(`HTTP ${res.status}`, "HTTP_ERROR", res.status);
|
|
31
|
-
}
|
|
32
|
-
return void 0;
|
|
33
|
-
}
|
|
34
|
-
if (!res.ok) {
|
|
35
|
-
const err = body;
|
|
36
|
-
throw new HydrousSDKError(
|
|
37
|
-
err.error || err.message || `HTTP ${res.status}`,
|
|
38
|
-
err.code || "HTTP_ERROR",
|
|
39
|
-
res.status
|
|
40
|
-
);
|
|
41
|
-
}
|
|
42
|
-
return body;
|
|
43
|
-
}
|
|
44
|
-
function buildUrl(base, path, params) {
|
|
45
|
-
const url = new URL(path, base.endsWith("/") ? base : base + "/");
|
|
46
|
-
if (params) {
|
|
47
|
-
for (const [k, v] of Object.entries(params)) {
|
|
48
|
-
if (v !== void 0 && v !== null) {
|
|
49
|
-
url.searchParams.set(k, String(v));
|
|
50
|
-
}
|
|
51
|
-
}
|
|
52
|
-
}
|
|
53
|
-
return url.toString();
|
|
54
|
-
}
|
|
55
|
-
function mergeHeaders(defaults, overrides) {
|
|
56
|
-
return { ...defaults, ...overrides };
|
|
57
|
-
}
|
|
58
|
-
async function readSSEStream(response, onEvent) {
|
|
59
|
-
if (!response.body) return;
|
|
60
|
-
const reader = response.body.getReader();
|
|
61
|
-
const decoder = new TextDecoder();
|
|
62
|
-
let buf = "";
|
|
63
|
-
const flush = (chunk) => {
|
|
64
|
-
var _a;
|
|
65
|
-
buf += chunk;
|
|
66
|
-
const blocks = buf.split("\n\n");
|
|
67
|
-
buf = (_a = blocks.pop()) != null ? _a : "";
|
|
68
|
-
for (const block of blocks) {
|
|
69
|
-
if (!block.trim()) continue;
|
|
70
|
-
let eventType = "message";
|
|
71
|
-
let dataLine = null;
|
|
72
|
-
for (const line of block.split("\n")) {
|
|
73
|
-
if (line.startsWith("event:")) eventType = line.slice(6).trim();
|
|
74
|
-
if (line.startsWith("data:")) dataLine = line.slice(5).trim();
|
|
75
|
-
}
|
|
76
|
-
if (dataLine === null) continue;
|
|
77
|
-
try {
|
|
78
|
-
onEvent(eventType, JSON.parse(dataLine));
|
|
79
|
-
} catch (e) {
|
|
80
|
-
}
|
|
81
|
-
}
|
|
82
|
-
};
|
|
83
|
-
while (true) {
|
|
84
|
-
const { done, value } = await reader.read();
|
|
85
|
-
if (done) break;
|
|
86
|
-
flush(decoder.decode(value, { stream: true }));
|
|
87
|
-
}
|
|
88
|
-
if (buf.trim()) flush("");
|
|
89
|
-
}
|
|
90
|
-
function xhrUpload(url, body, headers, onXhrProgress) {
|
|
91
|
-
return new Promise((resolve, reject) => {
|
|
92
|
-
const xhr = new XMLHttpRequest();
|
|
93
|
-
xhr.open("POST", url);
|
|
94
|
-
for (const [key, val] of Object.entries(headers)) {
|
|
95
|
-
xhr.setRequestHeader(key, val);
|
|
96
|
-
}
|
|
97
|
-
xhr.responseType = "text";
|
|
98
|
-
if (onXhrProgress) {
|
|
99
|
-
xhr.upload.onprogress = (e) => {
|
|
100
|
-
if (e.lengthComputable) onXhrProgress(e.loaded, e.total);
|
|
101
|
-
};
|
|
102
|
-
}
|
|
103
|
-
xhr.onload = () => {
|
|
104
|
-
var _a;
|
|
105
|
-
if (xhr.status >= 200 && xhr.status < 300) {
|
|
106
|
-
resolve(xhr.responseText);
|
|
107
|
-
} else {
|
|
108
|
-
try {
|
|
109
|
-
const d = JSON.parse(xhr.responseText);
|
|
110
|
-
reject(new HydrousSDKError((_a = d.error) != null ? _a : `HTTP ${xhr.status}`, "HTTP_ERROR", xhr.status));
|
|
111
|
-
} catch (e) {
|
|
112
|
-
reject(new HydrousSDKError(`HTTP ${xhr.status}`, "HTTP_ERROR", xhr.status));
|
|
113
|
-
}
|
|
114
|
-
}
|
|
115
|
-
};
|
|
116
|
-
xhr.onerror = () => reject(new HydrousSDKError("Network error", "NETWORK_ERROR"));
|
|
117
|
-
xhr.onabort = () => reject(new HydrousSDKError("Upload aborted", "UPLOAD_ABORTED"));
|
|
118
|
-
xhr.ontimeout = () => reject(new HydrousSDKError("Upload timed out", "UPLOAD_TIMEOUT"));
|
|
119
|
-
xhr.send(body);
|
|
120
|
-
});
|
|
121
|
-
}
|
|
122
|
-
function parseSSEText(text, onEvent) {
|
|
123
|
-
const blocks = text.split("\n\n");
|
|
124
|
-
for (const block of blocks) {
|
|
125
|
-
if (!block.trim()) continue;
|
|
126
|
-
let eventType = "message";
|
|
127
|
-
let dataLine = null;
|
|
128
|
-
for (const line of block.split("\n")) {
|
|
129
|
-
if (line.startsWith("event:")) eventType = line.slice(6).trim();
|
|
130
|
-
if (line.startsWith("data:")) dataLine = line.slice(5).trim();
|
|
131
|
-
}
|
|
132
|
-
if (dataLine === null) continue;
|
|
133
|
-
try {
|
|
134
|
-
onEvent(eventType, JSON.parse(dataLine));
|
|
135
|
-
} catch (e) {
|
|
136
|
-
}
|
|
137
|
-
}
|
|
138
|
-
}
|
|
139
|
-
|
|
140
|
-
// src/auth/client.ts
|
|
141
|
-
var AuthClient = class {
|
|
142
|
-
constructor(config) {
|
|
143
|
-
this.session = null;
|
|
144
|
-
this.baseUrl = config.url;
|
|
145
|
-
this.headers = {
|
|
146
|
-
"Content-Type": "application/json",
|
|
147
|
-
"Authorization": `Bearer ${config.apiKey}`
|
|
148
|
-
};
|
|
149
|
-
}
|
|
150
|
-
// ─── SIGN UP ───────────────────────────────────────────────────────────────
|
|
151
|
-
/**
|
|
152
|
-
* Create a new user account and return a session.
|
|
153
|
-
*
|
|
154
|
-
* @example
|
|
155
|
-
* const { data, error } = await hydrous.auth.signUp({
|
|
156
|
-
* email: 'user@example.com',
|
|
157
|
-
* password: 'supersecret',
|
|
158
|
-
* });
|
|
159
|
-
*/
|
|
160
|
-
async signUp(options) {
|
|
161
|
-
try {
|
|
162
|
-
const url = buildUrl(this.baseUrl, "auth/signup");
|
|
163
|
-
const res = await fetch(url, {
|
|
164
|
-
method: "POST",
|
|
165
|
-
headers: this.headers,
|
|
166
|
-
body: JSON.stringify(options)
|
|
167
|
-
});
|
|
168
|
-
const json = await parseResponse(res);
|
|
169
|
-
this.session = json.data;
|
|
170
|
-
return { data: json.data, error: null };
|
|
171
|
-
} catch (err) {
|
|
172
|
-
return { data: null, error: toHydrousError(err) };
|
|
173
|
-
}
|
|
174
|
-
}
|
|
175
|
-
// ─── SIGN IN ───────────────────────────────────────────────────────────────
|
|
176
|
-
/**
|
|
177
|
-
* Sign in with email and password.
|
|
178
|
-
*
|
|
179
|
-
* @example
|
|
180
|
-
* const { data, error } = await hydrous.auth.signIn({
|
|
181
|
-
* email: 'user@example.com',
|
|
182
|
-
* password: 'supersecret',
|
|
183
|
-
* });
|
|
184
|
-
* if (data) console.log('Signed in as', data.user.email);
|
|
185
|
-
*/
|
|
186
|
-
async signIn(options) {
|
|
187
|
-
try {
|
|
188
|
-
const url = buildUrl(this.baseUrl, "auth/signin");
|
|
189
|
-
const res = await fetch(url, {
|
|
190
|
-
method: "POST",
|
|
191
|
-
headers: this.headers,
|
|
192
|
-
body: JSON.stringify(options)
|
|
193
|
-
});
|
|
194
|
-
const json = await parseResponse(res);
|
|
195
|
-
this.session = json.data;
|
|
196
|
-
return { data: json.data, error: null };
|
|
197
|
-
} catch (err) {
|
|
198
|
-
return { data: null, error: toHydrousError(err) };
|
|
199
|
-
}
|
|
200
|
-
}
|
|
201
|
-
// ─── SIGN OUT ──────────────────────────────────────────────────────────────
|
|
202
|
-
/**
|
|
203
|
-
* Sign out the current user and invalidate their session.
|
|
204
|
-
*/
|
|
205
|
-
async signOut() {
|
|
206
|
-
try {
|
|
207
|
-
const url = buildUrl(this.baseUrl, "auth/signout");
|
|
208
|
-
const res = await fetch(url, {
|
|
209
|
-
method: "POST",
|
|
210
|
-
headers: mergeHeaders(this.headers, this._sessionHeader())
|
|
211
|
-
});
|
|
212
|
-
await parseResponse(res);
|
|
213
|
-
this.session = null;
|
|
214
|
-
return { data: void 0, error: null };
|
|
215
|
-
} catch (err) {
|
|
216
|
-
return { data: null, error: toHydrousError(err) };
|
|
217
|
-
}
|
|
218
|
-
}
|
|
219
|
-
// ─── GET USER ──────────────────────────────────────────────────────────────
|
|
220
|
-
/** Return the currently authenticated user, or null if not signed in. */
|
|
221
|
-
async getUser() {
|
|
222
|
-
try {
|
|
223
|
-
const url = buildUrl(this.baseUrl, "auth/user");
|
|
224
|
-
const res = await fetch(url, {
|
|
225
|
-
headers: mergeHeaders(this.headers, this._sessionHeader())
|
|
226
|
-
});
|
|
227
|
-
const json = await parseResponse(res);
|
|
228
|
-
return { data: json.data, error: null };
|
|
229
|
-
} catch (err) {
|
|
230
|
-
return { data: null, error: toHydrousError(err) };
|
|
231
|
-
}
|
|
232
|
-
}
|
|
233
|
-
// ─── REFRESH TOKEN ────────────────────────────────────────────────────────
|
|
234
|
-
/**
|
|
235
|
-
* Refresh the access token using the stored refresh token.
|
|
236
|
-
* Called automatically by the SDK when a 401 is received.
|
|
237
|
-
*/
|
|
238
|
-
async refreshSession() {
|
|
239
|
-
var _a;
|
|
240
|
-
if (!((_a = this.session) == null ? void 0 : _a.refreshToken)) {
|
|
241
|
-
return { data: null, error: { message: "No session", code: "NO_SESSION" } };
|
|
242
|
-
}
|
|
243
|
-
try {
|
|
244
|
-
const url = buildUrl(this.baseUrl, "auth/refresh");
|
|
245
|
-
const res = await fetch(url, {
|
|
246
|
-
method: "POST",
|
|
247
|
-
headers: this.headers,
|
|
248
|
-
body: JSON.stringify({ refreshToken: this.session.refreshToken })
|
|
249
|
-
});
|
|
250
|
-
const json = await parseResponse(res);
|
|
251
|
-
this.session = json.data;
|
|
252
|
-
return { data: json.data, error: null };
|
|
253
|
-
} catch (err) {
|
|
254
|
-
return { data: null, error: toHydrousError(err) };
|
|
255
|
-
}
|
|
256
|
-
}
|
|
257
|
-
/** Return the current in-memory session (may be null). */
|
|
258
|
-
getSession() {
|
|
259
|
-
return this.session;
|
|
260
|
-
}
|
|
261
|
-
_sessionHeader() {
|
|
262
|
-
var _a;
|
|
263
|
-
return ((_a = this.session) == null ? void 0 : _a.accessToken) ? { "X-Session-Token": this.session.accessToken } : {};
|
|
264
|
-
}
|
|
265
|
-
};
|
|
266
|
-
|
|
267
|
-
// src/utils/query.ts
|
|
268
|
-
function serialiseQuery(opts = {}) {
|
|
269
|
-
var _a;
|
|
270
|
-
const params = {};
|
|
271
|
-
if (opts.limit !== void 0) params["limit"] = String(opts.limit);
|
|
272
|
-
if (opts.offset !== void 0) params["offset"] = String(opts.offset);
|
|
273
|
-
if (opts.select && opts.select.length > 0) {
|
|
274
|
-
params["select"] = opts.select.join(",");
|
|
275
|
-
}
|
|
276
|
-
if (opts.orderBy) {
|
|
277
|
-
params["orderBy"] = opts.orderBy.field;
|
|
278
|
-
params["direction"] = (_a = opts.orderBy.direction) != null ? _a : "asc";
|
|
279
|
-
}
|
|
280
|
-
const filters = opts.where ? Array.isArray(opts.where) ? opts.where : [opts.where] : [];
|
|
281
|
-
if (filters.length > 0) {
|
|
282
|
-
params["where"] = JSON.stringify(filters);
|
|
283
|
-
}
|
|
284
|
-
return params;
|
|
285
|
-
}
|
|
286
|
-
function eq(field, value) {
|
|
287
|
-
return { field, operator: "eq", value };
|
|
288
|
-
}
|
|
289
|
-
function neq(field, value) {
|
|
290
|
-
return { field, operator: "neq", value };
|
|
291
|
-
}
|
|
292
|
-
function gt(field, value) {
|
|
293
|
-
return { field, operator: "gt", value };
|
|
294
|
-
}
|
|
295
|
-
function lt(field, value) {
|
|
296
|
-
return { field, operator: "lt", value };
|
|
297
|
-
}
|
|
298
|
-
function inArray(field, value) {
|
|
299
|
-
return { field, operator: "in", value };
|
|
300
|
-
}
|
|
301
|
-
|
|
302
|
-
// src/records/client.ts
|
|
303
|
-
var RecordsClient = class {
|
|
304
|
-
constructor(config) {
|
|
305
|
-
this.baseUrl = config.url;
|
|
306
|
-
this.headers = {
|
|
307
|
-
"Content-Type": "application/json",
|
|
308
|
-
"Authorization": `Bearer ${config.apiKey}`
|
|
309
|
-
};
|
|
310
|
-
}
|
|
311
|
-
// ─── SELECT ────────────────────────────────────────────────────────────────
|
|
312
|
-
/**
|
|
313
|
-
* Query records from a collection.
|
|
314
|
-
*
|
|
315
|
-
* @param collection - Collection name (e.g. "users")
|
|
316
|
-
* @param options - Filters, ordering, pagination
|
|
317
|
-
*
|
|
318
|
-
* @example
|
|
319
|
-
* const { data, error } = await hydrous.records.select('users', {
|
|
320
|
-
* where: { field: 'role', operator: 'eq', value: 'admin' },
|
|
321
|
-
* orderBy: { field: 'createdAt', direction: 'desc' },
|
|
322
|
-
* limit: 20,
|
|
323
|
-
* });
|
|
324
|
-
*/
|
|
325
|
-
async select(collection, options = {}) {
|
|
326
|
-
try {
|
|
327
|
-
const params = serialiseQuery(options);
|
|
328
|
-
const url = buildUrl(this.baseUrl, `records/${collection}`, params);
|
|
329
|
-
const res = await fetch(url, { headers: this.headers });
|
|
330
|
-
const json = await parseResponse(res);
|
|
331
|
-
return { data: json.data, count: json.count, error: null };
|
|
332
|
-
} catch (err) {
|
|
333
|
-
return { data: [], count: 0, error: toHydrousError(err) };
|
|
334
|
-
}
|
|
335
|
-
}
|
|
336
|
-
// ─── GET ONE ───────────────────────────────────────────────────────────────
|
|
337
|
-
/**
|
|
338
|
-
* Fetch a single record by its ID.
|
|
339
|
-
*
|
|
340
|
-
* @example
|
|
341
|
-
* const { data, error } = await hydrous.records.get('users', 'user_abc123');
|
|
342
|
-
*/
|
|
343
|
-
async get(collection, id) {
|
|
344
|
-
try {
|
|
345
|
-
const url = buildUrl(this.baseUrl, `records/${collection}/${id}`);
|
|
346
|
-
const res = await fetch(url, { headers: this.headers });
|
|
347
|
-
const json = await parseResponse(res);
|
|
348
|
-
return { data: json.data, error: null };
|
|
349
|
-
} catch (err) {
|
|
350
|
-
return { data: null, error: toHydrousError(err) };
|
|
351
|
-
}
|
|
352
|
-
}
|
|
353
|
-
// ─── INSERT ────────────────────────────────────────────────────────────────
|
|
354
|
-
/**
|
|
355
|
-
* Insert one or more records into a collection.
|
|
356
|
-
*
|
|
357
|
-
* @param collection - Collection name
|
|
358
|
-
* @param payload - A single record object or an array of record objects
|
|
359
|
-
*
|
|
360
|
-
* @example
|
|
361
|
-
* // Single insert
|
|
362
|
-
* const { data, error } = await hydrous.records.insert('users', {
|
|
363
|
-
* name: 'Alice', email: 'alice@example.com'
|
|
364
|
-
* });
|
|
365
|
-
*
|
|
366
|
-
* // Bulk insert
|
|
367
|
-
* const { data, error } = await hydrous.records.insert('users', [
|
|
368
|
-
* { name: 'Alice' }, { name: 'Bob' }
|
|
369
|
-
* ]);
|
|
370
|
-
*/
|
|
371
|
-
async insert(collection, payload) {
|
|
372
|
-
try {
|
|
373
|
-
const url = buildUrl(this.baseUrl, `records/${collection}`);
|
|
374
|
-
const res = await fetch(url, {
|
|
375
|
-
method: "POST",
|
|
376
|
-
headers: this.headers,
|
|
377
|
-
body: JSON.stringify(payload)
|
|
378
|
-
});
|
|
379
|
-
const json = await parseResponse(res);
|
|
380
|
-
return { data: json.data, count: json.count, error: null };
|
|
381
|
-
} catch (err) {
|
|
382
|
-
return { data: [], count: 0, error: toHydrousError(err) };
|
|
383
|
-
}
|
|
384
|
-
}
|
|
385
|
-
// ─── UPDATE ────────────────────────────────────────────────────────────────
|
|
386
|
-
/**
|
|
387
|
-
* Update a record by ID.
|
|
388
|
-
*
|
|
389
|
-
* @example
|
|
390
|
-
* const { data, error } = await hydrous.records.update('users', 'user_abc123', {
|
|
391
|
-
* name: 'Alice Smith'
|
|
392
|
-
* });
|
|
393
|
-
*/
|
|
394
|
-
async update(collection, id, payload) {
|
|
395
|
-
try {
|
|
396
|
-
const url = buildUrl(this.baseUrl, `records/${collection}/${id}`);
|
|
397
|
-
const res = await fetch(url, {
|
|
398
|
-
method: "PATCH",
|
|
399
|
-
headers: this.headers,
|
|
400
|
-
body: JSON.stringify(payload)
|
|
401
|
-
});
|
|
402
|
-
const json = await parseResponse(res);
|
|
403
|
-
return { data: json.data, error: null };
|
|
404
|
-
} catch (err) {
|
|
405
|
-
return { data: null, error: toHydrousError(err) };
|
|
406
|
-
}
|
|
407
|
-
}
|
|
408
|
-
// ─── DELETE ────────────────────────────────────────────────────────────────
|
|
409
|
-
/**
|
|
410
|
-
* Delete a record by ID.
|
|
411
|
-
*
|
|
412
|
-
* @example
|
|
413
|
-
* const { error } = await hydrous.records.delete('users', 'user_abc123');
|
|
414
|
-
*/
|
|
415
|
-
async delete(collection, id) {
|
|
416
|
-
try {
|
|
417
|
-
const url = buildUrl(this.baseUrl, `records/${collection}/${id}`);
|
|
418
|
-
const res = await fetch(url, { method: "DELETE", headers: this.headers });
|
|
419
|
-
await parseResponse(res);
|
|
420
|
-
return { data: void 0, error: null };
|
|
421
|
-
} catch (err) {
|
|
422
|
-
return { data: null, error: toHydrousError(err) };
|
|
423
|
-
}
|
|
424
|
-
}
|
|
425
|
-
};
|
|
426
|
-
|
|
427
|
-
// src/analytics/client.ts
|
|
428
|
-
var AnalyticsClient = class {
|
|
429
|
-
constructor(config) {
|
|
430
|
-
this.baseUrl = config.url;
|
|
431
|
-
this.headers = {
|
|
432
|
-
"Content-Type": "application/json",
|
|
433
|
-
"Authorization": `Bearer ${config.apiKey}`
|
|
434
|
-
};
|
|
435
|
-
}
|
|
436
|
-
// ─── TRACK ────────────────────────────────────────────────────────────────
|
|
437
|
-
/**
|
|
438
|
-
* Track an analytics event.
|
|
439
|
-
*
|
|
440
|
-
* @example
|
|
441
|
-
* await hydrous.analytics.track({
|
|
442
|
-
* event: 'page_view',
|
|
443
|
-
* properties: { page: '/home', referrer: 'google.com' },
|
|
444
|
-
* userId: 'user_abc123',
|
|
445
|
-
* });
|
|
446
|
-
*/
|
|
447
|
-
async track(options) {
|
|
448
|
-
var _a;
|
|
449
|
-
try {
|
|
450
|
-
const url = buildUrl(this.baseUrl, "analytics/track");
|
|
451
|
-
const res = await fetch(url, {
|
|
452
|
-
method: "POST",
|
|
453
|
-
headers: this.headers,
|
|
454
|
-
body: JSON.stringify({
|
|
455
|
-
...options,
|
|
456
|
-
timestamp: (_a = options.timestamp) != null ? _a : Date.now()
|
|
457
|
-
})
|
|
458
|
-
});
|
|
459
|
-
await parseResponse(res);
|
|
460
|
-
return { data: void 0, error: null };
|
|
461
|
-
} catch (err) {
|
|
462
|
-
return { data: null, error: toHydrousError(err) };
|
|
463
|
-
}
|
|
464
|
-
}
|
|
465
|
-
// ─── QUERY ────────────────────────────────────────────────────────────────
|
|
466
|
-
/**
|
|
467
|
-
* Query recorded analytics events.
|
|
468
|
-
*
|
|
469
|
-
* @example
|
|
470
|
-
* const { data } = await hydrous.analytics.query({
|
|
471
|
-
* event: 'page_view',
|
|
472
|
-
* from: '2024-01-01',
|
|
473
|
-
* to: '2024-01-31',
|
|
474
|
-
* limit: 100,
|
|
475
|
-
* });
|
|
476
|
-
*/
|
|
477
|
-
async query(options = {}) {
|
|
478
|
-
try {
|
|
479
|
-
const params = {};
|
|
480
|
-
if (options.event) params["event"] = options.event;
|
|
481
|
-
if (options.from) params["from"] = options.from;
|
|
482
|
-
if (options.to) params["to"] = options.to;
|
|
483
|
-
if (options.limit) params["limit"] = String(options.limit);
|
|
484
|
-
if (options.groupBy) params["groupBy"] = options.groupBy;
|
|
485
|
-
const url = buildUrl(this.baseUrl, "analytics/events", params);
|
|
486
|
-
const res = await fetch(url, { headers: this.headers });
|
|
487
|
-
const json = await parseResponse(res);
|
|
488
|
-
return { data: json.data, count: json.count, error: null };
|
|
489
|
-
} catch (err) {
|
|
490
|
-
return { data: [], count: 0, error: toHydrousError(err) };
|
|
491
|
-
}
|
|
492
|
-
}
|
|
493
|
-
// ─── BATCH TRACK ─────────────────────────────────────────────────────────
|
|
494
|
-
/**
|
|
495
|
-
* Track multiple events in a single request (more efficient than
|
|
496
|
-
* calling `track` in a loop).
|
|
497
|
-
*
|
|
498
|
-
* @example
|
|
499
|
-
* await hydrous.analytics.trackBatch([
|
|
500
|
-
* { event: 'signup', userId: 'u1' },
|
|
501
|
-
* { event: 'onboarded', userId: 'u1' },
|
|
502
|
-
* ]);
|
|
503
|
-
*/
|
|
504
|
-
async trackBatch(events) {
|
|
505
|
-
try {
|
|
506
|
-
const url = buildUrl(this.baseUrl, "analytics/track/batch");
|
|
507
|
-
const stamped = events.map((e) => {
|
|
508
|
-
var _a;
|
|
509
|
-
return {
|
|
510
|
-
...e,
|
|
511
|
-
timestamp: (_a = e.timestamp) != null ? _a : Date.now()
|
|
512
|
-
};
|
|
513
|
-
});
|
|
514
|
-
const res = await fetch(url, {
|
|
515
|
-
method: "POST",
|
|
516
|
-
headers: this.headers,
|
|
517
|
-
body: JSON.stringify({ events: stamped })
|
|
518
|
-
});
|
|
519
|
-
await parseResponse(res);
|
|
520
|
-
return { data: void 0, error: null };
|
|
521
|
-
} catch (err) {
|
|
522
|
-
return { data: null, error: toHydrousError(err) };
|
|
523
|
-
}
|
|
524
|
-
}
|
|
525
|
-
};
|
|
526
|
-
|
|
527
|
-
// src/storage/client.ts
|
|
528
|
-
var isBrowser = typeof window !== "undefined" && typeof XMLHttpRequest !== "undefined";
|
|
529
|
-
function bucketFromKey(key) {
|
|
530
|
-
return encodeURIComponent(key);
|
|
531
|
-
}
|
|
532
|
-
function storageUrl(base, bucketKey, path) {
|
|
533
|
-
const bucket = bucketFromKey(bucketKey);
|
|
534
|
-
return `${base.replace(/\/$/, "")}/storage/${bucket}/${path.replace(/^\//, "")}`;
|
|
535
|
-
}
|
|
536
|
-
function storageHeaders(bucketKey) {
|
|
537
|
-
return { "X-Storage-Key": bucketKey };
|
|
538
|
-
}
|
|
539
|
-
function drainSSEProgress(rawText, onProgress) {
|
|
540
|
-
const results = [];
|
|
541
|
-
const errors = [];
|
|
542
|
-
parseSSEText(rawText, (eventType, data) => {
|
|
543
|
-
var _a, _b, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l;
|
|
544
|
-
const d = data;
|
|
545
|
-
if (eventType === "progress" && onProgress) {
|
|
546
|
-
onProgress({
|
|
547
|
-
index: (_a = d["index"]) != null ? _a : 0,
|
|
548
|
-
total: (_b = d["total"]) != null ? _b : 1,
|
|
549
|
-
path: (_c = d["path"]) != null ? _c : "",
|
|
550
|
-
stage: (_d = d["stage"]) != null ? _d : "uploading",
|
|
551
|
-
bytesUploaded: (_e = d["bytesUploaded"]) != null ? _e : 0,
|
|
552
|
-
totalBytes: (_f = d["totalBytes"]) != null ? _f : 0,
|
|
553
|
-
percent: (_g = d["percent"]) != null ? _g : 0,
|
|
554
|
-
bytesPerSecond: (_h = d["bytesPerSecond"]) != null ? _h : null,
|
|
555
|
-
eta: (_i = d["eta"]) != null ? _i : null,
|
|
556
|
-
result: d["result"],
|
|
557
|
-
error: d["error"],
|
|
558
|
-
code: d["code"]
|
|
559
|
-
});
|
|
560
|
-
}
|
|
561
|
-
if (eventType === "done") {
|
|
562
|
-
if (d["path"]) {
|
|
563
|
-
results.push(d);
|
|
564
|
-
} else if (Array.isArray(d["errors"])) {
|
|
565
|
-
const succeeded = (_j = d["succeeded"]) != null ? _j : [];
|
|
566
|
-
const errs = d["errors"];
|
|
567
|
-
results.push(...succeeded);
|
|
568
|
-
errors.push(...errs);
|
|
569
|
-
}
|
|
570
|
-
}
|
|
571
|
-
if (eventType === "error") {
|
|
572
|
-
errors.push({
|
|
573
|
-
path: "",
|
|
574
|
-
error: (_k = d["error"]) != null ? _k : "Unknown error",
|
|
575
|
-
code: (_l = d["code"]) != null ? _l : "UNKNOWN"
|
|
576
|
-
});
|
|
577
|
-
}
|
|
578
|
-
});
|
|
579
|
-
return { results, errors };
|
|
580
|
-
}
|
|
581
|
-
var StorageClient = class {
|
|
582
|
-
constructor(config) {
|
|
583
|
-
this.baseUrl = config.url;
|
|
584
|
-
}
|
|
585
|
-
// ══════════════════════════════════════════════════════════════════════════
|
|
586
|
-
// UPLOAD
|
|
587
|
-
// ══════════════════════════════════════════════════════════════════════════
|
|
588
|
-
/**
|
|
589
|
-
* Upload a single file to a bucket.
|
|
590
|
-
*
|
|
591
|
-
* The bucket key **always comes first**.
|
|
592
|
-
* Supply an `onProgress` callback to receive live upload progress including
|
|
593
|
-
* bytes transferred, speed (bytes/sec), ETA, and lifecycle stage.
|
|
594
|
-
*
|
|
595
|
-
* ### Stages fired via `onProgress`
|
|
596
|
-
* | Stage | Meaning |
|
|
597
|
-
* |-------------|------------------------------------------|
|
|
598
|
-
* | `pending` | Queued, not yet started |
|
|
599
|
-
* | `compressing` | Server is compressing the file |
|
|
600
|
-
* | `uploading` | Bytes flowing to cloud storage |
|
|
601
|
-
* | `done` | Confirmed written to cloud storage |
|
|
602
|
-
* | `error` | Something went wrong |
|
|
603
|
-
*
|
|
604
|
-
* @param bucketKey Your storage bucket key (`ssk_…`)
|
|
605
|
-
* @param file A `File`, `Blob`, or `Buffer` (Node)
|
|
606
|
-
* @param options Path, overwrite flag, progress callback
|
|
607
|
-
*
|
|
608
|
-
* @example
|
|
609
|
-
* const { data, error } = await hydrous.storage.upload(
|
|
610
|
-
* 'ssk_my_bucket_key',
|
|
611
|
-
* file,
|
|
612
|
-
* {
|
|
613
|
-
* path: 'avatars/alice.jpg',
|
|
614
|
-
* overwrite: true,
|
|
615
|
-
* onProgress: (p) => {
|
|
616
|
-
* console.log(`${p.stage} — ${p.percent}% ${p.bytesPerSecond} B/s ETA ${p.eta}s`);
|
|
617
|
-
* },
|
|
618
|
-
* }
|
|
619
|
-
* );
|
|
620
|
-
*/
|
|
621
|
-
async upload(bucketKey, file, options = {}) {
|
|
622
|
-
var _a, _b;
|
|
623
|
-
const { path, overwrite = false, onProgress } = options;
|
|
624
|
-
try {
|
|
625
|
-
const url = storageUrl(this.baseUrl, bucketKey, "upload");
|
|
626
|
-
const form = new FormData();
|
|
627
|
-
if (file instanceof Uint8Array) {
|
|
628
|
-
form.append("file", new Blob([file.buffer]), path != null ? path : "file");
|
|
629
|
-
} else if (file instanceof ArrayBuffer) {
|
|
630
|
-
form.append("file", new Blob([file]), path != null ? path : "file");
|
|
631
|
-
} else {
|
|
632
|
-
form.append("file", file, path != null ? path : file instanceof File ? file.name : "file");
|
|
633
|
-
}
|
|
634
|
-
if (path) form.append("path", path);
|
|
635
|
-
if (overwrite) form.append("overwrite", "true");
|
|
636
|
-
const headers = storageHeaders(bucketKey);
|
|
637
|
-
if (isBrowser) {
|
|
638
|
-
const totalBytes = file instanceof Blob ? file.size : file instanceof Uint8Array ? file.byteLength : file.byteLength;
|
|
639
|
-
const rawBody = await xhrUpload(url, form, headers, (loaded, total) => {
|
|
640
|
-
if (onProgress) {
|
|
641
|
-
onProgress({
|
|
642
|
-
index: 0,
|
|
643
|
-
total: 1,
|
|
644
|
-
path: path != null ? path : "",
|
|
645
|
-
stage: "uploading",
|
|
646
|
-
bytesUploaded: loaded,
|
|
647
|
-
totalBytes: total || totalBytes,
|
|
648
|
-
percent: Math.min(99, Math.round(loaded / (total || totalBytes) * 100)),
|
|
649
|
-
bytesPerSecond: null,
|
|
650
|
-
eta: null
|
|
651
|
-
});
|
|
652
|
-
}
|
|
653
|
-
});
|
|
654
|
-
const { results, errors } = drainSSEProgress(rawBody, onProgress);
|
|
655
|
-
if (errors.length > 0 && results.length === 0) {
|
|
656
|
-
return { data: null, error: { message: errors[0].error, code: errors[0].code } };
|
|
657
|
-
}
|
|
658
|
-
const result = (_a = results[0]) != null ? _a : null;
|
|
659
|
-
if (result && onProgress) {
|
|
660
|
-
onProgress({
|
|
661
|
-
index: 0,
|
|
662
|
-
total: 1,
|
|
663
|
-
path: result.path,
|
|
664
|
-
stage: "done",
|
|
665
|
-
bytesUploaded: totalBytes,
|
|
666
|
-
totalBytes,
|
|
667
|
-
percent: 100,
|
|
668
|
-
bytesPerSecond: null,
|
|
669
|
-
eta: 0,
|
|
670
|
-
result
|
|
671
|
-
});
|
|
672
|
-
}
|
|
673
|
-
return { data: result, error: null };
|
|
674
|
-
}
|
|
675
|
-
const res = await fetch(url, { method: "POST", headers, body: form });
|
|
676
|
-
if (!res.ok) {
|
|
677
|
-
const err = await res.json().catch(() => ({}));
|
|
678
|
-
throw new HydrousSDKError((_b = err.error) != null ? _b : `HTTP ${res.status}`, "HTTP_ERROR", res.status);
|
|
679
|
-
}
|
|
680
|
-
let finalResult = null;
|
|
681
|
-
await readSSEStream(res, (eventType, data) => {
|
|
682
|
-
var _a2, _b2, _c, _d, _e, _f, _g, _h, _i, _j, _k, _l;
|
|
683
|
-
const d = data;
|
|
684
|
-
if (eventType === "progress" && onProgress) {
|
|
685
|
-
onProgress({
|
|
686
|
-
index: (_a2 = d["index"]) != null ? _a2 : 0,
|
|
687
|
-
total: (_b2 = d["total"]) != null ? _b2 : 1,
|
|
688
|
-
path: (_d = (_c = d["path"]) != null ? _c : path) != null ? _d : "",
|
|
689
|
-
stage: (_e = d["stage"]) != null ? _e : "uploading",
|
|
690
|
-
bytesUploaded: (_f = d["bytesUploaded"]) != null ? _f : 0,
|
|
691
|
-
totalBytes: (_g = d["totalBytes"]) != null ? _g : 0,
|
|
692
|
-
percent: (_h = d["percent"]) != null ? _h : 0,
|
|
693
|
-
bytesPerSecond: (_i = d["bytesPerSecond"]) != null ? _i : null,
|
|
694
|
-
eta: (_j = d["eta"]) != null ? _j : null,
|
|
695
|
-
result: d["result"],
|
|
696
|
-
error: d["error"]
|
|
697
|
-
});
|
|
698
|
-
}
|
|
699
|
-
if (eventType === "done") finalResult = data;
|
|
700
|
-
if (eventType === "error") {
|
|
701
|
-
throw new HydrousSDKError(
|
|
702
|
-
(_k = d["error"]) != null ? _k : "Upload failed",
|
|
703
|
-
(_l = d["code"]) != null ? _l : "UPLOAD_ERROR"
|
|
704
|
-
);
|
|
705
|
-
}
|
|
706
|
-
});
|
|
707
|
-
return { data: finalResult, error: null };
|
|
708
|
-
} catch (err) {
|
|
709
|
-
return { data: null, error: toHydrousError(err) };
|
|
710
|
-
}
|
|
711
|
-
}
|
|
712
|
-
// ══════════════════════════════════════════════════════════════════════════
|
|
713
|
-
// UPLOAD RAW (text / JSON / binary from string)
|
|
714
|
-
// ══════════════════════════════════════════════════════════════════════════
|
|
715
|
-
/**
|
|
716
|
-
* Upload raw text or JSON content directly — no `File` object needed.
|
|
717
|
-
* Great for saving generated content, config files, or JSON records.
|
|
718
|
-
*
|
|
719
|
-
* @param bucketKey Your storage bucket key (`ssk_…`)
|
|
720
|
-
* @param path Destination path (e.g. `"configs/settings.json"`)
|
|
721
|
-
* @param content String content to store
|
|
722
|
-
* @param options `mimeType`, `overwrite`, `onProgress`
|
|
723
|
-
*
|
|
724
|
-
* @example
|
|
725
|
-
* await hydrous.storage.uploadText(
|
|
726
|
-
* 'ssk_my_bucket_key',
|
|
727
|
-
* 'reports/summary.txt',
|
|
728
|
-
* 'Hello from Hydrous!',
|
|
729
|
-
* { mimeType: 'text/plain' }
|
|
730
|
-
* );
|
|
731
|
-
*/
|
|
732
|
-
async uploadText(bucketKey, path, content, options = {}) {
|
|
733
|
-
var _a;
|
|
734
|
-
const { mimeType = "text/plain", overwrite = false, onProgress } = options;
|
|
735
|
-
try {
|
|
736
|
-
const url = storageUrl(this.baseUrl, bucketKey, "upload-raw");
|
|
737
|
-
const headers = { ...storageHeaders(bucketKey), "Content-Type": "application/json" };
|
|
738
|
-
const res = await fetch(url, {
|
|
739
|
-
method: "POST",
|
|
740
|
-
headers,
|
|
741
|
-
body: JSON.stringify({ path, content, mimeType, overwrite })
|
|
742
|
-
});
|
|
743
|
-
if (!res.ok) {
|
|
744
|
-
const e = await res.json().catch(() => ({}));
|
|
745
|
-
throw new HydrousSDKError((_a = e.error) != null ? _a : `HTTP ${res.status}`, "HTTP_ERROR", res.status);
|
|
746
|
-
}
|
|
747
|
-
let finalResult = null;
|
|
748
|
-
await readSSEStream(res, (eventType, data) => {
|
|
749
|
-
var _a2, _b, _c, _d, _e, _f;
|
|
750
|
-
const d = data;
|
|
751
|
-
if (eventType === "progress" && onProgress) {
|
|
752
|
-
onProgress({
|
|
753
|
-
index: 0,
|
|
754
|
-
total: 1,
|
|
755
|
-
path,
|
|
756
|
-
stage: (_a2 = d["stage"]) != null ? _a2 : "uploading",
|
|
757
|
-
bytesUploaded: (_b = d["bytesUploaded"]) != null ? _b : 0,
|
|
758
|
-
totalBytes: (_c = d["totalBytes"]) != null ? _c : 0,
|
|
759
|
-
percent: (_d = d["percent"]) != null ? _d : 0,
|
|
760
|
-
bytesPerSecond: (_e = d["bytesPerSecond"]) != null ? _e : null,
|
|
761
|
-
eta: (_f = d["eta"]) != null ? _f : null
|
|
762
|
-
});
|
|
763
|
-
}
|
|
764
|
-
if (eventType === "done") finalResult = data;
|
|
765
|
-
});
|
|
766
|
-
return { data: finalResult, error: null };
|
|
767
|
-
} catch (err) {
|
|
768
|
-
return { data: null, error: toHydrousError(err) };
|
|
769
|
-
}
|
|
770
|
-
}
|
|
771
|
-
// ══════════════════════════════════════════════════════════════════════════
|
|
772
|
-
// BATCH UPLOAD
|
|
773
|
-
// ══════════════════════════════════════════════════════════════════════════
|
|
774
|
-
/**
|
|
775
|
-
* Upload multiple files in one request.
|
|
776
|
-
*
|
|
777
|
-
* `onProgress` fires for **every file individually** — the `index` field
|
|
778
|
-
* tells you which file the event belongs to (0-based, same order as `files`).
|
|
779
|
-
* All files receive a `pending` event upfront before any uploads start,
|
|
780
|
-
* so you can render all progress bars immediately.
|
|
781
|
-
*
|
|
782
|
-
* @param bucketKey Your storage bucket key (`ssk_…`)
|
|
783
|
-
* @param files Array of `File` objects (browser) or `{ name, data }` objects (Node)
|
|
784
|
-
* @param options Prefix, per-file paths, overwrite, concurrency, onProgress
|
|
785
|
-
*
|
|
786
|
-
* @example
|
|
787
|
-
* await hydrous.storage.batchUpload(
|
|
788
|
-
* 'ssk_my_bucket_key',
|
|
789
|
-
* fileArray,
|
|
790
|
-
* {
|
|
791
|
-
* prefix: 'uploads/2024/',
|
|
792
|
-
* onProgress: (p) => {
|
|
793
|
-
* console.log(`File ${p.index}: ${p.stage} ${p.percent}%`);
|
|
794
|
-
* },
|
|
795
|
-
* }
|
|
796
|
-
* );
|
|
797
|
-
*/
|
|
798
|
-
async batchUpload(bucketKey, files, options = {}) {
|
|
799
|
-
var _a;
|
|
800
|
-
const { prefix = "", paths, overwrite = false, onProgress } = options;
|
|
801
|
-
try {
|
|
802
|
-
const url = storageUrl(this.baseUrl, bucketKey, "batch-upload");
|
|
803
|
-
const form = new FormData();
|
|
804
|
-
const resolvedPaths = files.map(
|
|
805
|
-
(f, i) => {
|
|
806
|
-
var _a2;
|
|
807
|
-
return (_a2 = paths == null ? void 0 : paths[i]) != null ? _a2 : `${prefix}${f.name}`;
|
|
808
|
-
}
|
|
809
|
-
);
|
|
810
|
-
files.forEach((f) => form.append("files", f, f.name));
|
|
811
|
-
form.append("paths", JSON.stringify(resolvedPaths));
|
|
812
|
-
if (overwrite) form.append("overwrite", "true");
|
|
813
|
-
const headers = storageHeaders(bucketKey);
|
|
814
|
-
if (isBrowser) {
|
|
815
|
-
const totalBytes = files.reduce((s, f) => s + f.size, 0);
|
|
816
|
-
const rawBody = await xhrUpload(url, form, headers, (loaded, total) => {
|
|
817
|
-
if (onProgress) {
|
|
818
|
-
let cursor = 0;
|
|
819
|
-
for (let i = 0; i < files.length; i++) {
|
|
820
|
-
const share = files[i].size / (totalBytes || 1);
|
|
821
|
-
const myStart = cursor;
|
|
822
|
-
const myEnd = cursor + share;
|
|
823
|
-
const fileLoaded = Math.max(0, Math.min(
|
|
824
|
-
files[i].size,
|
|
825
|
-
(loaded / (total || totalBytes) - myStart) / share * files[i].size
|
|
826
|
-
));
|
|
827
|
-
onProgress({
|
|
828
|
-
index: i,
|
|
829
|
-
total: files.length,
|
|
830
|
-
path: resolvedPaths[i],
|
|
831
|
-
stage: "uploading",
|
|
832
|
-
bytesUploaded: Math.round(fileLoaded),
|
|
833
|
-
totalBytes: files[i].size,
|
|
834
|
-
percent: Math.min(99, Math.round(fileLoaded / files[i].size * 100)),
|
|
835
|
-
bytesPerSecond: null,
|
|
836
|
-
eta: null
|
|
837
|
-
});
|
|
838
|
-
cursor = myEnd;
|
|
839
|
-
}
|
|
840
|
-
}
|
|
841
|
-
});
|
|
842
|
-
const { results, errors } = drainSSEProgress(rawBody, onProgress);
|
|
843
|
-
return {
|
|
844
|
-
data: {
|
|
845
|
-
succeeded: results,
|
|
846
|
-
failed: errors
|
|
847
|
-
},
|
|
848
|
-
error: null
|
|
849
|
-
};
|
|
850
|
-
}
|
|
851
|
-
const res = await fetch(url, { method: "POST", headers, body: form });
|
|
852
|
-
if (!res.ok) {
|
|
853
|
-
const e = await res.json().catch(() => ({}));
|
|
854
|
-
throw new HydrousSDKError((_a = e.error) != null ? _a : `HTTP ${res.status}`, "HTTP_ERROR", res.status);
|
|
855
|
-
}
|
|
856
|
-
const succeeded = [];
|
|
857
|
-
const failed = [];
|
|
858
|
-
await readSSEStream(res, (eventType, data) => {
|
|
859
|
-
var _a2, _b, _c, _d, _e, _f, _g, _h, _i, _j;
|
|
860
|
-
const d = data;
|
|
861
|
-
if (eventType === "progress" && onProgress) {
|
|
862
|
-
onProgress({
|
|
863
|
-
index: (_a2 = d["index"]) != null ? _a2 : 0,
|
|
864
|
-
total: (_b = d["total"]) != null ? _b : files.length,
|
|
865
|
-
path: (_c = d["path"]) != null ? _c : "",
|
|
866
|
-
stage: (_d = d["stage"]) != null ? _d : "uploading",
|
|
867
|
-
bytesUploaded: (_e = d["bytesUploaded"]) != null ? _e : 0,
|
|
868
|
-
totalBytes: (_f = d["totalBytes"]) != null ? _f : 0,
|
|
869
|
-
percent: (_g = d["percent"]) != null ? _g : 0,
|
|
870
|
-
bytesPerSecond: (_h = d["bytesPerSecond"]) != null ? _h : null,
|
|
871
|
-
eta: (_i = d["eta"]) != null ? _i : null,
|
|
872
|
-
result: d["result"],
|
|
873
|
-
error: d["error"],
|
|
874
|
-
code: d["code"]
|
|
875
|
-
});
|
|
876
|
-
}
|
|
877
|
-
if (eventType === "done" && d["succeeded"]) {
|
|
878
|
-
succeeded.push(...d["succeeded"]);
|
|
879
|
-
failed.push(...(_j = d["errors"]) != null ? _j : []);
|
|
880
|
-
}
|
|
881
|
-
});
|
|
882
|
-
return { data: { succeeded, failed }, error: null };
|
|
883
|
-
} catch (err) {
|
|
884
|
-
return { data: null, error: toHydrousError(err) };
|
|
885
|
-
}
|
|
886
|
-
}
|
|
887
|
-
// ══════════════════════════════════════════════════════════════════════════
|
|
888
|
-
// DOWNLOAD
|
|
889
|
-
// ══════════════════════════════════════════════════════════════════════════
|
|
890
|
-
/**
|
|
891
|
-
* Download a single file and return its content as an `ArrayBuffer`.
|
|
892
|
-
*
|
|
893
|
-
* @param bucketKey Your storage bucket key (`ssk_…`)
|
|
894
|
-
* @param filePath Path of the file within your bucket
|
|
895
|
-
*
|
|
896
|
-
* @example
|
|
897
|
-
* const { data, error } = await hydrous.storage.download(
|
|
898
|
-
* 'ssk_my_bucket_key',
|
|
899
|
-
* 'avatars/alice.jpg'
|
|
900
|
-
* );
|
|
901
|
-
* if (data) {
|
|
902
|
-
* const blob = new Blob([data]);
|
|
903
|
-
* const url = URL.createObjectURL(blob);
|
|
904
|
-
* }
|
|
905
|
-
*/
|
|
906
|
-
async download(bucketKey, filePath) {
|
|
907
|
-
var _a;
|
|
908
|
-
try {
|
|
909
|
-
const url = storageUrl(this.baseUrl, bucketKey, `download/${filePath}`);
|
|
910
|
-
const res = await fetch(url, { headers: storageHeaders(bucketKey) });
|
|
911
|
-
if (!res.ok) {
|
|
912
|
-
const e = await res.json().catch(() => ({}));
|
|
913
|
-
throw new HydrousSDKError((_a = e.error) != null ? _a : `HTTP ${res.status}`, "HTTP_ERROR", res.status);
|
|
914
|
-
}
|
|
915
|
-
const buffer = await res.arrayBuffer();
|
|
916
|
-
return { data: buffer, error: null };
|
|
917
|
-
} catch (err) {
|
|
918
|
-
return { data: null, error: toHydrousError(err) };
|
|
919
|
-
}
|
|
920
|
-
}
|
|
921
|
-
// ══════════════════════════════════════════════════════════════════════════
// BATCH DOWNLOAD
// ══════════════════════════════════════════════════════════════════════════
/**
 * Download multiple files in one request.
 *
 * The server streams each file back as an SSE `file` event whose `content`
 * field is base64-encoded; each is decoded to an `ArrayBuffer` here. Per-file
 * failures arrive as `error` events and are surfaced only via `onProgress` —
 * they do not reject the overall call.
 *
 * When `autoSave: true` (browser only) each file is automatically saved
 * to the user's Downloads folder as it arrives.
 *
 * @param bucketKey Your storage bucket key (`ssk_…`)
 * @param filePaths Array of file paths within your bucket
 * @param options Concurrency, onProgress, autoSave
 *
 * @example
 * const { data } = await hydrous.storage.batchDownload(
 *   'ssk_my_bucket_key',
 *   ['reports/jan.pdf', 'reports/feb.pdf'],
 *   {
 *     onProgress: (p) => console.log(p.path, p.status),
 *     autoSave: true, // triggers browser file-save dialog per file
 *   }
 * );
 */
async batchDownload(bucketKey, filePaths, options = {}) {
    var _a;
    const { concurrency = 5, onProgress, autoSave = false } = options;
    try {
      const url = storageUrl(this.baseUrl, bucketKey, "batch-download");
      const res = await fetch(url, {
        method: "POST",
        headers: { ...storageHeaders(bucketKey), "Content-Type": "application/json" },
        body: JSON.stringify({ paths: filePaths, concurrency })
      });
      if (!res.ok) {
        const e = await res.json().catch(() => ({}));
        throw new HydrousSDKError((_a = e.error) != null ? _a : `HTTP ${res.status}`, "HTTP_ERROR", res.status);
      }
      const downloadedFiles = [];
      await readSSEStream(res, (eventType, data) => {
        var _a2, _b, _c, _d, _e, _f, _g, _h;
        const d = data;
        if (eventType === "file") {
          // NOTE(review): assumes every `file` event carries a base64
          // `content` string — `atob(undefined)` would throw into the outer
          // catch; confirm against the server contract.
          const base64 = d["content"];
          const mimeType = (_a2 = d["mimeType"]) != null ? _a2 : "application/octet-stream";
          const path = (_b = d["path"]) != null ? _b : "";
          const size = (_c = d["size"]) != null ? _c : 0;
          const index = (_d = d["index"]) != null ? _d : 0;
          // Decode base64 → raw bytes → ArrayBuffer.
          const binary = atob(base64);
          const bytes = new Uint8Array(binary.length);
          for (let i = 0; i < binary.length; i++) bytes[i] = binary.charCodeAt(i);
          const content = bytes.buffer;
          downloadedFiles.push({ path, content, mimeType, size });
          if (onProgress) {
            onProgress({
              index,
              total: filePaths.length,
              path,
              status: "success",
              size,
              mimeType
            });
          }
          if (autoSave && isBrowser) {
            // Trigger the browser's save-to-disk flow via a synthetic click;
            // the object URL is revoked shortly after to free memory.
            const blob = new Blob([content], { type: mimeType });
            const blobUrl = URL.createObjectURL(blob);
            const a = document.createElement("a");
            a.href = blobUrl;
            a.download = (_e = path.split("/").pop()) != null ? _e : "download";
            a.click();
            setTimeout(() => URL.revokeObjectURL(blobUrl), 5e3);
          }
        }
        if (eventType === "error" && onProgress) {
          // Per-file failure: report via onProgress; the file is simply
          // absent from the returned array.
          const index = (_f = d["index"]) != null ? _f : 0;
          onProgress({
            index,
            total: filePaths.length,
            path: (_g = filePaths[index]) != null ? _g : "",
            status: "error",
            error: (_h = d["error"]) != null ? _h : "Download failed"
          });
        }
      });
      return { data: downloadedFiles, error: null };
    } catch (err) {
      return { data: null, error: toHydrousError(err) };
    }
  }
|
|
1009
|
-
// ══════════════════════════════════════════════════════════════════════════
|
|
1010
|
-
// LIST
|
|
1011
|
-
// ══════════════════════════════════════════════════════════════════════════
|
|
1012
|
-
/**
|
|
1013
|
-
* List files and folders inside a bucket (or a folder within it).
|
|
1014
|
-
*
|
|
1015
|
-
* Results are paginated — use `pagination.nextCursor` to fetch the next page.
|
|
1016
|
-
*
|
|
1017
|
-
* @param bucketKey Your storage bucket key (`ssk_…`)
|
|
1018
|
-
* @param options `prefix`, `limit`, `cursor`
|
|
1019
|
-
*
|
|
1020
|
-
* @example
|
|
1021
|
-
* const { data } = await hydrous.storage.list('ssk_my_bucket_key', {
|
|
1022
|
-
* prefix: 'avatars/',
|
|
1023
|
-
* limit: 50,
|
|
1024
|
-
* });
|
|
1025
|
-
* for (const item of data.items) {
|
|
1026
|
-
* console.log(item.type, item.path);
|
|
1027
|
-
* }
|
|
1028
|
-
*/
|
|
1029
|
-
async list(bucketKey, options = {}) {
|
|
1030
|
-
const { prefix = "", limit = 50, cursor } = options;
|
|
1031
|
-
try {
|
|
1032
|
-
const params = {
|
|
1033
|
-
prefix: prefix || void 0,
|
|
1034
|
-
limit,
|
|
1035
|
-
cursor: cursor || void 0
|
|
1036
|
-
};
|
|
1037
|
-
const url = buildUrl(
|
|
1038
|
-
this.baseUrl,
|
|
1039
|
-
`storage/${bucketFromKey(bucketKey)}/list`,
|
|
1040
|
-
params
|
|
1041
|
-
);
|
|
1042
|
-
const res = await fetch(url, { headers: storageHeaders(bucketKey) });
|
|
1043
|
-
const json = await parseResponse(res);
|
|
1044
|
-
return { data: json, error: null };
|
|
1045
|
-
} catch (err) {
|
|
1046
|
-
return { data: null, error: toHydrousError(err) };
|
|
1047
|
-
}
|
|
1048
|
-
}
|
|
1049
|
-
// ══════════════════════════════════════════════════════════════════════════
|
|
1050
|
-
// METADATA
|
|
1051
|
-
// ══════════════════════════════════════════════════════════════════════════
|
|
1052
|
-
/**
|
|
1053
|
-
* Get metadata for a specific file (size, MIME type, compression info, etc.)
|
|
1054
|
-
*
|
|
1055
|
-
* @param bucketKey Your storage bucket key (`ssk_…`)
|
|
1056
|
-
* @param filePath Path of the file within your bucket
|
|
1057
|
-
*
|
|
1058
|
-
* @example
|
|
1059
|
-
* const { data } = await hydrous.storage.metadata(
|
|
1060
|
-
* 'ssk_my_bucket_key',
|
|
1061
|
-
* 'avatars/alice.jpg'
|
|
1062
|
-
* );
|
|
1063
|
-
* console.log(data.size, data.mimeType);
|
|
1064
|
-
*/
|
|
1065
|
-
async metadata(bucketKey, filePath) {
|
|
1066
|
-
try {
|
|
1067
|
-
const url = storageUrl(this.baseUrl, bucketKey, `metadata/${filePath}`);
|
|
1068
|
-
const res = await fetch(url, { headers: storageHeaders(bucketKey) });
|
|
1069
|
-
const json = await parseResponse(res);
|
|
1070
|
-
return { data: json.data, error: null };
|
|
1071
|
-
} catch (err) {
|
|
1072
|
-
return { data: null, error: toHydrousError(err) };
|
|
1073
|
-
}
|
|
1074
|
-
}
|
|
1075
|
-
// ══════════════════════════════════════════════════════════════════════════
|
|
1076
|
-
// DELETE FILE
|
|
1077
|
-
// ══════════════════════════════════════════════════════════════════════════
|
|
1078
|
-
/**
|
|
1079
|
-
* Delete a single file.
|
|
1080
|
-
*
|
|
1081
|
-
* @param bucketKey Your storage bucket key (`ssk_…`)
|
|
1082
|
-
* @param filePath Path of the file to delete
|
|
1083
|
-
*
|
|
1084
|
-
* @example
|
|
1085
|
-
* await hydrous.storage.deleteFile('ssk_my_bucket_key', 'avatars/old.jpg');
|
|
1086
|
-
*/
|
|
1087
|
-
async deleteFile(bucketKey, filePath) {
|
|
1088
|
-
try {
|
|
1089
|
-
const url = storageUrl(this.baseUrl, bucketKey, "file");
|
|
1090
|
-
const res = await fetch(url, {
|
|
1091
|
-
method: "DELETE",
|
|
1092
|
-
headers: { ...storageHeaders(bucketKey), "Content-Type": "application/json" },
|
|
1093
|
-
body: JSON.stringify({ path: filePath })
|
|
1094
|
-
});
|
|
1095
|
-
await parseResponse(res);
|
|
1096
|
-
return { data: void 0, error: null };
|
|
1097
|
-
} catch (err) {
|
|
1098
|
-
return { data: null, error: toHydrousError(err) };
|
|
1099
|
-
}
|
|
1100
|
-
}
|
|
1101
|
-
// ══════════════════════════════════════════════════════════════════════════
|
|
1102
|
-
// DELETE FOLDER
|
|
1103
|
-
// ══════════════════════════════════════════════════════════════════════════
|
|
1104
|
-
/**
|
|
1105
|
-
* Recursively delete a folder and all of its contents.
|
|
1106
|
-
*
|
|
1107
|
-
* @param bucketKey Your storage bucket key (`ssk_…`)
|
|
1108
|
-
* @param folderPath Folder path to delete (e.g. `"old-uploads/"`)
|
|
1109
|
-
*
|
|
1110
|
-
* @example
|
|
1111
|
-
* await hydrous.storage.deleteFolder('ssk_my_bucket_key', 'temp/');
|
|
1112
|
-
*/
|
|
1113
|
-
async deleteFolder(bucketKey, folderPath) {
|
|
1114
|
-
try {
|
|
1115
|
-
const url = storageUrl(this.baseUrl, bucketKey, "folder");
|
|
1116
|
-
const res = await fetch(url, {
|
|
1117
|
-
method: "DELETE",
|
|
1118
|
-
headers: { ...storageHeaders(bucketKey), "Content-Type": "application/json" },
|
|
1119
|
-
body: JSON.stringify({ path: folderPath })
|
|
1120
|
-
});
|
|
1121
|
-
await parseResponse(res);
|
|
1122
|
-
return { data: void 0, error: null };
|
|
1123
|
-
} catch (err) {
|
|
1124
|
-
return { data: null, error: toHydrousError(err) };
|
|
1125
|
-
}
|
|
1126
|
-
}
|
|
1127
|
-
// ══════════════════════════════════════════════════════════════════════════
|
|
1128
|
-
// CREATE FOLDER
|
|
1129
|
-
// ══════════════════════════════════════════════════════════════════════════
|
|
1130
|
-
/**
|
|
1131
|
-
* Create an empty folder.
|
|
1132
|
-
*
|
|
1133
|
-
* @param bucketKey Your storage bucket key (`ssk_…`)
|
|
1134
|
-
* @param folderPath Path for the new folder (e.g. `"avatars/2024/"`)
|
|
1135
|
-
*
|
|
1136
|
-
* @example
|
|
1137
|
-
* await hydrous.storage.createFolder('ssk_my_bucket_key', 'avatars/2024/');
|
|
1138
|
-
*/
|
|
1139
|
-
async createFolder(bucketKey, folderPath) {
|
|
1140
|
-
try {
|
|
1141
|
-
const url = storageUrl(this.baseUrl, bucketKey, "folder");
|
|
1142
|
-
const res = await fetch(url, {
|
|
1143
|
-
method: "POST",
|
|
1144
|
-
headers: { ...storageHeaders(bucketKey), "Content-Type": "application/json" },
|
|
1145
|
-
body: JSON.stringify({ path: folderPath })
|
|
1146
|
-
});
|
|
1147
|
-
await parseResponse(res);
|
|
1148
|
-
return { data: void 0, error: null };
|
|
1149
|
-
} catch (err) {
|
|
1150
|
-
return { data: null, error: toHydrousError(err) };
|
|
1151
|
-
}
|
|
1152
|
-
}
|
|
1153
|
-
// ══════════════════════════════════════════════════════════════════════════
|
|
1154
|
-
// MOVE
|
|
1155
|
-
// ══════════════════════════════════════════════════════════════════════════
|
|
1156
|
-
/**
|
|
1157
|
-
* Move (rename) a file to a new path.
|
|
1158
|
-
*
|
|
1159
|
-
* @param bucketKey Your storage bucket key (`ssk_…`)
|
|
1160
|
-
* @param fromPath Current path of the file
|
|
1161
|
-
* @param toPath New path for the file
|
|
1162
|
-
*
|
|
1163
|
-
* @example
|
|
1164
|
-
* await hydrous.storage.move(
|
|
1165
|
-
* 'ssk_my_bucket_key',
|
|
1166
|
-
* 'drafts/report.pdf',
|
|
1167
|
-
* 'published/report.pdf'
|
|
1168
|
-
* );
|
|
1169
|
-
*/
|
|
1170
|
-
async move(bucketKey, fromPath, toPath) {
|
|
1171
|
-
try {
|
|
1172
|
-
const url = storageUrl(this.baseUrl, bucketKey, "move");
|
|
1173
|
-
const res = await fetch(url, {
|
|
1174
|
-
method: "POST",
|
|
1175
|
-
headers: { ...storageHeaders(bucketKey), "Content-Type": "application/json" },
|
|
1176
|
-
body: JSON.stringify({ from: fromPath, to: toPath })
|
|
1177
|
-
});
|
|
1178
|
-
await parseResponse(res);
|
|
1179
|
-
return { data: void 0, error: null };
|
|
1180
|
-
} catch (err) {
|
|
1181
|
-
return { data: null, error: toHydrousError(err) };
|
|
1182
|
-
}
|
|
1183
|
-
}
|
|
1184
|
-
// ══════════════════════════════════════════════════════════════════════════
|
|
1185
|
-
// COPY
|
|
1186
|
-
// ══════════════════════════════════════════════════════════════════════════
|
|
1187
|
-
/**
|
|
1188
|
-
* Copy a file to a new path (original is kept).
|
|
1189
|
-
*
|
|
1190
|
-
* @param bucketKey Your storage bucket key (`ssk_…`)
|
|
1191
|
-
* @param fromPath Source path
|
|
1192
|
-
* @param toPath Destination path
|
|
1193
|
-
*
|
|
1194
|
-
* @example
|
|
1195
|
-
* await hydrous.storage.copy(
|
|
1196
|
-
* 'ssk_my_bucket_key',
|
|
1197
|
-
* 'templates/invoice.pdf',
|
|
1198
|
-
* 'invoices/invoice-001.pdf'
|
|
1199
|
-
* );
|
|
1200
|
-
*/
|
|
1201
|
-
async copy(bucketKey, fromPath, toPath) {
|
|
1202
|
-
try {
|
|
1203
|
-
const url = storageUrl(this.baseUrl, bucketKey, "copy");
|
|
1204
|
-
const res = await fetch(url, {
|
|
1205
|
-
method: "POST",
|
|
1206
|
-
headers: { ...storageHeaders(bucketKey), "Content-Type": "application/json" },
|
|
1207
|
-
body: JSON.stringify({ from: fromPath, to: toPath })
|
|
1208
|
-
});
|
|
1209
|
-
await parseResponse(res);
|
|
1210
|
-
return { data: void 0, error: null };
|
|
1211
|
-
} catch (err) {
|
|
1212
|
-
return { data: null, error: toHydrousError(err) };
|
|
1213
|
-
}
|
|
1214
|
-
}
|
|
1215
|
-
// ══════════════════════════════════════════════════════════════════════════
|
|
1216
|
-
// SIGNED URL
|
|
1217
|
-
// ══════════════════════════════════════════════════════════════════════════
|
|
1218
|
-
/**
|
|
1219
|
-
* Generate a time-limited public URL for a private file.
|
|
1220
|
-
*
|
|
1221
|
-
* @param bucketKey Your storage bucket key (`ssk_…`)
|
|
1222
|
-
* @param filePath Path of the file
|
|
1223
|
-
* @param options `expiresIn` seconds (default: 3600)
|
|
1224
|
-
*
|
|
1225
|
-
* @example
|
|
1226
|
-
* const { data } = await hydrous.storage.signedUrl(
|
|
1227
|
-
* 'ssk_my_bucket_key',
|
|
1228
|
-
* 'private/contract.pdf',
|
|
1229
|
-
* { expiresIn: 300 } // 5 minutes
|
|
1230
|
-
* );
|
|
1231
|
-
* console.log(data.signedUrl); // share this URL
|
|
1232
|
-
*/
|
|
1233
|
-
async signedUrl(bucketKey, filePath, options = {}) {
|
|
1234
|
-
const { expiresIn = 3600 } = options;
|
|
1235
|
-
try {
|
|
1236
|
-
const url = storageUrl(this.baseUrl, bucketKey, "signed-url");
|
|
1237
|
-
const res = await fetch(url, {
|
|
1238
|
-
method: "POST",
|
|
1239
|
-
headers: { ...storageHeaders(bucketKey), "Content-Type": "application/json" },
|
|
1240
|
-
body: JSON.stringify({ path: filePath, expiresInSeconds: expiresIn })
|
|
1241
|
-
});
|
|
1242
|
-
const json = await parseResponse(res);
|
|
1243
|
-
return { data: json, error: null };
|
|
1244
|
-
} catch (err) {
|
|
1245
|
-
return { data: null, error: toHydrousError(err) };
|
|
1246
|
-
}
|
|
1247
|
-
}
|
|
1248
|
-
// ══════════════════════════════════════════════════════════════════════════
|
|
1249
|
-
// STATS
|
|
1250
|
-
// ══════════════════════════════════════════════════════════════════════════
|
|
1251
|
-
/**
|
|
1252
|
-
* Get usage and billing statistics for this bucket key.
|
|
1253
|
-
*
|
|
1254
|
-
* @param bucketKey Your storage bucket key (`ssk_…`)
|
|
1255
|
-
*
|
|
1256
|
-
* @example
|
|
1257
|
-
* const { data } = await hydrous.storage.stats('ssk_my_bucket_key');
|
|
1258
|
-
* console.log(`${data.totalFiles} files, ${data.totalSizeBytes} bytes stored`);
|
|
1259
|
-
*/
|
|
1260
|
-
async stats(bucketKey) {
|
|
1261
|
-
try {
|
|
1262
|
-
const url = buildUrl(this.baseUrl, `storage/${bucketFromKey(bucketKey)}/stats`);
|
|
1263
|
-
const res = await fetch(url, { headers: storageHeaders(bucketKey) });
|
|
1264
|
-
const json = await parseResponse(res);
|
|
1265
|
-
return { data: json.data, error: null };
|
|
1266
|
-
} catch (err) {
|
|
1267
|
-
return { data: null, error: toHydrousError(err) };
|
|
1268
|
-
}
|
|
1269
|
-
}
|
|
1270
|
-
};
|
|
1271
|
-
|
|
1272
|
-
// src/client.ts
var HydrousClient = class {
  /**
   * Root SDK client aggregating the per-domain sub-clients
   * (`auth`, `records`, `analytics`, `storage`), each constructed from the
   * same configuration object.
   *
   * @param config Client configuration; `url` and `apiKey` are required.
   * @throws Error if `config.url` or `config.apiKey` is missing.
   */
  constructor(config) {
    if (!config.url) throw new Error("[Hydrous] config.url is required");
    if (!config.apiKey) throw new Error("[Hydrous] config.apiKey is required");
    this.auth = new AuthClient(config);
    this.records = new RecordsClient(config);
    this.analytics = new AnalyticsClient(config);
    this.storage = new StorageClient(config);
  }
};
|
|
1283
|
-
|
|
1284
|
-
// src/index.ts
/**
 * Convenience factory — equivalent to `new HydrousClient(config)`.
 *
 * @param config Client configuration; `url` and `apiKey` are required.
 * @returns A fully wired HydrousClient instance.
 */
function createClient(config) {
  return new HydrousClient(config);
}
|
|
1288
|
-
|
|
1289
|
-
export { AnalyticsClient, AuthClient, HydrousClient, HydrousSDKError, RecordsClient, StorageClient, createClient, eq, gt, inArray, isHydrousError, lt, neq };
|
|
1290
|
-
//# sourceMappingURL=index.mjs.map
|
|
1291
|
-
//# sourceMappingURL=index.mjs.map
|