@knymbus/firestoredb 1.0.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/FirestoreDB.d.ts +114 -0
- package/dist/FirestoreDB.d.ts.map +1 -0
- package/dist/FirestoreDB.js +407 -0
- package/dist/FirestoreQuery.d.ts +58 -0
- package/dist/FirestoreQuery.d.ts.map +1 -0
- package/dist/FirestoreQuery.js +200 -0
- package/dist/ParallelPipe.d.ts +22 -0
- package/dist/ParallelPipe.d.ts.map +1 -0
- package/dist/ParallelPipe.js +63 -0
- package/dist/cjs/FirestoreDB.d.ts +114 -0
- package/dist/cjs/FirestoreDB.d.ts.map +1 -0
- package/dist/cjs/FirestoreDB.js +407 -0
- package/dist/cjs/FirestoreQuery.d.ts +58 -0
- package/dist/cjs/FirestoreQuery.d.ts.map +1 -0
- package/dist/cjs/FirestoreQuery.js +200 -0
- package/dist/cjs/ParallelPipe.d.ts +22 -0
- package/dist/cjs/ParallelPipe.d.ts.map +1 -0
- package/dist/cjs/ParallelPipe.js +63 -0
- package/dist/cjs/index.d.ts +4 -0
- package/dist/cjs/index.d.ts.map +1 -0
- package/dist/cjs/index.js +19 -0
- package/dist/cjs/types.d.ts +22 -0
- package/dist/cjs/types.d.ts.map +1 -0
- package/dist/cjs/types.js +2 -0
- package/dist/cjs/utils/$Operators.d.ts +4 -0
- package/dist/cjs/utils/$Operators.d.ts.map +1 -0
- package/dist/cjs/utils/$Operators.js +12 -0
- package/dist/cjs/utils/Hasher.d.ts +11 -0
- package/dist/cjs/utils/Hasher.d.ts.map +1 -0
- package/dist/cjs/utils/Hasher.js +28 -0
- package/dist/cjs/utils/LRUCache.d.ts +13 -0
- package/dist/cjs/utils/LRUCache.d.ts.map +1 -0
- package/dist/cjs/utils/LRUCache.js +39 -0
- package/dist/cjs/utils/hydrateDates.d.ts +6 -0
- package/dist/cjs/utils/hydrateDates.d.ts.map +1 -0
- package/dist/cjs/utils/hydrateDates.js +29 -0
- package/dist/cjs/utils/index.d.ts +5 -0
- package/dist/cjs/utils/index.d.ts.map +1 -0
- package/dist/cjs/utils/index.js +20 -0
- package/dist/index.d.ts +4 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +19 -0
- package/dist/types.d.ts +22 -0
- package/dist/types.d.ts.map +1 -0
- package/dist/types.js +2 -0
- package/dist/utils/$Operators.d.ts +4 -0
- package/dist/utils/$Operators.d.ts.map +1 -0
- package/dist/utils/$Operators.js +12 -0
- package/dist/utils/Hasher.d.ts +11 -0
- package/dist/utils/Hasher.d.ts.map +1 -0
- package/dist/utils/Hasher.js +28 -0
- package/dist/utils/LRUCache.d.ts +13 -0
- package/dist/utils/LRUCache.d.ts.map +1 -0
- package/dist/utils/LRUCache.js +39 -0
- package/dist/utils/hydrateDates.d.ts +6 -0
- package/dist/utils/hydrateDates.d.ts.map +1 -0
- package/dist/utils/hydrateDates.js +29 -0
- package/dist/utils/index.d.ts +5 -0
- package/dist/utils/index.d.ts.map +1 -0
- package/dist/utils/index.js +20 -0
- package/package.json +45 -0
|
@@ -0,0 +1,200 @@
|
|
|
1
|
+
"use strict";
// TypeScript-emitted runtime helper: drives a generator produced from a
// transpiled `async` function, adopting each yielded value into a Promise
// and resolving/rejecting the outer Promise when the generator completes.
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
|
|
11
|
+
// CommonJS module wiring (compiled output).
Object.defineProperty(exports, "__esModule", { value: true });
exports.FirestoreQuery = void 0;
const firestore_1 = require("firebase/firestore");
const utils_1 = require("./utils");
const Hasher_1 = require("./utils/Hasher");
// NOTE(review): "./utils" is required twice (utils_1 and utils_2) — an
// artifact of two separate source imports. Harmless: Node caches the module,
// both bindings reference the same instance.
const utils_2 = require("./utils");
|
|
17
|
+
/**
 * Chainable, Mongo-style query builder over a single Firestore collection.
 *
 * Instances are produced by the owning FirestoreDB's `find(filter)`; callers
 * chain `sort`/`limit`/`after`/`before`/`cache`/`withDeleted`/`onlyDeleted`
 * and finish with `execute()`, `paginate()` or `stream()`.
 */
class FirestoreQuery {
    constructor(db, collectionName, collectionRef, filter, buildConstraints, countDocs, isSoftDeleteEnabled) {
        this.db = db;
        this.collectionName = collectionName;
        this.collectionRef = collectionRef;
        this.filter = filter;
        this.buildConstraints = buildConstraints;
        this.countDocs = countDocs;
        this._useCache = false;
        this._ttl = 60000; // Default 1 minute
        this._deleteMode = 'hide';
        this._limit = 100;
        this._sort = {};
        this._cursorType = null;
        // When soft delete is disabled there is no isDeleted flag to filter on,
        // so default to 'include' (no visibility constraint is added).
        this._deleteMode = isSoftDeleteEnabled ? 'hide' : 'include';
    }
    /** Enables result caching for this query */
    cache(ttlMs) {
        this._useCache = true;
        // NOTE(review): truthiness check means ttlMs = 0 keeps the 60s default.
        if (ttlMs)
            this._ttl = ttlMs;
        return this;
    }
    /** Sets the sort order, e.g. { createdAt: 'desc' }. Required for cursors. */
    sort(sortObj) {
        this._sort = sortObj;
        return this;
    }
    /** Caps the number of documents returned (default 100). */
    limit(n) {
        this._limit = n;
        return this;
    }
    /** Move to the next page */
    after(id) {
        this._cursorId = id;
        this._cursorType = 'after';
        return this;
    }
    /** Move to the previous page */
    before(id) {
        this._cursorId = id;
        this._cursorType = 'before';
        return this;
    }
    /**
     * FirestoreDB: db.collection.find({}).withDeleted().execute()
     * Allows viewing soft-deleted documents for this specific query.
     */
    withDeleted() {
        this._deleteMode = 'include';
        return this;
    }
    /** Restricts results to soft-deleted documents only. */
    onlyDeleted() { this._deleteMode = 'only'; return this; }
    /** Simple execution (Returns Array) */
    execute() {
        return __awaiter(this, void 0, void 0, function* () {
            let finalConstraints = yield this._prepareConstraints();
            const q = (0, firestore_1.query)(this.collectionRef, ...finalConstraints);
            const snapshot = yield (0, firestore_1.getDocs)(q);
            return snapshot.docs.map(d => {
                // Expose the document id as `_id` alongside its fields (Mongo style).
                const rawData = Object.assign({ _id: d.id }, (d.data() || {}));
                return (0, utils_1.hydrateDates)(rawData);
            });
        });
    }
    /** Paginated execution (Returns Data + Metadata) */
    paginate() {
        return __awaiter(this, void 0, void 0, function* () {
            // set the global key for this query
            const key = this._generateCacheKey();
            // 1. Check Cache
            if (this._useCache) {
                const cached = FirestoreQuery._globalCache.get(key);
                if (cached && (Date.now() - cached.timestamp < this._ttl)) {
                    return cached.data;
                }
            }
            const data = yield this.execute();
            // NOTE(review): countDocs receives only the filter, not the soft-delete
            // mode, so totalRecords may count docs that execute() hides — confirm
            // against the owning FirestoreDB's countDocuments implementation.
            const totalRecords = yield this.countDocs(this.filter);
            const totalPages = this._limit > 0 ? Math.ceil(totalRecords / this._limit) : 1;
            const result = {
                data,
                metadata: {
                    totalRecords,
                    totalPages,
                    // Heuristic: a full page implies a next page probably exists.
                    hasNext: data.length === this._limit,
                    hasPrevious: !!this._cursorId
                }
            };
            // 2. Fetch & Store
            if (this._useCache) {
                FirestoreQuery._globalCache.set(key, {
                    data: result,
                    timestamp: Date.now()
                });
            }
            return result;
        });
    }
    /**
     * Returns the query results as a ReadableStream of documents. The whole
     * result set is fetched once via execute(), then enqueued one doc at a time.
     */
    stream() {
        const fetchDocs = () => __awaiter(this, void 0, void 0, function* () { return this.execute(); });
        return new ReadableStream({
            start(controller) {
                return __awaiter(this, void 0, void 0, function* () {
                    try {
                        const docs = yield fetchDocs();
                        for (let d of docs) {
                            controller.enqueue(d); // push docs into pipe
                        }
                        controller.close(); // End of stream
                    }
                    catch (error) {
                        controller.error(error);
                    }
                });
            }
        });
    }
    // PRIVATE METHODS
    /**
     * Builds the full list of Firestore QueryConstraints: caller filters,
     * soft-delete visibility, ordering, and cursor/limit handling. Async
     * because cursor ids must be resolved to document snapshots via getDoc.
     */
    _prepareConstraints() {
        return __awaiter(this, void 0, void 0, function* () {
            const constraints = [...this.buildConstraints(this.filter)];
            // If mode is 'include', we simply don't add an isDeleted constraint at all
            if (this._deleteMode === 'hide') {
                constraints.push((0, firestore_1.where)("isDeleted", "==", false));
            }
            else if (this._deleteMode === 'only') {
                constraints.push((0, firestore_1.where)("isDeleted", "==", true));
            }
            // 1. Apply Sorting (Required for cursors)
            Object.entries(this._sort).forEach(([field, dir]) => {
                constraints.push((0, firestore_1.orderBy)(field, dir));
            });
            // 2. Handle Cursors (Validation + Logic)
            if (this._cursorId) {
                const docRef = (0, firestore_1.doc)(this.db, this.collectionName, this._cursorId);
                const snap = yield (0, firestore_1.getDoc)(docRef);
                // A missing cursor doc silently falls through with no cursor/limit.
                if (snap.exists()) {
                    if (this._cursorType === 'after') {
                        constraints.push((0, firestore_1.startAfter)(snap));
                        if (this._limit)
                            constraints.push((0, firestore_1.limit)(this._limit));
                    }
                    // If not start then before
                    else {
                        constraints.push((0, firestore_1.endBefore)(snap));
                        // limitToLast is required to get the 10 items *closest* to the cursor
                        if (this._limit)
                            constraints.push((0, firestore_1.limitToLast)(this._limit));
                    }
                }
            }
            else if (this._limit) {
                constraints.push((0, firestore_1.limit)(this._limit));
            }
            return constraints;
        });
    }
    /** Derives a stable cache key from every parameter that affects results. */
    _generateCacheKey() {
        const params = {
            filter: this._sortObjectKeys(this.filter), // Sort keys for consistency
            sort: this._sort,
            limit: this._limit,
            cursorId: this._cursorId,
            cursorType: this._cursorType,
            includeDeleted: this._deleteMode
        };
        // Use the static Hasher class
        return Hasher_1.Hasher.generateCacheKey(this.collectionName, params);
    }
    /**
     * Helper to ensure {a:1, b:2} and {b:2, a:1} result in the same JSON string
     */
    _sortObjectKeys(obj) {
        if (obj === null || typeof obj !== 'object' || Array.isArray(obj))
            return obj;
        return Object.keys(obj).sort().reduce((acc, key) => {
            acc[key] = this._sortObjectKeys(obj[key]);
            return acc;
        }, {});
    }
}
|
|
198
|
+
exports.FirestoreQuery = FirestoreQuery;
// CACHE VARS
// Static LRU cache shared by every FirestoreQuery instance in the process.
FirestoreQuery._globalCache = new utils_2.LRUCache(); // Shared across instances
|
|
@@ -0,0 +1,22 @@
|
|
|
1
|
+
/**
 * PARALLEL TRANSFORM STREAM
 * Runs the async transform on up to `concurrency` items at once, retrying a
 * failed item up to `retryCount` additional times. Results are enqueued as
 * each task completes, so output order may differ from input order.
 * @example
 * // 1. Stream data from Firestore
 const source = Products.find({ status: 'pending' }).stream();

 // 2. Process 50 items in parallel with 3 retries
 const processor = ParallelPipe(50, 3, async (doc) => {
    const apiData = await transformNetworkAction(doc); // Your slow O(n) task
    return { ...doc, ...apiData, status: 'processed' };
 });

 // 3. Write back to Firestore
 const saver = new WritableStream({
    async write(doc) { await Products.updateOne(doc._id, doc); }
 });

 // 4. Pipe through
 await source.pipeThrough(processor).pipeTo(saver);
 */
export declare const ParallelPipe: <TIn, TOut>(concurrency: number, retryCount: number, transformFn: (item: TIn) => Promise<TOut>) => TransformStream<TIn, TOut>;
//# sourceMappingURL=ParallelPipe.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"ParallelPipe.d.ts","sourceRoot":"","sources":["../src/ParallelPipe.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;;;;;;GAmBG;AACH,eAAO,MAAM,YAAY,GAAI,GAAG,EAAE,IAAI,EAClC,aAAa,MAAM,EACnB,YAAY,MAAM,EAClB,aAAa,CAAC,IAAI,EAAE,GAAG,KAAK,OAAO,CAAC,IAAI,CAAC,+BAsB5C,CAAC"}
|
|
@@ -0,0 +1,63 @@
|
|
|
1
|
+
"use strict";
// TypeScript-emitted runtime helper: drives a generator produced from a
// transpiled `async` function, adopting each yielded value into a Promise
// and resolving/rejecting the outer Promise when the generator completes.
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
|
|
11
|
+
// CommonJS module wiring (compiled output).
Object.defineProperty(exports, "__esModule", { value: true });
exports.ParallelPipe = void 0;
|
|
13
|
+
/**
 * PARALLEL TRANSFORM STREAM
 *
 * Builds a TransformStream that runs `transformFn` on up to `concurrency`
 * items at once, retrying each failed item up to `retryCount` additional
 * times. Results are enqueued as each task completes, so output order may
 * differ from input order.
 *
 * @param {number} concurrency  max number of in-flight transforms
 * @param {number} retryCount   extra attempts per item after the first failure
 * @param {(item: any) => Promise<any>} transformFn  async per-item transform
 * @returns {TransformStream}
 *
 * @example
 * // 1. Stream data from Firestore
 const source = Products.find({ status: 'pending' }).stream();

 // 2. Process 50 items in parallel with 3 retries
 const processor = ParallelPipe(50, 3, async (doc) => {
    const apiData = await transformNetworkAction(doc); // Your slow O(n) task
    return { ...doc, ...apiData, status: 'processed' };
 });

 // 3. Write back to Firestore
 const saver = new WritableStream({
    async write(doc) { await Products.updateOne(doc._id, doc); }
 });

 // 4. Pipe through
 await source.pipeThrough(processor).pipeTo(saver);
 */
const ParallelPipe = (concurrency, retryCount, transformFn) => {
    // Settled-tracking set of currently running tasks.
    const inFlight = new Set();
    // Runs transformFn, retrying up to `retryCount` additional attempts.
    const executeWithRetry = async (item, attempt = 0) => {
        try {
            return await transformFn(item);
        }
        catch (err) {
            if (attempt < retryCount)
                return executeWithRetry(item, attempt + 1);
            throw err;
        }
    };
    return new TransformStream({
        async transform(item, controller) {
            // BUGFIX: track the fully-settled promise (after .catch/.finally).
            // The previous version added the raw task to `inFlight` but left the
            // `.finally()`-derived promise unobserved, so a task that exhausted
            // its retries triggered an unhandled promise rejection. Failures now
            // error the stream explicitly via controller.error.
            const task = (async () => {
                const result = await executeWithRetry(item);
                controller.enqueue(result); // push transformed doc downstream
            })()
                .catch((err) => {
                    try {
                        controller.error(err);
                    }
                    catch (_) { /* stream already errored; nothing more to report */ }
                })
                .finally(() => inFlight.delete(task));
            inFlight.add(task);
            // Backpressure: once saturated, wait for any one slot to free up.
            if (inFlight.size >= concurrency)
                await Promise.race(inFlight);
        },
        async flush() {
            // Drain every outstanding task before the stream closes.
            await Promise.all(inFlight);
        }
    });
};
|
|
63
|
+
// CommonJS named export.
exports.ParallelPipe = ParallelPipe;
|
|
@@ -0,0 +1,114 @@
|
|
|
1
|
+
import { Firestore } from "firebase/firestore";
|
|
2
|
+
import { FirestoreQuery } from './FirestoreQuery';
|
|
3
|
+
import { DocumentInput, QueryFilter, UpdateDocument, WithSystemFields } from './types';
|
|
4
|
+
/** Per-collection configuration accepted by the FirestoreDB constructor. */
export interface DBOptions {
    /** When true, deletes flag documents (isDeleted) instead of removing them. */
    softDelete?: boolean;
}
|
|
7
|
+
export declare class FirestoreDB<T> {
    private _db;
    private _collectionName;
    private _collectionRef;
    private _isSoftDeleteEnabled;
    /**
     * Initialize with Firebase DB and targeted collection Name
     * @param db: Firebase
     * @param collection string
     */
    constructor(db: Firestore, collectionName: string, options?: DBOptions);
    /**
     * FIRESTOREDB: findOne({ age: '13' }); or findOne('id_123');
     */
    findOne: (filter: string | Record<string, any>) => Promise<WithSystemFields<T> | null>;
    /**
     * This will count the number of documents found based on the given filter
     * @param filter Object with the filter key/value pair
     * @returns number
     */
    countDocuments: (filter?: QueryFilter<T>) => Promise<number>;
    /** Finds a document by id or filter and applies the partial update to it. */
    findOneAndUpdate: (filter: string | QueryFilter<T>, updateObject: Partial<T>) => Promise<WithSystemFields<T>>;
    /**
     * NEW: find(query)
     * Basic implementation. For production, you'd expand the 'filter' to handle where clauses.
     */
    find: (filter?: QueryFilter<T>) => FirestoreQuery<T>;
    /**
     * FIRESTOREDB: insertOne(doc)
     * Optimized: Uses set with merge or create
     */
    insertOne: (entity: Partial<T & {
        _id?: string;
    }>) => Promise<WithSystemFields<T>>;
    /**
     * FIRESTOREDB: insertMany(docs) - Uses Firestore Batched Writes (Limit 500 per batch)
     *
     */
    insertMany: (entities: DocumentInput<T>[]) => Promise<string[]>;
    /**
     * FIRESTOREDB: updateOne(filter, update)
     * Use "Upsert" logic when true will create a new document default to false
     */
    updateOne: (filter: string | Record<string, any>, updateObject: Partial<T>, options?: {
        upsert?: boolean;
    }) => Promise<Record<string, any> | null>;
    /**
     * FIRESTOREDB: updateMany([ { docId: '1', entity: { status: 'A' } }, ... ])
     * Optimized: Chunks updates into batches of 500 to handle large datasets.
     */
    updateMany: <T_1>(updates: UpdateDocument<T_1>[], options?: {
        upsert?: boolean;
    }) => Promise<{
        _id: string;
    }[]>;
    /**
     * FIRESTOREDB: deleteOne('id_123')
     */
    deleteOne: (docId: string) => Promise<{
        acknowledged: boolean;
        deletedCount: number;
    }>;
    /**
     * FIRESTOREDB: deleteMany()
     * Uses Batched Writes for speed.
     */
    deleteMany: (docIds: string[]) => Promise<{
        acknowledged: boolean;
        deletedCount: number;
    }>;
    /**
     * FIRESTOREDB: db.collection.restore(id)
     * Reverses a soft delete by flipping the flag and removing deletedAt.
     */
    restore(id: string): Promise<{
        acknowledged: boolean;
        restoredCount: number;
    }>;
    /**
     * FIRESTOREDB: db.collection.watch(filter)
     * A real-time listener that bypasses the manual cache and
     * pushes updates as they happen in the database.
     */
    watch<T>(filter: Record<string, any> | undefined, callback: (data: WithSystemFields<T>[]) => void, onError?: (error: any) => void): import("@firebase/firestore").Unsubscribe;
    /**
     * Efficiently checks if a document exists without downloading it. (only metadata).
     * Optimized to only check for the presence of a document
     */
    exists: (filter: Record<string, any>) => Promise<boolean>;
    /**
     * FIRESTOREDB STYLE: aggregate({ status: 'sold' }, { total: { $sum: 'price' }, avg: { $avg: 'price' } })
     */
    aggregate: (filter: Record<string, any> | undefined, aggregations: {
        [key: string]: {
            $sum?: string;
            $avg?: string;
            $count?: boolean;
        };
    }) => Promise<import("@firebase/firestore").AggregateSpecData<any>>;
    private _buildConstraints;
    private _flattenFilter;
    /**
     * In FirestoreDB class:
     * Purges all cached queries for THIS collection to ensure data freshness.
     */
    private _invalidateCache;
}
|
|
114
|
+
//# sourceMappingURL=FirestoreDB.d.ts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"FirestoreDB.d.ts","sourceRoot":"","sources":["../../src/FirestoreDB.ts"],"names":[],"mappings":"AACA,OAAO,EAGH,SAAS,EAgBZ,MAAM,oBAAoB,CAAC;AAC5B,OAAO,EAAE,cAAc,EAAkB,MAAM,kBAAkB,CAAC;AAClE,OAAO,EAAE,aAAa,EAAE,WAAW,EAAE,cAAc,EAAE,gBAAgB,EAAE,MAAM,SAAS,CAAC;AAKvF,MAAM,WAAW,SAAS;IACtB,UAAU,CAAC,EAAE,OAAO,CAAC;CACxB;AAGD,qBAAa,WAAW,CAAC,CAAC;IACtB,OAAO,CAAC,GAAG,CAAY;IACvB,OAAO,CAAC,eAAe,CAAS;IAChC,OAAO,CAAC,cAAc,CAAoC;IAC1D,OAAO,CAAC,oBAAoB,CAAU;IAEtC;;;;OAIG;gBACS,EAAE,EAAE,SAAS,EAAE,cAAc,EAAE,MAAM,EAAE,OAAO,GAAE,SAAc;IAQ1E;;OAEG;IACI,OAAO,GAAU,QAAQ,MAAM,GAAG,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,KAAG,OAAO,CAAC,gBAAgB,CAAC,CAAC,CAAC,GAAG,IAAI,CAAC,CAkBjG;IAED;;;;OAIG;IACI,cAAc,GAAU,SAAQ,WAAW,CAAC,CAAC,CAAM,KAAG,OAAO,CAAC,MAAM,CAAC,CAM3E;IAEM,gBAAgB,GAAU,QAAQ,MAAM,GAAG,WAAW,CAAC,CAAC,CAAC,EAAE,cAAc,OAAO,CAAC,CAAC,CAAC,KAAG,OAAO,CAAC,gBAAgB,CAAC,CAAC,CAAC,CAAC,CAoBxH;IAED;;;OAGG;IACI,IAAI,GAAI,SAAQ,WAAW,CAAC,CAAC,CAAM,uBAUzC;IAED;;;OAGG;IACI,SAAS,GAAU,QAAQ,OAAO,CAAC,CAAC,GAAG;QAAE,GAAG,CAAC,EAAE,MAAM,CAAA;KAAE,CAAC,KAAG,OAAO,CAAC,gBAAgB,CAAC,CAAC,CAAC,CAAC,CAsB7F;IAED;;;OAGG;IACI,UAAU,GAAU,UAAU,aAAa,CAAC,CAAC,CAAC,EAAE,KAAG,OAAO,CAAC,MAAM,EAAE,CAAC,CAiD1E;IAED;;;OAGG;IACI,SAAS,GAAU,QAAQ,MAAM,GAAG,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,EAAE,cAAc,OAAO,CAAC,CAAC,CAAC,EAAE,UAAS;QAAE,MAAM,CAAC,EAAE,OAAO,CAAA;KAAO,yCAqC3H;IAED;;;OAGG;IACI,UAAU,GAAU,GAAC,EAAE,SAAS,cAAc,CAAC,GAAC,CAAC,EAAE,EAAE,UAAS;QAAE,MAAM,CAAC,EAAE,OAAO,CAAA;KAAsB,KAAG,OAAO,CAAC;QAAE,GAAG,EAAE,MAAM,CAAA;KAAE,EAAE,CAAC,CAkDzI;IAED;;OAEG;IACI,SAAS,GAAU,OAAO,MAAM,KAAG,OAAO,CAAC;QAAE,YAAY,EAAE,OAAO,CAAC;QAAC,YAAY,EAAE,MAAM,CAAA;KAAE,CAAC,CA0BjG;IAED;;;OAGG;IACI,UAAU,GAAU,QAAQ,MAAM,EAAE,KAAG,OAAO,CAAC;QAAE,YAAY,EAAE,OAAO,CAAC;QAAC,YAAY,EAAE,MAAM,CAAA;KAAE,CAAC,CA+BrG;IAED;;;GAGD;IACc,OAAO,CAAC,EAAE,EAAE,MAAM,GAAG,OAAO,CAAC;QAAE,YAAY,EAAE,OAAO,CAAC;QAAC,aAAa,EAAE,MAAM,CAAA;KAAE,CAAC;IAgB3F;;;;GAID;IACQ,KAAK,CAAC,CAAC,EACV,MAAM,EAAE,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,YAAK,EAChC,QAAQ,EAAE,CAAC,IAAI,EAAE,gBAAgB,CAAC,
CAAC,CAAC,EAAE,KAAK,IAAI,EAC/C,OAAO,CAAC,EAAE,CAAC,KAAK,EAAE,GAAG,KAAK,IAAI;IAkBlC;;;OAGG;IACI,MAAM,GAAU,QAAQ,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,KAAG,OAAO,CAAC,OAAO,CAAC,CAIpE;IAED;;OAEG;IACI,SAAS,GACZ,QAAQ,MAAM,CAAC,MAAM,EAAE,GAAG,CAAC,YAAK,EAChC,cAAc;QAAE,CAAC,GAAG,EAAE,MAAM,GAAG;YAAE,IAAI,CAAC,EAAE,MAAM,CAAC;YAAC,IAAI,CAAC,EAAE,MAAM,CAAC;YAAC,MAAM,CAAC,EAAE,OAAO,CAAA;SAAE,CAAA;KAAE,mEAgBtF;IAGD,OAAO,CAAC,iBAAiB;IAIzB,OAAO,CAAC,cAAc,CAwBpB;IAEF;;;OAGG;IACH,OAAO,CAAC,gBAAgB;CAa3B"}
|