fire2mongo 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +651 -0
- package/dist/index.d.mts +317 -0
- package/dist/index.d.ts +317 -0
- package/dist/index.js +576 -0
- package/dist/index.mjs +518 -0
- package/package.json +42 -0
package/dist/index.mjs
ADDED
|
@@ -0,0 +1,518 @@
|
|
|
1
|
+
// src/connection.ts
|
|
2
|
+
import { MongoClient } from "mongodb";
|
|
3
|
+
// Module-level singletons: one shared MongoClient/Db per process,
// populated by initMongoDB() and torn down by closeMongoDB().
var _client = null;
var _db = null;
|
|
5
|
+
/**
 * Opens the shared MongoDB connection and caches the Db handle.
 * Idempotent once connected: later calls return the cached Db and ignore
 * `config`. NOTE(review): a second call with a different uri/dbName is
 * silently ignored, and two concurrent first calls can each create a
 * client — confirm callers initialize once at startup.
 *
 * @param {{ uri: string, dbName: string }} config - connection settings.
 * @returns {Promise<object>} the connected Db handle.
 */
async function initMongoDB(config) {
  if (_db) return _db;
  _client = new MongoClient(config.uri);
  await _client.connect();
  _db = _client.db(config.dbName);
  return _db;
}
|
|
12
|
+
/**
 * Returns the Db handle cached by initMongoDB().
 * @throws {Error} if initMongoDB() has not completed yet.
 */
function getDb() {
  if (_db) return _db;
  throw new Error(
    "[fire2mongo] MongoDB not initialized. Call initMongoDB() before using any firestore functions."
  );
}
|
|
20
|
+
/**
 * Closes the shared client (if open) and clears the cached handles so a
 * later initMongoDB() call can reconnect. No-op when not connected.
 */
async function closeMongoDB() {
  if (!_client) return;
  await _client.close();
  _client = null;
  _db = null;
}
|
|
27
|
+
|
|
28
|
+
// src/collection-registry.ts
|
|
29
|
+
// Maps Firestore collection names -> Mongo collection names.
var _registry = /* @__PURE__ */ new Map();
|
|
30
|
+
/**
 * Registers a Firestore collection name, optionally mapped to a different
 * Mongo collection name (defaults to the same name).
 *
 * @param {string} firebaseName - collection name used in Firestore paths.
 * @param {{ mongoCollection?: string }} [options] - override target name.
 */
function registerCollection(firebaseName, options) {
  const mongoName = options?.mongoCollection ?? firebaseName;
  _registry.set(firebaseName, mongoName);
}
|
|
33
|
+
/**
 * Bulk-registers firebaseName -> mongoName collection mappings.
 * @param {Record<string, string>} map - name pairs to register.
 */
function registerCollections(map) {
  Object.entries(map).forEach(([firebaseName, mongoName]) => {
    _registry.set(firebaseName, mongoName);
  });
}
|
|
38
|
+
/** True if the Firestore collection name has been registered. */
function hasCollection(firebaseName) {
  return _registry.has(firebaseName);
}
|
|
41
|
+
/**
 * Resolves a registered Firestore collection name to a live Mongo
 * Collection handle via the shared Db.
 * @throws {Error} when the name was never registered; the message lists
 *   the names that are registered.
 */
function getCollection(firebaseName) {
  const mongoName = _registry.get(firebaseName);
  if (mongoName) {
    return getDb().collection(mongoName);
  }
  const registered = [..._registry.keys()].join(", ") || "(none)";
  throw new Error(
    `[fire2mongo] No collection registered for "${firebaseName}". Registered: ${registered}. Call registerCollection("${firebaseName}") during app startup.`
  );
}
|
|
51
|
+
/** Removes all registered collection mappings (useful in tests). */
function clearRegistry() {
  _registry.clear();
}
|
|
54
|
+
|
|
55
|
+
// src/firestore.ts
|
|
56
|
+
import { ObjectId } from "mongodb";
|
|
57
|
+
|
|
58
|
+
// src/query-builder.ts
|
|
59
|
+
/**
 * Builds a MongoDB filter document from Firestore-style constraints.
 * `and`/`or` groups recurse (one sub-filter per grouped constraint);
 * plain `where` clauses are applied directly to the top-level filter.
 *
 * @param {Array<object>} constraints - query constraint objects.
 * @returns {object} Mongo filter document.
 */
function buildMongoFilter(constraints) {
  const filter = {};
  const andClauses = [];
  const orClauses = [];
  for (const constraint of constraints) {
    const { type, queries } = constraint;
    if (type === "and" && queries) {
      for (const q of queries) andClauses.push(buildMongoFilter([q]));
    } else if (type === "or" && queries) {
      for (const q of queries) orClauses.push(buildMongoFilter([q]));
    } else if (type === "where" && constraint.field && constraint.operator !== undefined) {
      applyWhere(filter, constraint.field, constraint.operator, constraint.value);
    }
  }
  if (andClauses.length > 0) filter.$and = andClauses;
  if (orClauses.length > 0) filter.$or = orClauses;
  return filter;
}
|
|
76
|
+
/**
 * Translates a single Firestore `where` operator into MongoDB filter
 * syntax, mutating `filter` in place.
 *
 * Fix: operator conditions on the same field are MERGED instead of
 * overwritten, so a range query such as `where("age", ">=", 18)` plus
 * `where("age", "<=", 30)` produces `{ age: { $gte: 18, $lte: 30 } }`
 * rather than losing the first constraint.
 *
 * @param {Record<string, any>} filter - Mongo filter being built (mutated).
 * @param {string} field - document field path.
 * @param {string} op - Firestore operator ("==", "!=", "<", "in", ...).
 * @param {*} value - comparison value.
 */
function applyWhere(filter, field, op, value) {
  // Fold a Mongo operator into any existing operator object on this field.
  const merge = (mongoOp, v) => {
    const existing = filter[field];
    const isOpObject =
      existing !== null && typeof existing === "object" && !Array.isArray(existing);
    filter[field] = { ...(isOpObject ? existing : {}), [mongoOp]: v };
  };
  switch (op) {
    case "==":
      // Equality replaces any previous condition on the field.
      filter[field] = value;
      break;
    case "!=":
      merge("$ne", value);
      break;
    case "<":
      merge("$lt", value);
      break;
    case "<=":
      merge("$lte", value);
      break;
    case ">":
      merge("$gt", value);
      break;
    case ">=":
      merge("$gte", value);
      break;
    case "in":
      merge("$in", value);
      break;
    case "not-in":
      merge("$nin", value);
      break;
    case "array-contains":
      merge("$elemMatch", { $eq: value });
      break;
    case "array-contains-any":
      // $in on an array field matches when any element equals any value.
      merge("$in", value);
      break;
  }
}
|
|
110
|
+
/**
 * Builds Mongo find() options (sort/limit/skip) from Firestore-style
 * constraints. `orderBy` entries accumulate into one sort spec; `limit`
 * and `offset` map to `limit` and `skip`.
 *
 * @param {Array<object>} constraints - query constraint objects.
 * @returns {object} options for Collection.find().
 */
function buildFindOptions(constraints) {
  const options = {};
  const sortSpec = {};
  for (const { type, field, direction, count } of constraints) {
    switch (type) {
      case "orderBy":
        if (field) sortSpec[field] = direction === "desc" ? -1 : 1;
        break;
      case "limit":
        if (count !== undefined) options.limit = count;
        break;
      case "offset":
        if (count !== undefined) options.skip = count;
        break;
    }
  }
  if (Object.keys(sortSpec).length > 0) {
    options.sort = sortSpec;
  }
  return options;
}
|
|
127
|
+
|
|
128
|
+
// src/snapshot.ts
|
|
129
|
+
/**
 * Wraps a raw Mongo document in a Firestore-like DocumentSnapshot.
 * `data()` strips Mongo's `_id` and exposes the document id as `id`;
 * a null/undefined `raw` yields exists() === false and data() === undefined.
 */
function createDocumentSnapshot(id, raw) {
  return {
    id,
    exists() {
      return raw != null;
    },
    data() {
      if (!raw) return undefined;
      const { _id, ...fields } = raw;
      return { ...fields, id };
    }
  };
}
|
|
142
|
+
/**
 * Wraps an array of raw Mongo documents in a Firestore-like QuerySnapshot.
 * Each doc's id comes from `_id` (stringified), falling back to an `id`
 * field, then "". Every per-doc snapshot reports exists() === true.
 */
function createQuerySnapshot(raws) {
  const docs = raws.map((raw) => {
    const id = raw._id?.toString() ?? raw.id ?? "";
    const { _id, ...fields } = raw;
    const payload = { ...fields, id };
    return {
      id,
      exists: () => true,
      data: () => payload
    };
  });
  return {
    docs,
    size: docs.length,
    empty: docs.length === 0,
    forEach(callback) {
      docs.forEach(callback);
    }
  };
}
|
|
166
|
+
|
|
167
|
+
// src/field-value.ts
|
|
168
|
+
// Sentinel wrapper mirroring Firestore's FieldValue transforms
// (arrayUnion / arrayRemove / increment). Instances are detected by
// buildUpdateOperators and translated into Mongo update operators.
var FieldValue = class {
  // type: "arrayUnion" | "arrayRemove" | "increment"
  // elements: operands for the transform (increment uses elements[0])
  constructor(type, elements) {
    this.type = type;
    this.elements = elements;
  }
};
|
|
174
|
+
/** Firestore-style sentinel: add the given elements to an array field without duplicates. */
function arrayUnion(...elements) {
  return new FieldValue("arrayUnion", elements);
}
|
|
177
|
+
/** Firestore-style sentinel: remove all occurrences of the given elements from an array field. */
function arrayRemove(...elements) {
  return new FieldValue("arrayRemove", elements);
}
|
|
180
|
+
/** Firestore-style sentinel: atomically add `n` to a numeric field. */
function increment(n) {
  return new FieldValue("increment", [n]);
}
|
|
183
|
+
/**
 * Converts an update payload into MongoDB update operators. Plain values
 * accumulate under $set; FieldValue sentinels map to $addToSet ($each),
 * $pull ($in), and $inc respectively. Empty operator groups are omitted.
 *
 * @param {Record<string, any>} data - fields to write.
 * @returns {object} Mongo update document.
 */
function buildUpdateOperators(data) {
  const buckets = { $set: {}, $addToSet: {}, $pull: {}, $inc: {} };
  for (const [key, value] of Object.entries(data)) {
    if (!(value instanceof FieldValue)) {
      buckets.$set[key] = value;
      continue;
    }
    if (value.type === "arrayUnion") {
      buckets.$addToSet[key] = { $each: value.elements };
    } else if (value.type === "arrayRemove") {
      buckets.$pull[key] = { $in: value.elements };
    } else if (value.type === "increment") {
      buckets.$inc[key] = value.elements[0];
    }
  }
  const ops = {};
  for (const [op, fields] of Object.entries(buckets)) {
    if (Object.keys(fields).length > 0) ops[op] = fields;
  }
  return ops;
}
|
|
212
|
+
/** True if any top-level value in `data` is a FieldValue sentinel. */
function hasFieldValues(data) {
  for (const value of Object.values(data)) {
    if (value instanceof FieldValue) return true;
  }
  return false;
}
|
|
215
|
+
|
|
216
|
+
// src/firestore.ts
|
|
217
|
+
/**
 * Derives parent-reference fields from a Firestore parent path: each
 * (collection, id) pair becomes a `<collection>Id` field, e.g.
 * "users/42/posts" -> { usersId: "42" }. Used to scope subcollection
 * documents stored in a flat Mongo collection.
 *
 * @param {string|undefined} parentPath - slash-separated parent path.
 * @returns {Record<string, string>} parent-reference fields.
 */
function extractParentRefs(parentPath) {
  const refs = {};
  if (!parentPath) return refs;
  const segments = parentPath.split("/");
  for (let i = 0; i + 1 < segments.length; i += 2) {
    const collectionName = segments[i];
    refs[`${collectionName}Id`] = segments[i + 1];
  }
  return refs;
}
|
|
226
|
+
/**
 * Maps a Firestore path to a flat Mongo collection name by joining the
 * collection segments (even indices) with underscores:
 * "users/42/posts" -> "users_posts".
 *
 * @param {string} fullPath - slash-separated Firestore path.
 * @returns {string} flattened Mongo collection name.
 */
function resolveMongoName(fullPath) {
  return fullPath
    .split("/")
    .filter((_, index) => index % 2 === 0)
    .join("_");
}
|
|
234
|
+
/**
 * Creates a DocumentReference. Two call shapes:
 *  - doc(collectionRef): auto-generates a new ObjectId-based id inside
 *    that collection.
 *  - doc(db, "path", ...segments): resolves an explicit "collection/id"
 *    (optionally nested "col/id/subcol/id") path.
 * @throws {Error} when no collection path is given, or the path lacks an
 *   id segment.
 */
function doc(dbOrCollection, collectionPath, ...pathSegments) {
  const isCollectionRef =
    typeof dbOrCollection === "object" && dbOrCollection.path && !collectionPath;
  if (isCollectionRef) {
    const colRef = dbOrCollection;
    const generatedId = new ObjectId().toString();
    return {
      id: generatedId,
      path: `${colRef.path}/${generatedId}`,
      collection: colRef._mongoCollection,
      _parentPath: colRef._parentPath
    };
  }
  if (!collectionPath) throw new Error("[fire2mongo] doc() requires a collection path.");
  const fullPath = [collectionPath, ...pathSegments].join("/");
  const segments = fullPath.split("/");
  if (segments.length < 2) throw new Error("[fire2mongo] doc() requires at least collection/id.");
  const documentId = segments[segments.length - 1];
  const parentPath = segments.slice(0, -1).join("/");
  return {
    id: documentId,
    path: fullPath,
    collection: resolveMongoName(parentPath),
    // Only nested docs carry a parent path; top-level "col/id" does not.
    _parentPath: segments.length > 2 ? parentPath : undefined
  };
}
|
|
259
|
+
/**
 * Creates a CollectionReference for a (possibly nested) Firestore path.
 * Valid paths alternate collection/id and end on a collection segment,
 * so the segment count must be odd.
 * @throws {Error} on an even number of path segments.
 */
function collection(_db2, collectionPath, ...pathSegments) {
  const fullPath = [collectionPath, ...pathSegments].join("/");
  const segments = fullPath.split("/");
  if (segments.length % 2 === 0) {
    throw new Error(
      `[fire2mongo] Invalid collection path "${fullPath}". Must have an odd number of segments.`
    );
  }
  return {
    id: segments[segments.length - 1],
    path: fullPath,
    _mongoCollection: resolveMongoName(fullPath),
    _parentPath: segments.length > 1 ? segments.slice(0, -1).join("/") : undefined
  };
}
|
|
276
|
+
/**
 * Fetches a single document by reference, looking up `_id` first (when
 * the id is a valid ObjectId string) and falling back to a plain `id`
 * field. Errors are logged and yield a non-existent snapshot.
 *
 * Fix: the original always ran `new ObjectId(reference.id)`, which THROWS
 * for custom string ids, so the `{ id }` fallback lookup was unreachable
 * and every custom-id read returned "not found". Guard with
 * ObjectId.isValid so the fallback actually runs.
 */
async function getDoc(reference) {
  try {
    const col = getCollection(reference.collection);
    let raw = null;
    if (ObjectId.isValid(reference.id)) {
      raw = await col.findOne({ _id: new ObjectId(reference.id) });
    }
    raw = raw ?? await col.findOne({ id: reference.id });
    return createDocumentSnapshot(reference.id, raw);
  } catch (err) {
    console.error("[fire2mongo] getDoc error:", err);
    return createDocumentSnapshot(reference.id, null);
  }
}
|
|
286
|
+
/**
 * Executes a query (or reads a whole collection reference) and returns a
 * Firestore-like QuerySnapshot. Subcollection reads are scoped by
 * parent-reference fields derived from the parent path. Errors are
 * logged and yield an empty snapshot.
 */
async function getDocs(queryOrRef) {
  try {
    const isQuery = "_filters" in queryOrRef;
    const colRef = isQuery ? queryOrRef._collectionRef : queryOrRef;
    const constraints = isQuery ? queryOrRef._filters : [];
    const col = getCollection(colRef._mongoCollection);
    let filter = buildMongoFilter(constraints);
    const parentRefs = extractParentRefs(colRef._parentPath);
    if (Object.keys(parentRefs).length) {
      // Query clauses win over parent scoping on key collisions.
      filter = { ...parentRefs, ...filter };
    }
    const raws = await col.find(filter, buildFindOptions(constraints)).toArray();
    return createQuerySnapshot(raws);
  } catch (err) {
    console.error("[fire2mongo] getDocs error:", err);
    return createQuerySnapshot([]);
  }
}
|
|
313
|
+
/**
 * Creates or replaces a document (upsert). With { merge: true }, or when
 * `data` contains FieldValue sentinels, performs a partial update instead
 * of a full replace. Parent-reference fields are merged in for
 * subcollection documents.
 *
 * Fix: custom string ids (legal in Firestore, e.g. "user-123") made
 * `new ObjectId(reference.id)` throw. Such ids are now stored as raw
 * string `_id` values via an ObjectId.isValid guard.
 *
 * @param {object} reference - DocumentReference from doc().
 * @param {Record<string, any>} data - fields to write.
 * @param {{ merge?: boolean }} [options] - merge instead of replace.
 */
async function setDoc(reference, data, options) {
  const col = getCollection(reference.collection);
  const _id = ObjectId.isValid(reference.id) ? new ObjectId(reference.id) : reference.id;
  const parentRefs = extractParentRefs(reference._parentPath);
  const fullData = { ...parentRefs, ...data };
  if (hasFieldValues(fullData)) {
    // Sentinels require operator-based updates, never a plain replace.
    await col.updateOne({ _id }, buildUpdateOperators(fullData), { upsert: true });
    return;
  }
  if (options?.merge) {
    await col.updateOne({ _id }, { $set: fullData }, { upsert: true });
    return;
  }
  await col.replaceOne({ _id }, { _id, ...fullData }, { upsert: true });
}
|
|
340
|
+
/**
 * Inserts a new document into a collection, letting Mongo assign the
 * `_id`, and returns a DocumentReference for the created document.
 * Parent-reference fields are merged in for subcollections.
 */
async function addDoc(reference, data) {
  const col = getCollection(reference._mongoCollection);
  const parentRefs = extractParentRefs(reference._parentPath);
  const result = await col.insertOne({ ...parentRefs, ...data });
  const newId = result.insertedId.toString();
  return {
    id: newId,
    path: `${reference.path}/${newId}`,
    collection: reference._mongoCollection,
    _parentPath: reference._parentPath
  };
}
|
|
353
|
+
/**
 * Applies a partial update to an existing document. FieldValue sentinels
 * are translated to Mongo operators; plain values use $set.
 *
 * Fix: ids that are not valid ObjectId strings no longer crash the
 * ObjectId constructor — they are matched as raw string `_id` values.
 */
async function updateDoc(reference, data) {
  const col = getCollection(reference.collection);
  const _id = ObjectId.isValid(reference.id) ? new ObjectId(reference.id) : reference.id;
  const ops = hasFieldValues(data) ? buildUpdateOperators(data) : { $set: data };
  await col.updateOne({ _id }, ops);
}
|
|
361
|
+
/**
 * Deletes a document by id.
 *
 * Fix: ids that are not valid ObjectId strings no longer crash the
 * ObjectId constructor — they are matched as raw string `_id` values.
 */
async function deleteDoc(reference) {
  const col = getCollection(reference.collection);
  const _id = ObjectId.isValid(reference.id) ? new ObjectId(reference.id) : reference.id;
  await col.deleteOne({ _id });
}
|
|
365
|
+
/** Bundles a collection reference with its query constraints. */
function query(collectionRef, ...constraints) {
  const boundQuery = {
    _collectionRef: collectionRef,
    _filters: constraints
  };
  return boundQuery;
}
|
|
368
|
+
/**
 * Builds a Firestore-style where constraint: `field <op> value`.
 * @param {string} field @param {string} op @param {*} value
 */
function where(field, op, value) {
  const constraint = { type: "where", field, operator: op, value };
  return constraint;
}
|
|
371
|
+
/** Groups constraints so they ALL must match ($and). */
function and(...queries) {
  const group = { type: "and", queries };
  return group;
}
|
|
374
|
+
/** Groups constraints so ANY may match ($or). */
function or(...queries) {
  const group = { type: "or", queries };
  return group;
}
|
|
377
|
+
/** Sort constraint; direction defaults to ascending. */
function orderBy(field, direction = "asc") {
  const constraint = { type: "orderBy", field, direction };
  return constraint;
}
|
|
380
|
+
/** Caps the number of returned documents. */
function limit(count) {
  const constraint = { type: "limit", count };
  return constraint;
}
|
|
383
|
+
/** Skips the first `count` documents (Mongo `skip`). */
function offset(count) {
  const constraint = { type: "offset", count };
  return constraint;
}
|
|
386
|
+
/**
 * Creates a Firestore-like WriteBatch. Operations are queued and executed
 * on commit(). Unlike Firestore the batch is NOT atomic (no Mongo
 * session), but commit() now applies writes SEQUENTIALLY in queue order —
 * the original used Promise.all, which raced multiple writes to the same
 * document and made results nondeterministic.
 */
function writeBatch(_db2) {
  const ops = [];
  const batch = {
    set(ref, data, options) {
      ops.push(() => setDoc(ref, data, options));
      return batch;
    },
    update(ref, data) {
      ops.push(() => updateDoc(ref, data));
      return batch;
    },
    delete(ref) {
      ops.push(() => deleteDoc(ref));
      return batch;
    },
    async commit() {
      // Sequential on purpose: preserves write order per document.
      for (const op of ops) {
        await op();
      }
    }
  };
  return batch;
}
|
|
407
|
+
/**
 * Firestore-like runTransaction. Reads go straight through; writes are
 * buffered and flushed after `fn` resolves. NOT atomic (no Mongo
 * session/retry), but buffered writes are now applied SEQUENTIALLY in
 * call order — the original Promise.all raced writes targeting the same
 * document.
 *
 * @param {object} _db2 - ignored placeholder db handle.
 * @param {(tx: object) => Promise<*>} fn - transaction body.
 * @returns {Promise<*>} the value resolved by `fn`.
 */
async function runTransaction(_db2, fn) {
  const pendingWrites = [];
  const tx = {
    async get(ref) {
      return getDoc(ref);
    },
    set(ref, data, options) {
      pendingWrites.push(() => setDoc(ref, data, options));
      return tx;
    },
    update(ref, data) {
      pendingWrites.push(() => updateDoc(ref, data));
      return tx;
    },
    delete(ref) {
      pendingWrites.push(() => deleteDoc(ref));
      return tx;
    }
  };
  const result = await fn(tx);
  // Apply in order — several writes may target the same document.
  for (const write of pendingWrites) {
    await write();
  }
  return result;
}
|
|
430
|
+
/**
 * Compatibility shim for firebase/firestore's getFirestore(): returns an
 * opaque placeholder object; fire2mongo functions ignore the db argument.
 */
function getFirestore() {
  return {};
}
|
|
433
|
+
|
|
434
|
+
// src/timestamp.ts
|
|
435
|
+
/**
 * Firestore-compatible Timestamp backed by (seconds, nanoseconds).
 * Serializes to a JavaScript Date for MongoDB storage (toJSON/valueOf).
 *
 * Fix: the factories previously computed nanoseconds as `ms % 1e3 * 1e6`,
 * which is NEGATIVE for pre-epoch times and broke toMillis() round-trips
 * (fromMillis(-1).toMillis() === -1001). Nanoseconds are now derived from
 * the remainder relative to the floored seconds, keeping them in [0, 1e9).
 */
var Timestamp = class _Timestamp {
  constructor(_seconds, _nanoseconds) {
    this._seconds = _seconds;
    this._nanoseconds = _nanoseconds;
  }
  get seconds() {
    return this._seconds;
  }
  get nanoseconds() {
    return this._nanoseconds;
  }
  /** Current time */
  static now() {
    const ms = Date.now();
    const seconds = Math.floor(ms / 1e3);
    return new _Timestamp(seconds, (ms - seconds * 1e3) * 1e6);
  }
  /** From a JavaScript Date */
  static fromDate(date) {
    const ms = date.getTime();
    const seconds = Math.floor(ms / 1e3);
    return new _Timestamp(seconds, (ms - seconds * 1e3) * 1e6);
  }
  /** From epoch milliseconds */
  static fromMillis(ms) {
    const seconds = Math.floor(ms / 1e3);
    return new _Timestamp(seconds, (ms - seconds * 1e3) * 1e6);
  }
  /** Convert to JavaScript Date */
  toDate() {
    return new Date(this._seconds * 1e3 + this._nanoseconds / 1e6);
  }
  /** Convert to epoch milliseconds */
  toMillis() {
    return this._seconds * 1e3 + Math.floor(this._nanoseconds / 1e6);
  }
  /** Field-wise equality with another Timestamp. */
  isEqual(other) {
    return this._seconds === other._seconds && this._nanoseconds === other._nanoseconds;
  }
  toString() {
    return `Timestamp(seconds=${this._seconds}, nanoseconds=${this._nanoseconds})`;
  }
  /** MongoDB serialization — stored as Date */
  toJSON() {
    return this.toDate();
  }
  valueOf() {
    return this.toDate();
  }
};
|
|
482
|
+
|
|
483
|
+
// src/index.ts
|
|
484
|
+
// Placeholder "db" object exported for call-site compatibility with
// firebase/firestore; the functions in this module do not read it.
var db = {};
export {
  FieldValue,
  Timestamp,
  addDoc,
  and,
  arrayRemove,
  arrayUnion,
  clearRegistry,
  closeMongoDB,
  collection,
  db,
  deleteDoc,
  doc,
  getCollection,
  getDb,
  getDoc,
  getDocs,
  getFirestore,
  hasCollection,
  increment,
  initMongoDB,
  limit,
  offset,
  or,
  orderBy,
  query,
  registerCollection,
  registerCollections,
  runTransaction,
  setDoc,
  updateDoc,
  where,
  writeBatch
};
|
package/package.json
ADDED
|
@@ -0,0 +1,42 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "fire2mongo",
|
|
3
|
+
"version": "1.0.0",
|
|
4
|
+
"description": "Drop-in TypeScript replacement for firebase/firestore backed by MongoDB",
|
|
5
|
+
"main": "dist/index.js",
|
|
6
|
+
"module": "dist/index.mjs",
|
|
7
|
+
"types": "dist/index.d.ts",
|
|
8
|
+
"exports": {
|
|
9
|
+
".": {
|
|
10
|
+
"types": "./dist/index.d.ts",
|
|
11
|
+
"import": "./dist/index.mjs",
|
|
12
|
+
"require": "./dist/index.js"
|
|
13
|
+
}
|
|
14
|
+
},
|
|
15
|
+
"files": [
|
|
16
|
+
"dist"
|
|
17
|
+
],
|
|
18
|
+
"scripts": {
|
|
19
|
+
"build": "tsup src/index.ts --format cjs,esm --dts --clean",
|
|
20
|
+
"dev": "tsup src/index.ts --format cjs,esm --dts --watch",
|
|
21
|
+
"typecheck": "tsc --noEmit",
|
|
22
|
+
"test": "jest"
|
|
23
|
+
},
|
|
24
|
+
"keywords": [
|
|
25
|
+
"firebase",
|
|
26
|
+
"firestore",
|
|
27
|
+
"mongodb",
|
|
28
|
+
"migration",
|
|
29
|
+
"drop-in"
|
|
30
|
+
],
|
|
31
|
+
"author": "",
|
|
32
|
+
"license": "MIT",
|
|
33
|
+
"peerDependencies": {
|
|
34
|
+
"mongodb": "^6"
|
|
35
|
+
},
|
|
36
|
+
"devDependencies": {
|
|
37
|
+
"@types/node": "^20.0.0",
|
|
38
|
+
"mongodb": "^6.0.0",
|
|
39
|
+
"tsup": "^8.0.0",
|
|
40
|
+
"typescript": "^5.0.0"
|
|
41
|
+
}
|
|
42
|
+
}
|