@typicalday/firegraph 0.12.0 → 0.13.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +317 -73
- package/dist/backend-DuvHGgK1.d.cts +1897 -0
- package/dist/backend-DuvHGgK1.d.ts +1897 -0
- package/dist/backend.cjs +222 -3
- package/dist/backend.cjs.map +1 -1
- package/dist/backend.d.cts +25 -5
- package/dist/backend.d.ts +25 -5
- package/dist/backend.js +197 -4
- package/dist/backend.js.map +1 -1
- package/dist/chunk-2DHMNTV6.js +16 -0
- package/dist/chunk-2DHMNTV6.js.map +1 -0
- package/dist/chunk-4MMQ5W74.js +288 -0
- package/dist/chunk-4MMQ5W74.js.map +1 -0
- package/dist/chunk-D4J7Z4FE.js +67 -0
- package/dist/chunk-D4J7Z4FE.js.map +1 -0
- package/dist/chunk-N5HFDWQX.js +23 -0
- package/dist/chunk-N5HFDWQX.js.map +1 -0
- package/dist/chunk-PAD7WFFU.js +573 -0
- package/dist/chunk-PAD7WFFU.js.map +1 -0
- package/dist/{chunk-AWW4MUJ5.js → chunk-TK64DNVK.js} +12 -1
- package/dist/chunk-TK64DNVK.js.map +1 -0
- package/dist/{chunk-HONQY4HF.js → chunk-WRTFC5NG.js} +362 -17
- package/dist/chunk-WRTFC5NG.js.map +1 -0
- package/dist/client-BKi3vk0Q.d.ts +34 -0
- package/dist/client-BrsaXtDV.d.cts +34 -0
- package/dist/cloudflare/index.cjs +930 -3
- package/dist/cloudflare/index.cjs.map +1 -1
- package/dist/cloudflare/index.d.cts +213 -12
- package/dist/cloudflare/index.d.ts +213 -12
- package/dist/cloudflare/index.js +562 -281
- package/dist/cloudflare/index.js.map +1 -1
- package/dist/codegen/index.d.cts +1 -1
- package/dist/codegen/index.d.ts +1 -1
- package/dist/errors-BRc3I_eH.d.cts +73 -0
- package/dist/errors-BRc3I_eH.d.ts +73 -0
- package/dist/firestore-enterprise/index.cjs +3877 -0
- package/dist/firestore-enterprise/index.cjs.map +1 -0
- package/dist/firestore-enterprise/index.d.cts +141 -0
- package/dist/firestore-enterprise/index.d.ts +141 -0
- package/dist/firestore-enterprise/index.js +985 -0
- package/dist/firestore-enterprise/index.js.map +1 -0
- package/dist/firestore-standard/index.cjs +3117 -0
- package/dist/firestore-standard/index.cjs.map +1 -0
- package/dist/firestore-standard/index.d.cts +49 -0
- package/dist/firestore-standard/index.d.ts +49 -0
- package/dist/firestore-standard/index.js +283 -0
- package/dist/firestore-standard/index.js.map +1 -0
- package/dist/index.cjs +590 -550
- package/dist/index.cjs.map +1 -1
- package/dist/index.d.cts +9 -37
- package/dist/index.d.ts +9 -37
- package/dist/index.js +178 -555
- package/dist/index.js.map +1 -1
- package/dist/{registry-Fi074zVa.d.ts → registry-Bc7h6WTM.d.cts} +1 -1
- package/dist/{registry-B1qsVL0E.d.cts → registry-C2KUPVZj.d.ts} +1 -1
- package/dist/{scope-path-B1G3YiA7.d.cts → scope-path-CROFZGr9.d.cts} +1 -56
- package/dist/{scope-path-B1G3YiA7.d.ts → scope-path-CROFZGr9.d.ts} +1 -56
- package/dist/sqlite/index.cjs +3631 -0
- package/dist/sqlite/index.cjs.map +1 -0
- package/dist/sqlite/index.d.cts +111 -0
- package/dist/sqlite/index.d.ts +111 -0
- package/dist/sqlite/index.js +1164 -0
- package/dist/sqlite/index.js.map +1 -0
- package/package.json +33 -3
- package/dist/backend-BsR0lnFL.d.ts +0 -200
- package/dist/backend-Ct-fLlkG.d.cts +0 -200
- package/dist/chunk-AWW4MUJ5.js.map +0 -1
- package/dist/chunk-HONQY4HF.js.map +0 -1
- package/dist/types-DxYLy8Ol.d.cts +0 -770
- package/dist/types-DxYLy8Ol.d.ts +0 -770
|
@@ -0,0 +1,573 @@
|
|
|
1
|
+
import {
|
|
2
|
+
NODE_RELATION,
|
|
3
|
+
computeEdgeDocId,
|
|
4
|
+
computeNodeDocId
|
|
5
|
+
} from "./chunk-WRTFC5NG.js";
|
|
6
|
+
import {
|
|
7
|
+
FiregraphError
|
|
8
|
+
} from "./chunk-TK64DNVK.js";
|
|
9
|
+
|
|
10
|
+
// src/bulk.ts
// Hard cap on documents per delete batch; a caller-supplied batchSize is
// clamped to this value in bulkDeleteDocIds. (500 matches Firestore's
// per-WriteBatch operation limit — confirm against current Firestore docs.)
var MAX_BATCH_SIZE = 500;
// Default number of retry attempts for a failed batch commit.
var DEFAULT_MAX_RETRIES = 3;
// Base backoff delay between retries; doubled on each attempt.
var BASE_DELAY_MS = 200;
|
|
14
|
+
/** Resolve after `ms` milliseconds; used as the retry-backoff primitive. */
function sleep(ms) {
  return new Promise((done) => {
    setTimeout(done, ms);
  });
}
|
|
17
|
+
/** Split `arr` into consecutive slices of at most `size` elements. */
function chunk(arr, size) {
  const out = [];
  let offset = 0;
  while (offset < arr.length) {
    out.push(arr.slice(offset, offset + size));
    offset += size;
  }
  return out;
}
|
|
24
|
+
/**
 * Delete the given document ids from `collectionPath` using batched writes.
 *
 * Ids are grouped into batches of at most MAX_BATCH_SIZE (a caller-supplied
 * options.batchSize is clamped to that cap). Each batch commit is retried
 * with exponential backoff (BASE_DELAY_MS * 2^attempt) up to
 * options.maxRetries times; a batch that exhausts its retries is recorded
 * in `errors` and the remaining batches still run.
 *
 * options.onProgress, when provided, is invoked after every batch (success
 * or failure) with { completedBatches, totalBatches, deletedSoFar }.
 *
 * Returns { deleted, batches, errors } where `batches` counts committed
 * batches only.
 */
async function bulkDeleteDocIds(db, collectionPath, docIds, options) {
  if (docIds.length === 0) {
    return { deleted: 0, batches: 0, errors: [] };
  }
  const batchSize = Math.min(options?.batchSize ?? MAX_BATCH_SIZE, MAX_BATCH_SIZE);
  const maxRetries = options?.maxRetries ?? DEFAULT_MAX_RETRIES;
  const onProgress = options?.onProgress;
  const idGroups = chunk(docIds, batchSize);
  const errors = [];
  let deleted = 0;
  let completedBatches = 0;
  for (let groupIndex = 0; groupIndex < idGroups.length; groupIndex++) {
    const ids = idGroups[groupIndex];
    let committed = false;
    for (let attempt = 0; attempt <= maxRetries; attempt++) {
      try {
        const writeBatch = db.batch();
        const collectionRef = db.collection(collectionPath);
        for (const id of ids) {
          writeBatch.delete(collectionRef.doc(id));
        }
        await writeBatch.commit();
        committed = true;
        deleted += ids.length;
        break;
      } catch (err) {
        if (attempt >= maxRetries) {
          // Out of retries: record the failure and move on to the next batch.
          errors.push({
            batchIndex: groupIndex,
            error: err instanceof Error ? err : new Error(String(err)),
            operationCount: ids.length
          });
        } else {
          // Exponential backoff before the next attempt.
          await sleep(BASE_DELAY_MS * Math.pow(2, attempt));
        }
      }
    }
    if (committed) {
      completedBatches++;
    }
    onProgress?.({
      completedBatches,
      totalBatches: idGroups.length,
      deletedSoFar: deleted
    });
  }
  return { deleted, batches: completedBatches, errors };
}
|
|
75
|
+
/**
 * Find edges matching `params` via `reader`, then bulk-delete their docs.
 * Collection scans are allowed by default, and an unset limit is normalized
 * to 0 (treated as "no limit" by the reader) before querying.
 */
async function bulkRemoveEdges(db, collectionPath, reader, params, options) {
  const effectiveParams = {
    ...params,
    allowCollectionScan: params.allowCollectionScan ?? true
  };
  if (params.limit === void 0) {
    effectiveParams.limit = 0;
  }
  const edges = await reader.findEdges(effectiveParams);
  const docIds = edges.map((edge) => computeEdgeDocId(edge.aUid, edge.axbType, edge.bUid));
  return bulkDeleteDocIds(db, collectionPath, docIds, options);
}
|
|
81
|
+
/**
 * Recursively delete every document in every subcollection beneath
 * `collectionPath/docId`. Depth-first: each child document's own
 * subcollections are cleared before the children themselves are
 * batch-deleted. onProgress is intentionally not propagated to nested
 * deletes — only batchSize/maxRetries are forwarded.
 * Returns { deleted, errors } aggregated over the whole subtree.
 */
async function deleteSubcollectionsRecursive(db, collectionPath, docId, options) {
  const docRef = db.collection(collectionPath).doc(docId);
  const subcollections = await docRef.listCollections();
  if (subcollections.length === 0) {
    return { deleted: 0, errors: [] };
  }
  const subOptions = options ? { batchSize: options.batchSize, maxRetries: options.maxRetries } : void 0;
  let totalDeleted = 0;
  const allErrors = [];
  for (const subCollRef of subcollections) {
    const subCollPath = subCollRef.path;
    // select() with no fields fetches only document ids, not payloads.
    const snapshot = await subCollRef.select().get();
    const subDocIds = snapshot.docs.map((docSnap) => docSnap.id);
    for (const subDocId of subDocIds) {
      const nested = await deleteSubcollectionsRecursive(db, subCollPath, subDocId, subOptions);
      totalDeleted += nested.deleted;
      allErrors.push(...nested.errors);
    }
    if (subDocIds.length !== 0) {
      const batchResult = await bulkDeleteDocIds(db, subCollPath, subDocIds, subOptions);
      totalDeleted += batchResult.deleted;
      allErrors.push(...batchResult.errors);
    }
  }
  return { deleted: totalDeleted, errors: allErrors };
}
|
|
105
|
+
/**
 * Delete a node, all non-identity edges touching it, and (optionally) the
 * node document's subcollections.
 *
 * 1. Load all edges with aUid === uid and all with bUid === uid (full
 *    collection scan, no limit), in parallel.
 * 2. Drop identity rows (axbType === NODE_RELATION) and dedupe by edge doc
 *    id — a self-loop edge (aUid === bUid) appears in both query results.
 * 3. Recursively clear the node doc's subcollections unless
 *    options.deleteSubcollections === false.
 * 4. Batch-delete every edge doc plus the node doc itself; the node doc id
 *    is appended last, so it always lands in the final batch.
 *
 * Returns { deleted, batches, errors, edgesDeleted, nodeDeleted }, where
 * `deleted` includes subcollection documents and `edgesDeleted` counts only
 * top-level edge docs.
 */
async function removeNodeCascade(db, collectionPath, reader, uid, options) {
  const [outgoingRaw, incomingRaw] = await Promise.all([
    reader.findEdges({ aUid: uid, allowCollectionScan: true, limit: 0 }),
    reader.findEdges({ bUid: uid, allowCollectionScan: true, limit: 0 })
  ]);
  // Identity rows are the node documents themselves; the node is deleted
  // separately via nodeDocId below.
  const outgoing = outgoingRaw.filter((e) => e.axbType !== NODE_RELATION);
  const incoming = incomingRaw.filter((e) => e.axbType !== NODE_RELATION);
  const edgeDocIdSet = /* @__PURE__ */ new Set();
  const allEdges = [];
  for (const edge of [...outgoing, ...incoming]) {
    const docId = computeEdgeDocId(edge.aUid, edge.axbType, edge.bUid);
    if (!edgeDocIdSet.has(docId)) {
      edgeDocIdSet.add(docId);
      allEdges.push(edge);
    }
  }
  const shouldDeleteSubcollections = options?.deleteSubcollections !== false;
  const nodeDocId = computeNodeDocId(uid);
  let subcollectionResult = { deleted: 0, errors: [] };
  if (shouldDeleteSubcollections) {
    subcollectionResult = await deleteSubcollectionsRecursive(
      db,
      collectionPath,
      nodeDocId,
      options
    );
  }
  const edgeDocIds = allEdges.map((e) => computeEdgeDocId(e.aUid, e.axbType, e.bUid));
  // Node doc id goes last so it is in the final chunk (see nodeChunkIndex).
  const allDocIds = [...edgeDocIds, nodeDocId];
  const batchSize = Math.min(options?.batchSize ?? MAX_BATCH_SIZE, MAX_BATCH_SIZE);
  const result = await bulkDeleteDocIds(db, collectionPath, allDocIds, {
    ...options,
    batchSize
  });
  // Infer whether the node doc was deleted: chunking preserves order, so the
  // node (appended last) is in the last chunk; an error at that batch index
  // means the node survived.
  const totalChunks = Math.ceil(allDocIds.length / batchSize);
  const nodeChunkIndex = totalChunks - 1;
  const nodeDeleted = !result.errors.some((e) => e.batchIndex === nodeChunkIndex);
  // If the node was deleted, one of result.deleted was the node doc itself.
  const topLevelEdgesDeleted = nodeDeleted ? result.deleted - 1 : result.deleted;
  return {
    deleted: result.deleted + subcollectionResult.deleted,
    batches: result.batches,
    errors: [...result.errors, ...subcollectionResult.errors],
    edgesDeleted: topLevelEdgesDeleted,
    nodeDeleted
  };
}
|
|
151
|
+
|
|
152
|
+
// src/internal/firestore-aggregate.ts
|
|
153
|
+
import { AggregateField } from "@google-cloud/firestore";
|
|
154
|
+
/** Chain every where-clause in `filters` onto `base` and return the result. */
function applyFiltersToQuery(base, filters) {
  return filters.reduce(
    (query, filter) => query.where(filter.field, filter.op, filter.value),
    base
  );
}
|
|
161
|
+
/**
 * Execute an aggregate query via Firestore's classic AggregateField API.
 *
 * Supported ops: 'count' (must not carry a field), 'sum' and 'avg' (must
 * carry one). Any other op throws UNSUPPORTED_AGGREGATE — per the message,
 * min/max need a SQL-capable backend. An empty spec throws INVALID_QUERY.
 *
 * Null/undefined results are normalized so every alias maps to a number:
 * NaN for 'avg', 0 otherwise.
 */
async function runFirestoreAggregate(base, spec, filters, { edition }) {
  if (Object.keys(spec).length === 0) {
    throw new FiregraphError(
      "aggregate() requires at least one aggregation in the `aggregates` map.",
      "INVALID_QUERY"
    );
  }
  const filtered = applyFiltersToQuery(base, filters);
  const aggregations = {};
  for (const [alias, { op, field }] of Object.entries(spec)) {
    if (op === "count") {
      // count is row-based; a field here is a caller mistake.
      if (field !== void 0) {
        throw new FiregraphError(
          `Aggregate '${alias}' op 'count' must not specify a field \u2014 count operates on rows, not a column expression.`,
          "INVALID_QUERY"
        );
      }
      aggregations[alias] = AggregateField.count();
      continue;
    }
    if (!field) {
      throw new FiregraphError(
        `Aggregate '${alias}' op '${op}' requires a field.`,
        "INVALID_QUERY"
      );
    }
    if (op === "sum") {
      aggregations[alias] = AggregateField.sum(field);
    } else if (op === "avg") {
      aggregations[alias] = AggregateField.average(field);
    } else {
      // edition only affects the error message, not behavior.
      const editionLabel = edition === "enterprise" ? "Firestore Enterprise" : "Firestore Standard";
      throw new FiregraphError(
        `Aggregate op '${op}' is not supported on ${editionLabel}. Both Firestore editions support count/sum/avg via the classic Query API; min/max requires a backend with SQL aggregation (SQLite or DO).`,
        "UNSUPPORTED_AGGREGATE"
      );
    }
  }
  const snap = await filtered.aggregate(aggregations).get();
  const data = snap.data();
  const out = {};
  for (const alias of Object.keys(spec)) {
    const v = data[alias];
    if (v === null || v === void 0) {
      // Normalize missing results: avg of nothing is NaN, count/sum are 0.
      const op = spec[alias].op;
      out[alias] = op === "avg" ? Number.NaN : 0;
    } else {
      out[alias] = v;
    }
  }
  return out;
}
|
|
213
|
+
|
|
214
|
+
// src/internal/firestore-classic-adapter.ts
|
|
215
|
+
/**
 * Plain (non-transactional) adapter over a single Firestore collection.
 * Exposes doc CRUD (get/set/update/delete) plus a filtered, optionally
 * ordered and limited query returning raw doc data.
 */
function createFirestoreAdapter(db, collectionPath) {
  const collectionRef = db.collection(collectionPath);
  // Chain filters, then orderBy (default direction "asc"), then limit.
  const buildQuery = (filters, options) => {
    let q = collectionRef;
    for (const f of filters) {
      q = q.where(f.field, f.op, f.value);
    }
    if (options?.orderBy) {
      q = q.orderBy(options.orderBy.field, options.orderBy.direction ?? "asc");
    }
    if (options?.limit !== void 0) {
      q = q.limit(options.limit);
    }
    return q;
  };
  return {
    collectionPath,
    async getDoc(docId) {
      const snap = await collectionRef.doc(docId).get();
      return snap.exists ? snap.data() : null;
    },
    async setDoc(docId, data, options) {
      const ref = collectionRef.doc(docId);
      if (options?.merge) {
        await ref.set(data, { merge: true });
      } else {
        await ref.set(data);
      }
    },
    async updateDoc(docId, data) {
      await collectionRef.doc(docId).update(data);
    },
    async deleteDoc(docId) {
      await collectionRef.doc(docId).delete();
    },
    async query(filters, options) {
      const snap = await buildQuery(filters, options).get();
      return snap.docs.map((doc) => doc.data());
    }
  };
}
|
|
253
|
+
/**
 * Transaction-scoped adapter: reads go through `tx.get`, while writes are
 * recorded synchronously on `tx` (no await — the transaction stages them).
 */
function createTransactionAdapter(db, collectionPath, tx) {
  const collectionRef = db.collection(collectionPath);
  const refFor = (docId) => collectionRef.doc(docId);
  // Same chaining order as the plain adapter: filters, orderBy, limit.
  const buildQuery = (filters, options) => {
    let q = collectionRef;
    for (const f of filters) {
      q = q.where(f.field, f.op, f.value);
    }
    if (options?.orderBy) {
      q = q.orderBy(options.orderBy.field, options.orderBy.direction ?? "asc");
    }
    if (options?.limit !== void 0) {
      q = q.limit(options.limit);
    }
    return q;
  };
  return {
    async getDoc(docId) {
      const snap = await tx.get(refFor(docId));
      return snap.exists ? snap.data() : null;
    },
    setDoc(docId, data, options) {
      if (options?.merge) {
        tx.set(refFor(docId), data, { merge: true });
      } else {
        tx.set(refFor(docId), data);
      }
    },
    updateDoc(docId, data) {
      tx.update(refFor(docId), data);
    },
    deleteDoc(docId) {
      tx.delete(refFor(docId));
    },
    async query(filters, options) {
      const snap = await tx.get(buildQuery(filters, options));
      return snap.docs.map((doc) => doc.data());
    }
  };
}
|
|
290
|
+
/**
 * Buffered-write adapter: set/update/delete are queued on a single
 * Firestore WriteBatch and flushed by commit().
 */
function createBatchAdapter(db, collectionPath) {
  const collectionRef = db.collection(collectionPath);
  const batch = db.batch();
  const refFor = (docId) => collectionRef.doc(docId);
  return {
    setDoc(docId, data, options) {
      if (options?.merge) {
        batch.set(refFor(docId), data, { merge: true });
      } else {
        batch.set(refFor(docId), data);
      }
    },
    updateDoc(docId, data) {
      batch.update(refFor(docId), data);
    },
    deleteDoc(docId) {
      batch.delete(refFor(docId));
    },
    async commit() {
      await batch.commit();
    }
  };
}
|
|
312
|
+
|
|
313
|
+
// src/internal/firestore-classic-expand.ts
// Chunk size for `in` filters when fanning out over many uids.
// (Presumably matches Firestore's 30-value cap on `in` queries — confirm
// against current Firestore query limits.)
var FIRESTORE_CLASSIC_IN_CHUNK_SIZE = 30;
|
|
315
|
+
/**
 * Read a (possibly dot-separated) field path from a record.
 * Returns undefined when any intermediate value is missing or not an object.
 */
function readField(record, path) {
  if (!path.includes(".")) {
    return record[path];
  }
  return path.split(".").reduce((cursor, segment) => {
    if (cursor === null || typeof cursor !== "object") {
      return void 0;
    }
    return cursor[segment];
  }, record);
}
|
|
326
|
+
/**
 * Comparator for heterogeneous field values used by client-side sorting.
 * null/undefined sort before everything; values of the same typeof compare
 * with native <; mixed types fall back to comparing their string forms.
 */
function compareFieldValues(a, b) {
  if (a === b) return 0;
  if (a === void 0 || a === null) return -1;
  if (b === void 0 || b === null) return 1;
  if (typeof a === typeof b) {
    // a !== b here, so never 0 for same-type values.
    return a < b ? -1 : 1;
  }
  const left = String(a);
  const right = String(b);
  if (left < right) return -1;
  return left > right ? 1 : 0;
}
|
|
339
|
+
/**
 * Classic (non-pipeline) Firestore implementation of expand(): fan out from
 * `params.sources` along `params.axbType` edges.
 *
 * Sources are chunked (FIRESTORE_CLASSIC_IN_CHUNK_SIZE per `in` filter) and
 * queried in parallel, then merged. Because per-chunk orderBy/limit cannot
 * guarantee a global order, results are re-sorted and re-trimmed client-side.
 * NOTE: limitPerSource is enforced only in aggregate
 * (sources.length * limitPerSource), not per individual source.
 *
 * With `hydrate`, target node docs (NODE_RELATION rows keyed by bUid) are
 * fetched and aligned 1:1 with `edges` (null where missing).
 */
async function runFirestoreClassicExpand(adapter, params) {
  if (params.sources.length === 0) {
    return params.hydrate ? { edges: [], targets: [] } : { edges: [] };
  }
  if (params.axbType.length === 0) {
    throw new FiregraphError("expand(): axbType must be a non-empty string.", "INVALID_QUERY");
  }
  // Forward traversal matches on aUid; reverse matches on bUid.
  const direction = params.direction ?? "forward";
  const sourceField = direction === "forward" ? "aUid" : "bUid";
  const chunks = chunkUids(params.sources, FIRESTORE_CLASSIC_IN_CHUNK_SIZE);
  const totalLimit = params.limitPerSource !== void 0 ? params.sources.length * params.limitPerSource : void 0;
  // Equality filters shared by every chunk query, plus the per-chunk `in`.
  const buildFilters = (chunk2) => {
    const filters = [
      { field: "axbType", op: "==", value: params.axbType },
      { field: sourceField, op: "in", value: chunk2 }
    ];
    if (params.aType !== void 0) {
      filters.push({ field: "aType", op: "==", value: params.aType });
    }
    if (params.bType !== void 0) {
      filters.push({ field: "bType", op: "==", value: params.bType });
    }
    return filters;
  };
  // Per-chunk limit is an over-fetch bound: chunk size * limitPerSource.
  const buildOptions = (chunk2) => {
    const opts = {};
    if (params.orderBy) opts.orderBy = params.orderBy;
    if (params.limitPerSource !== void 0) {
      opts.limit = chunk2.length * params.limitPerSource;
    }
    return opts;
  };
  const chunkResults = await Promise.all(
    chunks.map((chunk2) => adapter.query(buildFilters(chunk2), buildOptions(chunk2)))
  );
  let edges = chunkResults.flat();
  if (params.axbType === NODE_RELATION) {
    // Expanding the identity relation: exclude the node docs themselves.
    edges = edges.filter((e) => e.aUid !== e.bUid);
  }
  if (params.orderBy) {
    // Re-sort globally — chunk-local order does not compose across chunks.
    const sortField = params.orderBy.field;
    const dir = params.orderBy.direction ?? "asc";
    edges.sort((a, b) => {
      const cmp = compareFieldValues(readField(a, sortField), readField(b, sortField));
      return dir === "asc" ? cmp : -cmp;
    });
  }
  if (totalLimit !== void 0 && edges.length > totalLimit) {
    edges = edges.slice(0, totalLimit);
  }
  if (!params.hydrate) return { edges };
  const targetUids = edges.map((e) => direction === "forward" ? e.bUid : e.aUid);
  const uniqueTargets = [...new Set(targetUids)];
  if (uniqueTargets.length === 0) {
    return { edges, targets: [] };
  }
  // Node docs are stored as identity edges (axbType === NODE_RELATION),
  // queried here by aUid and keyed in the map by bUid.
  const hydrateChunks = chunkUids(uniqueTargets, FIRESTORE_CLASSIC_IN_CHUNK_SIZE);
  const hydrateResults = await Promise.all(
    hydrateChunks.map(
      (chunk2) => adapter.query([
        { field: "axbType", op: "==", value: NODE_RELATION },
        { field: "aUid", op: "in", value: chunk2 }
      ])
    )
  );
  const byUid = /* @__PURE__ */ new Map();
  for (const row of hydrateResults.flat()) {
    byUid.set(row.bUid, row);
  }
  // targets[i] corresponds to edges[i]; missing nodes become null.
  const targets = targetUids.map((uid) => byUid.get(uid) ?? null);
  return { edges, targets };
}
|
|
411
|
+
/** Split `uids` into groups of at most `chunkSize`; rejects non-positive sizes. */
function chunkUids(uids, chunkSize) {
  if (chunkSize <= 0) {
    throw new FiregraphError(
      `chunkUids: chunkSize must be positive (got ${chunkSize}).`,
      "INVALID_QUERY"
    );
  }
  const groups = [];
  let start = 0;
  while (start < uids.length) {
    groups.push(uids.slice(start, start + chunkSize));
    start += chunkSize;
  }
  return groups;
}
|
|
424
|
+
|
|
425
|
+
// src/internal/projection.ts
// Envelope fields that may be projected directly; every other name is
// treated as living under the `data` payload.
var PROJECTION_BUILTIN_FIELDS = /* @__PURE__ */ new Set([
  "aType",
  "aUid",
  "axbType",
  "bType",
  "bUid",
  "createdAt",
  "updatedAt",
  "v"
]);
/** Map a user-facing projection field to its canonical Firestore path. */
function normalizeFirestoreProjectionField(field) {
  const isBuiltin = PROJECTION_BUILTIN_FIELDS.has(field);
  const isDataPath = field === "data" || field.startsWith("data.");
  return isBuiltin || isDataPath ? field : `data.${field}`;
}
|
|
441
|
+
/**
 * Read `path` (possibly dotted) from `obj`, mapping any missing or
 * undefined value to null so projected rows always carry every key.
 */
function readProjectionPath(obj, path) {
  if (obj === void 0 || obj === null) return null;
  let value;
  if (!path.includes(".")) {
    value = obj[path];
  } else {
    value = obj;
    for (const segment of path.split(".")) {
      if (value === void 0 || value === null || typeof value !== "object") {
        value = void 0;
        break;
      }
      value = value[segment];
    }
  }
  return value === void 0 ? null : value;
}
|
|
455
|
+
|
|
456
|
+
// src/internal/firestore-projection.ts
/**
 * Run a projected edge query: dedupes `select` (first occurrence wins),
 * maps each field to its canonical Firestore path, applies
 * filters/orderBy/limit, and reads every selected field back out of each
 * doc under its ORIGINAL name (missing values become null).
 * Throws INVALID_QUERY when `select` is empty.
 */
async function runFirestoreFindEdgesProjected(base, select, filters, options) {
  if (select.length === 0) {
    throw new FiregraphError(
      "findEdgesProjected requires a non-empty select list \u2014 an empty projection has no representation distinct from `findEdges`.",
      "INVALID_QUERY"
    );
  }
  const seen = /* @__PURE__ */ new Set();
  const fields = [];
  for (const original of select) {
    if (seen.has(original)) continue;
    seen.add(original);
    fields.push({ original, canonical: normalizeFirestoreProjectionField(original) });
  }
  let q = base;
  for (const f of filters) {
    q = q.where(f.field, f.op, f.value);
  }
  if (options?.orderBy) {
    q = q.orderBy(options.orderBy.field, options.orderBy.direction ?? "asc");
  }
  if (options?.limit !== void 0) {
    q = q.limit(options.limit);
  }
  q = q.select(...fields.map((p) => p.canonical));
  const snap = await q.get();
  return snap.docs.map((doc) => {
    const data = doc.data();
    const row = {};
    for (const { original, canonical } of fields) {
      row[original] = readProjectionPath(data, canonical);
    }
    return row;
  });
}
|
|
493
|
+
|
|
494
|
+
// src/internal/firestore-vector.ts
// Top-level envelope fields — these can never hold a vector.
var ENVELOPE_FIELDS = /* @__PURE__ */ new Set([
  "aType",
  "aUid",
  "axbType",
  "bType",
  "bUid",
  "createdAt",
  "updatedAt",
  "v"
]);
/**
 * Canonicalize a vector field path into the `data.*` namespace.
 * Envelope field names are rejected outright (INVALID_QUERY); bare names
 * get a `data.` prefix; existing `data`/`data.*` paths pass through.
 */
function normalizeVectorFieldPath(label, field) {
  if (ENVELOPE_FIELDS.has(field)) {
    throw new FiregraphError(
      `findNearest(): ${label} '${field}' is a built-in envelope field \u2014 vectors must live under \`data.*\`. Use a path like 'data.${field}' if you really meant a nested data field.`,
      "INVALID_QUERY"
    );
  }
  const alreadyScoped = field === "data" || field.startsWith("data.");
  return alreadyScoped ? field : `data.${field}`;
}
|
|
515
|
+
/**
 * Build equality filters from the optional aType/axbType/bType params
 * (truthy values only), then append any raw `where` clauses verbatim.
 */
function buildVectorFilters(params) {
  const filters = [];
  for (const field of ["aType", "axbType", "bType"]) {
    const value = params[field];
    if (value) {
      filters.push({ field, op: "==", value });
    }
  }
  if (params.where) {
    filters.push(...params.where);
  }
  return filters;
}
|
|
523
|
+
/**
 * Coerce a query vector into a plain number[].
 * Accepts a number[] as-is or anything exposing toArray() (e.g. a
 * Firestore VectorValue); otherwise throws INVALID_QUERY.
 */
function toNumberArray(qv) {
  if (Array.isArray(qv)) {
    return qv;
  }
  if (typeof qv.toArray === "function") {
    return qv.toArray();
  }
  throw new FiregraphError(
    "findNearest(): queryVector must be a number[] or a Firestore VectorValue.",
    "INVALID_QUERY"
  );
}
|
|
533
|
+
/**
 * Vector similarity search via Firestore's findNearest().
 *
 * Validates the query vector (non-empty number[] or VectorValue) and
 * `limit` (positive integer <= 1000); normalizes vectorField and the
 * optional distanceResultField into the `data.*` namespace; applies
 * type/where equality filters before the nearest-neighbor stage.
 * Returns raw doc data for each hit.
 */
async function runFirestoreFindNearest(base, params) {
  const vec = toNumberArray(params.queryVector);
  if (vec.length === 0) {
    throw new FiregraphError(
      "findNearest(): queryVector is empty \u2014 at least one dimension is required.",
      "INVALID_QUERY"
    );
  }
  if (!Number.isInteger(params.limit) || params.limit <= 0 || params.limit > 1e3) {
    throw new FiregraphError(
      `findNearest(): limit must be a positive integer \u2264 1000 (got ${params.limit}).`,
      "INVALID_QUERY"
    );
  }
  const vectorField = normalizeVectorFieldPath("vectorField", params.vectorField);
  // distanceResultField is optional; only normalize when the caller set it.
  const distanceResultField = params.distanceResultField !== void 0 ? normalizeVectorFieldPath("distanceResultField", params.distanceResultField) : void 0;
  const filtered = applyFiltersToQuery(base, buildVectorFilters(params));
  const opts = {
    vectorField,
    queryVector: vec,
    limit: params.limit,
    distanceMeasure: params.distanceMeasure
  };
  // Optional knobs are added only when present so Firestore sees no
  // explicit `undefined` values.
  if (params.distanceThreshold !== void 0) opts.distanceThreshold = params.distanceThreshold;
  if (distanceResultField !== void 0) opts.distanceResultField = distanceResultField;
  const snap = await filtered.findNearest(opts).get();
  return snap.docs.map((doc) => doc.data());
}
|
|
561
|
+
|
|
562
|
+
export {
|
|
563
|
+
bulkRemoveEdges,
|
|
564
|
+
removeNodeCascade,
|
|
565
|
+
runFirestoreAggregate,
|
|
566
|
+
createFirestoreAdapter,
|
|
567
|
+
createTransactionAdapter,
|
|
568
|
+
createBatchAdapter,
|
|
569
|
+
runFirestoreClassicExpand,
|
|
570
|
+
runFirestoreFindEdgesProjected,
|
|
571
|
+
runFirestoreFindNearest
|
|
572
|
+
};
|
|
573
|
+
//# sourceMappingURL=chunk-PAD7WFFU.js.map
|