@prisma-next/adapter-mongo 0.3.0-dev.147 → 0.3.0-dev.162
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/codecs-9xSaT_DN.mjs +85 -0
- package/dist/codecs-9xSaT_DN.mjs.map +1 -0
- package/dist/control.d.mts +74 -2
- package/dist/control.d.mts.map +1 -1
- package/dist/control.mjs +967 -65
- package/dist/control.mjs.map +1 -1
- package/dist/index.d.mts +3 -2
- package/dist/index.d.mts.map +1 -1
- package/dist/index.mjs +46 -22
- package/dist/index.mjs.map +1 -1
- package/package.json +16 -10
- package/src/core/command-executor.ts +89 -0
- package/src/core/contract-to-schema.ts +63 -0
- package/src/core/ddl-formatter.ts +112 -0
- package/src/core/filter-evaluator.ts +84 -0
- package/src/core/introspect-schema.ts +118 -0
- package/src/core/mongo-control-driver.ts +30 -0
- package/src/core/mongo-ops-serializer.ts +275 -0
- package/src/core/mongo-planner.ts +470 -0
- package/src/core/mongo-runner.ts +277 -0
- package/src/exports/control.ts +19 -0
- package/src/lowering.ts +2 -2
- package/src/mongo-adapter.ts +58 -22
- package/src/resolve-value.ts +8 -3
- package/dist/codec-ids-FBmJhfq-.mjs +0 -12
- package/dist/codec-ids-FBmJhfq-.mjs.map +0 -1
|
@@ -0,0 +1,277 @@
|
|
|
1
|
+
import type { TargetBoundComponentDescriptor } from '@prisma-next/framework-components/components';
|
|
2
|
+
import type {
|
|
3
|
+
ControlDriverInstance,
|
|
4
|
+
MigrationOperationPolicy,
|
|
5
|
+
MigrationPlan,
|
|
6
|
+
MigrationPlanOperation,
|
|
7
|
+
MigrationRunner,
|
|
8
|
+
MigrationRunnerExecutionChecks,
|
|
9
|
+
MigrationRunnerFailure,
|
|
10
|
+
MigrationRunnerResult,
|
|
11
|
+
} from '@prisma-next/framework-components/control';
|
|
12
|
+
import type {
|
|
13
|
+
MongoMigrationCheck,
|
|
14
|
+
MongoMigrationPlanOperation,
|
|
15
|
+
} from '@prisma-next/mongo-query-ast/control';
|
|
16
|
+
import {
|
|
17
|
+
initMarker,
|
|
18
|
+
readMarker,
|
|
19
|
+
updateMarker,
|
|
20
|
+
writeLedgerEntry,
|
|
21
|
+
} from '@prisma-next/target-mongo/control';
|
|
22
|
+
import { notOk, ok } from '@prisma-next/utils/result';
|
|
23
|
+
import type { Db } from 'mongodb';
|
|
24
|
+
import { MongoCommandExecutor, MongoInspectionExecutor } from './command-executor';
|
|
25
|
+
import { FilterEvaluator } from './filter-evaluator';
|
|
26
|
+
import type { MongoControlDriverInstance } from './mongo-control-driver';
|
|
27
|
+
import { deserializeMongoOps } from './mongo-ops-serializer';
|
|
28
|
+
|
|
29
|
+
function runnerFailure(
|
|
30
|
+
code: string,
|
|
31
|
+
summary: string,
|
|
32
|
+
opts?: { why?: string; meta?: Record<string, unknown> },
|
|
33
|
+
): MigrationRunnerResult {
|
|
34
|
+
return notOk<MigrationRunnerFailure>({
|
|
35
|
+
code,
|
|
36
|
+
summary,
|
|
37
|
+
...opts,
|
|
38
|
+
});
|
|
39
|
+
}
|
|
40
|
+
|
|
41
|
+
function isMongoControlDriverInstance(
|
|
42
|
+
driver: ControlDriverInstance<'mongo', 'mongo'>,
|
|
43
|
+
): driver is MongoControlDriverInstance {
|
|
44
|
+
return 'db' in driver && driver.db != null;
|
|
45
|
+
}
|
|
46
|
+
|
|
47
|
+
function extractDb(driver: ControlDriverInstance<'mongo', 'mongo'>): Db {
|
|
48
|
+
if (!isMongoControlDriverInstance(driver)) {
|
|
49
|
+
throw new Error(
|
|
50
|
+
'Mongo control driver does not expose a db property. ' +
|
|
51
|
+
'Use mongoControlDriver.create() from `@prisma-next/driver-mongo/control`.',
|
|
52
|
+
);
|
|
53
|
+
}
|
|
54
|
+
return driver.db;
|
|
55
|
+
}
|
|
56
|
+
|
|
57
|
+
/**
 * Executes Mongo migration plans: enforces the operation policy, validates the
 * stored contract marker against the plan's origin, runs each operation's
 * prechecks / commands / postchecks, then advances the marker and appends a
 * ledger entry recording the origin -> destination transition.
 */
export class MongoMigrationRunner implements MigrationRunner<'mongo', 'mongo'> {
  /**
   * Runs `options.plan` against the database exposed by `options.driver`.
   *
   * Validation order (each short-circuits with a failure result):
   *   1. policy compatibility — every operation's class must be allowed;
   *   2. marker/origin compatibility — stored marker must match plan origin.
   * Each operation then runs: idempotency skip (all postchecks already
   * satisfied) -> precheck -> execute steps -> postcheck.
   *
   * Failure codes produced: POLICY_VIOLATION, MARKER_ORIGIN_MISMATCH,
   * PRECHECK_FAILED, POSTCHECK_FAILED, MARKER_CAS_FAILURE.
   *
   * NOTE(review): exceptions thrown while executing a step are NOT converted
   * into a failure result — the try/finally only guarantees the
   * onOperationComplete callback fires; the error propagates to the caller.
   * NOTE(review): `frameworkComponents` is accepted but currently unused here.
   *
   * @returns ok({ operationsPlanned, operationsExecuted }) on success.
   */
  async execute(options: {
    readonly plan: MigrationPlan;
    readonly driver: ControlDriverInstance<'mongo', 'mongo'>;
    readonly destinationContract: unknown;
    readonly policy: MigrationOperationPolicy;
    readonly callbacks?: {
      onOperationStart?(op: MigrationPlanOperation): void;
      onOperationComplete?(op: MigrationPlanOperation): void;
    };
    readonly executionChecks?: MigrationRunnerExecutionChecks;
    readonly frameworkComponents: ReadonlyArray<TargetBoundComponentDescriptor<'mongo', 'mongo'>>;
  }): Promise<MigrationRunnerResult> {
    // Throws if the driver was not created by mongoControlDriver.create().
    const db = extractDb(options.driver);
    const operations = deserializeMongoOps(options.plan.operations as readonly unknown[]);

    const policyCheck = this.enforcePolicyCompatibility(options.policy, operations);
    if (policyCheck) return policyCheck;

    const existingMarker = await readMarker(db);

    const markerCheck = this.ensureMarkerCompatibility(existingMarker, options.plan);
    if (markerCheck) return markerCheck;

    // Each check category defaults to enabled; only an explicit `false` disables it.
    const checks = options.executionChecks;
    const runPrechecks = checks?.prechecks !== false;
    const runPostchecks = checks?.postchecks !== false;
    const runIdempotency = checks?.idempotencyChecks !== false;

    const commandExecutor = new MongoCommandExecutor(db);
    const inspectionExecutor = new MongoInspectionExecutor(db);
    const filterEvaluator = new FilterEvaluator();

    let operationsExecuted = 0;

    for (const operation of operations) {
      options.callbacks?.onOperationStart?.(operation);
      try {
        // Idempotency: if every postcheck already holds, the operation's
        // effect is present — skip it (the `finally` still fires the
        // completion callback).
        if (runPostchecks && runIdempotency) {
          const allSatisfied = await this.allChecksSatisfied(
            operation.postcheck,
            inspectionExecutor,
            filterEvaluator,
          );
          if (allSatisfied) continue;
        }

        if (runPrechecks) {
          const precheckResult = await this.evaluateChecks(
            operation.precheck,
            inspectionExecutor,
            filterEvaluator,
          );
          if (!precheckResult) {
            return runnerFailure(
              'PRECHECK_FAILED',
              `Operation ${operation.id} failed during precheck`,
              { meta: { operationId: operation.id } },
            );
          }
        }

        // Run the operation's commands sequentially, in plan order.
        for (const step of operation.execute) {
          await step.command.accept(commandExecutor);
        }

        if (runPostchecks) {
          const postcheckResult = await this.evaluateChecks(
            operation.postcheck,
            inspectionExecutor,
            filterEvaluator,
          );
          if (!postcheckResult) {
            return runnerFailure(
              'POSTCHECK_FAILED',
              `Operation ${operation.id} failed during postcheck`,
              { meta: { operationId: operation.id } },
            );
          }
        }

        operationsExecuted += 1;
      } finally {
        options.callbacks?.onOperationComplete?.(operation);
      }
    }

    const destination = options.plan.destination;
    // NOTE(review): assumes destinationContract is a non-null object when a
    // profileHash is present; a null contract would throw here — confirm the
    // planner always supplies one.
    const contract = options.destinationContract as { profileHash?: string };
    const profileHash = contract.profileHash ?? destination.storageHash;

    // No-op run against an already up-to-date marker: succeed without
    // touching the marker or writing a ledger entry.
    if (
      operationsExecuted === 0 &&
      existingMarker?.storageHash === destination.storageHash &&
      existingMarker.profileHash === profileHash
    ) {
      return ok({ operationsPlanned: operations.length, operationsExecuted });
    }

    if (existingMarker) {
      // Compare-and-swap on the previously observed storageHash; a falsy
      // result means another process moved the marker concurrently.
      const updated = await updateMarker(db, existingMarker.storageHash, {
        storageHash: destination.storageHash,
        profileHash,
      });
      if (!updated) {
        return runnerFailure(
          'MARKER_CAS_FAILURE',
          'Marker was modified by another process during migration execution.',
          {
            meta: {
              expectedStorageHash: existingMarker.storageHash,
              destinationStorageHash: destination.storageHash,
            },
          },
        );
      }
    } else {
      await initMarker(db, {
        storageHash: destination.storageHash,
        profileHash,
      });
    }

    // Record the transition edge; an empty origin hash denotes a fresh
    // database (no prior marker).
    const originHash = existingMarker?.storageHash ?? '';
    await writeLedgerEntry(db, {
      edgeId: `${originHash}->${destination.storageHash}`,
      from: originHash,
      to: destination.storageHash,
    });

    return ok({ operationsPlanned: operations.length, operationsExecuted });
  }

  /**
   * Evaluates checks in order; returns false on the first check whose
   * expectation is not met, true when all pass (vacuously true for an empty
   * list). A check passes when a matching document exists (`expect ===
   * 'exists'`) or does not exist (any other `expect` value).
   */
  private async evaluateChecks(
    checks: readonly MongoMigrationCheck[],
    inspectionExecutor: MongoInspectionExecutor,
    filterEvaluator: FilterEvaluator,
  ): Promise<boolean> {
    for (const check of checks) {
      const documents = await check.source.accept(inspectionExecutor);
      const matchFound = documents.some((doc) =>
        filterEvaluator.evaluate(check.filter, doc as Record<string, unknown>),
      );
      const passed = check.expect === 'exists' ? matchFound : !matchFound;
      if (!passed) return false;
    }
    return true;
  }

  /**
   * Idempotency variant of evaluateChecks: an EMPTY check list returns false
   * (nothing to prove the operation already ran), unlike evaluateChecks where
   * an empty list is vacuously true.
   */
  private async allChecksSatisfied(
    checks: readonly MongoMigrationCheck[],
    inspectionExecutor: MongoInspectionExecutor,
    filterEvaluator: FilterEvaluator,
  ): Promise<boolean> {
    if (checks.length === 0) return false;
    return this.evaluateChecks(checks, inspectionExecutor, filterEvaluator);
  }

  /**
   * Rejects the plan up-front if any operation's class is outside the
   * policy's allow-list; returns undefined when all operations are permitted.
   */
  private enforcePolicyCompatibility(
    policy: MigrationOperationPolicy,
    operations: readonly MongoMigrationPlanOperation[],
  ): MigrationRunnerResult | undefined {
    const allowedClasses = new Set(policy.allowedOperationClasses);
    for (const operation of operations) {
      if (!allowedClasses.has(operation.operationClass)) {
        return runnerFailure(
          'POLICY_VIOLATION',
          `Operation ${operation.id} has class "${operation.operationClass}" which is not allowed by policy.`,
          {
            why: `Policy only allows: ${[...allowedClasses].join(', ')}.`,
            meta: {
              operationId: operation.id,
              operationClass: operation.operationClass,
            },
          },
        );
      }
    }
    return undefined;
  }

  /**
   * Ensures the stored contract marker agrees with the plan's origin:
   *  - plan without origin: the database must have no marker (otherwise the
   *    marker would be silently overwritten);
   *  - plan with origin: a marker must exist and its storageHash must equal
   *    the origin's.
   * Returns undefined when compatible, otherwise a MARKER_ORIGIN_MISMATCH
   * failure.
   */
  private ensureMarkerCompatibility(
    marker: Awaited<ReturnType<typeof readMarker>>,
    plan: MigrationPlan,
  ): MigrationRunnerResult | undefined {
    const origin = plan.origin ?? null;
    if (!origin) {
      if (marker) {
        return runnerFailure(
          'MARKER_ORIGIN_MISMATCH',
          'Database already has a contract marker but the plan has no origin. This would silently overwrite the existing marker.',
          { meta: { markerStorageHash: marker.storageHash } },
        );
      }
      return undefined;
    }

    if (!marker) {
      return runnerFailure(
        'MARKER_ORIGIN_MISMATCH',
        `Missing contract marker: expected origin storage hash ${origin.storageHash}.`,
        { meta: { expectedOriginStorageHash: origin.storageHash } },
      );
    }

    if (marker.storageHash !== origin.storageHash) {
      return runnerFailure(
        'MARKER_ORIGIN_MISMATCH',
        `Existing contract marker (${marker.storageHash}) does not match plan origin (${origin.storageHash}).`,
        {
          meta: {
            markerStorageHash: marker.storageHash,
            expectedOriginStorageHash: origin.storageHash,
          },
        },
      );
    }

    return undefined;
  }
}
|
package/src/exports/control.ts
CHANGED
|
@@ -1,4 +1,23 @@
|
|
|
1
1
|
import type { ControlAdapterDescriptor } from '@prisma-next/framework-components/control';
|
|
2
|
+
|
|
3
|
+
export {
|
|
4
|
+
initMarker,
|
|
5
|
+
readMarker,
|
|
6
|
+
updateMarker,
|
|
7
|
+
writeLedgerEntry,
|
|
8
|
+
} from '@prisma-next/target-mongo/control';
|
|
9
|
+
export { MongoCommandExecutor, MongoInspectionExecutor } from '../core/command-executor';
|
|
10
|
+
export { contractToMongoSchemaIR } from '../core/contract-to-schema';
|
|
11
|
+
export { formatMongoOperations } from '../core/ddl-formatter';
|
|
12
|
+
export { introspectSchema } from '../core/introspect-schema';
|
|
13
|
+
export {
|
|
14
|
+
createMongoControlDriver,
|
|
15
|
+
type MongoControlDriverInstance,
|
|
16
|
+
} from '../core/mongo-control-driver';
|
|
17
|
+
export { deserializeMongoOps, serializeMongoOps } from '../core/mongo-ops-serializer';
|
|
18
|
+
export { MongoMigrationPlanner } from '../core/mongo-planner';
|
|
19
|
+
export { MongoMigrationRunner } from '../core/mongo-runner';
|
|
20
|
+
|
|
2
21
|
import {
|
|
3
22
|
mongoBooleanCodec,
|
|
4
23
|
mongoDateCodec,
|
package/src/lowering.ts
CHANGED
|
@@ -6,8 +6,8 @@ import type {
|
|
|
6
6
|
MongoPipelineStage,
|
|
7
7
|
MongoProjectionValue,
|
|
8
8
|
MongoWindowField,
|
|
9
|
-
} from '@prisma-next/mongo-query-ast';
|
|
10
|
-
import { isExprArray, isRecordArgs } from '@prisma-next/mongo-query-ast';
|
|
9
|
+
} from '@prisma-next/mongo-query-ast/execution';
|
|
10
|
+
import { isExprArray, isRecordArgs } from '@prisma-next/mongo-query-ast/execution';
|
|
11
11
|
import type { Document } from '@prisma-next/mongo-value';
|
|
12
12
|
import { resolveValue } from './resolve-value';
|
|
13
13
|
|
package/src/mongo-adapter.ts
CHANGED
|
@@ -1,9 +1,10 @@
|
|
|
1
|
+
import { createMongoCodecRegistry, type MongoCodecRegistry } from '@prisma-next/mongo-codec';
|
|
1
2
|
import type { MongoAdapter } from '@prisma-next/mongo-lowering';
|
|
2
3
|
import type {
|
|
3
4
|
MongoQueryPlan,
|
|
4
5
|
MongoUpdatePipelineStage,
|
|
5
6
|
MongoUpdateSpec,
|
|
6
|
-
} from '@prisma-next/mongo-query-ast';
|
|
7
|
+
} from '@prisma-next/mongo-query-ast/execution';
|
|
7
8
|
import type { Document, MongoExpr } from '@prisma-next/mongo-value';
|
|
8
9
|
import type { AnyMongoWireCommand } from '@prisma-next/mongo-wire';
|
|
9
10
|
import {
|
|
@@ -20,49 +21,58 @@ import {
|
|
|
20
21
|
import { lowerFilter, lowerPipeline, lowerStage } from './lowering';
|
|
21
22
|
import { resolveValue } from './resolve-value';
|
|
22
23
|
|
|
23
|
-
function resolveDocument(expr: MongoExpr): Document {
|
|
24
|
-
const result: Record<string, unknown> = {};
|
|
25
|
-
for (const [key, val] of Object.entries(expr)) {
|
|
26
|
-
result[key] = resolveValue(val);
|
|
27
|
-
}
|
|
28
|
-
return result;
|
|
29
|
-
}
|
|
30
|
-
|
|
31
24
|
function isUpdatePipeline(
|
|
32
25
|
update: MongoUpdateSpec,
|
|
33
26
|
): update is ReadonlyArray<MongoUpdatePipelineStage> {
|
|
34
27
|
return Array.isArray(update);
|
|
35
28
|
}
|
|
36
29
|
|
|
37
|
-
|
|
38
|
-
|
|
39
|
-
|
|
30
|
+
class MongoAdapterImpl implements MongoAdapter {
|
|
31
|
+
readonly #codecs: MongoCodecRegistry | undefined;
|
|
32
|
+
|
|
33
|
+
  /**
   * @param codecs Optional codec registry consulted when resolving parameter
   *   values; when omitted, values are resolved without codec encoding.
   */
  constructor(codecs?: MongoCodecRegistry) {
    this.#codecs = codecs;
  }
|
|
36
|
+
|
|
37
|
+
#resolveDocument(expr: MongoExpr): Document {
|
|
38
|
+
const result: Record<string, unknown> = {};
|
|
39
|
+
for (const [key, val] of Object.entries(expr)) {
|
|
40
|
+
result[key] = resolveValue(val, this.#codecs);
|
|
41
|
+
}
|
|
42
|
+
return result;
|
|
43
|
+
}
|
|
44
|
+
|
|
45
|
+
#lowerUpdate(update: MongoUpdateSpec): Document | ReadonlyArray<Document> {
|
|
46
|
+
if (isUpdatePipeline(update)) {
|
|
47
|
+
return update.map((stage) => lowerStage(stage));
|
|
48
|
+
}
|
|
49
|
+
return this.#resolveDocument(update);
|
|
40
50
|
}
|
|
41
|
-
return resolveDocument(update);
|
|
42
|
-
}
|
|
43
51
|
|
|
44
|
-
class MongoAdapterImpl implements MongoAdapter {
|
|
45
52
|
lower(plan: MongoQueryPlan): AnyMongoWireCommand {
|
|
46
53
|
const { command } = plan;
|
|
47
54
|
switch (command.kind) {
|
|
48
55
|
case 'insertOne':
|
|
49
|
-
return new InsertOneWireCommand(
|
|
56
|
+
return new InsertOneWireCommand(
|
|
57
|
+
command.collection,
|
|
58
|
+
this.#resolveDocument(command.document),
|
|
59
|
+
);
|
|
50
60
|
case 'updateOne':
|
|
51
61
|
return new UpdateOneWireCommand(
|
|
52
62
|
command.collection,
|
|
53
63
|
lowerFilter(command.filter),
|
|
54
|
-
lowerUpdate(command.update),
|
|
64
|
+
this.#lowerUpdate(command.update),
|
|
55
65
|
);
|
|
56
66
|
case 'insertMany':
|
|
57
67
|
return new InsertManyWireCommand(
|
|
58
68
|
command.collection,
|
|
59
|
-
command.documents.map((doc) => resolveDocument(doc)),
|
|
69
|
+
command.documents.map((doc) => this.#resolveDocument(doc)),
|
|
60
70
|
);
|
|
61
71
|
case 'updateMany':
|
|
62
72
|
return new UpdateManyWireCommand(
|
|
63
73
|
command.collection,
|
|
64
74
|
lowerFilter(command.filter),
|
|
65
|
-
lowerUpdate(command.update),
|
|
75
|
+
this.#lowerUpdate(command.update),
|
|
66
76
|
);
|
|
67
77
|
case 'deleteOne':
|
|
68
78
|
return new DeleteOneWireCommand(command.collection, lowerFilter(command.filter));
|
|
@@ -72,7 +82,7 @@ class MongoAdapterImpl implements MongoAdapter {
|
|
|
72
82
|
return new FindOneAndUpdateWireCommand(
|
|
73
83
|
command.collection,
|
|
74
84
|
lowerFilter(command.filter),
|
|
75
|
-
lowerUpdate(command.update),
|
|
85
|
+
this.#lowerUpdate(command.update),
|
|
76
86
|
command.upsert,
|
|
77
87
|
);
|
|
78
88
|
case 'findOneAndDelete':
|
|
@@ -111,6 +121,32 @@ class MongoAdapterImpl implements MongoAdapter {
|
|
|
111
121
|
}
|
|
112
122
|
}
|
|
113
123
|
|
|
114
|
-
|
|
115
|
-
|
|
124
|
+
import {
|
|
125
|
+
mongoBooleanCodec,
|
|
126
|
+
mongoDateCodec,
|
|
127
|
+
mongoDoubleCodec,
|
|
128
|
+
mongoInt32Codec,
|
|
129
|
+
mongoObjectIdCodec,
|
|
130
|
+
mongoStringCodec,
|
|
131
|
+
mongoVectorCodec,
|
|
132
|
+
} from './core/codecs';
|
|
133
|
+
|
|
134
|
+
function defaultCodecRegistry(): MongoCodecRegistry {
|
|
135
|
+
const registry = createMongoCodecRegistry();
|
|
136
|
+
for (const codec of [
|
|
137
|
+
mongoObjectIdCodec,
|
|
138
|
+
mongoStringCodec,
|
|
139
|
+
mongoDoubleCodec,
|
|
140
|
+
mongoInt32Codec,
|
|
141
|
+
mongoBooleanCodec,
|
|
142
|
+
mongoDateCodec,
|
|
143
|
+
mongoVectorCodec,
|
|
144
|
+
]) {
|
|
145
|
+
registry.register(codec);
|
|
146
|
+
}
|
|
147
|
+
return registry;
|
|
148
|
+
}
|
|
149
|
+
|
|
150
|
+
export function createMongoAdapter(codecs?: MongoCodecRegistry): MongoAdapter {
|
|
151
|
+
return new MongoAdapterImpl(codecs ?? defaultCodecRegistry());
|
|
116
152
|
}
|
package/src/resolve-value.ts
CHANGED
|
@@ -1,8 +1,13 @@
|
|
|
1
|
+
import type { MongoCodecRegistry } from '@prisma-next/mongo-codec';
|
|
1
2
|
import type { MongoValue } from '@prisma-next/mongo-value';
|
|
2
3
|
import { MongoParamRef } from '@prisma-next/mongo-value';
|
|
3
4
|
|
|
4
|
-
export function resolveValue(value: MongoValue): unknown {
|
|
5
|
+
export function resolveValue(value: MongoValue, codecs?: MongoCodecRegistry): unknown {
|
|
5
6
|
if (value instanceof MongoParamRef) {
|
|
7
|
+
if (value.codecId && codecs) {
|
|
8
|
+
const codec = codecs.get(value.codecId);
|
|
9
|
+
if (codec?.encode) return codec.encode(value.value);
|
|
10
|
+
}
|
|
6
11
|
return value.value;
|
|
7
12
|
}
|
|
8
13
|
if (value === null || typeof value !== 'object') {
|
|
@@ -12,11 +17,11 @@ export function resolveValue(value: MongoValue): unknown {
|
|
|
12
17
|
return value;
|
|
13
18
|
}
|
|
14
19
|
if (Array.isArray(value)) {
|
|
15
|
-
return value.map((v) => resolveValue(v));
|
|
20
|
+
return value.map((v) => resolveValue(v, codecs));
|
|
16
21
|
}
|
|
17
22
|
const result: Record<string, unknown> = {};
|
|
18
23
|
for (const [key, val] of Object.entries(value)) {
|
|
19
|
-
result[key] = resolveValue(val);
|
|
24
|
+
result[key] = resolveValue(val, codecs);
|
|
20
25
|
}
|
|
21
26
|
return result;
|
|
22
27
|
}
|
|
@@ -1,12 +0,0 @@
|
|
|
1
|
-
//#region src/core/codec-ids.ts
|
|
2
|
-
const MONGO_OBJECTID_CODEC_ID = "mongo/objectId@1";
|
|
3
|
-
const MONGO_STRING_CODEC_ID = "mongo/string@1";
|
|
4
|
-
const MONGO_DOUBLE_CODEC_ID = "mongo/double@1";
|
|
5
|
-
const MONGO_INT32_CODEC_ID = "mongo/int32@1";
|
|
6
|
-
const MONGO_BOOLEAN_CODEC_ID = "mongo/bool@1";
|
|
7
|
-
const MONGO_DATE_CODEC_ID = "mongo/date@1";
|
|
8
|
-
const MONGO_VECTOR_CODEC_ID = "mongo/vector@1";
|
|
9
|
-
|
|
10
|
-
//#endregion
|
|
11
|
-
export { MONGO_OBJECTID_CODEC_ID as a, MONGO_INT32_CODEC_ID as i, MONGO_DATE_CODEC_ID as n, MONGO_STRING_CODEC_ID as o, MONGO_DOUBLE_CODEC_ID as r, MONGO_VECTOR_CODEC_ID as s, MONGO_BOOLEAN_CODEC_ID as t };
|
|
12
|
-
//# sourceMappingURL=codec-ids-FBmJhfq-.mjs.map
|
|
@@ -1 +0,0 @@
|
|
|
1
|
-
{"version":3,"file":"codec-ids-FBmJhfq-.mjs","names":[],"sources":["../src/core/codec-ids.ts"],"sourcesContent":["export const MONGO_OBJECTID_CODEC_ID = 'mongo/objectId@1' as const;\nexport const MONGO_STRING_CODEC_ID = 'mongo/string@1' as const;\nexport const MONGO_DOUBLE_CODEC_ID = 'mongo/double@1' as const;\nexport const MONGO_INT32_CODEC_ID = 'mongo/int32@1' as const;\nexport const MONGO_BOOLEAN_CODEC_ID = 'mongo/bool@1' as const;\nexport const MONGO_DATE_CODEC_ID = 'mongo/date@1' as const;\nexport const MONGO_VECTOR_CODEC_ID = 'mongo/vector@1' as const;\n"],"mappings":";AAAA,MAAa,0BAA0B;AACvC,MAAa,wBAAwB;AACrC,MAAa,wBAAwB;AACrC,MAAa,uBAAuB;AACpC,MAAa,yBAAyB;AACtC,MAAa,sBAAsB;AACnC,MAAa,wBAAwB"}
|