@prisma-next/target-mongo 0.3.0 → 0.4.0-dev.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +36 -0
- package/dist/control.d.mts.map +1 -1
- package/dist/migration.d.mts +18 -0
- package/dist/migration.d.mts.map +1 -0
- package/dist/migration.mjs +126 -0
- package/dist/migration.mjs.map +1 -0
- package/package.json +8 -6
- package/src/core/migration-factories.ts +193 -0
- package/src/exports/migration.ts +8 -0
package/README.md
CHANGED
|
@@ -7,14 +7,18 @@ MongoDB target pack for Prisma Next.
|
|
|
7
7
|
- **Target pack assembly**: Exports the MongoDB target pack for authoring and family composition
|
|
8
8
|
- **Target metadata**: Defines the stable Mongo target identity (`kind`, `familyId`, `targetId`, `version`, `capabilities`)
|
|
9
9
|
- **Codec type surface**: Exposes the base Mongo codec type map used by authoring-time type composition
|
|
10
|
+
- **Migration operation factories**: Factory functions that build MongoDB migration plan operations (index creation/removal, collection lifecycle, document validation)
|
|
10
11
|
|
|
11
12
|
## Entrypoints
|
|
12
13
|
|
|
13
14
|
- `./pack`: pure target pack ref used by `@prisma-next/family-mongo` and `@prisma-next/mongo-contract-ts`
|
|
14
15
|
- `./codec-types`: base Mongo codec type map
|
|
16
|
+
- `./migration`: factory functions (the `Migration` base class is in `@prisma-next/family-mongo/migration`)
|
|
15
17
|
|
|
16
18
|
## Usage
|
|
17
19
|
|
|
20
|
+
### Contract definition
|
|
21
|
+
|
|
18
22
|
```typescript
|
|
19
23
|
import mongoFamily from '@prisma-next/family-mongo/pack';
|
|
20
24
|
import { defineContract } from '@prisma-next/mongo-contract-ts/contract-builder';
|
|
@@ -25,3 +29,35 @@ const contract = defineContract({
|
|
|
25
29
|
target: mongoTarget,
|
|
26
30
|
});
|
|
27
31
|
```
|
|
32
|
+
|
|
33
|
+
### Migration authoring
|
|
34
|
+
|
|
35
|
+
```typescript
|
|
36
|
+
import { Migration } from '@prisma-next/family-mongo/migration';
|
|
37
|
+
import { createIndex, createCollection } from '@prisma-next/target-mongo/migration';
|
|
38
|
+
|
|
39
|
+
export default class extends Migration {
|
|
40
|
+
plan() {
|
|
41
|
+
return [
|
|
42
|
+
createCollection("users", {
|
|
43
|
+
validator: { $jsonSchema: { required: ["email"] } },
|
|
44
|
+
validationLevel: "strict",
|
|
45
|
+
}),
|
|
46
|
+
createIndex("users", [{ field: "email", direction: 1 }], { unique: true }),
|
|
47
|
+
]
|
|
48
|
+
}
|
|
49
|
+
}
|
|
50
|
+
|
|
51
|
+
Migration.run(import.meta.url)
|
|
52
|
+
```
|
|
53
|
+
|
|
54
|
+
Run `tsx migration.ts` to produce `ops.json` and `migration.json` (when `describe()` is implemented). Use `--dry-run` to preview without writing.
|
|
55
|
+
|
|
56
|
+
### Available factories
|
|
57
|
+
|
|
58
|
+
- `createIndex(collection, keys, options?)` — create an index
|
|
59
|
+
- `dropIndex(collection, keys)` — drop an index
|
|
60
|
+
- `createCollection(collection, options?)` — create a collection
|
|
61
|
+
- `dropCollection(collection)` — drop a collection
|
|
62
|
+
- `setValidation(collection, schema, options?)` — set document validation on a collection
|
|
63
|
+
- `validatedCollection(name, schema, indexes)` — create a collection with a JSON Schema validator and indexes
|
package/dist/control.d.mts.map
CHANGED
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"control.d.mts","names":[],"sources":["../src/core/marker-ledger.ts"],"sourcesContent":[],"mappings":";;;;iBA+BsB,UAAA,KAAe,KAAK,QAAQ;iBAgB5B,UAAA,KAChB;EAjBgB,SAAA,WAAU,EAAA,MAAA;EAAK,SAAA,WAAA,EAAA,MAAA;CAAa,CAAA,EAmB/C,OAnB+C,CAAA,IAAA,CAAA;AAAR,iBAiCpB,YAAA,CAjCoB,EAAA,EAkCpC,EAlCoC,EAAA,YAAA,EAAA,MAAA,EAAA,WAAA,EAAA;EAAO,SAAA,WAAA,EAAA,MAAA;EAgB3B,SAAA,WAAU,EAAA,
|
|
1
|
+
{"version":3,"file":"control.d.mts","names":[],"sources":["../src/core/marker-ledger.ts"],"sourcesContent":[],"mappings":";;;;iBA+BsB,UAAA,KAAe,KAAK,QAAQ;iBAgB5B,UAAA,KAChB;EAjBgB,SAAA,WAAU,EAAA,MAAA;EAAK,SAAA,WAAA,EAAA,MAAA;CAAa,CAAA,EAmB/C,OAnB+C,CAAA,IAAA,CAAA;AAAR,iBAiCpB,YAAA,CAjCoB,EAAA,EAkCpC,EAlCoC,EAAA,YAAA,EAAA,MAAA,EAAA,WAAA,EAAA;EAAO,SAAA,WAAA,EAAA,MAAA;EAgB3B,SAAA,WAAU,EAAA,MAC1B;AAgBN,CAAA,CAAA,EAIG,OAJmB,CAAA,OAAY,CAAA;AAqBZ,iBAAA,gBAAA,CAGnB,EAAA,EAFG,EAEI,EAAA,KAAA,EAAA;;;;IAAP"}
|
|
@@ -0,0 +1,18 @@
|
|
|
1
|
+
import { CreateCollectionOptions, CreateIndexOptions, MongoIndexKey, MongoMigrationPlanOperation } from "@prisma-next/mongo-query-ast/control";
|
|
2
|
+
|
|
3
|
+
//#region src/core/migration-factories.d.ts
|
|
4
|
+
declare function createIndex(collection: string, keys: ReadonlyArray<MongoIndexKey>, options?: CreateIndexOptions): MongoMigrationPlanOperation;
|
|
5
|
+
declare function dropIndex(collection: string, keys: ReadonlyArray<MongoIndexKey>): MongoMigrationPlanOperation;
|
|
6
|
+
declare function createCollection(collection: string, options?: CreateCollectionOptions): MongoMigrationPlanOperation;
|
|
7
|
+
declare function dropCollection(collection: string): MongoMigrationPlanOperation;
|
|
8
|
+
declare function setValidation(collection: string, schema: Record<string, unknown>, options?: {
|
|
9
|
+
validationLevel?: 'strict' | 'moderate';
|
|
10
|
+
validationAction?: 'error' | 'warn';
|
|
11
|
+
}): MongoMigrationPlanOperation;
|
|
12
|
+
declare function validatedCollection(name: string, schema: Record<string, unknown>, indexes: ReadonlyArray<{
|
|
13
|
+
keys: MongoIndexKey[];
|
|
14
|
+
unique?: boolean;
|
|
15
|
+
}>): MongoMigrationPlanOperation[];
|
|
16
|
+
//#endregion
|
|
17
|
+
export { createCollection, createIndex, dropCollection, dropIndex, setValidation, validatedCollection };
|
|
18
|
+
//# sourceMappingURL=migration.d.mts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"migration.d.mts","names":[],"sources":["../src/core/migration-factories.ts"],"sourcesContent":[],"mappings":";;;iBAiCgB,WAAA,2BAER,cAAc,0BACV,qBACT;AAJa,iBA4CA,SAAA,CA5CW,UAAA,EAAA,MAAA,EAAA,IAAA,EA8CnB,aA9CmB,CA8CL,aA9CK,CAAA,CAAA,EA+CxB,2BA/CwB;AAEL,iBA8EN,gBAAA,CA9EM,UAAA,EAAA,MAAA,EAAA,OAAA,CAAA,EAgFV,uBAhFU,CAAA,EAiFnB,2BAjFmB;AAAd,iBAwGQ,cAAA,CAxGR,UAAA,EAAA,MAAA,CAAA,EAwG4C,2BAxG5C;AACI,iBAuHI,aAAA,CAvHJ,UAAA,EAAA,MAAA,EAAA,MAAA,EAyHF,MAzHE,CAAA,MAAA,EAAA,OAAA,CAAA,EAAA,OAyCZ,CAzCY,EAAA;EACT,eAAA,CAAA,EAAA,QAAA,GAAA,UAAA;EAA2B,gBAAA,CAAA,EAAA,OAAA,GAAA,MAAA;AAwC9B,CAAA,CAAA,EAkFG,2BAlFsB;AAEH,iBAoGN,mBAAA,CApGM,IAAA,EAAA,MAAA,EAAA,MAAA,EAsGZ,MAtGY,CAAA,MAAA,EAAA,OAAA,CAAA,EAAA,OAAA,EAuGX,aAvGW,CAAA;EAAd,IAAA,EAuGyB,aAvGzB,EAAA;EACL,MAAA,CAAA,EAAA,OAAA;CAA2B,CAAA,CAAA,EAuG3B,2BAvG2B,EAAA"}
|
|
@@ -0,0 +1,126 @@
|
|
|
1
|
+
import { CollModCommand, CreateCollectionCommand, CreateIndexCommand, DropCollectionCommand, DropIndexCommand, ListCollectionsCommand, ListIndexesCommand, MongoAndExpr, MongoFieldFilter, buildIndexOpId, defaultMongoIndexName, keysToKeySpec } from "@prisma-next/mongo-query-ast/control";
|
|
2
|
+
|
|
3
|
+
//#region src/core/migration-factories.ts
|
|
4
|
+
function formatKeys(keys) {
|
|
5
|
+
return keys.map((k) => `${k.field}:${k.direction}`).join(", ");
|
|
6
|
+
}
|
|
7
|
+
function isTextIndex(keys) {
|
|
8
|
+
return keys.some((k) => k.direction === "text");
|
|
9
|
+
}
|
|
10
|
+
function keyFilter(keys) {
|
|
11
|
+
return isTextIndex(keys) ? MongoFieldFilter.eq("key._fts", "text") : MongoFieldFilter.eq("key", keysToKeySpec(keys));
|
|
12
|
+
}
|
|
13
|
+
function createIndex(collection, keys, options) {
|
|
14
|
+
const name = defaultMongoIndexName(keys);
|
|
15
|
+
const filter = keyFilter(keys);
|
|
16
|
+
const fullFilter = options?.unique ? MongoAndExpr.of([filter, MongoFieldFilter.eq("unique", true)]) : filter;
|
|
17
|
+
return {
|
|
18
|
+
id: buildIndexOpId("create", collection, keys),
|
|
19
|
+
label: `Create index on ${collection} (${formatKeys(keys)})`,
|
|
20
|
+
operationClass: "additive",
|
|
21
|
+
precheck: [{
|
|
22
|
+
description: `index does not already exist on ${collection}`,
|
|
23
|
+
source: new ListIndexesCommand(collection),
|
|
24
|
+
filter,
|
|
25
|
+
expect: "notExists"
|
|
26
|
+
}],
|
|
27
|
+
execute: [{
|
|
28
|
+
description: `create index on ${collection}`,
|
|
29
|
+
command: new CreateIndexCommand(collection, keys, {
|
|
30
|
+
...options,
|
|
31
|
+
unique: options?.unique ?? void 0,
|
|
32
|
+
name
|
|
33
|
+
})
|
|
34
|
+
}],
|
|
35
|
+
postcheck: [{
|
|
36
|
+
description: `index exists on ${collection}`,
|
|
37
|
+
source: new ListIndexesCommand(collection),
|
|
38
|
+
filter: fullFilter,
|
|
39
|
+
expect: "exists"
|
|
40
|
+
}]
|
|
41
|
+
};
|
|
42
|
+
}
|
|
43
|
+
function dropIndex(collection, keys) {
|
|
44
|
+
const indexName = defaultMongoIndexName(keys);
|
|
45
|
+
const filter = keyFilter(keys);
|
|
46
|
+
return {
|
|
47
|
+
id: buildIndexOpId("drop", collection, keys),
|
|
48
|
+
label: `Drop index on ${collection} (${formatKeys(keys)})`,
|
|
49
|
+
operationClass: "destructive",
|
|
50
|
+
precheck: [{
|
|
51
|
+
description: `index exists on ${collection}`,
|
|
52
|
+
source: new ListIndexesCommand(collection),
|
|
53
|
+
filter,
|
|
54
|
+
expect: "exists"
|
|
55
|
+
}],
|
|
56
|
+
execute: [{
|
|
57
|
+
description: `drop index on ${collection}`,
|
|
58
|
+
command: new DropIndexCommand(collection, indexName)
|
|
59
|
+
}],
|
|
60
|
+
postcheck: [{
|
|
61
|
+
description: `index no longer exists on ${collection}`,
|
|
62
|
+
source: new ListIndexesCommand(collection),
|
|
63
|
+
filter,
|
|
64
|
+
expect: "notExists"
|
|
65
|
+
}]
|
|
66
|
+
};
|
|
67
|
+
}
|
|
68
|
+
function createCollection(collection, options) {
|
|
69
|
+
return {
|
|
70
|
+
id: `collection.${collection}.create`,
|
|
71
|
+
label: `Create collection ${collection}`,
|
|
72
|
+
operationClass: "additive",
|
|
73
|
+
precheck: [{
|
|
74
|
+
description: `collection ${collection} does not exist`,
|
|
75
|
+
source: new ListCollectionsCommand(),
|
|
76
|
+
filter: MongoFieldFilter.eq("name", collection),
|
|
77
|
+
expect: "notExists"
|
|
78
|
+
}],
|
|
79
|
+
execute: [{
|
|
80
|
+
description: `create collection ${collection}`,
|
|
81
|
+
command: new CreateCollectionCommand(collection, options)
|
|
82
|
+
}],
|
|
83
|
+
postcheck: []
|
|
84
|
+
};
|
|
85
|
+
}
|
|
86
|
+
function dropCollection(collection) {
|
|
87
|
+
return {
|
|
88
|
+
id: `collection.${collection}.drop`,
|
|
89
|
+
label: `Drop collection ${collection}`,
|
|
90
|
+
operationClass: "destructive",
|
|
91
|
+
precheck: [],
|
|
92
|
+
execute: [{
|
|
93
|
+
description: `drop collection ${collection}`,
|
|
94
|
+
command: new DropCollectionCommand(collection)
|
|
95
|
+
}],
|
|
96
|
+
postcheck: []
|
|
97
|
+
};
|
|
98
|
+
}
|
|
99
|
+
function setValidation(collection, schema, options) {
|
|
100
|
+
return {
|
|
101
|
+
id: `collection.${collection}.setValidation`,
|
|
102
|
+
label: `Set validation on ${collection}`,
|
|
103
|
+
operationClass: "destructive",
|
|
104
|
+
precheck: [],
|
|
105
|
+
execute: [{
|
|
106
|
+
description: `set validation on ${collection}`,
|
|
107
|
+
command: new CollModCommand(collection, {
|
|
108
|
+
validator: { $jsonSchema: schema },
|
|
109
|
+
validationLevel: options?.validationLevel,
|
|
110
|
+
validationAction: options?.validationAction
|
|
111
|
+
})
|
|
112
|
+
}],
|
|
113
|
+
postcheck: []
|
|
114
|
+
};
|
|
115
|
+
}
|
|
116
|
+
function validatedCollection(name, schema, indexes) {
|
|
117
|
+
return [createCollection(name, {
|
|
118
|
+
validator: { $jsonSchema: schema },
|
|
119
|
+
validationLevel: "strict",
|
|
120
|
+
validationAction: "error"
|
|
121
|
+
}), ...indexes.map((idx) => createIndex(name, idx.keys, { unique: idx.unique }))];
|
|
122
|
+
}
|
|
123
|
+
|
|
124
|
+
//#endregion
|
|
125
|
+
export { createCollection, createIndex, dropCollection, dropIndex, setValidation, validatedCollection };
|
|
126
|
+
//# sourceMappingURL=migration.mjs.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"migration.mjs","names":[],"sources":["../src/core/migration-factories.ts"],"sourcesContent":["import type { MongoIndexKey } from '@prisma-next/mongo-query-ast/control';\nimport {\n buildIndexOpId,\n CollModCommand,\n CreateCollectionCommand,\n type CreateCollectionOptions,\n CreateIndexCommand,\n type CreateIndexOptions,\n DropCollectionCommand,\n DropIndexCommand,\n defaultMongoIndexName,\n keysToKeySpec,\n ListCollectionsCommand,\n ListIndexesCommand,\n MongoAndExpr,\n MongoFieldFilter,\n type MongoMigrationPlanOperation,\n} from '@prisma-next/mongo-query-ast/control';\n\nfunction formatKeys(keys: ReadonlyArray<MongoIndexKey>): string {\n return keys.map((k) => `${k.field}:${k.direction}`).join(', ');\n}\n\nfunction isTextIndex(keys: ReadonlyArray<MongoIndexKey>): boolean {\n return keys.some((k) => k.direction === 'text');\n}\n\nfunction keyFilter(keys: ReadonlyArray<MongoIndexKey>) {\n return isTextIndex(keys)\n ? MongoFieldFilter.eq('key._fts', 'text')\n : MongoFieldFilter.eq('key', keysToKeySpec(keys));\n}\n\nexport function createIndex(\n collection: string,\n keys: ReadonlyArray<MongoIndexKey>,\n options?: CreateIndexOptions,\n): MongoMigrationPlanOperation {\n const name = defaultMongoIndexName(keys);\n const filter = keyFilter(keys);\n const fullFilter = options?.unique\n ? MongoAndExpr.of([filter, MongoFieldFilter.eq('unique', true)])\n : filter;\n\n return {\n id: buildIndexOpId('create', collection, keys),\n label: `Create index on ${collection} (${formatKeys(keys)})`,\n operationClass: 'additive',\n precheck: [\n {\n description: `index does not already exist on ${collection}`,\n source: new ListIndexesCommand(collection),\n filter,\n expect: 'notExists',\n },\n ],\n execute: [\n {\n description: `create index on ${collection}`,\n command: new CreateIndexCommand(collection, keys, {\n ...options,\n unique: options?.unique ?? 
undefined,\n name,\n }),\n },\n ],\n postcheck: [\n {\n description: `index exists on ${collection}`,\n source: new ListIndexesCommand(collection),\n filter: fullFilter,\n expect: 'exists',\n },\n ],\n };\n}\n\nexport function dropIndex(\n collection: string,\n keys: ReadonlyArray<MongoIndexKey>,\n): MongoMigrationPlanOperation {\n const indexName = defaultMongoIndexName(keys);\n const filter = keyFilter(keys);\n\n return {\n id: buildIndexOpId('drop', collection, keys),\n label: `Drop index on ${collection} (${formatKeys(keys)})`,\n operationClass: 'destructive',\n precheck: [\n {\n description: `index exists on ${collection}`,\n source: new ListIndexesCommand(collection),\n filter,\n expect: 'exists',\n },\n ],\n execute: [\n {\n description: `drop index on ${collection}`,\n command: new DropIndexCommand(collection, indexName),\n },\n ],\n postcheck: [\n {\n description: `index no longer exists on ${collection}`,\n source: new ListIndexesCommand(collection),\n filter,\n expect: 'notExists',\n },\n ],\n };\n}\n\nexport function createCollection(\n collection: string,\n options?: CreateCollectionOptions,\n): MongoMigrationPlanOperation {\n return {\n id: `collection.${collection}.create`,\n label: `Create collection ${collection}`,\n operationClass: 'additive',\n precheck: [\n {\n description: `collection ${collection} does not exist`,\n source: new ListCollectionsCommand(),\n filter: MongoFieldFilter.eq('name', collection),\n expect: 'notExists',\n },\n ],\n execute: [\n {\n description: `create collection ${collection}`,\n command: new CreateCollectionCommand(collection, options),\n },\n ],\n postcheck: [],\n };\n}\n\nexport function dropCollection(collection: string): MongoMigrationPlanOperation {\n return {\n id: `collection.${collection}.drop`,\n label: `Drop collection ${collection}`,\n operationClass: 'destructive',\n precheck: [],\n execute: [\n {\n description: `drop collection ${collection}`,\n command: new DropCollectionCommand(collection),\n },\n ],\n 
postcheck: [],\n };\n}\n\nexport function setValidation(\n collection: string,\n schema: Record<string, unknown>,\n options?: { validationLevel?: 'strict' | 'moderate'; validationAction?: 'error' | 'warn' },\n): MongoMigrationPlanOperation {\n return {\n id: `collection.${collection}.setValidation`,\n label: `Set validation on ${collection}`,\n operationClass: 'destructive',\n precheck: [],\n execute: [\n {\n description: `set validation on ${collection}`,\n command: new CollModCommand(collection, {\n validator: { $jsonSchema: schema },\n validationLevel: options?.validationLevel,\n validationAction: options?.validationAction,\n }),\n },\n ],\n postcheck: [],\n };\n}\n\nexport function validatedCollection(\n name: string,\n schema: Record<string, unknown>,\n indexes: ReadonlyArray<{ keys: MongoIndexKey[]; unique?: boolean }>,\n): MongoMigrationPlanOperation[] {\n return [\n createCollection(name, {\n validator: { $jsonSchema: schema },\n validationLevel: 'strict',\n validationAction: 'error',\n }),\n ...indexes.map((idx) => createIndex(name, idx.keys, { unique: idx.unique })),\n 
];\n}\n"],"mappings":";;;AAmBA,SAAS,WAAW,MAA4C;AAC9D,QAAO,KAAK,KAAK,MAAM,GAAG,EAAE,MAAM,GAAG,EAAE,YAAY,CAAC,KAAK,KAAK;;AAGhE,SAAS,YAAY,MAA6C;AAChE,QAAO,KAAK,MAAM,MAAM,EAAE,cAAc,OAAO;;AAGjD,SAAS,UAAU,MAAoC;AACrD,QAAO,YAAY,KAAK,GACpB,iBAAiB,GAAG,YAAY,OAAO,GACvC,iBAAiB,GAAG,OAAO,cAAc,KAAK,CAAC;;AAGrD,SAAgB,YACd,YACA,MACA,SAC6B;CAC7B,MAAM,OAAO,sBAAsB,KAAK;CACxC,MAAM,SAAS,UAAU,KAAK;CAC9B,MAAM,aAAa,SAAS,SACxB,aAAa,GAAG,CAAC,QAAQ,iBAAiB,GAAG,UAAU,KAAK,CAAC,CAAC,GAC9D;AAEJ,QAAO;EACL,IAAI,eAAe,UAAU,YAAY,KAAK;EAC9C,OAAO,mBAAmB,WAAW,IAAI,WAAW,KAAK,CAAC;EAC1D,gBAAgB;EAChB,UAAU,CACR;GACE,aAAa,mCAAmC;GAChD,QAAQ,IAAI,mBAAmB,WAAW;GAC1C;GACA,QAAQ;GACT,CACF;EACD,SAAS,CACP;GACE,aAAa,mBAAmB;GAChC,SAAS,IAAI,mBAAmB,YAAY,MAAM;IAChD,GAAG;IACH,QAAQ,SAAS,UAAU;IAC3B;IACD,CAAC;GACH,CACF;EACD,WAAW,CACT;GACE,aAAa,mBAAmB;GAChC,QAAQ,IAAI,mBAAmB,WAAW;GAC1C,QAAQ;GACR,QAAQ;GACT,CACF;EACF;;AAGH,SAAgB,UACd,YACA,MAC6B;CAC7B,MAAM,YAAY,sBAAsB,KAAK;CAC7C,MAAM,SAAS,UAAU,KAAK;AAE9B,QAAO;EACL,IAAI,eAAe,QAAQ,YAAY,KAAK;EAC5C,OAAO,iBAAiB,WAAW,IAAI,WAAW,KAAK,CAAC;EACxD,gBAAgB;EAChB,UAAU,CACR;GACE,aAAa,mBAAmB;GAChC,QAAQ,IAAI,mBAAmB,WAAW;GAC1C;GACA,QAAQ;GACT,CACF;EACD,SAAS,CACP;GACE,aAAa,iBAAiB;GAC9B,SAAS,IAAI,iBAAiB,YAAY,UAAU;GACrD,CACF;EACD,WAAW,CACT;GACE,aAAa,6BAA6B;GAC1C,QAAQ,IAAI,mBAAmB,WAAW;GAC1C;GACA,QAAQ;GACT,CACF;EACF;;AAGH,SAAgB,iBACd,YACA,SAC6B;AAC7B,QAAO;EACL,IAAI,cAAc,WAAW;EAC7B,OAAO,qBAAqB;EAC5B,gBAAgB;EAChB,UAAU,CACR;GACE,aAAa,cAAc,WAAW;GACtC,QAAQ,IAAI,wBAAwB;GACpC,QAAQ,iBAAiB,GAAG,QAAQ,WAAW;GAC/C,QAAQ;GACT,CACF;EACD,SAAS,CACP;GACE,aAAa,qBAAqB;GAClC,SAAS,IAAI,wBAAwB,YAAY,QAAQ;GAC1D,CACF;EACD,WAAW,EAAE;EACd;;AAGH,SAAgB,eAAe,YAAiD;AAC9E,QAAO;EACL,IAAI,cAAc,WAAW;EAC7B,OAAO,mBAAmB;EAC1B,gBAAgB;EAChB,UAAU,EAAE;EACZ,SAAS,CACP;GACE,aAAa,mBAAmB;GAChC,SAAS,IAAI,sBAAsB,WAAW;GAC/C,CACF;EACD,WAAW,EAAE;EACd;;AAGH,SAAgB,cACd,YACA,QACA,SAC6B;AAC7B,QAAO;EACL,IAAI,cAAc,WAAW;EAC7B,OAAO,qBAAqB;EAC5B,gBAAgB;EAChB,UAAU,EAAE;EACZ,SAAS,CACP;GACE,aAAa,qBAAqB;GAClC,SAAS,IAAI,eAAe,YAAY;IACtC,WAAW,EAAE,aAAa,QAAQ;IAClC,iBAAiB,S
AAS;IAC1B,kBAAkB,SAAS;IAC5B,CAAC;GACH,CACF;EACD,WAAW,EAAE;EACd;;AAGH,SAAgB,oBACd,MACA,QACA,SAC+B;AAC/B,QAAO,CACL,iBAAiB,MAAM;EACrB,WAAW,EAAE,aAAa,QAAQ;EAClC,iBAAiB;EACjB,kBAAkB;EACnB,CAAC,EACF,GAAG,QAAQ,KAAK,QAAQ,YAAY,MAAM,IAAI,MAAM,EAAE,QAAQ,IAAI,QAAQ,CAAC,CAAC,CAC7E"}
|
package/package.json
CHANGED
|
@@ -1,23 +1,24 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@prisma-next/target-mongo",
|
|
3
|
-
"version": "0.
|
|
3
|
+
"version": "0.4.0-dev.1",
|
|
4
4
|
"type": "module",
|
|
5
5
|
"sideEffects": false,
|
|
6
6
|
"description": "MongoDB target pack for Prisma Next",
|
|
7
7
|
"dependencies": {
|
|
8
8
|
"mongodb": "^6.16.0",
|
|
9
|
-
"@prisma-next/contract": "0.
|
|
10
|
-
"@prisma-next/
|
|
11
|
-
"@prisma-next/
|
|
9
|
+
"@prisma-next/contract": "0.4.0-dev.1",
|
|
10
|
+
"@prisma-next/mongo-query-ast": "0.4.0-dev.1",
|
|
11
|
+
"@prisma-next/framework-components": "0.4.0-dev.1"
|
|
12
12
|
},
|
|
13
13
|
"devDependencies": {
|
|
14
14
|
"mongodb-memory-server": "10.4.3",
|
|
15
|
+
"pathe": "^2.0.3",
|
|
15
16
|
"tsdown": "0.18.4",
|
|
16
17
|
"typescript": "5.9.3",
|
|
17
18
|
"vitest": "4.0.17",
|
|
18
19
|
"@prisma-next/test-utils": "0.0.1",
|
|
19
|
-
"@prisma-next/
|
|
20
|
-
"@prisma-next/
|
|
20
|
+
"@prisma-next/tsconfig": "0.0.0",
|
|
21
|
+
"@prisma-next/tsdown": "0.0.0"
|
|
21
22
|
},
|
|
22
23
|
"files": [
|
|
23
24
|
"dist",
|
|
@@ -26,6 +27,7 @@
|
|
|
26
27
|
"exports": {
|
|
27
28
|
"./codec-types": "./dist/codec-types.mjs",
|
|
28
29
|
"./control": "./dist/control.mjs",
|
|
30
|
+
"./migration": "./dist/migration.mjs",
|
|
29
31
|
"./pack": "./dist/pack.mjs",
|
|
30
32
|
"./package.json": "./package.json"
|
|
31
33
|
},
|
|
@@ -0,0 +1,193 @@
|
|
|
1
|
+
import type { MongoIndexKey } from '@prisma-next/mongo-query-ast/control';
|
|
2
|
+
import {
|
|
3
|
+
buildIndexOpId,
|
|
4
|
+
CollModCommand,
|
|
5
|
+
CreateCollectionCommand,
|
|
6
|
+
type CreateCollectionOptions,
|
|
7
|
+
CreateIndexCommand,
|
|
8
|
+
type CreateIndexOptions,
|
|
9
|
+
DropCollectionCommand,
|
|
10
|
+
DropIndexCommand,
|
|
11
|
+
defaultMongoIndexName,
|
|
12
|
+
keysToKeySpec,
|
|
13
|
+
ListCollectionsCommand,
|
|
14
|
+
ListIndexesCommand,
|
|
15
|
+
MongoAndExpr,
|
|
16
|
+
MongoFieldFilter,
|
|
17
|
+
type MongoMigrationPlanOperation,
|
|
18
|
+
} from '@prisma-next/mongo-query-ast/control';
|
|
19
|
+
|
|
20
|
+
function formatKeys(keys: ReadonlyArray<MongoIndexKey>): string {
|
|
21
|
+
return keys.map((k) => `${k.field}:${k.direction}`).join(', ');
|
|
22
|
+
}
|
|
23
|
+
|
|
24
|
+
function isTextIndex(keys: ReadonlyArray<MongoIndexKey>): boolean {
|
|
25
|
+
return keys.some((k) => k.direction === 'text');
|
|
26
|
+
}
|
|
27
|
+
|
|
28
|
+
function keyFilter(keys: ReadonlyArray<MongoIndexKey>) {
|
|
29
|
+
return isTextIndex(keys)
|
|
30
|
+
? MongoFieldFilter.eq('key._fts', 'text')
|
|
31
|
+
: MongoFieldFilter.eq('key', keysToKeySpec(keys));
|
|
32
|
+
}
|
|
33
|
+
|
|
34
|
+
export function createIndex(
|
|
35
|
+
collection: string,
|
|
36
|
+
keys: ReadonlyArray<MongoIndexKey>,
|
|
37
|
+
options?: CreateIndexOptions,
|
|
38
|
+
): MongoMigrationPlanOperation {
|
|
39
|
+
const name = defaultMongoIndexName(keys);
|
|
40
|
+
const filter = keyFilter(keys);
|
|
41
|
+
const fullFilter = options?.unique
|
|
42
|
+
? MongoAndExpr.of([filter, MongoFieldFilter.eq('unique', true)])
|
|
43
|
+
: filter;
|
|
44
|
+
|
|
45
|
+
return {
|
|
46
|
+
id: buildIndexOpId('create', collection, keys),
|
|
47
|
+
label: `Create index on ${collection} (${formatKeys(keys)})`,
|
|
48
|
+
operationClass: 'additive',
|
|
49
|
+
precheck: [
|
|
50
|
+
{
|
|
51
|
+
description: `index does not already exist on ${collection}`,
|
|
52
|
+
source: new ListIndexesCommand(collection),
|
|
53
|
+
filter,
|
|
54
|
+
expect: 'notExists',
|
|
55
|
+
},
|
|
56
|
+
],
|
|
57
|
+
execute: [
|
|
58
|
+
{
|
|
59
|
+
description: `create index on ${collection}`,
|
|
60
|
+
command: new CreateIndexCommand(collection, keys, {
|
|
61
|
+
...options,
|
|
62
|
+
unique: options?.unique ?? undefined,
|
|
63
|
+
name,
|
|
64
|
+
}),
|
|
65
|
+
},
|
|
66
|
+
],
|
|
67
|
+
postcheck: [
|
|
68
|
+
{
|
|
69
|
+
description: `index exists on ${collection}`,
|
|
70
|
+
source: new ListIndexesCommand(collection),
|
|
71
|
+
filter: fullFilter,
|
|
72
|
+
expect: 'exists',
|
|
73
|
+
},
|
|
74
|
+
],
|
|
75
|
+
};
|
|
76
|
+
}
|
|
77
|
+
|
|
78
|
+
export function dropIndex(
|
|
79
|
+
collection: string,
|
|
80
|
+
keys: ReadonlyArray<MongoIndexKey>,
|
|
81
|
+
): MongoMigrationPlanOperation {
|
|
82
|
+
const indexName = defaultMongoIndexName(keys);
|
|
83
|
+
const filter = keyFilter(keys);
|
|
84
|
+
|
|
85
|
+
return {
|
|
86
|
+
id: buildIndexOpId('drop', collection, keys),
|
|
87
|
+
label: `Drop index on ${collection} (${formatKeys(keys)})`,
|
|
88
|
+
operationClass: 'destructive',
|
|
89
|
+
precheck: [
|
|
90
|
+
{
|
|
91
|
+
description: `index exists on ${collection}`,
|
|
92
|
+
source: new ListIndexesCommand(collection),
|
|
93
|
+
filter,
|
|
94
|
+
expect: 'exists',
|
|
95
|
+
},
|
|
96
|
+
],
|
|
97
|
+
execute: [
|
|
98
|
+
{
|
|
99
|
+
description: `drop index on ${collection}`,
|
|
100
|
+
command: new DropIndexCommand(collection, indexName),
|
|
101
|
+
},
|
|
102
|
+
],
|
|
103
|
+
postcheck: [
|
|
104
|
+
{
|
|
105
|
+
description: `index no longer exists on ${collection}`,
|
|
106
|
+
source: new ListIndexesCommand(collection),
|
|
107
|
+
filter,
|
|
108
|
+
expect: 'notExists',
|
|
109
|
+
},
|
|
110
|
+
],
|
|
111
|
+
};
|
|
112
|
+
}
|
|
113
|
+
|
|
114
|
+
export function createCollection(
|
|
115
|
+
collection: string,
|
|
116
|
+
options?: CreateCollectionOptions,
|
|
117
|
+
): MongoMigrationPlanOperation {
|
|
118
|
+
return {
|
|
119
|
+
id: `collection.${collection}.create`,
|
|
120
|
+
label: `Create collection ${collection}`,
|
|
121
|
+
operationClass: 'additive',
|
|
122
|
+
precheck: [
|
|
123
|
+
{
|
|
124
|
+
description: `collection ${collection} does not exist`,
|
|
125
|
+
source: new ListCollectionsCommand(),
|
|
126
|
+
filter: MongoFieldFilter.eq('name', collection),
|
|
127
|
+
expect: 'notExists',
|
|
128
|
+
},
|
|
129
|
+
],
|
|
130
|
+
execute: [
|
|
131
|
+
{
|
|
132
|
+
description: `create collection ${collection}`,
|
|
133
|
+
command: new CreateCollectionCommand(collection, options),
|
|
134
|
+
},
|
|
135
|
+
],
|
|
136
|
+
postcheck: [],
|
|
137
|
+
};
|
|
138
|
+
}
|
|
139
|
+
|
|
140
|
+
export function dropCollection(collection: string): MongoMigrationPlanOperation {
|
|
141
|
+
return {
|
|
142
|
+
id: `collection.${collection}.drop`,
|
|
143
|
+
label: `Drop collection ${collection}`,
|
|
144
|
+
operationClass: 'destructive',
|
|
145
|
+
precheck: [],
|
|
146
|
+
execute: [
|
|
147
|
+
{
|
|
148
|
+
description: `drop collection ${collection}`,
|
|
149
|
+
command: new DropCollectionCommand(collection),
|
|
150
|
+
},
|
|
151
|
+
],
|
|
152
|
+
postcheck: [],
|
|
153
|
+
};
|
|
154
|
+
}
|
|
155
|
+
|
|
156
|
+
export function setValidation(
|
|
157
|
+
collection: string,
|
|
158
|
+
schema: Record<string, unknown>,
|
|
159
|
+
options?: { validationLevel?: 'strict' | 'moderate'; validationAction?: 'error' | 'warn' },
|
|
160
|
+
): MongoMigrationPlanOperation {
|
|
161
|
+
return {
|
|
162
|
+
id: `collection.${collection}.setValidation`,
|
|
163
|
+
label: `Set validation on ${collection}`,
|
|
164
|
+
operationClass: 'destructive',
|
|
165
|
+
precheck: [],
|
|
166
|
+
execute: [
|
|
167
|
+
{
|
|
168
|
+
description: `set validation on ${collection}`,
|
|
169
|
+
command: new CollModCommand(collection, {
|
|
170
|
+
validator: { $jsonSchema: schema },
|
|
171
|
+
validationLevel: options?.validationLevel,
|
|
172
|
+
validationAction: options?.validationAction,
|
|
173
|
+
}),
|
|
174
|
+
},
|
|
175
|
+
],
|
|
176
|
+
postcheck: [],
|
|
177
|
+
};
|
|
178
|
+
}
|
|
179
|
+
|
|
180
|
+
export function validatedCollection(
|
|
181
|
+
name: string,
|
|
182
|
+
schema: Record<string, unknown>,
|
|
183
|
+
indexes: ReadonlyArray<{ keys: MongoIndexKey[]; unique?: boolean }>,
|
|
184
|
+
): MongoMigrationPlanOperation[] {
|
|
185
|
+
return [
|
|
186
|
+
createCollection(name, {
|
|
187
|
+
validator: { $jsonSchema: schema },
|
|
188
|
+
validationLevel: 'strict',
|
|
189
|
+
validationAction: 'error',
|
|
190
|
+
}),
|
|
191
|
+
...indexes.map((idx) => createIndex(name, idx.keys, { unique: idx.unique })),
|
|
192
|
+
];
|
|
193
|
+
}
|