@payloadcms/db-postgres 3.0.0-canary.a192632 → 3.0.0-canary.a7293f6
- package/dist/connect.d.ts.map +1 -1
- package/dist/connect.js +12 -5
- package/dist/connect.js.map +1 -1
- package/dist/index.d.ts +2 -2
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +9 -16
- package/dist/index.js.map +1 -1
- package/dist/predefinedMigrations/v2-v3/fetchAndResave/traverseFields.d.ts.map +1 -1
- package/dist/predefinedMigrations/v2-v3/fetchAndResave/traverseFields.js +3 -1
- package/dist/predefinedMigrations/v2-v3/fetchAndResave/traverseFields.js.map +1 -1
- package/dist/predefinedMigrations/v2-v3/index.js +2 -2
- package/dist/predefinedMigrations/v2-v3/index.js.map +1 -1
- package/dist/predefinedMigrations/v2-v3/migrateRelationships.d.ts.map +1 -1
- package/dist/predefinedMigrations/v2-v3/migrateRelationships.js +9 -3
- package/dist/predefinedMigrations/v2-v3/migrateRelationships.js.map +1 -1
- package/dist/types.d.ts +22 -106
- package/dist/types.d.ts.map +1 -1
- package/dist/types.js.map +1 -1
- package/package.json +8 -6
- package/dist/countDistinct.d.ts +0 -3
- package/dist/countDistinct.d.ts.map +0 -1
- package/dist/countDistinct.js +0 -24
- package/dist/countDistinct.js.map +0 -1
- package/dist/createJSONQuery/convertPathToJSONTraversal.d.ts +0 -2
- package/dist/createJSONQuery/convertPathToJSONTraversal.d.ts.map +0 -1
- package/dist/createJSONQuery/convertPathToJSONTraversal.js +0 -14
- package/dist/createJSONQuery/convertPathToJSONTraversal.js.map +0 -1
- package/dist/createJSONQuery/formatJSONPathSegment.d.ts +0 -2
- package/dist/createJSONQuery/formatJSONPathSegment.d.ts.map +0 -1
- package/dist/createJSONQuery/formatJSONPathSegment.js +0 -5
- package/dist/createJSONQuery/formatJSONPathSegment.js.map +0 -1
- package/dist/createJSONQuery/index.d.ts +0 -10
- package/dist/createJSONQuery/index.d.ts.map +0 -1
- package/dist/createJSONQuery/index.js +0 -54
- package/dist/createJSONQuery/index.js.map +0 -1
- package/dist/createMigration.d.ts +0 -3
- package/dist/createMigration.d.ts.map +0 -1
- package/dist/createMigration.js +0 -91
- package/dist/createMigration.js.map +0 -1
- package/dist/defaultSnapshot.d.ts +0 -3
- package/dist/defaultSnapshot.d.ts.map +0 -1
- package/dist/defaultSnapshot.js +0 -17
- package/dist/defaultSnapshot.js.map +0 -1
- package/dist/deleteWhere.d.ts +0 -3
- package/dist/deleteWhere.d.ts.map +0 -1
- package/dist/deleteWhere.js +0 -6
- package/dist/deleteWhere.js.map +0 -1
- package/dist/dropDatabase.d.ts +0 -3
- package/dist/dropDatabase.d.ts.map +0 -1
- package/dist/dropDatabase.js +0 -9
- package/dist/dropDatabase.js.map +0 -1
- package/dist/execute.d.ts +0 -3
- package/dist/execute.d.ts.map +0 -1
- package/dist/execute.js +0 -11
- package/dist/execute.js.map +0 -1
- package/dist/getMigrationTemplate.d.ts +0 -4
- package/dist/getMigrationTemplate.d.ts.map +0 -1
- package/dist/getMigrationTemplate.js +0 -13
- package/dist/getMigrationTemplate.js.map +0 -1
- package/dist/init.d.ts +0 -3
- package/dist/init.d.ts.map +0 -1
- package/dist/init.js +0 -95
- package/dist/init.js.map +0 -1
- package/dist/insert.d.ts +0 -3
- package/dist/insert.d.ts.map +0 -1
- package/dist/insert.js +0 -12
- package/dist/insert.js.map +0 -1
- package/dist/requireDrizzleKit.d.ts +0 -3
- package/dist/requireDrizzleKit.d.ts.map +0 -1
- package/dist/requireDrizzleKit.js +0 -5
- package/dist/requireDrizzleKit.js.map +0 -1
- package/dist/schema/build.d.ts +0 -38
- package/dist/schema/build.d.ts.map +0 -1
- package/dist/schema/build.js +0 -369
- package/dist/schema/build.js.map +0 -1
- package/dist/schema/createIndex.d.ts +0 -12
- package/dist/schema/createIndex.d.ts.map +0 -1
- package/dist/schema/createIndex.js +0 -18
- package/dist/schema/createIndex.js.map +0 -1
- package/dist/schema/idToUUID.d.ts +0 -3
- package/dist/schema/idToUUID.d.ts.map +0 -1
- package/dist/schema/idToUUID.js +0 -11
- package/dist/schema/idToUUID.js.map +0 -1
- package/dist/schema/parentIDColumnMap.d.ts +0 -4
- package/dist/schema/parentIDColumnMap.d.ts.map +0 -1
- package/dist/schema/parentIDColumnMap.js +0 -9
- package/dist/schema/parentIDColumnMap.js.map +0 -1
- package/dist/schema/setColumnID.d.ts +0 -11
- package/dist/schema/setColumnID.d.ts.map +0 -1
- package/dist/schema/setColumnID.js +0 -24
- package/dist/schema/setColumnID.js.map +0 -1
- package/dist/schema/traverseFields.d.ts +0 -36
- package/dist/schema/traverseFields.d.ts.map +0 -1
- package/dist/schema/traverseFields.js +0 -613
- package/dist/schema/traverseFields.js.map +0 -1
- package/dist/schema/withDefault.d.ts +0 -4
- package/dist/schema/withDefault.d.ts.map +0 -1
- package/dist/schema/withDefault.js +0 -10
- package/dist/schema/withDefault.js.map +0 -1
package/dist/types.d.ts.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"types.d.ts","sourceRoot":"","sources":["../src/types.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,
+
{"version":3,"file":"types.d.ts","sourceRoot":"","sources":["../src/types.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EACV,mBAAmB,EACnB,WAAW,EACX,eAAe,EACf,aAAa,EACb,UAAU,EACV,kBAAkB,EACnB,MAAM,8BAA8B,CAAA;AACrC,OAAO,KAAK,EAAE,cAAc,EAAE,MAAM,2BAA2B,CAAA;AAC/D,OAAO,KAAK,EAAE,aAAa,EAAE,MAAM,aAAa,CAAA;AAChD,OAAO,KAAK,EAAE,QAAQ,EAAE,SAAS,EAAE,mBAAmB,EAAE,MAAM,qBAAqB,CAAA;AACnF,OAAO,KAAK,EAAE,IAAI,EAAE,UAAU,EAAE,MAAM,IAAI,CAAA;AAE1C,MAAM,MAAM,IAAI,GAAG;IACjB;;;;OAIG;IACH,eAAe,CAAC,EAAE,kBAAkB,EAAE,CAAA;IACtC;;;;OAIG;IACH,gBAAgB,CAAC,EAAE,kBAAkB,EAAE,CAAA;IACvC,MAAM,CAAC,EAAE,QAAQ,GAAG,MAAM,CAAA;IAC1B,aAAa,CAAC,EAAE,MAAM,CAAA;IACtB,MAAM,CAAC,EAAE,aAAa,CAAC,QAAQ,CAAC,CAAA;IAChC,YAAY,CAAC,EAAE,MAAM,CAAA;IACrB,IAAI,EAAE,UAAU,CAAA;IAChB,cAAc,CAAC,EAAE;QACf,IAAI,EAAE,CAAC,IAAI,EAAE,eAAe,KAAK,OAAO,CAAC,IAAI,CAAC,CAAA;QAC9C,IAAI,EAAE,MAAM,CAAA;QACZ,EAAE,EAAE,CAAC,IAAI,EAAE,aAAa,KAAK,OAAO,CAAC,IAAI,CAAC,CAAA;KAC3C,EAAE,CAAA;IACH,IAAI,CAAC,EAAE,OAAO,CAAA;IACd,mBAAmB,CAAC,EAAE,MAAM,CAAA;IAC5B;;;OAGG;IACH,UAAU,CAAC,EAAE,MAAM,CAAA;IACnB,kBAAkB,CAAC,EAAE,KAAK,GAAG,mBAAmB,CAAA;IAChD,cAAc,CAAC,EAAE,MAAM,CAAA;CACxB,CAAA;AAED,MAAM,MAAM,eAAe,GAAG;IAC5B,IAAI,EAAE,IAAI,CAAA;IACV,WAAW,EAAE,UAAU,CAAA;CACxB,GAAG,mBAAmB,CAAA;AAEvB,OAAO,QAAQ,SAAS,CAAC;IACvB,UAAiB,eACf,SAAQ,IAAI,CAAC,IAAI,EAAE,QAAQ,GAAG,QAAQ,GAAG,cAAc,GAAG,MAAM,CAAC,EAC/D,cAAc;QAChB,eAAe,EAAE,kBAAkB,EAAE,CAAA;QACrC,gBAAgB,EAAE,kBAAkB,EAAE,CAAA;QACtC,gBAAgB,EAAE,CAAC,OAAO,CAAC,EAAE,mBAAmB,KAAK,OAAO,CAAC,IAAI,GAAG,MAAM,GAAG,MAAM,CAAC,CAAA;QACpF,OAAO,EAAE,UAAU,CAAA;QACnB,KAAK,EAAE,MAAM,CAAC,MAAM,EAAE,WAAW,CAAC,CAAA;QAClC;;;WAGG;QACH,gBAAgB,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC,CAAA;QACxD,MAAM,EAAE,IAAI,CAAC,QAAQ,CAAC,CAAA;QACtB,YAAY,EAAE,OAAO,CAAC,IAAI,CAAC,CAAA;QAC3B,aAAa,CAAC,EAAE,MAAM,CAAA;QACtB,MAAM,EAAE,aAAa,CAAC,QAAQ,CAAC,CAAA;QAC/B,QAAQ,CAAC,EAAE;YAAE,KAAK,EAAE,SAAS,CAAA;SAAE,GAAG,QAAQ,CAAA;QAC1C,IAAI,EAAE,IAAI,CAAA;QACV,WAAW,EAAE,IAAI,CAAC,MAAM,CAAC,CAAA;QACzB,cAAc,CAAC,EAAE;YACf,IAAI,EAAE,CAAC,IAAI,EAAE,eAAe,KAAK,OAAO,CAAC,IAAI,CAAC,CAAA;YAC9C,IAAI,EAAE,MAAM,CAAA;YACZ,EAAE,EAAE,CAAC,IAAI,EAAE,aAAa,KAAK,OAAO,CAAC,IAAI,CAAC,CAAA;SAC3C,EAAE,CAAA;QACH,IAAI,EAAE,OAAO,CAAA;QACb,kBAAkB,EAAE,MAAM,IAAI,CAAA;QAC9B,mBAAmB,CAAC,EAAE,MAAM,CAAA;QAC5B,mBAAmB,EAAE,MAAM,IAAI,CAAA;QAC/B,MAAM,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,CAAA;QAC/B,UAAU,CAAC,EAAE,IAAI,CAAC,YAAY,CAAC,CAAA;QAC/B,YAAY,EAAE,GAAG,CAAC,MAAM,EAAE,MAAM,CAAC,CAAA;QACjC,cAAc,CAAC,EAAE,MAAM,CAAA;KACxB;CACF"}
package/dist/types.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"sources":["../src/types.ts"],"sourcesContent":["import type {
+
{"version":3,"sources":["../src/types.ts"],"sourcesContent":["import type {\n BasePostgresAdapter,\n GenericEnum,\n MigrateDownArgs,\n MigrateUpArgs,\n PostgresDB,\n PostgresSchemaHook,\n} from '@payloadcms/drizzle/postgres'\nimport type { DrizzleAdapter } from '@payloadcms/drizzle/types'\nimport type { DrizzleConfig } from 'drizzle-orm'\nimport type { PgSchema, PgTableFn, PgTransactionConfig } from 'drizzle-orm/pg-core'\nimport type { Pool, PoolConfig } from 'pg'\n\nexport type Args = {\n /**\n * Transform the schema after it's built.\n * You can use it to customize the schema with features that aren't supported by Payload.\n * Examples may include: composite indices, generated columns, vectors\n */\n afterSchemaInit?: PostgresSchemaHook[]\n /**\n * Transform the schema before it's built.\n * You can use it to preserve an existing database schema and if there are any collissions Payload will override them.\n * To generate Drizzle schema from the database, see [Drizzle Kit introspection](https://orm.drizzle.team/kit-docs/commands#introspect--pull)\n */\n beforeSchemaInit?: PostgresSchemaHook[]\n idType?: 'serial' | 'uuid'\n localesSuffix?: string\n logger?: DrizzleConfig['logger']\n migrationDir?: string\n pool: PoolConfig\n prodMigrations?: {\n down: (args: MigrateDownArgs) => Promise<void>\n name: string\n up: (args: MigrateUpArgs) => Promise<void>\n }[]\n push?: boolean\n relationshipsSuffix?: string\n /**\n * The schema name to use for the database\n * @experimental This only works when there are not other tables or enums of the same name in the database under a different schema. Awaiting fix from Drizzle.\n */\n schemaName?: string\n transactionOptions?: false | PgTransactionConfig\n versionsSuffix?: string\n}\n\nexport type PostgresAdapter = {\n pool: Pool\n poolOptions: PoolConfig\n} & BasePostgresAdapter\n\ndeclare module 'payload' {\n export interface DatabaseAdapter\n extends Omit<Args, 'idType' | 'logger' | 'migrationDir' | 'pool'>,\n DrizzleAdapter {\n afterSchemaInit: PostgresSchemaHook[]\n beforeSchemaInit: PostgresSchemaHook[]\n beginTransaction: (options?: PgTransactionConfig) => Promise<null | number | string>\n drizzle: PostgresDB\n enums: Record<string, GenericEnum>\n /**\n * An object keyed on each table, with a key value pair where the constraint name is the key, followed by the dot-notation field name\n * Used for returning properly formed errors from unique fields\n */\n fieldConstraints: Record<string, Record<string, string>>\n idType: Args['idType']\n initializing: Promise<void>\n localesSuffix?: string\n logger: DrizzleConfig['logger']\n pgSchema?: { table: PgTableFn } | PgSchema\n pool: Pool\n poolOptions: Args['pool']\n prodMigrations?: {\n down: (args: MigrateDownArgs) => Promise<void>\n name: string\n up: (args: MigrateUpArgs) => Promise<void>\n }[]\n push: boolean\n rejectInitializing: () => void\n relationshipsSuffix?: string\n resolveInitializing: () => void\n schema: Record<string, unknown>\n schemaName?: Args['schemaName']\n tableNameMap: Map<string, string>\n versionsSuffix?: string\n }\n}\n"],"names":[],"mappings":"AA+CA,WAGuB"}
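The rewritten `types.d.ts` (its source map above embeds the full `src/types.ts` in `sourcesContent`) now centers on the `Args` config object plus a `declare module 'payload'` augmentation. As orientation only, a hedged sketch of an adapter config built from those `Args` fields; the `postgresAdapter` factory import and the env variable name are assumptions, not shown by this diff:

```ts
// Sketch only. Field names (pool, idType, push, migrationDir, schemaName,
// afterSchemaInit/beforeSchemaInit) come from the Args type embedded above;
// the postgresAdapter import is an assumption about the package's public export.
import { postgresAdapter } from '@payloadcms/db-postgres'

export const db = postgresAdapter({
  pool: { connectionString: process.env.DATABASE_URI }, // PoolConfig from 'pg'
  idType: 'uuid',                   // 'serial' | 'uuid'
  push: false,                      // Args.push?: boolean
  migrationDir: './src/migrations', // Args.migrationDir?: string
  // schemaName: 'payload',         // marked @experimental in the type's JSDoc
  // afterSchemaInit / beforeSchemaInit take PostgresSchemaHook[] (signature not shown here)
})
```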
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@payloadcms/db-postgres",
-  "version": "3.0.0-canary.
+  "version": "3.0.0-canary.a7293f6",
   "description": "The officially supported Postgres database adapter for Payload",
   "homepage": "https://payloadcms.com",
   "repository": {
@@ -42,18 +42,18 @@
     "prompts": "2.4.2",
     "to-snake-case": "1.0.0",
     "uuid": "10.0.0",
-    "@payloadcms/drizzle": "3.0.0-canary.
+    "@payloadcms/drizzle": "3.0.0-canary.a7293f6"
   },
   "devDependencies": {
     "@hyrious/esbuild-plugin-commonjs": "^0.2.4",
     "@types/pg": "8.10.2",
     "@types/to-snake-case": "1.0.0",
-    "esbuild": "0.23.
-    "@payloadcms/eslint-config": "3.0.0-beta.
-    "payload": "3.0.0-canary.
+    "esbuild": "0.23.1",
+    "@payloadcms/eslint-config": "3.0.0-beta.97",
+    "payload": "3.0.0-canary.a7293f6"
   },
   "peerDependencies": {
-    "payload": "3.0.0-canary.
+    "payload": "3.0.0-canary.a7293f6"
   },
   "scripts": {
     "build": "rimraf .dist && rimraf tsconfig.tsbuildinfo && pnpm build:types && pnpm build:swc && pnpm build:esbuild && pnpm renamePredefinedMigrations",
@@ -61,6 +61,8 @@
     "build:swc": "swc ./src -d ./dist --config-file .swcrc --strip-leading-paths",
     "build:types": "tsc --emitDeclarationOnly --outDir dist",
     "clean": "rimraf {dist,*.tsbuildinfo}",
+    "lint": "eslint .",
+    "lint:fix": "eslint . --fix",
     "renamePredefinedMigrations": "node --no-deprecation --import @swc-node/register/esm-register ./scripts/renamePredefinedMigrations.ts"
   }
 }
package/dist/countDistinct.d.ts
DELETED
package/dist/countDistinct.d.ts.map
DELETED
@@ -1 +0,0 @@
{"version":3,"file":"countDistinct.d.ts","sourceRoot":"","sources":["../src/countDistinct.ts"],"names":[],"mappings":"AAKA,OAAO,KAAK,EAAE,aAAa,EAAmB,MAAM,YAAY,CAAA;AAEhE,eAAO,MAAM,aAAa,EAAE,aAyB3B,CAAA"}
package/dist/countDistinct.js
DELETED
@@ -1,24 +0,0 @@
import { chainMethods } from '@payloadcms/drizzle';
import { sql } from 'drizzle-orm';
export const countDistinct = async function countDistinct({ db, joins, tableName, where }) {
    const chainedMethods = [];
    joins.forEach(({ condition, table })=>{
        chainedMethods.push({
            args: [
                table,
                condition
            ],
            method: 'leftJoin'
        });
    });
    const countResult = await chainMethods({
        methods: chainedMethods,
        query: db.select({
            count: sql`count
      (DISTINCT ${this.tables[tableName].id})`
        }).from(this.tables[tableName]).where(where)
    });
    return Number(countResult[0].count);
};

//# sourceMappingURL=countDistinct.js.map
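The removed `countDistinct` above chains `leftJoin` calls through `chainMethods` before selecting a `count(DISTINCT id)`. A minimal standalone sketch of the same query shape in plain drizzle-orm; the table definitions and join condition are hypothetical placeholders:

```ts
// Hypothetical tables; only the query shape mirrors the removed helper.
import { eq, sql } from 'drizzle-orm'
import { type NodePgDatabase } from 'drizzle-orm/node-postgres'
import { integer, pgTable, serial } from 'drizzle-orm/pg-core'

const posts = pgTable('posts', { id: serial('id').primaryKey() })
const postsRels = pgTable('posts_rels', {
  id: serial('id').primaryKey(),
  parent: integer('parent_id'),
})

export async function countDistinctPosts(db: NodePgDatabase): Promise<number> {
  const result = await db
    .select({ count: sql<string>`count(DISTINCT ${posts.id})` })
    .from(posts)
    .leftJoin(postsRels, eq(postsRels.parent, posts.id))
  return Number(result[0].count)
}
```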
package/dist/countDistinct.js.map
DELETED
@@ -1 +0,0 @@
{"version":3,"sources":["../src/countDistinct.ts"],"sourcesContent":["import type { ChainedMethods, TransactionPg } from '@payloadcms/drizzle/types'\n\nimport { chainMethods } from '@payloadcms/drizzle'\nimport { sql } from 'drizzle-orm'\n\nimport type { CountDistinct, PostgresAdapter } from './types.js'\n\nexport const countDistinct: CountDistinct = async function countDistinct(\n this: PostgresAdapter,\n { db, joins, tableName, where },\n) {\n const chainedMethods: ChainedMethods = []\n\n joins.forEach(({ condition, table }) => {\n chainedMethods.push({\n args: [table, condition],\n method: 'leftJoin',\n })\n })\n\n const countResult = await chainMethods({\n methods: chainedMethods,\n query: (db as TransactionPg)\n .select({\n count: sql<string>`count\n (DISTINCT ${this.tables[tableName].id})`,\n })\n .from(this.tables[tableName])\n .where(where),\n })\n\n return Number(countResult[0].count)\n}\n"],"names":["chainMethods","sql","countDistinct","db","joins","tableName","where","chainedMethods","forEach","condition","table","push","args","method","countResult","methods","query","select","count","tables","id","from","Number"],"mappings":"AAEA,SAASA,YAAY,QAAQ,sBAAqB;AAClD,SAASC,GAAG,QAAQ,cAAa;AAIjC,OAAO,MAAMC,gBAA+B,eAAeA,cAEzD,EAAEC,EAAE,EAAEC,KAAK,EAAEC,SAAS,EAAEC,KAAK,EAAE;IAE/B,MAAMC,iBAAiC,EAAE;IAEzCH,MAAMI,OAAO,CAAC,CAAC,EAAEC,SAAS,EAAEC,KAAK,EAAE;QACjCH,eAAeI,IAAI,CAAC;YAClBC,MAAM;gBAACF;gBAAOD;aAAU;YACxBI,QAAQ;QACV;IACF;IAEA,MAAMC,cAAc,MAAMd,aAAa;QACrCe,SAASR;QACTS,OAAO,AAACb,GACLc,MAAM,CAAC;YACNC,OAAOjB,GAAW,CAAC;sBACL,EAAE,IAAI,CAACkB,MAAM,CAACd,UAAU,CAACe,EAAE,CAAC,CAAC,CAAC;QAC9C,GACCC,IAAI,CAAC,IAAI,CAACF,MAAM,CAACd,UAAU,EAC3BC,KAAK,CAACA;IACX;IAEA,OAAOgB,OAAOR,WAAW,CAAC,EAAE,CAACI,KAAK;AACpC,EAAC"}
package/dist/createJSONQuery/convertPathToJSONTraversal.d.ts.map
DELETED
@@ -1 +0,0 @@
{"version":3,"file":"convertPathToJSONTraversal.d.ts","sourceRoot":"","sources":["../../src/createJSONQuery/convertPathToJSONTraversal.ts"],"names":[],"mappings":"AAEA,eAAO,MAAM,0BAA0B,qBAAsB,MAAM,EAAE,WAUpE,CAAA"}
package/dist/createJSONQuery/convertPathToJSONTraversal.js
DELETED
@@ -1,14 +0,0 @@
import { formatJSONPathSegment } from './formatJSONPathSegment.js';
export const convertPathToJSONTraversal = (incomingSegments)=>{
    const segments = [
        ...incomingSegments
    ];
    segments.shift();
    return segments.reduce((res, segment, i)=>{
        const formattedSegment = formatJSONPathSegment(segment);
        if (i + 1 === segments.length) return `${res}->>${formattedSegment}`;
        return `${res}->${formattedSegment}`;
    }, '');
};

//# sourceMappingURL=convertPathToJSONTraversal.js.map
package/dist/createJSONQuery/convertPathToJSONTraversal.js.map
DELETED
@@ -1 +0,0 @@
{"version":3,"sources":["../../src/createJSONQuery/convertPathToJSONTraversal.ts"],"sourcesContent":["import { formatJSONPathSegment } from './formatJSONPathSegment.js'\n\nexport const convertPathToJSONTraversal = (incomingSegments: string[]) => {\n const segments = [...incomingSegments]\n segments.shift()\n\n return segments.reduce((res, segment, i) => {\n const formattedSegment = formatJSONPathSegment(segment)\n\n if (i + 1 === segments.length) return `${res}->>${formattedSegment}`\n return `${res}->${formattedSegment}`\n }, '')\n}\n"],"names":["formatJSONPathSegment","convertPathToJSONTraversal","incomingSegments","segments","shift","reduce","res","segment","i","formattedSegment","length"],"mappings":"AAAA,SAASA,qBAAqB,QAAQ,6BAA4B;AAElE,OAAO,MAAMC,6BAA6B,CAACC;IACzC,MAAMC,WAAW;WAAID;KAAiB;IACtCC,SAASC,KAAK;IAEd,OAAOD,SAASE,MAAM,CAAC,CAACC,KAAKC,SAASC;QACpC,MAAMC,mBAAmBT,sBAAsBO;QAE/C,IAAIC,IAAI,MAAML,SAASO,MAAM,EAAE,OAAO,CAAC,EAAEJ,IAAI,GAAG,EAAEG,iBAAiB,CAAC;QACpE,OAAO,CAAC,EAAEH,IAAI,EAAE,EAAEG,iBAAiB,CAAC;IACtC,GAAG;AACL,EAAC"}
package/dist/createJSONQuery/formatJSONPathSegment.d.ts.map
DELETED
@@ -1 +0,0 @@
{"version":3,"file":"formatJSONPathSegment.d.ts","sourceRoot":"","sources":["../../src/createJSONQuery/formatJSONPathSegment.ts"],"names":[],"mappings":"AAAA,eAAO,MAAM,qBAAqB,YAAa,MAAM,WAEpD,CAAA"}
package/dist/createJSONQuery/formatJSONPathSegment.js.map
DELETED
@@ -1 +0,0 @@
{"version":3,"sources":["../../src/createJSONQuery/formatJSONPathSegment.ts"],"sourcesContent":["export const formatJSONPathSegment = (segment: string) => {\n return Number.isNaN(parseInt(segment)) ? `'${segment}'` : segment\n}\n"],"names":["formatJSONPathSegment","segment","Number","isNaN","parseInt"],"mappings":"AAAA,OAAO,MAAMA,wBAAwB,CAACC;IACpC,OAAOC,OAAOC,KAAK,CAACC,SAASH,YAAY,CAAC,CAAC,EAAEA,QAAQ,CAAC,CAAC,GAAGA;AAC5D,EAAC"}
package/dist/createJSONQuery/index.d.ts
DELETED
@@ -1,10 +0,0 @@
type Args = {
    operator: string;
    pathSegments: string[];
    treatAsArray?: string[];
    treatRootAsArray?: boolean;
    value: unknown;
};
export declare const createJSONQuery: ({ operator, pathSegments, treatAsArray, treatRootAsArray, value, }: Args) => string;
export {};
//# sourceMappingURL=index.d.ts.map
package/dist/createJSONQuery/index.d.ts.map
DELETED
@@ -1 +0,0 @@
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/createJSONQuery/index.ts"],"names":[],"mappings":"AAmDA,KAAK,IAAI,GAAG;IACV,QAAQ,EAAE,MAAM,CAAA;IAChB,YAAY,EAAE,MAAM,EAAE,CAAA;IACtB,YAAY,CAAC,EAAE,MAAM,EAAE,CAAA;IACvB,gBAAgB,CAAC,EAAE,OAAO,CAAA;IAC1B,KAAK,EAAE,OAAO,CAAA;CACf,CAAA;AAED,eAAO,MAAM,eAAe,uEAMzB,IAAI,KAAG,MAqBT,CAAA"}
package/dist/createJSONQuery/index.js
DELETED
@@ -1,54 +0,0 @@
import { convertPathToJSONTraversal } from './convertPathToJSONTraversal.js';
import { formatJSONPathSegment } from './formatJSONPathSegment.js';
const operatorMap = {
    contains: '~*',
    equals: '=',
    like: '~*'
};
const fromArray = ({ isRoot, operator, pathSegments, treatAsArray, value })=>{
    const newPathSegments = pathSegments.slice(isRoot ? 1 : 2);
    const alias = `${pathSegments[isRoot ? 0 : 1]}_alias_${newPathSegments.length}`;
    newPathSegments.unshift(alias);
    const arrayElements = isRoot ? pathSegments[0] : `${pathSegments[0]} -> ${formatJSONPathSegment(pathSegments[1])}`;
    return `EXISTS (
    SELECT 1
    FROM jsonb_array_elements(${arrayElements}) AS ${alias}
    WHERE ${createJSONQuery({
        operator,
        pathSegments: newPathSegments,
        treatAsArray,
        value
    })}
  )`;
};
const createConstraint = ({ operator, pathSegments, value })=>{
    const jsonQuery = convertPathToJSONTraversal(pathSegments);
    return `${pathSegments[0]}${jsonQuery} ${operatorMap[operator]} '${value}'`;
};
export const createJSONQuery = ({ operator, pathSegments, treatAsArray, treatRootAsArray, value })=>{
    if (treatRootAsArray) {
        return fromArray({
            isRoot: true,
            operator,
            pathSegments,
            treatAsArray,
            value
        });
    }
    if (treatAsArray.includes(pathSegments[1])) {
        return fromArray({
            operator,
            pathSegments,
            treatAsArray,
            value
        });
    }
    return createConstraint({
        operator,
        pathSegments,
        treatAsArray,
        value
    });
};

//# sourceMappingURL=index.js.map
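To make the removed helper's behavior concrete, here is what `createJSONQuery` produced for two hypothetical inputs, traced directly from the implementation above (the `->`/`->>` traversal comes from `convertPathToJSONTraversal`, the `EXISTS` form from `fromArray`):

```ts
// Inputs are hypothetical; the expected outputs follow the removed implementation above.
import { createJSONQuery } from './createJSONQuery/index.js'

// Plain nested path: traversal plus the mapped operator ('equals' -> '=').
createJSONQuery({
  operator: 'equals',
  pathSegments: ['data', 'meta', 'title'],
  treatAsArray: [],
  value: 'Hello',
})
// => "data->'meta'->>'title' = 'Hello'"

// Segment listed in treatAsArray: wraps the constraint in EXISTS over jsonb_array_elements.
createJSONQuery({
  operator: 'like',
  pathSegments: ['data', 'tags', 'label'],
  treatAsArray: ['tags'],
  value: 'news',
})
// => (whitespace collapsed)
// "EXISTS ( SELECT 1 FROM jsonb_array_elements(data -> 'tags') AS tags_alias_1
//   WHERE tags_alias_1->>'label' ~* 'news' )"
```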
package/dist/createJSONQuery/index.js.map
DELETED
@@ -1 +0,0 @@
{"version":3,"sources":["../../src/createJSONQuery/index.ts"],"sourcesContent":["import { convertPathToJSONTraversal } from './convertPathToJSONTraversal.js'\nimport { formatJSONPathSegment } from './formatJSONPathSegment.js'\n\nconst operatorMap = {\n contains: '~*',\n equals: '=',\n like: '~*',\n}\n\ntype FromArrayArgs = {\n isRoot?: true\n operator: string\n pathSegments: string[]\n treatAsArray?: string[]\n value: unknown\n}\n\nconst fromArray = ({ isRoot, operator, pathSegments, treatAsArray, value }: FromArrayArgs) => {\n const newPathSegments = pathSegments.slice(isRoot ? 1 : 2)\n const alias = `${pathSegments[isRoot ? 0 : 1]}_alias_${newPathSegments.length}`\n\n newPathSegments.unshift(alias)\n\n const arrayElements = isRoot\n ? pathSegments[0]\n : `${pathSegments[0]} -> ${formatJSONPathSegment(pathSegments[1])}`\n\n return `EXISTS (\n SELECT 1\n FROM jsonb_array_elements(${arrayElements}) AS ${alias}\n WHERE ${createJSONQuery({\n operator,\n pathSegments: newPathSegments,\n treatAsArray,\n value,\n })}\n )`\n}\n\ntype CreateConstraintArgs = {\n operator: string\n pathSegments: string[]\n treatAsArray?: string[]\n value: unknown\n}\n\nconst createConstraint = ({ operator, pathSegments, value }: CreateConstraintArgs): string => {\n const jsonQuery = convertPathToJSONTraversal(pathSegments)\n return `${pathSegments[0]}${jsonQuery} ${operatorMap[operator]} '${value}'`\n}\n\ntype Args = {\n operator: string\n pathSegments: string[]\n treatAsArray?: string[]\n treatRootAsArray?: boolean\n value: unknown\n}\n\nexport const createJSONQuery = ({\n operator,\n pathSegments,\n treatAsArray,\n treatRootAsArray,\n value,\n}: Args): string => {\n if (treatRootAsArray) {\n return fromArray({\n isRoot: true,\n operator,\n pathSegments,\n treatAsArray,\n value,\n })\n }\n\n if (treatAsArray.includes(pathSegments[1])) {\n return fromArray({\n operator,\n pathSegments,\n treatAsArray,\n value,\n })\n }\n\n return createConstraint({ operator, pathSegments, treatAsArray, value })\n}\n"],"names":["convertPathToJSONTraversal","formatJSONPathSegment","operatorMap","contains","equals","like","fromArray","isRoot","operator","pathSegments","treatAsArray","value","newPathSegments","slice","alias","length","unshift","arrayElements","createJSONQuery","createConstraint","jsonQuery","treatRootAsArray","includes"],"mappings":"AAAA,SAASA,0BAA0B,QAAQ,kCAAiC;AAC5E,SAASC,qBAAqB,QAAQ,6BAA4B;AAElE,MAAMC,cAAc;IAClBC,UAAU;IACVC,QAAQ;IACRC,MAAM;AACR;AAUA,MAAMC,YAAY,CAAC,EAAEC,MAAM,EAAEC,QAAQ,EAAEC,YAAY,EAAEC,YAAY,EAAEC,KAAK,EAAiB;IACvF,MAAMC,kBAAkBH,aAAaI,KAAK,CAACN,SAAS,IAAI;IACxD,MAAMO,QAAQ,CAAC,EAAEL,YAAY,CAACF,SAAS,IAAI,EAAE,CAAC,OAAO,EAAEK,gBAAgBG,MAAM,CAAC,CAAC;IAE/EH,gBAAgBI,OAAO,CAACF;IAExB,MAAMG,gBAAgBV,SAClBE,YAAY,CAAC,EAAE,GACf,CAAC,EAAEA,YAAY,CAAC,EAAE,CAAC,IAAI,EAAER,sBAAsBQ,YAAY,CAAC,EAAE,EAAE,CAAC;IAErE,OAAO,CAAC;;8BAEoB,EAAEQ,cAAc,KAAK,EAAEH,MAAM;UACjD,EAAEI,gBAAgB;QACtBV;QACAC,cAAcG;QACdF;QACAC;IACF,GAAG;GACJ,CAAC;AACJ;AASA,MAAMQ,mBAAmB,CAAC,EAAEX,QAAQ,EAAEC,YAAY,EAAEE,KAAK,EAAwB;IAC/E,MAAMS,YAAYpB,2BAA2BS;IAC7C,OAAO,CAAC,EAAEA,YAAY,CAAC,EAAE,CAAC,EAAEW,UAAU,CAAC,EAAElB,WAAW,CAACM,SAAS,CAAC,EAAE,EAAEG,MAAM,CAAC,CAAC;AAC7E;AAUA,OAAO,MAAMO,kBAAkB,CAAC,EAC9BV,QAAQ,EACRC,YAAY,EACZC,YAAY,EACZW,gBAAgB,EAChBV,KAAK,EACA;IACL,IAAIU,kBAAkB;QACpB,OAAOf,UAAU;YACfC,QAAQ;YACRC;YACAC;YACAC;YACAC;QACF;IACF;IAEA,IAAID,aAAaY,QAAQ,CAACb,YAAY,CAAC,EAAE,GAAG;QAC1C,OAAOH,UAAU;YACfE;YACAC;YACAC;YACAC;QACF;IACF;IAEA,OAAOQ,iBAAiB;QAAEX;QAAUC;QAAcC;QAAcC;IAAM;AACxE,EAAC"}
package/dist/createMigration.d.ts.map
DELETED
@@ -1 +0,0 @@
{"version":3,"file":"createMigration.d.ts","sourceRoot":"","sources":["../src/createMigration.ts"],"names":[],"mappings":"AACA,OAAO,KAAK,EAAE,eAAe,EAAE,MAAM,SAAS,CAAA;AAgB9C,eAAO,MAAM,eAAe,EAAE,eAwG7B,CAAA"}
package/dist/createMigration.js
DELETED
@@ -1,91 +0,0 @@
import fs from 'fs';
import { createRequire } from 'module';
import path from 'path';
import { getPredefinedMigration, writeMigrationIndex } from 'payload';
import prompts from 'prompts';
import { fileURLToPath } from 'url';
import { defaultDrizzleSnapshot } from './defaultSnapshot.js';
import { getMigrationTemplate } from './getMigrationTemplate.js';
const require = createRequire(import.meta.url);
export const createMigration = async function createMigration({ file, forceAcceptWarning, migrationName, payload }) {
    const filename = fileURLToPath(import.meta.url);
    const dirname = path.dirname(filename);
    const dir = payload.db.migrationDir;
    if (!fs.existsSync(dir)) {
        fs.mkdirSync(dir);
    }
    const { generateDrizzleJson, generateMigration, upPgSnapshot } = require('drizzle-kit/api');
    const drizzleJsonAfter = generateDrizzleJson(this.schema);
    const [yyymmdd, hhmmss] = new Date().toISOString().split('T');
    const formattedDate = yyymmdd.replace(/\D/g, '');
    const formattedTime = hhmmss.split('.')[0].replace(/\D/g, '');
    let imports = '';
    let downSQL;
    let upSQL;
    ({ downSQL, imports, upSQL } = await getPredefinedMigration({
        dirname,
        file,
        migrationName,
        payload
    }));
    const timestamp = `${formattedDate}_${formattedTime}`;
    const name = migrationName || file?.split('/').slice(2).join('/');
    const fileName = `${timestamp}${name ? `_${name.replace(/\W/g, '_')}` : ''}`;
    const filePath = `${dir}/${fileName}`;
    let drizzleJsonBefore = defaultDrizzleSnapshot;
    if (this.schemaName) {
        drizzleJsonBefore.schemas = {
            [this.schemaName]: this.schemaName
        };
    }
    if (!upSQL) {
        // Get latest migration snapshot
        const latestSnapshot = fs.readdirSync(dir).filter((file)=>file.endsWith('.json')).sort().reverse()?.[0];
        if (latestSnapshot) {
            drizzleJsonBefore = JSON.parse(fs.readFileSync(`${dir}/${latestSnapshot}`, 'utf8'));
            if (drizzleJsonBefore.version < drizzleJsonAfter.version) {
                drizzleJsonBefore = upPgSnapshot(drizzleJsonBefore);
            }
        }
        const sqlStatementsUp = await generateMigration(drizzleJsonBefore, drizzleJsonAfter);
        const sqlStatementsDown = await generateMigration(drizzleJsonAfter, drizzleJsonBefore);
        const sqlExecute = 'await payload.db.drizzle.execute(sql`';
        if (sqlStatementsUp?.length) {
            upSQL = `${sqlExecute}\n ${sqlStatementsUp?.join('\n')}\`)`;
        }
        if (sqlStatementsDown?.length) {
            downSQL = `${sqlExecute}\n ${sqlStatementsDown?.join('\n')}\`)`;
        }
        if (!upSQL?.length && !downSQL?.length && !forceAcceptWarning) {
            const { confirm: shouldCreateBlankMigration } = await prompts({
                name: 'confirm',
                type: 'confirm',
                initial: false,
                message: 'No schema changes detected. Would you like to create a blank migration file?'
            }, {
                onCancel: ()=>{
                    process.exit(0);
                }
            });
            if (!shouldCreateBlankMigration) {
                process.exit(0);
            }
        }
        // write schema
        fs.writeFileSync(`${filePath}.json`, JSON.stringify(drizzleJsonAfter, null, 2));
    }
    // write migration
    fs.writeFileSync(`${filePath}.ts`, getMigrationTemplate({
        downSQL: downSQL || ` // Migration code`,
        imports,
        upSQL: upSQL || ` // Migration code`
    }));
    writeMigrationIndex({
        migrationsDir: payload.db.migrationDir
    });
    payload.logger.info({
        msg: `Migration created at ${filePath}.ts`
    });
};

//# sourceMappingURL=createMigration.js.map
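The file name assembled above follows a `YYYYMMDD_HHMMSS[_name]` pattern: the ISO timestamp stripped to digits, plus the migration name with non-word characters replaced by underscores. A standalone sketch of just that naming logic; the sample output is illustrative:

```ts
// Mirrors the timestamp/name handling in the removed createMigration above.
const buildMigrationFileName = (migrationName?: string): string => {
  const [yyyymmdd, hhmmss] = new Date().toISOString().split('T')
  const formattedDate = yyyymmdd.replace(/\D/g, '')
  const formattedTime = hhmmss.split('.')[0].replace(/\D/g, '')
  const timestamp = `${formattedDate}_${formattedTime}`
  return `${timestamp}${migrationName ? `_${migrationName.replace(/\W/g, '_')}` : ''}`
}

console.log(buildMigrationFileName('add posts table'))
// e.g. "20240904_153012_add_posts_table"
```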
package/dist/createMigration.js.map
DELETED
@@ -1 +0,0 @@
{"version":3,"sources":["../src/createMigration.ts"],"sourcesContent":["import type { DrizzleSnapshotJSON } from 'drizzle-kit/api'\nimport type { CreateMigration } from 'payload'\n\nimport fs from 'fs'\nimport { createRequire } from 'module'\nimport path from 'path'\nimport { getPredefinedMigration, writeMigrationIndex } from 'payload'\nimport prompts from 'prompts'\nimport { fileURLToPath } from 'url'\n\nimport type { PostgresAdapter } from './types.js'\n\nimport { defaultDrizzleSnapshot } from './defaultSnapshot.js'\nimport { getMigrationTemplate } from './getMigrationTemplate.js'\n\nconst require = createRequire(import.meta.url)\n\nexport const createMigration: CreateMigration = async function createMigration(\n this: PostgresAdapter,\n { file, forceAcceptWarning, migrationName, payload },\n) {\n const filename = fileURLToPath(import.meta.url)\n const dirname = path.dirname(filename)\n const dir = payload.db.migrationDir\n if (!fs.existsSync(dir)) {\n fs.mkdirSync(dir)\n }\n const { generateDrizzleJson, generateMigration, upPgSnapshot } = require('drizzle-kit/api')\n const drizzleJsonAfter = generateDrizzleJson(this.schema)\n const [yyymmdd, hhmmss] = new Date().toISOString().split('T')\n const formattedDate = yyymmdd.replace(/\\D/g, '')\n const formattedTime = hhmmss.split('.')[0].replace(/\\D/g, '')\n let imports: string = ''\n let downSQL: string\n let upSQL: string\n ;({ downSQL, imports, upSQL } = await getPredefinedMigration({\n dirname,\n file,\n migrationName,\n payload,\n }))\n\n const timestamp = `${formattedDate}_${formattedTime}`\n\n const name = migrationName || file?.split('/').slice(2).join('/')\n const fileName = `${timestamp}${name ? `_${name.replace(/\\W/g, '_')}` : ''}`\n\n const filePath = `${dir}/${fileName}`\n\n let drizzleJsonBefore = defaultDrizzleSnapshot\n\n if (this.schemaName) {\n drizzleJsonBefore.schemas = {\n [this.schemaName]: this.schemaName,\n }\n }\n\n if (!upSQL) {\n // Get latest migration snapshot\n const latestSnapshot = fs\n .readdirSync(dir)\n .filter((file) => file.endsWith('.json'))\n .sort()\n .reverse()?.[0]\n\n if (latestSnapshot) {\n drizzleJsonBefore = JSON.parse(fs.readFileSync(`${dir}/${latestSnapshot}`, 'utf8'))\n\n if (drizzleJsonBefore.version < drizzleJsonAfter.version) {\n drizzleJsonBefore = upPgSnapshot(drizzleJsonBefore)\n }\n }\n\n const sqlStatementsUp = await generateMigration(drizzleJsonBefore, drizzleJsonAfter)\n const sqlStatementsDown = await generateMigration(drizzleJsonAfter, drizzleJsonBefore)\n const sqlExecute = 'await payload.db.drizzle.execute(sql`'\n\n if (sqlStatementsUp?.length) {\n upSQL = `${sqlExecute}\\n ${sqlStatementsUp?.join('\\n')}\\`)`\n }\n if (sqlStatementsDown?.length) {\n downSQL = `${sqlExecute}\\n ${sqlStatementsDown?.join('\\n')}\\`)`\n }\n\n if (!upSQL?.length && !downSQL?.length && !forceAcceptWarning) {\n const { confirm: shouldCreateBlankMigration } = await prompts(\n {\n name: 'confirm',\n type: 'confirm',\n initial: false,\n message: 'No schema changes detected. 
Would you like to create a blank migration file?',\n },\n {\n onCancel: () => {\n process.exit(0)\n },\n },\n )\n\n if (!shouldCreateBlankMigration) {\n process.exit(0)\n }\n }\n\n // write schema\n fs.writeFileSync(`${filePath}.json`, JSON.stringify(drizzleJsonAfter, null, 2))\n }\n\n // write migration\n fs.writeFileSync(\n `${filePath}.ts`,\n getMigrationTemplate({\n downSQL: downSQL || ` // Migration code`,\n imports,\n upSQL: upSQL || ` // Migration code`,\n }),\n )\n\n writeMigrationIndex({ migrationsDir: payload.db.migrationDir })\n\n payload.logger.info({ msg: `Migration created at ${filePath}.ts` })\n}\n"],"names":["fs","createRequire","path","getPredefinedMigration","writeMigrationIndex","prompts","fileURLToPath","defaultDrizzleSnapshot","getMigrationTemplate","require","url","createMigration","file","forceAcceptWarning","migrationName","payload","filename","dirname","dir","db","migrationDir","existsSync","mkdirSync","generateDrizzleJson","generateMigration","upPgSnapshot","drizzleJsonAfter","schema","yyymmdd","hhmmss","Date","toISOString","split","formattedDate","replace","formattedTime","imports","downSQL","upSQL","timestamp","name","slice","join","fileName","filePath","drizzleJsonBefore","schemaName","schemas","latestSnapshot","readdirSync","filter","endsWith","sort","reverse","JSON","parse","readFileSync","version","sqlStatementsUp","sqlStatementsDown","sqlExecute","length","confirm","shouldCreateBlankMigration","type","initial","message","onCancel","process","exit","writeFileSync","stringify","migrationsDir","logger","info","msg"],"mappings":"AAGA,OAAOA,QAAQ,KAAI;AACnB,SAASC,aAAa,QAAQ,SAAQ;AACtC,OAAOC,UAAU,OAAM;AACvB,SAASC,sBAAsB,EAAEC,mBAAmB,QAAQ,UAAS;AACrE,OAAOC,aAAa,UAAS;AAC7B,SAASC,aAAa,QAAQ,MAAK;AAInC,SAASC,sBAAsB,QAAQ,uBAAsB;AAC7D,SAASC,oBAAoB,QAAQ,4BAA2B;AAEhE,MAAMC,UAAUR,cAAc,YAAYS,GAAG;AAE7C,OAAO,MAAMC,kBAAmC,eAAeA,gBAE7D,EAAEC,IAAI,EAAEC,kBAAkB,EAAEC,aAAa,EAAEC,OAAO,EAAE;IAEpD,MAAMC,WAAWV,cAAc,YAAYI,GAAG;IAC9C,MAAMO,UAAUf,KAAKe,OAAO,CAACD;IAC7B,MAAME,MAAMH,QAAQI,EAAE,CAACC,YAAY;IACnC,IAAI,CAACpB,GAAGqB,UAAU,CAACH,MAAM;QACvBlB,GAAGsB,SAAS,CAACJ;IACf;IACA,MAAM,EAAEK,mBAAmB,EAAEC,iBAAiB,EAAEC,YAAY,EAAE,GAAGhB,QAAQ;IACzE,MAAMiB,mBAAmBH,oBAAoB,IAAI,CAACI,MAAM;IACxD,MAAM,CAACC,SAASC,OAAO,GAAG,IAAIC,OAAOC,WAAW,GAAGC,KAAK,CAAC;IACzD,MAAMC,gBAAgBL,QAAQM,OAAO,CAAC,OAAO;IAC7C,MAAMC,gBAAgBN,OAAOG,KAAK,CAAC,IAAI,CAAC,EAAE,CAACE,OAAO,CAAC,OAAO;IAC1D,IAAIE,UAAkB;IACtB,IAAIC;IACJ,IAAIC;IACF,CAAA,EAAED,OAAO,EAAED,OAAO,EAAEE,KAAK,EAAE,GAAG,MAAMnC,uBAAuB;QAC3Dc;QACAL;QACAE;QACAC;IACF,EAAC;IAED,MAAMwB,YAAY,CAAC,EAAEN,cAAc,CAAC,EAAEE,cAAc,CAAC;IAErD,MAAMK,OAAO1B,iBAAiBF,MAAMoB,MAAM,KAAKS,MAAM,GAAGC,KAAK;IAC7D,MAAMC,WAAW,CAAC,EAAEJ,UAAU,EAAEC,OAAO,CAAC,CAAC,EAAEA,KAAKN,OAAO,CAAC,OAAO,KAAK,CAAC,GAAG,GAAG,CAAC;IAE5E,MAAMU,WAAW,CAAC,EAAE1B,IAAI,CAAC,EAAEyB,SAAS,CAAC;IAErC,IAAIE,oBAAoBtC;IAExB,IAAI,IAAI,CAACuC,UAAU,EAAE;QACnBD,kBAAkBE,OAAO,GAAG;YAC1B,CAAC,IAAI,CAACD,UAAU,CAAC,EAAE,IAAI,CAACA,UAAU;QACpC;IACF;IAEA,IAAI,CAACR,OAAO;QACV,gCAAgC;QAChC,MAAMU,iBAAiBhD,GACpBiD,WAAW,CAAC/B,KACZgC,MAAM,CAAC,CAACtC,OAASA,KAAKuC,QAAQ,CAAC,UAC/BC,IAAI,GACJC,OAAO,IAAI,CAAC,EAAE;QAEjB,IAAIL,gBAAgB;YAClBH,oBAAoBS,KAAKC,KAAK,CAACvD,GAAGwD,YAAY,CAAC,CAAC,EAAEtC,IAAI,CAAC,EAAE8B,eAAe,CAAC,EAAE;YAE3E,IAAIH,kBAAkBY,OAAO,GAAG/B,iBAAiB+B,OAAO,EAAE;gBACxDZ,oBAAoBpB,aAAaoB;YACnC;QACF;QAEA,MAAMa,kBAAkB,MAAMlC,kBAAkBqB,mBAAmBnB;QACnE,MAAMiC,oBAAoB,MAAMnC,kBAAkBE,kBAAkBmB;QACpE,MAAMe,aAAa;QAEnB,IAAIF,iBAAiBG,QAAQ;YAC3BvB,QAAQ,CAAC,EAAEsB,WAAW,GAAG,EAAEF,iBAAiBhB,KAAK,MAAM,GAAG,CAAC;QAC7D;QACA,IAAIiB,mBAAmBE,QAAQ;YAC7BxB
,UAAU,CAAC,EAAEuB,WAAW,GAAG,EAAED,mBAAmBjB,KAAK,MAAM,GAAG,CAAC;QACjE;QAEA,IAAI,CAACJ,OAAOuB,UAAU,CAACxB,SAASwB,UAAU,CAAChD,oBAAoB;YAC7D,MAAM,EAAEiD,SAASC,0BAA0B,EAAE,GAAG,MAAM1D,QACpD;gBACEmC,MAAM;gBACNwB,MAAM;gBACNC,SAAS;gBACTC,SAAS;YACX,GACA;gBACEC,UAAU;oBACRC,QAAQC,IAAI,CAAC;gBACf;YACF;YAGF,IAAI,CAACN,4BAA4B;gBAC/BK,QAAQC,IAAI,CAAC;YACf;QACF;QAEA,eAAe;QACfrE,GAAGsE,aAAa,CAAC,CAAC,EAAE1B,SAAS,KAAK,CAAC,EAAEU,KAAKiB,SAAS,CAAC7C,kBAAkB,MAAM;IAC9E;IAEA,kBAAkB;IAClB1B,GAAGsE,aAAa,CACd,CAAC,EAAE1B,SAAS,GAAG,CAAC,EAChBpC,qBAAqB;QACnB6B,SAASA,WAAW,CAAC,mBAAmB,CAAC;QACzCD;QACAE,OAAOA,SAAS,CAAC,mBAAmB,CAAC;IACvC;IAGFlC,oBAAoB;QAAEoE,eAAezD,QAAQI,EAAE,CAACC,YAAY;IAAC;IAE7DL,QAAQ0D,MAAM,CAACC,IAAI,CAAC;QAAEC,KAAK,CAAC,qBAAqB,EAAE/B,SAAS,GAAG,CAAC;IAAC;AACnE,EAAC"}
package/dist/defaultSnapshot.d.ts.map
DELETED
@@ -1 +0,0 @@
{"version":3,"file":"defaultSnapshot.d.ts","sourceRoot":"","sources":["../src/defaultSnapshot.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,mBAAmB,EAAE,MAAM,iBAAiB,CAAA;AAE1D,eAAO,MAAM,sBAAsB,EAAE,mBAcpC,CAAA"}
package/dist/defaultSnapshot.js
DELETED
@@ -1,17 +0,0 @@
export const defaultDrizzleSnapshot = {
    id: '00000000-0000-0000-0000-000000000000',
    _meta: {
        columns: {},
        schemas: {},
        tables: {}
    },
    dialect: 'postgresql',
    enums: {},
    prevId: '00000000-0000-0000-0000-00000000000',
    schemas: {},
    sequences: {},
    tables: {},
    version: '7'
};

//# sourceMappingURL=defaultSnapshot.js.map
package/dist/defaultSnapshot.js.map
DELETED
@@ -1 +0,0 @@
{"version":3,"sources":["../src/defaultSnapshot.ts"],"sourcesContent":["import type { DrizzleSnapshotJSON } from 'drizzle-kit/api'\n\nexport const defaultDrizzleSnapshot: DrizzleSnapshotJSON = {\n id: '00000000-0000-0000-0000-000000000000',\n _meta: {\n columns: {},\n schemas: {},\n tables: {},\n },\n dialect: 'postgresql',\n enums: {},\n prevId: '00000000-0000-0000-0000-00000000000',\n schemas: {},\n sequences: {},\n tables: {},\n version: '7',\n}\n"],"names":["defaultDrizzleSnapshot","id","_meta","columns","schemas","tables","dialect","enums","prevId","sequences","version"],"mappings":"AAEA,OAAO,MAAMA,yBAA8C;IACzDC,IAAI;IACJC,OAAO;QACLC,SAAS,CAAC;QACVC,SAAS,CAAC;QACVC,QAAQ,CAAC;IACX;IACAC,SAAS;IACTC,OAAO,CAAC;IACRC,QAAQ;IACRJ,SAAS,CAAC;IACVK,WAAW,CAAC;IACZJ,QAAQ,CAAC;IACTK,SAAS;AACX,EAAC"}
package/dist/deleteWhere.d.ts
DELETED
package/dist/deleteWhere.d.ts.map
DELETED
@@ -1 +0,0 @@
{"version":3,"file":"deleteWhere.d.ts","sourceRoot":"","sources":["../src/deleteWhere.ts"],"names":[],"mappings":"AAEA,OAAO,KAAK,EAAE,WAAW,EAAE,MAAM,YAAY,CAAA;AAE7C,eAAO,MAAM,WAAW,EAAE,WAGzB,CAAA"}
package/dist/deleteWhere.js
DELETED
package/dist/deleteWhere.js.map
DELETED
@@ -1 +0,0 @@
{"version":3,"sources":["../src/deleteWhere.ts"],"sourcesContent":["import type { TransactionPg } from '@payloadcms/drizzle/types'\n\nimport type { DeleteWhere } from './types.js'\n\nexport const deleteWhere: DeleteWhere = async function deleteWhere({ db, tableName, where }) {\n const table = this.tables[tableName]\n await (db as TransactionPg).delete(table).where(where)\n}\n"],"names":["deleteWhere","db","tableName","where","table","tables","delete"],"mappings":"AAIA,OAAO,MAAMA,cAA2B,eAAeA,YAAY,EAAEC,EAAE,EAAEC,SAAS,EAAEC,KAAK,EAAE;IACzF,MAAMC,QAAQ,IAAI,CAACC,MAAM,CAACH,UAAU;IACpC,MAAM,AAACD,GAAqBK,MAAM,CAACF,OAAOD,KAAK,CAACA;AAClD,EAAC"}
package/dist/dropDatabase.d.ts
DELETED
package/dist/dropDatabase.d.ts.map
DELETED
@@ -1 +0,0 @@
{"version":3,"file":"dropDatabase.d.ts","sourceRoot":"","sources":["../src/dropDatabase.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,YAAY,EAAE,MAAM,YAAY,CAAA;AAE9C,eAAO,MAAM,YAAY,EAAE,YAM1B,CAAA"}
package/dist/dropDatabase.js
DELETED
@@ -1,9 +0,0 @@
export const dropDatabase = async function dropDatabase({ adapter }) {
    await adapter.execute({
        drizzle: adapter.drizzle,
        raw: `drop schema if exists ${this.schemaName || 'public'} cascade;
  create schema ${this.schemaName || 'public'};`
    });
};

//# sourceMappingURL=dropDatabase.js.map
package/dist/dropDatabase.js.map
DELETED
@@ -1 +0,0 @@
{"version":3,"sources":["../src/dropDatabase.ts"],"sourcesContent":["import type { DropDatabase } from './types.js'\n\nexport const dropDatabase: DropDatabase = async function dropDatabase({ adapter }) {\n await adapter.execute({\n drizzle: adapter.drizzle,\n raw: `drop schema if exists ${this.schemaName || 'public'} cascade;\n create schema ${this.schemaName || 'public'};`,\n })\n}\n"],"names":["dropDatabase","adapter","execute","drizzle","raw","schemaName"],"mappings":"AAEA,OAAO,MAAMA,eAA6B,eAAeA,aAAa,EAAEC,OAAO,EAAE;IAC/E,MAAMA,QAAQC,OAAO,CAAC;QACpBC,SAASF,QAAQE,OAAO;QACxBC,KAAK,CAAC,sBAAsB,EAAE,IAAI,CAACC,UAAU,IAAI,SAAS;kBAC5C,EAAE,IAAI,CAACA,UAAU,IAAI,SAAS,CAAC,CAAC;IAChD;AACF,EAAC"}
package/dist/execute.d.ts
DELETED
package/dist/execute.d.ts.map
DELETED
@@ -1 +0,0 @@
{"version":3,"file":"execute.d.ts","sourceRoot":"","sources":["../src/execute.ts"],"names":[],"mappings":"AAEA,OAAO,KAAK,EAAE,OAAO,EAAE,MAAM,YAAY,CAAA;AAEzC,eAAO,MAAM,OAAO,EAAE,OAAO,CAAC,GAAG,CAQhC,CAAA"}
package/dist/execute.js
DELETED
@@ -1,11 +0,0 @@
import { sql } from 'drizzle-orm';
export const execute = function execute({ db, drizzle, raw, sql: statement }) {
    const executeFrom = db ?? drizzle;
    if (raw) {
        return executeFrom.execute(sql.raw(raw));
    } else {
        return executeFrom.execute(sql`${statement}`);
    }
};

//# sourceMappingURL=execute.js.map
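The removed `execute` helper simply routed to drizzle's two execution styles: `sql.raw()` for a verbatim string and the `sql` tagged template for a parameterized statement. A hedged sketch of that distinction against a drizzle Postgres instance; `db`, the table, and the queries are illustrative:

```ts
// Illustrative only: `db` stands in for a drizzle node-postgres instance.
import { sql } from 'drizzle-orm'
import { drizzle } from 'drizzle-orm/node-postgres'
import { Pool } from 'pg'

const db = drizzle(new Pool({ connectionString: process.env.DATABASE_URI }))

export async function runExamples(): Promise<void> {
  // raw: executed verbatim, no parameter binding (the `raw` branch above)
  await db.execute(sql.raw('SELECT count(*) FROM posts'))

  // tagged template: interpolated values are bound as parameters (the `sql` branch above)
  const id = 42
  await db.execute(sql`SELECT * FROM posts WHERE id = ${id}`)
}
```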
package/dist/execute.js.map
DELETED
@@ -1 +0,0 @@
{"version":3,"sources":["../src/execute.ts"],"sourcesContent":["import { sql } from 'drizzle-orm'\n\nimport type { Execute } from './types.js'\n\nexport const execute: Execute<any> = function execute({ db, drizzle, raw, sql: statement }) {\n const executeFrom = db ?? drizzle\n\n if (raw) {\n return executeFrom.execute(sql.raw(raw))\n } else {\n return executeFrom.execute(sql`${statement}`)\n }\n}\n"],"names":["sql","execute","db","drizzle","raw","statement","executeFrom"],"mappings":"AAAA,SAASA,GAAG,QAAQ,cAAa;AAIjC,OAAO,MAAMC,UAAwB,SAASA,QAAQ,EAAEC,EAAE,EAAEC,OAAO,EAAEC,GAAG,EAAEJ,KAAKK,SAAS,EAAE;IACxF,MAAMC,cAAcJ,MAAMC;IAE1B,IAAIC,KAAK;QACP,OAAOE,YAAYL,OAAO,CAACD,IAAII,GAAG,CAACA;IACrC,OAAO;QACL,OAAOE,YAAYL,OAAO,CAACD,GAAG,CAAC,EAAEK,UAAU,CAAC;IAC9C;AACF,EAAC"}
package/dist/getMigrationTemplate.d.ts.map
DELETED
@@ -1 +0,0 @@
{"version":3,"file":"getMigrationTemplate.d.ts","sourceRoot":"","sources":["../src/getMigrationTemplate.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,qBAAqB,EAAE,MAAM,SAAS,CAAA;AAEpD,eAAO,MAAM,MAAM,SAAU,MAAM,WAIpB,CAAA;AAEf,eAAO,MAAM,oBAAoB,iCAI9B,qBAAqB,KAAG,MAS1B,CAAA"}
package/dist/getMigrationTemplate.js
DELETED
@@ -1,13 +0,0 @@
export const indent = (text)=>text.split('\n').map((line)=>` ${line}`).join('\n');
export const getMigrationTemplate = ({ downSQL, imports, upSQL })=>`import { MigrateUpArgs, MigrateDownArgs, sql } from '@payloadcms/db-postgres'
${imports ? `${imports}\n` : ''}
export async function up({ payload, req }: MigrateUpArgs): Promise<void> {
${indent(upSQL)}
}

export async function down({ payload, req }: MigrateDownArgs): Promise<void> {
${indent(downSQL)}
}
`;

//# sourceMappingURL=getMigrationTemplate.js.map
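Rendered through `getMigrationTemplate` above, a generated migration file looks roughly like the following; the `ALTER TABLE` statements are placeholders for whatever drizzle-kit's `generateMigration` emitted, wrapped by the `sqlExecute` prefix from `createMigration`:

```ts
// Example output of the removed template; the SQL bodies are placeholders.
import { MigrateUpArgs, MigrateDownArgs, sql } from '@payloadcms/db-postgres'

export async function up({ payload, req }: MigrateUpArgs): Promise<void> {
  await payload.db.drizzle.execute(sql`
   ALTER TABLE "posts" ADD COLUMN "subtitle" varchar;`)
}

export async function down({ payload, req }: MigrateDownArgs): Promise<void> {
  await payload.db.drizzle.execute(sql`
   ALTER TABLE "posts" DROP COLUMN "subtitle";`)
}
```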
package/dist/getMigrationTemplate.js.map
DELETED
@@ -1 +0,0 @@
{"version":3,"sources":["../src/getMigrationTemplate.ts"],"sourcesContent":["import type { MigrationTemplateArgs } from 'payload'\n\nexport const indent = (text: string) =>\n text\n .split('\\n')\n .map((line) => ` ${line}`)\n .join('\\n')\n\nexport const getMigrationTemplate = ({\n downSQL,\n imports,\n upSQL,\n}: MigrationTemplateArgs): string => `import { MigrateUpArgs, MigrateDownArgs, sql } from '@payloadcms/db-postgres'\n${imports ? `${imports}\\n` : ''}\nexport async function up({ payload, req }: MigrateUpArgs): Promise<void> {\n${indent(upSQL)}\n}\n\nexport async function down({ payload, req }: MigrateDownArgs): Promise<void> {\n${indent(downSQL)}\n}\n`\n"],"names":["indent","text","split","map","line","join","getMigrationTemplate","downSQL","imports","upSQL"],"mappings":"AAEA,OAAO,MAAMA,SAAS,CAACC,OACrBA,KACGC,KAAK,CAAC,MACNC,GAAG,CAAC,CAACC,OAAS,CAAC,EAAE,EAAEA,KAAK,CAAC,EACzBC,IAAI,CAAC,MAAK;AAEf,OAAO,MAAMC,uBAAuB,CAAC,EACnCC,OAAO,EACPC,OAAO,EACPC,KAAK,EACiB,GAAa,CAAC;AACtC,EAAED,UAAU,CAAC,EAAEA,QAAQ,EAAE,CAAC,GAAG,GAAG;;AAEhC,EAAER,OAAOS,OAAO;;;;AAIhB,EAAET,OAAOO,SAAS;;AAElB,CAAC,CAAA"}
package/dist/init.d.ts
DELETED
package/dist/init.d.ts.map
DELETED
@@ -1 +0,0 @@
{"version":3,"file":"init.d.ts","sourceRoot":"","sources":["../src/init.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,IAAI,EAA6B,MAAM,SAAS,CAAA;AAY9D,eAAO,MAAM,IAAI,EAAE,IAqGlB,CAAA"}