@payloadcms/db-postgres 3.0.0-canary.ef0a8d0 → 3.0.0-canary.f118ee4
This diff shows the contents of publicly available package versions as published to a supported public registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in that registry.
- package/dist/connect.d.ts.map +1 -1
- package/dist/connect.js +6 -0
- package/dist/connect.js.map +1 -1
- package/dist/index.d.ts +0 -1
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +15 -16
- package/dist/index.js.map +1 -1
- package/dist/predefinedMigrations/v2-v3/fetchAndResave/index.d.ts.map +1 -1
- package/dist/predefinedMigrations/v2-v3/fetchAndResave/index.js +0 -4
- package/dist/predefinedMigrations/v2-v3/fetchAndResave/index.js.map +1 -1
- package/dist/predefinedMigrations/v2-v3/index.d.ts.map +1 -1
- package/dist/predefinedMigrations/v2-v3/index.js +1 -7
- package/dist/predefinedMigrations/v2-v3/index.js.map +1 -1
- package/dist/predefinedMigrations/v2-v3/migrateRelationships.d.ts.map +1 -1
- package/dist/predefinedMigrations/v2-v3/migrateRelationships.js +0 -1
- package/dist/predefinedMigrations/v2-v3/migrateRelationships.js.map +1 -1
- package/dist/types.d.ts +26 -105
- package/dist/types.d.ts.map +1 -1
- package/dist/types.js.map +1 -1
- package/package.json +8 -8
- package/dist/countDistinct.d.ts +0 -3
- package/dist/countDistinct.d.ts.map +0 -1
- package/dist/countDistinct.js +0 -26
- package/dist/countDistinct.js.map +0 -1
- package/dist/createJSONQuery/convertPathToJSONTraversal.d.ts +0 -2
- package/dist/createJSONQuery/convertPathToJSONTraversal.d.ts.map +0 -1
- package/dist/createJSONQuery/convertPathToJSONTraversal.js +0 -14
- package/dist/createJSONQuery/convertPathToJSONTraversal.js.map +0 -1
- package/dist/createJSONQuery/formatJSONPathSegment.d.ts +0 -2
- package/dist/createJSONQuery/formatJSONPathSegment.d.ts.map +0 -1
- package/dist/createJSONQuery/formatJSONPathSegment.js +0 -5
- package/dist/createJSONQuery/formatJSONPathSegment.js.map +0 -1
- package/dist/createJSONQuery/index.d.ts +0 -10
- package/dist/createJSONQuery/index.d.ts.map +0 -1
- package/dist/createJSONQuery/index.js +0 -54
- package/dist/createJSONQuery/index.js.map +0 -1
- package/dist/createMigration.d.ts +0 -3
- package/dist/createMigration.d.ts.map +0 -1
- package/dist/createMigration.js +0 -80
- package/dist/createMigration.js.map +0 -1
- package/dist/defaultSnapshot.d.ts +0 -3
- package/dist/defaultSnapshot.d.ts.map +0 -1
- package/dist/defaultSnapshot.js +0 -16
- package/dist/defaultSnapshot.js.map +0 -1
- package/dist/deleteWhere.d.ts +0 -3
- package/dist/deleteWhere.d.ts.map +0 -1
- package/dist/deleteWhere.js +0 -7
- package/dist/deleteWhere.js.map +0 -1
- package/dist/dropDatabase.d.ts +0 -3
- package/dist/dropDatabase.d.ts.map +0 -1
- package/dist/dropDatabase.js +0 -9
- package/dist/dropDatabase.js.map +0 -1
- package/dist/execute.d.ts +0 -3
- package/dist/execute.d.ts.map +0 -1
- package/dist/execute.js +0 -13
- package/dist/execute.js.map +0 -1
- package/dist/generateDrizzleJSON.d.ts +0 -3
- package/dist/generateDrizzleJSON.d.ts.map +0 -1
- package/dist/generateDrizzleJSON.js +0 -8
- package/dist/generateDrizzleJSON.js.map +0 -1
- package/dist/generateMigration.d.ts +0 -3
- package/dist/generateMigration.d.ts.map +0 -1
- package/dist/generateMigration.js +0 -8
- package/dist/generateMigration.js.map +0 -1
- package/dist/getMigrationTemplate.d.ts +0 -3
- package/dist/getMigrationTemplate.d.ts.map +0 -1
- package/dist/getMigrationTemplate.js +0 -12
- package/dist/getMigrationTemplate.js.map +0 -1
- package/dist/init.d.ts +0 -3
- package/dist/init.d.ts.map +0 -1
- package/dist/init.js +0 -91
- package/dist/init.js.map +0 -1
- package/dist/insert.d.ts +0 -3
- package/dist/insert.d.ts.map +0 -1
- package/dist/insert.js +0 -13
- package/dist/insert.js.map +0 -1
- package/dist/requireDrizzleKit.d.ts +0 -3
- package/dist/requireDrizzleKit.d.ts.map +0 -1
- package/dist/requireDrizzleKit.js +0 -5
- package/dist/requireDrizzleKit.js.map +0 -1
- package/dist/schema/build.d.ts +0 -34
- package/dist/schema/build.d.ts.map +0 -1
- package/dist/schema/build.js +0 -369
- package/dist/schema/build.js.map +0 -1
- package/dist/schema/createIndex.d.ts +0 -12
- package/dist/schema/createIndex.d.ts.map +0 -1
- package/dist/schema/createIndex.js +0 -18
- package/dist/schema/createIndex.js.map +0 -1
- package/dist/schema/idToUUID.d.ts +0 -3
- package/dist/schema/idToUUID.d.ts.map +0 -1
- package/dist/schema/idToUUID.js +0 -11
- package/dist/schema/idToUUID.js.map +0 -1
- package/dist/schema/parentIDColumnMap.d.ts +0 -4
- package/dist/schema/parentIDColumnMap.d.ts.map +0 -1
- package/dist/schema/parentIDColumnMap.js +0 -9
- package/dist/schema/parentIDColumnMap.js.map +0 -1
- package/dist/schema/setColumnID.d.ts +0 -11
- package/dist/schema/setColumnID.d.ts.map +0 -1
- package/dist/schema/setColumnID.js +0 -24
- package/dist/schema/setColumnID.js.map +0 -1
- package/dist/schema/traverseFields.d.ts +0 -36
- package/dist/schema/traverseFields.d.ts.map +0 -1
- package/dist/schema/traverseFields.js +0 -612
- package/dist/schema/traverseFields.js.map +0 -1
package/dist/types.d.ts
CHANGED
@@ -1,120 +1,37 @@
-import type {
-import type {
-import type {
-import type {
-import type {
-import type { PgColumn, PgEnum, PgInsertOnConflictDoUpdateConfig, PgSchema, PgTableWithColumns } from 'drizzle-orm/pg-core';
-import type { PgTableFn } from 'drizzle-orm/pg-core/table';
-import type { Payload, PayloadRequest } from 'payload';
-import type { Pool, PoolConfig, QueryResult } from 'pg';
+import type { BasePostgresAdapter, GenericEnum, MigrateDownArgs, MigrateUpArgs, PostgresDB } from '@payloadcms/drizzle/postgres';
+import type { DrizzleAdapter } from '@payloadcms/drizzle/types';
+import type { DrizzleConfig } from 'drizzle-orm';
+import type { PgSchema, PgTableFn, PgTransactionConfig } from 'drizzle-orm/pg-core';
+import type { Pool, PoolConfig } from 'pg';
 export type Args = {
     idType?: 'serial' | 'uuid';
     localesSuffix?: string;
     logger?: DrizzleConfig['logger'];
     migrationDir?: string;
     pool: PoolConfig;
+    prodMigrations?: {
+        down: (args: MigrateDownArgs) => Promise<void>;
+        name: string;
+        up: (args: MigrateUpArgs) => Promise<void>;
+    }[];
     push?: boolean;
     relationshipsSuffix?: string;
-    schemaName?: string;
-    versionsSuffix?: string;
-};
-export type GenericColumn = PgColumn<ColumnBaseConfig<ColumnDataType, string>, Record<string, unknown>>;
-export type GenericColumns = {
-    [x: string]: GenericColumn;
-};
-export type GenericTable = PgTableWithColumns<{
-    columns: GenericColumns;
-    dialect: string;
-    name: string;
-    schema: string;
-}>;
-export type GenericEnum = PgEnum<[string, ...string[]]>;
-export type GenericRelation = Relations<string, Record<string, Relation<string>>>;
-export type PostgresDB = NodePgDatabase<Record<string, unknown>>;
-export type CountDistinct = (args: {
-    db: PostgresDB | TransactionPg;
-    joins: BuildQueryJoinAliases;
-    tableName: string;
-    where: SQL;
-}) => Promise<number>;
-export type DeleteWhere = (args: {
-    db: PostgresDB | TransactionPg;
-    tableName: string;
-    where: SQL;
-}) => Promise<void>;
-export type DropDatabase = (args: {
-    adapter: PostgresAdapter;
-}) => Promise<void>;
-export type Execute<T> = (args: {
-    db?: PostgresDB | TransactionPg;
-    drizzle?: PostgresDB;
-    raw?: string;
-    sql?: SQL<unknown>;
-}) => Promise<QueryResult<Record<string, T>>>;
-export type GenerateDrizzleJSON = (args: {
-    schema: Record<string, GenericRelation | GenericTable>;
-}) => PgSchema;
-export type Insert = (args: {
-    db: PostgresDB | TransactionPg;
-    onConflictDoUpdate?: PgInsertOnConflictDoUpdateConfig<any>;
-    tableName: string;
-    values: Record<string, unknown> | Record<string, unknown>[];
-}) => Promise<Record<string, unknown>[]>;
-type PostgresDrizzleAdapter = Omit<DrizzleAdapter, 'countDistinct' | 'deleteWhere' | 'drizzle' | 'dropDatabase' | 'execute' | 'insert' | 'operators' | 'relations'>;
-export type PostgresAdapter = PostgresDrizzleAdapter & {
-    countDistinct: CountDistinct;
-    defaultDrizzleSnapshot: DrizzleSnapshotJSON;
-    deleteWhere: DeleteWhere;
-    drizzle: PostgresDB;
-    dropDatabase: DropDatabase;
-    enums: Record<string, GenericEnum>;
-    execute: Execute<unknown>;
     /**
-     *
-     *
+     * The schema name to use for the database
+     * @experimental This only works when there are not other tables or enums of the same name in the database under a different schema. Awaiting fix from Drizzle.
     */
-
-
-    idType: Args['idType'];
-    initializing: Promise<void>;
-    insert: Insert;
-    localesSuffix?: string;
-    logger: DrizzleConfig['logger'];
-    operators: Operators;
-    pgSchema?: {
-        table: PgTableFn;
-    } | PgSchema;
-    pool: Pool;
-    poolOptions: Args['pool'];
-    push: boolean;
-    rejectInitializing: () => void;
-    relations: Record<string, GenericRelation>;
-    relationshipsSuffix?: string;
-    resolveInitializing: () => void;
-    schema: Record<string, GenericEnum | GenericRelation | GenericTable>;
-    schemaName?: Args['schemaName'];
-    sessions: {
-        [id: string]: {
-            db: PostgresDB | TransactionPg;
-            reject: () => Promise<void>;
-            resolve: () => Promise<void>;
-        };
-    };
-    tableNameMap: Map<string, string>;
-    tables: Record<string, GenericTable>;
+    schemaName?: string;
+    transactionOptions?: PgTransactionConfig | false;
     versionsSuffix?: string;
 };
-export type
-
-
-
-};
-export type MigrateDownArgs = {
-    payload: Payload;
-    req?: Partial<PayloadRequest>;
-};
+export type PostgresAdapter = {
+    pool: Pool;
+    poolOptions: PoolConfig;
+} & BasePostgresAdapter;
 declare module 'payload' {
     interface DatabaseAdapter extends Omit<Args, 'idType' | 'logger' | 'migrationDir' | 'pool'>, DrizzleAdapter {
+        beginTransaction: (options?: PgTransactionConfig) => Promise<null | number | string>;
+        drizzle: PostgresDB;
         enums: Record<string, GenericEnum>;
         /**
          * An object keyed on each table, with a key value pair where the constraint name is the key, followed by the dot-notation field name
@@ -130,15 +47,19 @@ declare module 'payload' {
         } | PgSchema;
         pool: Pool;
         poolOptions: Args['pool'];
+        prodMigrations?: {
+            down: (args: MigrateDownArgs) => Promise<void>;
+            name: string;
+            up: (args: MigrateUpArgs) => Promise<void>;
+        }[];
         push: boolean;
         rejectInitializing: () => void;
        relationshipsSuffix?: string;
        resolveInitializing: () => void;
-        schema: Record<string,
+        schema: Record<string, unknown>;
        schemaName?: Args['schemaName'];
        tableNameMap: Map<string, string>;
        versionsSuffix?: string;
    }
 }
-export {};
 //# sourceMappingURL=types.d.ts.map
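For orientation, here is a minimal sketch of how the options added to Args in this release (prodMigrations, transactionOptions, and the now-documented schemaName) could be passed to the adapter. It assumes the package's usual postgresAdapter factory export; the connection string, migration name, and migration bodies are placeholders, not part of this diff.

import { postgresAdapter } from '@payloadcms/db-postgres'

export const db = postgresAdapter({
  pool: {
    // pg PoolConfig, unchanged by this release (placeholder connection string)
    connectionString: process.env.DATABASE_URI,
  },
  // New in Args: migrations the adapter can run automatically in production, in order
  prodMigrations: [
    {
      name: '20240726_initial',
      up: async ({ payload }) => {
        payload.logger.info('running initial migration')
      },
      down: async ({ payload }) => {
        payload.logger.info('reverting initial migration')
      },
    },
  ],
  // New in Args: drizzle transaction config, or false to disable transactions
  transactionOptions: { isolationLevel: 'read committed' },
  // Still marked @experimental in the JSDoc above
  schemaName: 'payload',
})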
package/dist/types.d.ts.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"types.d.ts","sourceRoot":"","sources":["../src/types.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,
+
{"version":3,"file":"types.d.ts","sourceRoot":"","sources":["../src/types.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EACV,mBAAmB,EACnB,WAAW,EACX,eAAe,EACf,aAAa,EACb,UAAU,EACX,MAAM,8BAA8B,CAAA;AACrC,OAAO,KAAK,EAAE,cAAc,EAAE,MAAM,2BAA2B,CAAA;AAC/D,OAAO,KAAK,EAAE,aAAa,EAAE,MAAM,aAAa,CAAA;AAChD,OAAO,KAAK,EAAE,QAAQ,EAAE,SAAS,EAAE,mBAAmB,EAAE,MAAM,qBAAqB,CAAA;AACnF,OAAO,KAAK,EAAE,IAAI,EAAE,UAAU,EAAE,MAAM,IAAI,CAAA;AAE1C,MAAM,MAAM,IAAI,GAAG;IACjB,MAAM,CAAC,EAAE,QAAQ,GAAG,MAAM,CAAA;IAC1B,aAAa,CAAC,EAAE,MAAM,CAAA;IACtB,MAAM,CAAC,EAAE,aAAa,CAAC,QAAQ,CAAC,CAAA;IAChC,YAAY,CAAC,EAAE,MAAM,CAAA;IACrB,IAAI,EAAE,UAAU,CAAA;IAChB,cAAc,CAAC,EAAE;QACf,IAAI,EAAE,CAAC,IAAI,EAAE,eAAe,KAAK,OAAO,CAAC,IAAI,CAAC,CAAA;QAC9C,IAAI,EAAE,MAAM,CAAA;QACZ,EAAE,EAAE,CAAC,IAAI,EAAE,aAAa,KAAK,OAAO,CAAC,IAAI,CAAC,CAAA;KAC3C,EAAE,CAAA;IACH,IAAI,CAAC,EAAE,OAAO,CAAA;IACd,mBAAmB,CAAC,EAAE,MAAM,CAAA;IAC5B;;;OAGG;IACH,UAAU,CAAC,EAAE,MAAM,CAAA;IACnB,kBAAkB,CAAC,EAAE,mBAAmB,GAAG,KAAK,CAAA;IAChD,cAAc,CAAC,EAAE,MAAM,CAAA;CACxB,CAAA;AAED,MAAM,MAAM,eAAe,GAAG;IAC5B,IAAI,EAAE,IAAI,CAAA;IACV,WAAW,EAAE,UAAU,CAAA;CACxB,GAAG,mBAAmB,CAAA;AAEvB,OAAO,QAAQ,SAAS,CAAC;IACvB,UAAiB,eACf,SAAQ,IAAI,CAAC,IAAI,EAAE,QAAQ,GAAG,QAAQ,GAAG,cAAc,GAAG,MAAM,CAAC,EAC/D,cAAc;QAChB,gBAAgB,EAAE,CAAC,OAAO,CAAC,EAAE,mBAAmB,KAAK,OAAO,CAAC,IAAI,GAAG,MAAM,GAAG,MAAM,CAAC,CAAA;QACpF,OAAO,EAAE,UAAU,CAAA;QACnB,KAAK,EAAE,MAAM,CAAC,MAAM,EAAE,WAAW,CAAC,CAAA;QAClC;;;WAGG;QACH,gBAAgB,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC,CAAA;QACxD,MAAM,EAAE,IAAI,CAAC,QAAQ,CAAC,CAAA;QACtB,YAAY,EAAE,OAAO,CAAC,IAAI,CAAC,CAAA;QAC3B,aAAa,CAAC,EAAE,MAAM,CAAA;QACtB,MAAM,EAAE,aAAa,CAAC,QAAQ,CAAC,CAAA;QAC/B,QAAQ,CAAC,EAAE;YAAE,KAAK,EAAE,SAAS,CAAA;SAAE,GAAG,QAAQ,CAAA;QAC1C,IAAI,EAAE,IAAI,CAAA;QACV,WAAW,EAAE,IAAI,CAAC,MAAM,CAAC,CAAA;QACzB,cAAc,CAAC,EAAE;YACf,IAAI,EAAE,CAAC,IAAI,EAAE,eAAe,KAAK,OAAO,CAAC,IAAI,CAAC,CAAA;YAC9C,IAAI,EAAE,MAAM,CAAA;YACZ,EAAE,EAAE,CAAC,IAAI,EAAE,aAAa,KAAK,OAAO,CAAC,IAAI,CAAC,CAAA;SAC3C,EAAE,CAAA;QACH,IAAI,EAAE,OAAO,CAAA;QACb,kBAAkB,EAAE,MAAM,IAAI,CAAA;QAC9B,mBAAmB,CAAC,EAAE,MAAM,CAAA;QAC5B,mBAAmB,EAAE,MAAM,IAAI,CAAA;QAC/B,MAAM,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,CAAA;QAC/B,UAAU,CAAC,EAAE,IAAI,CAAC,YAAY,CAAC,CAAA;QAC/B,YAAY,EAAE,GAAG,CAAC,MAAM,EAAE,MAAM,CAAC,CAAA;QACjC,cAAc,CAAC,EAAE,MAAM,CAAA;KACxB;CACF"}
package/dist/types.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"sources":["../src/types.ts"],"sourcesContent":["import type {
+
{"version":3,"sources":["../src/types.ts"],"sourcesContent":["import type {\n BasePostgresAdapter,\n GenericEnum,\n MigrateDownArgs,\n MigrateUpArgs,\n PostgresDB,\n} from '@payloadcms/drizzle/postgres'\nimport type { DrizzleAdapter } from '@payloadcms/drizzle/types'\nimport type { DrizzleConfig } from 'drizzle-orm'\nimport type { PgSchema, PgTableFn, PgTransactionConfig } from 'drizzle-orm/pg-core'\nimport type { Pool, PoolConfig } from 'pg'\n\nexport type Args = {\n idType?: 'serial' | 'uuid'\n localesSuffix?: string\n logger?: DrizzleConfig['logger']\n migrationDir?: string\n pool: PoolConfig\n prodMigrations?: {\n down: (args: MigrateDownArgs) => Promise<void>\n name: string\n up: (args: MigrateUpArgs) => Promise<void>\n }[]\n push?: boolean\n relationshipsSuffix?: string\n /**\n * The schema name to use for the database\n * @experimental This only works when there are not other tables or enums of the same name in the database under a different schema. Awaiting fix from Drizzle.\n */\n schemaName?: string\n transactionOptions?: PgTransactionConfig | false\n versionsSuffix?: string\n}\n\nexport type PostgresAdapter = {\n pool: Pool\n poolOptions: PoolConfig\n} & BasePostgresAdapter\n\ndeclare module 'payload' {\n export interface DatabaseAdapter\n extends Omit<Args, 'idType' | 'logger' | 'migrationDir' | 'pool'>,\n DrizzleAdapter {\n beginTransaction: (options?: PgTransactionConfig) => Promise<null | number | string>\n drizzle: PostgresDB\n enums: Record<string, GenericEnum>\n /**\n * An object keyed on each table, with a key value pair where the constraint name is the key, followed by the dot-notation field name\n * Used for returning properly formed errors from unique fields\n */\n fieldConstraints: Record<string, Record<string, string>>\n idType: Args['idType']\n initializing: Promise<void>\n localesSuffix?: string\n logger: DrizzleConfig['logger']\n pgSchema?: { table: PgTableFn } | PgSchema\n pool: Pool\n poolOptions: Args['pool']\n prodMigrations?: {\n down: (args: MigrateDownArgs) => Promise<void>\n name: string\n up: (args: MigrateUpArgs) => Promise<void>\n }[]\n push: boolean\n rejectInitializing: () => void\n relationshipsSuffix?: string\n resolveInitializing: () => void\n schema: Record<string, unknown>\n schemaName?: Args['schemaName']\n tableNameMap: Map<string, string>\n versionsSuffix?: string\n }\n}\n"],"names":[],"mappings":"AAkCA,WAGuB"}
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@payloadcms/db-postgres",
-  "version": "3.0.0-canary.
+  "version": "3.0.0-canary.f118ee4",
   "description": "The officially supported Postgres database adapter for Payload",
   "homepage": "https://payloadcms.com",
   "repository": {
@@ -36,24 +36,24 @@
   ],
   "dependencies": {
     "console-table-printer": "2.11.2",
-    "drizzle-kit": "0.
-    "drizzle-orm": "0.
+    "drizzle-kit": "0.23.2-df9e596",
+    "drizzle-orm": "0.32.1",
     "pg": "8.11.3",
     "prompts": "2.4.2",
     "to-snake-case": "1.0.0",
     "uuid": "10.0.0",
-    "@payloadcms/drizzle": "3.0.0-canary.
+    "@payloadcms/drizzle": "3.0.0-canary.f118ee4"
   },
   "devDependencies": {
     "@hyrious/esbuild-plugin-commonjs": "^0.2.4",
     "@types/pg": "8.10.2",
     "@types/to-snake-case": "1.0.0",
     "esbuild": "0.23.0",
-    "
-    "
+    "@payloadcms/eslint-config": "3.0.0-beta.59",
+    "payload": "3.0.0-canary.f118ee4"
   },
   "peerDependencies": {
-    "payload": "3.0.0-canary.
+    "payload": "3.0.0-canary.f118ee4"
   },
   "scripts": {
     "build": "rimraf .dist && rimraf tsconfig.tsbuildinfo && pnpm build:types && pnpm build:swc && pnpm build:esbuild && pnpm renamePredefinedMigrations",
@@ -61,6 +61,6 @@
     "build:swc": "swc ./src -d ./dist --config-file .swcrc --strip-leading-paths",
     "build:types": "tsc --emitDeclarationOnly --outDir dist",
     "clean": "rimraf {dist,*.tsbuildinfo}",
-    "renamePredefinedMigrations": "
+    "renamePredefinedMigrations": "node --no-deprecation --import @swc-node/register/esm-register ./scripts/renamePredefinedMigrations.ts"
   }
 }
package/dist/countDistinct.d.ts
DELETED
package/dist/countDistinct.d.ts.map
DELETED
@@ -1 +0,0 @@
-
{"version":3,"file":"countDistinct.d.ts","sourceRoot":"","sources":["../src/countDistinct.ts"],"names":[],"mappings":"AAKA,OAAO,KAAK,EAAE,aAAa,EAAmB,MAAM,YAAY,CAAA;AAEhE,eAAO,MAAM,aAAa,EAAE,aA2B3B,CAAA"}
package/dist/countDistinct.js
DELETED
@@ -1,26 +0,0 @@
-import { chainMethods } from '@payloadcms/drizzle';
-import { sql } from 'drizzle-orm';
-export const countDistinct = async function countDistinct({ db, joins, tableName, where }) {
-    const chainedMethods = [];
-    joins.forEach(({ condition, table })=>{
-        chainedMethods.push({
-            args: [
-                table,
-                condition
-            ],
-            method: 'leftJoin'
-        });
-    });
-    const countResult = await chainMethods({
-        methods: chainedMethods,
-        query: db.select({
-            // @ts-expect-error generic string is not matching union type from drizzle
-            count: sql`count
-        (DISTINCT ${this.tables[tableName].id})`
-        }).from(this.tables[tableName])// @ts-expect-error where is picking up libsql types
-        .where(where)
-    });
-    return Number(countResult[0].count);
-};
-
-//# sourceMappingURL=countDistinct.js.map
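Together with the new imports in types.d.ts above, the deleted per-adapter helpers appear to have been consolidated into @payloadcms/drizzle; the query they build is unchanged: any requested left joins followed by a count(DISTINCT id) on the target table. A standalone sketch of that query shape, using a hypothetical posts table and connection string rather than anything shipped by this package:

import { sql } from 'drizzle-orm'
import { drizzle } from 'drizzle-orm/node-postgres'
import { pgTable, serial, text } from 'drizzle-orm/pg-core'
import { Pool } from 'pg'

// Hypothetical table; the real adapter resolves tables through this.tables[tableName].
const posts = pgTable('posts', {
  id: serial('id').primaryKey(),
  title: text('title'),
})

const db = drizzle(new Pool({ connectionString: process.env.DATABASE_URI }))

// Roughly what countDistinct({ db, joins: [], tableName: 'posts', where }) computed.
export const countDistinctPosts = async (): Promise<number> => {
  const result = await db
    .select({ count: sql<string>`count(DISTINCT ${posts.id})` })
    .from(posts)
    .where(sql`${posts.title} is not null`)
  return Number(result[0].count)
}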
package/dist/countDistinct.js.map
DELETED
@@ -1 +0,0 @@
-
{"version":3,"sources":["../src/countDistinct.ts"],"sourcesContent":["import type { ChainedMethods, TransactionPg } from '@payloadcms/drizzle/types'\n\nimport { chainMethods } from '@payloadcms/drizzle'\nimport { sql } from 'drizzle-orm'\n\nimport type { CountDistinct, PostgresAdapter } from './types.js'\n\nexport const countDistinct: CountDistinct = async function countDistinct(\n this: PostgresAdapter,\n { db, joins, tableName, where },\n) {\n const chainedMethods: ChainedMethods = []\n\n joins.forEach(({ condition, table }) => {\n chainedMethods.push({\n args: [table, condition],\n method: 'leftJoin',\n })\n })\n\n const countResult = await chainMethods({\n methods: chainedMethods,\n query: (db as TransactionPg)\n .select({\n // @ts-expect-error generic string is not matching union type from drizzle\n count: sql<string>`count\n (DISTINCT ${this.tables[tableName].id})`,\n })\n .from(this.tables[tableName])\n // @ts-expect-error where is picking up libsql types\n .where(where),\n })\n\n return Number(countResult[0].count)\n}\n"],"names":["chainMethods","sql","countDistinct","db","joins","tableName","where","chainedMethods","forEach","condition","table","push","args","method","countResult","methods","query","select","count","tables","id","from","Number"],"mappings":"AAEA,SAASA,YAAY,QAAQ,sBAAqB;AAClD,SAASC,GAAG,QAAQ,cAAa;AAIjC,OAAO,MAAMC,gBAA+B,eAAeA,cAEzD,EAAEC,EAAE,EAAEC,KAAK,EAAEC,SAAS,EAAEC,KAAK,EAAE;IAE/B,MAAMC,iBAAiC,EAAE;IAEzCH,MAAMI,OAAO,CAAC,CAAC,EAAEC,SAAS,EAAEC,KAAK,EAAE;QACjCH,eAAeI,IAAI,CAAC;YAClBC,MAAM;gBAACF;gBAAOD;aAAU;YACxBI,QAAQ;QACV;IACF;IAEA,MAAMC,cAAc,MAAMd,aAAa;QACrCe,SAASR;QACTS,OAAO,AAACb,GACLc,MAAM,CAAC;YACN,0EAA0E;YAC1EC,OAAOjB,GAAW,CAAC;sBACL,EAAE,IAAI,CAACkB,MAAM,CAACd,UAAU,CAACe,EAAE,CAAC,CAAC,CAAC;QAC9C,GACCC,IAAI,CAAC,IAAI,CAACF,MAAM,CAACd,UAAU,CAC5B,oDAAoD;SACnDC,KAAK,CAACA;IACX;IAEA,OAAOgB,OAAOR,WAAW,CAAC,EAAE,CAACI,KAAK;AACpC,EAAC"}
package/dist/createJSONQuery/convertPathToJSONTraversal.d.ts.map
DELETED
@@ -1 +0,0 @@
-
{"version":3,"file":"convertPathToJSONTraversal.d.ts","sourceRoot":"","sources":["../../src/createJSONQuery/convertPathToJSONTraversal.ts"],"names":[],"mappings":"AAEA,eAAO,MAAM,0BAA0B,qBAAsB,MAAM,EAAE,WAUpE,CAAA"}
package/dist/createJSONQuery/convertPathToJSONTraversal.js
DELETED
@@ -1,14 +0,0 @@
-import { formatJSONPathSegment } from './formatJSONPathSegment.js';
-export const convertPathToJSONTraversal = (incomingSegments)=>{
-    const segments = [
-        ...incomingSegments
-    ];
-    segments.shift();
-    return segments.reduce((res, segment, i)=>{
-        const formattedSegment = formatJSONPathSegment(segment);
-        if (i + 1 === segments.length) return `${res}->>${formattedSegment}`;
-        return `${res}->${formattedSegment}`;
-    }, '');
-};
-
-//# sourceMappingURL=convertPathToJSONTraversal.js.map
package/dist/createJSONQuery/convertPathToJSONTraversal.js.map
DELETED
@@ -1 +0,0 @@
-
{"version":3,"sources":["../../src/createJSONQuery/convertPathToJSONTraversal.ts"],"sourcesContent":["import { formatJSONPathSegment } from './formatJSONPathSegment.js'\n\nexport const convertPathToJSONTraversal = (incomingSegments: string[]) => {\n const segments = [...incomingSegments]\n segments.shift()\n\n return segments.reduce((res, segment, i) => {\n const formattedSegment = formatJSONPathSegment(segment)\n\n if (i + 1 === segments.length) return `${res}->>${formattedSegment}`\n return `${res}->${formattedSegment}`\n }, '')\n}\n"],"names":["formatJSONPathSegment","convertPathToJSONTraversal","incomingSegments","segments","shift","reduce","res","segment","i","formattedSegment","length"],"mappings":"AAAA,SAASA,qBAAqB,QAAQ,6BAA4B;AAElE,OAAO,MAAMC,6BAA6B,CAACC;IACzC,MAAMC,WAAW;WAAID;KAAiB;IACtCC,SAASC,KAAK;IAEd,OAAOD,SAASE,MAAM,CAAC,CAACC,KAAKC,SAASC;QACpC,MAAMC,mBAAmBT,sBAAsBO;QAE/C,IAAIC,IAAI,MAAML,SAASO,MAAM,EAAE,OAAO,CAAC,EAAEJ,IAAI,GAAG,EAAEG,iBAAiB,CAAC;QACpE,OAAO,CAAC,EAAEH,IAAI,EAAE,EAAEG,iBAAiB,CAAC;IACtC,GAAG;AACL,EAAC"}
package/dist/createJSONQuery/formatJSONPathSegment.d.ts.map
DELETED
@@ -1 +0,0 @@
-
{"version":3,"file":"formatJSONPathSegment.d.ts","sourceRoot":"","sources":["../../src/createJSONQuery/formatJSONPathSegment.ts"],"names":[],"mappings":"AAAA,eAAO,MAAM,qBAAqB,YAAa,MAAM,WAEpD,CAAA"}
package/dist/createJSONQuery/formatJSONPathSegment.js.map
DELETED
@@ -1 +0,0 @@
-
{"version":3,"sources":["../../src/createJSONQuery/formatJSONPathSegment.ts"],"sourcesContent":["export const formatJSONPathSegment = (segment: string) => {\n return Number.isNaN(parseInt(segment)) ? `'${segment}'` : segment\n}\n"],"names":["formatJSONPathSegment","segment","Number","isNaN","parseInt"],"mappings":"AAAA,OAAO,MAAMA,wBAAwB,CAACC;IACpC,OAAOC,OAAOC,KAAK,CAACC,SAASH,YAAY,CAAC,CAAC,EAAEA,QAAQ,CAAC,CAAC,GAAGA;AAC5D,EAAC"}
package/dist/createJSONQuery/index.d.ts
DELETED
@@ -1,10 +0,0 @@
-type Args = {
-    operator: string;
-    pathSegments: string[];
-    treatAsArray?: string[];
-    treatRootAsArray?: boolean;
-    value: unknown;
-};
-export declare const createJSONQuery: ({ operator, pathSegments, treatAsArray, treatRootAsArray, value, }: Args) => string;
-export {};
-//# sourceMappingURL=index.d.ts.map
package/dist/createJSONQuery/index.d.ts.map
DELETED
@@ -1 +0,0 @@
-
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/createJSONQuery/index.ts"],"names":[],"mappings":"AAmDA,KAAK,IAAI,GAAG;IACV,QAAQ,EAAE,MAAM,CAAA;IAChB,YAAY,EAAE,MAAM,EAAE,CAAA;IACtB,YAAY,CAAC,EAAE,MAAM,EAAE,CAAA;IACvB,gBAAgB,CAAC,EAAE,OAAO,CAAA;IAC1B,KAAK,EAAE,OAAO,CAAA;CACf,CAAA;AAED,eAAO,MAAM,eAAe,uEAMzB,IAAI,KAAG,MAqBT,CAAA"}
package/dist/createJSONQuery/index.js
DELETED
@@ -1,54 +0,0 @@
-import { convertPathToJSONTraversal } from './convertPathToJSONTraversal.js';
-import { formatJSONPathSegment } from './formatJSONPathSegment.js';
-const operatorMap = {
-    contains: '~*',
-    equals: '=',
-    like: '~*'
-};
-const fromArray = ({ isRoot, operator, pathSegments, treatAsArray, value })=>{
-    const newPathSegments = pathSegments.slice(isRoot ? 1 : 2);
-    const alias = `${pathSegments[isRoot ? 0 : 1]}_alias_${newPathSegments.length}`;
-    newPathSegments.unshift(alias);
-    const arrayElements = isRoot ? pathSegments[0] : `${pathSegments[0]} -> ${formatJSONPathSegment(pathSegments[1])}`;
-    return `EXISTS (
-    SELECT 1
-    FROM jsonb_array_elements(${arrayElements}) AS ${alias}
-    WHERE ${createJSONQuery({
-        operator,
-        pathSegments: newPathSegments,
-        treatAsArray,
-        value
-    })}
-  )`;
-};
-const createConstraint = ({ operator, pathSegments, value })=>{
-    const jsonQuery = convertPathToJSONTraversal(pathSegments);
-    return `${pathSegments[0]}${jsonQuery} ${operatorMap[operator]} '${value}'`;
-};
-export const createJSONQuery = ({ operator, pathSegments, treatAsArray, treatRootAsArray, value })=>{
-    if (treatRootAsArray) {
-        return fromArray({
-            isRoot: true,
-            operator,
-            pathSegments,
-            treatAsArray,
-            value
-        });
-    }
-    if (treatAsArray.includes(pathSegments[1])) {
-        return fromArray({
-            operator,
-            pathSegments,
-            treatAsArray,
-            value
-        });
-    }
-    return createConstraint({
-        operator,
-        pathSegments,
-        treatAsArray,
-        value
-    });
-};
-
-//# sourceMappingURL=index.js.map
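As a worked example of what these removed helpers produced for a non-array path: each intermediate segment becomes a -> step, the final segment a ->> step, and the result is compared with the mapped operator; paths flagged in treatAsArray are instead wrapped in an EXISTS (... jsonb_array_elements(...)) subquery, as fromArray shows. Below is a condensed, self-contained sketch of the path translation; it mirrors convertPathToJSONTraversal and createConstraint above and is not an export of this package.

// Intermediate segments use ->, the last segment uses ->>; numeric segments stay unquoted.
const toJSONTraversal = (segments: string[]): string => {
  const [column, ...path] = segments
  return (
    column +
    path
      .map((segment, i) => {
        const formatted = Number.isNaN(parseInt(segment)) ? `'${segment}'` : segment
        return `${i === path.length - 1 ? '->>' : '->'}${formatted}`
      })
      .join('')
  )
}

// Prints: data->'meta'->>'title' = 'hello'
console.log(`${toJSONTraversal(['data', 'meta', 'title'])} = 'hello'`)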
package/dist/createJSONQuery/index.js.map
DELETED
@@ -1 +0,0 @@
-
{"version":3,"sources":["../../src/createJSONQuery/index.ts"],"sourcesContent":["import { convertPathToJSONTraversal } from './convertPathToJSONTraversal.js'\nimport { formatJSONPathSegment } from './formatJSONPathSegment.js'\n\nconst operatorMap = {\n contains: '~*',\n equals: '=',\n like: '~*',\n}\n\ntype FromArrayArgs = {\n isRoot?: true\n operator: string\n pathSegments: string[]\n treatAsArray?: string[]\n value: unknown\n}\n\nconst fromArray = ({ isRoot, operator, pathSegments, treatAsArray, value }: FromArrayArgs) => {\n const newPathSegments = pathSegments.slice(isRoot ? 1 : 2)\n const alias = `${pathSegments[isRoot ? 0 : 1]}_alias_${newPathSegments.length}`\n\n newPathSegments.unshift(alias)\n\n const arrayElements = isRoot\n ? pathSegments[0]\n : `${pathSegments[0]} -> ${formatJSONPathSegment(pathSegments[1])}`\n\n return `EXISTS (\n SELECT 1\n FROM jsonb_array_elements(${arrayElements}) AS ${alias}\n WHERE ${createJSONQuery({\n operator,\n pathSegments: newPathSegments,\n treatAsArray,\n value,\n })}\n )`\n}\n\ntype CreateConstraintArgs = {\n operator: string\n pathSegments: string[]\n treatAsArray?: string[]\n value: unknown\n}\n\nconst createConstraint = ({ operator, pathSegments, value }: CreateConstraintArgs): string => {\n const jsonQuery = convertPathToJSONTraversal(pathSegments)\n return `${pathSegments[0]}${jsonQuery} ${operatorMap[operator]} '${value}'`\n}\n\ntype Args = {\n operator: string\n pathSegments: string[]\n treatAsArray?: string[]\n treatRootAsArray?: boolean\n value: unknown\n}\n\nexport const createJSONQuery = ({\n operator,\n pathSegments,\n treatAsArray,\n treatRootAsArray,\n value,\n}: Args): string => {\n if (treatRootAsArray) {\n return fromArray({\n isRoot: true,\n operator,\n pathSegments,\n treatAsArray,\n value,\n })\n }\n\n if (treatAsArray.includes(pathSegments[1])) {\n return fromArray({\n operator,\n pathSegments,\n treatAsArray,\n value,\n })\n }\n\n return createConstraint({ operator, pathSegments, treatAsArray, value })\n}\n"],"names":["convertPathToJSONTraversal","formatJSONPathSegment","operatorMap","contains","equals","like","fromArray","isRoot","operator","pathSegments","treatAsArray","value","newPathSegments","slice","alias","length","unshift","arrayElements","createJSONQuery","createConstraint","jsonQuery","treatRootAsArray","includes"],"mappings":"AAAA,SAASA,0BAA0B,QAAQ,kCAAiC;AAC5E,SAASC,qBAAqB,QAAQ,6BAA4B;AAElE,MAAMC,cAAc;IAClBC,UAAU;IACVC,QAAQ;IACRC,MAAM;AACR;AAUA,MAAMC,YAAY,CAAC,EAAEC,MAAM,EAAEC,QAAQ,EAAEC,YAAY,EAAEC,YAAY,EAAEC,KAAK,EAAiB;IACvF,MAAMC,kBAAkBH,aAAaI,KAAK,CAACN,SAAS,IAAI;IACxD,MAAMO,QAAQ,CAAC,EAAEL,YAAY,CAACF,SAAS,IAAI,EAAE,CAAC,OAAO,EAAEK,gBAAgBG,MAAM,CAAC,CAAC;IAE/EH,gBAAgBI,OAAO,CAACF;IAExB,MAAMG,gBAAgBV,SAClBE,YAAY,CAAC,EAAE,GACf,CAAC,EAAEA,YAAY,CAAC,EAAE,CAAC,IAAI,EAAER,sBAAsBQ,YAAY,CAAC,EAAE,EAAE,CAAC;IAErE,OAAO,CAAC;;8BAEoB,EAAEQ,cAAc,KAAK,EAAEH,MAAM;UACjD,EAAEI,gBAAgB;QACtBV;QACAC,cAAcG;QACdF;QACAC;IACF,GAAG;GACJ,CAAC;AACJ;AASA,MAAMQ,mBAAmB,CAAC,EAAEX,QAAQ,EAAEC,YAAY,EAAEE,KAAK,EAAwB;IAC/E,MAAMS,YAAYpB,2BAA2BS;IAC7C,OAAO,CAAC,EAAEA,YAAY,CAAC,EAAE,CAAC,EAAEW,UAAU,CAAC,EAAElB,WAAW,CAACM,SAAS,CAAC,EAAE,EAAEG,MAAM,CAAC,CAAC;AAC7E;AAUA,OAAO,MAAMO,kBAAkB,CAAC,EAC9BV,QAAQ,EACRC,YAAY,EACZC,YAAY,EACZW,gBAAgB,EAChBV,KAAK,EACA;IACL,IAAIU,kBAAkB;QACpB,OAAOf,UAAU;YACfC,QAAQ;YACRC;YACAC;YACAC;YACAC;QACF;IACF;IAEA,IAAID,aAAaY,QAAQ,CAACb,YAAY,CAAC,EAAE,GAAG;QAC1C,OAAOH,UAAU;YACfE;YACAC;YACAC;YACAC;QACF;IACF;IAEA,OAAOQ,iBAAiB;QAAEX;QAAUC;QAAcC;QAAcC;IAAM;AACxE,EAAC"}
package/dist/createMigration.d.ts.map
DELETED
@@ -1 +0,0 @@
-
{"version":3,"file":"createMigration.d.ts","sourceRoot":"","sources":["../src/createMigration.ts"],"names":[],"mappings":"AAEA,OAAO,KAAK,EAAE,eAAe,EAAE,MAAM,SAAS,CAAA;AAgB9C,eAAO,MAAM,eAAe,EAAE,eA6F7B,CAAA"}
package/dist/createMigration.js
DELETED
@@ -1,80 +0,0 @@
-/* eslint-disable no-restricted-syntax, no-await-in-loop */ import fs from 'fs';
-import { createRequire } from 'module';
-import path from 'path';
-import { getPredefinedMigration } from 'payload';
-import prompts from 'prompts';
-import { fileURLToPath } from 'url';
-import { defaultDrizzleSnapshot } from './defaultSnapshot.js';
-import { getMigrationTemplate } from './getMigrationTemplate.js';
-const require = createRequire(import.meta.url);
-export const createMigration = async function createMigration({ file, forceAcceptWarning, migrationName, payload }) {
-    const filename = fileURLToPath(import.meta.url);
-    const dirname = path.dirname(filename);
-    const dir = payload.db.migrationDir;
-    if (!fs.existsSync(dir)) {
-        fs.mkdirSync(dir);
-    }
-    const { generateDrizzleJson, generateMigration } = require('drizzle-kit/payload');
-    const drizzleJsonAfter = generateDrizzleJson(this.schema);
-    const [yyymmdd, hhmmss] = new Date().toISOString().split('T');
-    const formattedDate = yyymmdd.replace(/\D/g, '');
-    const formattedTime = hhmmss.split('.')[0].replace(/\D/g, '');
-    let imports = '';
-    let downSQL;
-    let upSQL;
-    ({ downSQL, imports, upSQL } = await getPredefinedMigration({
-        dirname,
-        file,
-        migrationName,
-        payload
-    }));
-    const timestamp = `${formattedDate}_${formattedTime}`;
-    const name = migrationName || file?.split('/').slice(2).join('/');
-    const fileName = `${timestamp}${name ? `_${name.replace(/\W/g, '_')}` : ''}`;
-    const filePath = `${dir}/${fileName}`;
-    let drizzleJsonBefore = defaultDrizzleSnapshot;
-    if (!upSQL) {
-        // Get latest migration snapshot
-        const latestSnapshot = fs.readdirSync(dir).filter((file)=>file.endsWith('.json')).sort().reverse()?.[0];
-        if (latestSnapshot) {
-            drizzleJsonBefore = JSON.parse(fs.readFileSync(`${dir}/${latestSnapshot}`, 'utf8'));
-        }
-        const sqlStatementsUp = await generateMigration(drizzleJsonBefore, drizzleJsonAfter);
-        const sqlStatementsDown = await generateMigration(drizzleJsonAfter, drizzleJsonBefore);
-        const sqlExecute = 'await db.execute(sql`';
-        if (sqlStatementsUp?.length) {
-            upSQL = `${sqlExecute}\n ${sqlStatementsUp?.join('\n')}\`)`;
-        }
-        if (sqlStatementsDown?.length) {
-            downSQL = `${sqlExecute}\n ${sqlStatementsDown?.join('\n')}\`)`;
-        }
-        if (!upSQL?.length && !downSQL?.length && !forceAcceptWarning) {
-            const { confirm: shouldCreateBlankMigration } = await prompts({
-                name: 'confirm',
-                type: 'confirm',
-                initial: false,
-                message: 'No schema changes detected. Would you like to create a blank migration file?'
-            }, {
-                onCancel: ()=>{
-                    process.exit(0);
-                }
-            });
-            if (!shouldCreateBlankMigration) {
-                process.exit(0);
-            }
-        }
-        // write schema
-        fs.writeFileSync(`${filePath}.json`, JSON.stringify(drizzleJsonAfter, null, 2));
-    }
-    // write migration
-    fs.writeFileSync(`${filePath}.ts`, getMigrationTemplate({
-        downSQL: downSQL || ` // Migration code`,
-        imports,
-        upSQL: upSQL || ` // Migration code`
-    }));
-    payload.logger.info({
-        msg: `Migration created at ${filePath}.ts`
-    });
-};
-
-//# sourceMappingURL=createMigration.js.map
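Based on the code above, each run wrote a timestamped pair of files into the migration directory: a .json Drizzle schema snapshot and a {yyyymmdd}_{hhmmss}[_{sanitized name}].ts migration whose body comes from getMigrationTemplate, which is not included in this diff. The sketch below shows the kind of file that ends up on disk; the template shape, table name, and SQL are assumptions for illustration, and the argument types are taken from the @payloadcms/drizzle/postgres imports introduced in types.d.ts above.

// e.g. migrations/20240726_153000_add_subtitle.ts
import type { MigrateDownArgs, MigrateUpArgs } from '@payloadcms/drizzle/postgres'
import { sql } from 'drizzle-orm'

export async function up({ payload }: MigrateUpArgs): Promise<void> {
  // payload.db.drizzle is exposed on the adapter per the module augmentation above
  await payload.db.drizzle.execute(sql`ALTER TABLE "posts" ADD COLUMN "subtitle" varchar;`)
}

export async function down({ payload }: MigrateDownArgs): Promise<void> {
  await payload.db.drizzle.execute(sql`ALTER TABLE "posts" DROP COLUMN "subtitle";`)
}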
package/dist/createMigration.js.map
DELETED
@@ -1 +0,0 @@
-
{"version":3,"sources":["../src/createMigration.ts"],"sourcesContent":["/* eslint-disable no-restricted-syntax, no-await-in-loop */\nimport type { DrizzleSnapshotJSON } from 'drizzle-kit/payload'\nimport type { CreateMigration } from 'payload'\n\nimport fs from 'fs'\nimport { createRequire } from 'module'\nimport path from 'path'\nimport { getPredefinedMigration } from 'payload'\nimport prompts from 'prompts'\nimport { fileURLToPath } from 'url'\n\nimport type { PostgresAdapter } from './types.js'\n\nimport { defaultDrizzleSnapshot } from './defaultSnapshot.js'\nimport { getMigrationTemplate } from './getMigrationTemplate.js'\n\nconst require = createRequire(import.meta.url)\n\nexport const createMigration: CreateMigration = async function createMigration(\n this: PostgresAdapter,\n { file, forceAcceptWarning, migrationName, payload },\n) {\n const filename = fileURLToPath(import.meta.url)\n const dirname = path.dirname(filename)\n const dir = payload.db.migrationDir\n if (!fs.existsSync(dir)) {\n fs.mkdirSync(dir)\n }\n const { generateDrizzleJson, generateMigration } = require('drizzle-kit/payload')\n const drizzleJsonAfter = generateDrizzleJson(this.schema)\n const [yyymmdd, hhmmss] = new Date().toISOString().split('T')\n const formattedDate = yyymmdd.replace(/\\D/g, '')\n const formattedTime = hhmmss.split('.')[0].replace(/\\D/g, '')\n let imports: string = ''\n let downSQL: string\n let upSQL: string\n ;({ downSQL, imports, upSQL } = await getPredefinedMigration({\n dirname,\n file,\n migrationName,\n payload,\n }))\n\n const timestamp = `${formattedDate}_${formattedTime}`\n\n const name = migrationName || file?.split('/').slice(2).join('/')\n const fileName = `${timestamp}${name ? `_${name.replace(/\\W/g, '_')}` : ''}`\n\n const filePath = `${dir}/${fileName}`\n\n let drizzleJsonBefore = defaultDrizzleSnapshot\n\n if (!upSQL) {\n // Get latest migration snapshot\n const latestSnapshot = fs\n .readdirSync(dir)\n .filter((file) => file.endsWith('.json'))\n .sort()\n .reverse()?.[0]\n\n if (latestSnapshot) {\n drizzleJsonBefore = JSON.parse(\n fs.readFileSync(`${dir}/${latestSnapshot}`, 'utf8'),\n ) as DrizzleSnapshotJSON\n }\n\n const sqlStatementsUp = await generateMigration(drizzleJsonBefore, drizzleJsonAfter)\n const sqlStatementsDown = await generateMigration(drizzleJsonAfter, drizzleJsonBefore)\n const sqlExecute = 'await db.execute(sql`'\n\n if (sqlStatementsUp?.length) {\n upSQL = `${sqlExecute}\\n ${sqlStatementsUp?.join('\\n')}\\`)`\n }\n if (sqlStatementsDown?.length) {\n downSQL = `${sqlExecute}\\n ${sqlStatementsDown?.join('\\n')}\\`)`\n }\n\n if (!upSQL?.length && !downSQL?.length && !forceAcceptWarning) {\n const { confirm: shouldCreateBlankMigration } = await prompts(\n {\n name: 'confirm',\n type: 'confirm',\n initial: false,\n message: 'No schema changes detected. 
Would you like to create a blank migration file?',\n },\n {\n onCancel: () => {\n process.exit(0)\n },\n },\n )\n\n if (!shouldCreateBlankMigration) {\n process.exit(0)\n }\n }\n\n // write schema\n fs.writeFileSync(`${filePath}.json`, JSON.stringify(drizzleJsonAfter, null, 2))\n }\n\n // write migration\n fs.writeFileSync(\n `${filePath}.ts`,\n getMigrationTemplate({\n downSQL: downSQL || ` // Migration code`,\n imports,\n upSQL: upSQL || ` // Migration code`,\n }),\n )\n payload.logger.info({ msg: `Migration created at ${filePath}.ts` })\n}\n"],"names":["fs","createRequire","path","getPredefinedMigration","prompts","fileURLToPath","defaultDrizzleSnapshot","getMigrationTemplate","require","url","createMigration","file","forceAcceptWarning","migrationName","payload","filename","dirname","dir","db","migrationDir","existsSync","mkdirSync","generateDrizzleJson","generateMigration","drizzleJsonAfter","schema","yyymmdd","hhmmss","Date","toISOString","split","formattedDate","replace","formattedTime","imports","downSQL","upSQL","timestamp","name","slice","join","fileName","filePath","drizzleJsonBefore","latestSnapshot","readdirSync","filter","endsWith","sort","reverse","JSON","parse","readFileSync","sqlStatementsUp","sqlStatementsDown","sqlExecute","length","confirm","shouldCreateBlankMigration","type","initial","message","onCancel","process","exit","writeFileSync","stringify","logger","info","msg"],"mappings":"AAAA,yDAAyD,GAIzD,OAAOA,QAAQ,KAAI;AACnB,SAASC,aAAa,QAAQ,SAAQ;AACtC,OAAOC,UAAU,OAAM;AACvB,SAASC,sBAAsB,QAAQ,UAAS;AAChD,OAAOC,aAAa,UAAS;AAC7B,SAASC,aAAa,QAAQ,MAAK;AAInC,SAASC,sBAAsB,QAAQ,uBAAsB;AAC7D,SAASC,oBAAoB,QAAQ,4BAA2B;AAEhE,MAAMC,UAAUP,cAAc,YAAYQ,GAAG;AAE7C,OAAO,MAAMC,kBAAmC,eAAeA,gBAE7D,EAAEC,IAAI,EAAEC,kBAAkB,EAAEC,aAAa,EAAEC,OAAO,EAAE;IAEpD,MAAMC,WAAWV,cAAc,YAAYI,GAAG;IAC9C,MAAMO,UAAUd,KAAKc,OAAO,CAACD;IAC7B,MAAME,MAAMH,QAAQI,EAAE,CAACC,YAAY;IACnC,IAAI,CAACnB,GAAGoB,UAAU,CAACH,MAAM;QACvBjB,GAAGqB,SAAS,CAACJ;IACf;IACA,MAAM,EAAEK,mBAAmB,EAAEC,iBAAiB,EAAE,GAAGf,QAAQ;IAC3D,MAAMgB,mBAAmBF,oBAAoB,IAAI,CAACG,MAAM;IACxD,MAAM,CAACC,SAASC,OAAO,GAAG,IAAIC,OAAOC,WAAW,GAAGC,KAAK,CAAC;IACzD,MAAMC,gBAAgBL,QAAQM,OAAO,CAAC,OAAO;IAC7C,MAAMC,gBAAgBN,OAAOG,KAAK,CAAC,IAAI,CAAC,EAAE,CAACE,OAAO,CAAC,OAAO;IAC1D,IAAIE,UAAkB;IACtB,IAAIC;IACJ,IAAIC;IACF,CAAA,EAAED,OAAO,EAAED,OAAO,EAAEE,KAAK,EAAE,GAAG,MAAMjC,uBAAuB;QAC3Da;QACAL;QACAE;QACAC;IACF,EAAC;IAED,MAAMuB,YAAY,CAAC,EAAEN,cAAc,CAAC,EAAEE,cAAc,CAAC;IAErD,MAAMK,OAAOzB,iBAAiBF,MAAMmB,MAAM,KAAKS,MAAM,GAAGC,KAAK;IAC7D,MAAMC,WAAW,CAAC,EAAEJ,UAAU,EAAEC,OAAO,CAAC,CAAC,EAAEA,KAAKN,OAAO,CAAC,OAAO,KAAK,CAAC,GAAG,GAAG,CAAC;IAE5E,MAAMU,WAAW,CAAC,EAAEzB,IAAI,CAAC,EAAEwB,SAAS,CAAC;IAErC,IAAIE,oBAAoBrC;IAExB,IAAI,CAAC8B,OAAO;QACV,gCAAgC;QAChC,MAAMQ,iBAAiB5C,GACpB6C,WAAW,CAAC5B,KACZ6B,MAAM,CAAC,CAACnC,OAASA,KAAKoC,QAAQ,CAAC,UAC/BC,IAAI,GACJC,OAAO,IAAI,CAAC,EAAE;QAEjB,IAAIL,gBAAgB;YAClBD,oBAAoBO,KAAKC,KAAK,CAC5BnD,GAAGoD,YAAY,CAAC,CAAC,EAAEnC,IAAI,CAAC,EAAE2B,eAAe,CAAC,EAAE;QAEhD;QAEA,MAAMS,kBAAkB,MAAM9B,kBAAkBoB,mBAAmBnB;QACnE,MAAM8B,oBAAoB,MAAM/B,kBAAkBC,kBAAkBmB;QACpE,MAAMY,aAAa;QAEnB,IAAIF,iBAAiBG,QAAQ;YAC3BpB,QAAQ,CAAC,EAAEmB,WAAW,GAAG,EAAEF,iBAAiBb,KAAK,MAAM,GAAG,CAAC;QAC7D;QACA,IAAIc,mBAAmBE,QAAQ;YAC7BrB,UAAU,CAAC,EAAEoB,WAAW,GAAG,EAAED,mBAAmBd,KAAK,MAAM,GAAG,CAAC;QACjE;QAEA,IAAI,CAACJ,OAAOoB,UAAU,CAACrB,SAASqB,UAAU,CAAC5C,oBAAoB;YAC7D,MAAM,EAAE6C,SAASC,0BAA0B,EAAE,GAAG,MAAMtD,QACpD;gBACEkC,MAAM;gBACNqB,MAAM;gBACNC,SAAS;gBACTC,SAAS;YACX,GACA;gBACEC,UAAU;oBACRC,QAAQC,IAAI,CAAC;gBACf;YACF;YAGF,IAAI,CAACN,4BAA4B;gBAC/BK,QAAQC,IAAI,CAAC;YACf;QACF;QAEA,eAAe;QACfhE,GAAGiE
,aAAa,CAAC,CAAC,EAAEvB,SAAS,KAAK,CAAC,EAAEQ,KAAKgB,SAAS,CAAC1C,kBAAkB,MAAM;IAC9E;IAEA,kBAAkB;IAClBxB,GAAGiE,aAAa,CACd,CAAC,EAAEvB,SAAS,GAAG,CAAC,EAChBnC,qBAAqB;QACnB4B,SAASA,WAAW,CAAC,mBAAmB,CAAC;QACzCD;QACAE,OAAOA,SAAS,CAAC,mBAAmB,CAAC;IACvC;IAEFtB,QAAQqD,MAAM,CAACC,IAAI,CAAC;QAAEC,KAAK,CAAC,qBAAqB,EAAE3B,SAAS,GAAG,CAAC;IAAC;AACnE,EAAC"}
package/dist/defaultSnapshot.d.ts.map
DELETED
@@ -1 +0,0 @@
-
{"version":3,"file":"defaultSnapshot.d.ts","sourceRoot":"","sources":["../src/defaultSnapshot.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,mBAAmB,EAAE,MAAM,qBAAqB,CAAA;AAE9D,eAAO,MAAM,sBAAsB,EAAE,mBAapC,CAAA"}
package/dist/defaultSnapshot.js
DELETED
@@ -1,16 +0,0 @@
-export const defaultDrizzleSnapshot = {
-    id: '00000000-0000-0000-0000-000000000000',
-    _meta: {
-        columns: {},
-        schemas: {},
-        tables: {}
-    },
-    dialect: 'postgresql',
-    enums: {},
-    prevId: '00000000-0000-0000-0000-00000000000',
-    schemas: {},
-    tables: {},
-    version: '7'
-};
-
-//# sourceMappingURL=defaultSnapshot.js.map
package/dist/defaultSnapshot.js.map
DELETED
@@ -1 +0,0 @@
-
{"version":3,"sources":["../src/defaultSnapshot.ts"],"sourcesContent":["import type { DrizzleSnapshotJSON } from 'drizzle-kit/payload'\n\nexport const defaultDrizzleSnapshot: DrizzleSnapshotJSON = {\n id: '00000000-0000-0000-0000-000000000000',\n _meta: {\n columns: {},\n schemas: {},\n tables: {},\n },\n dialect: 'postgresql',\n enums: {},\n prevId: '00000000-0000-0000-0000-00000000000',\n schemas: {},\n tables: {},\n version: '7',\n}\n"],"names":["defaultDrizzleSnapshot","id","_meta","columns","schemas","tables","dialect","enums","prevId","version"],"mappings":"AAEA,OAAO,MAAMA,yBAA8C;IACzDC,IAAI;IACJC,OAAO;QACLC,SAAS,CAAC;QACVC,SAAS,CAAC;QACVC,QAAQ,CAAC;IACX;IACAC,SAAS;IACTC,OAAO,CAAC;IACRC,QAAQ;IACRJ,SAAS,CAAC;IACVC,QAAQ,CAAC;IACTI,SAAS;AACX,EAAC"}
package/dist/deleteWhere.d.ts
DELETED
package/dist/deleteWhere.d.ts.map
DELETED
@@ -1 +0,0 @@
-
{"version":3,"file":"deleteWhere.d.ts","sourceRoot":"","sources":["../src/deleteWhere.ts"],"names":[],"mappings":"AAEA,OAAO,KAAK,EAAE,WAAW,EAAE,MAAM,YAAY,CAAA;AAE7C,eAAO,MAAM,WAAW,EAAE,WAMzB,CAAA"}
package/dist/deleteWhere.js
DELETED
package/dist/deleteWhere.js.map
DELETED
@@ -1 +0,0 @@
-
{"version":3,"sources":["../src/deleteWhere.ts"],"sourcesContent":["import type { TransactionPg } from '@payloadcms/drizzle/types'\n\nimport type { DeleteWhere } from './types.js'\n\nexport const deleteWhere: DeleteWhere = async function deleteWhere({ db, tableName, where }) {\n const table = this.tables[tableName]\n await (db as TransactionPg)\n .delete(table)\n // @ts-expect-error where is picking up libsql types\n .where(where)\n}\n"],"names":["deleteWhere","db","tableName","where","table","tables","delete"],"mappings":"AAIA,OAAO,MAAMA,cAA2B,eAAeA,YAAY,EAAEC,EAAE,EAAEC,SAAS,EAAEC,KAAK,EAAE;IACzF,MAAMC,QAAQ,IAAI,CAACC,MAAM,CAACH,UAAU;IACpC,MAAM,AAACD,GACJK,MAAM,CAACF,MACR,oDAAoD;KACnDD,KAAK,CAACA;AACX,EAAC"}
package/dist/dropDatabase.d.ts
DELETED
package/dist/dropDatabase.d.ts.map
DELETED
@@ -1 +0,0 @@
-
{"version":3,"file":"dropDatabase.d.ts","sourceRoot":"","sources":["../src/dropDatabase.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,YAAY,EAAE,MAAM,YAAY,CAAA;AAE9C,eAAO,MAAM,YAAY,EAAE,YAM1B,CAAA"}
package/dist/dropDatabase.js
DELETED
@@ -1,9 +0,0 @@
-export const dropDatabase = async function dropDatabase({ adapter }) {
-    await adapter.execute({
-        drizzle: adapter.drizzle,
-        raw: `drop schema if exists ${this.schemaName || 'public'} cascade;
-  create schema ${this.schemaName || 'public'};`
-    });
-};
-
-//# sourceMappingURL=dropDatabase.js.map
package/dist/dropDatabase.js.map
DELETED
@@ -1 +0,0 @@
-
{"version":3,"sources":["../src/dropDatabase.ts"],"sourcesContent":["import type { DropDatabase } from './types.js'\n\nexport const dropDatabase: DropDatabase = async function dropDatabase({ adapter }) {\n await adapter.execute({\n drizzle: adapter.drizzle,\n raw: `drop schema if exists ${this.schemaName || 'public'} cascade;\n create schema ${this.schemaName || 'public'};`,\n })\n}\n"],"names":["dropDatabase","adapter","execute","drizzle","raw","schemaName"],"mappings":"AAEA,OAAO,MAAMA,eAA6B,eAAeA,aAAa,EAAEC,OAAO,EAAE;IAC/E,MAAMA,QAAQC,OAAO,CAAC;QACpBC,SAASF,QAAQE,OAAO;QACxBC,KAAK,CAAC,sBAAsB,EAAE,IAAI,CAACC,UAAU,IAAI,SAAS;kBAC5C,EAAE,IAAI,CAACA,UAAU,IAAI,SAAS,CAAC,CAAC;IAChD;AACF,EAAC"}