@payloadcms/db-postgres 3.0.0-alpha.49 → 3.0.0-alpha.51
- package/dist/createMigration.d.ts.map +1 -1
- package/dist/createMigration.js +3 -1
- package/dist/createMigration.js.map +1 -1
- package/dist/deleteOne.d.ts.map +1 -1
- package/dist/deleteOne.js +41 -14
- package/dist/deleteOne.js.map +1 -1
- package/dist/find/chainMethods.d.ts +3 -2
- package/dist/find/chainMethods.d.ts.map +1 -1
- package/dist/find/chainMethods.js.map +1 -1
- package/dist/find/findMany.d.ts.map +1 -1
- package/dist/find/findMany.js +31 -53
- package/dist/find/findMany.js.map +1 -1
- package/dist/migrate.d.ts.map +1 -1
- package/dist/migrate.js +4 -2
- package/dist/migrate.js.map +1 -1
- package/dist/queries/sanitizeQueryValue.d.ts.map +1 -1
- package/dist/queries/sanitizeQueryValue.js +4 -1
- package/dist/queries/sanitizeQueryValue.js.map +1 -1
- package/dist/queries/selectDistinct.d.ts +23 -0
- package/dist/queries/selectDistinct.d.ts.map +1 -0
- package/dist/queries/selectDistinct.js +41 -0
- package/dist/queries/selectDistinct.js.map +1 -0
- package/dist/update.d.ts.map +1 -1
- package/dist/update.js +18 -43
- package/dist/update.js.map +1 -1
- package/dist/utilities/pushDevSchema.d.ts.map +1 -1
- package/dist/utilities/pushDevSchema.js +3 -1
- package/dist/utilities/pushDevSchema.js.map +1 -1
- package/package.json +9 -5
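The most structural change in this release is the new package/dist/queries/selectDistinct.js helper (with its .d.ts), which deleteOne.js and findMany.js below now call instead of each assembling their own db.selectDistinct(...) chain. The helper's own source is not shown on this page, so the sketch below is only inferred from its call sites and from the logic removed from findMany.js; the types, argument shapes, and exact signature are assumptions, not the shipped code.

```ts
// Hypothetical reconstruction of queries/selectDistinct.ts, inferred from its
// call sites in deleteOne.js and findMany.js -- not the published implementation.
import type { SQL } from 'drizzle-orm'

import type { ChainedMethods } from '../find/chainMethods.js'
import type { PostgresAdapter } from '../types.js'

import { chainMethods } from '../find/chainMethods.js'

type Args = {
  adapter: PostgresAdapter
  chainedMethods?: ChainedMethods
  db: PostgresAdapter['drizzle']
  joinAliases: { condition: SQL; table: unknown }[]
  joins: Record<string, SQL>
  selectFields: Record<string, unknown>
  tableName: string
  where?: SQL
}

// Fetch distinct IDs only when the where/sort actually touches join tables;
// otherwise return undefined so the caller can query the table directly.
export const selectDistinct = ({
  adapter,
  chainedMethods = [],
  db,
  joinAliases,
  joins,
  selectFields,
  tableName,
  where,
}: Args) => {
  if (Object.keys(joins).length > 0 || joinAliases.length > 0) {
    if (where) {
      chainedMethods.push({ args: [where], method: 'where' })
    }

    joinAliases.forEach(({ condition, table }) => {
      chainedMethods.push({ args: [table, condition], method: 'leftJoin' })
    })

    Object.entries(joins).forEach(([joinTable, condition]) => {
      if (joinTable) {
        chainedMethods.push({ args: [adapter.tables[joinTable], condition], method: 'leftJoin' })
      }
    })

    return chainMethods({
      methods: chainedMethods,
      query: db.selectDistinct(selectFields).from(adapter.tables[tableName]),
    })
  }
}
```

Both callers treat a returned array as "IDs resolved through joins" and an undefined result as "no joins involved", which matches the branching visible in the diffs below.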
package/dist/createMigration.d.ts.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"createMigration.d.ts","sourceRoot":"","sources":["../src/createMigration.ts"],"names":[],"mappings":"AAEA,OAAO,KAAK,EAAE,eAAe,EAAE,MAAM,kBAAkB,CAAA;
+
{"version":3,"file":"createMigration.d.ts","sourceRoot":"","sources":["../src/createMigration.ts"],"names":[],"mappings":"AAEA,OAAO,KAAK,EAAE,eAAe,EAAE,MAAM,kBAAkB,CAAA;AAsDvD,eAAO,MAAM,eAAe,EAAE,eA4E7B,CAAA"}
package/dist/createMigration.js
CHANGED
@@ -1,5 +1,7 @@
 /* eslint-disable no-restricted-syntax, no-await-in-loop */ import fs from 'fs';
+import { createRequire } from 'module';
 import prompts from 'prompts';
+const require = createRequire(import.meta.url);
 const migrationTemplate = (upSQL, downSQL)=>`import { MigrateUpArgs, MigrateDownArgs } from '@payloadcms/db-postgres'
 import { sql } from 'drizzle-orm'
 
@@ -36,7 +38,7 @@ export const createMigration = async function createMigration({ forceAcceptWarni
 if (!fs.existsSync(dir)) {
 fs.mkdirSync(dir);
 }
-const { generateDrizzleJson, generateMigration } = require
+const { generateDrizzleJson, generateMigration } = require('drizzle-kit/payload');
 const [yyymmdd, hhmmss] = new Date().toISOString().split('T');
 const formattedDate = yyymmdd.replace(/\D/g, '');
 const formattedTime = hhmmss.split('.')[0].replace(/\D/g, '');
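In both createMigration.js and migrate.js (further down), the compiled output now creates its own CommonJS require with createRequire(import.meta.url) before loading 'drizzle-kit/payload'. The removed lines are truncated by this diff view, so the previous loading mechanism isn't visible here, but the pattern itself is the standard way for an ES module to reach a CommonJS-only entry point. A minimal, package-independent sketch:

```ts
// In an ES module there is no global `require`; build one scoped to this file.
import { createRequire } from 'module'

const require = createRequire(import.meta.url)

// Any CommonJS-only module (or a subpath without ESM exports) can now be loaded.
// 'drizzle-kit/payload' is what the adapter loads, as shown in the diff above.
const { generateDrizzleJson, generateMigration } = require('drizzle-kit/payload')
```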
package/dist/createMigration.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"sources":["../src/createMigration.ts"],"sourcesContent":["/* eslint-disable no-restricted-syntax, no-await-in-loop */\nimport type { DrizzleSnapshotJSON } from 'drizzle-kit/payload'\nimport type { CreateMigration } from 'payload/database'\n\nimport fs from 'fs'\nimport prompts from 'prompts'\n\nimport type { PostgresAdapter } from './types.js'\n\nconst migrationTemplate = (\n upSQL?: string,\n downSQL?: string,\n) => `import { MigrateUpArgs, MigrateDownArgs } from '@payloadcms/db-postgres'\nimport { sql } from 'drizzle-orm'\n\nexport async function up({ payload }: MigrateUpArgs): Promise<void> {\n${\n upSQL\n ? `await payload.db.drizzle.execute(sql\\`\n\n${upSQL}\\`);\n`\n : '// Migration code'\n}\n};\n\nexport async function down({ payload }: MigrateDownArgs): Promise<void> {\n${\n downSQL\n ? `await payload.db.drizzle.execute(sql\\`\n\n${downSQL}\\`);\n`\n : '// Migration code'\n}\n};\n`\n\nconst getDefaultDrizzleSnapshot = (): DrizzleSnapshotJSON => ({\n id: '00000000-0000-0000-0000-000000000000',\n _meta: {\n columns: {},\n schemas: {},\n tables: {},\n },\n dialect: 'pg',\n enums: {},\n prevId: '00000000-0000-0000-0000-00000000000',\n schemas: {},\n tables: {},\n version: '5',\n})\n\nexport const createMigration: CreateMigration = async function createMigration(\n this: PostgresAdapter,\n { forceAcceptWarning, migrationName, payload },\n) {\n const dir = payload.db.migrationDir\n if (!fs.existsSync(dir)) {\n fs.mkdirSync(dir)\n }\n\n const { generateDrizzleJson, generateMigration } = require
+
{"version":3,"sources":["../src/createMigration.ts"],"sourcesContent":["/* eslint-disable no-restricted-syntax, no-await-in-loop */\nimport type { DrizzleSnapshotJSON } from 'drizzle-kit/payload'\nimport type { CreateMigration } from 'payload/database'\n\nimport fs from 'fs'\nimport { createRequire } from 'module'\nimport prompts from 'prompts'\n\nimport type { PostgresAdapter } from './types.js'\n\nconst require = createRequire(import.meta.url)\n\nconst migrationTemplate = (\n upSQL?: string,\n downSQL?: string,\n) => `import { MigrateUpArgs, MigrateDownArgs } from '@payloadcms/db-postgres'\nimport { sql } from 'drizzle-orm'\n\nexport async function up({ payload }: MigrateUpArgs): Promise<void> {\n${\n upSQL\n ? `await payload.db.drizzle.execute(sql\\`\n\n${upSQL}\\`);\n`\n : '// Migration code'\n}\n};\n\nexport async function down({ payload }: MigrateDownArgs): Promise<void> {\n${\n downSQL\n ? `await payload.db.drizzle.execute(sql\\`\n\n${downSQL}\\`);\n`\n : '// Migration code'\n}\n};\n`\n\nconst getDefaultDrizzleSnapshot = (): DrizzleSnapshotJSON => ({\n id: '00000000-0000-0000-0000-000000000000',\n _meta: {\n columns: {},\n schemas: {},\n tables: {},\n },\n dialect: 'pg',\n enums: {},\n prevId: '00000000-0000-0000-0000-00000000000',\n schemas: {},\n tables: {},\n version: '5',\n})\n\nexport const createMigration: CreateMigration = async function createMigration(\n this: PostgresAdapter,\n { forceAcceptWarning, migrationName, payload },\n) {\n const dir = payload.db.migrationDir\n if (!fs.existsSync(dir)) {\n fs.mkdirSync(dir)\n }\n\n const { generateDrizzleJson, generateMigration } = require('drizzle-kit/payload')\n\n const [yyymmdd, hhmmss] = new Date().toISOString().split('T')\n const formattedDate = yyymmdd.replace(/\\D/g, '')\n const formattedTime = hhmmss.split('.')[0].replace(/\\D/g, '')\n\n const timestamp = `${formattedDate}_${formattedTime}`\n\n const fileName = migrationName\n ? `${timestamp}_${migrationName.replace(/\\W/g, '_')}`\n : `${timestamp}`\n\n const filePath = `${dir}/${fileName}`\n\n let drizzleJsonBefore = getDefaultDrizzleSnapshot()\n\n // Get latest migration snapshot\n const latestSnapshot = fs\n .readdirSync(dir)\n .filter((file) => file.endsWith('.json'))\n .sort()\n .reverse()?.[0]\n\n if (latestSnapshot) {\n const latestSnapshotJSON = JSON.parse(\n fs.readFileSync(`${dir}/${latestSnapshot}`, 'utf8'),\n ) as DrizzleSnapshotJSON\n\n drizzleJsonBefore = latestSnapshotJSON\n }\n\n const drizzleJsonAfter = generateDrizzleJson(this.schema)\n const sqlStatementsUp = await generateMigration(drizzleJsonBefore, drizzleJsonAfter)\n const sqlStatementsDown = await generateMigration(drizzleJsonAfter, drizzleJsonBefore)\n\n if (!sqlStatementsUp.length && !sqlStatementsDown.length && !forceAcceptWarning) {\n const { confirm: shouldCreateBlankMigration } = await prompts(\n {\n name: 'confirm',\n type: 'confirm',\n initial: false,\n message: 'No schema changes detected. Would you like to create a blank migration file?',\n },\n {\n onCancel: () => {\n process.exit(0)\n },\n },\n )\n\n if (!shouldCreateBlankMigration) {\n process.exit(0)\n }\n }\n\n // write schema\n fs.writeFileSync(`${filePath}.json`, JSON.stringify(drizzleJsonAfter, null, 2))\n\n // write migration\n fs.writeFileSync(\n `${filePath}.ts`,\n migrationTemplate(\n sqlStatementsUp.length ? sqlStatementsUp?.join('\\n') : undefined,\n sqlStatementsDown.length ? 
sqlStatementsDown?.join('\\n') : undefined,\n ),\n )\n payload.logger.info({ msg: `Migration created at ${filePath}.ts` })\n}\n"],"names":["fs","createRequire","prompts","require","url","migrationTemplate","upSQL","downSQL","getDefaultDrizzleSnapshot","id","_meta","columns","schemas","tables","dialect","enums","prevId","version","createMigration","forceAcceptWarning","migrationName","payload","dir","db","migrationDir","existsSync","mkdirSync","generateDrizzleJson","generateMigration","yyymmdd","hhmmss","Date","toISOString","split","formattedDate","replace","formattedTime","timestamp","fileName","filePath","drizzleJsonBefore","latestSnapshot","readdirSync","filter","file","endsWith","sort","reverse","latestSnapshotJSON","JSON","parse","readFileSync","drizzleJsonAfter","schema","sqlStatementsUp","sqlStatementsDown","length","confirm","shouldCreateBlankMigration","name","type","initial","message","onCancel","process","exit","writeFileSync","stringify","join","undefined","logger","info","msg"],"mappings":"AAAA,yDAAyD,GAIzD,OAAOA,QAAQ,KAAI;AACnB,SAASC,aAAa,QAAQ,SAAQ;AACtC,OAAOC,aAAa,UAAS;AAI7B,MAAMC,UAAUF,cAAc,YAAYG,GAAG;AAE7C,MAAMC,oBAAoB,CACxBC,OACAC,UACG,CAAC;;;;AAIN,EACED,QACI,CAAC;;AAEP,EAAEA,MAAM;AACR,CAAC,GACK,oBACL;;;;AAID,EACEC,UACI,CAAC;;AAEP,EAAEA,QAAQ;AACV,CAAC,GACK,oBACL;;AAED,CAAC;AAED,MAAMC,4BAA4B,IAA4B,CAAA;QAC5DC,IAAI;QACJC,OAAO;YACLC,SAAS,CAAC;YACVC,SAAS,CAAC;YACVC,QAAQ,CAAC;QACX;QACAC,SAAS;QACTC,OAAO,CAAC;QACRC,QAAQ;QACRJ,SAAS,CAAC;QACVC,QAAQ,CAAC;QACTI,SAAS;IACX,CAAA;AAEA,OAAO,MAAMC,kBAAmC,eAAeA,gBAE7D,EAAEC,kBAAkB,EAAEC,aAAa,EAAEC,OAAO,EAAE;IAE9C,MAAMC,MAAMD,QAAQE,EAAE,CAACC,YAAY;IACnC,IAAI,CAACxB,GAAGyB,UAAU,CAACH,MAAM;QACvBtB,GAAG0B,SAAS,CAACJ;IACf;IAEA,MAAM,EAAEK,mBAAmB,EAAEC,iBAAiB,EAAE,GAAGzB,QAAQ;IAE3D,MAAM,CAAC0B,SAASC,OAAO,GAAG,IAAIC,OAAOC,WAAW,GAAGC,KAAK,CAAC;IACzD,MAAMC,gBAAgBL,QAAQM,OAAO,CAAC,OAAO;IAC7C,MAAMC,gBAAgBN,OAAOG,KAAK,CAAC,IAAI,CAAC,EAAE,CAACE,OAAO,CAAC,OAAO;IAE1D,MAAME,YAAY,CAAC,EAAEH,cAAc,CAAC,EAAEE,cAAc,CAAC;IAErD,MAAME,WAAWlB,gBACb,CAAC,EAAEiB,UAAU,CAAC,EAAEjB,cAAce,OAAO,CAAC,OAAO,KAAK,CAAC,GACnD,CAAC,EAAEE,UAAU,CAAC;IAElB,MAAME,WAAW,CAAC,EAAEjB,IAAI,CAAC,EAAEgB,SAAS,CAAC;IAErC,IAAIE,oBAAoBhC;IAExB,gCAAgC;IAChC,MAAMiC,iBAAiBzC,GACpB0C,WAAW,CAACpB,KACZqB,MAAM,CAAC,CAACC,OAASA,KAAKC,QAAQ,CAAC,UAC/BC,IAAI,GACJC,OAAO,IAAI,CAAC,EAAE;IAEjB,IAAIN,gBAAgB;QAClB,MAAMO,qBAAqBC,KAAKC,KAAK,CACnClD,GAAGmD,YAAY,CAAC,CAAC,EAAE7B,IAAI,CAAC,EAAEmB,eAAe,CAAC,EAAE;QAG9CD,oBAAoBQ;IACtB;IAEA,MAAMI,mBAAmBzB,oBAAoB,IAAI,CAAC0B,MAAM;IACxD,MAAMC,kBAAkB,MAAM1B,kBAAkBY,mBAAmBY;IACnE,MAAMG,oBAAoB,MAAM3B,kBAAkBwB,kBAAkBZ;IAEpE,IAAI,CAACc,gBAAgBE,MAAM,IAAI,CAACD,kBAAkBC,MAAM,IAAI,CAACrC,oBAAoB;QAC/E,MAAM,EAAEsC,SAASC,0BAA0B,EAAE,GAAG,MAAMxD,QACpD;YACEyD,MAAM;YACNC,MAAM;YACNC,SAAS;YACTC,SAAS;QACX,GACA;YACEC,UAAU;gBACRC,QAAQC,IAAI,CAAC;YACf;QACF;QAGF,IAAI,CAACP,4BAA4B;YAC/BM,QAAQC,IAAI,CAAC;QACf;IACF;IAEA,eAAe;IACfjE,GAAGkE,aAAa,CAAC,CAAC,EAAE3B,SAAS,KAAK,CAAC,EAAEU,KAAKkB,SAAS,CAACf,kBAAkB,MAAM;IAE5E,kBAAkB;IAClBpD,GAAGkE,aAAa,CACd,CAAC,EAAE3B,SAAS,GAAG,CAAC,EAChBlC,kBACEiD,gBAAgBE,MAAM,GAAGF,iBAAiBc,KAAK,QAAQC,WACvDd,kBAAkBC,MAAM,GAAGD,mBAAmBa,KAAK,QAAQC;IAG/DhD,QAAQiD,MAAM,CAACC,IAAI,CAAC;QAAEC,KAAK,CAAC,qBAAqB,EAAEjC,SAAS,GAAG,CAAC;IAAC;AACnE,EAAC"}
package/dist/deleteOne.d.ts.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"deleteOne.d.ts","sourceRoot":"","sources":["../src/deleteOne.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,SAAS,EAAE,MAAM,kBAAkB,CAAA;
+
{"version":3,"file":"deleteOne.d.ts","sourceRoot":"","sources":["../src/deleteOne.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,SAAS,EAAE,MAAM,kBAAkB,CAAA;AAajD,eAAO,MAAM,SAAS,EAAE,SAsDvB,CAAA"}
package/dist/deleteOne.js
CHANGED
@@ -1,31 +1,58 @@
+import { eq } from 'drizzle-orm';
 import toSnakeCase from 'to-snake-case';
 import { buildFindManyArgs } from './find/buildFindManyArgs.js';
 import buildQuery from './queries/buildQuery.js';
+import { selectDistinct } from './queries/selectDistinct.js';
 import { transform } from './transform/read/index.js';
-export const deleteOne = async function deleteOne({ collection, req = {}, where:
+export const deleteOne = async function deleteOne({ collection: collectionSlug, req = {}, where: whereArg }) {
 const db = this.sessions[req.transactionID]?.db || this.drizzle;
-const
-const tableName = toSnakeCase(
-
+const collection = this.payload.collections[collectionSlug].config;
+const tableName = toSnakeCase(collectionSlug);
+let docToDelete;
+const { joinAliases, joins, selectFields, where } = await buildQuery({
 adapter: this,
-fields:
+fields: collection.fields,
+locale: req.locale,
 tableName,
-where:
+where: whereArg
 });
-const
+const selectDistinctResult = await selectDistinct({
 adapter: this,
-
-
-
+chainedMethods: [
+{
+args: [
+1
+],
+method: 'limit'
+}
+],
+db,
+joinAliases,
+joins,
+selectFields,
+tableName,
+where
 });
-
-
+if (selectDistinctResult?.[0]?.id) {
+docToDelete = await db.query[tableName].findFirst({
+where: eq(this.tables[tableName].id, selectDistinctResult[0].id)
+});
+} else {
+const findManyArgs = buildFindManyArgs({
+adapter: this,
+depth: 0,
+fields: collection.fields,
+tableName
+});
+findManyArgs.where = where;
+docToDelete = await db.query[tableName].findFirst(findManyArgs);
+}
 const result = transform({
 config: this.payload.config,
 data: docToDelete,
-fields:
+fields: collection.fields
 });
-await db.delete(this.tables[tableName]).where(
+await db.delete(this.tables[tableName]).where(eq(this.tables[tableName].id, docToDelete.id));
 return result;
 };
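deleteOne now resolves the target document before deleting: it looks up the collection config by slug, builds the query with locale support, asks selectDistinct for the matching ID when the where clause involves join tables (note the limit: 1 chained method), and otherwise falls back to findFirst with buildFindManyArgs. The row is then deleted by primary key rather than by re-running the original where clause. A condensed, illustrative version of that final step with a made-up table (the real code uses this.tables[tableName] and the adapter's session):

```ts
import { eq } from 'drizzle-orm'
import { drizzle } from 'drizzle-orm/node-postgres'
import { pgTable, serial, text } from 'drizzle-orm/pg-core'
import { Pool } from 'pg'

// Hypothetical table standing in for this.tables[tableName].
const posts = pgTable('posts', {
  id: serial('id').primaryKey(),
  title: text('title'),
})

const db = drizzle(new Pool({ connectionString: process.env.DATABASE_URL }))

// docToDelete stands in for the row found via selectDistinct or findFirst.
const docToDelete = { id: 1 }

// Delete by id, so the DELETE itself never needs the join-dependent where clause.
await db.delete(posts).where(eq(posts.id, docToDelete.id))
```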
|
package/dist/deleteOne.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"sources":["../src/deleteOne.ts"],"sourcesContent":["import type { DeleteOne } from 'payload/database'\nimport type { PayloadRequest } from 'payload/types'\n\nimport toSnakeCase from 'to-snake-case'\n\nimport type { PostgresAdapter } from './types.js'\n\nimport { buildFindManyArgs } from './find/buildFindManyArgs.js'\nimport buildQuery from './queries/buildQuery.js'\nimport { transform } from './transform/read/index.js'\n\nexport const deleteOne: DeleteOne = async function deleteOne(\n this: PostgresAdapter,\n { collection, req = {} as PayloadRequest, where:
+
{"version":3,"sources":["../src/deleteOne.ts"],"sourcesContent":["import type { DeleteOne } from 'payload/database'\nimport type { PayloadRequest } from 'payload/types'\n\nimport { eq } from 'drizzle-orm'\nimport toSnakeCase from 'to-snake-case'\n\nimport type { PostgresAdapter } from './types.js'\n\nimport { buildFindManyArgs } from './find/buildFindManyArgs.js'\nimport buildQuery from './queries/buildQuery.js'\nimport { selectDistinct } from './queries/selectDistinct.js'\nimport { transform } from './transform/read/index.js'\n\nexport const deleteOne: DeleteOne = async function deleteOne(\n this: PostgresAdapter,\n { collection: collectionSlug, req = {} as PayloadRequest, where: whereArg },\n) {\n const db = this.sessions[req.transactionID]?.db || this.drizzle\n const collection = this.payload.collections[collectionSlug].config\n const tableName = toSnakeCase(collectionSlug)\n let docToDelete: Record<string, unknown>\n\n const { joinAliases, joins, selectFields, where } = await buildQuery({\n adapter: this,\n fields: collection.fields,\n locale: req.locale,\n tableName,\n where: whereArg,\n })\n\n const selectDistinctResult = await selectDistinct({\n adapter: this,\n chainedMethods: [{ args: [1], method: 'limit' }],\n db,\n joinAliases,\n joins,\n selectFields,\n tableName,\n where,\n })\n\n if (selectDistinctResult?.[0]?.id) {\n docToDelete = await db.query[tableName].findFirst({\n where: eq(this.tables[tableName].id, selectDistinctResult[0].id),\n })\n } else {\n const findManyArgs = buildFindManyArgs({\n adapter: this,\n depth: 0,\n fields: collection.fields,\n tableName,\n })\n\n findManyArgs.where = where\n\n docToDelete = await db.query[tableName].findFirst(findManyArgs)\n }\n\n const result = transform({\n config: this.payload.config,\n data: docToDelete,\n fields: collection.fields,\n })\n\n await db.delete(this.tables[tableName]).where(eq(this.tables[tableName].id, docToDelete.id))\n\n return 
result\n}\n"],"names":["eq","toSnakeCase","buildFindManyArgs","buildQuery","selectDistinct","transform","deleteOne","collection","collectionSlug","req","where","whereArg","db","sessions","transactionID","drizzle","payload","collections","config","tableName","docToDelete","joinAliases","joins","selectFields","adapter","fields","locale","selectDistinctResult","chainedMethods","args","method","id","query","findFirst","tables","findManyArgs","depth","result","data","delete"],"mappings":"AAGA,SAASA,EAAE,QAAQ,cAAa;AAChC,OAAOC,iBAAiB,gBAAe;AAIvC,SAASC,iBAAiB,QAAQ,8BAA6B;AAC/D,OAAOC,gBAAgB,0BAAyB;AAChD,SAASC,cAAc,QAAQ,8BAA6B;AAC5D,SAASC,SAAS,QAAQ,4BAA2B;AAErD,OAAO,MAAMC,YAAuB,eAAeA,UAEjD,EAAEC,YAAYC,cAAc,EAAEC,MAAM,CAAC,CAAmB,EAAEC,OAAOC,QAAQ,EAAE;IAE3E,MAAMC,KAAK,IAAI,CAACC,QAAQ,CAACJ,IAAIK,aAAa,CAAC,EAAEF,MAAM,IAAI,CAACG,OAAO;IAC/D,MAAMR,aAAa,IAAI,CAACS,OAAO,CAACC,WAAW,CAACT,eAAe,CAACU,MAAM;IAClE,MAAMC,YAAYlB,YAAYO;IAC9B,IAAIY;IAEJ,MAAM,EAAEC,WAAW,EAAEC,KAAK,EAAEC,YAAY,EAAEb,KAAK,EAAE,GAAG,MAAMP,WAAW;QACnEqB,SAAS,IAAI;QACbC,QAAQlB,WAAWkB,MAAM;QACzBC,QAAQjB,IAAIiB,MAAM;QAClBP;QACAT,OAAOC;IACT;IAEA,MAAMgB,uBAAuB,MAAMvB,eAAe;QAChDoB,SAAS,IAAI;QACbI,gBAAgB;YAAC;gBAAEC,MAAM;oBAAC;iBAAE;gBAAEC,QAAQ;YAAQ;SAAE;QAChDlB;QACAS;QACAC;QACAC;QACAJ;QACAT;IACF;IAEA,IAAIiB,sBAAsB,CAAC,EAAE,EAAEI,IAAI;QACjCX,cAAc,MAAMR,GAAGoB,KAAK,CAACb,UAAU,CAACc,SAAS,CAAC;YAChDvB,OAAOV,GAAG,IAAI,CAACkC,MAAM,CAACf,UAAU,CAACY,EAAE,EAAEJ,oBAAoB,CAAC,EAAE,CAACI,EAAE;QACjE;IACF,OAAO;QACL,MAAMI,eAAejC,kBAAkB;YACrCsB,SAAS,IAAI;YACbY,OAAO;YACPX,QAAQlB,WAAWkB,MAAM;YACzBN;QACF;QAEAgB,aAAazB,KAAK,GAAGA;QAErBU,cAAc,MAAMR,GAAGoB,KAAK,CAACb,UAAU,CAACc,SAAS,CAACE;IACpD;IAEA,MAAME,SAAShC,UAAU;QACvBa,QAAQ,IAAI,CAACF,OAAO,CAACE,MAAM;QAC3BoB,MAAMlB;QACNK,QAAQlB,WAAWkB,MAAM;IAC3B;IAEA,MAAMb,GAAG2B,MAAM,CAAC,IAAI,CAACL,MAAM,CAACf,UAAU,EAAET,KAAK,CAACV,GAAG,IAAI,CAACkC,MAAM,CAACf,UAAU,CAACY,EAAE,EAAEX,YAAYW,EAAE;IAE1F,OAAOM;AACT,EAAC"}
package/dist/find/chainMethods.d.ts
CHANGED
@@ -1,3 +1,4 @@
+import type { QueryPromise } from 'drizzle-orm';
 export type ChainedMethods = {
 args: unknown[];
 method: string;
@@ -7,9 +8,9 @@ export type ChainedMethods = {
 * @param methods
 * @param query
 */
-declare const chainMethods: ({ methods, query }: {
+declare const chainMethods: <T>({ methods, query }: {
 methods: any;
 query: any;
-}) =>
+}) => QueryPromise<T>;
 export { chainMethods };
 //# sourceMappingURL=chainMethods.d.ts.map
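chainMethods is now generic, so call sites get a typed QueryPromise<T> back instead of an untyped value; the runtime behaviour (reducing over { args, method } entries) is unchanged, as the updated source map below shows. A self-contained illustration of the pattern with a stand-in object rather than a drizzle query:

```ts
type ChainedMethods = { args: unknown[]; method: string }[]

// Same reduce as the real helper; the generic only types the result.
const chainMethods = <T>({ methods, query }: { methods: ChainedMethods; query: any }): T =>
  methods.reduce((q, { args, method }) => q[method](...args), query)

// Stand-in "query" that records calls, to show how entries become chained calls.
const fakeQuery = {
  calls: [] as string[],
  offset(n: number) { this.calls.push(`offset(${n})`); return this },
  limit(n: number) { this.calls.push(`limit(${n})`); return this },
}

const chained = chainMethods<typeof fakeQuery>({
  methods: [
    { args: [0], method: 'offset' },
    { args: [10], method: 'limit' },
  ],
  query: fakeQuery,
})
// chained.calls -> ['offset(0)', 'limit(10)']
```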
package/dist/find/chainMethods.d.ts.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"chainMethods.d.ts","sourceRoot":"","sources":["../../src/find/chainMethods.ts"],"names":[],"mappings":"AAAA,MAAM,MAAM,cAAc,GAAG;IAC3B,IAAI,EAAE,OAAO,EAAE,CAAA;IACf,MAAM,EAAE,MAAM,CAAA;CACf,EAAE,CAAA;AAEH;;;;GAIG;AACH,QAAA,MAAM,YAAY;;;
+
{"version":3,"file":"chainMethods.d.ts","sourceRoot":"","sources":["../../src/find/chainMethods.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,YAAY,EAAE,MAAM,aAAa,CAAA;AAE/C,MAAM,MAAM,cAAc,GAAG;IAC3B,IAAI,EAAE,OAAO,EAAE,CAAA;IACf,MAAM,EAAE,MAAM,CAAA;CACf,EAAE,CAAA;AAEH;;;;GAIG;AACH,QAAA,MAAM,YAAY;;;MAA4B,aAAa,CAAC,CAI3D,CAAA;AAED,OAAO,EAAE,YAAY,EAAE,CAAA"}
package/dist/find/chainMethods.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"sources":["../../src/find/chainMethods.ts"],"sourcesContent":["
+
{"version":3,"sources":["../../src/find/chainMethods.ts"],"sourcesContent":["import type { QueryPromise } from 'drizzle-orm'\n\nexport type ChainedMethods = {\n args: unknown[]\n method: string\n}[]\n\n/**\n * Call and returning methods that would normally be chained together but cannot be because of control logic\n * @param methods\n * @param query\n */\nconst chainMethods = <T>({ methods, query }): QueryPromise<T> => {\n return methods.reduce((query, { args, method }) => {\n return query[method](...args)\n }, query)\n}\n\nexport { chainMethods }\n"],"names":["chainMethods","methods","query","reduce","args","method"],"mappings":"AAOA;;;;CAIC,GACD,MAAMA,eAAe,CAAI,EAAEC,OAAO,EAAEC,KAAK,EAAE;IACzC,OAAOD,QAAQE,MAAM,CAAC,CAACD,OAAO,EAAEE,IAAI,EAAEC,MAAM,EAAE;QAC5C,OAAOH,KAAK,CAACG,OAAO,IAAID;IAC1B,GAAGF;AACL;AAEA,SAASF,YAAY,GAAE"}
package/dist/find/findMany.d.ts.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"findMany.d.ts","sourceRoot":"","sources":["../../src/find/findMany.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,QAAQ,EAAE,MAAM,kBAAkB,CAAA;AAChD,OAAO,KAAK,EAAE,KAAK,EAA8B,MAAM,eAAe,CAAA;AAItE,OAAO,KAAK,EAAE,eAAe,EAAE,MAAM,aAAa,CAAA;
+
{"version":3,"file":"findMany.d.ts","sourceRoot":"","sources":["../../src/find/findMany.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,QAAQ,EAAE,MAAM,kBAAkB,CAAA;AAChD,OAAO,KAAK,EAAE,KAAK,EAA8B,MAAM,eAAe,CAAA;AAItE,OAAO,KAAK,EAAE,eAAe,EAAE,MAAM,aAAa,CAAA;AASlD,KAAK,IAAI,GAAG,IAAI,CAAC,QAAQ,EAAE,YAAY,CAAC,GAAG;IACzC,OAAO,EAAE,eAAe,CAAA;IACxB,MAAM,EAAE,KAAK,EAAE,CAAA;IACf,SAAS,EAAE,MAAM,CAAA;CAClB,CAAA;AAED,eAAO,MAAM,QAAQ,iHAYlB,IAAI;;;;;;;;;;;EAgKN,CAAA"}
package/dist/find/findMany.js
CHANGED
@@ -1,5 +1,6 @@
 import { inArray, sql } from 'drizzle-orm';
 import buildQuery from '../queries/buildQuery.js';
+import { selectDistinct } from '../queries/selectDistinct.js';
 import { transform } from '../transform/read/index.js';
 import { buildFindManyArgs } from './buildFindManyArgs.js';
 import { chainMethods } from './chainMethods.js';
@@ -12,7 +13,6 @@ export const findMany = async function find({ adapter, fields, limit: limitArg,
 let hasPrevPage;
 let hasNextPage;
 let pagingCounter;
-let selectDistinctResult;
 const { joinAliases, joins, orderBy, selectFields, where } = await buildQuery({
 adapter,
 fields,
@@ -38,52 +38,29 @@ export const findMany = async function find({ adapter, fields, limit: limitArg,
 fields,
 tableName
 });
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-adapter.tables[joinTable],
-condition
-],
-method: 'leftJoin'
-});
-}
-});
-selectDistinctMethods.push({
-args: [
-skip || (page - 1) * limit
-],
-method: 'offset'
-});
-selectDistinctMethods.push({
-args: [
-limit === 0 ? undefined : limit
-],
-method: 'limit'
-});
-selectDistinctResult = await chainMethods({
-methods: selectDistinctMethods,
-query: db.selectDistinct(selectFields).from(table)
-});
+selectDistinctMethods.push({
+args: [
+skip || (page - 1) * limit
+],
+method: 'offset'
+});
+selectDistinctMethods.push({
+args: [
+limit === 0 ? undefined : limit
+],
+method: 'limit'
+});
+const selectDistinctResult = await selectDistinct({
+adapter,
+chainedMethods: selectDistinctMethods,
+db,
+joinAliases,
+joins,
+selectFields,
+tableName,
+where
+});
+if (selectDistinctResult) {
 if (selectDistinctResult.length === 0) {
 return {
 docs: [],
@@ -97,13 +74,14 @@ export const findMany = async function find({ adapter, fields, limit: limitArg,
 totalDocs: 0,
 totalPages: 0
 };
+} else {
+// set the id in an object for sorting later
+selectDistinctResult.forEach(({ id }, i)=>{
+orderedIDMap[id] = i;
+});
+orderedIDs = Object.keys(orderedIDMap);
+findManyArgs.where = inArray(adapter.tables[tableName].id, orderedIDs);
 }
-// set the id in an object for sorting later
-selectDistinctResult.forEach(({ id }, i)=>{
-orderedIDMap[id] = i;
-});
-orderedIDs = Object.keys(orderedIDMap);
-findManyArgs.where = inArray(adapter.tables[tableName].id, orderedIDs);
 } else {
 findManyArgs.limit = limitArg === 0 ? undefined : limitArg;
 const offset = skip || (page - 1) * limitArg;
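findMany no longer decides for itself whether to pre-fetch distinct IDs through the join tables; that branch now lives in the shared selectDistinct helper, and findMany only supplies its offset/limit (and optional orderBy) as chained methods. When the helper returns rows, the IDs are recorded in orderedIDMap so the follow-up findMany constrained by inArray(...) can be re-sorted into the distinct query's order; when it returns nothing, pagination is applied directly to the table as before. A small, self-contained illustration of that re-sorting step (the sample IDs are made up):

```ts
// Step 1 (selectDistinct) returned IDs already in the requested sort order.
const selectDistinctResult = [{ id: 7 }, { id: 3 }, { id: 9 }]

const orderedIDMap: Record<number | string, number> = {}
selectDistinctResult.forEach(({ id }, i) => {
  orderedIDMap[id] = i
})

// Step 2 (db.query[tableName].findMany with inArray) returns rows in arbitrary order.
const rawDocs = [{ id: 3 }, { id: 9 }, { id: 7 }]

// Restore the order of the distinct query, exactly as findMany does with rawDocs.
rawDocs.sort((a, b) => orderedIDMap[a.id] - orderedIDMap[b.id])
// rawDocs -> [{ id: 7 }, { id: 3 }, { id: 9 }]
```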
package/dist/find/findMany.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"sources":["../../src/find/findMany.ts"],"sourcesContent":["import type { FindArgs } from 'payload/database'\nimport type { Field, PayloadRequest, TypeWithID } from 'payload/types'\n\nimport { inArray, sql } from 'drizzle-orm'\n\nimport type { PostgresAdapter } from '../types.js'\nimport type { ChainedMethods } from './chainMethods.js'\n\nimport buildQuery from '../queries/buildQuery.js'\nimport { transform } from '../transform/read/index.js'\nimport { buildFindManyArgs } from './buildFindManyArgs.js'\nimport { chainMethods } from './chainMethods.js'\n\ntype Args = Omit<FindArgs, 'collection'> & {\n adapter: PostgresAdapter\n fields: Field[]\n tableName: string\n}\n\nexport const findMany = async function find({\n adapter,\n fields,\n limit: limitArg,\n locale,\n page = 1,\n pagination,\n req = {} as PayloadRequest,\n skip,\n sort,\n tableName,\n where: whereArg,\n}: Args) {\n const db = adapter.sessions[req.transactionID]?.db || adapter.drizzle\n const table = adapter.tables[tableName]\n\n const limit = limitArg ?? 10\n let totalDocs: number\n let totalPages: number\n let hasPrevPage: boolean\n let hasNextPage: boolean\n let pagingCounter: number\n let selectDistinctResult\n\n const { joinAliases, joins, orderBy, selectFields, where } = await buildQuery({\n adapter,\n fields,\n locale,\n sort,\n tableName,\n where: whereArg,\n })\n\n const orderedIDMap: Record<number | string, number> = {}\n let orderedIDs: (number | string)[]\n\n const selectDistinctMethods: ChainedMethods = []\n\n if (orderBy?.order && orderBy?.column) {\n selectDistinctMethods.push({\n args: [orderBy.order(orderBy.column)],\n method: 'orderBy',\n })\n }\n\n const findManyArgs = buildFindManyArgs({\n adapter,\n depth: 0,\n fields,\n tableName,\n })\n\n // only fetch IDs when a sort or where query is used that needs to be done on join tables, otherwise these can be done directly on the table in findMany\n if (Object.keys(joins).length > 0 || joinAliases.length > 0) {\n if (where) {\n selectDistinctMethods.push({ args: [where], method: 'where' })\n }\n\n joinAliases.forEach(({ condition, table }) => {\n selectDistinctMethods.push({\n args: [table, condition],\n method: 'leftJoin',\n })\n })\n\n Object.entries(joins).forEach(([joinTable, condition]) => {\n if (joinTable) {\n selectDistinctMethods.push({\n args: [adapter.tables[joinTable], condition],\n method: 'leftJoin',\n })\n }\n })\n\n selectDistinctMethods.push({ args: [skip || (page - 1) * limit], method: 'offset' })\n selectDistinctMethods.push({ args: [limit === 0 ? undefined : limit], method: 'limit' })\n\n selectDistinctResult = await chainMethods({\n methods: selectDistinctMethods,\n query: db.selectDistinct(selectFields).from(table),\n })\n\n if (selectDistinctResult.length === 0) {\n return {\n docs: [],\n hasNextPage: false,\n hasPrevPage: false,\n limit,\n nextPage: null,\n page: 1,\n pagingCounter: 0,\n prevPage: null,\n totalDocs: 0,\n totalPages: 0,\n }\n }\n // set the id in an object for sorting later\n selectDistinctResult.forEach(({ id }, i) => {\n orderedIDMap[id as number | string] = i\n })\n orderedIDs = Object.keys(orderedIDMap)\n findManyArgs.where = inArray(adapter.tables[tableName].id, orderedIDs)\n } else {\n findManyArgs.limit = limitArg === 0 ? 
undefined : limitArg\n\n const offset = skip || (page - 1) * limitArg\n\n if (!Number.isNaN(offset)) findManyArgs.offset = offset\n\n if (where) {\n findManyArgs.where = where\n }\n findManyArgs.orderBy = orderBy.order(orderBy.column)\n }\n\n const findPromise = db.query[tableName].findMany(findManyArgs)\n\n if (pagination !== false && (orderedIDs ? orderedIDs?.length >= limit : true)) {\n const selectCountMethods: ChainedMethods = []\n\n joinAliases.forEach(({ condition, table }) => {\n selectCountMethods.push({\n args: [table, condition],\n method: 'leftJoin',\n })\n })\n\n Object.entries(joins).forEach(([joinTable, condition]) => {\n if (joinTable) {\n selectCountMethods.push({\n args: [adapter.tables[joinTable], condition],\n method: 'leftJoin',\n })\n }\n })\n\n const countResult = await chainMethods({\n methods: selectCountMethods,\n query: db\n .select({\n count: sql<number>`count\n (DISTINCT ${adapter.tables[tableName].id})`,\n })\n .from(table)\n .where(where),\n })\n totalDocs = Number(countResult[0].count)\n totalPages = typeof limit === 'number' && limit !== 0 ? Math.ceil(totalDocs / limit) : 1\n hasPrevPage = page > 1\n hasNextPage = totalPages > page\n pagingCounter = (page - 1) * limit + 1\n }\n\n const rawDocs = await findPromise\n // sort rawDocs from selectQuery\n if (Object.keys(orderedIDMap).length > 0) {\n rawDocs.sort((a, b) => orderedIDMap[a.id] - orderedIDMap[b.id])\n }\n\n if (pagination === false || !totalDocs) {\n totalDocs = rawDocs.length\n totalPages = 1\n pagingCounter = 1\n hasPrevPage = false\n hasNextPage = false\n }\n\n const docs = rawDocs.map((data: TypeWithID) => {\n return transform({\n config: adapter.payload.config,\n data,\n fields,\n })\n })\n\n return {\n docs,\n hasNextPage,\n hasPrevPage,\n limit,\n nextPage: hasNextPage ? page + 1 : null,\n page,\n pagingCounter,\n prevPage: hasPrevPage ? 
page - 1 : null,\n totalDocs,\n totalPages,\n }\n}\n"],"names":["inArray","sql","buildQuery","transform","buildFindManyArgs","chainMethods","findMany","find","adapter","fields","limit","limitArg","locale","page","pagination","req","skip","sort","tableName","where","whereArg","db","sessions","transactionID","drizzle","table","tables","totalDocs","totalPages","hasPrevPage","hasNextPage","pagingCounter","selectDistinctResult","joinAliases","joins","orderBy","selectFields","orderedIDMap","orderedIDs","selectDistinctMethods","order","column","push","args","method","findManyArgs","depth","Object","keys","length","forEach","condition","entries","joinTable","undefined","methods","query","selectDistinct","from","docs","nextPage","prevPage","id","i","offset","Number","isNaN","findPromise","selectCountMethods","countResult","select","count","Math","ceil","rawDocs","a","b","map","data","config","payload"],"mappings":"AAGA,SAASA,OAAO,EAAEC,GAAG,QAAQ,cAAa;AAK1C,OAAOC,gBAAgB,2BAA0B;AACjD,SAASC,SAAS,QAAQ,6BAA4B;AACtD,SAASC,iBAAiB,QAAQ,yBAAwB;AAC1D,SAASC,YAAY,QAAQ,oBAAmB;AAQhD,OAAO,MAAMC,WAAW,eAAeC,KAAK,EAC1CC,OAAO,EACPC,MAAM,EACNC,OAAOC,QAAQ,EACfC,MAAM,EACNC,OAAO,CAAC,EACRC,UAAU,EACVC,MAAM,CAAC,CAAmB,EAC1BC,IAAI,EACJC,IAAI,EACJC,SAAS,EACTC,OAAOC,QAAQ,EACV;IACL,MAAMC,KAAKb,QAAQc,QAAQ,CAACP,IAAIQ,aAAa,CAAC,EAAEF,MAAMb,QAAQgB,OAAO;IACrE,MAAMC,QAAQjB,QAAQkB,MAAM,CAACR,UAAU;IAEvC,MAAMR,QAAQC,YAAY;IAC1B,IAAIgB;IACJ,IAAIC;IACJ,IAAIC;IACJ,IAAIC;IACJ,IAAIC;IACJ,IAAIC;IAEJ,MAAM,EAAEC,WAAW,EAAEC,KAAK,EAAEC,OAAO,EAAEC,YAAY,EAAEjB,KAAK,EAAE,GAAG,MAAMjB,WAAW;QAC5EM;QACAC;QACAG;QACAK;QACAC;QACAC,OAAOC;IACT;IAEA,MAAMiB,eAAgD,CAAC;IACvD,IAAIC;IAEJ,MAAMC,wBAAwC,EAAE;IAEhD,IAAIJ,SAASK,SAASL,SAASM,QAAQ;QACrCF,sBAAsBG,IAAI,CAAC;YACzBC,MAAM;gBAACR,QAAQK,KAAK,CAACL,QAAQM,MAAM;aAAE;YACrCG,QAAQ;QACV;IACF;IAEA,MAAMC,eAAezC,kBAAkB;QACrCI;QACAsC,OAAO;QACPrC;QACAS;IACF;IAEA,wJAAwJ;IACxJ,IAAI6B,OAAOC,IAAI,CAACd,OAAOe,MAAM,GAAG,KAAKhB,YAAYgB,MAAM,GAAG,GAAG;QAC3D,IAAI9B,OAAO;YACToB,sBAAsBG,IAAI,CAAC;gBAAEC,MAAM;oBAACxB;iBAAM;gBAAEyB,QAAQ;YAAQ;QAC9D;QAEAX,YAAYiB,OAAO,CAAC,CAAC,EAAEC,SAAS,EAAE1B,KAAK,EAAE;YACvCc,sBAAsBG,IAAI,CAAC;gBACzBC,MAAM;oBAAClB;oBAAO0B;iBAAU;gBACxBP,QAAQ;YACV;QACF;QAEAG,OAAOK,OAAO,CAAClB,OAAOgB,OAAO,CAAC,CAAC,CAACG,WAAWF,UAAU;YACnD,IAAIE,WAAW;gBACbd,sBAAsBG,IAAI,CAAC;oBACzBC,MAAM;wBAACnC,QAAQkB,MAAM,CAAC2B,UAAU;wBAAEF;qBAAU;oBAC5CP,QAAQ;gBACV;YACF;QACF;QAEAL,sBAAsBG,IAAI,CAAC;YAAEC,MAAM;gBAAC3B,QAAQ,AAACH,CAAAA,OAAO,CAAA,IAAKH;aAAM;YAAEkC,QAAQ;QAAS;QAClFL,sBAAsBG,IAAI,CAAC;YAAEC,MAAM;gBAACjC,UAAU,IAAI4C,YAAY5C;aAAM;YAAEkC,QAAQ;QAAQ;QAEtFZ,uBAAuB,MAAM3B,aAAa;YACxCkD,SAAShB;YACTiB,OAAOnC,GAAGoC,cAAc,CAACrB,cAAcsB,IAAI,CAACjC;QAC9C;QAEA,IAAIO,qBAAqBiB,MAAM,KAAK,GAAG;YACrC,OAAO;gBACLU,MAAM,EAAE;gBACR7B,aAAa;gBACbD,aAAa;gBACbnB;gBACAkD,UAAU;gBACV/C,MAAM;gBACNkB,eAAe;gBACf8B,UAAU;gBACVlC,WAAW;gBACXC,YAAY;YACd;QACF;QACA,4CAA4C;QAC5CI,qBAAqBkB,OAAO,CAAC,CAAC,EAAEY,EAAE,EAAE,EAAEC;YACpC1B,YAAY,CAACyB,GAAsB,GAAGC;QACxC;QACAzB,aAAaS,OAAOC,IAAI,CAACX;QACzBQ,aAAa1B,KAAK,GAAGnB,QAAQQ,QAAQkB,MAAM,CAACR,UAAU,CAAC4C,EAAE,EAAExB;IAC7D,OAAO;QACLO,aAAanC,KAAK,GAAGC,aAAa,IAAI2C,YAAY3C;QAElD,MAAMqD,SAAShD,QAAQ,AAACH,CAAAA,OAAO,CAAA,IAAKF;QAEpC,IAAI,CAACsD,OAAOC,KAAK,CAACF,SAASnB,aAAamB,MAAM,GAAGA;QAEjD,IAAI7C,OAAO;YACT0B,aAAa1B,KAAK,GAAGA;QACvB;QACA0B,aAAaV,OAAO,GAAGA,QAAQK,KAAK,CAACL,QAAQM,MAAM;IACrD;IAEA,MAAM0B,cAAc9C,GAAGmC,KAAK,CAACtC,UAAU,CAACZ,QAAQ,CAACuC;IAEjD,IAAI/B,eAAe,SAAUwB,CAAAA,aAAaA,YAAYW,UAAUvC,QAAQ,IAAG,GAAI;QAC7E,MAAM0D,qBAAqC,EAAE;QAE7CnC,YAAYiB,OAAO,CAAC,CAAC,EAAEC,SAAS,EAAE1B,KAAK,EAAE;YACvC2C,mBAAmB1B,IAAI,CAAC;gBACt
BC,MAAM;oBAAClB;oBAAO0B;iBAAU;gBACxBP,QAAQ;YACV;QACF;QAEAG,OAAOK,OAAO,CAAClB,OAAOgB,OAAO,CAAC,CAAC,CAACG,WAAWF,UAAU;YACnD,IAAIE,WAAW;gBACbe,mBAAmB1B,IAAI,CAAC;oBACtBC,MAAM;wBAACnC,QAAQkB,MAAM,CAAC2B,UAAU;wBAAEF;qBAAU;oBAC5CP,QAAQ;gBACV;YACF;QACF;QAEA,MAAMyB,cAAc,MAAMhE,aAAa;YACrCkD,SAASa;YACTZ,OAAOnC,GACJiD,MAAM,CAAC;gBACNC,OAAOtE,GAAW,CAAC;wBACL,EAAEO,QAAQkB,MAAM,CAACR,UAAU,CAAC4C,EAAE,CAAC,CAAC,CAAC;YACjD,GACCJ,IAAI,CAACjC,OACLN,KAAK,CAACA;QACX;QACAQ,YAAYsC,OAAOI,WAAW,CAAC,EAAE,CAACE,KAAK;QACvC3C,aAAa,OAAOlB,UAAU,YAAYA,UAAU,IAAI8D,KAAKC,IAAI,CAAC9C,YAAYjB,SAAS;QACvFmB,cAAchB,OAAO;QACrBiB,cAAcF,aAAaf;QAC3BkB,gBAAgB,AAAClB,CAAAA,OAAO,CAAA,IAAKH,QAAQ;IACvC;IAEA,MAAMgE,UAAU,MAAMP;IACtB,gCAAgC;IAChC,IAAIpB,OAAOC,IAAI,CAACX,cAAcY,MAAM,GAAG,GAAG;QACxCyB,QAAQzD,IAAI,CAAC,CAAC0D,GAAGC,IAAMvC,YAAY,CAACsC,EAAEb,EAAE,CAAC,GAAGzB,YAAY,CAACuC,EAAEd,EAAE,CAAC;IAChE;IAEA,IAAIhD,eAAe,SAAS,CAACa,WAAW;QACtCA,YAAY+C,QAAQzB,MAAM;QAC1BrB,aAAa;QACbG,gBAAgB;QAChBF,cAAc;QACdC,cAAc;IAChB;IAEA,MAAM6B,OAAOe,QAAQG,GAAG,CAAC,CAACC;QACxB,OAAO3E,UAAU;YACf4E,QAAQvE,QAAQwE,OAAO,CAACD,MAAM;YAC9BD;YACArE;QACF;IACF;IAEA,OAAO;QACLkD;QACA7B;QACAD;QACAnB;QACAkD,UAAU9B,cAAcjB,OAAO,IAAI;QACnCA;QACAkB;QACA8B,UAAUhC,cAAchB,OAAO,IAAI;QACnCc;QACAC;IACF;AACF,EAAC"}
+
{"version":3,"sources":["../../src/find/findMany.ts"],"sourcesContent":["import type { FindArgs } from 'payload/database'\nimport type { Field, PayloadRequest, TypeWithID } from 'payload/types'\n\nimport { inArray, sql } from 'drizzle-orm'\n\nimport type { PostgresAdapter } from '../types.js'\nimport type { ChainedMethods } from './chainMethods.js'\n\nimport buildQuery from '../queries/buildQuery.js'\nimport { selectDistinct } from '../queries/selectDistinct.js'\nimport { transform } from '../transform/read/index.js'\nimport { buildFindManyArgs } from './buildFindManyArgs.js'\nimport { chainMethods } from './chainMethods.js'\n\ntype Args = Omit<FindArgs, 'collection'> & {\n adapter: PostgresAdapter\n fields: Field[]\n tableName: string\n}\n\nexport const findMany = async function find({\n adapter,\n fields,\n limit: limitArg,\n locale,\n page = 1,\n pagination,\n req = {} as PayloadRequest,\n skip,\n sort,\n tableName,\n where: whereArg,\n}: Args) {\n const db = adapter.sessions[req.transactionID]?.db || adapter.drizzle\n const table = adapter.tables[tableName]\n\n const limit = limitArg ?? 10\n let totalDocs: number\n let totalPages: number\n let hasPrevPage: boolean\n let hasNextPage: boolean\n let pagingCounter: number\n\n const { joinAliases, joins, orderBy, selectFields, where } = await buildQuery({\n adapter,\n fields,\n locale,\n sort,\n tableName,\n where: whereArg,\n })\n\n const orderedIDMap: Record<number | string, number> = {}\n let orderedIDs: (number | string)[]\n\n const selectDistinctMethods: ChainedMethods = []\n\n if (orderBy?.order && orderBy?.column) {\n selectDistinctMethods.push({\n args: [orderBy.order(orderBy.column)],\n method: 'orderBy',\n })\n }\n\n const findManyArgs = buildFindManyArgs({\n adapter,\n depth: 0,\n fields,\n tableName,\n })\n\n selectDistinctMethods.push({ args: [skip || (page - 1) * limit], method: 'offset' })\n selectDistinctMethods.push({ args: [limit === 0 ? undefined : limit], method: 'limit' })\n\n const selectDistinctResult = await selectDistinct({\n adapter,\n chainedMethods: selectDistinctMethods,\n db,\n joinAliases,\n joins,\n selectFields,\n tableName,\n where,\n })\n\n if (selectDistinctResult) {\n if (selectDistinctResult.length === 0) {\n return {\n docs: [],\n hasNextPage: false,\n hasPrevPage: false,\n limit,\n nextPage: null,\n page: 1,\n pagingCounter: 0,\n prevPage: null,\n totalDocs: 0,\n totalPages: 0,\n }\n } else {\n // set the id in an object for sorting later\n selectDistinctResult.forEach(({ id }, i) => {\n orderedIDMap[id] = i\n })\n orderedIDs = Object.keys(orderedIDMap)\n findManyArgs.where = inArray(adapter.tables[tableName].id, orderedIDs)\n }\n } else {\n findManyArgs.limit = limitArg === 0 ? undefined : limitArg\n\n const offset = skip || (page - 1) * limitArg\n\n if (!Number.isNaN(offset)) findManyArgs.offset = offset\n\n if (where) {\n findManyArgs.where = where\n }\n findManyArgs.orderBy = orderBy.order(orderBy.column)\n }\n\n const findPromise = db.query[tableName].findMany(findManyArgs)\n\n if (pagination !== false && (orderedIDs ? 
orderedIDs?.length >= limit : true)) {\n const selectCountMethods: ChainedMethods = []\n\n joinAliases.forEach(({ condition, table }) => {\n selectCountMethods.push({\n args: [table, condition],\n method: 'leftJoin',\n })\n })\n\n Object.entries(joins).forEach(([joinTable, condition]) => {\n if (joinTable) {\n selectCountMethods.push({\n args: [adapter.tables[joinTable], condition],\n method: 'leftJoin',\n })\n }\n })\n\n const countResult = await chainMethods({\n methods: selectCountMethods,\n query: db\n .select({\n count: sql<number>`count\n (DISTINCT ${adapter.tables[tableName].id})`,\n })\n .from(table)\n .where(where),\n })\n totalDocs = Number(countResult[0].count)\n totalPages = typeof limit === 'number' && limit !== 0 ? Math.ceil(totalDocs / limit) : 1\n hasPrevPage = page > 1\n hasNextPage = totalPages > page\n pagingCounter = (page - 1) * limit + 1\n }\n\n const rawDocs = await findPromise\n // sort rawDocs from selectQuery\n if (Object.keys(orderedIDMap).length > 0) {\n rawDocs.sort((a, b) => orderedIDMap[a.id] - orderedIDMap[b.id])\n }\n\n if (pagination === false || !totalDocs) {\n totalDocs = rawDocs.length\n totalPages = 1\n pagingCounter = 1\n hasPrevPage = false\n hasNextPage = false\n }\n\n const docs = rawDocs.map((data: TypeWithID) => {\n return transform({\n config: adapter.payload.config,\n data,\n fields,\n })\n })\n\n return {\n docs,\n hasNextPage,\n hasPrevPage,\n limit,\n nextPage: hasNextPage ? page + 1 : null,\n page,\n pagingCounter,\n prevPage: hasPrevPage ? page - 1 : null,\n totalDocs,\n totalPages,\n }\n}\n"],"names":["inArray","sql","buildQuery","selectDistinct","transform","buildFindManyArgs","chainMethods","findMany","find","adapter","fields","limit","limitArg","locale","page","pagination","req","skip","sort","tableName","where","whereArg","db","sessions","transactionID","drizzle","table","tables","totalDocs","totalPages","hasPrevPage","hasNextPage","pagingCounter","joinAliases","joins","orderBy","selectFields","orderedIDMap","orderedIDs","selectDistinctMethods","order","column","push","args","method","findManyArgs","depth","undefined","selectDistinctResult","chainedMethods","length","docs","nextPage","prevPage","forEach","id","i","Object","keys","offset","Number","isNaN","findPromise","query","selectCountMethods","condition","entries","joinTable","countResult","methods","select","count","from","Math","ceil","rawDocs","a","b","map","data","config","payload"],"mappings":"AAGA,SAASA,OAAO,EAAEC,GAAG,QAAQ,cAAa;AAK1C,OAAOC,gBAAgB,2BAA0B;AACjD,SAASC,cAAc,QAAQ,+BAA8B;AAC7D,SAASC,SAAS,QAAQ,6BAA4B;AACtD,SAASC,iBAAiB,QAAQ,yBAAwB;AAC1D,SAASC,YAAY,QAAQ,oBAAmB;AAQhD,OAAO,MAAMC,WAAW,eAAeC,KAAK,EAC1CC,OAAO,EACPC,MAAM,EACNC,OAAOC,QAAQ,EACfC,MAAM,EACNC,OAAO,CAAC,EACRC,UAAU,EACVC,MAAM,CAAC,CAAmB,EAC1BC,IAAI,EACJC,IAAI,EACJC,SAAS,EACTC,OAAOC,QAAQ,EACV;IACL,MAAMC,KAAKb,QAAQc,QAAQ,CAACP,IAAIQ,aAAa,CAAC,EAAEF,MAAMb,QAAQgB,OAAO;IACrE,MAAMC,QAAQjB,QAAQkB,MAAM,CAACR,UAAU;IAEvC,MAAMR,QAAQC,YAAY;IAC1B,IAAIgB;IACJ,IAAIC;IACJ,IAAIC;IACJ,IAAIC;IACJ,IAAIC;IAEJ,MAAM,EAAEC,WAAW,EAAEC,KAAK,EAAEC,OAAO,EAAEC,YAAY,EAAEhB,KAAK,EAAE,GAAG,MAAMlB,WAAW;QAC5EO;QACAC;QACAG;QACAK;QACAC;QACAC,OAAOC;IACT;IAEA,MAAMgB,eAAgD,CAAC;IACvD,IAAIC;IAEJ,MAAMC,wBAAwC,EAAE;IAEhD,IAAIJ,SAASK,SAASL,SAASM,QAAQ;QACrCF,sBAAsBG,IAAI,CAAC;YACzBC,MAAM;gBAACR,QAAQK,KAAK,CAACL,QAAQM,MAAM;aAAE;YACrCG,QAAQ;QACV;IACF;IAEA,MAAMC,eAAexC,kBAAkB;QACrCI;QACAqC,OAAO;QACPpC;QACAS;IACF;IAEAoB,sBAAsBG,IAAI,CAAC;QAAEC,MAAM;YAAC1B,QAAQ,AAACH,CAAAA,OAAO,CAAA,IAAKH;SAAM;QAAEiC,QAAQ;IAAS;IAClFL,sBAAsBG,IAAI,CAAC;QAAEC,MAAM;YAAChC,UAAU,IA
AIoC,YAAYpC;SAAM;QAAEiC,QAAQ;IAAQ;IAEtF,MAAMI,uBAAuB,MAAM7C,eAAe;QAChDM;QACAwC,gBAAgBV;QAChBjB;QACAW;QACAC;QACAE;QACAjB;QACAC;IACF;IAEA,IAAI4B,sBAAsB;QACxB,IAAIA,qBAAqBE,MAAM,KAAK,GAAG;YACrC,OAAO;gBACLC,MAAM,EAAE;gBACRpB,aAAa;gBACbD,aAAa;gBACbnB;gBACAyC,UAAU;gBACVtC,MAAM;gBACNkB,eAAe;gBACfqB,UAAU;gBACVzB,WAAW;gBACXC,YAAY;YACd;QACF,OAAO;YACL,4CAA4C;YAC5CmB,qBAAqBM,OAAO,CAAC,CAAC,EAAEC,EAAE,EAAE,EAAEC;gBACpCnB,YAAY,CAACkB,GAAG,GAAGC;YACrB;YACAlB,aAAamB,OAAOC,IAAI,CAACrB;YACzBQ,aAAazB,KAAK,GAAGpB,QAAQS,QAAQkB,MAAM,CAACR,UAAU,CAACoC,EAAE,EAAEjB;QAC7D;IACF,OAAO;QACLO,aAAalC,KAAK,GAAGC,aAAa,IAAImC,YAAYnC;QAElD,MAAM+C,SAAS1C,QAAQ,AAACH,CAAAA,OAAO,CAAA,IAAKF;QAEpC,IAAI,CAACgD,OAAOC,KAAK,CAACF,SAASd,aAAac,MAAM,GAAGA;QAEjD,IAAIvC,OAAO;YACTyB,aAAazB,KAAK,GAAGA;QACvB;QACAyB,aAAaV,OAAO,GAAGA,QAAQK,KAAK,CAACL,QAAQM,MAAM;IACrD;IAEA,MAAMqB,cAAcxC,GAAGyC,KAAK,CAAC5C,UAAU,CAACZ,QAAQ,CAACsC;IAEjD,IAAI9B,eAAe,SAAUuB,CAAAA,aAAaA,YAAYY,UAAUvC,QAAQ,IAAG,GAAI;QAC7E,MAAMqD,qBAAqC,EAAE;QAE7C/B,YAAYqB,OAAO,CAAC,CAAC,EAAEW,SAAS,EAAEvC,KAAK,EAAE;YACvCsC,mBAAmBtB,IAAI,CAAC;gBACtBC,MAAM;oBAACjB;oBAAOuC;iBAAU;gBACxBrB,QAAQ;YACV;QACF;QAEAa,OAAOS,OAAO,CAAChC,OAAOoB,OAAO,CAAC,CAAC,CAACa,WAAWF,UAAU;YACnD,IAAIE,WAAW;gBACbH,mBAAmBtB,IAAI,CAAC;oBACtBC,MAAM;wBAAClC,QAAQkB,MAAM,CAACwC,UAAU;wBAAEF;qBAAU;oBAC5CrB,QAAQ;gBACV;YACF;QACF;QAEA,MAAMwB,cAAc,MAAM9D,aAAa;YACrC+D,SAASL;YACTD,OAAOzC,GACJgD,MAAM,CAAC;gBACNC,OAAOtE,GAAW,CAAC;wBACL,EAAEQ,QAAQkB,MAAM,CAACR,UAAU,CAACoC,EAAE,CAAC,CAAC,CAAC;YACjD,GACCiB,IAAI,CAAC9C,OACLN,KAAK,CAACA;QACX;QACAQ,YAAYgC,OAAOQ,WAAW,CAAC,EAAE,CAACG,KAAK;QACvC1C,aAAa,OAAOlB,UAAU,YAAYA,UAAU,IAAI8D,KAAKC,IAAI,CAAC9C,YAAYjB,SAAS;QACvFmB,cAAchB,OAAO;QACrBiB,cAAcF,aAAaf;QAC3BkB,gBAAgB,AAAClB,CAAAA,OAAO,CAAA,IAAKH,QAAQ;IACvC;IAEA,MAAMgE,UAAU,MAAMb;IACtB,gCAAgC;IAChC,IAAIL,OAAOC,IAAI,CAACrB,cAAca,MAAM,GAAG,GAAG;QACxCyB,QAAQzD,IAAI,CAAC,CAAC0D,GAAGC,IAAMxC,YAAY,CAACuC,EAAErB,EAAE,CAAC,GAAGlB,YAAY,CAACwC,EAAEtB,EAAE,CAAC;IAChE;IAEA,IAAIxC,eAAe,SAAS,CAACa,WAAW;QACtCA,YAAY+C,QAAQzB,MAAM;QAC1BrB,aAAa;QACbG,gBAAgB;QAChBF,cAAc;QACdC,cAAc;IAChB;IAEA,MAAMoB,OAAOwB,QAAQG,GAAG,CAAC,CAACC;QACxB,OAAO3E,UAAU;YACf4E,QAAQvE,QAAQwE,OAAO,CAACD,MAAM;YAC9BD;YACArE;QACF;IACF;IAEA,OAAO;QACLyC;QACApB;QACAD;QACAnB;QACAyC,UAAUrB,cAAcjB,OAAO,IAAI;QACnCA;QACAkB;QACAqB,UAAUvB,cAAchB,OAAO,IAAI;QACnCc;QACAC;IACF;AACF,EAAC"}
package/dist/migrate.d.ts.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"migrate.d.ts","sourceRoot":"","sources":["../src/migrate.ts"],"names":[],"mappings":"
+
{"version":3,"file":"migrate.d.ts","sourceRoot":"","sources":["../src/migrate.ts"],"names":[],"mappings":"AAcA,OAAO,KAAK,EAAE,eAAe,EAAE,MAAM,YAAY,CAAA;AAQjD,wBAAsB,OAAO,CAAC,IAAI,EAAE,eAAe,GAAG,OAAO,CAAC,IAAI,CAAC,CA8DlE"}
package/dist/migrate.js
CHANGED
@@ -1,8 +1,10 @@
-/* eslint-disable no-restricted-syntax, no-await-in-loop */ import {
+/* eslint-disable no-restricted-syntax, no-await-in-loop */ import { createRequire } from 'module';
+import { commitTransaction, initTransaction, killTransaction, readMigrationFiles } from 'payload/database';
 import prompts from 'prompts';
 import { createMigrationTable } from './utilities/createMigrationTable.js';
 import { migrationTableExists } from './utilities/migrationTableExists.js';
 import { parseError } from './utilities/parseError.js';
+const require = createRequire(import.meta.url);
 export async function migrate() {
 const { payload } = this;
 const migrationFiles = await readMigrationFiles({
@@ -56,7 +58,7 @@ export async function migrate() {
 }
 }
 async function runMigrationFile(payload, migration, batch) {
-const { generateDrizzleJson } = require
+const { generateDrizzleJson } = require('drizzle-kit/payload');
 const start = Date.now();
 const req = {
 payload
package/dist/migrate.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"sources":["../src/migrate.ts"],"sourcesContent":["/* eslint-disable no-restricted-syntax, no-await-in-loop */\nimport type { Payload } from 'payload'\nimport type { Migration } from 'payload/database'\nimport type { PayloadRequest } from 'payload/types'\n\nimport {\n commitTransaction,\n initTransaction,\n killTransaction,\n readMigrationFiles,\n} from 'payload/database'\nimport prompts from 'prompts'\n\nimport type { PostgresAdapter } from './types.js'\n\nimport { createMigrationTable } from './utilities/createMigrationTable.js'\nimport { migrationTableExists } from './utilities/migrationTableExists.js'\nimport { parseError } from './utilities/parseError.js'\n\nexport async function migrate(this: PostgresAdapter): Promise<void> {\n const { payload } = this\n const migrationFiles = await readMigrationFiles({ payload })\n\n if (!migrationFiles.length) {\n payload.logger.info({ msg: 'No migrations to run.' })\n return\n }\n\n let latestBatch = 0\n let migrationsInDB = []\n\n const hasMigrationTable = await migrationTableExists(this.drizzle)\n\n if (hasMigrationTable) {\n ;({ docs: migrationsInDB } = await payload.find({\n collection: 'payload-migrations',\n limit: 0,\n sort: '-name',\n }))\n if (Number(migrationsInDB?.[0]?.batch) > 0) {\n latestBatch = Number(migrationsInDB[0]?.batch)\n }\n } else {\n await createMigrationTable(this)\n }\n\n if (migrationsInDB.find((m) => m.batch === -1)) {\n const { confirm: runMigrations } = await prompts(\n {\n name: 'confirm',\n type: 'confirm',\n initial: false,\n message:\n \"It looks like you've run Payload in dev mode, meaning you've dynamically pushed changes to your database.\\n\\n\" +\n \"If you'd like to run migrations, data loss will occur. Would you like to proceed?\",\n },\n {\n onCancel: () => {\n process.exit(0)\n },\n },\n )\n\n if (!runMigrations) {\n process.exit(0)\n }\n }\n\n const newBatch = latestBatch + 1\n\n // Execute 'up' function for each migration sequentially\n for (const migration of migrationFiles) {\n const alreadyRan = migrationsInDB.find((existing) => existing.name === migration.name)\n\n // If already ran, skip\n if (alreadyRan) {\n continue // eslint-disable-line no-continue\n }\n\n await runMigrationFile(payload, migration, newBatch)\n }\n}\n\nasync function runMigrationFile(payload: Payload, migration: Migration, batch: number) {\n const { generateDrizzleJson } = require
+
{"version":3,"sources":["../src/migrate.ts"],"sourcesContent":["/* eslint-disable no-restricted-syntax, no-await-in-loop */\nimport type { Payload } from 'payload'\nimport type { Migration } from 'payload/database'\nimport type { PayloadRequest } from 'payload/types'\n\nimport { createRequire } from 'module'\nimport {\n commitTransaction,\n initTransaction,\n killTransaction,\n readMigrationFiles,\n} from 'payload/database'\nimport prompts from 'prompts'\n\nimport type { PostgresAdapter } from './types.js'\n\nimport { createMigrationTable } from './utilities/createMigrationTable.js'\nimport { migrationTableExists } from './utilities/migrationTableExists.js'\nimport { parseError } from './utilities/parseError.js'\n\nconst require = createRequire(import.meta.url)\n\nexport async function migrate(this: PostgresAdapter): Promise<void> {\n const { payload } = this\n const migrationFiles = await readMigrationFiles({ payload })\n\n if (!migrationFiles.length) {\n payload.logger.info({ msg: 'No migrations to run.' })\n return\n }\n\n let latestBatch = 0\n let migrationsInDB = []\n\n const hasMigrationTable = await migrationTableExists(this.drizzle)\n\n if (hasMigrationTable) {\n ;({ docs: migrationsInDB } = await payload.find({\n collection: 'payload-migrations',\n limit: 0,\n sort: '-name',\n }))\n if (Number(migrationsInDB?.[0]?.batch) > 0) {\n latestBatch = Number(migrationsInDB[0]?.batch)\n }\n } else {\n await createMigrationTable(this)\n }\n\n if (migrationsInDB.find((m) => m.batch === -1)) {\n const { confirm: runMigrations } = await prompts(\n {\n name: 'confirm',\n type: 'confirm',\n initial: false,\n message:\n \"It looks like you've run Payload in dev mode, meaning you've dynamically pushed changes to your database.\\n\\n\" +\n \"If you'd like to run migrations, data loss will occur. 
Would you like to proceed?\",\n },\n {\n onCancel: () => {\n process.exit(0)\n },\n },\n )\n\n if (!runMigrations) {\n process.exit(0)\n }\n }\n\n const newBatch = latestBatch + 1\n\n // Execute 'up' function for each migration sequentially\n for (const migration of migrationFiles) {\n const alreadyRan = migrationsInDB.find((existing) => existing.name === migration.name)\n\n // If already ran, skip\n if (alreadyRan) {\n continue // eslint-disable-line no-continue\n }\n\n await runMigrationFile(payload, migration, newBatch)\n }\n}\n\nasync function runMigrationFile(payload: Payload, migration: Migration, batch: number) {\n const { generateDrizzleJson } = require('drizzle-kit/payload')\n\n const start = Date.now()\n const req = { payload } as PayloadRequest\n\n payload.logger.info({ msg: `Migrating: ${migration.name}` })\n\n const pgAdapter = payload.db as PostgresAdapter\n const drizzleJSON = generateDrizzleJson(pgAdapter.schema)\n\n try {\n await initTransaction(req)\n await migration.up({ payload, req })\n payload.logger.info({ msg: `Migrated: ${migration.name} (${Date.now() - start}ms)` })\n await payload.create({\n collection: 'payload-migrations',\n data: {\n name: migration.name,\n batch,\n schema: drizzleJSON,\n },\n req,\n })\n await commitTransaction(req)\n } catch (err: unknown) {\n await killTransaction(req)\n payload.logger.error({\n err,\n msg: parseError(err, `Error running migration ${migration.name}`),\n })\n }\n}\n"],"names":["createRequire","commitTransaction","initTransaction","killTransaction","readMigrationFiles","prompts","createMigrationTable","migrationTableExists","parseError","require","url","migrate","payload","migrationFiles","length","logger","info","msg","latestBatch","migrationsInDB","hasMigrationTable","drizzle","docs","find","collection","limit","sort","Number","batch","m","confirm","runMigrations","name","type","initial","message","onCancel","process","exit","newBatch","migration","alreadyRan","existing","runMigrationFile","generateDrizzleJson","start","Date","now","req","pgAdapter","db","drizzleJSON","schema","up","create","data","err","error"],"mappings":"AAAA,yDAAyD,GAKzD,SAASA,aAAa,QAAQ,SAAQ;AACtC,SACEC,iBAAiB,EACjBC,eAAe,EACfC,eAAe,EACfC,kBAAkB,QACb,mBAAkB;AACzB,OAAOC,aAAa,UAAS;AAI7B,SAASC,oBAAoB,QAAQ,sCAAqC;AAC1E,SAASC,oBAAoB,QAAQ,sCAAqC;AAC1E,SAASC,UAAU,QAAQ,4BAA2B;AAEtD,MAAMC,UAAUT,cAAc,YAAYU,GAAG;AAE7C,OAAO,eAAeC;IACpB,MAAM,EAAEC,OAAO,EAAE,GAAG,IAAI;IACxB,MAAMC,iBAAiB,MAAMT,mBAAmB;QAAEQ;IAAQ;IAE1D,IAAI,CAACC,eAAeC,MAAM,EAAE;QAC1BF,QAAQG,MAAM,CAACC,IAAI,CAAC;YAAEC,KAAK;QAAwB;QACnD;IACF;IAEA,IAAIC,cAAc;IAClB,IAAIC,iBAAiB,EAAE;IAEvB,MAAMC,oBAAoB,MAAMb,qBAAqB,IAAI,CAACc,OAAO;IAEjE,IAAID,mBAAmB;QACnB,CAAA,EAAEE,MAAMH,cAAc,EAAE,GAAG,MAAMP,QAAQW,IAAI,CAAC;YAC9CC,YAAY;YACZC,OAAO;YACPC,MAAM;QACR,EAAC;QACD,IAAIC,OAAOR,gBAAgB,CAAC,EAAE,EAAES,SAAS,GAAG;YAC1CV,cAAcS,OAAOR,cAAc,CAAC,EAAE,EAAES;QAC1C;IACF,OAAO;QACL,MAAMtB,qBAAqB,IAAI;IACjC;IAEA,IAAIa,eAAeI,IAAI,CAAC,CAACM,IAAMA,EAAED,KAAK,KAAK,CAAC,IAAI;QAC9C,MAAM,EAAEE,SAASC,aAAa,EAAE,GAAG,MAAM1B,QACvC;YACE2B,MAAM;YACNC,MAAM;YACNC,SAAS;YACTC,SACE,kHACA;QACJ,GACA;YACEC,UAAU;gBACRC,QAAQC,IAAI,CAAC;YACf;QACF;QAGF,IAAI,CAACP,eAAe;YAClBM,QAAQC,IAAI,CAAC;QACf;IACF;IAEA,MAAMC,WAAWrB,cAAc;IAE/B,wDAAwD;IACxD,KAAK,MAAMsB,aAAa3B,eAAgB;QACtC,MAAM4B,aAAatB,eAAeI,IAAI,CAAC,CAACmB,WAAaA,SAASV,IAAI,KAAKQ,UAAUR,IAAI;QAErF,uBAAuB;QACvB,IAAIS,YAAY;YACd,UAAS,kCAAkC;QAC7C;QAEA,MAAME,iBAAiB/B,SAAS4B,WAAWD;IAC7C;AACF;AAEA,eAAeI,iBAAiB/B,OAAgB,EAAE4B,SAAoB,EAAEZ,KAAa;IACnF,MAAM,EAAEgB,mBAAmB,EAAE,GAAGnC,QAAQ;IAExC,MAAMoC,QAAQC,KAAKC,GAA
G;IACtB,MAAMC,MAAM;QAAEpC;IAAQ;IAEtBA,QAAQG,MAAM,CAACC,IAAI,CAAC;QAAEC,KAAK,CAAC,WAAW,EAAEuB,UAAUR,IAAI,CAAC,CAAC;IAAC;IAE1D,MAAMiB,YAAYrC,QAAQsC,EAAE;IAC5B,MAAMC,cAAcP,oBAAoBK,UAAUG,MAAM;IAExD,IAAI;QACF,MAAMlD,gBAAgB8C;QACtB,MAAMR,UAAUa,EAAE,CAAC;YAAEzC;YAASoC;QAAI;QAClCpC,QAAQG,MAAM,CAACC,IAAI,CAAC;YAAEC,KAAK,CAAC,WAAW,EAAEuB,UAAUR,IAAI,CAAC,EAAE,EAAEc,KAAKC,GAAG,KAAKF,MAAM,GAAG,CAAC;QAAC;QACpF,MAAMjC,QAAQ0C,MAAM,CAAC;YACnB9B,YAAY;YACZ+B,MAAM;gBACJvB,MAAMQ,UAAUR,IAAI;gBACpBJ;gBACAwB,QAAQD;YACV;YACAH;QACF;QACA,MAAM/C,kBAAkB+C;IAC1B,EAAE,OAAOQ,KAAc;QACrB,MAAMrD,gBAAgB6C;QACtBpC,QAAQG,MAAM,CAAC0C,KAAK,CAAC;YACnBD;YACAvC,KAAKT,WAAWgD,KAAK,CAAC,wBAAwB,EAAEhB,UAAUR,IAAI,CAAC,CAAC;QAClE;IACF;AACF"}
package/dist/queries/sanitizeQueryValue.d.ts.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"sanitizeQueryValue.d.ts","sourceRoot":"","sources":["../../src/queries/sanitizeQueryValue.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,KAAK,KAAK,EAAE,KAAK,UAAU,EAAoB,MAAM,eAAe,CAAA;AAG7E,OAAO,KAAK,EAAE,eAAe,EAAE,MAAM,aAAa,CAAA;AAElD,KAAK,sBAAsB,GAAG;IAC5B,OAAO,EAAE,eAAe,CAAA;IACxB,KAAK,EAAE,KAAK,GAAG,UAAU,CAAA;IACzB,QAAQ,EAAE,MAAM,CAAA;IAChB,cAAc,EAAE,MAAM,CAAA;IACtB,GAAG,EAAE,GAAG,CAAA;CACT,CAAA;AAED,eAAO,MAAM,kBAAkB,oEAM5B,sBAAsB;cAAe,MAAM;WAAS,OAAO;
+
{"version":3,"file":"sanitizeQueryValue.d.ts","sourceRoot":"","sources":["../../src/queries/sanitizeQueryValue.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,KAAK,KAAK,EAAE,KAAK,UAAU,EAAoB,MAAM,eAAe,CAAA;AAG7E,OAAO,KAAK,EAAE,eAAe,EAAE,MAAM,aAAa,CAAA;AAElD,KAAK,sBAAsB,GAAG;IAC5B,OAAO,EAAE,eAAe,CAAA;IACxB,KAAK,EAAE,KAAK,GAAG,UAAU,CAAA;IACzB,QAAQ,EAAE,MAAM,CAAA;IAChB,cAAc,EAAE,MAAM,CAAA;IACtB,GAAG,EAAE,GAAG,CAAA;CACT,CAAA;AAED,eAAO,MAAM,kBAAkB,oEAM5B,sBAAsB;cAAe,MAAM;WAAS,OAAO;CAyF7D,CAAA"}
package/dist/queries/sanitizeQueryValue.js
CHANGED
@@ -44,7 +44,7 @@ export const sanitizeQueryValue = ({ adapter, field, operator: operatorArg, rela
 if (field.type === 'number' && typeof formattedValue === 'string') {
 formattedValue = Number(val);
 }
-if (field.type === 'date') {
+if (field.type === 'date' && operator !== 'exists') {
 if (typeof val === 'string') {
 formattedValue = new Date(val);
 if (Number.isNaN(Date.parse(formattedValue))) {
@@ -66,6 +66,9 @@ export const sanitizeQueryValue = ({ adapter, field, operator: operatorArg, rela
 formattedValue = null;
 }
 }
+if ('hasMany' in field && field.hasMany && operator === 'contains') {
+operator = 'equals';
+}
 if (operator === 'near' || operator === 'within' || operator === 'intersects') {
 throw new APIError(`Querying with '${operator}' is not supported with the postgres database adapter.`);
 }
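Two query-sanitizing fixes land here: date fields skip the new Date(val) coercion when the operator is 'exists' (the boolean-ish value would otherwise fail Date.parse), and 'contains' against a hasMany field is rewritten to 'equals', which is how the adapter matches a single value inside an array-backed column. Illustrative Payload Local API queries that hit these paths (collection and field names are made up, and payload is assumed to be an already-initialized Payload instance):

```ts
// Assumption: `payload` is an initialized Payload Local API instance.

// 1) `exists` on a date field: 'true'/'false' is no longer coerced via new Date(...).
const unpublished = await payload.find({
  collection: 'posts',
  where: { publishedAt: { exists: false } },
})

// 2) `contains` on a hasMany field is treated as `equals` by the postgres adapter.
const tagged = await payload.find({
  collection: 'posts',
  where: { tags: { contains: 'news' } },
})
```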
package/dist/queries/sanitizeQueryValue.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"sources":["../../src/queries/sanitizeQueryValue.ts"],"sourcesContent":["import { APIError } from 'payload/errors'\nimport { type Field, type TabAsField, fieldAffectsData } from 'payload/types'\nimport { createArrayFromCommaDelineated } from 'payload/utilities'\n\nimport type { PostgresAdapter } from '../types.js'\n\ntype SanitizeQueryValueArgs = {\n adapter: PostgresAdapter\n field: Field | TabAsField\n operator: string\n relationOrPath: string\n val: any\n}\n\nexport const sanitizeQueryValue = ({\n adapter,\n field,\n operator: operatorArg,\n relationOrPath,\n val,\n}: SanitizeQueryValueArgs): { operator: string; value: unknown } => {\n let operator = operatorArg\n let formattedValue = val\n\n if (!fieldAffectsData(field)) return { operator, value: formattedValue }\n\n if (\n (field.type === 'relationship' || field.type === 'upload') &&\n !relationOrPath.endsWith('relationTo') &&\n Array.isArray(formattedValue)\n ) {\n const allPossibleIDTypes: (number | string)[] = []\n formattedValue.forEach((val) => {\n if (adapter.idType !== 'uuid' && typeof val === 'string') {\n allPossibleIDTypes.push(val, parseInt(val))\n } else if (typeof val === 'string') {\n allPossibleIDTypes.push(val)\n } else {\n allPossibleIDTypes.push(val, String(val))\n }\n })\n formattedValue = allPossibleIDTypes\n }\n\n // Cast incoming values as proper searchable types\n if (field.type === 'checkbox' && typeof val === 'string') {\n if (val.toLowerCase() === 'true') formattedValue = true\n if (val.toLowerCase() === 'false') formattedValue = false\n }\n\n if (['all', 'in', 'not_in'].includes(operator)) {\n if (typeof formattedValue === 'string') {\n formattedValue = createArrayFromCommaDelineated(formattedValue)\n\n if (field.type === 'number') {\n formattedValue = formattedValue.map((arrayVal) => parseFloat(arrayVal))\n }\n }\n\n if (!Array.isArray(formattedValue) || formattedValue.length === 0) {\n return null\n }\n }\n\n if (field.type === 'number' && typeof formattedValue === 'string') {\n formattedValue = Number(val)\n }\n\n if (field.type === 'date') {\n if (typeof val === 'string') {\n formattedValue = new Date(val)\n if (Number.isNaN(Date.parse(formattedValue))) {\n return { operator, value: undefined }\n }\n }\n\n if (typeof val === 'number') {\n formattedValue = new Date(val)\n }\n }\n\n if (['relationship', 'upload'].includes(field.type)) {\n if (val === 'null') {\n formattedValue = null\n }\n }\n\n if (operator === 'near' || operator === 'within' || operator === 'intersects') {\n throw new APIError(\n `Querying with '${operator}' is not supported with the postgres database adapter.`,\n )\n }\n\n if (operator === 'contains') {\n formattedValue = `%${formattedValue}%`\n }\n\n if (operator === 'exists') {\n formattedValue = formattedValue === 'true' || formattedValue === true\n if (formattedValue === false) {\n operator = 'isNull'\n }\n }\n\n return { operator, value: formattedValue 
}\n}\n"],"names":["APIError","fieldAffectsData","createArrayFromCommaDelineated","sanitizeQueryValue","adapter","field","operator","operatorArg","relationOrPath","val","formattedValue","value","type","endsWith","Array","isArray","allPossibleIDTypes","forEach","idType","push","parseInt","String","toLowerCase","includes","map","arrayVal","parseFloat","length","Number","Date","isNaN","parse","undefined"],"mappings":"AAAA,SAASA,QAAQ,QAAQ,iBAAgB;AACzC,SAAsCC,gBAAgB,QAAQ,gBAAe;AAC7E,SAASC,8BAA8B,QAAQ,oBAAmB;AAYlE,OAAO,MAAMC,qBAAqB,CAAC,EACjCC,OAAO,EACPC,KAAK,EACLC,UAAUC,WAAW,EACrBC,cAAc,EACdC,GAAG,EACoB;IACvB,IAAIH,WAAWC;IACf,IAAIG,iBAAiBD;IAErB,IAAI,CAACR,iBAAiBI,QAAQ,OAAO;QAAEC;QAAUK,OAAOD;IAAe;IAEvE,IACE,AAACL,CAAAA,MAAMO,IAAI,KAAK,kBAAkBP,MAAMO,IAAI,KAAK,QAAO,KACxD,CAACJ,eAAeK,QAAQ,CAAC,iBACzBC,MAAMC,OAAO,CAACL,iBACd;QACA,MAAMM,qBAA0C,EAAE;QAClDN,eAAeO,OAAO,CAAC,CAACR;YACtB,IAAIL,QAAQc,MAAM,KAAK,UAAU,OAAOT,QAAQ,UAAU;gBACxDO,mBAAmBG,IAAI,CAACV,KAAKW,SAASX;YACxC,OAAO,IAAI,OAAOA,QAAQ,UAAU;gBAClCO,mBAAmBG,IAAI,CAACV;YAC1B,OAAO;gBACLO,mBAAmBG,IAAI,CAACV,KAAKY,OAAOZ;YACtC;QACF;QACAC,iBAAiBM;IACnB;IAEA,kDAAkD;IAClD,IAAIX,MAAMO,IAAI,KAAK,cAAc,OAAOH,QAAQ,UAAU;QACxD,IAAIA,IAAIa,WAAW,OAAO,QAAQZ,iBAAiB;QACnD,IAAID,IAAIa,WAAW,OAAO,SAASZ,iBAAiB;IACtD;IAEA,IAAI;QAAC;QAAO;QAAM;KAAS,CAACa,QAAQ,CAACjB,WAAW;QAC9C,IAAI,OAAOI,mBAAmB,UAAU;YACtCA,iBAAiBR,+BAA+BQ;YAEhD,IAAIL,MAAMO,IAAI,KAAK,UAAU;gBAC3BF,iBAAiBA,eAAec,GAAG,CAAC,CAACC,WAAaC,WAAWD;YAC/D;QACF;QAEA,IAAI,CAACX,MAAMC,OAAO,CAACL,mBAAmBA,eAAeiB,MAAM,KAAK,GAAG;YACjE,OAAO;QACT;IACF;IAEA,IAAItB,MAAMO,IAAI,KAAK,YAAY,OAAOF,mBAAmB,UAAU;QACjEA,iBAAiBkB,OAAOnB;IAC1B;IAEA,IAAIJ,MAAMO,IAAI,KAAK,
+
{"version":3,"sources":["../../src/queries/sanitizeQueryValue.ts"],"sourcesContent":["import { APIError } from 'payload/errors'\nimport { type Field, type TabAsField, fieldAffectsData } from 'payload/types'\nimport { createArrayFromCommaDelineated } from 'payload/utilities'\n\nimport type { PostgresAdapter } from '../types.js'\n\ntype SanitizeQueryValueArgs = {\n adapter: PostgresAdapter\n field: Field | TabAsField\n operator: string\n relationOrPath: string\n val: any\n}\n\nexport const sanitizeQueryValue = ({\n adapter,\n field,\n operator: operatorArg,\n relationOrPath,\n val,\n}: SanitizeQueryValueArgs): { operator: string; value: unknown } => {\n let operator = operatorArg\n let formattedValue = val\n\n if (!fieldAffectsData(field)) return { operator, value: formattedValue }\n\n if (\n (field.type === 'relationship' || field.type === 'upload') &&\n !relationOrPath.endsWith('relationTo') &&\n Array.isArray(formattedValue)\n ) {\n const allPossibleIDTypes: (number | string)[] = []\n formattedValue.forEach((val) => {\n if (adapter.idType !== 'uuid' && typeof val === 'string') {\n allPossibleIDTypes.push(val, parseInt(val))\n } else if (typeof val === 'string') {\n allPossibleIDTypes.push(val)\n } else {\n allPossibleIDTypes.push(val, String(val))\n }\n })\n formattedValue = allPossibleIDTypes\n }\n\n // Cast incoming values as proper searchable types\n if (field.type === 'checkbox' && typeof val === 'string') {\n if (val.toLowerCase() === 'true') formattedValue = true\n if (val.toLowerCase() === 'false') formattedValue = false\n }\n\n if (['all', 'in', 'not_in'].includes(operator)) {\n if (typeof formattedValue === 'string') {\n formattedValue = createArrayFromCommaDelineated(formattedValue)\n\n if (field.type === 'number') {\n formattedValue = formattedValue.map((arrayVal) => parseFloat(arrayVal))\n }\n }\n\n if (!Array.isArray(formattedValue) || formattedValue.length === 0) {\n return null\n }\n }\n\n if (field.type === 'number' && typeof formattedValue === 'string') {\n formattedValue = Number(val)\n }\n\n if (field.type === 'date' && operator !== 'exists') {\n if (typeof val === 'string') {\n formattedValue = new Date(val)\n if (Number.isNaN(Date.parse(formattedValue))) {\n return { operator, value: undefined }\n }\n }\n\n if (typeof val === 'number') {\n formattedValue = new Date(val)\n }\n }\n\n if (['relationship', 'upload'].includes(field.type)) {\n if (val === 'null') {\n formattedValue = null\n }\n }\n\n if ('hasMany' in field && field.hasMany && operator === 'contains') {\n operator = 'equals'\n }\n\n if (operator === 'near' || operator === 'within' || operator === 'intersects') {\n throw new APIError(\n `Querying with '${operator}' is not supported with the postgres database adapter.`,\n )\n }\n\n if (operator === 'contains') {\n formattedValue = `%${formattedValue}%`\n }\n\n if (operator === 'exists') {\n formattedValue = formattedValue === 'true' || formattedValue === true\n if (formattedValue === false) {\n operator = 'isNull'\n }\n }\n\n return { operator, value: formattedValue 
}\n}\n"],"names":["APIError","fieldAffectsData","createArrayFromCommaDelineated","sanitizeQueryValue","adapter","field","operator","operatorArg","relationOrPath","val","formattedValue","value","type","endsWith","Array","isArray","allPossibleIDTypes","forEach","idType","push","parseInt","String","toLowerCase","includes","map","arrayVal","parseFloat","length","Number","Date","isNaN","parse","undefined","hasMany"],"mappings":"AAAA,SAASA,QAAQ,QAAQ,iBAAgB;AACzC,SAAsCC,gBAAgB,QAAQ,gBAAe;AAC7E,SAASC,8BAA8B,QAAQ,oBAAmB;AAYlE,OAAO,MAAMC,qBAAqB,CAAC,EACjCC,OAAO,EACPC,KAAK,EACLC,UAAUC,WAAW,EACrBC,cAAc,EACdC,GAAG,EACoB;IACvB,IAAIH,WAAWC;IACf,IAAIG,iBAAiBD;IAErB,IAAI,CAACR,iBAAiBI,QAAQ,OAAO;QAAEC;QAAUK,OAAOD;IAAe;IAEvE,IACE,AAACL,CAAAA,MAAMO,IAAI,KAAK,kBAAkBP,MAAMO,IAAI,KAAK,QAAO,KACxD,CAACJ,eAAeK,QAAQ,CAAC,iBACzBC,MAAMC,OAAO,CAACL,iBACd;QACA,MAAMM,qBAA0C,EAAE;QAClDN,eAAeO,OAAO,CAAC,CAACR;YACtB,IAAIL,QAAQc,MAAM,KAAK,UAAU,OAAOT,QAAQ,UAAU;gBACxDO,mBAAmBG,IAAI,CAACV,KAAKW,SAASX;YACxC,OAAO,IAAI,OAAOA,QAAQ,UAAU;gBAClCO,mBAAmBG,IAAI,CAACV;YAC1B,OAAO;gBACLO,mBAAmBG,IAAI,CAACV,KAAKY,OAAOZ;YACtC;QACF;QACAC,iBAAiBM;IACnB;IAEA,kDAAkD;IAClD,IAAIX,MAAMO,IAAI,KAAK,cAAc,OAAOH,QAAQ,UAAU;QACxD,IAAIA,IAAIa,WAAW,OAAO,QAAQZ,iBAAiB;QACnD,IAAID,IAAIa,WAAW,OAAO,SAASZ,iBAAiB;IACtD;IAEA,IAAI;QAAC;QAAO;QAAM;KAAS,CAACa,QAAQ,CAACjB,WAAW;QAC9C,IAAI,OAAOI,mBAAmB,UAAU;YACtCA,iBAAiBR,+BAA+BQ;YAEhD,IAAIL,MAAMO,IAAI,KAAK,UAAU;gBAC3BF,iBAAiBA,eAAec,GAAG,CAAC,CAACC,WAAaC,WAAWD;YAC/D;QACF;QAEA,IAAI,CAACX,MAAMC,OAAO,CAACL,mBAAmBA,eAAeiB,MAAM,KAAK,GAAG;YACjE,OAAO;QACT;IACF;IAEA,IAAItB,MAAMO,IAAI,KAAK,YAAY,OAAOF,mBAAmB,UAAU;QACjEA,iBAAiBkB,OAAOnB;IAC1B;IAEA,IAAIJ,MAAMO,IAAI,KAAK,UAAUN,aAAa,UAAU;QAClD,IAAI,OAAOG,QAAQ,UAAU;YAC3BC,iBAAiB,IAAImB,KAAKpB;YAC1B,IAAImB,OAAOE,KAAK,CAACD,KAAKE,KAAK,CAACrB,kBAAkB;gBAC5C,OAAO;oBAAEJ;oBAAUK,OAAOqB;gBAAU;YACtC;QACF;QAEA,IAAI,OAAOvB,QAAQ,UAAU;YAC3BC,iBAAiB,IAAImB,KAAKpB;QAC5B;IACF;IAEA,IAAI;QAAC;QAAgB;KAAS,CAACc,QAAQ,CAAClB,MAAMO,IAAI,GAAG;QACnD,IAAIH,QAAQ,QAAQ;YAClBC,iBAAiB;QACnB;IACF;IAEA,IAAI,aAAaL,SAASA,MAAM4B,OAAO,IAAI3B,aAAa,YAAY;QAClEA,WAAW;IACb;IAEA,IAAIA,aAAa,UAAUA,aAAa,YAAYA,aAAa,cAAc;QAC7E,MAAM,IAAIN,SACR,CAAC,eAAe,EAAEM,SAAS,sDAAsD,CAAC;IAEtF;IAEA,IAAIA,aAAa,YAAY;QAC3BI,iBAAiB,CAAC,CAAC,EAAEA,eAAe,CAAC,CAAC;IACxC;IAEA,IAAIJ,aAAa,UAAU;QACzBI,iBAAiBA,mBAAmB,UAAUA,mBAAmB;QACjE,IAAIA,mBAAmB,OAAO;YAC5BJ,WAAW;QACb;IACF;IAEA,OAAO;QAAEA;QAAUK,OAAOD;IAAe;AAC3C,EAAC"}
package/dist/queries/selectDistinct.d.ts
ADDED
@@ -0,0 +1,23 @@
+import type { QueryPromise, SQL } from 'drizzle-orm';
+import type { ChainedMethods } from '../find/chainMethods.js';
+import type { DrizzleDB, PostgresAdapter } from '../types.js';
+import type { BuildQueryJoinAliases, BuildQueryJoins } from './buildQuery.js';
+import { type GenericColumn } from '../types.js';
+type Args = {
+adapter: PostgresAdapter;
+chainedMethods?: ChainedMethods;
+db: DrizzleDB;
+joinAliases: BuildQueryJoinAliases;
+joins: BuildQueryJoins;
+selectFields: Record<string, GenericColumn>;
+tableName: string;
+where: SQL;
+};
+/**
+* Selects distinct records from a table only if there are joins that need to be used, otherwise return null
+*/
+export declare const selectDistinct: ({ adapter, chainedMethods, db, joinAliases, joins, selectFields, tableName, where, }: Args) => QueryPromise<Record<string, GenericColumn> & {
+id: number | string;
+}[]>;
+export {};
+//# sourceMappingURL=selectDistinct.d.ts.map
package/dist/queries/selectDistinct.d.ts.map
ADDED
@@ -0,0 +1 @@
+
{"version":3,"file":"selectDistinct.d.ts","sourceRoot":"","sources":["../../src/queries/selectDistinct.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,YAAY,EAAE,GAAG,EAAE,MAAM,aAAa,CAAA;AAEpD,OAAO,KAAK,EAAE,cAAc,EAAE,MAAM,yBAAyB,CAAA;AAC7D,OAAO,KAAK,EAAE,SAAS,EAAE,eAAe,EAAE,MAAM,aAAa,CAAA;AAC7D,OAAO,KAAK,EAAE,qBAAqB,EAAE,eAAe,EAAE,MAAM,iBAAiB,CAAA;AAG7E,OAAO,EAAE,KAAK,aAAa,EAAE,MAAM,aAAa,CAAA;AAEhD,KAAK,IAAI,GAAG;IACV,OAAO,EAAE,eAAe,CAAA;IACxB,cAAc,CAAC,EAAE,cAAc,CAAA;IAC/B,EAAE,EAAE,SAAS,CAAA;IACb,WAAW,EAAE,qBAAqB,CAAA;IAClC,KAAK,EAAE,eAAe,CAAA;IACtB,YAAY,EAAE,MAAM,CAAC,MAAM,EAAE,aAAa,CAAC,CAAA;IAC3C,SAAS,EAAE,MAAM,CAAA;IACjB,KAAK,EAAE,GAAG,CAAA;CACX,CAAA;AAED;;GAEG;AACH,eAAO,MAAM,cAAc,yFASxB,IAAI,KAAG,aAAa,OAAO,MAAM,EAAE,aAAa,CAAC,GAAG;IAAE,EAAE,EAAE,MAAM,GAAG,MAAM,CAAA;CAAE,EAAE,CA2B/E,CAAA"}
package/dist/queries/selectDistinct.js
ADDED
@@ -0,0 +1,41 @@
+import { chainMethods } from '../find/chainMethods.js';
+/**
+* Selects distinct records from a table only if there are joins that need to be used, otherwise return null
+*/ export const selectDistinct = ({ adapter, chainedMethods = [], db, joinAliases, joins, selectFields, tableName, where })=>{
+if (Object.keys(joins).length > 0 || joinAliases.length > 0) {
+if (where) {
+chainedMethods.push({
+args: [
+where
+],
+method: 'where'
+});
+}
+joinAliases.forEach(({ condition, table })=>{
+chainedMethods.push({
+args: [
+table,
+condition
+],
+method: 'leftJoin'
+});
+});
+Object.entries(joins).forEach(([joinTable, condition])=>{
+if (joinTable) {
+chainedMethods.push({
+args: [
+adapter.tables[joinTable],
+condition
+],
+method: 'leftJoin'
+});
+}
+});
+return chainMethods({
+methods: chainedMethods,
+query: db.selectDistinct(selectFields).from(adapter.tables[tableName])
+});
+}
+};
+
+//# sourceMappingURL=selectDistinct.js.map
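The `chainedMethods` queue used here is a small indirection: callers collect `{ method, args }` entries and `chainMethods` (imported from `../find/chainMethods.js`) replays them onto the Drizzle query builder. A rough standalone sketch of that pattern, with simplified types that are assumptions rather than the package's actual signatures:

```ts
// Simplified stand-in for the ChainedMethods entries used above.
type ChainedMethod = { args: unknown[]; method: string }

// Replays queued builder calls (where, leftJoin, limit, ...) onto a base query,
// which is roughly what chainMethods({ methods, query }) does in the real module.
const applyChain = <Q extends Record<string, any>>(query: Q, methods: ChainedMethod[]): Q =>
  methods.reduce((q, { args, method }) => q[method](...args), query)

// e.g. applyChain(db.selectDistinct(selectFields).from(table), [
//   { args: [whereClause], method: 'where' },
//   { args: [1], method: 'limit' },
// ])
```

Note that `selectDistinct` only returns a query when there are joins or join aliases to apply; callers have to tolerate an undefined result, which is why update.js below reads the id through optional chaining.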
package/dist/queries/selectDistinct.js.map
ADDED
@@ -0,0 +1 @@
+
{"version":3,"sources":["../../src/queries/selectDistinct.ts"],"sourcesContent":["import type { QueryPromise, SQL } from 'drizzle-orm'\n\nimport type { ChainedMethods } from '../find/chainMethods.js'\nimport type { DrizzleDB, PostgresAdapter } from '../types.js'\nimport type { BuildQueryJoinAliases, BuildQueryJoins } from './buildQuery.js'\n\nimport { chainMethods } from '../find/chainMethods.js'\nimport { type GenericColumn } from '../types.js'\n\ntype Args = {\n adapter: PostgresAdapter\n chainedMethods?: ChainedMethods\n db: DrizzleDB\n joinAliases: BuildQueryJoinAliases\n joins: BuildQueryJoins\n selectFields: Record<string, GenericColumn>\n tableName: string\n where: SQL\n}\n\n/**\n * Selects distinct records from a table only if there are joins that need to be used, otherwise return null\n */\nexport const selectDistinct = ({\n adapter,\n chainedMethods = [],\n db,\n joinAliases,\n joins,\n selectFields,\n tableName,\n where,\n}: Args): QueryPromise<Record<string, GenericColumn> & { id: number | string }[]> => {\n if (Object.keys(joins).length > 0 || joinAliases.length > 0) {\n if (where) {\n chainedMethods.push({ args: [where], method: 'where' })\n }\n\n joinAliases.forEach(({ condition, table }) => {\n chainedMethods.push({\n args: [table, condition],\n method: 'leftJoin',\n })\n })\n\n Object.entries(joins).forEach(([joinTable, condition]) => {\n if (joinTable) {\n chainedMethods.push({\n args: [adapter.tables[joinTable], condition],\n method: 'leftJoin',\n })\n }\n })\n\n return chainMethods({\n methods: chainedMethods,\n query: db.selectDistinct(selectFields).from(adapter.tables[tableName]),\n })\n }\n}\n"],"names":["chainMethods","selectDistinct","adapter","chainedMethods","db","joinAliases","joins","selectFields","tableName","where","Object","keys","length","push","args","method","forEach","condition","table","entries","joinTable","tables","methods","query","from"],"mappings":"AAMA,SAASA,YAAY,QAAQ,0BAAyB;AActD;;CAEC,GACD,OAAO,MAAMC,iBAAiB,CAAC,EAC7BC,OAAO,EACPC,iBAAiB,EAAE,EACnBC,EAAE,EACFC,WAAW,EACXC,KAAK,EACLC,YAAY,EACZC,SAAS,EACTC,KAAK,EACA;IACL,IAAIC,OAAOC,IAAI,CAACL,OAAOM,MAAM,GAAG,KAAKP,YAAYO,MAAM,GAAG,GAAG;QAC3D,IAAIH,OAAO;YACTN,eAAeU,IAAI,CAAC;gBAAEC,MAAM;oBAACL;iBAAM;gBAAEM,QAAQ;YAAQ;QACvD;QAEAV,YAAYW,OAAO,CAAC,CAAC,EAAEC,SAAS,EAAEC,KAAK,EAAE;YACvCf,eAAeU,IAAI,CAAC;gBAClBC,MAAM;oBAACI;oBAAOD;iBAAU;gBACxBF,QAAQ;YACV;QACF;QAEAL,OAAOS,OAAO,CAACb,OAAOU,OAAO,CAAC,CAAC,CAACI,WAAWH,UAAU;YACnD,IAAIG,WAAW;gBACbjB,eAAeU,IAAI,CAAC;oBAClBC,MAAM;wBAACZ,QAAQmB,MAAM,CAACD,UAAU;wBAAEH;qBAAU;oBAC5CF,QAAQ;gBACV;YACF;QACF;QAEA,OAAOf,aAAa;YAClBsB,SAASnB;YACToB,OAAOnB,GAAGH,cAAc,CAACM,cAAciB,IAAI,CAACtB,QAAQmB,MAAM,CAACb,UAAU;QACvE;IACF;AACF,EAAC"}
package/dist/update.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"update.d.ts","sourceRoot":"","sources":["../src/update.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,SAAS,EAAE,MAAM,kBAAkB,CAAA;
+{"version":3,"file":"update.d.ts","sourceRoot":"","sources":["../src/update.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,SAAS,EAAE,MAAM,kBAAkB,CAAA;AAUjD,eAAO,MAAM,SAAS,EAAE,SA6CvB,CAAA"}
package/dist/update.js
CHANGED
@@ -1,6 +1,6 @@
import toSnakeCase from 'to-snake-case';
-import { chainMethods } from './find/chainMethods.js';
import buildQuery from './queries/buildQuery.js';
+import { selectDistinct } from './queries/selectDistinct.js';
import { upsertRow } from './upsertRow/index.js';
export const updateOne = async function updateOne({ id, collection: collectionSlug, data, draft, locale, req, where: whereArg }) {
const db = this.sessions[req.transactionID]?.db || this.drizzle;
@@ -11,6 +11,7 @@ export const updateOne = async function updateOne({ id, collection: collectionSl
equals: id
}
};
+let idToUpdate = id;
const { joinAliases, joins, selectFields, where } = await buildQuery({
adapter: this,
fields: collection.fields,
@@ -18,51 +19,25 @@ export const updateOne = async function updateOne({ id, collection: collectionSl
tableName,
where: whereToUse
});
-
-
-
-
-if (where) {
-selectDistinctMethods.push({
-args: [
-where
-],
-method: 'where'
-});
-}
-joinAliases.forEach(({ condition, table })=>{
-selectDistinctMethods.push({
+const selectDistinctResult = await selectDistinct({
+adapter: this,
+chainedMethods: [
+{
args: [
-
-condition
+1
],
-method: '
-});
-});
-Object.entries(joins).forEach(([joinTable, condition])=>{
-if (joinTable) {
-selectDistinctMethods.push({
-args: [
-this.tables[joinTable],
-condition
-],
-method: 'leftJoin'
-});
+method: 'limit'
}
-
-
-
-
-
-
-
-
-
-
-});
-if (selectDistinctResult?.[0]?.id) {
-idToUpdate = selectDistinctResult?.[0]?.id;
-}
+],
+db,
+joinAliases,
+joins,
+selectFields,
+tableName,
+where
+});
+if (selectDistinctResult?.[0]?.id) {
+idToUpdate = selectDistinctResult?.[0]?.id;
}
const result = await upsertRow({
id: idToUpdate,
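The rewritten updateOne delegates the join handling to the new selectDistinct helper, seeding it with a single `limit(1)` chained method so at most one matching row comes back, and only keeps the incoming `id` when nothing matched. A minimal sketch of that fallback (parameter names are illustrative, not the adapter's API):

```ts
// Mirrors the guard in update.js above: prefer the id found by the DISTINCT select,
// otherwise keep updating the id that was passed in.
const resolveIdToUpdate = (
  rows: { id: number | string }[] | undefined,
  fallbackId: number | string,
): number | string => {
  const found = rows?.[0]?.id
  return found ? found : fallbackId
}
```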
package/dist/update.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"sources":["../src/update.ts"],"sourcesContent":["import type { UpdateOne } from 'payload/database'\n\nimport toSnakeCase from 'to-snake-case'\n\nimport type {
+
{"version":3,"sources":["../src/update.ts"],"sourcesContent":["import type { UpdateOne } from 'payload/database'\n\nimport toSnakeCase from 'to-snake-case'\n\nimport type { PostgresAdapter } from './types.js'\n\nimport buildQuery from './queries/buildQuery.js'\nimport { selectDistinct } from './queries/selectDistinct.js'\nimport { upsertRow } from './upsertRow/index.js'\n\nexport const updateOne: UpdateOne = async function updateOne(\n this: PostgresAdapter,\n { id, collection: collectionSlug, data, draft, locale, req, where: whereArg },\n) {\n const db = this.sessions[req.transactionID]?.db || this.drizzle\n const collection = this.payload.collections[collectionSlug].config\n const tableName = toSnakeCase(collectionSlug)\n const whereToUse = whereArg || { id: { equals: id } }\n let idToUpdate = id\n\n const { joinAliases, joins, selectFields, where } = await buildQuery({\n adapter: this,\n fields: collection.fields,\n locale,\n tableName,\n where: whereToUse,\n })\n\n const selectDistinctResult = await selectDistinct({\n adapter: this,\n chainedMethods: [{ args: [1], method: 'limit' }],\n db,\n joinAliases,\n joins,\n selectFields,\n tableName,\n where,\n })\n\n if (selectDistinctResult?.[0]?.id) {\n idToUpdate = selectDistinctResult?.[0]?.id\n }\n\n const result = await upsertRow({\n id: idToUpdate,\n adapter: this,\n data,\n db,\n fields: collection.fields,\n operation: 'update',\n req,\n tableName: toSnakeCase(collectionSlug),\n })\n\n return result\n}\n"],"names":["toSnakeCase","buildQuery","selectDistinct","upsertRow","updateOne","id","collection","collectionSlug","data","draft","locale","req","where","whereArg","db","sessions","transactionID","drizzle","payload","collections","config","tableName","whereToUse","equals","idToUpdate","joinAliases","joins","selectFields","adapter","fields","selectDistinctResult","chainedMethods","args","method","result","operation"],"mappings":"AAEA,OAAOA,iBAAiB,gBAAe;AAIvC,OAAOC,gBAAgB,0BAAyB;AAChD,SAASC,cAAc,QAAQ,8BAA6B;AAC5D,SAASC,SAAS,QAAQ,uBAAsB;AAEhD,OAAO,MAAMC,YAAuB,eAAeA,UAEjD,EAAEC,EAAE,EAAEC,YAAYC,cAAc,EAAEC,IAAI,EAAEC,KAAK,EAAEC,MAAM,EAAEC,GAAG,EAAEC,OAAOC,QAAQ,EAAE;IAE7E,MAAMC,KAAK,IAAI,CAACC,QAAQ,CAACJ,IAAIK,aAAa,CAAC,EAAEF,MAAM,IAAI,CAACG,OAAO;IAC/D,MAAMX,aAAa,IAAI,CAACY,OAAO,CAACC,WAAW,CAACZ,eAAe,CAACa,MAAM;IAClE,MAAMC,YAAYrB,YAAYO;IAC9B,MAAMe,aAAaT,YAAY;QAAER,IAAI;YAAEkB,QAAQlB;QAAG;IAAE;IACpD,IAAImB,aAAanB;IAEjB,MAAM,EAAEoB,WAAW,EAAEC,KAAK,EAAEC,YAAY,EAAEf,KAAK,EAAE,GAAG,MAAMX,WAAW;QACnE2B,SAAS,IAAI;QACbC,QAAQvB,WAAWuB,MAAM;QACzBnB;QACAW;QACAT,OAAOU;IACT;IAEA,MAAMQ,uBAAuB,MAAM5B,eAAe;QAChD0B,SAAS,IAAI;QACbG,gBAAgB;YAAC;gBAAEC,MAAM;oBAAC;iBAAE;gBAAEC,QAAQ;YAAQ;SAAE;QAChDnB;QACAW;QACAC;QACAC;QACAN;QACAT;IACF;IAEA,IAAIkB,sBAAsB,CAAC,EAAE,EAAEzB,IAAI;QACjCmB,aAAaM,sBAAsB,CAAC,EAAE,EAAEzB;IAC1C;IAEA,MAAM6B,SAAS,MAAM/B,UAAU;QAC7BE,IAAImB;QACJI,SAAS,IAAI;QACbpB;QACAM;QACAe,QAAQvB,WAAWuB,MAAM;QACzBM,WAAW;QACXxB;QACAU,WAAWrB,YAAYO;IACzB;IAEA,OAAO2B;AACT,EAAC"}
package/dist/utilities/pushDevSchema.d.ts.map
CHANGED
@@ -1 +1 @@
-{"version":3,"file":"pushDevSchema.d.ts","sourceRoot":"","sources":["../../src/utilities/pushDevSchema.ts"],"names":[],"mappings":"
+{"version":3,"file":"pushDevSchema.d.ts","sourceRoot":"","sources":["../../src/utilities/pushDevSchema.ts"],"names":[],"mappings":"AAKA,OAAO,KAAK,EAAE,eAAe,EAAE,MAAM,aAAa,CAAA;AAIlD;;;;;GAKG;AACH,eAAO,MAAM,aAAa,OAAc,eAAe,kBAmEtD,CAAA"}
package/dist/utilities/pushDevSchema.js
CHANGED
@@ -1,13 +1,15 @@
import { eq } from 'drizzle-orm';
import { numeric, timestamp, varchar } from 'drizzle-orm/pg-core';
+import { createRequire } from 'module';
import prompts from 'prompts';
+const require = createRequire(import.meta.url);
/**
* Pushes the development schema to the database using Drizzle.
*
* @param {PostgresAdapter} db - The PostgresAdapter instance connected to the database.
* @returns {Promise<void>} - A promise that resolves once the schema push is complete.
*/ export const pushDevSchema = async (db)=>{
-const { pushSchema } = require
+const { pushSchema } = require('drizzle-kit/payload');
// This will prompt if clarifications are needed for Drizzle to push new schema
const { apply, hasDataLoss, statementsToExecute, warnings } = await pushSchema(db.schema, db.drizzle);
if (warnings.length) {
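The hunk above switches this file to the standard Node.js createRequire pattern: the compiled output is an ES module, where no `require` binding exists, so a module-scoped one is constructed from `import.meta.url` to load 'drizzle-kit/payload' at runtime. In isolation the pattern looks like this (the module name is the one used in the diff; whether it resolves depends on the installed drizzle-kit build, so treat it as a sketch):

```ts
import { createRequire } from 'module'

// Build a CommonJS-style require scoped to this module's URL.
const require = createRequire(import.meta.url)

// Same entry point the adapter loads above; resolution depends on the drizzle-kit version.
const { pushSchema } = require('drizzle-kit/payload')
```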
package/dist/utilities/pushDevSchema.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"sources":["../../src/utilities/pushDevSchema.ts"],"sourcesContent":["import { eq } from 'drizzle-orm'\nimport { numeric, timestamp, varchar } from 'drizzle-orm/pg-core'\nimport prompts from 'prompts'\n\nimport type { PostgresAdapter } from '../types.js'\n\n/**\n * Pushes the development schema to the database using Drizzle.\n *\n * @param {PostgresAdapter} db - The PostgresAdapter instance connected to the database.\n * @returns {Promise<void>} - A promise that resolves once the schema push is complete.\n */\nexport const pushDevSchema = async (db: PostgresAdapter) => {\n const { pushSchema } = require
+
{"version":3,"sources":["../../src/utilities/pushDevSchema.ts"],"sourcesContent":["import { eq } from 'drizzle-orm'\nimport { numeric, timestamp, varchar } from 'drizzle-orm/pg-core'\nimport { createRequire } from 'module'\nimport prompts from 'prompts'\n\nimport type { PostgresAdapter } from '../types.js'\n\nconst require = createRequire(import.meta.url)\n\n/**\n * Pushes the development schema to the database using Drizzle.\n *\n * @param {PostgresAdapter} db - The PostgresAdapter instance connected to the database.\n * @returns {Promise<void>} - A promise that resolves once the schema push is complete.\n */\nexport const pushDevSchema = async (db: PostgresAdapter) => {\n const { pushSchema } = require('drizzle-kit/payload')\n\n // This will prompt if clarifications are needed for Drizzle to push new schema\n const { apply, hasDataLoss, statementsToExecute, warnings } = await pushSchema(\n db.schema,\n db.drizzle,\n )\n\n if (warnings.length) {\n let message = `Warnings detected during schema push: \\n\\n${warnings.join('\\n')}\\n\\n`\n\n if (hasDataLoss) {\n message += `DATA LOSS WARNING: Possible data loss detected if schema is pushed.\\n\\n`\n }\n\n message += `Accept warnings and push schema to database?`\n\n const { confirm: acceptWarnings } = await prompts(\n {\n name: 'confirm',\n type: 'confirm',\n initial: false,\n message,\n },\n {\n onCancel: () => {\n process.exit(0)\n },\n },\n )\n\n // Exit if user does not accept warnings.\n // Q: Is this the right type of exit for this interaction?\n if (!acceptWarnings) {\n process.exit(0)\n }\n }\n\n await apply()\n\n // Migration table def in order to use query using drizzle\n const migrationsSchema = db.pgSchema.table('payload_migrations', {\n name: varchar('name'),\n batch: numeric('batch'),\n created_at: timestamp('created_at'),\n updated_at: timestamp('updated_at'),\n })\n\n const devPush = await db.drizzle\n .select()\n .from(migrationsSchema)\n .where(eq(migrationsSchema.batch, '-1'))\n\n if (!devPush.length) {\n await db.drizzle.insert(migrationsSchema).values({\n name: 'dev',\n batch: '-1',\n })\n } else {\n await db.drizzle\n .update(migrationsSchema)\n .set({\n updated_at: new Date(),\n })\n .where(eq(migrationsSchema.batch, '-1'))\n 
}\n}\n"],"names":["eq","numeric","timestamp","varchar","createRequire","prompts","require","url","pushDevSchema","db","pushSchema","apply","hasDataLoss","statementsToExecute","warnings","schema","drizzle","length","message","join","confirm","acceptWarnings","name","type","initial","onCancel","process","exit","migrationsSchema","pgSchema","table","batch","created_at","updated_at","devPush","select","from","where","insert","values","update","set","Date"],"mappings":"AAAA,SAASA,EAAE,QAAQ,cAAa;AAChC,SAASC,OAAO,EAAEC,SAAS,EAAEC,OAAO,QAAQ,sBAAqB;AACjE,SAASC,aAAa,QAAQ,SAAQ;AACtC,OAAOC,aAAa,UAAS;AAI7B,MAAMC,UAAUF,cAAc,YAAYG,GAAG;AAE7C;;;;;CAKC,GACD,OAAO,MAAMC,gBAAgB,OAAOC;IAClC,MAAM,EAAEC,UAAU,EAAE,GAAGJ,QAAQ;IAE/B,+EAA+E;IAC/E,MAAM,EAAEK,KAAK,EAAEC,WAAW,EAAEC,mBAAmB,EAAEC,QAAQ,EAAE,GAAG,MAAMJ,WAClED,GAAGM,MAAM,EACTN,GAAGO,OAAO;IAGZ,IAAIF,SAASG,MAAM,EAAE;QACnB,IAAIC,UAAU,CAAC,0CAA0C,EAAEJ,SAASK,IAAI,CAAC,MAAM,IAAI,CAAC;QAEpF,IAAIP,aAAa;YACfM,WAAW,CAAC,uEAAuE,CAAC;QACtF;QAEAA,WAAW,CAAC,4CAA4C,CAAC;QAEzD,MAAM,EAAEE,SAASC,cAAc,EAAE,GAAG,MAAMhB,QACxC;YACEiB,MAAM;YACNC,MAAM;YACNC,SAAS;YACTN;QACF,GACA;YACEO,UAAU;gBACRC,QAAQC,IAAI,CAAC;YACf;QACF;QAGF,yCAAyC;QACzC,0DAA0D;QAC1D,IAAI,CAACN,gBAAgB;YACnBK,QAAQC,IAAI,CAAC;QACf;IACF;IAEA,MAAMhB;IAEN,0DAA0D;IAC1D,MAAMiB,mBAAmBnB,GAAGoB,QAAQ,CAACC,KAAK,CAAC,sBAAsB;QAC/DR,MAAMnB,QAAQ;QACd4B,OAAO9B,QAAQ;QACf+B,YAAY9B,UAAU;QACtB+B,YAAY/B,UAAU;IACxB;IAEA,MAAMgC,UAAU,MAAMzB,GAAGO,OAAO,CAC7BmB,MAAM,GACNC,IAAI,CAACR,kBACLS,KAAK,CAACrC,GAAG4B,iBAAiBG,KAAK,EAAE;IAEpC,IAAI,CAACG,QAAQjB,MAAM,EAAE;QACnB,MAAMR,GAAGO,OAAO,CAACsB,MAAM,CAACV,kBAAkBW,MAAM,CAAC;YAC/CjB,MAAM;YACNS,OAAO;QACT;IACF,OAAO;QACL,MAAMtB,GAAGO,OAAO,CACbwB,MAAM,CAACZ,kBACPa,GAAG,CAAC;YACHR,YAAY,IAAIS;QAClB,GACCL,KAAK,CAACrC,GAAG4B,iBAAiBG,KAAK,EAAE;IACtC;AACF,EAAC"}
package/package.json
CHANGED
@@ -1,8 +1,12 @@
{
"name": "@payloadcms/db-postgres",
-"version": "3.0.0-alpha.
+"version": "3.0.0-alpha.51",
"description": "The officially supported Postgres database adapter for Payload",
-"repository":
+"repository": {
+"type": "git",
+"url": "https://github.com/payloadcms/payload.git",
+"directory": "packages/db-postgres"
+},
"license": "MIT",
"homepage": "https://payloadcms.com",
"type": "module",
@@ -26,11 +30,11 @@
"devDependencies": {
"@types/pg": "8.10.2",
"@types/to-snake-case": "1.0.0",
-"
-"
+"@payloadcms/eslint-config": "1.1.1",
+"payload": "3.0.0-alpha.51"
},
"peerDependencies": {
-"payload": "3.0.0-alpha.
+"payload": "3.0.0-alpha.51"
},
"exports": {
".": {
|