@ainsleydev/payload-helper 0.0.9 → 0.0.10

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,5 +1,4 @@
  import type { Payload, PayloadRequest } from 'payload';
- import type { Seeder } from './seed.js';
  import type { Media, MediaSeed } from './types.js';
  /**
  *
@@ -9,15 +8,3 @@ import type { Media, MediaSeed } from './types.js';
  * @param media
  */
  export declare const uploadMedia: (req: PayloadRequest, payload: Payload, dirname: string, media: MediaSeed) => Promise<Media>;
- /**
- * Up script to create tables and seed data.
- *
- * @param payload
- * @param req
- * @param seeder
- */
- export declare const up: ({ payload, req, seeder, }: {
- payload: Payload;
- req: PayloadRequest;
- seeder: Seeder;
- }) => Promise<void>;
@@ -25,31 +25,5 @@ import { htmlToLexical } from '../util/lexical.js';
  throw error;
  }
  };
- /**
- * Up script to create tables and seed data.
- *
- * @param payload
- * @param req
- * @param seeder
- */ export const up = async ({ payload, req, seeder })=>{
- payload.logger.info('Running up script');
- await payload.init({
- config: payload.config
- });
- // Creating new tables
- payload.logger.info('Creating indexes...');
- try {
- if (payload.db.init) {
- await payload.db.init();
- }
- } catch (error) {
- payload.logger.error(`Creating database: ${error}`);
- return;
- }
- await seeder({
- payload,
- req
- });
- };

- //# sourceMappingURL=up.js.map
+ //# sourceMappingURL=media.js.map
@@ -0,0 +1 @@
+ {"version":3,"sources":["../../src/seed/media.ts"],"sourcesContent":["import path from 'node:path';\nimport type { Payload, PayloadRequest } from 'payload';\nimport { getFileByPath } from 'payload';\nimport { htmlToLexical } from '../util/lexical.js';\nimport type { Media, MediaSeed } from './types.js';\n\n/**\n *\n * @param req\n * @param payload\n * @param dirname\n * @param media\n */\nexport const uploadMedia = async (\n\treq: PayloadRequest,\n\tpayload: Payload,\n\tdirname: string,\n\tmedia: MediaSeed,\n): Promise<Media> => {\n\ttry {\n\t\tconst image = await getFileByPath(path.resolve(dirname, media.path));\n\t\tconst caption = media.caption ? await htmlToLexical(media.caption) : null;\n\n\t\treturn (await payload.create({\n\t\t\tcollection: 'media',\n\t\t\tfile: image,\n\t\t\tdata: {\n\t\t\t\talt: media.alt,\n\t\t\t\tcaption: caption,\n\t\t\t},\n\t\t\treq,\n\t\t})) as unknown as Media;\n\t} catch (error) {\n\t\tpayload.logger.error(`Uploading media: ${error}`);\n\t\tthrow error;\n\t}\n};\n"],"names":["path","getFileByPath","htmlToLexical","uploadMedia","req","payload","dirname","media","image","resolve","caption","create","collection","file","data","alt","error","logger"],"mappings":"AAAA,OAAOA,UAAU,YAAY;AAE7B,SAASC,aAAa,QAAQ,UAAU;AACxC,SAASC,aAAa,QAAQ,qBAAqB;AAGnD;;;;;;CAMC,GACD,OAAO,MAAMC,cAAc,OAC1BC,KACAC,SACAC,SACAC;IAEA,IAAI;QACH,MAAMC,QAAQ,MAAMP,cAAcD,KAAKS,OAAO,CAACH,SAASC,MAAMP,IAAI;QAClE,MAAMU,UAAUH,MAAMG,OAAO,GAAG,MAAMR,cAAcK,MAAMG,OAAO,IAAI;QAErE,OAAQ,MAAML,QAAQM,MAAM,CAAC;YAC5BC,YAAY;YACZC,MAAML;YACNM,MAAM;gBACLC,KAAKR,MAAMQ,GAAG;gBACdL,SAASA;YACV;YACAN;QACD;IACD,EAAE,OAAOY,OAAO;QACfX,QAAQY,MAAM,CAACD,KAAK,CAAC,CAAC,iBAAiB,EAAEA,MAAM,CAAC;QAChD,MAAMA;IACP;AACD,EAAE"}
package/dist/seed/seed.js CHANGED
@@ -1,8 +1,12 @@
  import dotenv from 'dotenv';
  import { commitTransaction, getPayload, initTransaction, killTransaction } from 'payload';
  import { importConfig } from 'payload/node';
- import { down } from './down.js';
- import { up } from './up.js';
+ import env from "../util/env.js";
+ import path from "node:path";
+ import fs from "node:fs";
+ import { fileURLToPath } from "node:url";
+ const filename = fileURLToPath(import.meta.url);
+ const dirname = path.dirname(filename);
  export var DBAdapter;
  (function(DBAdapter) {
  DBAdapter["Postgres"] = "postgres";
@@ -17,36 +21,58 @@ export var DBAdapter;
  dotenv.config({
  path: opts.envPath
  });
- for (const fn of [
- down,
- up
- ]){
- if (fn === down) {
- process.env.PAYLOAD_DROP_DATABASE = 'true';
- } else {
- delete process.env.PAYLOAD_DROP_DATABASE; // Ensure it is not set for other functions
- }
- const config = await importConfig(opts.configPath);
- const payload = await getPayload({
- config
+ process.env.PAYLOAD_DROP_DATABASE = 'true';
+ const config = await importConfig(opts.configPath);
+ const payload = await getPayload({
+ config
+ });
+ const req = {
+ payload
+ };
+ await initTransaction(req);
+ delete process.env.PAYLOAD_DROP_DATABASE;
+ try {
+ // Init
+ payload.logger.info("Initialising Payload...");
+ await payload.init({
+ config: payload.config
  });
- const req = {
- payload
- };
- await initTransaction(req);
+ // Creating new tables
+ payload.logger.info('Creating indexes...');
  try {
- await fn({
- payload,
- req,
- seeder: opts.seeder
- });
- payload.logger.info('Seed complete');
- await commitTransaction(req);
- } catch (err) {
- const message = err instanceof Error ? err.message : 'Unknown error';
- payload.logger.error(`Seed failed: ${message}`);
- await killTransaction(req);
+ if (payload.db.init) {
+ await payload.db.init();
+ }
+ } catch (error) {
+ payload.logger.error(`Creating database: ${error}`);
+ return;
+ }
+ if (env.isProduction) {
+ payload.logger.info('Migrating DB...');
+ await payload.db.migrate();
  }
+ // Clearing local media
+ if (!env.isProduction) {
+ payload.logger.info('Clearing media...');
+ const mediaDir = path.resolve(dirname, '../../media');
+ if (fs.existsSync(mediaDir)) {
+ fs.rmSync(mediaDir, {
+ recursive: true,
+ force: true
+ });
+ }
+ }
+ // Run user defined seed script
+ await opts.seeder({
+ payload,
+ req
+ });
+ await commitTransaction(req);
+ payload.logger.info('Seed complete');
+ } catch (err) {
+ const message = err instanceof Error ? err.message : 'Unknown error';
+ payload.logger.error(`Seed failed: ${message}`);
+ await killTransaction(req);
  }
  };
  fn().then(()=>process.exit(0)).catch((e)=>{
@@ -1 +1 @@
- {"version":3,"sources":["../../src/seed/seed.ts"],"sourcesContent":["import dotenv from 'dotenv';\nimport {\n\ttype Payload,\n\ttype PayloadRequest,\n\tcommitTransaction,\n\tgetPayload,\n\tinitTransaction,\n\tkillTransaction,\n} from 'payload';\nimport { importConfig } from 'payload/node';\nimport { down } from './down.js';\nimport { up } from './up.js';\n\n/**\n * A function that seeds the database with initial data.\n */\nexport type Seeder = (args: { payload: Payload; req: PayloadRequest }) => Promise<void>;\n\n/**\n * Options for the seed function.\n * Note: You must use path.resolve for the paths, i.e. path.resolve(__dirname, 'path/to/file')\n */\nexport type SeedOptions = {\n\tenvPath: string;\n\tconfigPath: string;\n\tdbAdapter: DBAdapter;\n\tseeder: Seeder;\n};\n\n/**\n * The database adapter to use, which will remove and recreate the database.\n */\nexport enum DBAdapter {\n\tPostgres = 'postgres',\n}\n\n/**\n * Seeds the database with initial data.\n *\n * @param opts - The options for seeding.\n * @returns A promise that resolves when the seeding is complete.\n */\nexport const seed = (opts: SeedOptions) => {\n\tconst fn = async () => {\n\t\tdotenv.config({\n\t\t\tpath: opts.envPath,\n\t\t});\n\n\t\tfor (const fn of [down, up]) {\n\t\t\tif (fn === down) {\n\t\t\t\tprocess.env.PAYLOAD_DROP_DATABASE = 'true';\n\t\t\t} else {\n\t\t\t\tdelete process.env.PAYLOAD_DROP_DATABASE; // Ensure it is not set for other functions\n\t\t\t}\n\n\t\t\tconst config = await importConfig(opts.configPath);\n\t\t\tconst payload = await getPayload({ config });\n\t\t\tconst req = { payload } as PayloadRequest;\n\n\t\t\tawait initTransaction(req);\n\n\t\t\ttry {\n\t\t\t\tawait fn({ payload, req, seeder: opts.seeder });\n\t\t\t\tpayload.logger.info('Seed complete');\n\t\t\t\tawait commitTransaction(req);\n\t\t\t} catch (err) {\n\t\t\t\tconst message = err instanceof Error ? err.message : 'Unknown error';\n\t\t\t\tpayload.logger.error(`Seed failed: ${message}`);\n\t\t\t\tawait killTransaction(req);\n\t\t\t}\n\t\t}\n\t};\n\n\tfn()\n\t\t.then(() => process.exit(0))\n\t\t.catch((e) => {\n\t\t\tconsole.error(e);\n\t\t\tprocess.exit(1);\n\t\t});\n};\n"],"names":["dotenv","commitTransaction","getPayload","initTransaction","killTransaction","importConfig","down","up","DBAdapter","seed","opts","fn","config","path","envPath","process","env","PAYLOAD_DROP_DATABASE","configPath","payload","req","seeder","logger","info","err","message","Error","error","then","exit","catch","e","console"],"mappings":"AAAA,OAAOA,YAAY,SAAS;AAC5B,SAGCC,iBAAiB,EACjBC,UAAU,EACVC,eAAe,EACfC,eAAe,QACT,UAAU;AACjB,SAASC,YAAY,QAAQ,eAAe;AAC5C,SAASC,IAAI,QAAQ,YAAY;AACjC,SAASC,EAAE,QAAQ,UAAU;;UAqBjBC;;GAAAA,cAAAA;AAIZ;;;;;CAKC,GACD,OAAO,MAAMC,OAAO,CAACC;IACpB,MAAMC,KAAK;QACVX,OAAOY,MAAM,CAAC;YACbC,MAAMH,KAAKI,OAAO;QACnB;QAEA,KAAK,MAAMH,MAAM;YAACL;YAAMC;SAAG,CAAE;YAC5B,IAAII,OAAOL,MAAM;gBAChBS,QAAQC,GAAG,CAACC,qBAAqB,GAAG;YACrC,OAAO;gBACN,OAAOF,QAAQC,GAAG,CAACC,qBAAqB,EAAE,2CAA2C;YACtF;YAEA,MAAML,SAAS,MAAMP,aAAaK,KAAKQ,UAAU;YACjD,MAAMC,UAAU,MAAMjB,WAAW;gBAAEU;YAAO;YAC1C,MAAMQ,MAAM;gBAAED;YAAQ;YAEtB,MAAMhB,gBAAgBiB;YAEtB,IAAI;gBACH,MAAMT,GAAG;oBAAEQ;oBAASC;oBAAKC,QAAQX,KAAKW,MAAM;gBAAC;gBAC7CF,QAAQG,MAAM,CAACC,IAAI,CAAC;gBACpB,MAAMtB,kBAAkBmB;YACzB,EAAE,OAAOI,KAAK;gBACb,MAAMC,UAAUD,eAAeE,QAAQF,IAAIC,OAAO,GAAG;gBACrDN,QAAQG,MAAM,CAACK,KAAK,CAAC,CAAC,aAAa,EAAEF,QAAQ,CAAC;gBAC9C,MAAMrB,gBAAgBgB;YACvB;QACD;IACD;IAEAT,KACEiB,IAAI,CAAC,IAAMb,QAAQc,IAAI,CAAC,IACxBC,KAAK,CAAC,CAACC;QACPC,QAAQL,KAAK,CAACI;QACdhB,QAAQc,IAAI,CAAC;IACd;AACF,EAAE"}
+ {"version":3,"sources":["../../src/seed/seed.ts"],"sourcesContent":["import dotenv from 'dotenv';\nimport {\n\ttype Payload,\n\ttype PayloadRequest,\n\tcommitTransaction,\n\tgetPayload,\n\tinitTransaction,\n\tkillTransaction,\n} from 'payload';\nimport { importConfig } from 'payload/node';\nimport env from \"../util/env.js\";\nimport path from \"node:path\";\nimport fs from \"node:fs\";\nimport {fileURLToPath} from \"node:url\";\n\nconst filename = fileURLToPath(import.meta.url);\nconst dirname = path.dirname(filename);\n\n/**\n * A function that seeds the database with initial data.\n */\nexport type Seeder = (args: { payload: Payload; req: PayloadRequest }) => Promise<void>;\n\n/**\n * Options for the seed function.\n * Note: You must use path.resolve for the paths, i.e. path.resolve(__dirname, 'path/to/file')\n */\nexport type SeedOptions = {\n\tenvPath: string;\n\tconfigPath: string;\n\tdbAdapter: DBAdapter;\n\tseeder: Seeder;\n};\n\n/**\n * The database adapter to use, which will remove and recreate the database.\n */\nexport enum DBAdapter {\n\tPostgres = 'postgres',\n}\n\n/**\n * Seeds the database with initial data.\n *\n * @param opts - The options for seeding.\n * @returns A promise that resolves when the seeding is complete.\n */\nexport const seed = (opts: SeedOptions) => {\n\tconst fn = async () => {\n\t\tdotenv.config({\n\t\t\tpath: opts.envPath,\n\t\t});\n\n\t\tprocess.env.PAYLOAD_DROP_DATABASE = 'true';\n\n\t\tconst config = await importConfig(opts.configPath);\n\t\tconst payload = await getPayload({ config });\n\t\tconst req = { payload } as PayloadRequest;\n\n\t\tawait initTransaction(req);\n\n\t\tdelete process.env.PAYLOAD_DROP_DATABASE\n\n\t\ttry {\n\t\t\t// Init\n\t\t\tpayload.logger.info(\"Initialising Payload...\")\n\t\t\tawait payload.init({\n\t\t\t\tconfig: payload.config,\n\t\t\t});\n\n\t\t\t// Creating new tables\n\t\t\tpayload.logger.info('Creating indexes...');\n\t\t\ttry {\n\t\t\t\tif (payload.db.init) {\n\t\t\t\t\tawait payload.db.init();\n\t\t\t\t}\n\t\t\t} catch (error) {\n\t\t\t\tpayload.logger.error(`Creating database: ${error}`);\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\tif (env.isProduction) {\n\t\t\t\tpayload.logger.info('Migrating DB...');\n\t\t\t\tawait payload.db.migrate();\n\t\t\t}\n\n\t\t\t// Clearing local media\n\t\t\tif (!env.isProduction) {\n\t\t\t\tpayload.logger.info('Clearing media...');\n\t\t\t\tconst mediaDir = path.resolve(dirname, '../../media');\n\t\t\t\tif (fs.existsSync(mediaDir)) {\n\t\t\t\t\tfs.rmSync(mediaDir, { recursive: true, force: true });\n\t\t\t\t}\n\t\t\t}\n\n\t\t\t// Run user defined seed script\n\t\t\tawait opts.seeder({ payload, req });\n\n\t\t\tawait commitTransaction(req)\n\n\t\t\tpayload.logger.info('Seed complete');\n\t\t} catch (err) {\n\t\t\tconst message = err instanceof Error ? 
err.message : 'Unknown error';\n\t\t\tpayload.logger.error(`Seed failed: ${message}`);\n\t\t\tawait killTransaction(req);\n\t\t}\n\t};\n\n\tfn()\n\t\t.then(() => process.exit(0))\n\t\t.catch((e) => {\n\t\t\tconsole.error(e);\n\t\t\tprocess.exit(1);\n\t\t});\n};\n"],"names":["dotenv","commitTransaction","getPayload","initTransaction","killTransaction","importConfig","env","path","fs","fileURLToPath","filename","url","dirname","DBAdapter","seed","opts","fn","config","envPath","process","PAYLOAD_DROP_DATABASE","configPath","payload","req","logger","info","init","db","error","isProduction","migrate","mediaDir","resolve","existsSync","rmSync","recursive","force","seeder","err","message","Error","then","exit","catch","e","console"],"mappings":"AAAA,OAAOA,YAAY,SAAS;AAC5B,SAGCC,iBAAiB,EACjBC,UAAU,EACVC,eAAe,EACfC,eAAe,QACT,UAAU;AACjB,SAASC,YAAY,QAAQ,eAAe;AAC5C,OAAOC,SAAS,iBAAiB;AACjC,OAAOC,UAAU,YAAY;AAC7B,OAAOC,QAAQ,UAAU;AACzB,SAAQC,aAAa,QAAO,WAAW;AAEvC,MAAMC,WAAWD,cAAc,YAAYE,GAAG;AAC9C,MAAMC,UAAUL,KAAKK,OAAO,CAACF;;UAqBjBG;;GAAAA,cAAAA;AAIZ;;;;;CAKC,GACD,OAAO,MAAMC,OAAO,CAACC;IACpB,MAAMC,KAAK;QACVhB,OAAOiB,MAAM,CAAC;YACbV,MAAMQ,KAAKG,OAAO;QACnB;QAEAC,QAAQb,GAAG,CAACc,qBAAqB,GAAG;QAEpC,MAAMH,SAAS,MAAMZ,aAAaU,KAAKM,UAAU;QACjD,MAAMC,UAAU,MAAMpB,WAAW;YAAEe;QAAO;QAC1C,MAAMM,MAAM;YAAED;QAAQ;QAEtB,MAAMnB,gBAAgBoB;QAEtB,OAAOJ,QAAQb,GAAG,CAACc,qBAAqB;QAExC,IAAI;YACH,OAAO;YACPE,QAAQE,MAAM,CAACC,IAAI,CAAC;YACpB,MAAMH,QAAQI,IAAI,CAAC;gBAClBT,QAAQK,QAAQL,MAAM;YACvB;YAEA,sBAAsB;YACtBK,QAAQE,MAAM,CAACC,IAAI,CAAC;YACpB,IAAI;gBACH,IAAIH,QAAQK,EAAE,CAACD,IAAI,EAAE;oBACpB,MAAMJ,QAAQK,EAAE,CAACD,IAAI;gBACtB;YACD,EAAE,OAAOE,OAAO;gBACfN,QAAQE,MAAM,CAACI,KAAK,CAAC,CAAC,mBAAmB,EAAEA,MAAM,CAAC;gBAClD;YACD;YAEA,IAAItB,IAAIuB,YAAY,EAAE;gBACrBP,QAAQE,MAAM,CAACC,IAAI,CAAC;gBACpB,MAAMH,QAAQK,EAAE,CAACG,OAAO;YACzB;YAEA,uBAAuB;YACvB,IAAI,CAACxB,IAAIuB,YAAY,EAAE;gBACtBP,QAAQE,MAAM,CAACC,IAAI,CAAC;gBACpB,MAAMM,WAAWxB,KAAKyB,OAAO,CAACpB,SAAS;gBACvC,IAAIJ,GAAGyB,UAAU,CAACF,WAAW;oBAC5BvB,GAAG0B,MAAM,CAACH,UAAU;wBAAEI,WAAW;wBAAMC,OAAO;oBAAK;gBACpD;YACD;YAEA,+BAA+B;YAC/B,MAAMrB,KAAKsB,MAAM,CAAC;gBAAEf;gBAASC;YAAI;YAEjC,MAAMtB,kBAAkBsB;YAExBD,QAAQE,MAAM,CAACC,IAAI,CAAC;QACrB,EAAE,OAAOa,KAAK;YACb,MAAMC,UAAUD,eAAeE,QAAQF,IAAIC,OAAO,GAAG;YACrDjB,QAAQE,MAAM,CAACI,KAAK,CAAC,CAAC,aAAa,EAAEW,QAAQ,CAAC;YAC9C,MAAMnC,gBAAgBmB;QACvB;IACD;IAEAP,KACEyB,IAAI,CAAC,IAAMtB,QAAQuB,IAAI,CAAC,IACxBC,KAAK,CAAC,CAACC;QACPC,QAAQjB,KAAK,CAACgB;QACdzB,QAAQuB,IAAI,CAAC;IACd;AACF,EAAE"}
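The rewritten seed.js above folds the previous down/up passes into a single transactional run: it sets PAYLOAD_DROP_DATABASE, initialises Payload, creates tables, migrates the database in production, clears local media in development, then hands off to the user-supplied seeder before committing. A minimal caller sketch, assuming the package exposes seed and DBAdapter from its public entry point (the import specifier and file paths below are placeholders):

import path from 'node:path';
import { fileURLToPath } from 'node:url';
// Hypothetical import specifier and placeholder paths.
import { seed, DBAdapter } from '@ainsleydev/payload-helper';

const dirname = path.dirname(fileURLToPath(import.meta.url));

seed({
	envPath: path.resolve(dirname, '../.env'), // placeholder
	configPath: path.resolve(dirname, 'payload.config.ts'), // placeholder
	dbAdapter: DBAdapter.Postgres,
	// The seeder runs inside the transaction opened by initTransaction(req).
	seeder: async ({ payload, req }) => {
		payload.logger.info('Running project seeder');
		// ...create documents with payload.create({ ..., req }) here
	},
});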
@@ -1,34 +1,43 @@
  import { createHeadlessEditor } from '@lexical/headless';
  import { $generateHtmlFromNodes, $generateNodesFromDOM } from '@lexical/html';
- import { sqliteAdapter } from '@payloadcms/db-sqlite';
- import { defaultEditorConfig, getEnabledNodes, lexicalEditor, sanitizeServerEditorConfig } from '@payloadcms/richtext-lexical';
+ // import { sqliteAdapter } from '@payloadcms/db-sqlite';
+ // import {
+ // defaultEditorConfig,
+ // getEnabledNodes,
+ // lexicalEditor,
+ // sanitizeServerEditorConfig,
+ // } from '@payloadcms/richtext-lexical';
  import { JSDOM } from 'jsdom';
  import { $getRoot, $getSelection } from 'lexical';
- import { buildConfig, getPayload } from 'payload';
- const loadEditor = async ()=>{
- const config = {
- secret: 'testing',
- editor: lexicalEditor({
- admin: {
- hideGutter: false
- }
- }),
- db: sqliteAdapter({
- client: {
- url: 'file:./local.db'
- }
- })
- };
- const instance = await getPayload({
- config: buildConfig(config)
- });
- const editorConfig = await sanitizeServerEditorConfig(defaultEditorConfig, instance.config);
- return createHeadlessEditor({
- nodes: getEnabledNodes({
- editorConfig
- })
- });
- };
+ // import { buildConfig, getPayload } from 'payload';
+ // import { importWithoutClientFiles } from 'payload/node';
+ // const loadEditor = async (): Promise<LexicalEditor> => {
+ // const config = {
+ // secret: 'testing',
+ // editor: lexicalEditor({
+ // admin: {
+ // hideGutter: false,
+ // },
+ // }),
+ // db: sqliteAdapter({
+ // client: {
+ // url: 'file:./local.db',
+ // },
+ // }),
+ // };
+ //
+ // const instance = await getPayload({
+ // config: buildConfig(config),
+ // });
+ //
+ // const editorConfig = await sanitizeServerEditorConfig(defaultEditorConfig, instance.config);
+ //
+ // return createHeadlessEditor({
+ // nodes: getEnabledNodes({
+ // editorConfig,
+ // }),
+ // });
+ // };
  /**
  * Converts an HTML string to a Lexical editor state.
  *
@@ -50,7 +59,6 @@ const loadEditor = async ()=>{
  $getRoot().select();
  // Insert them at a selection.
  const selection = $getSelection();
- console.log('Generated nodes: ', nodes);
  if (selection) selection.insertNodes(nodes);
  }, {
  discrete: true
@@ -1 +1 @@
- {"version":3,"sources":["../../src/util/lexical.ts"],"sourcesContent":["import { createHeadlessEditor } from '@lexical/headless';\nimport { $generateHtmlFromNodes, $generateNodesFromDOM } from '@lexical/html';\nimport { sqliteAdapter } from '@payloadcms/db-sqlite';\nimport {\n\tdefaultEditorConfig,\n\tgetEnabledNodes,\n\tlexicalEditor,\n\tsanitizeServerEditorConfig,\n} from '@payloadcms/richtext-lexical';\nimport { JSDOM } from 'jsdom';\nimport { $getRoot, $getSelection, type LexicalEditor } from 'lexical';\nimport type { SerializedEditorState } from 'lexical';\nimport { buildConfig, getPayload } from 'payload';\nimport { importWithoutClientFiles } from 'payload/node';\n\nconst loadEditor = async (): Promise<LexicalEditor> => {\n\tconst config = {\n\t\tsecret: 'testing',\n\t\teditor: lexicalEditor({\n\t\t\tadmin: {\n\t\t\t\thideGutter: false,\n\t\t\t},\n\t\t}),\n\t\tdb: sqliteAdapter({\n\t\t\tclient: {\n\t\t\t\turl: 'file:./local.db',\n\t\t\t},\n\t\t}),\n\t};\n\n\tconst instance = await getPayload({\n\t\tconfig: buildConfig(config),\n\t});\n\n\tconst editorConfig = await sanitizeServerEditorConfig(defaultEditorConfig, instance.config);\n\n\treturn createHeadlessEditor({\n\t\tnodes: getEnabledNodes({\n\t\t\teditorConfig,\n\t\t}),\n\t});\n};\n\n/**\n * Converts an HTML string to a Lexical editor state.\n *\n * @param {string} html - The HTML string to convert.\n * @returns {SerializedEditorState} The serialized editor state.\n */\nexport const htmlToLexical = (html: string): SerializedEditorState => {\n\tconst editor = createHeadlessEditor({\n\t\tnodes: [],\n\t\tonError: (error) => {\n\t\t\tconsole.error(error);\n\t\t},\n\t});\n\n\teditor.update(\n\t\t() => {\n\t\t\t// In a headless environment you can use a package such as JSDom to parse the HTML string.\n\t\t\tconst dom = new JSDOM(`<!DOCTYPE html><body>${html}</body>`);\n\n\t\t\t// Once you have the DOM instance it's easy to generate LexicalNodes.\n\t\t\tconst nodes = $generateNodesFromDOM(editor, dom.window.document);\n\n\t\t\t// Select the root\n\t\t\t$getRoot().select();\n\n\t\t\t// Insert them at a selection.\n\t\t\tconst selection = $getSelection();\n\n\t\t\tconsole.log('Generated nodes: ', nodes);\n\n\t\t\tif (selection) selection.insertNodes(nodes);\n\t\t},\n\t\t{ discrete: true },\n\t);\n\n\treturn editor.getEditorState().toJSON();\n\n\t// let state = {};\n\t//\n\t// loadEditor().then((editor) => {\n\t// \teditor.update(\n\t// \t\t() => {\n\t// \t\t\t// In a headless environment you can use a package such as JSDom to parse the HTML string.\n\t// \t\t\tconst dom = new JSDOM(`<!DOCTYPE html><body>${html}</body>`);\n\t//\n\t// \t\t\t// Once you have the DOM instance it's easy to generate LexicalNodes.\n\t// \t\t\tconst nodes = $generateNodesFromDOM(editor, dom.window.document);\n\t//\n\t// \t\t\t// Select the root\n\t// \t\t\t$getRoot().select();\n\t//\n\t// \t\t\t// Insert them at a selection.\n\t// \t\t\tconst selection = $getSelection();\n\t//\n\t// \t\t\tif (selection) selection.insertNodes(nodes);\n\t// \t\t},\n\t// \t\t{ discrete: true },\n\t// \t);\n\t//\n\t// \tstate = editor.getEditorState().toJSON();\n\t// });\n\t//\n\t// return state as SerializedEditorState;\n};\n\n/**\n * Converts a Lexical editor state to an HTML string.\n *\n * @param {SerializedEditorState} json - The serialized editor state to convert.\n * @returns {string} The HTML string.\n */\nexport const lexicalToHtml = (json: SerializedEditorState): string => {\n\tconst editor = createHeadlessEditor({\n\t\tnodes: [],\n\t\tonError: (error) => 
{\n\t\t\tconsole.error(error);\n\t\t},\n\t});\n\n\t// Initialize a JSDOM instance\n\tconst dom = new JSDOM('');\n\n\t// @ts-ignore\n\tglobalThis.window = dom.window;\n\tglobalThis.document = dom.window.document;\n\n\teditor.update(() => {\n\t\tconst editorState = editor.parseEditorState(json);\n\t\teditor.setEditorState(editorState);\n\t});\n\n\t// Convert the editor state to HTML\n\tlet html = '';\n\teditor.getEditorState().read(() => {\n\t\thtml = $generateHtmlFromNodes(editor);\n\t});\n\n\treturn html;\n};\n"],"names":["createHeadlessEditor","$generateHtmlFromNodes","$generateNodesFromDOM","sqliteAdapter","defaultEditorConfig","getEnabledNodes","lexicalEditor","sanitizeServerEditorConfig","JSDOM","$getRoot","$getSelection","buildConfig","getPayload","loadEditor","config","secret","editor","admin","hideGutter","db","client","url","instance","editorConfig","nodes","htmlToLexical","html","onError","error","console","update","dom","window","document","select","selection","log","insertNodes","discrete","getEditorState","toJSON","lexicalToHtml","json","globalThis","editorState","parseEditorState","setEditorState","read"],"mappings":"AAAA,SAASA,oBAAoB,QAAQ,oBAAoB;AACzD,SAASC,sBAAsB,EAAEC,qBAAqB,QAAQ,gBAAgB;AAC9E,SAASC,aAAa,QAAQ,wBAAwB;AACtD,SACCC,mBAAmB,EACnBC,eAAe,EACfC,aAAa,EACbC,0BAA0B,QACpB,+BAA+B;AACtC,SAASC,KAAK,QAAQ,QAAQ;AAC9B,SAASC,QAAQ,EAAEC,aAAa,QAA4B,UAAU;AAEtE,SAASC,WAAW,EAAEC,UAAU,QAAQ,UAAU;AAGlD,MAAMC,aAAa;IAClB,MAAMC,SAAS;QACdC,QAAQ;QACRC,QAAQV,cAAc;YACrBW,OAAO;gBACNC,YAAY;YACb;QACD;QACAC,IAAIhB,cAAc;YACjBiB,QAAQ;gBACPC,KAAK;YACN;QACD;IACD;IAEA,MAAMC,WAAW,MAAMV,WAAW;QACjCE,QAAQH,YAAYG;IACrB;IAEA,MAAMS,eAAe,MAAMhB,2BAA2BH,qBAAqBkB,SAASR,MAAM;IAE1F,OAAOd,qBAAqB;QAC3BwB,OAAOnB,gBAAgB;YACtBkB;QACD;IACD;AACD;AAEA;;;;;CAKC,GACD,OAAO,MAAME,gBAAgB,CAACC;IAC7B,MAAMV,SAAShB,qBAAqB;QACnCwB,OAAO,EAAE;QACTG,SAAS,CAACC;YACTC,QAAQD,KAAK,CAACA;QACf;IACD;IAEAZ,OAAOc,MAAM,CACZ;QACC,0FAA0F;QAC1F,MAAMC,MAAM,IAAIvB,MAAM,CAAC,qBAAqB,EAAEkB,KAAK,OAAO,CAAC;QAE3D,qEAAqE;QACrE,MAAMF,QAAQtB,sBAAsBc,QAAQe,IAAIC,MAAM,CAACC,QAAQ;QAE/D,kBAAkB;QAClBxB,WAAWyB,MAAM;QAEjB,8BAA8B;QAC9B,MAAMC,YAAYzB;QAElBmB,QAAQO,GAAG,CAAC,qBAAqBZ;QAEjC,IAAIW,WAAWA,UAAUE,WAAW,CAACb;IACtC,GACA;QAAEc,UAAU;IAAK;IAGlB,OAAOtB,OAAOuB,cAAc,GAAGC,MAAM;AAErC,kBAAkB;AAClB,EAAE;AACF,kCAAkC;AAClC,kBAAkB;AAClB,YAAY;AACZ,gGAAgG;AAChG,mEAAmE;AACnE,EAAE;AACF,2EAA2E;AAC3E,uEAAuE;AACvE,EAAE;AACF,wBAAwB;AACxB,0BAA0B;AAC1B,EAAE;AACF,oCAAoC;AACpC,wCAAwC;AACxC,EAAE;AACF,kDAAkD;AAClD,OAAO;AACP,wBAAwB;AACxB,MAAM;AACN,EAAE;AACF,6CAA6C;AAC7C,MAAM;AACN,EAAE;AACF,yCAAyC;AAC1C,EAAE;AAEF;;;;;CAKC,GACD,OAAO,MAAMC,gBAAgB,CAACC;IAC7B,MAAM1B,SAAShB,qBAAqB;QACnCwB,OAAO,EAAE;QACTG,SAAS,CAACC;YACTC,QAAQD,KAAK,CAACA;QACf;IACD;IAEA,8BAA8B;IAC9B,MAAMG,MAAM,IAAIvB,MAAM;IAEtB,aAAa;IACbmC,WAAWX,MAAM,GAAGD,IAAIC,MAAM;IAC9BW,WAAWV,QAAQ,GAAGF,IAAIC,MAAM,CAACC,QAAQ;IAEzCjB,OAAOc,MAAM,CAAC;QACb,MAAMc,cAAc5B,OAAO6B,gBAAgB,CAACH;QAC5C1B,OAAO8B,cAAc,CAACF;IACvB;IAEA,mCAAmC;IACnC,IAAIlB,OAAO;IACXV,OAAOuB,cAAc,GAAGQ,IAAI,CAAC;QAC5BrB,OAAOzB,uBAAuBe;IAC/B;IAEA,OAAOU;AACR,EAAE"}
+ {"version":3,"sources":["../../src/util/lexical.ts"],"sourcesContent":["import { createHeadlessEditor } from '@lexical/headless';\nimport { $generateHtmlFromNodes, $generateNodesFromDOM } from '@lexical/html';\n// import { sqliteAdapter } from '@payloadcms/db-sqlite';\n// import {\n// \tdefaultEditorConfig,\n// \tgetEnabledNodes,\n// \tlexicalEditor,\n// \tsanitizeServerEditorConfig,\n// } from '@payloadcms/richtext-lexical';\nimport { JSDOM } from 'jsdom';\nimport { $getRoot, $getSelection, type LexicalEditor } from 'lexical';\nimport type { SerializedEditorState } from 'lexical';\n// import { buildConfig, getPayload } from 'payload';\n// import { importWithoutClientFiles } from 'payload/node';\n\n// const loadEditor = async (): Promise<LexicalEditor> => {\n// \tconst config = {\n// \t\tsecret: 'testing',\n// \t\teditor: lexicalEditor({\n// \t\t\tadmin: {\n// \t\t\t\thideGutter: false,\n// \t\t\t},\n// \t\t}),\n// \t\tdb: sqliteAdapter({\n// \t\t\tclient: {\n// \t\t\t\turl: 'file:./local.db',\n// \t\t\t},\n// \t\t}),\n// \t};\n//\n// \tconst instance = await getPayload({\n// \t\tconfig: buildConfig(config),\n// \t});\n//\n// \tconst editorConfig = await sanitizeServerEditorConfig(defaultEditorConfig, instance.config);\n//\n// \treturn createHeadlessEditor({\n// \t\tnodes: getEnabledNodes({\n// \t\t\teditorConfig,\n// \t\t}),\n// \t});\n// };\n\n/**\n * Converts an HTML string to a Lexical editor state.\n *\n * @param {string} html - The HTML string to convert.\n * @returns {SerializedEditorState} The serialized editor state.\n */\nexport const htmlToLexical = (html: string): SerializedEditorState => {\n\tconst editor = createHeadlessEditor({\n\t\tnodes: [],\n\t\tonError: (error) => {\n\t\t\tconsole.error(error);\n\t\t},\n\t});\n\n\teditor.update(\n\t\t() => {\n\t\t\t// In a headless environment you can use a package such as JSDom to parse the HTML string.\n\t\t\tconst dom = new JSDOM(`<!DOCTYPE html><body>${html}</body>`);\n\n\t\t\t// Once you have the DOM instance it's easy to generate LexicalNodes.\n\t\t\tconst nodes = $generateNodesFromDOM(editor, dom.window.document);\n\n\t\t\t// Select the root\n\t\t\t$getRoot().select();\n\n\t\t\t// Insert them at a selection.\n\t\t\tconst selection = $getSelection();\n\n\t\t\tif (selection) selection.insertNodes(nodes);\n\t\t},\n\t\t{ discrete: true },\n\t);\n\n\treturn editor.getEditorState().toJSON();\n\n\t// let state = {};\n\t//\n\t// loadEditor().then((editor) => {\n\t// \teditor.update(\n\t// \t\t() => {\n\t// \t\t\t// In a headless environment you can use a package such as JSDom to parse the HTML string.\n\t// \t\t\tconst dom = new JSDOM(`<!DOCTYPE html><body>${html}</body>`);\n\t//\n\t// \t\t\t// Once you have the DOM instance it's easy to generate LexicalNodes.\n\t// \t\t\tconst nodes = $generateNodesFromDOM(editor, dom.window.document);\n\t//\n\t// \t\t\t// Select the root\n\t// \t\t\t$getRoot().select();\n\t//\n\t// \t\t\t// Insert them at a selection.\n\t// \t\t\tconst selection = $getSelection();\n\t//\n\t// \t\t\tif (selection) selection.insertNodes(nodes);\n\t// \t\t},\n\t// \t\t{ discrete: true },\n\t// \t);\n\t//\n\t// \tstate = editor.getEditorState().toJSON();\n\t// });\n\t//\n\t// return state as SerializedEditorState;\n};\n\n/**\n * Converts a Lexical editor state to an HTML string.\n *\n * @param {SerializedEditorState} json - The serialized editor state to convert.\n * @returns {string} The HTML string.\n */\nexport const lexicalToHtml = (json: SerializedEditorState): string => {\n\tconst editor = 
createHeadlessEditor({\n\t\tnodes: [],\n\t\tonError: (error) => {\n\t\t\tconsole.error(error);\n\t\t},\n\t});\n\n\t// Initialize a JSDOM instance\n\tconst dom = new JSDOM('');\n\n\t// @ts-ignore\n\tglobalThis.window = dom.window;\n\tglobalThis.document = dom.window.document;\n\n\teditor.update(() => {\n\t\tconst editorState = editor.parseEditorState(json);\n\t\teditor.setEditorState(editorState);\n\t});\n\n\t// Convert the editor state to HTML\n\tlet html = '';\n\teditor.getEditorState().read(() => {\n\t\thtml = $generateHtmlFromNodes(editor);\n\t});\n\n\treturn html;\n};\n"],"names":["createHeadlessEditor","$generateHtmlFromNodes","$generateNodesFromDOM","JSDOM","$getRoot","$getSelection","htmlToLexical","html","editor","nodes","onError","error","console","update","dom","window","document","select","selection","insertNodes","discrete","getEditorState","toJSON","lexicalToHtml","json","globalThis","editorState","parseEditorState","setEditorState","read"],"mappings":"AAAA,SAASA,oBAAoB,QAAQ,oBAAoB;AACzD,SAASC,sBAAsB,EAAEC,qBAAqB,QAAQ,gBAAgB;AAC9E,yDAAyD;AACzD,WAAW;AACX,wBAAwB;AACxB,oBAAoB;AACpB,kBAAkB;AAClB,+BAA+B;AAC/B,yCAAyC;AACzC,SAASC,KAAK,QAAQ,QAAQ;AAC9B,SAASC,QAAQ,EAAEC,aAAa,QAA4B,UAAU;AAEtE,qDAAqD;AACrD,2DAA2D;AAE3D,2DAA2D;AAC3D,oBAAoB;AACpB,uBAAuB;AACvB,4BAA4B;AAC5B,cAAc;AACd,yBAAyB;AACzB,QAAQ;AACR,QAAQ;AACR,wBAAwB;AACxB,eAAe;AACf,8BAA8B;AAC9B,QAAQ;AACR,QAAQ;AACR,MAAM;AACN,EAAE;AACF,uCAAuC;AACvC,iCAAiC;AACjC,OAAO;AACP,EAAE;AACF,gGAAgG;AAChG,EAAE;AACF,iCAAiC;AACjC,6BAA6B;AAC7B,mBAAmB;AACnB,QAAQ;AACR,OAAO;AACP,KAAK;AAEL;;;;;CAKC,GACD,OAAO,MAAMC,gBAAgB,CAACC;IAC7B,MAAMC,SAASR,qBAAqB;QACnCS,OAAO,EAAE;QACTC,SAAS,CAACC;YACTC,QAAQD,KAAK,CAACA;QACf;IACD;IAEAH,OAAOK,MAAM,CACZ;QACC,0FAA0F;QAC1F,MAAMC,MAAM,IAAIX,MAAM,CAAC,qBAAqB,EAAEI,KAAK,OAAO,CAAC;QAE3D,qEAAqE;QACrE,MAAME,QAAQP,sBAAsBM,QAAQM,IAAIC,MAAM,CAACC,QAAQ;QAE/D,kBAAkB;QAClBZ,WAAWa,MAAM;QAEjB,8BAA8B;QAC9B,MAAMC,YAAYb;QAElB,IAAIa,WAAWA,UAAUC,WAAW,CAACV;IACtC,GACA;QAAEW,UAAU;IAAK;IAGlB,OAAOZ,OAAOa,cAAc,GAAGC,MAAM;AAErC,kBAAkB;AAClB,EAAE;AACF,kCAAkC;AAClC,kBAAkB;AAClB,YAAY;AACZ,gGAAgG;AAChG,mEAAmE;AACnE,EAAE;AACF,2EAA2E;AAC3E,uEAAuE;AACvE,EAAE;AACF,wBAAwB;AACxB,0BAA0B;AAC1B,EAAE;AACF,oCAAoC;AACpC,wCAAwC;AACxC,EAAE;AACF,kDAAkD;AAClD,OAAO;AACP,wBAAwB;AACxB,MAAM;AACN,EAAE;AACF,6CAA6C;AAC7C,MAAM;AACN,EAAE;AACF,yCAAyC;AAC1C,EAAE;AAEF;;;;;CAKC,GACD,OAAO,MAAMC,gBAAgB,CAACC;IAC7B,MAAMhB,SAASR,qBAAqB;QACnCS,OAAO,EAAE;QACTC,SAAS,CAACC;YACTC,QAAQD,KAAK,CAACA;QACf;IACD;IAEA,8BAA8B;IAC9B,MAAMG,MAAM,IAAIX,MAAM;IAEtB,aAAa;IACbsB,WAAWV,MAAM,GAAGD,IAAIC,MAAM;IAC9BU,WAAWT,QAAQ,GAAGF,IAAIC,MAAM,CAACC,QAAQ;IAEzCR,OAAOK,MAAM,CAAC;QACb,MAAMa,cAAclB,OAAOmB,gBAAgB,CAACH;QAC5ChB,OAAOoB,cAAc,CAACF;IACvB;IAEA,mCAAmC;IACnC,IAAInB,OAAO;IACXC,OAAOa,cAAc,GAAGQ,IAAI,CAAC;QAC5BtB,OAAON,uBAAuBO;IAC/B;IAEA,OAAOD;AACR,EAAE"}
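With the Payload/SQLite editor bootstrap commented out, the module now relies on a bare headless Lexical editor plus JSDOM for both directions of conversion. A round-trip sketch using the two exports kept by this change (the import specifier is a hypothetical example):

// Hypothetical import specifier for the two exports shown in the diff above.
import { htmlToLexical, lexicalToHtml } from '@ainsleydev/payload-helper';

// htmlToLexical parses HTML with JSDOM and returns a SerializedEditorState;
// lexicalToHtml renders that state back to an HTML string.
const state = htmlToLexical('<p>Hello <strong>world</strong></p>');
const html = lexicalToHtml(state);
console.log(html);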
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@ainsleydev/payload-helper",
- "version": "0.0.9",
+ "version": "0.0.10",
  "description": "Payload CMS utilities, collections and global types for ainsley.dev builds",
  "license": "MIT",
  "type": "module",
@@ -1,13 +0,0 @@
- import type { Payload, PayloadRequest } from 'payload';
- import type { Seeder } from './seed.js';
- /**
- * Down script to remove all media and drop all tables.
- *
- * @param payload
- * @param req
- */
- export declare const down: ({ payload, }: {
- payload: Payload;
- req: PayloadRequest;
- seeder: Seeder;
- }) => Promise<void>;
package/dist/seed/down.js DELETED
@@ -1,35 +0,0 @@
- import fs from 'node:fs';
- import path from 'node:path';
- import { fileURLToPath } from 'node:url';
- import env from '../util/env.js';
- const filename = fileURLToPath(import.meta.url);
- const dirname = path.dirname(filename);
- const postgresDrop = async (payload)=>{
- // @ts-expect-error
- const db = payload.db.pool;
- const client = await db.connect();
- await client.query('drop schema public cascade; create schema public;');
- };
- const sqlDrop = async (payload)=>{};
- /**
- * Down script to remove all media and drop all tables.
- *
- * @param payload
- * @param req
- */ export const down = async ({ payload })=>{
- payload.logger.info('Running down script');
- // Clearing local media
- if (!env.isProduction) {
- payload.logger.info('Clearing media...');
- const mediaDir = path.resolve(dirname, '../../media');
- if (fs.existsSync(mediaDir)) {
- fs.rmSync(mediaDir, {
- recursive: true,
- force: true
- });
- }
- }
- payload.logger.info('Dropping tables in database...');
- };
-
- //# sourceMappingURL=down.js.map
@@ -1 +0,0 @@
- {"version":3,"sources":["../../src/seed/down.ts"],"sourcesContent":["import fs from 'node:fs';\nimport path from 'node:path';\nimport { fileURLToPath } from 'node:url';\nimport type { Payload, PayloadRequest } from 'payload';\nimport env from '../util/env.js';\nimport type { Seeder } from './seed.js';\n\nconst filename = fileURLToPath(import.meta.url);\nconst dirname = path.dirname(filename);\n\nconst postgresDrop = async (payload: Payload): Promise<void> => {\n\t// @ts-expect-error\n\tconst db = payload.db.pool;\n\tconst client = await db.connect();\n\tawait client.query('drop schema public cascade; create schema public;');\n};\n\nconst sqlDrop = async (payload: Payload): Promise<void> => {};\n\n/**\n * Down script to remove all media and drop all tables.\n *\n * @param payload\n * @param req\n */\nexport const down = async ({\n\tpayload,\n}: {\n\tpayload: Payload;\n\treq: PayloadRequest;\n\tseeder: Seeder;\n}): Promise<void> => {\n\tpayload.logger.info('Running down script');\n\n\t// Clearing local media\n\tif (!env.isProduction) {\n\t\tpayload.logger.info('Clearing media...');\n\t\tconst mediaDir = path.resolve(dirname, '../../media');\n\t\tif (fs.existsSync(mediaDir)) {\n\t\t\tfs.rmSync(mediaDir, { recursive: true, force: true });\n\t\t}\n\t}\n\n\tpayload.logger.info('Dropping tables in database...');\n};\n"],"names":["fs","path","fileURLToPath","env","filename","url","dirname","postgresDrop","payload","db","pool","client","connect","query","sqlDrop","down","logger","info","isProduction","mediaDir","resolve","existsSync","rmSync","recursive","force"],"mappings":"AAAA,OAAOA,QAAQ,UAAU;AACzB,OAAOC,UAAU,YAAY;AAC7B,SAASC,aAAa,QAAQ,WAAW;AAEzC,OAAOC,SAAS,iBAAiB;AAGjC,MAAMC,WAAWF,cAAc,YAAYG,GAAG;AAC9C,MAAMC,UAAUL,KAAKK,OAAO,CAACF;AAE7B,MAAMG,eAAe,OAAOC;IAC3B,mBAAmB;IACnB,MAAMC,KAAKD,QAAQC,EAAE,CAACC,IAAI;IAC1B,MAAMC,SAAS,MAAMF,GAAGG,OAAO;IAC/B,MAAMD,OAAOE,KAAK,CAAC;AACpB;AAEA,MAAMC,UAAU,OAAON,WAAqC;AAE5D;;;;;CAKC,GACD,OAAO,MAAMO,OAAO,OAAO,EAC1BP,OAAO,EAKP;IACAA,QAAQQ,MAAM,CAACC,IAAI,CAAC;IAEpB,uBAAuB;IACvB,IAAI,CAACd,IAAIe,YAAY,EAAE;QACtBV,QAAQQ,MAAM,CAACC,IAAI,CAAC;QACpB,MAAME,WAAWlB,KAAKmB,OAAO,CAACd,SAAS;QACvC,IAAIN,GAAGqB,UAAU,CAACF,WAAW;YAC5BnB,GAAGsB,MAAM,CAACH,UAAU;gBAAEI,WAAW;gBAAMC,OAAO;YAAK;QACpD;IACD;IAEAhB,QAAQQ,MAAM,CAACC,IAAI,CAAC;AACrB,EAAE"}
@@ -1 +0,0 @@
- {"version":3,"sources":["../../src/seed/up.ts"],"sourcesContent":["import path from 'node:path';\nimport type { Payload, PayloadRequest } from 'payload';\nimport { getFileByPath } from 'payload';\nimport { htmlToLexical } from '../util/lexical.js';\nimport type { Seeder } from './seed.js';\nimport type { Media, MediaSeed } from './types.js';\n\n/**\n *\n * @param req\n * @param payload\n * @param dirname\n * @param media\n */\nexport const uploadMedia = async (\n\treq: PayloadRequest,\n\tpayload: Payload,\n\tdirname: string,\n\tmedia: MediaSeed,\n): Promise<Media> => {\n\ttry {\n\t\tconst image = await getFileByPath(path.resolve(dirname, media.path));\n\t\tconst caption = media.caption ? await htmlToLexical(media.caption) : null;\n\n\t\treturn (await payload.create({\n\t\t\tcollection: 'media',\n\t\t\tfile: image,\n\t\t\tdata: {\n\t\t\t\talt: media.alt,\n\t\t\t\tcaption: caption,\n\t\t\t},\n\t\t\treq,\n\t\t})) as unknown as Media;\n\t} catch (error) {\n\t\tpayload.logger.error(`Uploading media: ${error}`);\n\t\tthrow error;\n\t}\n};\n\n/**\n * Up script to create tables and seed data.\n *\n * @param payload\n * @param req\n * @param seeder\n */\nexport const up = async ({\n\tpayload,\n\treq,\n\tseeder,\n}: {\n\tpayload: Payload;\n\treq: PayloadRequest;\n\tseeder: Seeder;\n}): Promise<void> => {\n\tpayload.logger.info('Running up script');\n\n\tawait payload.init({\n\t\tconfig: payload.config,\n\t});\n\n\t// Creating new tables\n\tpayload.logger.info('Creating indexes...');\n\ttry {\n\t\tif (payload.db.init) {\n\t\t\tawait payload.db.init();\n\t\t}\n\t} catch (error) {\n\t\tpayload.logger.error(`Creating database: ${error}`);\n\t\treturn;\n\t}\n\n\tawait seeder({ payload, req });\n};\n"],"names":["path","getFileByPath","htmlToLexical","uploadMedia","req","payload","dirname","media","image","resolve","caption","create","collection","file","data","alt","error","logger","up","seeder","info","init","config","db"],"mappings":"AAAA,OAAOA,UAAU,YAAY;AAE7B,SAASC,aAAa,QAAQ,UAAU;AACxC,SAASC,aAAa,QAAQ,qBAAqB;AAInD;;;;;;CAMC,GACD,OAAO,MAAMC,cAAc,OAC1BC,KACAC,SACAC,SACAC;IAEA,IAAI;QACH,MAAMC,QAAQ,MAAMP,cAAcD,KAAKS,OAAO,CAACH,SAASC,MAAMP,IAAI;QAClE,MAAMU,UAAUH,MAAMG,OAAO,GAAG,MAAMR,cAAcK,MAAMG,OAAO,IAAI;QAErE,OAAQ,MAAML,QAAQM,MAAM,CAAC;YAC5BC,YAAY;YACZC,MAAML;YACNM,MAAM;gBACLC,KAAKR,MAAMQ,GAAG;gBACdL,SAASA;YACV;YACAN;QACD;IACD,EAAE,OAAOY,OAAO;QACfX,QAAQY,MAAM,CAACD,KAAK,CAAC,CAAC,iBAAiB,EAAEA,MAAM,CAAC;QAChD,MAAMA;IACP;AACD,EAAE;AAEF;;;;;;CAMC,GACD,OAAO,MAAME,KAAK,OAAO,EACxBb,OAAO,EACPD,GAAG,EACHe,MAAM,EAKN;IACAd,QAAQY,MAAM,CAACG,IAAI,CAAC;IAEpB,MAAMf,QAAQgB,IAAI,CAAC;QAClBC,QAAQjB,QAAQiB,MAAM;IACvB;IAEA,sBAAsB;IACtBjB,QAAQY,MAAM,CAACG,IAAI,CAAC;IACpB,IAAI;QACH,IAAIf,QAAQkB,EAAE,CAACF,IAAI,EAAE;YACpB,MAAMhB,QAAQkB,EAAE,CAACF,IAAI;QACtB;IACD,EAAE,OAAOL,OAAO;QACfX,QAAQY,MAAM,CAACD,KAAK,CAAC,CAAC,mBAAmB,EAAEA,MAAM,CAAC;QAClD;IACD;IAEA,MAAMG,OAAO;QAAEd;QAASD;IAAI;AAC7B,EAAE"}