@event-driven-io/pongo 0.17.0-beta.8 → 0.17.0-beta.9
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cli.cjs +1 -1
- package/dist/cli.cjs.map +1 -1
- package/dist/cli.js +1 -1
- package/dist/cli.js.map +1 -1
- package/dist/pg.cjs +4 -7
- package/dist/pg.cjs.map +1 -1
- package/dist/pg.d.cts +6 -6
- package/dist/pg.d.ts +6 -6
- package/dist/pg.js +5 -8
- package/dist/pg.js.map +1 -1
- package/package.json +3 -3
package/dist/cli.cjs
CHANGED
@@ -353,7 +353,7 @@ var prettifyLogs = (logLevel) => {
 var startRepl = async (options) => {
   setLogLevel(_nullishCoalesce(process.env.DUMBO_LOG_LEVEL, () => ( options.logging.logLevel)));
   setLogStyle(_nullishCoalesce(process.env.DUMBO_LOG_STYLE, () => ( options.logging.logStyle)));
-  console.log(_dumbo.color.green("Starting Pongo Shell (version: 0.17.0-beta.8)"));
+  console.log(_dumbo.color.green("Starting Pongo Shell (version: 0.17.0-beta.9)"));
   if (options.logging.printOptions) {
     console.log(_dumbo.color.green("With Options:"));
     console.log(_dumbo.prettyJson.call(void 0, options));
package/dist/cli.cjs.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"sources":["/home/runner/work/Pongo/Pongo/src/packages/pongo/dist/cli.cjs","../src/cli.ts","../src/commandLine/configFile.ts","../src/commandLine/migrate.ts","../src/commandLine/shell.ts"],"names":[],"mappings":"AAAA;AACA;AACE;AACA;AACA;AACA;AACA;AACF,wDAA6B;AAC7B;AACA;ACRA,sCAAwB;ADUxB;AACA;AEZA;AACA,gEAAe;AAQf,IAAM,eAAA,EAAiB,CAAC,KAAA,EAAA,GAA0B;AAChD,EAAA,GAAA,CAAI,KAAA,CAAM,OAAA,IAAW,CAAA,EAAG;AACtB,IAAA,OAAO,KAAA;AAAA,EACT;AAEA,EAAA,IAAI,UAAA,EAAY,KAAA,CAAM,MAAA,CAAO,CAAC,CAAA,CAAE,WAAA,CAAY,EAAA,EAAI,KAAA,CAAM,KAAA,CAAM,CAAC,CAAA;AAE7D,EAAA,GAAA,CAAI,SAAA,CAAU,QAAA,CAAS,GAAG,CAAA,EAAG;AAC3B,IAAA,UAAA,EAAY,SAAA,CAAU,KAAA,CAAM,CAAA,EAAG,CAAA,CAAE,CAAA;AAAA,EACnC;AAEA,EAAA,OAAO,SAAA;AACT,CAAA;AAEA,IAAM,aAAA,EAAe,CAAC,gBAAA,EAA4B,CAAC,OAAO,CAAA,EAAA,GAAM;AAC9D,EAAA,MAAM,MAAA,EAAQ,eAAA,CACX,GAAA;AAAA,IACC,CAAC,IAAA,EAAA,GACC,CAAA,YAAA,EAAe,cAAA,CAAe,IAAI,CAAC,CAAA,oDAAA;AAAA,EACvC,CAAA,CACC,IAAA,CAAK,IAAI,CAAA;AAEZ,EAAA,MAAM,YAAA,EAAc,eAAA,CACjB,GAAA;AAAA,IACC,CAAC,IAAA,EAAA,GACC,CAAA,MAAA,EAAS,IAAI,CAAA,yBAAA,EAA4B,cAAA,CAAe,IAAI,CAAC,CAAA,GAAA,EAAM,IAAI,CAAA,GAAA;AAAA,EAC3E,CAAA,CACC,IAAA,CAAK,IAAI,CAAA;AAEZ,EAAA,OAAO,CAAA;AAAA;AAAA,EAEP,KAAK,CAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAKL,WAAW,CAAA;AAAA;AAAA;AAAA,EAAA,CAAA;AAIb,CAAA;AAEA,IAAM,qBAAA,EAAuB,CAAA;AAAA;AAAA,EAAwD,YAAA,CAAa,CAAC,CAAA,CAAA;AAC7F;AAAgB;AAAuE;AAC1E;AAAA;AAAuF;AACpG;AAAmB;AAA+H;AAClJ;AAAqB;AAAsF;AAEpG;AAGL,EAAA;AACF,EAAA;AAEI,IAAA;AAIA,IAAA;AAEK,IAAA;AACD,MAAA;AACA,MAAA;AACV,IAAA;AAEO,IAAA;AACD,EAAA;AACE,IAAA;AACK,IAAA;AACf,EAAA;AACF;AAEa;AAIP,EAAA;AACC,IAAA;AACS,IAAA;AACL,EAAA;AACC,IAAA;AACA,IAAA;AACK,IAAA;AACf,EAAA;AACF;AAEa;AAGG,EAAA;AACL,IAAA;AACT,EAAA;AAEc,EAAA;AACL,IAAA;AACT,EAAA;AAEc,EAAA;AACL,IAAA;AACT,EAAA;AAEY,EAAA;AAEN,EAAA;AAED,EAAA;AACI,IAAA;AACT,EAAA;AAEe,EAAA;AACN,IAAA;AACT,EAAA;AAEM,EAAA;AAEF,EAAA;AACK,IAAA;AACT,EAAA;AAEO,EAAA;AACT;AAaa;AACX,EAAA;AACF;AAGG;AAGC,EAAA;AACA,EAAA;AACgB,EAAA;AAEP,IAAA;AACT,EAAA;AACC,EAAA;AAEF;AACC,EAAA;AACA,EAAA;AAEM;AAGA,EAAA;AAGA,EAAA;AACI,IAAA;AACN,MAAA;AACF,IAAA;AACa,IAAA;AACf,EAAA;AAEe,EAAA;AACD,IAAA;AACH,EAAA;AACI,IAAA;AACH,MAAA;AACN,QAAA;AACF,MAAA;AACQ,MAAA;AACV,IAAA;AAEA,IAAA;AACF,EAAA;AACD;AF9Cc;AACA;AG9IjB;AACE;AACA;AACA;AACA;AAEK;AACE;AA+BI;AACX,EAAA;AACF;AAGG;AAGC,EAAA;AACA,EAAA;AACA,EAAA;AAED;AACC,EAAA;AACA,EAAA;AAED;AACC,EAAA;AACA,EAAA;AACA,EAAA;AAED;AACC,EAAA;AACA,EAAA;AAED;AACC,EAAA;AACA,EAAA;AACgB,EAAA;AAEP,IAAA;AACT,EAAA;AACC,EAAA;AAEK;AAGE,EAAA;AACF,EAAA;AAGA,EAAA;AAIF,EAAA;AAEC,EAAA;AACK,IAAA;AACN,MAAA;AAEF,IAAA;AACa,IAAA;AACf,EAAA;AAEY,EAAA;AACJ,IAAA;AAEN,IAAA;AACS,EAAA;AACT,IAAA;AACK,EAAA;AACG,IAAA;AACN,MAAA;AACF,IAAA;AACa,IAAA;AACf,EAAA;AAEM,EAAA;AAEA,EAAA;AACJ,IAAA;AACA,IAAA;AACA,IAAA;AACA,IAAA;AACD,EAAA;AAEY,EAAA;AAEP,EAAA;AACJ,IAAA;AACD,EAAA;AACF;AAGA;AAGC,EAAA;AACA,EAAA;AAED;AACC,EAAA;AACA,EAAA;AAED;AACC,EAAA;AACA,EAAA;AACA,EAAA;AAED;AACC,EAAA;AACA,EAAA;AACgB,EAAA;AAEP,IAAA;AACT,EAAA;AACC,EAAA;AAEK;AAIE,EAAA;AAEJ,EAAA;AAEQ,EAAA;AACJ,IAAA;AAEN,IAAA;AACS,EAAA;AACT,IAAA;AACK,EAAA;AACG,IAAA;AACN,MAAA;AACF,IAAA;AACa,IAAA;AACf,EAAA;AAEM,EAAA;AAEA,EAAA;AACJ,IAAA;AACA,IAAA;AACA,IAAA;AACA,IAAA;AACD,EAAA;AAEW,EAAA;AACA,EAAA;AACb;AAEG;AACJ,EAAA;AACA,EAAA;AACA,EAAA;AACA,EAAA;AAMI;AACW,EAAA;AAEA,EAAA;AACL,IAAA;AACN,MAAA;AACF,IAAA;AACa,IAAA;AACf,EAAA;AAEM,EAAA;AAEA,EAAA;AAIM,IAAA;AACZ,EAAA;AAEM,EAAA;AACJ,IAAA;AACA,IAAA;AACF,EAAA;AAEW,EAAA;AAED,EAAA;AACZ;AHuDiB;AACA;AIlRjB;AACE;AACA;AACA;AACA;AACA;AACA;AAEK;AACE;AACF;AACE;AACQ;AAWb;AAEE;AAKE,EAAA;AACE,IAAA;AACA,MAAA;AACO,MAAA;AAAK,QAAA;AAAA;AAEP,UAAA;AAAoC,QAAA;AAC7C,MAAA;AACF,IAAA;AACO,IAAA;AACR,EAAA;AACM
,EAAA;AACT;AAEI;AAEE;AAIA;AAMA;AACQ,EAAA;AACG,IAAA;AACf,EAAA;AAEM,EAAA;AAEM,IAAA;AAAA;AAED,MAAA;AAAmD,IAAA;AAEnD,EAAA;AAEL,EAAA;AAEQ,EAAA;AACN,IAAA;AACK,IAAA;AACZ,EAAA;AAEO,EAAA;AACA,IAAA;AACJ,MAAA;AAAiB,QAAA;AAAA;AAER,UAAA;AAAS;AAEZ,YAAA;AAAyB;AAEvB,cAAA;AAAiC,YAAA;AAAA;AAEjC,cAAA;AAAsB,YAAA;AACxB,UAAA;AAII,QAAA;AACV,MAAA;AACF,IAAA;AACD,EAAA;AAEY,EAAA;AACf;AAEM;AACQ,EAAA;AACd;AAEM;AACQ,EAAA;AACd;AAEM;AACA,EAAA;AACQ,EAAA;AACd;AAEkB;AAgBJ,EAAA;AACA,EAAA;AAEA,EAAA;AAEA,EAAA;AACE,IAAA;AACA,IAAA;AACd,EAAA;AAEM,EAAA;AAKQ,EAAA;AACJ,IAAA;AACA,MAAA;AACJ,QAAA;AACF,MAAA;AACF,IAAA;AACF,EAAA;AAEQ,EAAA;AACF,EAAA;AAES,EAAA;AAEA,EAAA;AACL,IAAA;AACN,MAAA;AACF,IAAA;AACa,IAAA;AACf,EAAA;AAEM,EAAA;AAED,EAAA;AACC,IAAA;AACM,MAAA;AACA,QAAA;AACJ,UAAA;AACF,QAAA;AACF,MAAA;AACS,IAAA;AACD,MAAA;AACA,QAAA;AACJ,UAAA;AACF,QAAA;AACF,MAAA;AACK,IAAA;AACG,MAAA;AACV,IAAA;AACY,IAAA;AACC,IAAA;AACf,EAAA;AAEY,EAAA;AACA,EAAA;AAEE,EAAA;AACJ,IAAA;AACG,IAAA;AACX,IAAA;AACQ,IAAA;AACT,EAAA;AAEG,EAAA;AAEQ,EAAA;AACJ,IAAA;AAEK,IAAA;AACT,MAAA;AAEF,IAAA;AAEM,IAAA;AACM,MAAA;AACX,IAAA;AAEK,IAAA;AACJ,MAAA;AACQ,MAAA;AACN,QAAA;AACA,QAAA;AACF,MAAA;AACF,IAAA;AAGM,IAAA;AACJ,MAAA;AACF,IAAA;AAEM,IAAA;AACD,MAAA;AACA,MAAA;AACJ,IAAA;AAEI,IAAA;AAEM,IAAA;AACH,MAAA;AACR,IAAA;AAEQ,IAAA;AACH,EAAA;AACC,IAAA;AACJ,MAAA;AACQ,MAAA;AACN,QAAA;AACF,MAAA;AACF,IAAA;AAGM,IAAA;AACJ,MAAA;AACF,IAAA;AAEQ,IAAA;AACH,MAAA;AACA,MAAA;AACJ,IAAA;AAEU,IAAA;AACb,EAAA;AAEc,EAAA;AACA,EAAA;AAGA,EAAA;AACA,EAAA;AACA,EAAA;AACA,EAAA;AACA,EAAA;AACA,EAAA;AACA,EAAA;AAGL,EAAA;AACD,IAAA;AACO,IAAA;AACd,EAAA;AAEQ,EAAA;AACD,IAAA;AACO,IAAA;AACd,EAAA;AACH;AAEiB;AACH,EAAA;AACA,EAAA;AACd;AAEW;AACA;AAcL;AAGF,EAAA;AACA,EAAA;AACA,EAAA;AAED;AACC,EAAA;AACA,EAAA;AAEM;AAEN,EAAA;AACA,EAAA;AACgB,EAAA;AAEP,IAAA;AACT,EAAA;AACC,EAAA;AAEF;AACC,EAAA;AACA,EAAA;AAEM;AAEN,EAAA;AACA,EAAA;AACA,EAAA;AAEM;AAGE,EAAA;AACF,EAAA;AAEA,EAAA;AACK,IAAA;AACP,MAAA;AACU,MAAA;AAGA,MAAA;AAKZ,IAAA;AACQ,IAAA;AACN,MAAA;AACA,MAAA;AACA,MAAA;AAGF,IAAA;AACA,IAAA;AACA,IAAA;AACD,EAAA;AACF;AJ2Jc;AACA;AC7fD;AAEH;AAEL;AACA;AACA;AAEM;AAEP;AD2fU;AACA;AACA","file":"/home/runner/work/Pongo/Pongo/src/packages/pongo/dist/cli.cjs","sourcesContent":[null,"#!/usr/bin/env node\nimport { Command } from 'commander';\nimport { configCommand, migrateCommand, shellCommand } from './commandLine';\n\nconst program = new Command();\n\nprogram.name('pongo').description('CLI tool for Pongo');\n\nprogram.addCommand(configCommand);\nprogram.addCommand(migrateCommand);\nprogram.addCommand(shellCommand);\n\nprogram.parse(process.argv);\n\nexport default program;\n","import { Command } from 'commander';\nimport fs from 'node:fs';\nimport {\n objectEntries,\n toDbSchemaMetadata,\n type PongoDbSchemaMetadata,\n type PongoSchemaConfig,\n} from '../core';\n\nconst formatTypeName = (input: string): string => {\n if (input.length === 0) {\n return input;\n }\n\n let formatted = input.charAt(0).toUpperCase() + input.slice(1);\n\n if (formatted.endsWith('s')) {\n formatted = formatted.slice(0, -1);\n }\n\n return formatted;\n};\n\nconst sampleConfig = (collectionNames: string[] = ['users']) => {\n const types = collectionNames\n .map(\n (name) =>\n `export type ${formatTypeName(name)} = { name: string; description: string; date: Date }`,\n )\n .join('\\n');\n\n const collections = collectionNames\n .map(\n (name) =>\n ` ${name}: pongoSchema.collection<${formatTypeName(name)}>('${name}'),`,\n )\n .join('\\n');\n\n return `import { pongoSchema } from '@event-driven-io/pongo';\n\n${types}\n\nexport default {\n schema: pongoSchema.client({\n database: 
pongoSchema.db({\n${collections}\n }),\n }),\n};`;\n};\n\nconst missingDefaultExport = `Error: Config should contain default export, e.g.\\n\\n${sampleConfig()}`;\nconst missingSchema = `Error: Config should contain schema property, e.g.\\n\\n${sampleConfig()}`;\nconst missingDbs = `Error: Config should have at least a single database defined, e.g.\\n\\n${sampleConfig()}`;\nconst missingDefaultDb = `Error: Config should have a default database defined (without name or or with default database name), e.g.\\n\\n${sampleConfig()}`;\nconst missingCollections = `Error: Database should have defined at least one collection, e.g.\\n\\n${sampleConfig()}`;\n\nexport const loadConfigFile = async (\n configPath: string,\n): Promise<PongoDbSchemaMetadata> => {\n const configUrl = new URL(configPath, `file://${process.cwd()}/`);\n try {\n // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment\n const imported: Partial<{ default: PongoSchemaConfig }> = await import(\n configUrl.href\n );\n\n const parsed = parseDefaultDbSchema(imported);\n\n if (typeof parsed === 'string') {\n console.error(parsed);\n process.exit(1);\n }\n\n return parsed;\n } catch {\n console.error(`Error: Couldn't load file: ${configUrl.href}`);\n process.exit(1);\n }\n};\n\nexport const generateConfigFile = (\n configPath: string,\n collectionNames: string[],\n): void => {\n try {\n fs.writeFileSync(configPath, sampleConfig(collectionNames), 'utf8');\n console.log(`Configuration file stored at: ${configPath}`);\n } catch (error) {\n console.error(`Error: Couldn't store config file: ${configPath}!`);\n console.error(error);\n process.exit(1);\n }\n};\n\nexport const parseDefaultDbSchema = (\n imported: Partial<{ default: PongoSchemaConfig }>,\n): PongoDbSchemaMetadata | string => {\n if (!imported.default) {\n return missingDefaultExport;\n }\n\n if (!imported.default.schema) {\n return missingSchema;\n }\n\n if (!imported.default.schema.dbs) {\n return missingDbs;\n }\n\n const dbs = objectEntries(imported.default.schema.dbs).map((db) => db[1]);\n\n const defaultDb = dbs.find((db) => db.name === undefined);\n\n if (!defaultDb) {\n return missingDefaultDb;\n }\n\n if (!defaultDb.collections) {\n return missingCollections;\n }\n\n const collections = objectEntries(defaultDb.collections).map((col) => col[1]);\n\n if (collections.length === 0) {\n return missingCollections;\n }\n\n return toDbSchemaMetadata(defaultDb);\n};\n\ntype SampleConfigOptions =\n | {\n collection: string[];\n print?: boolean;\n }\n | {\n collection: string[];\n generate?: boolean;\n file?: string;\n };\n\nexport const configCommand = new Command('config').description(\n 'Manage Pongo configuration',\n);\n\nconfigCommand\n .command('sample')\n .description('Generate or print sample configuration')\n .option(\n '-col, --collection <name>',\n 'Specify the collection name',\n (value: string, previous: string[]) => {\n // Accumulate collection names into an array (explicitly typing `previous` as `string[]`)\n return previous.concat([value]);\n },\n [] as string[],\n )\n .option(\n '-f, --file <path>',\n 'Path to configuration file with collection list',\n )\n .option('-g, --generate', 'Generate sample config file')\n .option('-p, --print', 'Print sample config file')\n .action((options: SampleConfigOptions) => {\n const collectionNames =\n options.collection.length > 0 ? 
options.collection : ['users'];\n\n if (!('print' in options) && !('generate' in options)) {\n console.error(\n 'Error: Please provide either:\\n--print param to print sample config or\\n--generate to generate sample config file',\n );\n process.exit(1);\n }\n\n if ('print' in options) {\n console.log(`${sampleConfig(collectionNames)}`);\n } else if ('generate' in options) {\n if (!options.file) {\n console.error(\n 'Error: You need to provide a config file through a --file',\n );\n process.exit(1);\n }\n\n generateConfigFile(options.file, collectionNames);\n }\n });\n","import {\n combineMigrations,\n dumbo,\n parseConnectionString,\n runSQLMigrations,\n type DatabaseDriverType,\n} from '@event-driven-io/dumbo';\nimport { Command } from 'commander';\nimport {\n pongoDatabaseDriverRegistry,\n pongoSchema,\n type AnyPongoDatabaseDriverOptions,\n type PongoCollectionSchema,\n type PongoDatabaseFactoryOptions,\n type PongoDocument,\n} from '../core';\nimport { loadConfigFile } from './configFile';\n\ninterface MigrateRunOptions {\n collection: string[];\n connectionString: string;\n databaseType?: string;\n databaseName?: string | undefined;\n databaseDriver: string;\n config?: string;\n dryRun?: boolean;\n}\n\ninterface MigrateSqlOptions {\n print?: boolean;\n write?: string;\n databaseType: string;\n databaseName?: string | undefined;\n databaseDriver: string;\n config?: string;\n collection: string[];\n}\n\nexport const migrateCommand = new Command('migrate').description(\n 'Manage database migrations',\n);\n\nmigrateCommand\n .command('run')\n .description('Run database migrations')\n .option(\n '-dbt, --database-type <string>',\n 'Database type that should be used for connection (e.g., PostgreSQL or SQLite)',\n undefined,\n )\n .option(\n '-drv, --database-driver <string>',\n 'Database driver that should be used for connection (e.g., \"pg\" for PostgreSQL, \"sqlite3\" for SQLite)',\n )\n .option(\n '-dbn, --database-name <string>',\n 'Database name to connect to',\n undefined,\n )\n .option(\n '-cs, --connection-string <string>',\n 'Connection string for the database',\n )\n .option(\n '-col, --collection <name>',\n 'Specify the collection name',\n (value: string, previous: string[]) => {\n // Accumulate collection names into an array (explicitly typing `previous` as `string[]`)\n return previous.concat([value]);\n },\n [] as string[],\n )\n .option('-f, --config <path>', 'Path to configuration file with Pongo config')\n .option('-dr, --dryRun', 'Perform dry run without commiting changes', false)\n .action(async (options: MigrateRunOptions) => {\n const { collection, dryRun, databaseName, databaseDriver } = options;\n const connectionString =\n options.connectionString ?? process.env.DB_CONNECTION_STRING;\n\n const databaseType =\n options.databaseType ??\n parseConnectionString(connectionString).databaseType;\n\n let collectionNames: string[];\n\n if (!connectionString) {\n console.error(\n 'Error: Connection string is required. Provide it either as a \"--connection-string\" parameter or through the DB_CONNECTION_STRING environment variable.' +\n '\\nFor instance: --connection-string postgresql://postgres:postgres@localhost:5432/postgres',\n );\n process.exit(1);\n }\n\n if (options.config) {\n const config = await loadConfigFile(options.config);\n\n collectionNames = config.collections.map((c) => c.name);\n } else if (collection) {\n collectionNames = collection;\n } else {\n console.error(\n 'Error: You need to provide at least one collection name. 
Provide it either through \"--config\" file or as a \"--collection\" parameter.',\n );\n process.exit(1);\n }\n\n const driverType = `${databaseType}:${databaseDriver}` as const;\n\n const migrations = getMigrations({\n driverType,\n connectionString,\n databaseName,\n collectionNames,\n });\n\n const pool = dumbo({ connectionString, driverType });\n\n await runSQLMigrations(pool, migrations, {\n dryRun,\n });\n });\n\nmigrateCommand\n .command('sql')\n .description('Generate SQL for database migration')\n .option(\n '-dbt, --database-type <string>',\n 'Database type that should be used for connection (e.g., PostgreSQL or SQLite)',\n )\n .option(\n '-drv, --database-driver <string>',\n 'Database driver that should be used for connection (e.g., \"pg\" for PostgreSQL, \"sqlite3\" for SQLite)',\n )\n .option(\n '-dbn, --database-name <string>',\n 'Database name to connect to',\n undefined,\n )\n .option(\n '-col, --collection <name>',\n 'Specify the collection name',\n (value: string, previous: string[]) => {\n // Accumulate collection names into an array (explicitly typing `previous` as `string[]`)\n return previous.concat([value]);\n },\n [] as string[],\n )\n .option('-f, --config <path>', 'Path to configuration file with Pongo config')\n .option('--print', 'Print the SQL to the console (default)', true)\n //.option('--write <filename>', 'Write the SQL to a specified file')\n .action(async (options: MigrateSqlOptions) => {\n const { collection, databaseName, databaseType, databaseDriver } = options;\n\n let collectionNames: string[];\n\n if (options.config) {\n const config = await loadConfigFile(options.config);\n\n collectionNames = config.collections.map((c) => c.name);\n } else if (collection) {\n collectionNames = collection;\n } else {\n console.error(\n 'Error: You need to provide at least one collection name. Provide it either through \"--config\" file or as a \"--collection\" parameter.',\n );\n process.exit(1);\n }\n\n const driverType = `${databaseType}:${databaseDriver}` as const;\n\n const migrations = getMigrations({\n driverType,\n connectionString: undefined,\n databaseName,\n collectionNames,\n });\n\n console.log('Printing SQL:');\n console.log(combineMigrations(...migrations));\n });\n\nconst getMigrations = ({\n driverType,\n connectionString,\n databaseName,\n collectionNames,\n}: {\n driverType: DatabaseDriverType;\n connectionString: string | undefined;\n databaseName: string | undefined;\n collectionNames: string[];\n}) => {\n const driver = pongoDatabaseDriverRegistry.tryGet(driverType);\n\n if (driver === null) {\n console.error(\n `Error: No database driver found for driver type \"${driverType}\". Make sure the driver is registered and the connection string is correct.`,\n );\n process.exit(1);\n }\n\n const dbDefinition = pongoSchema.db.from(databaseName, collectionNames);\n\n const driverOptions: PongoDatabaseFactoryOptions<\n Record<string, PongoCollectionSchema<PongoDocument>>,\n AnyPongoDatabaseDriverOptions\n > = {\n schema: { definition: dbDefinition },\n };\n\n const customOptions = {\n connectionString: connectionString ?? 
driver.defaultConnectionString,\n databaseName,\n };\n\n const db = driver.databaseFactory({ ...driverOptions, ...customOptions });\n\n return db.schema.component.migrations;\n};\n","import {\n color,\n LogLevel,\n LogStyle,\n parseConnectionString,\n prettyJson,\n SQL,\n type MigrationStyle,\n} from '@event-driven-io/dumbo';\nimport { checkConnection } from '@event-driven-io/dumbo/pg';\nimport Table from 'cli-table3';\nimport { Command } from 'commander';\nimport repl from 'node:repl';\nimport {\n pongoClient,\n pongoDatabaseDriverRegistry,\n pongoSchema,\n type PongoClient,\n type PongoClientOptions,\n type PongoCollectionSchema,\n type PongoDb,\n} from '../core';\n\nlet pongo: PongoClient;\n\nconst calculateColumnWidths = (\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n results: any[],\n columnNames: string[],\n): number[] => {\n const columnWidths = columnNames.map((col) => {\n const maxWidth = Math.max(\n col.length, // Header size\n ...results.map((result) =>\n // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access\n result[col] ? String(result[col]).length : 0,\n ),\n );\n return maxWidth + 2; // Add padding\n });\n return columnWidths;\n};\n\nlet shouldDisplayResultsAsTable = false;\n\nconst printResultsAsTable = (print?: boolean) =>\n (shouldDisplayResultsAsTable = print === undefined || print === true);\n\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nconst printOutput = (obj: any): string =>\n Array.isArray(obj) && shouldDisplayResultsAsTable\n ? displayResultsAsTable(obj)\n : prettyJson(obj);\n\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nconst displayResultsAsTable = (results: any[]): string => {\n if (results.length === 0) {\n return color.yellow('No documents found.');\n }\n\n const columnNames = results\n\n .flatMap((result) =>\n // eslint-disable-next-line @typescript-eslint/no-unsafe-argument\n typeof result === 'object' ? Object.keys(result) : typeof result,\n )\n .filter((value, index, array) => array.indexOf(value) === index);\n\n const columnWidths = calculateColumnWidths(results, columnNames);\n\n const table = new Table({\n head: columnNames.map((col) => color.cyan(col)),\n colWidths: columnWidths,\n });\n\n results.forEach((result) => {\n table.push(\n columnNames.map((col) =>\n // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access\n result[col] !== undefined\n ? // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access\n Array.isArray(result[col])\n ? // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access\n displayResultsAsTable(result[col])\n : // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access\n prettyJson(result[col])\n : typeof result === 'object'\n ? ''\n : result != undefined && result != undefined\n ? 
prettyJson(result)\n : '',\n ),\n );\n });\n\n return table.toString();\n};\n\nconst setLogLevel = (logLevel: string) => {\n process.env.DUMBO_LOG_LEVEL = logLevel;\n};\n\nconst setLogStyle = (logLevel: string) => {\n process.env.DUMBO_LOG_STYLE = logLevel;\n};\n\nconst prettifyLogs = (logLevel?: string) => {\n if (logLevel !== undefined) setLogLevel(logLevel);\n setLogStyle(LogStyle.PRETTY);\n};\n\nconst startRepl = async (options: {\n logging: {\n printOptions: boolean;\n logLevel: LogLevel;\n logStyle: LogStyle;\n };\n schema: {\n database: string;\n collections: string[];\n autoMigration: MigrationStyle;\n };\n connectionString: string | undefined;\n databaseDriver: string;\n}) => {\n // TODO: This will change when we have proper tracing and logging config\n // For now, that's enough\n setLogLevel(process.env.DUMBO_LOG_LEVEL ?? options.logging.logLevel);\n setLogStyle(process.env.DUMBO_LOG_STYLE ?? options.logging.logStyle);\n\n console.log(color.green('Starting Pongo Shell (version: 0.17.0-beta.8)'));\n\n if (options.logging.printOptions) {\n console.log(color.green('With Options:'));\n console.log(prettyJson(options));\n }\n\n const connectionString =\n options.connectionString ??\n process.env.DB_CONNECTION_STRING ??\n 'postgresql://postgres:postgres@localhost:5432/postgres';\n\n if (!(options.connectionString ?? process.env.DB_CONNECTION_STRING)) {\n console.log(\n color.yellow(\n `No connection string provided, using: 'postgresql://postgres:postgres@localhost:5432/postgres'`,\n ),\n );\n }\n\n const { databaseType } = parseConnectionString(connectionString);\n const driverType = `${databaseType}:${options.databaseDriver}` as const;\n\n const driver = pongoDatabaseDriverRegistry.tryGet(driverType);\n\n if (driver === null) {\n console.error(\n `Error: No database driver found for driver type \"${driverType}\". Make sure the driver is installed and the connection string is correct.`,\n );\n process.exit(1);\n }\n\n const connectionCheck = await checkConnection(connectionString);\n\n if (!connectionCheck.successful) {\n if (connectionCheck.errorType === 'ConnectionRefused') {\n console.error(\n color.red(\n `Connection was refused. Check if the PostgreSQL server is running and accessible.`,\n ),\n );\n } else if (connectionCheck.errorType === 'Authentication') {\n console.error(\n color.red(\n `Authentication failed. 
Check the username and password in the connection string.`,\n ),\n );\n } else {\n console.error(color.red('Error connecting to PostgreSQL server'));\n }\n console.log(color.red('Exiting Pongo Shell...'));\n process.exit();\n }\n\n console.log(color.green(`Successfully connected`));\n console.log(color.green('Use db.<collection>.<method>() to query.'));\n\n const shell = repl.start({\n prompt: color.green('pongo> '),\n useGlobal: true,\n breakEvalOnSigint: true,\n writer: printOutput,\n });\n\n let db: PongoDb;\n\n if (options.schema.collections.length > 0) {\n const collectionsSchema: Record<string, PongoCollectionSchema> = {};\n\n for (const collectionName of options.schema.collections) {\n collectionsSchema[collectionName] =\n pongoSchema.collection(collectionName);\n }\n\n const schema = pongoSchema.client({\n database: pongoSchema.db(options.schema.database, collectionsSchema),\n });\n\n const driverOptions: PongoClientOptions = {\n driver,\n schema: {\n definition: schema,\n autoMigration: options.schema.autoMigration,\n },\n };\n\n // TODO: Find a better way to pass custom driver settings\n const customOptions = {\n connectionString,\n };\n\n const typedClient = pongoClient({\n ...driverOptions,\n ...customOptions,\n });\n\n db = typedClient.database!;\n\n for (const collectionName of options.schema.collections) {\n shell.context[collectionName] = typedClient.database![collectionName];\n }\n\n pongo = typedClient;\n } else {\n const driverOptions: PongoClientOptions = {\n driver,\n schema: {\n autoMigration: options.schema.autoMigration,\n },\n };\n\n // TODO: Find a better way to pass custom driver settings\n const customOptions = {\n connectionString,\n };\n\n pongo = pongoClient({\n ...driverOptions,\n ...customOptions,\n });\n\n db = pongo.db(options.schema.database);\n }\n\n shell.context.pongo = pongo;\n shell.context.db = db;\n\n // helpers\n shell.context.SQL = SQL;\n shell.context.setLogLevel = setLogLevel;\n shell.context.setLogStyle = setLogStyle;\n shell.context.prettifyLogs = prettifyLogs;\n shell.context.printResultsAsTable = printResultsAsTable;\n shell.context.LogStyle = LogStyle;\n shell.context.LogLevel = LogLevel;\n\n // Intercept REPL output to display results as a table if they are arrays\n shell.on('exit', async () => {\n await teardown();\n process.exit();\n });\n\n shell.on('SIGINT', async () => {\n await teardown();\n process.exit();\n });\n};\n\nconst teardown = async () => {\n console.log(color.yellow('Exiting Pongo Shell...'));\n await pongo.close();\n};\n\nprocess.on('uncaughtException', teardown);\nprocess.on('SIGINT', teardown);\n\ninterface ShellOptions {\n database: string;\n collection: string[];\n databaseDriver: string;\n connectionString?: string;\n disableAutoMigrations: boolean;\n logStyle?: string;\n logLevel?: string;\n prettyLog?: boolean;\n printOptions?: boolean;\n}\n\nconst shellCommand = new Command('shell')\n .description('Start an interactive Pongo shell')\n .option(\n '-drv, --database-driver <string>',\n 'Database driver that should be used for connection (e.g., \"pg\" for PostgreSQL, \"sqlite3\" for SQLite)',\n 'pg',\n )\n .option(\n '-cs, --connectionString <string>',\n 'Connection string for the database',\n )\n .option('-db, --database <string>', 'Database name to connect', 'postgres')\n .option(\n '-col, --collection <name>',\n 'Specify the collection name',\n (value: string, previous: string[]) => {\n // Accumulate collection names into an array (explicitly typing `previous` as `string[]`)\n return previous.concat([value]);\n 
},\n [] as string[],\n )\n .option(\n '-no-migrations, --disable-auto-migrations',\n 'Disable automatic migrations',\n )\n .option('-o, --print-options', 'Print shell options')\n .option(\n '-ll, --log-level <logLevel>',\n 'Log level: DISABLED, INFO, LOG, WARN, ERROR',\n 'DISABLED',\n )\n .option('-ls, --log-style', 'Log style: RAW, PRETTY', 'RAW')\n .option('-p, --pretty-log', 'Turn on logging with prettified output')\n .action(async (options: ShellOptions) => {\n const { collection, database } = options;\n const connectionString = options.connectionString;\n\n await startRepl({\n logging: {\n printOptions: options.printOptions === true,\n logStyle: options.prettyLog\n ? LogStyle.PRETTY\n : ((options.logStyle as LogStyle | undefined) ?? LogStyle.RAW),\n logLevel: options.logLevel\n ? (options.logLevel as LogLevel)\n : options.prettyLog\n ? LogLevel.INFO\n : LogLevel.DISABLED,\n },\n schema: {\n collections: collection,\n database,\n autoMigration: options.disableAutoMigrations\n ? 'None'\n : 'CreateOrUpdate',\n },\n connectionString,\n databaseDriver: options.databaseDriver,\n });\n });\n\nexport { shellCommand };\n"]}
+
{"version":3,"sources":["/home/runner/work/Pongo/Pongo/src/packages/pongo/dist/cli.cjs","../src/cli.ts","../src/commandLine/configFile.ts","../src/commandLine/migrate.ts","../src/commandLine/shell.ts"],"names":[],"mappings":"AAAA;AACA;AACE;AACA;AACA;AACA;AACA;AACF,wDAA6B;AAC7B;AACA;ACRA,sCAAwB;ADUxB;AACA;AEZA;AACA,gEAAe;AAQf,IAAM,eAAA,EAAiB,CAAC,KAAA,EAAA,GAA0B;AAChD,EAAA,GAAA,CAAI,KAAA,CAAM,OAAA,IAAW,CAAA,EAAG;AACtB,IAAA,OAAO,KAAA;AAAA,EACT;AAEA,EAAA,IAAI,UAAA,EAAY,KAAA,CAAM,MAAA,CAAO,CAAC,CAAA,CAAE,WAAA,CAAY,EAAA,EAAI,KAAA,CAAM,KAAA,CAAM,CAAC,CAAA;AAE7D,EAAA,GAAA,CAAI,SAAA,CAAU,QAAA,CAAS,GAAG,CAAA,EAAG;AAC3B,IAAA,UAAA,EAAY,SAAA,CAAU,KAAA,CAAM,CAAA,EAAG,CAAA,CAAE,CAAA;AAAA,EACnC;AAEA,EAAA,OAAO,SAAA;AACT,CAAA;AAEA,IAAM,aAAA,EAAe,CAAC,gBAAA,EAA4B,CAAC,OAAO,CAAA,EAAA,GAAM;AAC9D,EAAA,MAAM,MAAA,EAAQ,eAAA,CACX,GAAA;AAAA,IACC,CAAC,IAAA,EAAA,GACC,CAAA,YAAA,EAAe,cAAA,CAAe,IAAI,CAAC,CAAA,oDAAA;AAAA,EACvC,CAAA,CACC,IAAA,CAAK,IAAI,CAAA;AAEZ,EAAA,MAAM,YAAA,EAAc,eAAA,CACjB,GAAA;AAAA,IACC,CAAC,IAAA,EAAA,GACC,CAAA,MAAA,EAAS,IAAI,CAAA,yBAAA,EAA4B,cAAA,CAAe,IAAI,CAAC,CAAA,GAAA,EAAM,IAAI,CAAA,GAAA;AAAA,EAC3E,CAAA,CACC,IAAA,CAAK,IAAI,CAAA;AAEZ,EAAA,OAAO,CAAA;AAAA;AAAA,EAEP,KAAK,CAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAKL,WAAW,CAAA;AAAA;AAAA;AAAA,EAAA,CAAA;AAIb,CAAA;AAEA,IAAM,qBAAA,EAAuB,CAAA;AAAA;AAAA,EAAwD,YAAA,CAAa,CAAC,CAAA,CAAA;AAC7F;AAAgB;AAAuE;AAC1E;AAAA;AAAuF;AACpG;AAAmB;AAA+H;AAClJ;AAAqB;AAAsF;AAEpG;AAGL,EAAA;AACF,EAAA;AAEI,IAAA;AAIA,IAAA;AAEK,IAAA;AACD,MAAA;AACA,MAAA;AACV,IAAA;AAEO,IAAA;AACD,EAAA;AACE,IAAA;AACK,IAAA;AACf,EAAA;AACF;AAEa;AAIP,EAAA;AACC,IAAA;AACS,IAAA;AACL,EAAA;AACC,IAAA;AACA,IAAA;AACK,IAAA;AACf,EAAA;AACF;AAEa;AAGG,EAAA;AACL,IAAA;AACT,EAAA;AAEc,EAAA;AACL,IAAA;AACT,EAAA;AAEc,EAAA;AACL,IAAA;AACT,EAAA;AAEY,EAAA;AAEN,EAAA;AAED,EAAA;AACI,IAAA;AACT,EAAA;AAEe,EAAA;AACN,IAAA;AACT,EAAA;AAEM,EAAA;AAEF,EAAA;AACK,IAAA;AACT,EAAA;AAEO,EAAA;AACT;AAaa;AACX,EAAA;AACF;AAGG;AAGC,EAAA;AACA,EAAA;AACgB,EAAA;AAEP,IAAA;AACT,EAAA;AACC,EAAA;AAEF;AACC,EAAA;AACA,EAAA;AAEM;AAGA,EAAA;AAGA,EAAA;AACI,IAAA;AACN,MAAA;AACF,IAAA;AACa,IAAA;AACf,EAAA;AAEe,EAAA;AACD,IAAA;AACH,EAAA;AACI,IAAA;AACH,MAAA;AACN,QAAA;AACF,MAAA;AACQ,MAAA;AACV,IAAA;AAEA,IAAA;AACF,EAAA;AACD;AF9Cc;AACA;AG9IjB;AACE;AACA;AACA;AACA;AAEK;AACE;AA+BI;AACX,EAAA;AACF;AAGG;AAGC,EAAA;AACA,EAAA;AACA,EAAA;AAED;AACC,EAAA;AACA,EAAA;AAED;AACC,EAAA;AACA,EAAA;AACA,EAAA;AAED;AACC,EAAA;AACA,EAAA;AAED;AACC,EAAA;AACA,EAAA;AACgB,EAAA;AAEP,IAAA;AACT,EAAA;AACC,EAAA;AAEK;AAGE,EAAA;AACF,EAAA;AAGA,EAAA;AAIF,EAAA;AAEC,EAAA;AACK,IAAA;AACN,MAAA;AAEF,IAAA;AACa,IAAA;AACf,EAAA;AAEY,EAAA;AACJ,IAAA;AAEN,IAAA;AACS,EAAA;AACT,IAAA;AACK,EAAA;AACG,IAAA;AACN,MAAA;AACF,IAAA;AACa,IAAA;AACf,EAAA;AAEM,EAAA;AAEA,EAAA;AACJ,IAAA;AACA,IAAA;AACA,IAAA;AACA,IAAA;AACD,EAAA;AAEY,EAAA;AAEP,EAAA;AACJ,IAAA;AACD,EAAA;AACF;AAGA;AAGC,EAAA;AACA,EAAA;AAED;AACC,EAAA;AACA,EAAA;AAED;AACC,EAAA;AACA,EAAA;AACA,EAAA;AAED;AACC,EAAA;AACA,EAAA;AACgB,EAAA;AAEP,IAAA;AACT,EAAA;AACC,EAAA;AAEK;AAIE,EAAA;AAEJ,EAAA;AAEQ,EAAA;AACJ,IAAA;AAEN,IAAA;AACS,EAAA;AACT,IAAA;AACK,EAAA;AACG,IAAA;AACN,MAAA;AACF,IAAA;AACa,IAAA;AACf,EAAA;AAEM,EAAA;AAEA,EAAA;AACJ,IAAA;AACA,IAAA;AACA,IAAA;AACA,IAAA;AACD,EAAA;AAEW,EAAA;AACA,EAAA;AACb;AAEG;AACJ,EAAA;AACA,EAAA;AACA,EAAA;AACA,EAAA;AAMI;AACW,EAAA;AAEA,EAAA;AACL,IAAA;AACN,MAAA;AACF,IAAA;AACa,IAAA;AACf,EAAA;AAEM,EAAA;AAEA,EAAA;AAIM,IAAA;AACZ,EAAA;AAEM,EAAA;AACJ,IAAA;AACA,IAAA;AACF,EAAA;AAEW,EAAA;AAED,EAAA;AACZ;AHuDiB;AACA;AIlRjB;AACE;AACA;AACA;AACA;AACA;AACA;AAEK;AACE;AACF;AACE;AACQ;AAWb;AAEE;AAKE,EAAA;AACE,IAAA;AACA,MAAA;AACO,MAAA;AAAK,QAAA;AAAA;AAEP,UAAA;AAAoC,QAAA;AAC7C,MAAA;AACF,IAAA;AACO,IAAA;AACR,EAAA;AACM
,EAAA;AACT;AAEI;AAEE;AAIA;AAMA;AACQ,EAAA;AACG,IAAA;AACf,EAAA;AAEM,EAAA;AAEM,IAAA;AAAA;AAED,MAAA;AAAmD,IAAA;AAEnD,EAAA;AAEL,EAAA;AAEQ,EAAA;AACN,IAAA;AACK,IAAA;AACZ,EAAA;AAEO,EAAA;AACA,IAAA;AACJ,MAAA;AAAiB,QAAA;AAAA;AAER,UAAA;AAAS;AAEZ,YAAA;AAAyB;AAEvB,cAAA;AAAiC,YAAA;AAAA;AAEjC,cAAA;AAAsB,YAAA;AACxB,UAAA;AAII,QAAA;AACV,MAAA;AACF,IAAA;AACD,EAAA;AAEY,EAAA;AACf;AAEM;AACQ,EAAA;AACd;AAEM;AACQ,EAAA;AACd;AAEM;AACA,EAAA;AACQ,EAAA;AACd;AAEkB;AAgBJ,EAAA;AACA,EAAA;AAEA,EAAA;AAEA,EAAA;AACE,IAAA;AACA,IAAA;AACd,EAAA;AAEM,EAAA;AAKQ,EAAA;AACJ,IAAA;AACA,MAAA;AACJ,QAAA;AACF,MAAA;AACF,IAAA;AACF,EAAA;AAEQ,EAAA;AACF,EAAA;AAES,EAAA;AAEA,EAAA;AACL,IAAA;AACN,MAAA;AACF,IAAA;AACa,IAAA;AACf,EAAA;AAEM,EAAA;AAED,EAAA;AACC,IAAA;AACM,MAAA;AACA,QAAA;AACJ,UAAA;AACF,QAAA;AACF,MAAA;AACS,IAAA;AACD,MAAA;AACA,QAAA;AACJ,UAAA;AACF,QAAA;AACF,MAAA;AACK,IAAA;AACG,MAAA;AACV,IAAA;AACY,IAAA;AACC,IAAA;AACf,EAAA;AAEY,EAAA;AACA,EAAA;AAEE,EAAA;AACJ,IAAA;AACG,IAAA;AACX,IAAA;AACQ,IAAA;AACT,EAAA;AAEG,EAAA;AAEQ,EAAA;AACJ,IAAA;AAEK,IAAA;AACT,MAAA;AAEF,IAAA;AAEM,IAAA;AACM,MAAA;AACX,IAAA;AAEK,IAAA;AACJ,MAAA;AACQ,MAAA;AACN,QAAA;AACA,QAAA;AACF,MAAA;AACF,IAAA;AAGM,IAAA;AACJ,MAAA;AACF,IAAA;AAEM,IAAA;AACD,MAAA;AACA,MAAA;AACJ,IAAA;AAEI,IAAA;AAEM,IAAA;AACH,MAAA;AACR,IAAA;AAEQ,IAAA;AACH,EAAA;AACC,IAAA;AACJ,MAAA;AACQ,MAAA;AACN,QAAA;AACF,MAAA;AACF,IAAA;AAGM,IAAA;AACJ,MAAA;AACF,IAAA;AAEQ,IAAA;AACH,MAAA;AACA,MAAA;AACJ,IAAA;AAEU,IAAA;AACb,EAAA;AAEc,EAAA;AACA,EAAA;AAGA,EAAA;AACA,EAAA;AACA,EAAA;AACA,EAAA;AACA,EAAA;AACA,EAAA;AACA,EAAA;AAGL,EAAA;AACD,IAAA;AACO,IAAA;AACd,EAAA;AAEQ,EAAA;AACD,IAAA;AACO,IAAA;AACd,EAAA;AACH;AAEiB;AACH,EAAA;AACA,EAAA;AACd;AAEW;AACA;AAcL;AAGF,EAAA;AACA,EAAA;AACA,EAAA;AAED;AACC,EAAA;AACA,EAAA;AAEM;AAEN,EAAA;AACA,EAAA;AACgB,EAAA;AAEP,IAAA;AACT,EAAA;AACC,EAAA;AAEF;AACC,EAAA;AACA,EAAA;AAEM;AAEN,EAAA;AACA,EAAA;AACA,EAAA;AAEM;AAGE,EAAA;AACF,EAAA;AAEA,EAAA;AACK,IAAA;AACP,MAAA;AACU,MAAA;AAGA,MAAA;AAKZ,IAAA;AACQ,IAAA;AACN,MAAA;AACA,MAAA;AACA,MAAA;AAGF,IAAA;AACA,IAAA;AACA,IAAA;AACD,EAAA;AACF;AJ2Jc;AACA;AC7fD;AAEH;AAEL;AACA;AACA;AAEM;AAEP;AD2fU;AACA;AACA","file":"/home/runner/work/Pongo/Pongo/src/packages/pongo/dist/cli.cjs","sourcesContent":[null,"#!/usr/bin/env node\nimport { Command } from 'commander';\nimport { configCommand, migrateCommand, shellCommand } from './commandLine';\n\nconst program = new Command();\n\nprogram.name('pongo').description('CLI tool for Pongo');\n\nprogram.addCommand(configCommand);\nprogram.addCommand(migrateCommand);\nprogram.addCommand(shellCommand);\n\nprogram.parse(process.argv);\n\nexport default program;\n","import { Command } from 'commander';\nimport fs from 'node:fs';\nimport {\n objectEntries,\n toDbSchemaMetadata,\n type PongoDbSchemaMetadata,\n type PongoSchemaConfig,\n} from '../core';\n\nconst formatTypeName = (input: string): string => {\n if (input.length === 0) {\n return input;\n }\n\n let formatted = input.charAt(0).toUpperCase() + input.slice(1);\n\n if (formatted.endsWith('s')) {\n formatted = formatted.slice(0, -1);\n }\n\n return formatted;\n};\n\nconst sampleConfig = (collectionNames: string[] = ['users']) => {\n const types = collectionNames\n .map(\n (name) =>\n `export type ${formatTypeName(name)} = { name: string; description: string; date: Date }`,\n )\n .join('\\n');\n\n const collections = collectionNames\n .map(\n (name) =>\n ` ${name}: pongoSchema.collection<${formatTypeName(name)}>('${name}'),`,\n )\n .join('\\n');\n\n return `import { pongoSchema } from '@event-driven-io/pongo';\n\n${types}\n\nexport default {\n schema: pongoSchema.client({\n database: 
pongoSchema.db({\n${collections}\n }),\n }),\n};`;\n};\n\nconst missingDefaultExport = `Error: Config should contain default export, e.g.\\n\\n${sampleConfig()}`;\nconst missingSchema = `Error: Config should contain schema property, e.g.\\n\\n${sampleConfig()}`;\nconst missingDbs = `Error: Config should have at least a single database defined, e.g.\\n\\n${sampleConfig()}`;\nconst missingDefaultDb = `Error: Config should have a default database defined (without name or or with default database name), e.g.\\n\\n${sampleConfig()}`;\nconst missingCollections = `Error: Database should have defined at least one collection, e.g.\\n\\n${sampleConfig()}`;\n\nexport const loadConfigFile = async (\n configPath: string,\n): Promise<PongoDbSchemaMetadata> => {\n const configUrl = new URL(configPath, `file://${process.cwd()}/`);\n try {\n // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment\n const imported: Partial<{ default: PongoSchemaConfig }> = await import(\n configUrl.href\n );\n\n const parsed = parseDefaultDbSchema(imported);\n\n if (typeof parsed === 'string') {\n console.error(parsed);\n process.exit(1);\n }\n\n return parsed;\n } catch {\n console.error(`Error: Couldn't load file: ${configUrl.href}`);\n process.exit(1);\n }\n};\n\nexport const generateConfigFile = (\n configPath: string,\n collectionNames: string[],\n): void => {\n try {\n fs.writeFileSync(configPath, sampleConfig(collectionNames), 'utf8');\n console.log(`Configuration file stored at: ${configPath}`);\n } catch (error) {\n console.error(`Error: Couldn't store config file: ${configPath}!`);\n console.error(error);\n process.exit(1);\n }\n};\n\nexport const parseDefaultDbSchema = (\n imported: Partial<{ default: PongoSchemaConfig }>,\n): PongoDbSchemaMetadata | string => {\n if (!imported.default) {\n return missingDefaultExport;\n }\n\n if (!imported.default.schema) {\n return missingSchema;\n }\n\n if (!imported.default.schema.dbs) {\n return missingDbs;\n }\n\n const dbs = objectEntries(imported.default.schema.dbs).map((db) => db[1]);\n\n const defaultDb = dbs.find((db) => db.name === undefined);\n\n if (!defaultDb) {\n return missingDefaultDb;\n }\n\n if (!defaultDb.collections) {\n return missingCollections;\n }\n\n const collections = objectEntries(defaultDb.collections).map((col) => col[1]);\n\n if (collections.length === 0) {\n return missingCollections;\n }\n\n return toDbSchemaMetadata(defaultDb);\n};\n\ntype SampleConfigOptions =\n | {\n collection: string[];\n print?: boolean;\n }\n | {\n collection: string[];\n generate?: boolean;\n file?: string;\n };\n\nexport const configCommand = new Command('config').description(\n 'Manage Pongo configuration',\n);\n\nconfigCommand\n .command('sample')\n .description('Generate or print sample configuration')\n .option(\n '-col, --collection <name>',\n 'Specify the collection name',\n (value: string, previous: string[]) => {\n // Accumulate collection names into an array (explicitly typing `previous` as `string[]`)\n return previous.concat([value]);\n },\n [] as string[],\n )\n .option(\n '-f, --file <path>',\n 'Path to configuration file with collection list',\n )\n .option('-g, --generate', 'Generate sample config file')\n .option('-p, --print', 'Print sample config file')\n .action((options: SampleConfigOptions) => {\n const collectionNames =\n options.collection.length > 0 ? 
options.collection : ['users'];\n\n if (!('print' in options) && !('generate' in options)) {\n console.error(\n 'Error: Please provide either:\\n--print param to print sample config or\\n--generate to generate sample config file',\n );\n process.exit(1);\n }\n\n if ('print' in options) {\n console.log(`${sampleConfig(collectionNames)}`);\n } else if ('generate' in options) {\n if (!options.file) {\n console.error(\n 'Error: You need to provide a config file through a --file',\n );\n process.exit(1);\n }\n\n generateConfigFile(options.file, collectionNames);\n }\n });\n","import {\n combineMigrations,\n dumbo,\n parseConnectionString,\n runSQLMigrations,\n type DatabaseDriverType,\n} from '@event-driven-io/dumbo';\nimport { Command } from 'commander';\nimport {\n pongoDatabaseDriverRegistry,\n pongoSchema,\n type AnyPongoDatabaseDriverOptions,\n type PongoCollectionSchema,\n type PongoDatabaseFactoryOptions,\n type PongoDocument,\n} from '../core';\nimport { loadConfigFile } from './configFile';\n\ninterface MigrateRunOptions {\n collection: string[];\n connectionString: string;\n databaseType?: string;\n databaseName?: string | undefined;\n databaseDriver: string;\n config?: string;\n dryRun?: boolean;\n}\n\ninterface MigrateSqlOptions {\n print?: boolean;\n write?: string;\n databaseType: string;\n databaseName?: string | undefined;\n databaseDriver: string;\n config?: string;\n collection: string[];\n}\n\nexport const migrateCommand = new Command('migrate').description(\n 'Manage database migrations',\n);\n\nmigrateCommand\n .command('run')\n .description('Run database migrations')\n .option(\n '-dbt, --database-type <string>',\n 'Database type that should be used for connection (e.g., PostgreSQL or SQLite)',\n undefined,\n )\n .option(\n '-drv, --database-driver <string>',\n 'Database driver that should be used for connection (e.g., \"pg\" for PostgreSQL, \"sqlite3\" for SQLite)',\n )\n .option(\n '-dbn, --database-name <string>',\n 'Database name to connect to',\n undefined,\n )\n .option(\n '-cs, --connection-string <string>',\n 'Connection string for the database',\n )\n .option(\n '-col, --collection <name>',\n 'Specify the collection name',\n (value: string, previous: string[]) => {\n // Accumulate collection names into an array (explicitly typing `previous` as `string[]`)\n return previous.concat([value]);\n },\n [] as string[],\n )\n .option('-f, --config <path>', 'Path to configuration file with Pongo config')\n .option('-dr, --dryRun', 'Perform dry run without commiting changes', false)\n .action(async (options: MigrateRunOptions) => {\n const { collection, dryRun, databaseName, databaseDriver } = options;\n const connectionString =\n options.connectionString ?? process.env.DB_CONNECTION_STRING;\n\n const databaseType =\n options.databaseType ??\n parseConnectionString(connectionString).databaseType;\n\n let collectionNames: string[];\n\n if (!connectionString) {\n console.error(\n 'Error: Connection string is required. Provide it either as a \"--connection-string\" parameter or through the DB_CONNECTION_STRING environment variable.' +\n '\\nFor instance: --connection-string postgresql://postgres:postgres@localhost:5432/postgres',\n );\n process.exit(1);\n }\n\n if (options.config) {\n const config = await loadConfigFile(options.config);\n\n collectionNames = config.collections.map((c) => c.name);\n } else if (collection) {\n collectionNames = collection;\n } else {\n console.error(\n 'Error: You need to provide at least one collection name. 
Provide it either through \"--config\" file or as a \"--collection\" parameter.',\n );\n process.exit(1);\n }\n\n const driverType = `${databaseType}:${databaseDriver}` as const;\n\n const migrations = getMigrations({\n driverType,\n connectionString,\n databaseName,\n collectionNames,\n });\n\n const pool = dumbo({ connectionString, driverType });\n\n await runSQLMigrations(pool, migrations, {\n dryRun,\n });\n });\n\nmigrateCommand\n .command('sql')\n .description('Generate SQL for database migration')\n .option(\n '-dbt, --database-type <string>',\n 'Database type that should be used for connection (e.g., PostgreSQL or SQLite)',\n )\n .option(\n '-drv, --database-driver <string>',\n 'Database driver that should be used for connection (e.g., \"pg\" for PostgreSQL, \"sqlite3\" for SQLite)',\n )\n .option(\n '-dbn, --database-name <string>',\n 'Database name to connect to',\n undefined,\n )\n .option(\n '-col, --collection <name>',\n 'Specify the collection name',\n (value: string, previous: string[]) => {\n // Accumulate collection names into an array (explicitly typing `previous` as `string[]`)\n return previous.concat([value]);\n },\n [] as string[],\n )\n .option('-f, --config <path>', 'Path to configuration file with Pongo config')\n .option('--print', 'Print the SQL to the console (default)', true)\n //.option('--write <filename>', 'Write the SQL to a specified file')\n .action(async (options: MigrateSqlOptions) => {\n const { collection, databaseName, databaseType, databaseDriver } = options;\n\n let collectionNames: string[];\n\n if (options.config) {\n const config = await loadConfigFile(options.config);\n\n collectionNames = config.collections.map((c) => c.name);\n } else if (collection) {\n collectionNames = collection;\n } else {\n console.error(\n 'Error: You need to provide at least one collection name. Provide it either through \"--config\" file or as a \"--collection\" parameter.',\n );\n process.exit(1);\n }\n\n const driverType = `${databaseType}:${databaseDriver}` as const;\n\n const migrations = getMigrations({\n driverType,\n connectionString: undefined,\n databaseName,\n collectionNames,\n });\n\n console.log('Printing SQL:');\n console.log(combineMigrations(...migrations));\n });\n\nconst getMigrations = ({\n driverType,\n connectionString,\n databaseName,\n collectionNames,\n}: {\n driverType: DatabaseDriverType;\n connectionString: string | undefined;\n databaseName: string | undefined;\n collectionNames: string[];\n}) => {\n const driver = pongoDatabaseDriverRegistry.tryGet(driverType);\n\n if (driver === null) {\n console.error(\n `Error: No database driver found for driver type \"${driverType}\". Make sure the driver is registered and the connection string is correct.`,\n );\n process.exit(1);\n }\n\n const dbDefinition = pongoSchema.db.from(databaseName, collectionNames);\n\n const driverOptions: PongoDatabaseFactoryOptions<\n Record<string, PongoCollectionSchema<PongoDocument>>,\n AnyPongoDatabaseDriverOptions\n > = {\n schema: { definition: dbDefinition },\n };\n\n const customOptions = {\n connectionString: connectionString ?? 
driver.defaultConnectionString,\n databaseName,\n };\n\n const db = driver.databaseFactory({ ...driverOptions, ...customOptions });\n\n return db.schema.component.migrations;\n};\n","import {\n color,\n LogLevel,\n LogStyle,\n parseConnectionString,\n prettyJson,\n SQL,\n type MigrationStyle,\n} from '@event-driven-io/dumbo';\nimport { checkConnection } from '@event-driven-io/dumbo/pg';\nimport Table from 'cli-table3';\nimport { Command } from 'commander';\nimport repl from 'node:repl';\nimport {\n pongoClient,\n pongoDatabaseDriverRegistry,\n pongoSchema,\n type PongoClient,\n type PongoClientOptions,\n type PongoCollectionSchema,\n type PongoDb,\n} from '../core';\n\nlet pongo: PongoClient;\n\nconst calculateColumnWidths = (\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n results: any[],\n columnNames: string[],\n): number[] => {\n const columnWidths = columnNames.map((col) => {\n const maxWidth = Math.max(\n col.length, // Header size\n ...results.map((result) =>\n // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access\n result[col] ? String(result[col]).length : 0,\n ),\n );\n return maxWidth + 2; // Add padding\n });\n return columnWidths;\n};\n\nlet shouldDisplayResultsAsTable = false;\n\nconst printResultsAsTable = (print?: boolean) =>\n (shouldDisplayResultsAsTable = print === undefined || print === true);\n\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nconst printOutput = (obj: any): string =>\n Array.isArray(obj) && shouldDisplayResultsAsTable\n ? displayResultsAsTable(obj)\n : prettyJson(obj);\n\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nconst displayResultsAsTable = (results: any[]): string => {\n if (results.length === 0) {\n return color.yellow('No documents found.');\n }\n\n const columnNames = results\n\n .flatMap((result) =>\n // eslint-disable-next-line @typescript-eslint/no-unsafe-argument\n typeof result === 'object' ? Object.keys(result) : typeof result,\n )\n .filter((value, index, array) => array.indexOf(value) === index);\n\n const columnWidths = calculateColumnWidths(results, columnNames);\n\n const table = new Table({\n head: columnNames.map((col) => color.cyan(col)),\n colWidths: columnWidths,\n });\n\n results.forEach((result) => {\n table.push(\n columnNames.map((col) =>\n // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access\n result[col] !== undefined\n ? // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access\n Array.isArray(result[col])\n ? // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access\n displayResultsAsTable(result[col])\n : // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access\n prettyJson(result[col])\n : typeof result === 'object'\n ? ''\n : result != undefined && result != undefined\n ? 
prettyJson(result)\n : '',\n ),\n );\n });\n\n return table.toString();\n};\n\nconst setLogLevel = (logLevel: string) => {\n process.env.DUMBO_LOG_LEVEL = logLevel;\n};\n\nconst setLogStyle = (logLevel: string) => {\n process.env.DUMBO_LOG_STYLE = logLevel;\n};\n\nconst prettifyLogs = (logLevel?: string) => {\n if (logLevel !== undefined) setLogLevel(logLevel);\n setLogStyle(LogStyle.PRETTY);\n};\n\nconst startRepl = async (options: {\n logging: {\n printOptions: boolean;\n logLevel: LogLevel;\n logStyle: LogStyle;\n };\n schema: {\n database: string;\n collections: string[];\n autoMigration: MigrationStyle;\n };\n connectionString: string | undefined;\n databaseDriver: string;\n}) => {\n // TODO: This will change when we have proper tracing and logging config\n // For now, that's enough\n setLogLevel(process.env.DUMBO_LOG_LEVEL ?? options.logging.logLevel);\n setLogStyle(process.env.DUMBO_LOG_STYLE ?? options.logging.logStyle);\n\n console.log(color.green('Starting Pongo Shell (version: 0.17.0-beta.9)'));\n\n if (options.logging.printOptions) {\n console.log(color.green('With Options:'));\n console.log(prettyJson(options));\n }\n\n const connectionString =\n options.connectionString ??\n process.env.DB_CONNECTION_STRING ??\n 'postgresql://postgres:postgres@localhost:5432/postgres';\n\n if (!(options.connectionString ?? process.env.DB_CONNECTION_STRING)) {\n console.log(\n color.yellow(\n `No connection string provided, using: 'postgresql://postgres:postgres@localhost:5432/postgres'`,\n ),\n );\n }\n\n const { databaseType } = parseConnectionString(connectionString);\n const driverType = `${databaseType}:${options.databaseDriver}` as const;\n\n const driver = pongoDatabaseDriverRegistry.tryGet(driverType);\n\n if (driver === null) {\n console.error(\n `Error: No database driver found for driver type \"${driverType}\". Make sure the driver is installed and the connection string is correct.`,\n );\n process.exit(1);\n }\n\n const connectionCheck = await checkConnection(connectionString);\n\n if (!connectionCheck.successful) {\n if (connectionCheck.errorType === 'ConnectionRefused') {\n console.error(\n color.red(\n `Connection was refused. Check if the PostgreSQL server is running and accessible.`,\n ),\n );\n } else if (connectionCheck.errorType === 'Authentication') {\n console.error(\n color.red(\n `Authentication failed. 
Check the username and password in the connection string.`,\n ),\n );\n } else {\n console.error(color.red('Error connecting to PostgreSQL server'));\n }\n console.log(color.red('Exiting Pongo Shell...'));\n process.exit();\n }\n\n console.log(color.green(`Successfully connected`));\n console.log(color.green('Use db.<collection>.<method>() to query.'));\n\n const shell = repl.start({\n prompt: color.green('pongo> '),\n useGlobal: true,\n breakEvalOnSigint: true,\n writer: printOutput,\n });\n\n let db: PongoDb;\n\n if (options.schema.collections.length > 0) {\n const collectionsSchema: Record<string, PongoCollectionSchema> = {};\n\n for (const collectionName of options.schema.collections) {\n collectionsSchema[collectionName] =\n pongoSchema.collection(collectionName);\n }\n\n const schema = pongoSchema.client({\n database: pongoSchema.db(options.schema.database, collectionsSchema),\n });\n\n const driverOptions: PongoClientOptions = {\n driver,\n schema: {\n definition: schema,\n autoMigration: options.schema.autoMigration,\n },\n };\n\n // TODO: Find a better way to pass custom driver settings\n const customOptions = {\n connectionString,\n };\n\n const typedClient = pongoClient({\n ...driverOptions,\n ...customOptions,\n });\n\n db = typedClient.database!;\n\n for (const collectionName of options.schema.collections) {\n shell.context[collectionName] = typedClient.database![collectionName];\n }\n\n pongo = typedClient;\n } else {\n const driverOptions: PongoClientOptions = {\n driver,\n schema: {\n autoMigration: options.schema.autoMigration,\n },\n };\n\n // TODO: Find a better way to pass custom driver settings\n const customOptions = {\n connectionString,\n };\n\n pongo = pongoClient({\n ...driverOptions,\n ...customOptions,\n });\n\n db = pongo.db(options.schema.database);\n }\n\n shell.context.pongo = pongo;\n shell.context.db = db;\n\n // helpers\n shell.context.SQL = SQL;\n shell.context.setLogLevel = setLogLevel;\n shell.context.setLogStyle = setLogStyle;\n shell.context.prettifyLogs = prettifyLogs;\n shell.context.printResultsAsTable = printResultsAsTable;\n shell.context.LogStyle = LogStyle;\n shell.context.LogLevel = LogLevel;\n\n // Intercept REPL output to display results as a table if they are arrays\n shell.on('exit', async () => {\n await teardown();\n process.exit();\n });\n\n shell.on('SIGINT', async () => {\n await teardown();\n process.exit();\n });\n};\n\nconst teardown = async () => {\n console.log(color.yellow('Exiting Pongo Shell...'));\n await pongo.close();\n};\n\nprocess.on('uncaughtException', teardown);\nprocess.on('SIGINT', teardown);\n\ninterface ShellOptions {\n database: string;\n collection: string[];\n databaseDriver: string;\n connectionString?: string;\n disableAutoMigrations: boolean;\n logStyle?: string;\n logLevel?: string;\n prettyLog?: boolean;\n printOptions?: boolean;\n}\n\nconst shellCommand = new Command('shell')\n .description('Start an interactive Pongo shell')\n .option(\n '-drv, --database-driver <string>',\n 'Database driver that should be used for connection (e.g., \"pg\" for PostgreSQL, \"sqlite3\" for SQLite)',\n 'pg',\n )\n .option(\n '-cs, --connectionString <string>',\n 'Connection string for the database',\n )\n .option('-db, --database <string>', 'Database name to connect', 'postgres')\n .option(\n '-col, --collection <name>',\n 'Specify the collection name',\n (value: string, previous: string[]) => {\n // Accumulate collection names into an array (explicitly typing `previous` as `string[]`)\n return previous.concat([value]);\n 
},\n [] as string[],\n )\n .option(\n '-no-migrations, --disable-auto-migrations',\n 'Disable automatic migrations',\n )\n .option('-o, --print-options', 'Print shell options')\n .option(\n '-ll, --log-level <logLevel>',\n 'Log level: DISABLED, INFO, LOG, WARN, ERROR',\n 'DISABLED',\n )\n .option('-ls, --log-style', 'Log style: RAW, PRETTY', 'RAW')\n .option('-p, --pretty-log', 'Turn on logging with prettified output')\n .action(async (options: ShellOptions) => {\n const { collection, database } = options;\n const connectionString = options.connectionString;\n\n await startRepl({\n logging: {\n printOptions: options.printOptions === true,\n logStyle: options.prettyLog\n ? LogStyle.PRETTY\n : ((options.logStyle as LogStyle | undefined) ?? LogStyle.RAW),\n logLevel: options.logLevel\n ? (options.logLevel as LogLevel)\n : options.prettyLog\n ? LogLevel.INFO\n : LogLevel.DISABLED,\n },\n schema: {\n collections: collection,\n database,\n autoMigration: options.disableAutoMigrations\n ? 'None'\n : 'CreateOrUpdate',\n },\n connectionString,\n databaseDriver: options.databaseDriver,\n });\n });\n\nexport { shellCommand };\n"]}
package/dist/cli.js
CHANGED
@@ -353,7 +353,7 @@ var prettifyLogs = (logLevel) => {
 var startRepl = async (options) => {
   setLogLevel(process.env.DUMBO_LOG_LEVEL ?? options.logging.logLevel);
   setLogStyle(process.env.DUMBO_LOG_STYLE ?? options.logging.logStyle);
-  console.log(color.green("Starting Pongo Shell (version: 0.17.0-beta.8)"));
+  console.log(color.green("Starting Pongo Shell (version: 0.17.0-beta.9)"));
   if (options.logging.printOptions) {
     console.log(color.green("With Options:"));
     console.log(prettyJson(options));
package/dist/cli.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"sources":["../src/cli.ts","../src/commandLine/configFile.ts","../src/commandLine/migrate.ts","../src/commandLine/shell.ts"],"sourcesContent":["#!/usr/bin/env node\nimport { Command } from 'commander';\nimport { configCommand, migrateCommand, shellCommand } from './commandLine';\n\nconst program = new Command();\n\nprogram.name('pongo').description('CLI tool for Pongo');\n\nprogram.addCommand(configCommand);\nprogram.addCommand(migrateCommand);\nprogram.addCommand(shellCommand);\n\nprogram.parse(process.argv);\n\nexport default program;\n","import { Command } from 'commander';\nimport fs from 'node:fs';\nimport {\n objectEntries,\n toDbSchemaMetadata,\n type PongoDbSchemaMetadata,\n type PongoSchemaConfig,\n} from '../core';\n\nconst formatTypeName = (input: string): string => {\n if (input.length === 0) {\n return input;\n }\n\n let formatted = input.charAt(0).toUpperCase() + input.slice(1);\n\n if (formatted.endsWith('s')) {\n formatted = formatted.slice(0, -1);\n }\n\n return formatted;\n};\n\nconst sampleConfig = (collectionNames: string[] = ['users']) => {\n const types = collectionNames\n .map(\n (name) =>\n `export type ${formatTypeName(name)} = { name: string; description: string; date: Date }`,\n )\n .join('\\n');\n\n const collections = collectionNames\n .map(\n (name) =>\n ` ${name}: pongoSchema.collection<${formatTypeName(name)}>('${name}'),`,\n )\n .join('\\n');\n\n return `import { pongoSchema } from '@event-driven-io/pongo';\n\n${types}\n\nexport default {\n schema: pongoSchema.client({\n database: pongoSchema.db({\n${collections}\n }),\n }),\n};`;\n};\n\nconst missingDefaultExport = `Error: Config should contain default export, e.g.\\n\\n${sampleConfig()}`;\nconst missingSchema = `Error: Config should contain schema property, e.g.\\n\\n${sampleConfig()}`;\nconst missingDbs = `Error: Config should have at least a single database defined, e.g.\\n\\n${sampleConfig()}`;\nconst missingDefaultDb = `Error: Config should have a default database defined (without name or or with default database name), e.g.\\n\\n${sampleConfig()}`;\nconst missingCollections = `Error: Database should have defined at least one collection, e.g.\\n\\n${sampleConfig()}`;\n\nexport const loadConfigFile = async (\n configPath: string,\n): Promise<PongoDbSchemaMetadata> => {\n const configUrl = new URL(configPath, `file://${process.cwd()}/`);\n try {\n // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment\n const imported: Partial<{ default: PongoSchemaConfig }> = await import(\n configUrl.href\n );\n\n const parsed = parseDefaultDbSchema(imported);\n\n if (typeof parsed === 'string') {\n console.error(parsed);\n process.exit(1);\n }\n\n return parsed;\n } catch {\n console.error(`Error: Couldn't load file: ${configUrl.href}`);\n process.exit(1);\n }\n};\n\nexport const generateConfigFile = (\n configPath: string,\n collectionNames: string[],\n): void => {\n try {\n fs.writeFileSync(configPath, sampleConfig(collectionNames), 'utf8');\n console.log(`Configuration file stored at: ${configPath}`);\n } catch (error) {\n console.error(`Error: Couldn't store config file: ${configPath}!`);\n console.error(error);\n process.exit(1);\n }\n};\n\nexport const parseDefaultDbSchema = (\n imported: Partial<{ default: PongoSchemaConfig }>,\n): PongoDbSchemaMetadata | string => {\n if (!imported.default) {\n return missingDefaultExport;\n }\n\n if (!imported.default.schema) {\n return missingSchema;\n }\n\n if (!imported.default.schema.dbs) {\n return missingDbs;\n }\n\n const dbs = 
objectEntries(imported.default.schema.dbs).map((db) => db[1]);\n\n const defaultDb = dbs.find((db) => db.name === undefined);\n\n if (!defaultDb) {\n return missingDefaultDb;\n }\n\n if (!defaultDb.collections) {\n return missingCollections;\n }\n\n const collections = objectEntries(defaultDb.collections).map((col) => col[1]);\n\n if (collections.length === 0) {\n return missingCollections;\n }\n\n return toDbSchemaMetadata(defaultDb);\n};\n\ntype SampleConfigOptions =\n | {\n collection: string[];\n print?: boolean;\n }\n | {\n collection: string[];\n generate?: boolean;\n file?: string;\n };\n\nexport const configCommand = new Command('config').description(\n 'Manage Pongo configuration',\n);\n\nconfigCommand\n .command('sample')\n .description('Generate or print sample configuration')\n .option(\n '-col, --collection <name>',\n 'Specify the collection name',\n (value: string, previous: string[]) => {\n // Accumulate collection names into an array (explicitly typing `previous` as `string[]`)\n return previous.concat([value]);\n },\n [] as string[],\n )\n .option(\n '-f, --file <path>',\n 'Path to configuration file with collection list',\n )\n .option('-g, --generate', 'Generate sample config file')\n .option('-p, --print', 'Print sample config file')\n .action((options: SampleConfigOptions) => {\n const collectionNames =\n options.collection.length > 0 ? options.collection : ['users'];\n\n if (!('print' in options) && !('generate' in options)) {\n console.error(\n 'Error: Please provide either:\\n--print param to print sample config or\\n--generate to generate sample config file',\n );\n process.exit(1);\n }\n\n if ('print' in options) {\n console.log(`${sampleConfig(collectionNames)}`);\n } else if ('generate' in options) {\n if (!options.file) {\n console.error(\n 'Error: You need to provide a config file through a --file',\n );\n process.exit(1);\n }\n\n generateConfigFile(options.file, collectionNames);\n }\n });\n","import {\n combineMigrations,\n dumbo,\n parseConnectionString,\n runSQLMigrations,\n type DatabaseDriverType,\n} from '@event-driven-io/dumbo';\nimport { Command } from 'commander';\nimport {\n pongoDatabaseDriverRegistry,\n pongoSchema,\n type AnyPongoDatabaseDriverOptions,\n type PongoCollectionSchema,\n type PongoDatabaseFactoryOptions,\n type PongoDocument,\n} from '../core';\nimport { loadConfigFile } from './configFile';\n\ninterface MigrateRunOptions {\n collection: string[];\n connectionString: string;\n databaseType?: string;\n databaseName?: string | undefined;\n databaseDriver: string;\n config?: string;\n dryRun?: boolean;\n}\n\ninterface MigrateSqlOptions {\n print?: boolean;\n write?: string;\n databaseType: string;\n databaseName?: string | undefined;\n databaseDriver: string;\n config?: string;\n collection: string[];\n}\n\nexport const migrateCommand = new Command('migrate').description(\n 'Manage database migrations',\n);\n\nmigrateCommand\n .command('run')\n .description('Run database migrations')\n .option(\n '-dbt, --database-type <string>',\n 'Database type that should be used for connection (e.g., PostgreSQL or SQLite)',\n undefined,\n )\n .option(\n '-drv, --database-driver <string>',\n 'Database driver that should be used for connection (e.g., \"pg\" for PostgreSQL, \"sqlite3\" for SQLite)',\n )\n .option(\n '-dbn, --database-name <string>',\n 'Database name to connect to',\n undefined,\n )\n .option(\n '-cs, --connection-string <string>',\n 'Connection string for the database',\n )\n .option(\n '-col, --collection <name>',\n 'Specify the 
collection name',\n (value: string, previous: string[]) => {\n // Accumulate collection names into an array (explicitly typing `previous` as `string[]`)\n return previous.concat([value]);\n },\n [] as string[],\n )\n .option('-f, --config <path>', 'Path to configuration file with Pongo config')\n .option('-dr, --dryRun', 'Perform dry run without commiting changes', false)\n .action(async (options: MigrateRunOptions) => {\n const { collection, dryRun, databaseName, databaseDriver } = options;\n const connectionString =\n options.connectionString ?? process.env.DB_CONNECTION_STRING;\n\n const databaseType =\n options.databaseType ??\n parseConnectionString(connectionString).databaseType;\n\n let collectionNames: string[];\n\n if (!connectionString) {\n console.error(\n 'Error: Connection string is required. Provide it either as a \"--connection-string\" parameter or through the DB_CONNECTION_STRING environment variable.' +\n '\\nFor instance: --connection-string postgresql://postgres:postgres@localhost:5432/postgres',\n );\n process.exit(1);\n }\n\n if (options.config) {\n const config = await loadConfigFile(options.config);\n\n collectionNames = config.collections.map((c) => c.name);\n } else if (collection) {\n collectionNames = collection;\n } else {\n console.error(\n 'Error: You need to provide at least one collection name. Provide it either through \"--config\" file or as a \"--collection\" parameter.',\n );\n process.exit(1);\n }\n\n const driverType = `${databaseType}:${databaseDriver}` as const;\n\n const migrations = getMigrations({\n driverType,\n connectionString,\n databaseName,\n collectionNames,\n });\n\n const pool = dumbo({ connectionString, driverType });\n\n await runSQLMigrations(pool, migrations, {\n dryRun,\n });\n });\n\nmigrateCommand\n .command('sql')\n .description('Generate SQL for database migration')\n .option(\n '-dbt, --database-type <string>',\n 'Database type that should be used for connection (e.g., PostgreSQL or SQLite)',\n )\n .option(\n '-drv, --database-driver <string>',\n 'Database driver that should be used for connection (e.g., \"pg\" for PostgreSQL, \"sqlite3\" for SQLite)',\n )\n .option(\n '-dbn, --database-name <string>',\n 'Database name to connect to',\n undefined,\n )\n .option(\n '-col, --collection <name>',\n 'Specify the collection name',\n (value: string, previous: string[]) => {\n // Accumulate collection names into an array (explicitly typing `previous` as `string[]`)\n return previous.concat([value]);\n },\n [] as string[],\n )\n .option('-f, --config <path>', 'Path to configuration file with Pongo config')\n .option('--print', 'Print the SQL to the console (default)', true)\n //.option('--write <filename>', 'Write the SQL to a specified file')\n .action(async (options: MigrateSqlOptions) => {\n const { collection, databaseName, databaseType, databaseDriver } = options;\n\n let collectionNames: string[];\n\n if (options.config) {\n const config = await loadConfigFile(options.config);\n\n collectionNames = config.collections.map((c) => c.name);\n } else if (collection) {\n collectionNames = collection;\n } else {\n console.error(\n 'Error: You need to provide at least one collection name. 
Provide it either through \"--config\" file or as a \"--collection\" parameter.',\n );\n process.exit(1);\n }\n\n const driverType = `${databaseType}:${databaseDriver}` as const;\n\n const migrations = getMigrations({\n driverType,\n connectionString: undefined,\n databaseName,\n collectionNames,\n });\n\n console.log('Printing SQL:');\n console.log(combineMigrations(...migrations));\n });\n\nconst getMigrations = ({\n driverType,\n connectionString,\n databaseName,\n collectionNames,\n}: {\n driverType: DatabaseDriverType;\n connectionString: string | undefined;\n databaseName: string | undefined;\n collectionNames: string[];\n}) => {\n const driver = pongoDatabaseDriverRegistry.tryGet(driverType);\n\n if (driver === null) {\n console.error(\n `Error: No database driver found for driver type \"${driverType}\". Make sure the driver is registered and the connection string is correct.`,\n );\n process.exit(1);\n }\n\n const dbDefinition = pongoSchema.db.from(databaseName, collectionNames);\n\n const driverOptions: PongoDatabaseFactoryOptions<\n Record<string, PongoCollectionSchema<PongoDocument>>,\n AnyPongoDatabaseDriverOptions\n > = {\n schema: { definition: dbDefinition },\n };\n\n const customOptions = {\n connectionString: connectionString ?? driver.defaultConnectionString,\n databaseName,\n };\n\n const db = driver.databaseFactory({ ...driverOptions, ...customOptions });\n\n return db.schema.component.migrations;\n};\n","import {\n color,\n LogLevel,\n LogStyle,\n parseConnectionString,\n prettyJson,\n SQL,\n type MigrationStyle,\n} from '@event-driven-io/dumbo';\nimport { checkConnection } from '@event-driven-io/dumbo/pg';\nimport Table from 'cli-table3';\nimport { Command } from 'commander';\nimport repl from 'node:repl';\nimport {\n pongoClient,\n pongoDatabaseDriverRegistry,\n pongoSchema,\n type PongoClient,\n type PongoClientOptions,\n type PongoCollectionSchema,\n type PongoDb,\n} from '../core';\n\nlet pongo: PongoClient;\n\nconst calculateColumnWidths = (\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n results: any[],\n columnNames: string[],\n): number[] => {\n const columnWidths = columnNames.map((col) => {\n const maxWidth = Math.max(\n col.length, // Header size\n ...results.map((result) =>\n // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access\n result[col] ? String(result[col]).length : 0,\n ),\n );\n return maxWidth + 2; // Add padding\n });\n return columnWidths;\n};\n\nlet shouldDisplayResultsAsTable = false;\n\nconst printResultsAsTable = (print?: boolean) =>\n (shouldDisplayResultsAsTable = print === undefined || print === true);\n\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nconst printOutput = (obj: any): string =>\n Array.isArray(obj) && shouldDisplayResultsAsTable\n ? displayResultsAsTable(obj)\n : prettyJson(obj);\n\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nconst displayResultsAsTable = (results: any[]): string => {\n if (results.length === 0) {\n return color.yellow('No documents found.');\n }\n\n const columnNames = results\n\n .flatMap((result) =>\n // eslint-disable-next-line @typescript-eslint/no-unsafe-argument\n typeof result === 'object' ? 
Object.keys(result) : typeof result,\n )\n .filter((value, index, array) => array.indexOf(value) === index);\n\n const columnWidths = calculateColumnWidths(results, columnNames);\n\n const table = new Table({\n head: columnNames.map((col) => color.cyan(col)),\n colWidths: columnWidths,\n });\n\n results.forEach((result) => {\n table.push(\n columnNames.map((col) =>\n // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access\n result[col] !== undefined\n ? // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access\n Array.isArray(result[col])\n ? // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access\n displayResultsAsTable(result[col])\n : // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access\n prettyJson(result[col])\n : typeof result === 'object'\n ? ''\n : result != undefined && result != undefined\n ? prettyJson(result)\n : '',\n ),\n );\n });\n\n return table.toString();\n};\n\nconst setLogLevel = (logLevel: string) => {\n process.env.DUMBO_LOG_LEVEL = logLevel;\n};\n\nconst setLogStyle = (logLevel: string) => {\n process.env.DUMBO_LOG_STYLE = logLevel;\n};\n\nconst prettifyLogs = (logLevel?: string) => {\n if (logLevel !== undefined) setLogLevel(logLevel);\n setLogStyle(LogStyle.PRETTY);\n};\n\nconst startRepl = async (options: {\n logging: {\n printOptions: boolean;\n logLevel: LogLevel;\n logStyle: LogStyle;\n };\n schema: {\n database: string;\n collections: string[];\n autoMigration: MigrationStyle;\n };\n connectionString: string | undefined;\n databaseDriver: string;\n}) => {\n // TODO: This will change when we have proper tracing and logging config\n // For now, that's enough\n setLogLevel(process.env.DUMBO_LOG_LEVEL ?? options.logging.logLevel);\n setLogStyle(process.env.DUMBO_LOG_STYLE ?? options.logging.logStyle);\n\n console.log(color.green('Starting Pongo Shell (version: 0.17.0-beta.8)'));\n\n if (options.logging.printOptions) {\n console.log(color.green('With Options:'));\n console.log(prettyJson(options));\n }\n\n const connectionString =\n options.connectionString ??\n process.env.DB_CONNECTION_STRING ??\n 'postgresql://postgres:postgres@localhost:5432/postgres';\n\n if (!(options.connectionString ?? process.env.DB_CONNECTION_STRING)) {\n console.log(\n color.yellow(\n `No connection string provided, using: 'postgresql://postgres:postgres@localhost:5432/postgres'`,\n ),\n );\n }\n\n const { databaseType } = parseConnectionString(connectionString);\n const driverType = `${databaseType}:${options.databaseDriver}` as const;\n\n const driver = pongoDatabaseDriverRegistry.tryGet(driverType);\n\n if (driver === null) {\n console.error(\n `Error: No database driver found for driver type \"${driverType}\". Make sure the driver is installed and the connection string is correct.`,\n );\n process.exit(1);\n }\n\n const connectionCheck = await checkConnection(connectionString);\n\n if (!connectionCheck.successful) {\n if (connectionCheck.errorType === 'ConnectionRefused') {\n console.error(\n color.red(\n `Connection was refused. Check if the PostgreSQL server is running and accessible.`,\n ),\n );\n } else if (connectionCheck.errorType === 'Authentication') {\n console.error(\n color.red(\n `Authentication failed. 
Check the username and password in the connection string.`,\n ),\n );\n } else {\n console.error(color.red('Error connecting to PostgreSQL server'));\n }\n console.log(color.red('Exiting Pongo Shell...'));\n process.exit();\n }\n\n console.log(color.green(`Successfully connected`));\n console.log(color.green('Use db.<collection>.<method>() to query.'));\n\n const shell = repl.start({\n prompt: color.green('pongo> '),\n useGlobal: true,\n breakEvalOnSigint: true,\n writer: printOutput,\n });\n\n let db: PongoDb;\n\n if (options.schema.collections.length > 0) {\n const collectionsSchema: Record<string, PongoCollectionSchema> = {};\n\n for (const collectionName of options.schema.collections) {\n collectionsSchema[collectionName] =\n pongoSchema.collection(collectionName);\n }\n\n const schema = pongoSchema.client({\n database: pongoSchema.db(options.schema.database, collectionsSchema),\n });\n\n const driverOptions: PongoClientOptions = {\n driver,\n schema: {\n definition: schema,\n autoMigration: options.schema.autoMigration,\n },\n };\n\n // TODO: Find a better way to pass custom driver settings\n const customOptions = {\n connectionString,\n };\n\n const typedClient = pongoClient({\n ...driverOptions,\n ...customOptions,\n });\n\n db = typedClient.database!;\n\n for (const collectionName of options.schema.collections) {\n shell.context[collectionName] = typedClient.database![collectionName];\n }\n\n pongo = typedClient;\n } else {\n const driverOptions: PongoClientOptions = {\n driver,\n schema: {\n autoMigration: options.schema.autoMigration,\n },\n };\n\n // TODO: Find a better way to pass custom driver settings\n const customOptions = {\n connectionString,\n };\n\n pongo = pongoClient({\n ...driverOptions,\n ...customOptions,\n });\n\n db = pongo.db(options.schema.database);\n }\n\n shell.context.pongo = pongo;\n shell.context.db = db;\n\n // helpers\n shell.context.SQL = SQL;\n shell.context.setLogLevel = setLogLevel;\n shell.context.setLogStyle = setLogStyle;\n shell.context.prettifyLogs = prettifyLogs;\n shell.context.printResultsAsTable = printResultsAsTable;\n shell.context.LogStyle = LogStyle;\n shell.context.LogLevel = LogLevel;\n\n // Intercept REPL output to display results as a table if they are arrays\n shell.on('exit', async () => {\n await teardown();\n process.exit();\n });\n\n shell.on('SIGINT', async () => {\n await teardown();\n process.exit();\n });\n};\n\nconst teardown = async () => {\n console.log(color.yellow('Exiting Pongo Shell...'));\n await pongo.close();\n};\n\nprocess.on('uncaughtException', teardown);\nprocess.on('SIGINT', teardown);\n\ninterface ShellOptions {\n database: string;\n collection: string[];\n databaseDriver: string;\n connectionString?: string;\n disableAutoMigrations: boolean;\n logStyle?: string;\n logLevel?: string;\n prettyLog?: boolean;\n printOptions?: boolean;\n}\n\nconst shellCommand = new Command('shell')\n .description('Start an interactive Pongo shell')\n .option(\n '-drv, --database-driver <string>',\n 'Database driver that should be used for connection (e.g., \"pg\" for PostgreSQL, \"sqlite3\" for SQLite)',\n 'pg',\n )\n .option(\n '-cs, --connectionString <string>',\n 'Connection string for the database',\n )\n .option('-db, --database <string>', 'Database name to connect', 'postgres')\n .option(\n '-col, --collection <name>',\n 'Specify the collection name',\n (value: string, previous: string[]) => {\n // Accumulate collection names into an array (explicitly typing `previous` as `string[]`)\n return previous.concat([value]);\n 
},\n [] as string[],\n )\n .option(\n '-no-migrations, --disable-auto-migrations',\n 'Disable automatic migrations',\n )\n .option('-o, --print-options', 'Print shell options')\n .option(\n '-ll, --log-level <logLevel>',\n 'Log level: DISABLED, INFO, LOG, WARN, ERROR',\n 'DISABLED',\n )\n .option('-ls, --log-style', 'Log style: RAW, PRETTY', 'RAW')\n .option('-p, --pretty-log', 'Turn on logging with prettified output')\n .action(async (options: ShellOptions) => {\n const { collection, database } = options;\n const connectionString = options.connectionString;\n\n await startRepl({\n logging: {\n printOptions: options.printOptions === true,\n logStyle: options.prettyLog\n ? LogStyle.PRETTY\n : ((options.logStyle as LogStyle | undefined) ?? LogStyle.RAW),\n logLevel: options.logLevel\n ? (options.logLevel as LogLevel)\n : options.prettyLog\n ? LogLevel.INFO\n : LogLevel.DISABLED,\n },\n schema: {\n collections: collection,\n database,\n autoMigration: options.disableAutoMigrations\n ? 'None'\n : 'CreateOrUpdate',\n },\n connectionString,\n databaseDriver: options.databaseDriver,\n });\n });\n\nexport { shellCommand };\n"],"mappings":";;;;;;;;;;AACA,SAAS,WAAAA,gBAAe;;;ACDxB,SAAS,eAAe;AACxB,OAAO,QAAQ;AAQf,IAAM,iBAAiB,CAAC,UAA0B;AAChD,MAAI,MAAM,WAAW,GAAG;AACtB,WAAO;AAAA,EACT;AAEA,MAAI,YAAY,MAAM,OAAO,CAAC,EAAE,YAAY,IAAI,MAAM,MAAM,CAAC;AAE7D,MAAI,UAAU,SAAS,GAAG,GAAG;AAC3B,gBAAY,UAAU,MAAM,GAAG,EAAE;AAAA,EACnC;AAEA,SAAO;AACT;AAEA,IAAM,eAAe,CAAC,kBAA4B,CAAC,OAAO,MAAM;AAC9D,QAAM,QAAQ,gBACX;AAAA,IACC,CAAC,SACC,eAAe,eAAe,IAAI,CAAC;AAAA,EACvC,EACC,KAAK,IAAI;AAEZ,QAAM,cAAc,gBACjB;AAAA,IACC,CAAC,SACC,SAAS,IAAI,4BAA4B,eAAe,IAAI,CAAC,MAAM,IAAI;AAAA,EAC3E,EACC,KAAK,IAAI;AAEZ,SAAO;AAAA;AAAA,EAEP,KAAK;AAAA;AAAA;AAAA;AAAA;AAAA,EAKL,WAAW;AAAA;AAAA;AAAA;AAIb;AAEA,IAAM,uBAAuB;AAAA;AAAA,EAAwD,aAAa,CAAC;AACnG,IAAM,gBAAgB;AAAA;AAAA,EAAyD,aAAa,CAAC;AAC7F,IAAM,aAAa;AAAA;AAAA,EAAyE,aAAa,CAAC;AAC1G,IAAM,mBAAmB;AAAA;AAAA,EAAiH,aAAa,CAAC;AACxJ,IAAM,qBAAqB;AAAA;AAAA,EAAwE,aAAa,CAAC;AAE1G,IAAM,iBAAiB,OAC5B,eACmC;AACnC,QAAM,YAAY,IAAI,IAAI,YAAY,UAAU,QAAQ,IAAI,CAAC,GAAG;AAChE,MAAI;AAEF,UAAM,WAAoD,MAAM,OAC9D,UAAU;AAGZ,UAAM,SAAS,qBAAqB,QAAQ;AAE5C,QAAI,OAAO,WAAW,UAAU;AAC9B,cAAQ,MAAM,MAAM;AACpB,cAAQ,KAAK,CAAC;AAAA,IAChB;AAEA,WAAO;AAAA,EACT,QAAQ;AACN,YAAQ,MAAM,8BAA8B,UAAU,IAAI,EAAE;AAC5D,YAAQ,KAAK,CAAC;AAAA,EAChB;AACF;AAEO,IAAM,qBAAqB,CAChC,YACA,oBACS;AACT,MAAI;AACF,OAAG,cAAc,YAAY,aAAa,eAAe,GAAG,MAAM;AAClE,YAAQ,IAAI,iCAAiC,UAAU,EAAE;AAAA,EAC3D,SAAS,OAAO;AACd,YAAQ,MAAM,sCAAsC,UAAU,GAAG;AACjE,YAAQ,MAAM,KAAK;AACnB,YAAQ,KAAK,CAAC;AAAA,EAChB;AACF;AAEO,IAAM,uBAAuB,CAClC,aACmC;AACnC,MAAI,CAAC,SAAS,SAAS;AACrB,WAAO;AAAA,EACT;AAEA,MAAI,CAAC,SAAS,QAAQ,QAAQ;AAC5B,WAAO;AAAA,EACT;AAEA,MAAI,CAAC,SAAS,QAAQ,OAAO,KAAK;AAChC,WAAO;AAAA,EACT;AAEA,QAAM,MAAM,cAAc,SAAS,QAAQ,OAAO,GAAG,EAAE,IAAI,CAAC,OAAO,GAAG,CAAC,CAAC;AAExE,QAAM,YAAY,IAAI,KAAK,CAAC,OAAO,GAAG,SAAS,MAAS;AAExD,MAAI,CAAC,WAAW;AACd,WAAO;AAAA,EACT;AAEA,MAAI,CAAC,UAAU,aAAa;AAC1B,WAAO;AAAA,EACT;AAEA,QAAM,cAAc,cAAc,UAAU,WAAW,EAAE,IAAI,CAAC,QAAQ,IAAI,CAAC,CAAC;AAE5E,MAAI,YAAY,WAAW,GAAG;AAC5B,WAAO;AAAA,EACT;AAEA,SAAO,mBAAmB,SAAS;AACrC;AAaO,IAAM,gBAAgB,IAAI,QAAQ,QAAQ,EAAE;AAAA,EACjD;AACF;AAEA,cACG,QAAQ,QAAQ,EAChB,YAAY,wCAAwC,EACpD;AAAA,EACC;AAAA,EACA;AAAA,EACA,CAAC,OAAe,aAAuB;AAErC,WAAO,SAAS,OAAO,CAAC,KAAK,CAAC;AAAA,EAChC;AAAA,EACA,CAAC;AACH,EACC;AAAA,EACC;AAAA,EACA;AACF,EACC,OAAO,kBAAkB,6BAA6B,EACtD,OAAO,eAAe,0BAA0B,EAChD,OAAO,CAAC,YAAiC;AACxC,QAAM,kBACJ,QAAQ,WAAW,SAAS,IAAI,QAAQ,aAAa,CAAC,OAAO;AAE/D,MAAI,EAAE,WAAW,YAAY,EAAE,cAAc,UAAU;AACrD,YAAQ;AAAA,MACN;AAAA,IACF;AACA,YAAQ,KAAK,CAAC;AAAA,EAChB;AAEA,MA
AI,WAAW,SAAS;AACtB,YAAQ,IAAI,GAAG,aAAa,eAAe,CAAC,EAAE;AAAA,EAChD,WAAW,cAAc,SAAS;AAChC,QAAI,CAAC,QAAQ,MAAM;AACjB,cAAQ;AAAA,QACN;AAAA,MACF;AACA,cAAQ,KAAK,CAAC;AAAA,IAChB;AAEA,uBAAmB,QAAQ,MAAM,eAAe;AAAA,EAClD;AACF,CAAC;;;AC3LH;AAAA,EACE;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,OAEK;AACP,SAAS,WAAAC,gBAAe;AA+BjB,IAAM,iBAAiB,IAAIC,SAAQ,SAAS,EAAE;AAAA,EACnD;AACF;AAEA,eACG,QAAQ,KAAK,EACb,YAAY,yBAAyB,EACrC;AAAA,EACC;AAAA,EACA;AAAA,EACA;AACF,EACC;AAAA,EACC;AAAA,EACA;AACF,EACC;AAAA,EACC;AAAA,EACA;AAAA,EACA;AACF,EACC;AAAA,EACC;AAAA,EACA;AACF,EACC;AAAA,EACC;AAAA,EACA;AAAA,EACA,CAAC,OAAe,aAAuB;AAErC,WAAO,SAAS,OAAO,CAAC,KAAK,CAAC;AAAA,EAChC;AAAA,EACA,CAAC;AACH,EACC,OAAO,uBAAuB,8CAA8C,EAC5E,OAAO,iBAAiB,6CAA6C,KAAK,EAC1E,OAAO,OAAO,YAA+B;AAC5C,QAAM,EAAE,YAAY,QAAQ,cAAc,eAAe,IAAI;AAC7D,QAAM,mBACJ,QAAQ,oBAAoB,QAAQ,IAAI;AAE1C,QAAM,eACJ,QAAQ,gBACR,sBAAsB,gBAAgB,EAAE;AAE1C,MAAI;AAEJ,MAAI,CAAC,kBAAkB;AACrB,YAAQ;AAAA,MACN;AAAA,IAEF;AACA,YAAQ,KAAK,CAAC;AAAA,EAChB;AAEA,MAAI,QAAQ,QAAQ;AAClB,UAAM,SAAS,MAAM,eAAe,QAAQ,MAAM;AAElD,sBAAkB,OAAO,YAAY,IAAI,CAAC,MAAM,EAAE,IAAI;AAAA,EACxD,WAAW,YAAY;AACrB,sBAAkB;AAAA,EACpB,OAAO;AACL,YAAQ;AAAA,MACN;AAAA,IACF;AACA,YAAQ,KAAK,CAAC;AAAA,EAChB;AAEA,QAAM,aAAa,GAAG,YAAY,IAAI,cAAc;AAEpD,QAAM,aAAa,cAAc;AAAA,IAC/B;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC;AAED,QAAM,OAAO,MAAM,EAAE,kBAAkB,WAAW,CAAC;AAEnD,QAAM,iBAAiB,MAAM,YAAY;AAAA,IACvC;AAAA,EACF,CAAC;AACH,CAAC;AAEH,eACG,QAAQ,KAAK,EACb,YAAY,qCAAqC,EACjD;AAAA,EACC;AAAA,EACA;AACF,EACC;AAAA,EACC;AAAA,EACA;AACF,EACC;AAAA,EACC;AAAA,EACA;AAAA,EACA;AACF,EACC;AAAA,EACC;AAAA,EACA;AAAA,EACA,CAAC,OAAe,aAAuB;AAErC,WAAO,SAAS,OAAO,CAAC,KAAK,CAAC;AAAA,EAChC;AAAA,EACA,CAAC;AACH,EACC,OAAO,uBAAuB,8CAA8C,EAC5E,OAAO,WAAW,0CAA0C,IAAI,EAEhE,OAAO,OAAO,YAA+B;AAC5C,QAAM,EAAE,YAAY,cAAc,cAAc,eAAe,IAAI;AAEnE,MAAI;AAEJ,MAAI,QAAQ,QAAQ;AAClB,UAAM,SAAS,MAAM,eAAe,QAAQ,MAAM;AAElD,sBAAkB,OAAO,YAAY,IAAI,CAAC,MAAM,EAAE,IAAI;AAAA,EACxD,WAAW,YAAY;AACrB,sBAAkB;AAAA,EACpB,OAAO;AACL,YAAQ;AAAA,MACN;AAAA,IACF;AACA,YAAQ,KAAK,CAAC;AAAA,EAChB;AAEA,QAAM,aAAa,GAAG,YAAY,IAAI,cAAc;AAEpD,QAAM,aAAa,cAAc;AAAA,IAC/B;AAAA,IACA,kBAAkB;AAAA,IAClB;AAAA,IACA;AAAA,EACF,CAAC;AAED,UAAQ,IAAI,eAAe;AAC3B,UAAQ,IAAI,kBAAkB,GAAG,UAAU,CAAC;AAC9C,CAAC;AAEH,IAAM,gBAAgB,CAAC;AAAA,EACrB;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF,MAKM;AACJ,QAAM,SAAS,4BAA4B,OAAO,UAAU;AAE5D,MAAI,WAAW,MAAM;AACnB,YAAQ;AAAA,MACN,oDAAoD,UAAU;AAAA,IAChE;AACA,YAAQ,KAAK,CAAC;AAAA,EAChB;AAEA,QAAM,eAAe,YAAY,GAAG,KAAK,cAAc,eAAe;AAEtE,QAAM,gBAGF;AAAA,IACF,QAAQ,EAAE,YAAY,aAAa;AAAA,EACrC;AAEA,QAAM,gBAAgB;AAAA,IACpB,kBAAkB,oBAAoB,OAAO;AAAA,IAC7C;AAAA,EACF;AAEA,QAAM,KAAK,OAAO,gBAAgB,EAAE,GAAG,eAAe,GAAG,cAAc,CAAC;AAExE,SAAO,GAAG,OAAO,UAAU;AAC7B;;;AC1NA;AAAA,EACE;AAAA,EACA;AAAA,EACA;AAAA,EACA,yBAAAC;AAAA,EACA;AAAA,EACA;AAAA,OAEK;AACP,SAAS,uBAAuB;AAChC,OAAO,WAAW;AAClB,SAAS,WAAAC,gBAAe;AACxB,OAAO,UAAU;AAWjB,IAAI;AAEJ,IAAM,wBAAwB,CAE5B,SACA,gBACa;AACb,QAAM,eAAe,YAAY,IAAI,CAAC,QAAQ;AAC5C,UAAM,WAAW,KAAK;AAAA,MACpB,IAAI;AAAA,MACJ,GAAG,QAAQ;AAAA,QAAI,CAAC;AAAA;AAAA,UAEd,OAAO,GAAG,IAAI,OAAO,OAAO,GAAG,CAAC,EAAE,SAAS;AAAA;AAAA,MAC7C;AAAA,IACF;AACA,WAAO,WAAW;AAAA,EACpB,CAAC;AACD,SAAO;AACT;AAEA,IAAI,8BAA8B;AAElC,IAAM,sBAAsB,CAAC,UAC1B,8BAA8B,UAAU,UAAa,UAAU;AAGlE,IAAM,cAAc,CAAC,QACnB,MAAM,QAAQ,GAAG,KAAK,8BAClB,sBAAsB,GAAG,IACzB,WAAW,GAAG;AAGpB,IAAM,wBAAwB,CAAC,YAA2B;AACxD,MAAI,QAAQ,WAAW,GAAG;AACxB,WAAO,MAAM,OAAO,qBAAqB;AAAA,EAC3C;AAEA,QAAM,cAAc,QAEjB;AAAA,IAAQ,CAAC;AAAA;AAAA,MAER,OAAO,WAAW,WAAW,OAAO,KAAK,MAAM,IAAI,OAAO;AAAA;AAAA,EAC5D,EACC,OAAO,CAAC,OAAO,OAAO,UAAU,MAAM,QAAQ,KAAK,MAAM,KAAK;AAEjE,QAAM,eAAe,sBAAsB,SAAS,WAAW;AAE/D,QAAM,QAAQ,IAAI,MAAM;AAAA,IACtB,MAAM,YAAY,IAAI,CAA
C,QAAQ,MAAM,KAAK,GAAG,CAAC;AAAA,IAC9C,WAAW;AAAA,EACb,CAAC;AAED,UAAQ,QAAQ,CAAC,WAAW;AAC1B,UAAM;AAAA,MACJ,YAAY;AAAA,QAAI,CAAC;AAAA;AAAA,UAEf,OAAO,GAAG,MAAM;AAAA;AAAA,YAEZ,MAAM,QAAQ,OAAO,GAAG,CAAC;AAAA;AAAA,cAEvB,sBAAsB,OAAO,GAAG,CAAC;AAAA;AAAA;AAAA,cAEjC,WAAW,OAAO,GAAG,CAAC;AAAA;AAAA,cACxB,OAAO,WAAW,WAChB,KACA,UAAU,UAAa,UAAU,SAC/B,WAAW,MAAM,IACjB;AAAA;AAAA,MACV;AAAA,IACF;AAAA,EACF,CAAC;AAED,SAAO,MAAM,SAAS;AACxB;AAEA,IAAM,cAAc,CAAC,aAAqB;AACxC,UAAQ,IAAI,kBAAkB;AAChC;AAEA,IAAM,cAAc,CAAC,aAAqB;AACxC,UAAQ,IAAI,kBAAkB;AAChC;AAEA,IAAM,eAAe,CAAC,aAAsB;AAC1C,MAAI,aAAa,OAAW,aAAY,QAAQ;AAChD,cAAY,SAAS,MAAM;AAC7B;AAEA,IAAM,YAAY,OAAO,YAanB;AAGJ,cAAY,QAAQ,IAAI,mBAAmB,QAAQ,QAAQ,QAAQ;AACnE,cAAY,QAAQ,IAAI,mBAAmB,QAAQ,QAAQ,QAAQ;AAEnE,UAAQ,IAAI,MAAM,MAAM,+CAA+C,CAAC;AAExE,MAAI,QAAQ,QAAQ,cAAc;AAChC,YAAQ,IAAI,MAAM,MAAM,eAAe,CAAC;AACxC,YAAQ,IAAI,WAAW,OAAO,CAAC;AAAA,EACjC;AAEA,QAAM,mBACJ,QAAQ,oBACR,QAAQ,IAAI,wBACZ;AAEF,MAAI,EAAE,QAAQ,oBAAoB,QAAQ,IAAI,uBAAuB;AACnE,YAAQ;AAAA,MACN,MAAM;AAAA,QACJ;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAEA,QAAM,EAAE,aAAa,IAAIC,uBAAsB,gBAAgB;AAC/D,QAAM,aAAa,GAAG,YAAY,IAAI,QAAQ,cAAc;AAE5D,QAAM,SAAS,4BAA4B,OAAO,UAAU;AAE5D,MAAI,WAAW,MAAM;AACnB,YAAQ;AAAA,MACN,oDAAoD,UAAU;AAAA,IAChE;AACA,YAAQ,KAAK,CAAC;AAAA,EAChB;AAEA,QAAM,kBAAkB,MAAM,gBAAgB,gBAAgB;AAE9D,MAAI,CAAC,gBAAgB,YAAY;AAC/B,QAAI,gBAAgB,cAAc,qBAAqB;AACrD,cAAQ;AAAA,QACN,MAAM;AAAA,UACJ;AAAA,QACF;AAAA,MACF;AAAA,IACF,WAAW,gBAAgB,cAAc,kBAAkB;AACzD,cAAQ;AAAA,QACN,MAAM;AAAA,UACJ;AAAA,QACF;AAAA,MACF;AAAA,IACF,OAAO;AACL,cAAQ,MAAM,MAAM,IAAI,uCAAuC,CAAC;AAAA,IAClE;AACA,YAAQ,IAAI,MAAM,IAAI,wBAAwB,CAAC;AAC/C,YAAQ,KAAK;AAAA,EACf;AAEA,UAAQ,IAAI,MAAM,MAAM,wBAAwB,CAAC;AACjD,UAAQ,IAAI,MAAM,MAAM,0CAA0C,CAAC;AAEnE,QAAM,QAAQ,KAAK,MAAM;AAAA,IACvB,QAAQ,MAAM,MAAM,SAAS;AAAA,IAC7B,WAAW;AAAA,IACX,mBAAmB;AAAA,IACnB,QAAQ;AAAA,EACV,CAAC;AAED,MAAI;AAEJ,MAAI,QAAQ,OAAO,YAAY,SAAS,GAAG;AACzC,UAAM,oBAA2D,CAAC;AAElE,eAAW,kBAAkB,QAAQ,OAAO,aAAa;AACvD,wBAAkB,cAAc,IAC9B,YAAY,WAAW,cAAc;AAAA,IACzC;AAEA,UAAM,SAAS,YAAY,OAAO;AAAA,MAChC,UAAU,YAAY,GAAG,QAAQ,OAAO,UAAU,iBAAiB;AAAA,IACrE,CAAC;AAED,UAAM,gBAAoC;AAAA,MACxC;AAAA,MACA,QAAQ;AAAA,QACN,YAAY;AAAA,QACZ,eAAe,QAAQ,OAAO;AAAA,MAChC;AAAA,IACF;AAGA,UAAM,gBAAgB;AAAA,MACpB;AAAA,IACF;AAEA,UAAM,cAAc,YAAY;AAAA,MAC9B,GAAG;AAAA,MACH,GAAG;AAAA,IACL,CAAC;AAED,SAAK,YAAY;AAEjB,eAAW,kBAAkB,QAAQ,OAAO,aAAa;AACvD,YAAM,QAAQ,cAAc,IAAI,YAAY,SAAU,cAAc;AAAA,IACtE;AAEA,YAAQ;AAAA,EACV,OAAO;AACL,UAAM,gBAAoC;AAAA,MACxC;AAAA,MACA,QAAQ;AAAA,QACN,eAAe,QAAQ,OAAO;AAAA,MAChC;AAAA,IACF;AAGA,UAAM,gBAAgB;AAAA,MACpB;AAAA,IACF;AAEA,YAAQ,YAAY;AAAA,MAClB,GAAG;AAAA,MACH,GAAG;AAAA,IACL,CAAC;AAED,SAAK,MAAM,GAAG,QAAQ,OAAO,QAAQ;AAAA,EACvC;AAEA,QAAM,QAAQ,QAAQ;AACtB,QAAM,QAAQ,KAAK;AAGnB,QAAM,QAAQ,MAAM;AACpB,QAAM,QAAQ,cAAc;AAC5B,QAAM,QAAQ,cAAc;AAC5B,QAAM,QAAQ,eAAe;AAC7B,QAAM,QAAQ,sBAAsB;AACpC,QAAM,QAAQ,WAAW;AACzB,QAAM,QAAQ,WAAW;AAGzB,QAAM,GAAG,QAAQ,YAAY;AAC3B,UAAM,SAAS;AACf,YAAQ,KAAK;AAAA,EACf,CAAC;AAED,QAAM,GAAG,UAAU,YAAY;AAC7B,UAAM,SAAS;AACf,YAAQ,KAAK;AAAA,EACf,CAAC;AACH;AAEA,IAAM,WAAW,YAAY;AAC3B,UAAQ,IAAI,MAAM,OAAO,wBAAwB,CAAC;AAClD,QAAM,MAAM,MAAM;AACpB;AAEA,QAAQ,GAAG,qBAAqB,QAAQ;AACxC,QAAQ,GAAG,UAAU,QAAQ;AAc7B,IAAM,eAAe,IAAIC,SAAQ,OAAO,EACrC,YAAY,kCAAkC,EAC9C;AAAA,EACC;AAAA,EACA;AAAA,EACA;AACF,EACC;AAAA,EACC;AAAA,EACA;AACF,EACC,OAAO,4BAA4B,4BAA4B,UAAU,EACzE;AAAA,EACC;AAAA,EACA;AAAA,EACA,CAAC,OAAe,aAAuB;AAErC,WAAO,SAAS,OAAO,CAAC,KAAK,CAAC;AAAA,EAChC;AAAA,EACA,CAAC;AACH,EACC;AAAA,EACC;AAAA,EACA;AACF,EACC,OAAO,uBAAuB,qBAAqB,EACnD;AAAA,EACC;AAAA,EACA;AAAA,EACA;AACF,EACC,OAAO,oBAAoB,0BAA0B,KAAK,EAC1D,OAAO,oBAAoB,wCAAwC,EACnE,OAAO,OAAO,YAA0B;AACvC,QAAM,EAAE,YAAY,SAAS,IAAI;AACjC,QAAM,mBAAm
B,QAAQ;AAEjC,QAAM,UAAU;AAAA,IACd,SAAS;AAAA,MACP,cAAc,QAAQ,iBAAiB;AAAA,MACvC,UAAU,QAAQ,YACd,SAAS,SACP,QAAQ,YAAqC,SAAS;AAAA,MAC5D,UAAU,QAAQ,WACb,QAAQ,WACT,QAAQ,YACN,SAAS,OACT,SAAS;AAAA,IACjB;AAAA,IACA,QAAQ;AAAA,MACN,aAAa;AAAA,MACb;AAAA,MACA,eAAe,QAAQ,wBACnB,SACA;AAAA,IACN;AAAA,IACA;AAAA,IACA,gBAAgB,QAAQ;AAAA,EAC1B,CAAC;AACH,CAAC;;;AHjWH,IAAM,UAAU,IAAIC,SAAQ;AAE5B,QAAQ,KAAK,OAAO,EAAE,YAAY,oBAAoB;AAEtD,QAAQ,WAAW,aAAa;AAChC,QAAQ,WAAW,cAAc;AACjC,QAAQ,WAAW,YAAY;AAE/B,QAAQ,MAAM,QAAQ,IAAI;AAE1B,IAAO,cAAQ;","names":["Command","Command","Command","parseConnectionString","Command","parseConnectionString","Command","Command"]}
|
|
1
|
+
{"version":3,"sources":["../src/cli.ts","../src/commandLine/configFile.ts","../src/commandLine/migrate.ts","../src/commandLine/shell.ts"],"sourcesContent":["#!/usr/bin/env node\nimport { Command } from 'commander';\nimport { configCommand, migrateCommand, shellCommand } from './commandLine';\n\nconst program = new Command();\n\nprogram.name('pongo').description('CLI tool for Pongo');\n\nprogram.addCommand(configCommand);\nprogram.addCommand(migrateCommand);\nprogram.addCommand(shellCommand);\n\nprogram.parse(process.argv);\n\nexport default program;\n","import { Command } from 'commander';\nimport fs from 'node:fs';\nimport {\n objectEntries,\n toDbSchemaMetadata,\n type PongoDbSchemaMetadata,\n type PongoSchemaConfig,\n} from '../core';\n\nconst formatTypeName = (input: string): string => {\n if (input.length === 0) {\n return input;\n }\n\n let formatted = input.charAt(0).toUpperCase() + input.slice(1);\n\n if (formatted.endsWith('s')) {\n formatted = formatted.slice(0, -1);\n }\n\n return formatted;\n};\n\nconst sampleConfig = (collectionNames: string[] = ['users']) => {\n const types = collectionNames\n .map(\n (name) =>\n `export type ${formatTypeName(name)} = { name: string; description: string; date: Date }`,\n )\n .join('\\n');\n\n const collections = collectionNames\n .map(\n (name) =>\n ` ${name}: pongoSchema.collection<${formatTypeName(name)}>('${name}'),`,\n )\n .join('\\n');\n\n return `import { pongoSchema } from '@event-driven-io/pongo';\n\n${types}\n\nexport default {\n schema: pongoSchema.client({\n database: pongoSchema.db({\n${collections}\n }),\n }),\n};`;\n};\n\nconst missingDefaultExport = `Error: Config should contain default export, e.g.\\n\\n${sampleConfig()}`;\nconst missingSchema = `Error: Config should contain schema property, e.g.\\n\\n${sampleConfig()}`;\nconst missingDbs = `Error: Config should have at least a single database defined, e.g.\\n\\n${sampleConfig()}`;\nconst missingDefaultDb = `Error: Config should have a default database defined (without name or or with default database name), e.g.\\n\\n${sampleConfig()}`;\nconst missingCollections = `Error: Database should have defined at least one collection, e.g.\\n\\n${sampleConfig()}`;\n\nexport const loadConfigFile = async (\n configPath: string,\n): Promise<PongoDbSchemaMetadata> => {\n const configUrl = new URL(configPath, `file://${process.cwd()}/`);\n try {\n // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment\n const imported: Partial<{ default: PongoSchemaConfig }> = await import(\n configUrl.href\n );\n\n const parsed = parseDefaultDbSchema(imported);\n\n if (typeof parsed === 'string') {\n console.error(parsed);\n process.exit(1);\n }\n\n return parsed;\n } catch {\n console.error(`Error: Couldn't load file: ${configUrl.href}`);\n process.exit(1);\n }\n};\n\nexport const generateConfigFile = (\n configPath: string,\n collectionNames: string[],\n): void => {\n try {\n fs.writeFileSync(configPath, sampleConfig(collectionNames), 'utf8');\n console.log(`Configuration file stored at: ${configPath}`);\n } catch (error) {\n console.error(`Error: Couldn't store config file: ${configPath}!`);\n console.error(error);\n process.exit(1);\n }\n};\n\nexport const parseDefaultDbSchema = (\n imported: Partial<{ default: PongoSchemaConfig }>,\n): PongoDbSchemaMetadata | string => {\n if (!imported.default) {\n return missingDefaultExport;\n }\n\n if (!imported.default.schema) {\n return missingSchema;\n }\n\n if (!imported.default.schema.dbs) {\n return missingDbs;\n }\n\n const dbs = 
objectEntries(imported.default.schema.dbs).map((db) => db[1]);\n\n const defaultDb = dbs.find((db) => db.name === undefined);\n\n if (!defaultDb) {\n return missingDefaultDb;\n }\n\n if (!defaultDb.collections) {\n return missingCollections;\n }\n\n const collections = objectEntries(defaultDb.collections).map((col) => col[1]);\n\n if (collections.length === 0) {\n return missingCollections;\n }\n\n return toDbSchemaMetadata(defaultDb);\n};\n\ntype SampleConfigOptions =\n | {\n collection: string[];\n print?: boolean;\n }\n | {\n collection: string[];\n generate?: boolean;\n file?: string;\n };\n\nexport const configCommand = new Command('config').description(\n 'Manage Pongo configuration',\n);\n\nconfigCommand\n .command('sample')\n .description('Generate or print sample configuration')\n .option(\n '-col, --collection <name>',\n 'Specify the collection name',\n (value: string, previous: string[]) => {\n // Accumulate collection names into an array (explicitly typing `previous` as `string[]`)\n return previous.concat([value]);\n },\n [] as string[],\n )\n .option(\n '-f, --file <path>',\n 'Path to configuration file with collection list',\n )\n .option('-g, --generate', 'Generate sample config file')\n .option('-p, --print', 'Print sample config file')\n .action((options: SampleConfigOptions) => {\n const collectionNames =\n options.collection.length > 0 ? options.collection : ['users'];\n\n if (!('print' in options) && !('generate' in options)) {\n console.error(\n 'Error: Please provide either:\\n--print param to print sample config or\\n--generate to generate sample config file',\n );\n process.exit(1);\n }\n\n if ('print' in options) {\n console.log(`${sampleConfig(collectionNames)}`);\n } else if ('generate' in options) {\n if (!options.file) {\n console.error(\n 'Error: You need to provide a config file through a --file',\n );\n process.exit(1);\n }\n\n generateConfigFile(options.file, collectionNames);\n }\n });\n","import {\n combineMigrations,\n dumbo,\n parseConnectionString,\n runSQLMigrations,\n type DatabaseDriverType,\n} from '@event-driven-io/dumbo';\nimport { Command } from 'commander';\nimport {\n pongoDatabaseDriverRegistry,\n pongoSchema,\n type AnyPongoDatabaseDriverOptions,\n type PongoCollectionSchema,\n type PongoDatabaseFactoryOptions,\n type PongoDocument,\n} from '../core';\nimport { loadConfigFile } from './configFile';\n\ninterface MigrateRunOptions {\n collection: string[];\n connectionString: string;\n databaseType?: string;\n databaseName?: string | undefined;\n databaseDriver: string;\n config?: string;\n dryRun?: boolean;\n}\n\ninterface MigrateSqlOptions {\n print?: boolean;\n write?: string;\n databaseType: string;\n databaseName?: string | undefined;\n databaseDriver: string;\n config?: string;\n collection: string[];\n}\n\nexport const migrateCommand = new Command('migrate').description(\n 'Manage database migrations',\n);\n\nmigrateCommand\n .command('run')\n .description('Run database migrations')\n .option(\n '-dbt, --database-type <string>',\n 'Database type that should be used for connection (e.g., PostgreSQL or SQLite)',\n undefined,\n )\n .option(\n '-drv, --database-driver <string>',\n 'Database driver that should be used for connection (e.g., \"pg\" for PostgreSQL, \"sqlite3\" for SQLite)',\n )\n .option(\n '-dbn, --database-name <string>',\n 'Database name to connect to',\n undefined,\n )\n .option(\n '-cs, --connection-string <string>',\n 'Connection string for the database',\n )\n .option(\n '-col, --collection <name>',\n 'Specify the 
collection name',\n (value: string, previous: string[]) => {\n // Accumulate collection names into an array (explicitly typing `previous` as `string[]`)\n return previous.concat([value]);\n },\n [] as string[],\n )\n .option('-f, --config <path>', 'Path to configuration file with Pongo config')\n .option('-dr, --dryRun', 'Perform dry run without commiting changes', false)\n .action(async (options: MigrateRunOptions) => {\n const { collection, dryRun, databaseName, databaseDriver } = options;\n const connectionString =\n options.connectionString ?? process.env.DB_CONNECTION_STRING;\n\n const databaseType =\n options.databaseType ??\n parseConnectionString(connectionString).databaseType;\n\n let collectionNames: string[];\n\n if (!connectionString) {\n console.error(\n 'Error: Connection string is required. Provide it either as a \"--connection-string\" parameter or through the DB_CONNECTION_STRING environment variable.' +\n '\\nFor instance: --connection-string postgresql://postgres:postgres@localhost:5432/postgres',\n );\n process.exit(1);\n }\n\n if (options.config) {\n const config = await loadConfigFile(options.config);\n\n collectionNames = config.collections.map((c) => c.name);\n } else if (collection) {\n collectionNames = collection;\n } else {\n console.error(\n 'Error: You need to provide at least one collection name. Provide it either through \"--config\" file or as a \"--collection\" parameter.',\n );\n process.exit(1);\n }\n\n const driverType = `${databaseType}:${databaseDriver}` as const;\n\n const migrations = getMigrations({\n driverType,\n connectionString,\n databaseName,\n collectionNames,\n });\n\n const pool = dumbo({ connectionString, driverType });\n\n await runSQLMigrations(pool, migrations, {\n dryRun,\n });\n });\n\nmigrateCommand\n .command('sql')\n .description('Generate SQL for database migration')\n .option(\n '-dbt, --database-type <string>',\n 'Database type that should be used for connection (e.g., PostgreSQL or SQLite)',\n )\n .option(\n '-drv, --database-driver <string>',\n 'Database driver that should be used for connection (e.g., \"pg\" for PostgreSQL, \"sqlite3\" for SQLite)',\n )\n .option(\n '-dbn, --database-name <string>',\n 'Database name to connect to',\n undefined,\n )\n .option(\n '-col, --collection <name>',\n 'Specify the collection name',\n (value: string, previous: string[]) => {\n // Accumulate collection names into an array (explicitly typing `previous` as `string[]`)\n return previous.concat([value]);\n },\n [] as string[],\n )\n .option('-f, --config <path>', 'Path to configuration file with Pongo config')\n .option('--print', 'Print the SQL to the console (default)', true)\n //.option('--write <filename>', 'Write the SQL to a specified file')\n .action(async (options: MigrateSqlOptions) => {\n const { collection, databaseName, databaseType, databaseDriver } = options;\n\n let collectionNames: string[];\n\n if (options.config) {\n const config = await loadConfigFile(options.config);\n\n collectionNames = config.collections.map((c) => c.name);\n } else if (collection) {\n collectionNames = collection;\n } else {\n console.error(\n 'Error: You need to provide at least one collection name. 
Provide it either through \"--config\" file or as a \"--collection\" parameter.',\n );\n process.exit(1);\n }\n\n const driverType = `${databaseType}:${databaseDriver}` as const;\n\n const migrations = getMigrations({\n driverType,\n connectionString: undefined,\n databaseName,\n collectionNames,\n });\n\n console.log('Printing SQL:');\n console.log(combineMigrations(...migrations));\n });\n\nconst getMigrations = ({\n driverType,\n connectionString,\n databaseName,\n collectionNames,\n}: {\n driverType: DatabaseDriverType;\n connectionString: string | undefined;\n databaseName: string | undefined;\n collectionNames: string[];\n}) => {\n const driver = pongoDatabaseDriverRegistry.tryGet(driverType);\n\n if (driver === null) {\n console.error(\n `Error: No database driver found for driver type \"${driverType}\". Make sure the driver is registered and the connection string is correct.`,\n );\n process.exit(1);\n }\n\n const dbDefinition = pongoSchema.db.from(databaseName, collectionNames);\n\n const driverOptions: PongoDatabaseFactoryOptions<\n Record<string, PongoCollectionSchema<PongoDocument>>,\n AnyPongoDatabaseDriverOptions\n > = {\n schema: { definition: dbDefinition },\n };\n\n const customOptions = {\n connectionString: connectionString ?? driver.defaultConnectionString,\n databaseName,\n };\n\n const db = driver.databaseFactory({ ...driverOptions, ...customOptions });\n\n return db.schema.component.migrations;\n};\n","import {\n color,\n LogLevel,\n LogStyle,\n parseConnectionString,\n prettyJson,\n SQL,\n type MigrationStyle,\n} from '@event-driven-io/dumbo';\nimport { checkConnection } from '@event-driven-io/dumbo/pg';\nimport Table from 'cli-table3';\nimport { Command } from 'commander';\nimport repl from 'node:repl';\nimport {\n pongoClient,\n pongoDatabaseDriverRegistry,\n pongoSchema,\n type PongoClient,\n type PongoClientOptions,\n type PongoCollectionSchema,\n type PongoDb,\n} from '../core';\n\nlet pongo: PongoClient;\n\nconst calculateColumnWidths = (\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n results: any[],\n columnNames: string[],\n): number[] => {\n const columnWidths = columnNames.map((col) => {\n const maxWidth = Math.max(\n col.length, // Header size\n ...results.map((result) =>\n // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access\n result[col] ? String(result[col]).length : 0,\n ),\n );\n return maxWidth + 2; // Add padding\n });\n return columnWidths;\n};\n\nlet shouldDisplayResultsAsTable = false;\n\nconst printResultsAsTable = (print?: boolean) =>\n (shouldDisplayResultsAsTable = print === undefined || print === true);\n\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nconst printOutput = (obj: any): string =>\n Array.isArray(obj) && shouldDisplayResultsAsTable\n ? displayResultsAsTable(obj)\n : prettyJson(obj);\n\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nconst displayResultsAsTable = (results: any[]): string => {\n if (results.length === 0) {\n return color.yellow('No documents found.');\n }\n\n const columnNames = results\n\n .flatMap((result) =>\n // eslint-disable-next-line @typescript-eslint/no-unsafe-argument\n typeof result === 'object' ? 
Object.keys(result) : typeof result,\n )\n .filter((value, index, array) => array.indexOf(value) === index);\n\n const columnWidths = calculateColumnWidths(results, columnNames);\n\n const table = new Table({\n head: columnNames.map((col) => color.cyan(col)),\n colWidths: columnWidths,\n });\n\n results.forEach((result) => {\n table.push(\n columnNames.map((col) =>\n // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access\n result[col] !== undefined\n ? // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access\n Array.isArray(result[col])\n ? // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access\n displayResultsAsTable(result[col])\n : // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access\n prettyJson(result[col])\n : typeof result === 'object'\n ? ''\n : result != undefined && result != undefined\n ? prettyJson(result)\n : '',\n ),\n );\n });\n\n return table.toString();\n};\n\nconst setLogLevel = (logLevel: string) => {\n process.env.DUMBO_LOG_LEVEL = logLevel;\n};\n\nconst setLogStyle = (logLevel: string) => {\n process.env.DUMBO_LOG_STYLE = logLevel;\n};\n\nconst prettifyLogs = (logLevel?: string) => {\n if (logLevel !== undefined) setLogLevel(logLevel);\n setLogStyle(LogStyle.PRETTY);\n};\n\nconst startRepl = async (options: {\n logging: {\n printOptions: boolean;\n logLevel: LogLevel;\n logStyle: LogStyle;\n };\n schema: {\n database: string;\n collections: string[];\n autoMigration: MigrationStyle;\n };\n connectionString: string | undefined;\n databaseDriver: string;\n}) => {\n // TODO: This will change when we have proper tracing and logging config\n // For now, that's enough\n setLogLevel(process.env.DUMBO_LOG_LEVEL ?? options.logging.logLevel);\n setLogStyle(process.env.DUMBO_LOG_STYLE ?? options.logging.logStyle);\n\n console.log(color.green('Starting Pongo Shell (version: 0.17.0-beta.9)'));\n\n if (options.logging.printOptions) {\n console.log(color.green('With Options:'));\n console.log(prettyJson(options));\n }\n\n const connectionString =\n options.connectionString ??\n process.env.DB_CONNECTION_STRING ??\n 'postgresql://postgres:postgres@localhost:5432/postgres';\n\n if (!(options.connectionString ?? process.env.DB_CONNECTION_STRING)) {\n console.log(\n color.yellow(\n `No connection string provided, using: 'postgresql://postgres:postgres@localhost:5432/postgres'`,\n ),\n );\n }\n\n const { databaseType } = parseConnectionString(connectionString);\n const driverType = `${databaseType}:${options.databaseDriver}` as const;\n\n const driver = pongoDatabaseDriverRegistry.tryGet(driverType);\n\n if (driver === null) {\n console.error(\n `Error: No database driver found for driver type \"${driverType}\". Make sure the driver is installed and the connection string is correct.`,\n );\n process.exit(1);\n }\n\n const connectionCheck = await checkConnection(connectionString);\n\n if (!connectionCheck.successful) {\n if (connectionCheck.errorType === 'ConnectionRefused') {\n console.error(\n color.red(\n `Connection was refused. Check if the PostgreSQL server is running and accessible.`,\n ),\n );\n } else if (connectionCheck.errorType === 'Authentication') {\n console.error(\n color.red(\n `Authentication failed. 
Check the username and password in the connection string.`,\n ),\n );\n } else {\n console.error(color.red('Error connecting to PostgreSQL server'));\n }\n console.log(color.red('Exiting Pongo Shell...'));\n process.exit();\n }\n\n console.log(color.green(`Successfully connected`));\n console.log(color.green('Use db.<collection>.<method>() to query.'));\n\n const shell = repl.start({\n prompt: color.green('pongo> '),\n useGlobal: true,\n breakEvalOnSigint: true,\n writer: printOutput,\n });\n\n let db: PongoDb;\n\n if (options.schema.collections.length > 0) {\n const collectionsSchema: Record<string, PongoCollectionSchema> = {};\n\n for (const collectionName of options.schema.collections) {\n collectionsSchema[collectionName] =\n pongoSchema.collection(collectionName);\n }\n\n const schema = pongoSchema.client({\n database: pongoSchema.db(options.schema.database, collectionsSchema),\n });\n\n const driverOptions: PongoClientOptions = {\n driver,\n schema: {\n definition: schema,\n autoMigration: options.schema.autoMigration,\n },\n };\n\n // TODO: Find a better way to pass custom driver settings\n const customOptions = {\n connectionString,\n };\n\n const typedClient = pongoClient({\n ...driverOptions,\n ...customOptions,\n });\n\n db = typedClient.database!;\n\n for (const collectionName of options.schema.collections) {\n shell.context[collectionName] = typedClient.database![collectionName];\n }\n\n pongo = typedClient;\n } else {\n const driverOptions: PongoClientOptions = {\n driver,\n schema: {\n autoMigration: options.schema.autoMigration,\n },\n };\n\n // TODO: Find a better way to pass custom driver settings\n const customOptions = {\n connectionString,\n };\n\n pongo = pongoClient({\n ...driverOptions,\n ...customOptions,\n });\n\n db = pongo.db(options.schema.database);\n }\n\n shell.context.pongo = pongo;\n shell.context.db = db;\n\n // helpers\n shell.context.SQL = SQL;\n shell.context.setLogLevel = setLogLevel;\n shell.context.setLogStyle = setLogStyle;\n shell.context.prettifyLogs = prettifyLogs;\n shell.context.printResultsAsTable = printResultsAsTable;\n shell.context.LogStyle = LogStyle;\n shell.context.LogLevel = LogLevel;\n\n // Intercept REPL output to display results as a table if they are arrays\n shell.on('exit', async () => {\n await teardown();\n process.exit();\n });\n\n shell.on('SIGINT', async () => {\n await teardown();\n process.exit();\n });\n};\n\nconst teardown = async () => {\n console.log(color.yellow('Exiting Pongo Shell...'));\n await pongo.close();\n};\n\nprocess.on('uncaughtException', teardown);\nprocess.on('SIGINT', teardown);\n\ninterface ShellOptions {\n database: string;\n collection: string[];\n databaseDriver: string;\n connectionString?: string;\n disableAutoMigrations: boolean;\n logStyle?: string;\n logLevel?: string;\n prettyLog?: boolean;\n printOptions?: boolean;\n}\n\nconst shellCommand = new Command('shell')\n .description('Start an interactive Pongo shell')\n .option(\n '-drv, --database-driver <string>',\n 'Database driver that should be used for connection (e.g., \"pg\" for PostgreSQL, \"sqlite3\" for SQLite)',\n 'pg',\n )\n .option(\n '-cs, --connectionString <string>',\n 'Connection string for the database',\n )\n .option('-db, --database <string>', 'Database name to connect', 'postgres')\n .option(\n '-col, --collection <name>',\n 'Specify the collection name',\n (value: string, previous: string[]) => {\n // Accumulate collection names into an array (explicitly typing `previous` as `string[]`)\n return previous.concat([value]);\n 
},\n [] as string[],\n )\n .option(\n '-no-migrations, --disable-auto-migrations',\n 'Disable automatic migrations',\n )\n .option('-o, --print-options', 'Print shell options')\n .option(\n '-ll, --log-level <logLevel>',\n 'Log level: DISABLED, INFO, LOG, WARN, ERROR',\n 'DISABLED',\n )\n .option('-ls, --log-style', 'Log style: RAW, PRETTY', 'RAW')\n .option('-p, --pretty-log', 'Turn on logging with prettified output')\n .action(async (options: ShellOptions) => {\n const { collection, database } = options;\n const connectionString = options.connectionString;\n\n await startRepl({\n logging: {\n printOptions: options.printOptions === true,\n logStyle: options.prettyLog\n ? LogStyle.PRETTY\n : ((options.logStyle as LogStyle | undefined) ?? LogStyle.RAW),\n logLevel: options.logLevel\n ? (options.logLevel as LogLevel)\n : options.prettyLog\n ? LogLevel.INFO\n : LogLevel.DISABLED,\n },\n schema: {\n collections: collection,\n database,\n autoMigration: options.disableAutoMigrations\n ? 'None'\n : 'CreateOrUpdate',\n },\n connectionString,\n databaseDriver: options.databaseDriver,\n });\n });\n\nexport { shellCommand };\n"],"mappings":";;;;;;;;;;AACA,SAAS,WAAAA,gBAAe;;;ACDxB,SAAS,eAAe;AACxB,OAAO,QAAQ;AAQf,IAAM,iBAAiB,CAAC,UAA0B;AAChD,MAAI,MAAM,WAAW,GAAG;AACtB,WAAO;AAAA,EACT;AAEA,MAAI,YAAY,MAAM,OAAO,CAAC,EAAE,YAAY,IAAI,MAAM,MAAM,CAAC;AAE7D,MAAI,UAAU,SAAS,GAAG,GAAG;AAC3B,gBAAY,UAAU,MAAM,GAAG,EAAE;AAAA,EACnC;AAEA,SAAO;AACT;AAEA,IAAM,eAAe,CAAC,kBAA4B,CAAC,OAAO,MAAM;AAC9D,QAAM,QAAQ,gBACX;AAAA,IACC,CAAC,SACC,eAAe,eAAe,IAAI,CAAC;AAAA,EACvC,EACC,KAAK,IAAI;AAEZ,QAAM,cAAc,gBACjB;AAAA,IACC,CAAC,SACC,SAAS,IAAI,4BAA4B,eAAe,IAAI,CAAC,MAAM,IAAI;AAAA,EAC3E,EACC,KAAK,IAAI;AAEZ,SAAO;AAAA;AAAA,EAEP,KAAK;AAAA;AAAA;AAAA;AAAA;AAAA,EAKL,WAAW;AAAA;AAAA;AAAA;AAIb;AAEA,IAAM,uBAAuB;AAAA;AAAA,EAAwD,aAAa,CAAC;AACnG,IAAM,gBAAgB;AAAA;AAAA,EAAyD,aAAa,CAAC;AAC7F,IAAM,aAAa;AAAA;AAAA,EAAyE,aAAa,CAAC;AAC1G,IAAM,mBAAmB;AAAA;AAAA,EAAiH,aAAa,CAAC;AACxJ,IAAM,qBAAqB;AAAA;AAAA,EAAwE,aAAa,CAAC;AAE1G,IAAM,iBAAiB,OAC5B,eACmC;AACnC,QAAM,YAAY,IAAI,IAAI,YAAY,UAAU,QAAQ,IAAI,CAAC,GAAG;AAChE,MAAI;AAEF,UAAM,WAAoD,MAAM,OAC9D,UAAU;AAGZ,UAAM,SAAS,qBAAqB,QAAQ;AAE5C,QAAI,OAAO,WAAW,UAAU;AAC9B,cAAQ,MAAM,MAAM;AACpB,cAAQ,KAAK,CAAC;AAAA,IAChB;AAEA,WAAO;AAAA,EACT,QAAQ;AACN,YAAQ,MAAM,8BAA8B,UAAU,IAAI,EAAE;AAC5D,YAAQ,KAAK,CAAC;AAAA,EAChB;AACF;AAEO,IAAM,qBAAqB,CAChC,YACA,oBACS;AACT,MAAI;AACF,OAAG,cAAc,YAAY,aAAa,eAAe,GAAG,MAAM;AAClE,YAAQ,IAAI,iCAAiC,UAAU,EAAE;AAAA,EAC3D,SAAS,OAAO;AACd,YAAQ,MAAM,sCAAsC,UAAU,GAAG;AACjE,YAAQ,MAAM,KAAK;AACnB,YAAQ,KAAK,CAAC;AAAA,EAChB;AACF;AAEO,IAAM,uBAAuB,CAClC,aACmC;AACnC,MAAI,CAAC,SAAS,SAAS;AACrB,WAAO;AAAA,EACT;AAEA,MAAI,CAAC,SAAS,QAAQ,QAAQ;AAC5B,WAAO;AAAA,EACT;AAEA,MAAI,CAAC,SAAS,QAAQ,OAAO,KAAK;AAChC,WAAO;AAAA,EACT;AAEA,QAAM,MAAM,cAAc,SAAS,QAAQ,OAAO,GAAG,EAAE,IAAI,CAAC,OAAO,GAAG,CAAC,CAAC;AAExE,QAAM,YAAY,IAAI,KAAK,CAAC,OAAO,GAAG,SAAS,MAAS;AAExD,MAAI,CAAC,WAAW;AACd,WAAO;AAAA,EACT;AAEA,MAAI,CAAC,UAAU,aAAa;AAC1B,WAAO;AAAA,EACT;AAEA,QAAM,cAAc,cAAc,UAAU,WAAW,EAAE,IAAI,CAAC,QAAQ,IAAI,CAAC,CAAC;AAE5E,MAAI,YAAY,WAAW,GAAG;AAC5B,WAAO;AAAA,EACT;AAEA,SAAO,mBAAmB,SAAS;AACrC;AAaO,IAAM,gBAAgB,IAAI,QAAQ,QAAQ,EAAE;AAAA,EACjD;AACF;AAEA,cACG,QAAQ,QAAQ,EAChB,YAAY,wCAAwC,EACpD;AAAA,EACC;AAAA,EACA;AAAA,EACA,CAAC,OAAe,aAAuB;AAErC,WAAO,SAAS,OAAO,CAAC,KAAK,CAAC;AAAA,EAChC;AAAA,EACA,CAAC;AACH,EACC;AAAA,EACC;AAAA,EACA;AACF,EACC,OAAO,kBAAkB,6BAA6B,EACtD,OAAO,eAAe,0BAA0B,EAChD,OAAO,CAAC,YAAiC;AACxC,QAAM,kBACJ,QAAQ,WAAW,SAAS,IAAI,QAAQ,aAAa,CAAC,OAAO;AAE/D,MAAI,EAAE,WAAW,YAAY,EAAE,cAAc,UAAU;AACrD,YAAQ;AAAA,MACN;AAAA,IACF;AACA,YAAQ,KAAK,CAAC;AAAA,EAChB;AAEA,MA
AI,WAAW,SAAS;AACtB,YAAQ,IAAI,GAAG,aAAa,eAAe,CAAC,EAAE;AAAA,EAChD,WAAW,cAAc,SAAS;AAChC,QAAI,CAAC,QAAQ,MAAM;AACjB,cAAQ;AAAA,QACN;AAAA,MACF;AACA,cAAQ,KAAK,CAAC;AAAA,IAChB;AAEA,uBAAmB,QAAQ,MAAM,eAAe;AAAA,EAClD;AACF,CAAC;;;AC3LH;AAAA,EACE;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,OAEK;AACP,SAAS,WAAAC,gBAAe;AA+BjB,IAAM,iBAAiB,IAAIC,SAAQ,SAAS,EAAE;AAAA,EACnD;AACF;AAEA,eACG,QAAQ,KAAK,EACb,YAAY,yBAAyB,EACrC;AAAA,EACC;AAAA,EACA;AAAA,EACA;AACF,EACC;AAAA,EACC;AAAA,EACA;AACF,EACC;AAAA,EACC;AAAA,EACA;AAAA,EACA;AACF,EACC;AAAA,EACC;AAAA,EACA;AACF,EACC;AAAA,EACC;AAAA,EACA;AAAA,EACA,CAAC,OAAe,aAAuB;AAErC,WAAO,SAAS,OAAO,CAAC,KAAK,CAAC;AAAA,EAChC;AAAA,EACA,CAAC;AACH,EACC,OAAO,uBAAuB,8CAA8C,EAC5E,OAAO,iBAAiB,6CAA6C,KAAK,EAC1E,OAAO,OAAO,YAA+B;AAC5C,QAAM,EAAE,YAAY,QAAQ,cAAc,eAAe,IAAI;AAC7D,QAAM,mBACJ,QAAQ,oBAAoB,QAAQ,IAAI;AAE1C,QAAM,eACJ,QAAQ,gBACR,sBAAsB,gBAAgB,EAAE;AAE1C,MAAI;AAEJ,MAAI,CAAC,kBAAkB;AACrB,YAAQ;AAAA,MACN;AAAA,IAEF;AACA,YAAQ,KAAK,CAAC;AAAA,EAChB;AAEA,MAAI,QAAQ,QAAQ;AAClB,UAAM,SAAS,MAAM,eAAe,QAAQ,MAAM;AAElD,sBAAkB,OAAO,YAAY,IAAI,CAAC,MAAM,EAAE,IAAI;AAAA,EACxD,WAAW,YAAY;AACrB,sBAAkB;AAAA,EACpB,OAAO;AACL,YAAQ;AAAA,MACN;AAAA,IACF;AACA,YAAQ,KAAK,CAAC;AAAA,EAChB;AAEA,QAAM,aAAa,GAAG,YAAY,IAAI,cAAc;AAEpD,QAAM,aAAa,cAAc;AAAA,IAC/B;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF,CAAC;AAED,QAAM,OAAO,MAAM,EAAE,kBAAkB,WAAW,CAAC;AAEnD,QAAM,iBAAiB,MAAM,YAAY;AAAA,IACvC;AAAA,EACF,CAAC;AACH,CAAC;AAEH,eACG,QAAQ,KAAK,EACb,YAAY,qCAAqC,EACjD;AAAA,EACC;AAAA,EACA;AACF,EACC;AAAA,EACC;AAAA,EACA;AACF,EACC;AAAA,EACC;AAAA,EACA;AAAA,EACA;AACF,EACC;AAAA,EACC;AAAA,EACA;AAAA,EACA,CAAC,OAAe,aAAuB;AAErC,WAAO,SAAS,OAAO,CAAC,KAAK,CAAC;AAAA,EAChC;AAAA,EACA,CAAC;AACH,EACC,OAAO,uBAAuB,8CAA8C,EAC5E,OAAO,WAAW,0CAA0C,IAAI,EAEhE,OAAO,OAAO,YAA+B;AAC5C,QAAM,EAAE,YAAY,cAAc,cAAc,eAAe,IAAI;AAEnE,MAAI;AAEJ,MAAI,QAAQ,QAAQ;AAClB,UAAM,SAAS,MAAM,eAAe,QAAQ,MAAM;AAElD,sBAAkB,OAAO,YAAY,IAAI,CAAC,MAAM,EAAE,IAAI;AAAA,EACxD,WAAW,YAAY;AACrB,sBAAkB;AAAA,EACpB,OAAO;AACL,YAAQ;AAAA,MACN;AAAA,IACF;AACA,YAAQ,KAAK,CAAC;AAAA,EAChB;AAEA,QAAM,aAAa,GAAG,YAAY,IAAI,cAAc;AAEpD,QAAM,aAAa,cAAc;AAAA,IAC/B;AAAA,IACA,kBAAkB;AAAA,IAClB;AAAA,IACA;AAAA,EACF,CAAC;AAED,UAAQ,IAAI,eAAe;AAC3B,UAAQ,IAAI,kBAAkB,GAAG,UAAU,CAAC;AAC9C,CAAC;AAEH,IAAM,gBAAgB,CAAC;AAAA,EACrB;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF,MAKM;AACJ,QAAM,SAAS,4BAA4B,OAAO,UAAU;AAE5D,MAAI,WAAW,MAAM;AACnB,YAAQ;AAAA,MACN,oDAAoD,UAAU;AAAA,IAChE;AACA,YAAQ,KAAK,CAAC;AAAA,EAChB;AAEA,QAAM,eAAe,YAAY,GAAG,KAAK,cAAc,eAAe;AAEtE,QAAM,gBAGF;AAAA,IACF,QAAQ,EAAE,YAAY,aAAa;AAAA,EACrC;AAEA,QAAM,gBAAgB;AAAA,IACpB,kBAAkB,oBAAoB,OAAO;AAAA,IAC7C;AAAA,EACF;AAEA,QAAM,KAAK,OAAO,gBAAgB,EAAE,GAAG,eAAe,GAAG,cAAc,CAAC;AAExE,SAAO,GAAG,OAAO,UAAU;AAC7B;;;AC1NA;AAAA,EACE;AAAA,EACA;AAAA,EACA;AAAA,EACA,yBAAAC;AAAA,EACA;AAAA,EACA;AAAA,OAEK;AACP,SAAS,uBAAuB;AAChC,OAAO,WAAW;AAClB,SAAS,WAAAC,gBAAe;AACxB,OAAO,UAAU;AAWjB,IAAI;AAEJ,IAAM,wBAAwB,CAE5B,SACA,gBACa;AACb,QAAM,eAAe,YAAY,IAAI,CAAC,QAAQ;AAC5C,UAAM,WAAW,KAAK;AAAA,MACpB,IAAI;AAAA,MACJ,GAAG,QAAQ;AAAA,QAAI,CAAC;AAAA;AAAA,UAEd,OAAO,GAAG,IAAI,OAAO,OAAO,GAAG,CAAC,EAAE,SAAS;AAAA;AAAA,MAC7C;AAAA,IACF;AACA,WAAO,WAAW;AAAA,EACpB,CAAC;AACD,SAAO;AACT;AAEA,IAAI,8BAA8B;AAElC,IAAM,sBAAsB,CAAC,UAC1B,8BAA8B,UAAU,UAAa,UAAU;AAGlE,IAAM,cAAc,CAAC,QACnB,MAAM,QAAQ,GAAG,KAAK,8BAClB,sBAAsB,GAAG,IACzB,WAAW,GAAG;AAGpB,IAAM,wBAAwB,CAAC,YAA2B;AACxD,MAAI,QAAQ,WAAW,GAAG;AACxB,WAAO,MAAM,OAAO,qBAAqB;AAAA,EAC3C;AAEA,QAAM,cAAc,QAEjB;AAAA,IAAQ,CAAC;AAAA;AAAA,MAER,OAAO,WAAW,WAAW,OAAO,KAAK,MAAM,IAAI,OAAO;AAAA;AAAA,EAC5D,EACC,OAAO,CAAC,OAAO,OAAO,UAAU,MAAM,QAAQ,KAAK,MAAM,KAAK;AAEjE,QAAM,eAAe,sBAAsB,SAAS,WAAW;AAE/D,QAAM,QAAQ,IAAI,MAAM;AAAA,IACtB,MAAM,YAAY,IAAI,CAA
C,QAAQ,MAAM,KAAK,GAAG,CAAC;AAAA,IAC9C,WAAW;AAAA,EACb,CAAC;AAED,UAAQ,QAAQ,CAAC,WAAW;AAC1B,UAAM;AAAA,MACJ,YAAY;AAAA,QAAI,CAAC;AAAA;AAAA,UAEf,OAAO,GAAG,MAAM;AAAA;AAAA,YAEZ,MAAM,QAAQ,OAAO,GAAG,CAAC;AAAA;AAAA,cAEvB,sBAAsB,OAAO,GAAG,CAAC;AAAA;AAAA;AAAA,cAEjC,WAAW,OAAO,GAAG,CAAC;AAAA;AAAA,cACxB,OAAO,WAAW,WAChB,KACA,UAAU,UAAa,UAAU,SAC/B,WAAW,MAAM,IACjB;AAAA;AAAA,MACV;AAAA,IACF;AAAA,EACF,CAAC;AAED,SAAO,MAAM,SAAS;AACxB;AAEA,IAAM,cAAc,CAAC,aAAqB;AACxC,UAAQ,IAAI,kBAAkB;AAChC;AAEA,IAAM,cAAc,CAAC,aAAqB;AACxC,UAAQ,IAAI,kBAAkB;AAChC;AAEA,IAAM,eAAe,CAAC,aAAsB;AAC1C,MAAI,aAAa,OAAW,aAAY,QAAQ;AAChD,cAAY,SAAS,MAAM;AAC7B;AAEA,IAAM,YAAY,OAAO,YAanB;AAGJ,cAAY,QAAQ,IAAI,mBAAmB,QAAQ,QAAQ,QAAQ;AACnE,cAAY,QAAQ,IAAI,mBAAmB,QAAQ,QAAQ,QAAQ;AAEnE,UAAQ,IAAI,MAAM,MAAM,+CAA+C,CAAC;AAExE,MAAI,QAAQ,QAAQ,cAAc;AAChC,YAAQ,IAAI,MAAM,MAAM,eAAe,CAAC;AACxC,YAAQ,IAAI,WAAW,OAAO,CAAC;AAAA,EACjC;AAEA,QAAM,mBACJ,QAAQ,oBACR,QAAQ,IAAI,wBACZ;AAEF,MAAI,EAAE,QAAQ,oBAAoB,QAAQ,IAAI,uBAAuB;AACnE,YAAQ;AAAA,MACN,MAAM;AAAA,QACJ;AAAA,MACF;AAAA,IACF;AAAA,EACF;AAEA,QAAM,EAAE,aAAa,IAAIC,uBAAsB,gBAAgB;AAC/D,QAAM,aAAa,GAAG,YAAY,IAAI,QAAQ,cAAc;AAE5D,QAAM,SAAS,4BAA4B,OAAO,UAAU;AAE5D,MAAI,WAAW,MAAM;AACnB,YAAQ;AAAA,MACN,oDAAoD,UAAU;AAAA,IAChE;AACA,YAAQ,KAAK,CAAC;AAAA,EAChB;AAEA,QAAM,kBAAkB,MAAM,gBAAgB,gBAAgB;AAE9D,MAAI,CAAC,gBAAgB,YAAY;AAC/B,QAAI,gBAAgB,cAAc,qBAAqB;AACrD,cAAQ;AAAA,QACN,MAAM;AAAA,UACJ;AAAA,QACF;AAAA,MACF;AAAA,IACF,WAAW,gBAAgB,cAAc,kBAAkB;AACzD,cAAQ;AAAA,QACN,MAAM;AAAA,UACJ;AAAA,QACF;AAAA,MACF;AAAA,IACF,OAAO;AACL,cAAQ,MAAM,MAAM,IAAI,uCAAuC,CAAC;AAAA,IAClE;AACA,YAAQ,IAAI,MAAM,IAAI,wBAAwB,CAAC;AAC/C,YAAQ,KAAK;AAAA,EACf;AAEA,UAAQ,IAAI,MAAM,MAAM,wBAAwB,CAAC;AACjD,UAAQ,IAAI,MAAM,MAAM,0CAA0C,CAAC;AAEnE,QAAM,QAAQ,KAAK,MAAM;AAAA,IACvB,QAAQ,MAAM,MAAM,SAAS;AAAA,IAC7B,WAAW;AAAA,IACX,mBAAmB;AAAA,IACnB,QAAQ;AAAA,EACV,CAAC;AAED,MAAI;AAEJ,MAAI,QAAQ,OAAO,YAAY,SAAS,GAAG;AACzC,UAAM,oBAA2D,CAAC;AAElE,eAAW,kBAAkB,QAAQ,OAAO,aAAa;AACvD,wBAAkB,cAAc,IAC9B,YAAY,WAAW,cAAc;AAAA,IACzC;AAEA,UAAM,SAAS,YAAY,OAAO;AAAA,MAChC,UAAU,YAAY,GAAG,QAAQ,OAAO,UAAU,iBAAiB;AAAA,IACrE,CAAC;AAED,UAAM,gBAAoC;AAAA,MACxC;AAAA,MACA,QAAQ;AAAA,QACN,YAAY;AAAA,QACZ,eAAe,QAAQ,OAAO;AAAA,MAChC;AAAA,IACF;AAGA,UAAM,gBAAgB;AAAA,MACpB;AAAA,IACF;AAEA,UAAM,cAAc,YAAY;AAAA,MAC9B,GAAG;AAAA,MACH,GAAG;AAAA,IACL,CAAC;AAED,SAAK,YAAY;AAEjB,eAAW,kBAAkB,QAAQ,OAAO,aAAa;AACvD,YAAM,QAAQ,cAAc,IAAI,YAAY,SAAU,cAAc;AAAA,IACtE;AAEA,YAAQ;AAAA,EACV,OAAO;AACL,UAAM,gBAAoC;AAAA,MACxC;AAAA,MACA,QAAQ;AAAA,QACN,eAAe,QAAQ,OAAO;AAAA,MAChC;AAAA,IACF;AAGA,UAAM,gBAAgB;AAAA,MACpB;AAAA,IACF;AAEA,YAAQ,YAAY;AAAA,MAClB,GAAG;AAAA,MACH,GAAG;AAAA,IACL,CAAC;AAED,SAAK,MAAM,GAAG,QAAQ,OAAO,QAAQ;AAAA,EACvC;AAEA,QAAM,QAAQ,QAAQ;AACtB,QAAM,QAAQ,KAAK;AAGnB,QAAM,QAAQ,MAAM;AACpB,QAAM,QAAQ,cAAc;AAC5B,QAAM,QAAQ,cAAc;AAC5B,QAAM,QAAQ,eAAe;AAC7B,QAAM,QAAQ,sBAAsB;AACpC,QAAM,QAAQ,WAAW;AACzB,QAAM,QAAQ,WAAW;AAGzB,QAAM,GAAG,QAAQ,YAAY;AAC3B,UAAM,SAAS;AACf,YAAQ,KAAK;AAAA,EACf,CAAC;AAED,QAAM,GAAG,UAAU,YAAY;AAC7B,UAAM,SAAS;AACf,YAAQ,KAAK;AAAA,EACf,CAAC;AACH;AAEA,IAAM,WAAW,YAAY;AAC3B,UAAQ,IAAI,MAAM,OAAO,wBAAwB,CAAC;AAClD,QAAM,MAAM,MAAM;AACpB;AAEA,QAAQ,GAAG,qBAAqB,QAAQ;AACxC,QAAQ,GAAG,UAAU,QAAQ;AAc7B,IAAM,eAAe,IAAIC,SAAQ,OAAO,EACrC,YAAY,kCAAkC,EAC9C;AAAA,EACC;AAAA,EACA;AAAA,EACA;AACF,EACC;AAAA,EACC;AAAA,EACA;AACF,EACC,OAAO,4BAA4B,4BAA4B,UAAU,EACzE;AAAA,EACC;AAAA,EACA;AAAA,EACA,CAAC,OAAe,aAAuB;AAErC,WAAO,SAAS,OAAO,CAAC,KAAK,CAAC;AAAA,EAChC;AAAA,EACA,CAAC;AACH,EACC;AAAA,EACC;AAAA,EACA;AACF,EACC,OAAO,uBAAuB,qBAAqB,EACnD;AAAA,EACC;AAAA,EACA;AAAA,EACA;AACF,EACC,OAAO,oBAAoB,0BAA0B,KAAK,EAC1D,OAAO,oBAAoB,wCAAwC,EACnE,OAAO,OAAO,YAA0B;AACvC,QAAM,EAAE,YAAY,SAAS,IAAI;AACjC,QAAM,mBAAm
B,QAAQ;AAEjC,QAAM,UAAU;AAAA,IACd,SAAS;AAAA,MACP,cAAc,QAAQ,iBAAiB;AAAA,MACvC,UAAU,QAAQ,YACd,SAAS,SACP,QAAQ,YAAqC,SAAS;AAAA,MAC5D,UAAU,QAAQ,WACb,QAAQ,WACT,QAAQ,YACN,SAAS,OACT,SAAS;AAAA,IACjB;AAAA,IACA,QAAQ;AAAA,MACN,aAAa;AAAA,MACb;AAAA,MACA,eAAe,QAAQ,wBACnB,SACA;AAAA,IACN;AAAA,IACA;AAAA,IACA,gBAAgB,QAAQ;AAAA,EAC1B,CAAC;AACH,CAAC;;;AHjWH,IAAM,UAAU,IAAIC,SAAQ;AAE5B,QAAQ,KAAK,OAAO,EAAE,YAAY,oBAAoB;AAEtD,QAAQ,WAAW,aAAa;AAChC,QAAQ,WAAW,cAAc;AACjC,QAAQ,WAAW,YAAY;AAE/B,QAAQ,MAAM,QAAQ,IAAI;AAE1B,IAAO,cAAQ;","names":["Command","Command","Command","parseConnectionString","Command","parseConnectionString","Command","Command"]}
package/dist/pg.cjs
CHANGED
@@ -315,7 +315,7 @@ var where = (filterQuery) => _dumbo.SQL.check.isEmpty(filterQuery) ? _dumbo.SQL.
 var _pg = require('@event-driven-io/dumbo/pg');
 require('pg');
 var pgDatabaseDriver = {
-driverType: _pg.NodePostgresDriverType,
+driverType: _pg.PgDriverType,
 databaseFactory: (options) => {
 const databaseName = _nullishCoalesce(options.databaseName, () => ( _pg.getDatabaseNameOrDefault.call(void 0, options.connectionString)));
 return _chunk4BL6YWLWcjs.PongoDatabase.call(void 0, {
@@ -326,9 +326,9 @@ var pgDatabaseDriver = {
 ...options.connectionOptions
 }),
 schemaComponent: _chunk4BL6YWLWcjs.PongoDatabaseSchemaComponent.call(void 0, {
-driverType: _pg.NodePostgresDriverType,
+driverType: _pg.PgDriverType,
 collectionFactory: (schema) => _chunk4BL6YWLWcjs.PongoCollectionSchemaComponent.call(void 0, {
-driverType: _pg.NodePostgresDriverType,
+driverType: _pg.PgDriverType,
 definition: schema,
 migrationsOrSchemaComponents: {
 migrations: pongoCollectionPostgreSQLMigrations(schema.name)
@@ -346,10 +346,7 @@ var pgDatabaseDriver = {
 defaultConnectionString: "postgresql://localhost:5432/postgres"
 };
 var usePgDatabaseDriver = () => {
-_chunk4BL6YWLWcjs.pongoDatabaseDriverRegistry.register(
-_pg.NodePostgresDriverType,
-pgDatabaseDriver
-);
+_chunk4BL6YWLWcjs.pongoDatabaseDriverRegistry.register(_pg.PgDriverType, pgDatabaseDriver);
 };
 usePgDatabaseDriver();

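The only behavioural change in these hunks is the registration key: the compiled CJS output now registers the PostgreSQL driver under PgDriverType instead of NodePostgresDriverType. A minimal sketch of what that looks like from consumer code, assuming dist/pg.cjs / dist/pg.js is exposed through a '@event-driven-io/pongo/pg' subpath export (that path is not shown in this diff):

// Sketch only; the '@event-driven-io/pongo/pg' subpath is an assumed public entry point.
import { PgDriverType } from '@event-driven-io/dumbo/pg';
import { pgDriver } from '@event-driven-io/pongo/pg';

// dist/pg.cjs calls usePgDatabaseDriver() at load time, so importing the module is
// enough to register pgDriver under PgDriverType (previously NodePostgresDriverType).
console.log(pgDriver.driverType === PgDriverType); // true as of this release
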
package/dist/pg.cjs.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"sources":["/home/runner/work/Pongo/Pongo/src/packages/pongo/dist/pg.cjs","../src/storage/postgresql/core/sqlBuilder/index.ts","../src/storage/postgresql/core/sqlBuilder/filter/index.ts","../src/storage/postgresql/core/sqlBuilder/filter/queryOperators.ts","../src/storage/postgresql/core/sqlBuilder/update/index.ts","../src/storage/postgresql/pg/index.ts"],"names":["SQL","JSONSerializer"],"mappings":"AAAA;AACE;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACF,wDAA6B;AAC7B;AACA;ACbA;AACE;AACA;AACA;AACA;AAAA,+CACK;ADeP;AACA;AErBA;AFuBA;AACA;AGxBA;AAGO,IAAM,eAAA,EAAiB,CAC5B,IAAA,EACA,QAAA,EACA,KAAA,EAAA,GACQ;AACR,EAAA,GAAA,CAAI,KAAA,IAAS,MAAA,GAAS,KAAA,IAAS,UAAA,EAAY;AACzC,IAAA,OAAO,sBAAA,CAAuB,IAAA,EAAM,QAAA,EAAU,KAAK,CAAA;AAAA,EACrD;AAEA,EAAA,OAAA,CAAQ,QAAA,EAAU;AAAA,IAChB,KAAK,KAAA,EAAO;AACV,MAAA,MAAM,WAAA,EAAa,qBAAA,CAAe,SAAA;AAAA,QAChC,iBAAA,CAAkB,IAAA,EAAM,KAAK;AAAA,MAC/B,CAAA;AACA,MAAA,MAAM,gBAAA,EAAkB,qBAAA,CAAe,SAAA,CAAU,KAAK,CAAA;AAEtD,MAAA,OAAO,UAAA,CAAA,SAAA,EAAe,UAAU,CAAA,sCAAA,EAAyC,UAAA,CAAI,KAAA,CAAM,IAAI,CAAC,CAAA,YAAA,EAAe,UAAA,CAAI,KAAA,CAAM,eAAe,CAAC,CAAA,IAAA,CAAA;AAAA,IACnI;AAAA,IACA,KAAK,KAAA;AAAA,IACL,KAAK,MAAA;AAAA,IACL,KAAK,KAAA;AAAA,IACL,KAAK,MAAA;AAAA,IACL,KAAK,KAAA,EAAO;AACV,MAAA,MAAM,SAAA,EAAW,UAAA,CAAI,KAAA,CAAM,IAAA,CAAK,KAAA,CAAM,GAAG,CAAA,CAAE,IAAA,CAAK,GAAG,CAAC,CAAA;AAEpD,MAAA,OAAO,UAAA,CAAA,UAAA,EAAgB,QAAQ,CAAA,EAAA,EAAK,UAAA,CAAI,KAAA,CAAM,6BAAA,CAAY,QAAQ,CAAC,CAAC,CAAA,CAAA,EAAI,KAAK,CAAA,CAAA;AAC/E,IAAA;AACY,IAAA;AACoC,MAAA;AAES,MAAA;AACzD,IAAA;AACa,IAAA;AACmC,MAAA;AAEa,MAAA;AAC7D,IAAA;AACmB,IAAA;AAEd,MAAA;AAEgD,QAAA;AAErC,MAAA;AAC6D,MAAA;AAC7E,IAAA;AACa,IAAA;AACuB,MAAA;AACH,QAAA;AAC/B,MAAA;AAC+B,MAAA;AACjC,IAAA;AACc,IAAA;AACkC,MAAA;AAEc,MAAA;AAC9D,IAAA;AACA,IAAA;AACqD,MAAA;AACvD,EAAA;AACF;AAMU;AACU,EAAA;AACX,IAAA;AACyC,MAAA;AACzC,IAAA;AACA,IAAA;AACA,IAAA;AACA,IAAA;AACA,IAAA;AAC2E,MAAA;AAC3E,IAAA;AACuD,MAAA;AACvD,IAAA;AAC2D,MAAA;AAChE,IAAA;AACqD,MAAA;AACvD,EAAA;AACF;AASkB;AHFkE;AACA;AEnFxE;AAGN;AACqB,EAAA;AAGjB,IAAA;AACN,EAAA;AACO,EAAA;AACT;AAKQ;AAC8B,EAAA;AAE3B,EAAA;AACY,IAAA;AAEuB,MAAA;AAE5C,IAAA;AACO,IAAA;AACT,EAAA;AACF;AAGwD;AFwE4B;AACA;AIjHhD;AAWZ;AACiB,EAAA;AACvB,IAAA;AACL,MAAA;AAC2C,QAAA;AAC3C,MAAA;AAC6C,QAAA;AAC7C,MAAA;AAC2C,QAAA;AAC3C,MAAA;AAC4C,QAAA;AACjD,MAAA;AACS,QAAA;AACX,IAAA;AACF,EAAA;AACAA,EAAAA;AACF;AAG+BC;AAMD;AAOtB;AACwC,EAAA;AAGxB,IAAA;AAExB,EAAA;AACO,EAAA;AACT;AAKU;AACyC,EAAA;AACS,IAAA;AACoB,IAAA;AAC9E,EAAA;AACO,EAAA;AACT;AJqFoF;AACA;AC9HlFD;AAC6D,+BAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,GAAA;AAWgB;AACX,EAAA;AACjC,IAAA;AAChC,EAAA;AACH;AAIiC;AAC6B,EAAA;AACY,EAAA;AAClB,IAAA;AAChC,IAAA;AACiB,IAAA;AAE9BA,IAAAA;AACuC,kBAAA;AACL,cAAA;AAC3C,EAAA;AAC4E,EAAA;AACvD,IAAA;AACP,MAAA;AAEmD,QAAA;AAC7D,MAAA;AACA,MAAA;AACF,IAAA;AAEOA,IAAAA;AACwE,kBAAA;AAAM;AAAA,oBAAA;AAGvF,EAAA;AAKU,EAAA;AAC6D,IAAA;AAGjD,IAAA;AAGoD,IAAA;AACJ,IAAA;AAE7DA,IAAAA;AAAA;AAAA;AAGwD,aAAA;AAAA;AAAA;AAAA;AAIpB,eAAA;AAAA;AAE+B,iBAAA;AAAe;AAAA;AAGxB,cAAA;AACK,kBAAA;AAAe;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,oCAAA;AAUvF,EAAA;AAKU,EAAA;AAC6D,IAAA;AAGjD,IAAA;AAGoD,IAAA;AAEjEA,IAAAA;AAAA;AAAA;AAGwD,aAAA;AAAA;AAAA;AAAA;AAIpB,eAAA;AAAA;AAEM,iBAAA;AAA+D;AAAA;AAG/C,cAAA;AACK,kBAAA;AAAe;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,oCAAA;AAUvF,EAAA;AAIU,EAAA;AACgE,IAAA;AACJ,IAAA;AAE7DA,IAAAA;AACkC,aAAA;AAAA;AAEjB,eAAA;AAAA;AAEF,MAAA;AACxB,EAAA;AAIU,EAAA;AAC6D,IAAA;AAGjD,IAAA;AAGoD,IAAA;AAEjEA,IAAAA;AAAA;AAAA;AAGwD,aAAA;AAAA;AAAA;AAAA;AAIf,oBAAA;AAAA;AAEiB,cAAA;AACnB,kBAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,oCAAA;AAShD,EAAA;AACsD,EAAA;AACoB,IAAA;AAEK,IAAA;AAC/E,EAAA;
AACmD,EAAA;AACuB,IAAA;AAEF,IAAA;AACxE,EAAA;AACuE,EAAA;AACG,IAAA;AAClD,IAAA;AAE4C,IAAA;AAErC,IAAA;AAET,IAAA;AACoB,MAAA;AACxC,IAAA;AAEmB,IAAA;AACqB,MAAA;AACxC,IAAA;AAEmC,IAAA;AACrC,EAAA;AAC0D,EAAA;AAG/B,IAAA;AACoD,IAAA;AAC/E,EAAA;AAEkD,EAAA;AAEjB,EAAA;AACnC;AAKU;AD4E0E;AACA;AKpT9D;AACtB;AACsB;AACpB;AACA;AAEK;AACQ;AA0DX;AACU,EAAA;AACkB,EAAA;AAGO,IAAA;AAEd,IAAA;AAChB,MAAA;AACS,MAAA;AACgB,QAAA;AAClB,QAAA;AACG,QAAA;AACZ,MAAA;AAC6C,MAAA;AAChC,QAAA;AAEqB,QAAA;AACjB,UAAA;AACA,UAAA;AACkB,UAAA;AAC+B,YAAA;AAC7D,UAAA;AAC0C,UAAA;AAC3C,QAAA;AAE4D,QAAA;AAChE,MAAA;AACD,MAAA;AACD,IAAA;AACH,EAAA;AACuC,EAAA;AAEsC,IAAA;AAE7E,EAAA;AACyB,EAAA;AAC3B;AAEyC;AACX,EAAA;AAC1B,IAAA;AACA,IAAA;AACF,EAAA;AACF;AAEoB;ALmPgE;AACA;AACA;AACA;AACA;AACA;AACA","file":"/home/runner/work/Pongo/Pongo/src/packages/pongo/dist/pg.cjs","sourcesContent":[null,"import {\n isSQL,\n JSONSerializer,\n SQL,\n sqlMigration,\n} from '@event-driven-io/dumbo';\nimport {\n expectedVersionValue,\n type DeleteOneOptions,\n type FindOptions,\n type OptionalUnlessRequiredIdAndVersion,\n type PongoCollectionSQLBuilder,\n type PongoFilter,\n type PongoUpdate,\n type ReplaceOneOptions,\n type UpdateOneOptions,\n type WithoutId,\n} from '../../../../core';\nimport { constructFilterQuery } from './filter';\nimport { buildUpdateQuery } from './update';\n\nconst createCollection = (collectionName: string): SQL =>\n SQL`\n CREATE TABLE IF NOT EXISTS ${SQL.identifier(collectionName)} (\n _id TEXT PRIMARY KEY, \n data JSONB NOT NULL, \n metadata JSONB NOT NULL DEFAULT '{}',\n _version BIGINT NOT NULL DEFAULT 1,\n _partition TEXT NOT NULL DEFAULT 'png_global',\n _archived BOOLEAN NOT NULL DEFAULT FALSE,\n _created TIMESTAMPTZ NOT NULL DEFAULT now(),\n _updated TIMESTAMPTZ NOT NULL DEFAULT now()\n )`;\n\nexport const pongoCollectionPostgreSQLMigrations = (collectionName: string) => [\n sqlMigration(`pongoCollection:${collectionName}:001:createtable`, [\n createCollection(collectionName),\n ]),\n];\n\nexport const postgresSQLBuilder = (\n collectionName: string,\n): PongoCollectionSQLBuilder => ({\n createCollection: (): SQL => createCollection(collectionName),\n insertOne: <T>(document: OptionalUnlessRequiredIdAndVersion<T>): SQL => {\n const serialized = JSONSerializer.serialize(document);\n const id = document._id;\n const version = document._version ?? 1n;\n\n return SQL`\n INSERT INTO ${SQL.identifier(collectionName)} (_id, data, _version) \n VALUES (${id}, ${serialized}, ${version}) ON CONFLICT(_id) DO NOTHING;`;\n },\n insertMany: <T>(documents: OptionalUnlessRequiredIdAndVersion<T>[]): SQL => {\n const values = SQL.merge(\n documents.map(\n (doc) =>\n SQL`(${doc._id}, ${JSONSerializer.serialize(doc)}, ${doc._version ?? 1n})`,\n ),\n ',',\n );\n\n return SQL`\n INSERT INTO ${SQL.identifier(collectionName)} (_id, data, _version) VALUES ${values}\n ON CONFLICT(_id) DO NOTHING\n RETURNING _id;`;\n },\n updateOne: <T>(\n filter: PongoFilter<T> | SQL,\n update: PongoUpdate<T> | SQL,\n options?: UpdateOneOptions,\n ): SQL => {\n const expectedVersion = expectedVersionValue(options?.expectedVersion);\n const expectedVersionUpdate =\n expectedVersion != null\n ? SQL`AND ${SQL.identifier(collectionName)}._version = ${expectedVersion}`\n : SQL``;\n\n const filterQuery = isSQL(filter) ? filter : constructFilterQuery(filter);\n const updateQuery = isSQL(update) ? 
update : buildUpdateQuery(update);\n\n return SQL`\n WITH existing AS (\n SELECT _id, _version as current_version\n FROM ${SQL.identifier(collectionName)} ${where(filterQuery)}\n LIMIT 1\n ),\n updated AS (\n UPDATE ${SQL.identifier(collectionName)} \n SET \n data = ${updateQuery} || jsonb_build_object('_id', ${SQL.identifier(collectionName)}._id) || jsonb_build_object('_version', (_version + 1)::text),\n _version = _version + 1\n FROM existing \n WHERE ${SQL.identifier(collectionName)}._id = existing._id ${expectedVersionUpdate}\n RETURNING ${SQL.identifier(collectionName)}._id, ${SQL.identifier(collectionName)}._version\n )\n SELECT \n existing._id,\n COALESCE(updated._version, existing.current_version) AS version,\n COUNT(existing._id) over() AS matched,\n COUNT(updated._id) over() AS modified\n FROM existing\n LEFT JOIN updated \n ON existing._id = updated._id;`;\n },\n replaceOne: <T>(\n filter: PongoFilter<T> | SQL,\n document: WithoutId<T>,\n options?: ReplaceOneOptions,\n ): SQL => {\n const expectedVersion = expectedVersionValue(options?.expectedVersion);\n const expectedVersionUpdate =\n expectedVersion != null\n ? SQL`AND ${SQL.identifier(collectionName)}._version = ${expectedVersion}`\n : SQL``;\n\n const filterQuery = isSQL(filter) ? filter : constructFilterQuery(filter);\n\n return SQL`\n WITH existing AS (\n SELECT _id, _version as current_version\n FROM ${SQL.identifier(collectionName)} ${where(filterQuery)}\n LIMIT 1\n ),\n updated AS (\n UPDATE ${SQL.identifier(collectionName)} \n SET \n data = ${JSONSerializer.serialize(document)} || jsonb_build_object('_id', ${SQL.identifier(collectionName)}._id) || jsonb_build_object('_version', (_version + 1)::text),\n _version = _version + 1\n FROM existing \n WHERE ${SQL.identifier(collectionName)}._id = existing._id ${expectedVersionUpdate}\n RETURNING ${SQL.identifier(collectionName)}._id, ${SQL.identifier(collectionName)}._version\n )\n SELECT \n existing._id,\n COALESCE(updated._version, existing.current_version) AS version,\n COUNT(existing._id) over() AS matched,\n COUNT(updated._id) over() AS modified\n FROM existing\n LEFT JOIN updated \n ON existing._id = updated._id;`;\n },\n updateMany: <T>(\n filter: PongoFilter<T> | SQL,\n update: PongoUpdate<T> | SQL,\n ): SQL => {\n const filterQuery = isSQL(filter) ? filter : constructFilterQuery(filter);\n const updateQuery = isSQL(update) ? update : buildUpdateQuery(update);\n\n return SQL`\n UPDATE ${SQL.identifier(collectionName)} \n SET \n data = ${updateQuery} || jsonb_build_object('_version', (_version + 1)::text),\n _version = _version + 1\n ${where(filterQuery)};`;\n },\n deleteOne: <T>(\n filter: PongoFilter<T> | SQL,\n options?: DeleteOneOptions,\n ): SQL => {\n const expectedVersion = expectedVersionValue(options?.expectedVersion);\n const expectedVersionUpdate =\n expectedVersion != null\n ? SQL`AND ${SQL.identifier(collectionName)}._version = ${expectedVersion}`\n : SQL``;\n\n const filterQuery = isSQL(filter) ? 
filter : constructFilterQuery(filter);\n\n return SQL`\n WITH existing AS (\n SELECT _id\n FROM ${SQL.identifier(collectionName)} ${where(filterQuery)}\n LIMIT 1\n ),\n deleted AS (\n DELETE FROM ${SQL.identifier(collectionName)}\n USING existing\n WHERE ${SQL.identifier(collectionName)}._id = existing._id ${expectedVersionUpdate}\n RETURNING ${SQL.identifier(collectionName)}._id\n )\n SELECT \n existing._id,\n COUNT(existing._id) over() AS matched,\n COUNT(deleted._id) over() AS deleted\n FROM existing\n LEFT JOIN deleted \n ON existing._id = deleted._id;`;\n },\n deleteMany: <T>(filter: PongoFilter<T> | SQL): SQL => {\n const filterQuery = isSQL(filter) ? filter : constructFilterQuery(filter);\n\n return SQL`DELETE FROM ${SQL.identifier(collectionName)} ${where(filterQuery)}`;\n },\n findOne: <T>(filter: PongoFilter<T> | SQL): SQL => {\n const filterQuery = isSQL(filter) ? filter : constructFilterQuery(filter);\n\n return SQL`SELECT data FROM ${SQL.identifier(collectionName)} ${where(filterQuery)} LIMIT 1;`;\n },\n find: <T>(filter: PongoFilter<T> | SQL, options?: FindOptions): SQL => {\n const filterQuery = isSQL(filter) ? filter : constructFilterQuery(filter);\n const query: SQL[] = [];\n\n query.push(SQL`SELECT data FROM ${SQL.identifier(collectionName)}`);\n\n query.push(where(filterQuery));\n\n if (options?.limit) {\n query.push(SQL`LIMIT ${options.limit}`);\n }\n\n if (options?.skip) {\n query.push(SQL`OFFSET ${options.skip}`);\n }\n\n return SQL.merge([...query, SQL`;`]);\n },\n countDocuments: <T>(filter: PongoFilter<T> | SQL): SQL => {\n const filterQuery = SQL.check.isSQL(filter)\n ? filter\n : constructFilterQuery(filter);\n return SQL`SELECT COUNT(1) as count FROM ${SQL.identifier(collectionName)} ${where(filterQuery)};`;\n },\n rename: (newName: string): SQL =>\n SQL`ALTER TABLE ${SQL.identifier(collectionName)} RENAME TO ${SQL.identifier(newName)};`,\n drop: (targetName: string = collectionName): SQL =>\n SQL`DROP TABLE IF EXISTS ${SQL.identifier(targetName)}`,\n});\n\nconst where = (filterQuery: SQL): SQL =>\n SQL.check.isEmpty(filterQuery)\n ? SQL.EMPTY\n : SQL.merge([SQL`WHERE `, filterQuery]);\n","import { SQL } from '@event-driven-io/dumbo';\nimport {\n hasOperators,\n objectEntries,\n QueryOperators,\n type PongoFilter,\n} from '../../../../../core';\nimport { handleOperator } from './queryOperators';\n\nexport * from './queryOperators';\n\nconst AND = 'AND';\n\nexport const constructFilterQuery = <T>(filter: PongoFilter<T>): SQL =>\n SQL.merge(\n Object.entries(filter).map(([key, value]) =>\n isRecord(value)\n ? constructComplexFilterQuery(key, value)\n : handleOperator(key, '$eq', value),\n ),\n ` ${AND} `,\n );\n\nconst constructComplexFilterQuery = (\n key: string,\n value: Record<string, unknown>,\n): SQL => {\n const isEquality = !hasOperators(value);\n\n return SQL.merge(\n objectEntries(value).map(([nestedKey, val]) =>\n isEquality\n ? 
handleOperator(`${key}.${nestedKey}`, QueryOperators.$eq, val)\n : handleOperator(key, nestedKey, val),\n ),\n ` ${AND} `,\n );\n};\n\nconst isRecord = (value: unknown): value is Record<string, unknown> =>\n value !== null && typeof value === 'object' && !Array.isArray(value);\n","import { JSONSerializer, SQL } from '@event-driven-io/dumbo';\nimport { objectEntries, OperatorMap } from '../../../../../core';\n\nexport const handleOperator = (\n path: string,\n operator: string,\n value: unknown,\n): SQL => {\n if (path === '_id' || path === '_version') {\n return handleMetadataOperator(path, operator, value);\n }\n\n switch (operator) {\n case '$eq': {\n const nestedPath = JSONSerializer.serialize(\n buildNestedObject(path, value),\n );\n const serializedValue = JSONSerializer.serialize(value);\n\n return SQL`(data @> ${nestedPath}::jsonb OR jsonb_path_exists(data, '$.${SQL.plain(path)}[*] ? (@ == ${SQL.plain(serializedValue)})'))`;\n }\n case '$gt':\n case '$gte':\n case '$lt':\n case '$lte':\n case '$ne': {\n const jsonPath = SQL.plain(path.split('.').join(','));\n\n return SQL`data ->> '${jsonPath}' ${SQL.plain(OperatorMap[operator])} ${value}`;\n }\n case '$in': {\n const jsonPath = `{${path.split('.').join(',')}}`;\n\n return SQL`data #>> ${jsonPath} IN ${value as unknown[]}`;\n }\n case '$nin': {\n const jsonPath = `{${path.split('.').join(',')}}`;\n\n return SQL`data #>> ${jsonPath} NOT IN ${value as unknown[]}`;\n }\n case '$elemMatch': {\n const subQuery = objectEntries(value as Record<string, unknown>)\n .map(\n ([subKey, subValue]) =>\n `@.\"${subKey}\" == ${JSONSerializer.serialize(subValue)}`,\n )\n .join(' && ');\n return SQL`jsonb_path_exists(data, '$.${SQL.plain(path)}[*] ? (${SQL.plain(subQuery)})')`;\n }\n case '$all': {\n const nestedPath = JSONSerializer.serialize(\n buildNestedObject(path, value),\n );\n return SQL`data @> ${nestedPath}::jsonb`;\n }\n case '$size': {\n const jsonPath = `{${path.split('.').join(',')}}`;\n\n return SQL`jsonb_array_length(data #> ${jsonPath}) = ${value}`;\n }\n default:\n throw new Error(`Unsupported operator: ${operator}`);\n }\n};\n\nconst handleMetadataOperator = (\n fieldName: string,\n operator: string,\n value: unknown,\n): SQL => {\n switch (operator) {\n case '$eq':\n return SQL`${SQL.plain(fieldName)} = ${value}`;\n case '$gt':\n case '$gte':\n case '$lt':\n case '$lte':\n case '$ne':\n return SQL`${SQL.plain(fieldName)} ${SQL.plain(OperatorMap[operator])} ${value}`;\n case '$in':\n return SQL`${SQL.plain(fieldName)} IN ${value as unknown[]}`;\n case '$nin':\n return SQL`${SQL.plain(fieldName)} NOT IN ${value as unknown[]}`;\n default:\n throw new Error(`Unsupported operator: ${operator}`);\n }\n};\n\nconst buildNestedObject = (\n path: string,\n value: unknown,\n): Record<string, unknown> =>\n path\n .split('.')\n .reverse()\n .reduce((acc, key) => ({ [key]: acc }), value as Record<string, unknown>);\n","import { JSONSerializer, SQL } from '@event-driven-io/dumbo';\nimport {\n objectEntries,\n type $inc,\n type $push,\n type $set,\n type $unset,\n type PongoUpdate,\n} from '../../../../../core';\n\nexport const buildUpdateQuery = <T>(update: PongoUpdate<T>): SQL =>\n objectEntries(update).reduce(\n (currentUpdateQuery, [op, value]) => {\n switch (op) {\n case '$set':\n return buildSetQuery(value, currentUpdateQuery);\n case '$unset':\n return buildUnsetQuery(value, currentUpdateQuery);\n case '$inc':\n return buildIncQuery(value, currentUpdateQuery);\n case '$push':\n return buildPushQuery(value, currentUpdateQuery);\n default:\n 
return currentUpdateQuery;\n }\n },\n SQL`data`,\n );\n\nexport const buildSetQuery = <T>(set: $set<T>, currentUpdateQuery: SQL): SQL =>\n SQL`${currentUpdateQuery} || ${JSONSerializer.serialize(set)}::jsonb`;\n\nexport const buildUnsetQuery = <T>(\n unset: $unset<T>,\n currentUpdateQuery: SQL,\n): SQL =>\n SQL`${currentUpdateQuery} - ${Object.keys(unset)\n .map((k) => `{${k}}`)\n .join(', ')}`;\n\nexport const buildIncQuery = <T>(\n inc: $inc<T>,\n currentUpdateQuery: SQL,\n): SQL => {\n for (const [key, value] of Object.entries(inc)) {\n currentUpdateQuery =\n typeof value === 'bigint'\n ? SQL`jsonb_set(${currentUpdateQuery}, '{${SQL.plain(key)}}', to_jsonb((COALESCE((data->>'${SQL.plain(key)}')::BIGINT, 0) + ${value})::TEXT), true)`\n : SQL`jsonb_set(${currentUpdateQuery}, '{${SQL.plain(key)}}', to_jsonb(COALESCE((data->>'${SQL.plain(key)}')::NUMERIC, 0) + ${value}), true)`;\n }\n return currentUpdateQuery;\n};\n\nexport const buildPushQuery = <T>(\n push: $push<T>,\n currentUpdateQuery: SQL,\n): SQL => {\n for (const [key, value] of Object.entries(push)) {\n const serializedValue = JSONSerializer.serialize([value]);\n currentUpdateQuery = SQL`jsonb_set(${currentUpdateQuery}, '{${SQL.plain(key)}}', (coalesce(data->'${SQL.plain(key)}', '[]'::jsonb) || ${serializedValue}::jsonb), true)`;\n }\n return currentUpdateQuery;\n};\n","import { dumbo } from '@event-driven-io/dumbo';\nimport {\n pgDatabaseDriver as dumboDriver,\n getDatabaseNameOrDefault,\n NodePostgresDriverType,\n type NodePostgresConnection,\n} from '@event-driven-io/dumbo/pg';\nimport pg from 'pg';\nimport {\n PongoCollectionSchemaComponent,\n PongoDatabase,\n pongoDatabaseDriverRegistry,\n PongoDatabaseSchemaComponent,\n pongoSchema,\n type PongoDatabaseDriver,\n type PongoDatabaseDriverOptions,\n type PongoDb,\n} from '../../../core';\nimport {\n pongoCollectionPostgreSQLMigrations,\n postgresSQLBuilder,\n} from '../core';\n\nexport type NodePostgresPongoClientOptions =\n | PooledPongoClientOptions\n | NotPooledPongoOptions;\n\nexport type PooledPongoClientOptions =\n | {\n pool: pg.Pool;\n }\n | {\n pooled: true;\n }\n | {\n pool: pg.Pool;\n pooled: true;\n }\n | object;\n\nexport type NotPooledPongoOptions =\n | {\n client: pg.Client;\n }\n | {\n pooled: false;\n }\n | {\n client: pg.Client;\n pooled: false;\n }\n | {\n connection: NodePostgresConnection;\n pooled?: false;\n };\n\ntype NodePostgresDatabaseDriverOptions =\n PongoDatabaseDriverOptions<NodePostgresPongoClientOptions> & {\n databaseName?: string | undefined;\n connectionString: string;\n };\n\nconst pgDatabaseDriver: PongoDatabaseDriver<\n PongoDb<NodePostgresDriverType>,\n NodePostgresDatabaseDriverOptions\n> = {\n driverType: NodePostgresDriverType,\n databaseFactory: (options) => {\n const databaseName =\n options.databaseName ??\n getDatabaseNameOrDefault(options.connectionString);\n\n return PongoDatabase({\n ...options,\n pool: dumbo({\n connectionString: options.connectionString,\n driver: dumboDriver,\n ...options.connectionOptions,\n }),\n schemaComponent: PongoDatabaseSchemaComponent({\n driverType: NodePostgresDriverType,\n collectionFactory: (schema) =>\n PongoCollectionSchemaComponent({\n driverType: NodePostgresDriverType,\n definition: schema,\n migrationsOrSchemaComponents: {\n migrations: pongoCollectionPostgreSQLMigrations(schema.name),\n },\n sqlBuilder: postgresSQLBuilder(schema.name),\n }),\n definition:\n options.schema?.definition ?? 
pongoSchema.db(databaseName, {}),\n }),\n databaseName,\n });\n },\n getDatabaseNameOrDefault: (options) => {\n return (\n options.databaseName ?? getDatabaseNameOrDefault(options.connectionString)\n );\n },\n defaultConnectionString: 'postgresql://localhost:5432/postgres',\n};\n\nexport const usePgDatabaseDriver = () => {\n pongoDatabaseDriverRegistry.register(\n NodePostgresDriverType,\n pgDatabaseDriver,\n );\n};\n\nusePgDatabaseDriver();\n\nexport { pgDatabaseDriver as databaseDriver, pgDatabaseDriver as pgDriver };\n"]}
+
{"version":3,"sources":["/home/runner/work/Pongo/Pongo/src/packages/pongo/dist/pg.cjs","../src/storage/postgresql/core/sqlBuilder/index.ts","../src/storage/postgresql/core/sqlBuilder/filter/index.ts","../src/storage/postgresql/core/sqlBuilder/filter/queryOperators.ts","../src/storage/postgresql/core/sqlBuilder/update/index.ts","../src/storage/postgresql/pg/index.ts"],"names":["SQL","JSONSerializer"],"mappings":"AAAA;AACE;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACF,wDAA6B;AAC7B;AACA;ACbA;AACE;AACA;AACA;AACA;AAAA,+CACK;ADeP;AACA;AErBA;AFuBA;AACA;AGxBA;AAGO,IAAM,eAAA,EAAiB,CAC5B,IAAA,EACA,QAAA,EACA,KAAA,EAAA,GACQ;AACR,EAAA,GAAA,CAAI,KAAA,IAAS,MAAA,GAAS,KAAA,IAAS,UAAA,EAAY;AACzC,IAAA,OAAO,sBAAA,CAAuB,IAAA,EAAM,QAAA,EAAU,KAAK,CAAA;AAAA,EACrD;AAEA,EAAA,OAAA,CAAQ,QAAA,EAAU;AAAA,IAChB,KAAK,KAAA,EAAO;AACV,MAAA,MAAM,WAAA,EAAa,qBAAA,CAAe,SAAA;AAAA,QAChC,iBAAA,CAAkB,IAAA,EAAM,KAAK;AAAA,MAC/B,CAAA;AACA,MAAA,MAAM,gBAAA,EAAkB,qBAAA,CAAe,SAAA,CAAU,KAAK,CAAA;AAEtD,MAAA,OAAO,UAAA,CAAA,SAAA,EAAe,UAAU,CAAA,sCAAA,EAAyC,UAAA,CAAI,KAAA,CAAM,IAAI,CAAC,CAAA,YAAA,EAAe,UAAA,CAAI,KAAA,CAAM,eAAe,CAAC,CAAA,IAAA,CAAA;AAAA,IACnI;AAAA,IACA,KAAK,KAAA;AAAA,IACL,KAAK,MAAA;AAAA,IACL,KAAK,KAAA;AAAA,IACL,KAAK,MAAA;AAAA,IACL,KAAK,KAAA,EAAO;AACV,MAAA,MAAM,SAAA,EAAW,UAAA,CAAI,KAAA,CAAM,IAAA,CAAK,KAAA,CAAM,GAAG,CAAA,CAAE,IAAA,CAAK,GAAG,CAAC,CAAA;AAEpD,MAAA,OAAO,UAAA,CAAA,UAAA,EAAgB,QAAQ,CAAA,EAAA,EAAK,UAAA,CAAI,KAAA,CAAM,6BAAA,CAAY,QAAQ,CAAC,CAAC,CAAA,CAAA,EAAI,KAAK,CAAA,CAAA;AAC/E,IAAA;AACY,IAAA;AACoC,MAAA;AAES,MAAA;AACzD,IAAA;AACa,IAAA;AACmC,MAAA;AAEa,MAAA;AAC7D,IAAA;AACmB,IAAA;AAEd,MAAA;AAEgD,QAAA;AAErC,MAAA;AAC6D,MAAA;AAC7E,IAAA;AACa,IAAA;AACuB,MAAA;AACH,QAAA;AAC/B,MAAA;AAC+B,MAAA;AACjC,IAAA;AACc,IAAA;AACkC,MAAA;AAEc,MAAA;AAC9D,IAAA;AACA,IAAA;AACqD,MAAA;AACvD,EAAA;AACF;AAMU;AACU,EAAA;AACX,IAAA;AACyC,MAAA;AACzC,IAAA;AACA,IAAA;AACA,IAAA;AACA,IAAA;AACA,IAAA;AAC2E,MAAA;AAC3E,IAAA;AACuD,MAAA;AACvD,IAAA;AAC2D,MAAA;AAChE,IAAA;AACqD,MAAA;AACvD,EAAA;AACF;AASkB;AHFkE;AACA;AEnFxE;AAGN;AACqB,EAAA;AAGjB,IAAA;AACN,EAAA;AACO,EAAA;AACT;AAKQ;AAC8B,EAAA;AAE3B,EAAA;AACY,IAAA;AAEuB,MAAA;AAE5C,IAAA;AACO,IAAA;AACT,EAAA;AACF;AAGwD;AFwE4B;AACA;AIjHhD;AAWZ;AACiB,EAAA;AACvB,IAAA;AACL,MAAA;AAC2C,QAAA;AAC3C,MAAA;AAC6C,QAAA;AAC7C,MAAA;AAC2C,QAAA;AAC3C,MAAA;AAC4C,QAAA;AACjD,MAAA;AACS,QAAA;AACX,IAAA;AACF,EAAA;AACAA,EAAAA;AACF;AAG+BC;AAMD;AAOtB;AACwC,EAAA;AAGxB,IAAA;AAExB,EAAA;AACO,EAAA;AACT;AAKU;AACyC,EAAA;AACS,IAAA;AACoB,IAAA;AAC9E,EAAA;AACO,EAAA;AACT;AJqFoF;AACA;AC9HlFD;AAC6D,+BAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,GAAA;AAWgB;AACX,EAAA;AACjC,IAAA;AAChC,EAAA;AACH;AAIiC;AAC6B,EAAA;AACY,EAAA;AAClB,IAAA;AAChC,IAAA;AACiB,IAAA;AAE9BA,IAAAA;AACuC,kBAAA;AACL,cAAA;AAC3C,EAAA;AAC4E,EAAA;AACvD,IAAA;AACP,MAAA;AAEmD,QAAA;AAC7D,MAAA;AACA,MAAA;AACF,IAAA;AAEOA,IAAAA;AACwE,kBAAA;AAAM;AAAA,oBAAA;AAGvF,EAAA;AAKU,EAAA;AAC6D,IAAA;AAGjD,IAAA;AAGoD,IAAA;AACJ,IAAA;AAE7DA,IAAAA;AAAA;AAAA;AAGwD,aAAA;AAAA;AAAA;AAAA;AAIpB,eAAA;AAAA;AAE+B,iBAAA;AAAe;AAAA;AAGxB,cAAA;AACK,kBAAA;AAAe;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,oCAAA;AAUvF,EAAA;AAKU,EAAA;AAC6D,IAAA;AAGjD,IAAA;AAGoD,IAAA;AAEjEA,IAAAA;AAAA;AAAA;AAGwD,aAAA;AAAA;AAAA;AAAA;AAIpB,eAAA;AAAA;AAEM,iBAAA;AAA+D;AAAA;AAG/C,cAAA;AACK,kBAAA;AAAe;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,oCAAA;AAUvF,EAAA;AAIU,EAAA;AACgE,IAAA;AACJ,IAAA;AAE7DA,IAAAA;AACkC,aAAA;AAAA;AAEjB,eAAA;AAAA;AAEF,MAAA;AACxB,EAAA;AAIU,EAAA;AAC6D,IAAA;AAGjD,IAAA;AAGoD,IAAA;AAEjEA,IAAAA;AAAA;AAAA;AAGwD,aAAA;AAAA;AAAA;AAAA;AAIf,oBAAA;AAAA;AAEiB,cAAA;AACnB,kBAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,oCAAA;AAShD,EAAA;AACsD,EAAA;AACoB,IAAA;AAEK,IAAA;AAC/E,EAAA;
AACmD,EAAA;AACuB,IAAA;AAEF,IAAA;AACxE,EAAA;AACuE,EAAA;AACG,IAAA;AAClD,IAAA;AAE4C,IAAA;AAErC,IAAA;AAET,IAAA;AACoB,MAAA;AACxC,IAAA;AAEmB,IAAA;AACqB,MAAA;AACxC,IAAA;AAEmC,IAAA;AACrC,EAAA;AAC0D,EAAA;AAG/B,IAAA;AACoD,IAAA;AAC/E,EAAA;AAEkD,EAAA;AAEjB,EAAA;AACnC;AAKU;AD4E0E;AACA;AKpT9D;AACtB;AACsB;AACpB;AACA;AAEK;AACQ;AA0DX;AACU,EAAA;AACkB,EAAA;AAGO,IAAA;AAEd,IAAA;AAChB,MAAA;AACS,MAAA;AACgB,QAAA;AAClB,QAAA;AACG,QAAA;AACZ,MAAA;AAC6C,MAAA;AAChC,QAAA;AAEqB,QAAA;AACjB,UAAA;AACA,UAAA;AACkB,UAAA;AAC+B,YAAA;AAC7D,UAAA;AAC0C,UAAA;AAC3C,QAAA;AAE4D,QAAA;AAChE,MAAA;AACD,MAAA;AACD,IAAA;AACH,EAAA;AACuC,EAAA;AAEsC,IAAA;AAE7E,EAAA;AACyB,EAAA;AAC3B;AAEyC;AAC4B,EAAA;AACrE;AAEoB;ALmPgE;AACA;AACA;AACA;AACA;AACA;AACA","file":"/home/runner/work/Pongo/Pongo/src/packages/pongo/dist/pg.cjs","sourcesContent":[null,"import {\n isSQL,\n JSONSerializer,\n SQL,\n sqlMigration,\n} from '@event-driven-io/dumbo';\nimport {\n expectedVersionValue,\n type DeleteOneOptions,\n type FindOptions,\n type OptionalUnlessRequiredIdAndVersion,\n type PongoCollectionSQLBuilder,\n type PongoFilter,\n type PongoUpdate,\n type ReplaceOneOptions,\n type UpdateOneOptions,\n type WithoutId,\n} from '../../../../core';\nimport { constructFilterQuery } from './filter';\nimport { buildUpdateQuery } from './update';\n\nconst createCollection = (collectionName: string): SQL =>\n SQL`\n CREATE TABLE IF NOT EXISTS ${SQL.identifier(collectionName)} (\n _id TEXT PRIMARY KEY, \n data JSONB NOT NULL, \n metadata JSONB NOT NULL DEFAULT '{}',\n _version BIGINT NOT NULL DEFAULT 1,\n _partition TEXT NOT NULL DEFAULT 'png_global',\n _archived BOOLEAN NOT NULL DEFAULT FALSE,\n _created TIMESTAMPTZ NOT NULL DEFAULT now(),\n _updated TIMESTAMPTZ NOT NULL DEFAULT now()\n )`;\n\nexport const pongoCollectionPostgreSQLMigrations = (collectionName: string) => [\n sqlMigration(`pongoCollection:${collectionName}:001:createtable`, [\n createCollection(collectionName),\n ]),\n];\n\nexport const postgresSQLBuilder = (\n collectionName: string,\n): PongoCollectionSQLBuilder => ({\n createCollection: (): SQL => createCollection(collectionName),\n insertOne: <T>(document: OptionalUnlessRequiredIdAndVersion<T>): SQL => {\n const serialized = JSONSerializer.serialize(document);\n const id = document._id;\n const version = document._version ?? 1n;\n\n return SQL`\n INSERT INTO ${SQL.identifier(collectionName)} (_id, data, _version) \n VALUES (${id}, ${serialized}, ${version}) ON CONFLICT(_id) DO NOTHING;`;\n },\n insertMany: <T>(documents: OptionalUnlessRequiredIdAndVersion<T>[]): SQL => {\n const values = SQL.merge(\n documents.map(\n (doc) =>\n SQL`(${doc._id}, ${JSONSerializer.serialize(doc)}, ${doc._version ?? 1n})`,\n ),\n ',',\n );\n\n return SQL`\n INSERT INTO ${SQL.identifier(collectionName)} (_id, data, _version) VALUES ${values}\n ON CONFLICT(_id) DO NOTHING\n RETURNING _id;`;\n },\n updateOne: <T>(\n filter: PongoFilter<T> | SQL,\n update: PongoUpdate<T> | SQL,\n options?: UpdateOneOptions,\n ): SQL => {\n const expectedVersion = expectedVersionValue(options?.expectedVersion);\n const expectedVersionUpdate =\n expectedVersion != null\n ? SQL`AND ${SQL.identifier(collectionName)}._version = ${expectedVersion}`\n : SQL``;\n\n const filterQuery = isSQL(filter) ? filter : constructFilterQuery(filter);\n const updateQuery = isSQL(update) ? 
update : buildUpdateQuery(update);\n\n return SQL`\n WITH existing AS (\n SELECT _id, _version as current_version\n FROM ${SQL.identifier(collectionName)} ${where(filterQuery)}\n LIMIT 1\n ),\n updated AS (\n UPDATE ${SQL.identifier(collectionName)} \n SET \n data = ${updateQuery} || jsonb_build_object('_id', ${SQL.identifier(collectionName)}._id) || jsonb_build_object('_version', (_version + 1)::text),\n _version = _version + 1\n FROM existing \n WHERE ${SQL.identifier(collectionName)}._id = existing._id ${expectedVersionUpdate}\n RETURNING ${SQL.identifier(collectionName)}._id, ${SQL.identifier(collectionName)}._version\n )\n SELECT \n existing._id,\n COALESCE(updated._version, existing.current_version) AS version,\n COUNT(existing._id) over() AS matched,\n COUNT(updated._id) over() AS modified\n FROM existing\n LEFT JOIN updated \n ON existing._id = updated._id;`;\n },\n replaceOne: <T>(\n filter: PongoFilter<T> | SQL,\n document: WithoutId<T>,\n options?: ReplaceOneOptions,\n ): SQL => {\n const expectedVersion = expectedVersionValue(options?.expectedVersion);\n const expectedVersionUpdate =\n expectedVersion != null\n ? SQL`AND ${SQL.identifier(collectionName)}._version = ${expectedVersion}`\n : SQL``;\n\n const filterQuery = isSQL(filter) ? filter : constructFilterQuery(filter);\n\n return SQL`\n WITH existing AS (\n SELECT _id, _version as current_version\n FROM ${SQL.identifier(collectionName)} ${where(filterQuery)}\n LIMIT 1\n ),\n updated AS (\n UPDATE ${SQL.identifier(collectionName)} \n SET \n data = ${JSONSerializer.serialize(document)} || jsonb_build_object('_id', ${SQL.identifier(collectionName)}._id) || jsonb_build_object('_version', (_version + 1)::text),\n _version = _version + 1\n FROM existing \n WHERE ${SQL.identifier(collectionName)}._id = existing._id ${expectedVersionUpdate}\n RETURNING ${SQL.identifier(collectionName)}._id, ${SQL.identifier(collectionName)}._version\n )\n SELECT \n existing._id,\n COALESCE(updated._version, existing.current_version) AS version,\n COUNT(existing._id) over() AS matched,\n COUNT(updated._id) over() AS modified\n FROM existing\n LEFT JOIN updated \n ON existing._id = updated._id;`;\n },\n updateMany: <T>(\n filter: PongoFilter<T> | SQL,\n update: PongoUpdate<T> | SQL,\n ): SQL => {\n const filterQuery = isSQL(filter) ? filter : constructFilterQuery(filter);\n const updateQuery = isSQL(update) ? update : buildUpdateQuery(update);\n\n return SQL`\n UPDATE ${SQL.identifier(collectionName)} \n SET \n data = ${updateQuery} || jsonb_build_object('_version', (_version + 1)::text),\n _version = _version + 1\n ${where(filterQuery)};`;\n },\n deleteOne: <T>(\n filter: PongoFilter<T> | SQL,\n options?: DeleteOneOptions,\n ): SQL => {\n const expectedVersion = expectedVersionValue(options?.expectedVersion);\n const expectedVersionUpdate =\n expectedVersion != null\n ? SQL`AND ${SQL.identifier(collectionName)}._version = ${expectedVersion}`\n : SQL``;\n\n const filterQuery = isSQL(filter) ? 
filter : constructFilterQuery(filter);\n\n return SQL`\n WITH existing AS (\n SELECT _id\n FROM ${SQL.identifier(collectionName)} ${where(filterQuery)}\n LIMIT 1\n ),\n deleted AS (\n DELETE FROM ${SQL.identifier(collectionName)}\n USING existing\n WHERE ${SQL.identifier(collectionName)}._id = existing._id ${expectedVersionUpdate}\n RETURNING ${SQL.identifier(collectionName)}._id\n )\n SELECT \n existing._id,\n COUNT(existing._id) over() AS matched,\n COUNT(deleted._id) over() AS deleted\n FROM existing\n LEFT JOIN deleted \n ON existing._id = deleted._id;`;\n },\n deleteMany: <T>(filter: PongoFilter<T> | SQL): SQL => {\n const filterQuery = isSQL(filter) ? filter : constructFilterQuery(filter);\n\n return SQL`DELETE FROM ${SQL.identifier(collectionName)} ${where(filterQuery)}`;\n },\n findOne: <T>(filter: PongoFilter<T> | SQL): SQL => {\n const filterQuery = isSQL(filter) ? filter : constructFilterQuery(filter);\n\n return SQL`SELECT data FROM ${SQL.identifier(collectionName)} ${where(filterQuery)} LIMIT 1;`;\n },\n find: <T>(filter: PongoFilter<T> | SQL, options?: FindOptions): SQL => {\n const filterQuery = isSQL(filter) ? filter : constructFilterQuery(filter);\n const query: SQL[] = [];\n\n query.push(SQL`SELECT data FROM ${SQL.identifier(collectionName)}`);\n\n query.push(where(filterQuery));\n\n if (options?.limit) {\n query.push(SQL`LIMIT ${options.limit}`);\n }\n\n if (options?.skip) {\n query.push(SQL`OFFSET ${options.skip}`);\n }\n\n return SQL.merge([...query, SQL`;`]);\n },\n countDocuments: <T>(filter: PongoFilter<T> | SQL): SQL => {\n const filterQuery = SQL.check.isSQL(filter)\n ? filter\n : constructFilterQuery(filter);\n return SQL`SELECT COUNT(1) as count FROM ${SQL.identifier(collectionName)} ${where(filterQuery)};`;\n },\n rename: (newName: string): SQL =>\n SQL`ALTER TABLE ${SQL.identifier(collectionName)} RENAME TO ${SQL.identifier(newName)};`,\n drop: (targetName: string = collectionName): SQL =>\n SQL`DROP TABLE IF EXISTS ${SQL.identifier(targetName)}`,\n});\n\nconst where = (filterQuery: SQL): SQL =>\n SQL.check.isEmpty(filterQuery)\n ? SQL.EMPTY\n : SQL.merge([SQL`WHERE `, filterQuery]);\n","import { SQL } from '@event-driven-io/dumbo';\nimport {\n hasOperators,\n objectEntries,\n QueryOperators,\n type PongoFilter,\n} from '../../../../../core';\nimport { handleOperator } from './queryOperators';\n\nexport * from './queryOperators';\n\nconst AND = 'AND';\n\nexport const constructFilterQuery = <T>(filter: PongoFilter<T>): SQL =>\n SQL.merge(\n Object.entries(filter).map(([key, value]) =>\n isRecord(value)\n ? constructComplexFilterQuery(key, value)\n : handleOperator(key, '$eq', value),\n ),\n ` ${AND} `,\n );\n\nconst constructComplexFilterQuery = (\n key: string,\n value: Record<string, unknown>,\n): SQL => {\n const isEquality = !hasOperators(value);\n\n return SQL.merge(\n objectEntries(value).map(([nestedKey, val]) =>\n isEquality\n ? 
handleOperator(`${key}.${nestedKey}`, QueryOperators.$eq, val)\n : handleOperator(key, nestedKey, val),\n ),\n ` ${AND} `,\n );\n};\n\nconst isRecord = (value: unknown): value is Record<string, unknown> =>\n value !== null && typeof value === 'object' && !Array.isArray(value);\n","import { JSONSerializer, SQL } from '@event-driven-io/dumbo';\nimport { objectEntries, OperatorMap } from '../../../../../core';\n\nexport const handleOperator = (\n path: string,\n operator: string,\n value: unknown,\n): SQL => {\n if (path === '_id' || path === '_version') {\n return handleMetadataOperator(path, operator, value);\n }\n\n switch (operator) {\n case '$eq': {\n const nestedPath = JSONSerializer.serialize(\n buildNestedObject(path, value),\n );\n const serializedValue = JSONSerializer.serialize(value);\n\n return SQL`(data @> ${nestedPath}::jsonb OR jsonb_path_exists(data, '$.${SQL.plain(path)}[*] ? (@ == ${SQL.plain(serializedValue)})'))`;\n }\n case '$gt':\n case '$gte':\n case '$lt':\n case '$lte':\n case '$ne': {\n const jsonPath = SQL.plain(path.split('.').join(','));\n\n return SQL`data ->> '${jsonPath}' ${SQL.plain(OperatorMap[operator])} ${value}`;\n }\n case '$in': {\n const jsonPath = `{${path.split('.').join(',')}}`;\n\n return SQL`data #>> ${jsonPath} IN ${value as unknown[]}`;\n }\n case '$nin': {\n const jsonPath = `{${path.split('.').join(',')}}`;\n\n return SQL`data #>> ${jsonPath} NOT IN ${value as unknown[]}`;\n }\n case '$elemMatch': {\n const subQuery = objectEntries(value as Record<string, unknown>)\n .map(\n ([subKey, subValue]) =>\n `@.\"${subKey}\" == ${JSONSerializer.serialize(subValue)}`,\n )\n .join(' && ');\n return SQL`jsonb_path_exists(data, '$.${SQL.plain(path)}[*] ? (${SQL.plain(subQuery)})')`;\n }\n case '$all': {\n const nestedPath = JSONSerializer.serialize(\n buildNestedObject(path, value),\n );\n return SQL`data @> ${nestedPath}::jsonb`;\n }\n case '$size': {\n const jsonPath = `{${path.split('.').join(',')}}`;\n\n return SQL`jsonb_array_length(data #> ${jsonPath}) = ${value}`;\n }\n default:\n throw new Error(`Unsupported operator: ${operator}`);\n }\n};\n\nconst handleMetadataOperator = (\n fieldName: string,\n operator: string,\n value: unknown,\n): SQL => {\n switch (operator) {\n case '$eq':\n return SQL`${SQL.plain(fieldName)} = ${value}`;\n case '$gt':\n case '$gte':\n case '$lt':\n case '$lte':\n case '$ne':\n return SQL`${SQL.plain(fieldName)} ${SQL.plain(OperatorMap[operator])} ${value}`;\n case '$in':\n return SQL`${SQL.plain(fieldName)} IN ${value as unknown[]}`;\n case '$nin':\n return SQL`${SQL.plain(fieldName)} NOT IN ${value as unknown[]}`;\n default:\n throw new Error(`Unsupported operator: ${operator}`);\n }\n};\n\nconst buildNestedObject = (\n path: string,\n value: unknown,\n): Record<string, unknown> =>\n path\n .split('.')\n .reverse()\n .reduce((acc, key) => ({ [key]: acc }), value as Record<string, unknown>);\n","import { JSONSerializer, SQL } from '@event-driven-io/dumbo';\nimport {\n objectEntries,\n type $inc,\n type $push,\n type $set,\n type $unset,\n type PongoUpdate,\n} from '../../../../../core';\n\nexport const buildUpdateQuery = <T>(update: PongoUpdate<T>): SQL =>\n objectEntries(update).reduce(\n (currentUpdateQuery, [op, value]) => {\n switch (op) {\n case '$set':\n return buildSetQuery(value, currentUpdateQuery);\n case '$unset':\n return buildUnsetQuery(value, currentUpdateQuery);\n case '$inc':\n return buildIncQuery(value, currentUpdateQuery);\n case '$push':\n return buildPushQuery(value, currentUpdateQuery);\n default:\n 
return currentUpdateQuery;\n }\n },\n SQL`data`,\n );\n\nexport const buildSetQuery = <T>(set: $set<T>, currentUpdateQuery: SQL): SQL =>\n SQL`${currentUpdateQuery} || ${JSONSerializer.serialize(set)}::jsonb`;\n\nexport const buildUnsetQuery = <T>(\n unset: $unset<T>,\n currentUpdateQuery: SQL,\n): SQL =>\n SQL`${currentUpdateQuery} - ${Object.keys(unset)\n .map((k) => `{${k}}`)\n .join(', ')}`;\n\nexport const buildIncQuery = <T>(\n inc: $inc<T>,\n currentUpdateQuery: SQL,\n): SQL => {\n for (const [key, value] of Object.entries(inc)) {\n currentUpdateQuery =\n typeof value === 'bigint'\n ? SQL`jsonb_set(${currentUpdateQuery}, '{${SQL.plain(key)}}', to_jsonb((COALESCE((data->>'${SQL.plain(key)}')::BIGINT, 0) + ${value})::TEXT), true)`\n : SQL`jsonb_set(${currentUpdateQuery}, '{${SQL.plain(key)}}', to_jsonb(COALESCE((data->>'${SQL.plain(key)}')::NUMERIC, 0) + ${value}), true)`;\n }\n return currentUpdateQuery;\n};\n\nexport const buildPushQuery = <T>(\n push: $push<T>,\n currentUpdateQuery: SQL,\n): SQL => {\n for (const [key, value] of Object.entries(push)) {\n const serializedValue = JSONSerializer.serialize([value]);\n currentUpdateQuery = SQL`jsonb_set(${currentUpdateQuery}, '{${SQL.plain(key)}}', (coalesce(data->'${SQL.plain(key)}', '[]'::jsonb) || ${serializedValue}::jsonb), true)`;\n }\n return currentUpdateQuery;\n};\n","import { dumbo } from '@event-driven-io/dumbo';\nimport {\n pgDatabaseDriver as dumboDriver,\n getDatabaseNameOrDefault,\n PgDriverType,\n type PgConnection,\n} from '@event-driven-io/dumbo/pg';\nimport pg from 'pg';\nimport {\n PongoCollectionSchemaComponent,\n PongoDatabase,\n pongoDatabaseDriverRegistry,\n PongoDatabaseSchemaComponent,\n pongoSchema,\n type PongoDatabaseDriver,\n type PongoDatabaseDriverOptions,\n type PongoDb,\n} from '../../../core';\nimport {\n pongoCollectionPostgreSQLMigrations,\n postgresSQLBuilder,\n} from '../core';\n\nexport type PgPongoClientOptions =\n | PooledPongoClientOptions\n | NotPooledPongoOptions;\n\nexport type PooledPongoClientOptions =\n | {\n pool: pg.Pool;\n }\n | {\n pooled: true;\n }\n | {\n pool: pg.Pool;\n pooled: true;\n }\n | object;\n\nexport type NotPooledPongoOptions =\n | {\n client: pg.Client;\n }\n | {\n pooled: false;\n }\n | {\n client: pg.Client;\n pooled: false;\n }\n | {\n connection: PgConnection;\n pooled?: false;\n };\n\ntype PgDatabaseDriverOptions =\n PongoDatabaseDriverOptions<PgPongoClientOptions> & {\n databaseName?: string | undefined;\n connectionString: string;\n };\n\nconst pgDatabaseDriver: PongoDatabaseDriver<\n PongoDb<PgDriverType>,\n PgDatabaseDriverOptions\n> = {\n driverType: PgDriverType,\n databaseFactory: (options) => {\n const databaseName =\n options.databaseName ??\n getDatabaseNameOrDefault(options.connectionString);\n\n return PongoDatabase({\n ...options,\n pool: dumbo({\n connectionString: options.connectionString,\n driver: dumboDriver,\n ...options.connectionOptions,\n }),\n schemaComponent: PongoDatabaseSchemaComponent({\n driverType: PgDriverType,\n collectionFactory: (schema) =>\n PongoCollectionSchemaComponent({\n driverType: PgDriverType,\n definition: schema,\n migrationsOrSchemaComponents: {\n migrations: pongoCollectionPostgreSQLMigrations(schema.name),\n },\n sqlBuilder: postgresSQLBuilder(schema.name),\n }),\n definition:\n options.schema?.definition ?? pongoSchema.db(databaseName, {}),\n }),\n databaseName,\n });\n },\n getDatabaseNameOrDefault: (options) => {\n return (\n options.databaseName ?? 
getDatabaseNameOrDefault(options.connectionString)\n );\n },\n defaultConnectionString: 'postgresql://localhost:5432/postgres',\n};\n\nexport const usePgDatabaseDriver = () => {\n pongoDatabaseDriverRegistry.register(PgDriverType, pgDatabaseDriver);\n};\n\nusePgDatabaseDriver();\n\nexport { pgDatabaseDriver as databaseDriver, pgDatabaseDriver as pgDriver };\n"]}
package/dist/pg.d.cts
CHANGED
@@ -1,12 +1,12 @@
 import * as _event_driven_io_dumbo from '@event-driven-io/dumbo';
 import { r as PongoCollectionSQLBuilder, P as PongoDatabaseDriver, a as PongoDb, b as PongoDatabaseDriverOptions } from './pongoCollectionSchemaComponent-t_e9n2Wc.cjs';
-import { NodePostgresDriverType, NodePostgresConnection } from '@event-driven-io/dumbo/pg';
+import { PgDriverType, PgConnection } from '@event-driven-io/dumbo/pg';
 import pg from 'pg';

 declare const pongoCollectionPostgreSQLMigrations: (collectionName: string) => _event_driven_io_dumbo.SQLMigration[];
 declare const postgresSQLBuilder: (collectionName: string) => PongoCollectionSQLBuilder;

-type NodePostgresPongoClientOptions = PooledPongoClientOptions | NotPooledPongoOptions;
+type PgPongoClientOptions = PooledPongoClientOptions | NotPooledPongoOptions;
 type PooledPongoClientOptions = {
 pool: pg.Pool;
 } | {
@@ -23,14 +23,14 @@ type NotPooledPongoOptions = {
 client: pg.Client;
 pooled: false;
 } | {
-connection: NodePostgresConnection;
+connection: PgConnection;
 pooled?: false;
 };
-type NodePostgresDatabaseDriverOptions = PongoDatabaseDriverOptions<NodePostgresPongoClientOptions> & {
+type PgDatabaseDriverOptions = PongoDatabaseDriverOptions<PgPongoClientOptions> & {
 databaseName?: string | undefined;
 connectionString: string;
 };
-declare const pgDatabaseDriver: PongoDatabaseDriver<PongoDb<NodePostgresDriverType>, NodePostgresDatabaseDriverOptions>;
+declare const pgDatabaseDriver: PongoDatabaseDriver<PongoDb<PgDriverType>, PgDatabaseDriverOptions>;
 declare const usePgDatabaseDriver: () => void;

-export { type NodePostgresPongoClientOptions, type NotPooledPongoOptions, type PooledPongoClientOptions, pgDatabaseDriver as databaseDriver, pgDatabaseDriver as pgDriver, pongoCollectionPostgreSQLMigrations, postgresSQLBuilder, usePgDatabaseDriver };
+export { type NotPooledPongoOptions, type PgPongoClientOptions, type PooledPongoClientOptions, pgDatabaseDriver as databaseDriver, pgDatabaseDriver as pgDriver, pongoCollectionPostgreSQLMigrations, postgresSQLBuilder, usePgDatabaseDriver };
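The renamed option types keep their previous shape; only the NodePostgres* identifiers become Pg*. A minimal sketch of how the declared unions compose, taking the shapes from the pg.d.cts hunks above; the '@event-driven-io/pongo/pg' import path is an assumption, not part of this diff:

// Sketch only; option shapes are taken from the updated pg.d.cts above.
import pg from 'pg';
import type { PgConnection } from '@event-driven-io/dumbo/pg';
import type {
  NotPooledPongoOptions,
  PgPongoClientOptions,
  PooledPongoClientOptions,
} from '@event-driven-io/pongo/pg';

// Pooled variants: an explicit pg.Pool, a bare `pooled: true`, or both.
const pooled: PooledPongoClientOptions = { pool: new pg.Pool(), pooled: true };

// Not-pooled variants: an explicit pg.Client, `pooled: false`, or a raw connection.
const withClient: NotPooledPongoOptions = { client: new pg.Client(), pooled: false };

declare const connection: PgConnection; // was NodePostgresConnection before this release
const withConnection: NotPooledPongoOptions = { connection, pooled: false };

// PgPongoClientOptions (was NodePostgresPongoClientOptions) is the union of both sides.
const options: PgPongoClientOptions = pooled;
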
package/dist/pg.d.ts
CHANGED
@@ -1,12 +1,12 @@
 import * as _event_driven_io_dumbo from '@event-driven-io/dumbo';
 import { r as PongoCollectionSQLBuilder, P as PongoDatabaseDriver, a as PongoDb, b as PongoDatabaseDriverOptions } from './pongoCollectionSchemaComponent-t_e9n2Wc.js';
-import { NodePostgresDriverType, NodePostgresConnection } from '@event-driven-io/dumbo/pg';
+import { PgDriverType, PgConnection } from '@event-driven-io/dumbo/pg';
 import pg from 'pg';

 declare const pongoCollectionPostgreSQLMigrations: (collectionName: string) => _event_driven_io_dumbo.SQLMigration[];
 declare const postgresSQLBuilder: (collectionName: string) => PongoCollectionSQLBuilder;

-type NodePostgresPongoClientOptions = PooledPongoClientOptions | NotPooledPongoOptions;
+type PgPongoClientOptions = PooledPongoClientOptions | NotPooledPongoOptions;
 type PooledPongoClientOptions = {
 pool: pg.Pool;
 } | {
@@ -23,14 +23,14 @@ type NotPooledPongoOptions = {
 client: pg.Client;
 pooled: false;
 } | {
-connection: NodePostgresConnection;
+connection: PgConnection;
 pooled?: false;
 };
-type NodePostgresDatabaseDriverOptions = PongoDatabaseDriverOptions<NodePostgresPongoClientOptions> & {
+type PgDatabaseDriverOptions = PongoDatabaseDriverOptions<PgPongoClientOptions> & {
 databaseName?: string | undefined;
 connectionString: string;
 };
-declare const pgDatabaseDriver: PongoDatabaseDriver<PongoDb<NodePostgresDriverType>, NodePostgresDatabaseDriverOptions>;
+declare const pgDatabaseDriver: PongoDatabaseDriver<PongoDb<PgDriverType>, PgDatabaseDriverOptions>;
 declare const usePgDatabaseDriver: () => void;

-export { type NodePostgresPongoClientOptions, type NotPooledPongoOptions, type PooledPongoClientOptions, pgDatabaseDriver as databaseDriver, pgDatabaseDriver as pgDriver, pongoCollectionPostgreSQLMigrations, postgresSQLBuilder, usePgDatabaseDriver };
+export { type NotPooledPongoOptions, type PgPongoClientOptions, type PooledPongoClientOptions, pgDatabaseDriver as databaseDriver, pgDatabaseDriver as pgDriver, pongoCollectionPostgreSQLMigrations, postgresSQLBuilder, usePgDatabaseDriver };
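Beyond the driver types, the declaration file keeps exporting the PostgreSQL SQL helpers unchanged. A short sketch of the two exported helpers, using only the signatures declared above; the '@event-driven-io/pongo/pg' import path is again an assumption:

// Sketch only; signatures come from pg.d.ts above, the import path is assumed.
import {
  pongoCollectionPostgreSQLMigrations,
  postgresSQLBuilder,
} from '@event-driven-io/pongo/pg';

// One SQLMigration[] per collection (CREATE TABLE IF NOT EXISTS ... as in the source map).
const migrations = pongoCollectionPostgreSQLMigrations('users');

// PongoCollectionSQLBuilder turns filters/updates into dumbo SQL values.
const users = postgresSQLBuilder('users');
const insert = users.insertOne({ _id: 'user-1', name: 'Anna' });
const find = users.find({ name: 'Anna' }, { limit: 10 });
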
package/dist/pg.js
CHANGED
@@ -311,11 +311,11 @@ import { dumbo } from "@event-driven-io/dumbo";
 import {
 pgDatabaseDriver as dumboDriver,
 getDatabaseNameOrDefault,
-NodePostgresDriverType
+PgDriverType
 } from "@event-driven-io/dumbo/pg";
 import "pg";
 var pgDatabaseDriver = {
-driverType: NodePostgresDriverType,
+driverType: PgDriverType,
 databaseFactory: (options) => {
 const databaseName = options.databaseName ?? getDatabaseNameOrDefault(options.connectionString);
 return PongoDatabase({
@@ -326,9 +326,9 @@ var pgDatabaseDriver = {
 ...options.connectionOptions
 }),
 schemaComponent: PongoDatabaseSchemaComponent({
-driverType: NodePostgresDriverType,
+driverType: PgDriverType,
 collectionFactory: (schema) => PongoCollectionSchemaComponent({
-driverType: NodePostgresDriverType,
+driverType: PgDriverType,
 definition: schema,
 migrationsOrSchemaComponents: {
 migrations: pongoCollectionPostgreSQLMigrations(schema.name)
@@ -346,10 +346,7 @@ var pgDatabaseDriver = {
 defaultConnectionString: "postgresql://localhost:5432/postgres"
 };
 var usePgDatabaseDriver = () => {
-pongoDatabaseDriverRegistry.register(
-NodePostgresDriverType,
-pgDatabaseDriver
-);
+pongoDatabaseDriverRegistry.register(PgDriverType, pgDatabaseDriver);
 };
 usePgDatabaseDriver();
 export {
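For code that imported the driver type directly from '@event-driven-io/dumbo/pg', the ESM output implies a one-line rename. A sketch of the before/after imports; whether the old identifiers remain exported by dumbo is not shown in this diff, and the exports below are hypothetical, added only to keep the snippet self-contained:

// Before (0.17.0-beta.8, as recorded in the old source map's sourcesContent):
// import { NodePostgresDriverType, type NodePostgresConnection } from '@event-driven-io/dumbo/pg';

// After (0.17.0-beta.9, matching the import hunk above):
import { PgDriverType, type PgConnection } from '@event-driven-io/dumbo/pg';

export type PgConnectionAlias = PgConnection; // hypothetical alias, just to use the import
export const driverTypeKey = PgDriverType;    // the key now passed to pongoDatabaseDriverRegistry.register
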
package/dist/pg.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"sources":["../src/storage/postgresql/core/sqlBuilder/index.ts","../src/storage/postgresql/core/sqlBuilder/filter/index.ts","../src/storage/postgresql/core/sqlBuilder/filter/queryOperators.ts","../src/storage/postgresql/core/sqlBuilder/update/index.ts","../src/storage/postgresql/pg/index.ts"],"sourcesContent":["import {\n isSQL,\n JSONSerializer,\n SQL,\n sqlMigration,\n} from '@event-driven-io/dumbo';\nimport {\n expectedVersionValue,\n type DeleteOneOptions,\n type FindOptions,\n type OptionalUnlessRequiredIdAndVersion,\n type PongoCollectionSQLBuilder,\n type PongoFilter,\n type PongoUpdate,\n type ReplaceOneOptions,\n type UpdateOneOptions,\n type WithoutId,\n} from '../../../../core';\nimport { constructFilterQuery } from './filter';\nimport { buildUpdateQuery } from './update';\n\nconst createCollection = (collectionName: string): SQL =>\n SQL`\n CREATE TABLE IF NOT EXISTS ${SQL.identifier(collectionName)} (\n _id TEXT PRIMARY KEY, \n data JSONB NOT NULL, \n metadata JSONB NOT NULL DEFAULT '{}',\n _version BIGINT NOT NULL DEFAULT 1,\n _partition TEXT NOT NULL DEFAULT 'png_global',\n _archived BOOLEAN NOT NULL DEFAULT FALSE,\n _created TIMESTAMPTZ NOT NULL DEFAULT now(),\n _updated TIMESTAMPTZ NOT NULL DEFAULT now()\n )`;\n\nexport const pongoCollectionPostgreSQLMigrations = (collectionName: string) => [\n sqlMigration(`pongoCollection:${collectionName}:001:createtable`, [\n createCollection(collectionName),\n ]),\n];\n\nexport const postgresSQLBuilder = (\n collectionName: string,\n): PongoCollectionSQLBuilder => ({\n createCollection: (): SQL => createCollection(collectionName),\n insertOne: <T>(document: OptionalUnlessRequiredIdAndVersion<T>): SQL => {\n const serialized = JSONSerializer.serialize(document);\n const id = document._id;\n const version = document._version ?? 1n;\n\n return SQL`\n INSERT INTO ${SQL.identifier(collectionName)} (_id, data, _version) \n VALUES (${id}, ${serialized}, ${version}) ON CONFLICT(_id) DO NOTHING;`;\n },\n insertMany: <T>(documents: OptionalUnlessRequiredIdAndVersion<T>[]): SQL => {\n const values = SQL.merge(\n documents.map(\n (doc) =>\n SQL`(${doc._id}, ${JSONSerializer.serialize(doc)}, ${doc._version ?? 1n})`,\n ),\n ',',\n );\n\n return SQL`\n INSERT INTO ${SQL.identifier(collectionName)} (_id, data, _version) VALUES ${values}\n ON CONFLICT(_id) DO NOTHING\n RETURNING _id;`;\n },\n updateOne: <T>(\n filter: PongoFilter<T> | SQL,\n update: PongoUpdate<T> | SQL,\n options?: UpdateOneOptions,\n ): SQL => {\n const expectedVersion = expectedVersionValue(options?.expectedVersion);\n const expectedVersionUpdate =\n expectedVersion != null\n ? SQL`AND ${SQL.identifier(collectionName)}._version = ${expectedVersion}`\n : SQL``;\n\n const filterQuery = isSQL(filter) ? filter : constructFilterQuery(filter);\n const updateQuery = isSQL(update) ? 
update : buildUpdateQuery(update);\n\n return SQL`\n WITH existing AS (\n SELECT _id, _version as current_version\n FROM ${SQL.identifier(collectionName)} ${where(filterQuery)}\n LIMIT 1\n ),\n updated AS (\n UPDATE ${SQL.identifier(collectionName)} \n SET \n data = ${updateQuery} || jsonb_build_object('_id', ${SQL.identifier(collectionName)}._id) || jsonb_build_object('_version', (_version + 1)::text),\n _version = _version + 1\n FROM existing \n WHERE ${SQL.identifier(collectionName)}._id = existing._id ${expectedVersionUpdate}\n RETURNING ${SQL.identifier(collectionName)}._id, ${SQL.identifier(collectionName)}._version\n )\n SELECT \n existing._id,\n COALESCE(updated._version, existing.current_version) AS version,\n COUNT(existing._id) over() AS matched,\n COUNT(updated._id) over() AS modified\n FROM existing\n LEFT JOIN updated \n ON existing._id = updated._id;`;\n },\n replaceOne: <T>(\n filter: PongoFilter<T> | SQL,\n document: WithoutId<T>,\n options?: ReplaceOneOptions,\n ): SQL => {\n const expectedVersion = expectedVersionValue(options?.expectedVersion);\n const expectedVersionUpdate =\n expectedVersion != null\n ? SQL`AND ${SQL.identifier(collectionName)}._version = ${expectedVersion}`\n : SQL``;\n\n const filterQuery = isSQL(filter) ? filter : constructFilterQuery(filter);\n\n return SQL`\n WITH existing AS (\n SELECT _id, _version as current_version\n FROM ${SQL.identifier(collectionName)} ${where(filterQuery)}\n LIMIT 1\n ),\n updated AS (\n UPDATE ${SQL.identifier(collectionName)} \n SET \n data = ${JSONSerializer.serialize(document)} || jsonb_build_object('_id', ${SQL.identifier(collectionName)}._id) || jsonb_build_object('_version', (_version + 1)::text),\n _version = _version + 1\n FROM existing \n WHERE ${SQL.identifier(collectionName)}._id = existing._id ${expectedVersionUpdate}\n RETURNING ${SQL.identifier(collectionName)}._id, ${SQL.identifier(collectionName)}._version\n )\n SELECT \n existing._id,\n COALESCE(updated._version, existing.current_version) AS version,\n COUNT(existing._id) over() AS matched,\n COUNT(updated._id) over() AS modified\n FROM existing\n LEFT JOIN updated \n ON existing._id = updated._id;`;\n },\n updateMany: <T>(\n filter: PongoFilter<T> | SQL,\n update: PongoUpdate<T> | SQL,\n ): SQL => {\n const filterQuery = isSQL(filter) ? filter : constructFilterQuery(filter);\n const updateQuery = isSQL(update) ? update : buildUpdateQuery(update);\n\n return SQL`\n UPDATE ${SQL.identifier(collectionName)} \n SET \n data = ${updateQuery} || jsonb_build_object('_version', (_version + 1)::text),\n _version = _version + 1\n ${where(filterQuery)};`;\n },\n deleteOne: <T>(\n filter: PongoFilter<T> | SQL,\n options?: DeleteOneOptions,\n ): SQL => {\n const expectedVersion = expectedVersionValue(options?.expectedVersion);\n const expectedVersionUpdate =\n expectedVersion != null\n ? SQL`AND ${SQL.identifier(collectionName)}._version = ${expectedVersion}`\n : SQL``;\n\n const filterQuery = isSQL(filter) ? 
filter : constructFilterQuery(filter);\n\n return SQL`\n WITH existing AS (\n SELECT _id\n FROM ${SQL.identifier(collectionName)} ${where(filterQuery)}\n LIMIT 1\n ),\n deleted AS (\n DELETE FROM ${SQL.identifier(collectionName)}\n USING existing\n WHERE ${SQL.identifier(collectionName)}._id = existing._id ${expectedVersionUpdate}\n RETURNING ${SQL.identifier(collectionName)}._id\n )\n SELECT \n existing._id,\n COUNT(existing._id) over() AS matched,\n COUNT(deleted._id) over() AS deleted\n FROM existing\n LEFT JOIN deleted \n ON existing._id = deleted._id;`;\n },\n deleteMany: <T>(filter: PongoFilter<T> | SQL): SQL => {\n const filterQuery = isSQL(filter) ? filter : constructFilterQuery(filter);\n\n return SQL`DELETE FROM ${SQL.identifier(collectionName)} ${where(filterQuery)}`;\n },\n findOne: <T>(filter: PongoFilter<T> | SQL): SQL => {\n const filterQuery = isSQL(filter) ? filter : constructFilterQuery(filter);\n\n return SQL`SELECT data FROM ${SQL.identifier(collectionName)} ${where(filterQuery)} LIMIT 1;`;\n },\n find: <T>(filter: PongoFilter<T> | SQL, options?: FindOptions): SQL => {\n const filterQuery = isSQL(filter) ? filter : constructFilterQuery(filter);\n const query: SQL[] = [];\n\n query.push(SQL`SELECT data FROM ${SQL.identifier(collectionName)}`);\n\n query.push(where(filterQuery));\n\n if (options?.limit) {\n query.push(SQL`LIMIT ${options.limit}`);\n }\n\n if (options?.skip) {\n query.push(SQL`OFFSET ${options.skip}`);\n }\n\n return SQL.merge([...query, SQL`;`]);\n },\n countDocuments: <T>(filter: PongoFilter<T> | SQL): SQL => {\n const filterQuery = SQL.check.isSQL(filter)\n ? filter\n : constructFilterQuery(filter);\n return SQL`SELECT COUNT(1) as count FROM ${SQL.identifier(collectionName)} ${where(filterQuery)};`;\n },\n rename: (newName: string): SQL =>\n SQL`ALTER TABLE ${SQL.identifier(collectionName)} RENAME TO ${SQL.identifier(newName)};`,\n drop: (targetName: string = collectionName): SQL =>\n SQL`DROP TABLE IF EXISTS ${SQL.identifier(targetName)}`,\n});\n\nconst where = (filterQuery: SQL): SQL =>\n SQL.check.isEmpty(filterQuery)\n ? SQL.EMPTY\n : SQL.merge([SQL`WHERE `, filterQuery]);\n","import { SQL } from '@event-driven-io/dumbo';\nimport {\n hasOperators,\n objectEntries,\n QueryOperators,\n type PongoFilter,\n} from '../../../../../core';\nimport { handleOperator } from './queryOperators';\n\nexport * from './queryOperators';\n\nconst AND = 'AND';\n\nexport const constructFilterQuery = <T>(filter: PongoFilter<T>): SQL =>\n SQL.merge(\n Object.entries(filter).map(([key, value]) =>\n isRecord(value)\n ? constructComplexFilterQuery(key, value)\n : handleOperator(key, '$eq', value),\n ),\n ` ${AND} `,\n );\n\nconst constructComplexFilterQuery = (\n key: string,\n value: Record<string, unknown>,\n): SQL => {\n const isEquality = !hasOperators(value);\n\n return SQL.merge(\n objectEntries(value).map(([nestedKey, val]) =>\n isEquality\n ? 
handleOperator(`${key}.${nestedKey}`, QueryOperators.$eq, val)\n : handleOperator(key, nestedKey, val),\n ),\n ` ${AND} `,\n );\n};\n\nconst isRecord = (value: unknown): value is Record<string, unknown> =>\n value !== null && typeof value === 'object' && !Array.isArray(value);\n","import { JSONSerializer, SQL } from '@event-driven-io/dumbo';\nimport { objectEntries, OperatorMap } from '../../../../../core';\n\nexport const handleOperator = (\n path: string,\n operator: string,\n value: unknown,\n): SQL => {\n if (path === '_id' || path === '_version') {\n return handleMetadataOperator(path, operator, value);\n }\n\n switch (operator) {\n case '$eq': {\n const nestedPath = JSONSerializer.serialize(\n buildNestedObject(path, value),\n );\n const serializedValue = JSONSerializer.serialize(value);\n\n return SQL`(data @> ${nestedPath}::jsonb OR jsonb_path_exists(data, '$.${SQL.plain(path)}[*] ? (@ == ${SQL.plain(serializedValue)})'))`;\n }\n case '$gt':\n case '$gte':\n case '$lt':\n case '$lte':\n case '$ne': {\n const jsonPath = SQL.plain(path.split('.').join(','));\n\n return SQL`data ->> '${jsonPath}' ${SQL.plain(OperatorMap[operator])} ${value}`;\n }\n case '$in': {\n const jsonPath = `{${path.split('.').join(',')}}`;\n\n return SQL`data #>> ${jsonPath} IN ${value as unknown[]}`;\n }\n case '$nin': {\n const jsonPath = `{${path.split('.').join(',')}}`;\n\n return SQL`data #>> ${jsonPath} NOT IN ${value as unknown[]}`;\n }\n case '$elemMatch': {\n const subQuery = objectEntries(value as Record<string, unknown>)\n .map(\n ([subKey, subValue]) =>\n `@.\"${subKey}\" == ${JSONSerializer.serialize(subValue)}`,\n )\n .join(' && ');\n return SQL`jsonb_path_exists(data, '$.${SQL.plain(path)}[*] ? (${SQL.plain(subQuery)})')`;\n }\n case '$all': {\n const nestedPath = JSONSerializer.serialize(\n buildNestedObject(path, value),\n );\n return SQL`data @> ${nestedPath}::jsonb`;\n }\n case '$size': {\n const jsonPath = `{${path.split('.').join(',')}}`;\n\n return SQL`jsonb_array_length(data #> ${jsonPath}) = ${value}`;\n }\n default:\n throw new Error(`Unsupported operator: ${operator}`);\n }\n};\n\nconst handleMetadataOperator = (\n fieldName: string,\n operator: string,\n value: unknown,\n): SQL => {\n switch (operator) {\n case '$eq':\n return SQL`${SQL.plain(fieldName)} = ${value}`;\n case '$gt':\n case '$gte':\n case '$lt':\n case '$lte':\n case '$ne':\n return SQL`${SQL.plain(fieldName)} ${SQL.plain(OperatorMap[operator])} ${value}`;\n case '$in':\n return SQL`${SQL.plain(fieldName)} IN ${value as unknown[]}`;\n case '$nin':\n return SQL`${SQL.plain(fieldName)} NOT IN ${value as unknown[]}`;\n default:\n throw new Error(`Unsupported operator: ${operator}`);\n }\n};\n\nconst buildNestedObject = (\n path: string,\n value: unknown,\n): Record<string, unknown> =>\n path\n .split('.')\n .reverse()\n .reduce((acc, key) => ({ [key]: acc }), value as Record<string, unknown>);\n","import { JSONSerializer, SQL } from '@event-driven-io/dumbo';\nimport {\n objectEntries,\n type $inc,\n type $push,\n type $set,\n type $unset,\n type PongoUpdate,\n} from '../../../../../core';\n\nexport const buildUpdateQuery = <T>(update: PongoUpdate<T>): SQL =>\n objectEntries(update).reduce(\n (currentUpdateQuery, [op, value]) => {\n switch (op) {\n case '$set':\n return buildSetQuery(value, currentUpdateQuery);\n case '$unset':\n return buildUnsetQuery(value, currentUpdateQuery);\n case '$inc':\n return buildIncQuery(value, currentUpdateQuery);\n case '$push':\n return buildPushQuery(value, currentUpdateQuery);\n default:\n 
return currentUpdateQuery;\n }\n },\n SQL`data`,\n );\n\nexport const buildSetQuery = <T>(set: $set<T>, currentUpdateQuery: SQL): SQL =>\n SQL`${currentUpdateQuery} || ${JSONSerializer.serialize(set)}::jsonb`;\n\nexport const buildUnsetQuery = <T>(\n unset: $unset<T>,\n currentUpdateQuery: SQL,\n): SQL =>\n SQL`${currentUpdateQuery} - ${Object.keys(unset)\n .map((k) => `{${k}}`)\n .join(', ')}`;\n\nexport const buildIncQuery = <T>(\n inc: $inc<T>,\n currentUpdateQuery: SQL,\n): SQL => {\n for (const [key, value] of Object.entries(inc)) {\n currentUpdateQuery =\n typeof value === 'bigint'\n ? SQL`jsonb_set(${currentUpdateQuery}, '{${SQL.plain(key)}}', to_jsonb((COALESCE((data->>'${SQL.plain(key)}')::BIGINT, 0) + ${value})::TEXT), true)`\n : SQL`jsonb_set(${currentUpdateQuery}, '{${SQL.plain(key)}}', to_jsonb(COALESCE((data->>'${SQL.plain(key)}')::NUMERIC, 0) + ${value}), true)`;\n }\n return currentUpdateQuery;\n};\n\nexport const buildPushQuery = <T>(\n push: $push<T>,\n currentUpdateQuery: SQL,\n): SQL => {\n for (const [key, value] of Object.entries(push)) {\n const serializedValue = JSONSerializer.serialize([value]);\n currentUpdateQuery = SQL`jsonb_set(${currentUpdateQuery}, '{${SQL.plain(key)}}', (coalesce(data->'${SQL.plain(key)}', '[]'::jsonb) || ${serializedValue}::jsonb), true)`;\n }\n return currentUpdateQuery;\n};\n","import { dumbo } from '@event-driven-io/dumbo';\nimport {\n pgDatabaseDriver as dumboDriver,\n getDatabaseNameOrDefault,\n NodePostgresDriverType,\n type NodePostgresConnection,\n} from '@event-driven-io/dumbo/pg';\nimport pg from 'pg';\nimport {\n PongoCollectionSchemaComponent,\n PongoDatabase,\n pongoDatabaseDriverRegistry,\n PongoDatabaseSchemaComponent,\n pongoSchema,\n type PongoDatabaseDriver,\n type PongoDatabaseDriverOptions,\n type PongoDb,\n} from '../../../core';\nimport {\n pongoCollectionPostgreSQLMigrations,\n postgresSQLBuilder,\n} from '../core';\n\nexport type NodePostgresPongoClientOptions =\n | PooledPongoClientOptions\n | NotPooledPongoOptions;\n\nexport type PooledPongoClientOptions =\n | {\n pool: pg.Pool;\n }\n | {\n pooled: true;\n }\n | {\n pool: pg.Pool;\n pooled: true;\n }\n | object;\n\nexport type NotPooledPongoOptions =\n | {\n client: pg.Client;\n }\n | {\n pooled: false;\n }\n | {\n client: pg.Client;\n pooled: false;\n }\n | {\n connection: NodePostgresConnection;\n pooled?: false;\n };\n\ntype NodePostgresDatabaseDriverOptions =\n PongoDatabaseDriverOptions<NodePostgresPongoClientOptions> & {\n databaseName?: string | undefined;\n connectionString: string;\n };\n\nconst pgDatabaseDriver: PongoDatabaseDriver<\n PongoDb<NodePostgresDriverType>,\n NodePostgresDatabaseDriverOptions\n> = {\n driverType: NodePostgresDriverType,\n databaseFactory: (options) => {\n const databaseName =\n options.databaseName ??\n getDatabaseNameOrDefault(options.connectionString);\n\n return PongoDatabase({\n ...options,\n pool: dumbo({\n connectionString: options.connectionString,\n driver: dumboDriver,\n ...options.connectionOptions,\n }),\n schemaComponent: PongoDatabaseSchemaComponent({\n driverType: NodePostgresDriverType,\n collectionFactory: (schema) =>\n PongoCollectionSchemaComponent({\n driverType: NodePostgresDriverType,\n definition: schema,\n migrationsOrSchemaComponents: {\n migrations: pongoCollectionPostgreSQLMigrations(schema.name),\n },\n sqlBuilder: postgresSQLBuilder(schema.name),\n }),\n definition:\n options.schema?.definition ?? 
pongoSchema.db(databaseName, {}),\n }),\n databaseName,\n });\n },\n getDatabaseNameOrDefault: (options) => {\n return (\n options.databaseName ?? getDatabaseNameOrDefault(options.connectionString)\n );\n },\n defaultConnectionString: 'postgresql://localhost:5432/postgres',\n};\n\nexport const usePgDatabaseDriver = () => {\n pongoDatabaseDriverRegistry.register(\n NodePostgresDriverType,\n pgDatabaseDriver,\n );\n};\n\nusePgDatabaseDriver();\n\nexport { pgDatabaseDriver as databaseDriver, pgDatabaseDriver as pgDriver };\n"],"mappings":";;;;;;;;;;;;;;AAAA;AAAA,EACE;AAAA,EACA,kBAAAA;AAAA,EACA,OAAAC;AAAA,EACA;AAAA,OACK;;;ACLP,SAAS,OAAAC,YAAW;;;ACApB,SAAS,gBAAgB,WAAW;AAG7B,IAAM,iBAAiB,CAC5B,MACA,UACA,UACQ;AACR,MAAI,SAAS,SAAS,SAAS,YAAY;AACzC,WAAO,uBAAuB,MAAM,UAAU,KAAK;AAAA,EACrD;AAEA,UAAQ,UAAU;AAAA,IAChB,KAAK,OAAO;AACV,YAAM,aAAa,eAAe;AAAA,QAChC,kBAAkB,MAAM,KAAK;AAAA,MAC/B;AACA,YAAM,kBAAkB,eAAe,UAAU,KAAK;AAEtD,aAAO,eAAe,UAAU,yCAAyC,IAAI,MAAM,IAAI,CAAC,eAAe,IAAI,MAAM,eAAe,CAAC;AAAA,IACnI;AAAA,IACA,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK,OAAO;AACV,YAAM,WAAW,IAAI,MAAM,KAAK,MAAM,GAAG,EAAE,KAAK,GAAG,CAAC;AAEpD,aAAO,gBAAgB,QAAQ,KAAK,IAAI,MAAM,YAAY,QAAQ,CAAC,CAAC,IAAI,KAAK;AAAA,IAC/E;AAAA,IACA,KAAK,OAAO;AACV,YAAM,WAAW,IAAI,KAAK,MAAM,GAAG,EAAE,KAAK,GAAG,CAAC;AAE9C,aAAO,eAAe,QAAQ,OAAO,KAAkB;AAAA,IACzD;AAAA,IACA,KAAK,QAAQ;AACX,YAAM,WAAW,IAAI,KAAK,MAAM,GAAG,EAAE,KAAK,GAAG,CAAC;AAE9C,aAAO,eAAe,QAAQ,WAAW,KAAkB;AAAA,IAC7D;AAAA,IACA,KAAK,cAAc;AACjB,YAAM,WAAW,cAAc,KAAgC,EAC5D;AAAA,QACC,CAAC,CAAC,QAAQ,QAAQ,MAChB,MAAM,MAAM,QAAQ,eAAe,UAAU,QAAQ,CAAC;AAAA,MAC1D,EACC,KAAK,MAAM;AACd,aAAO,iCAAiC,IAAI,MAAM,IAAI,CAAC,UAAU,IAAI,MAAM,QAAQ,CAAC;AAAA,IACtF;AAAA,IACA,KAAK,QAAQ;AACX,YAAM,aAAa,eAAe;AAAA,QAChC,kBAAkB,MAAM,KAAK;AAAA,MAC/B;AACA,aAAO,cAAc,UAAU;AAAA,IACjC;AAAA,IACA,KAAK,SAAS;AACZ,YAAM,WAAW,IAAI,KAAK,MAAM,GAAG,EAAE,KAAK,GAAG,CAAC;AAE9C,aAAO,iCAAiC,QAAQ,OAAO,KAAK;AAAA,IAC9D;AAAA,IACA;AACE,YAAM,IAAI,MAAM,yBAAyB,QAAQ,EAAE;AAAA,EACvD;AACF;AAEA,IAAM,yBAAyB,CAC7B,WACA,UACA,UACQ;AACR,UAAQ,UAAU;AAAA,IAChB,KAAK;AACH,aAAO,MAAM,IAAI,MAAM,SAAS,CAAC,MAAM,KAAK;AAAA,IAC9C,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AACH,aAAO,MAAM,IAAI,MAAM,SAAS,CAAC,IAAI,IAAI,MAAM,YAAY,QAAQ,CAAC,CAAC,IAAI,KAAK;AAAA,IAChF,KAAK;AACH,aAAO,MAAM,IAAI,MAAM,SAAS,CAAC,OAAO,KAAkB;AAAA,IAC5D,KAAK;AACH,aAAO,MAAM,IAAI,MAAM,SAAS,CAAC,WAAW,KAAkB;AAAA,IAChE;AACE,YAAM,IAAI,MAAM,yBAAyB,QAAQ,EAAE;AAAA,EACvD;AACF;AAEA,IAAM,oBAAoB,CACxB,MACA,UAEA,KACG,MAAM,GAAG,EACT,QAAQ,EACR,OAAO,CAAC,KAAK,SAAS,EAAE,CAAC,GAAG,GAAG,IAAI,IAAI,KAAgC;;;ADpF5E,IAAM,MAAM;AAEL,IAAM,uBAAuB,CAAI,WACtCC,KAAI;AAAA,EACF,OAAO,QAAQ,MAAM,EAAE;AAAA,IAAI,CAAC,CAAC,KAAK,KAAK,MACrC,SAAS,KAAK,IACV,4BAA4B,KAAK,KAAK,IACtC,eAAe,KAAK,OAAO,KAAK;AAAA,EACtC;AAAA,EACA,IAAI,GAAG;AACT;AAEF,IAAM,8BAA8B,CAClC,KACA,UACQ;AACR,QAAM,aAAa,CAAC,aAAa,KAAK;AAEtC,SAAOA,KAAI;AAAA,IACT,cAAc,KAAK,EAAE;AAAA,MAAI,CAAC,CAAC,WAAW,GAAG,MACvC,aACI,eAAe,GAAG,GAAG,IAAI,SAAS,IAAI,eAAe,KAAK,GAAG,IAC7D,eAAe,KAAK,WAAW,GAAG;AAAA,IACxC;AAAA,IACA,IAAI,GAAG;AAAA,EACT;AACF;AAEA,IAAM,WAAW,CAAC,UAChB,UAAU,QAAQ,OAAO,UAAU,YAAY,CAAC,MAAM,QAAQ,KAAK;;;AExCrE,SAAS,kBAAAC,iBAAgB,OAAAC,YAAW;AAU7B,IAAM,mBAAmB,CAAI,WAClC,cAAc,MAAM,EAAE;AAAA,EACpB,CAAC,oBAAoB,CAAC,IAAI,KAAK,MAAM;AACnC,YAAQ,IAAI;AAAA,MACV,KAAK;AACH,eAAO,cAAc,OAAO,kBAAkB;AAAA,MAChD,KAAK;AACH,eAAO,gBAAgB,OAAO,kBAAkB;AAAA,MAClD,KAAK;AACH,eAAO,cAAc,OAAO,kBAAkB;AAAA,MAChD,KAAK;AACH,eAAO,eAAe,OAAO,kBAAkB;AAAA,MACjD;AACE,eAAO;AAAA,IACX;AAAA,EACF;AAAA,EACAC;AACF;AAEK,IAAM,gBAAgB,CAAI,KAAc,uBAC7CA,OAAM,kBAAkB,OAAOC,gBAAe,UAAU,GAAG,CAAC;AAEvD,IAAM,kBAAkB,C
AC7B,OACA,uBAEAD,OAAM,kBAAkB,MAAM,OAAO,KAAK,KAAK,EAC5C,IAAI,CAAC,MAAM,IAAI,CAAC,GAAG,EACnB,KAAK,IAAI,CAAC;AAER,IAAM,gBAAgB,CAC3B,KACA,uBACQ;AACR,aAAW,CAAC,KAAK,KAAK,KAAK,OAAO,QAAQ,GAAG,GAAG;AAC9C,yBACE,OAAO,UAAU,WACbA,iBAAgB,kBAAkB,OAAOA,KAAI,MAAM,GAAG,CAAC,mCAAmCA,KAAI,MAAM,GAAG,CAAC,oBAAoB,KAAK,oBACjIA,iBAAgB,kBAAkB,OAAOA,KAAI,MAAM,GAAG,CAAC,kCAAkCA,KAAI,MAAM,GAAG,CAAC,qBAAqB,KAAK;AAAA,EACzI;AACA,SAAO;AACT;AAEO,IAAM,iBAAiB,CAC5B,MACA,uBACQ;AACR,aAAW,CAAC,KAAK,KAAK,KAAK,OAAO,QAAQ,IAAI,GAAG;AAC/C,UAAM,kBAAkBC,gBAAe,UAAU,CAAC,KAAK,CAAC;AACxD,yBAAqBD,iBAAgB,kBAAkB,OAAOA,KAAI,MAAM,GAAG,CAAC,wBAAwBA,KAAI,MAAM,GAAG,CAAC,sBAAsB,eAAe;AAAA,EACzJ;AACA,SAAO;AACT;;;AHzCA,IAAM,mBAAmB,CAAC,mBACxBE;AAAA,iCAC+BA,KAAI,WAAW,cAAc,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAWxD,IAAM,sCAAsC,CAAC,mBAA2B;AAAA,EAC7E,aAAa,mBAAmB,cAAc,oBAAoB;AAAA,IAChE,iBAAiB,cAAc;AAAA,EACjC,CAAC;AACH;AAEO,IAAM,qBAAqB,CAChC,oBAC+B;AAAA,EAC/B,kBAAkB,MAAW,iBAAiB,cAAc;AAAA,EAC5D,WAAW,CAAI,aAAyD;AACtE,UAAM,aAAaC,gBAAe,UAAU,QAAQ;AACpD,UAAM,KAAK,SAAS;AACpB,UAAM,UAAU,SAAS,YAAY;AAErC,WAAOD;AAAA,oBACSA,KAAI,WAAW,cAAc,CAAC;AAAA,gBAClC,EAAE,KAAK,UAAU,KAAK,OAAO;AAAA,EAC3C;AAAA,EACA,YAAY,CAAI,cAA4D;AAC1E,UAAM,SAASA,KAAI;AAAA,MACjB,UAAU;AAAA,QACR,CAAC,QACCA,QAAO,IAAI,GAAG,KAAKC,gBAAe,UAAU,GAAG,CAAC,KAAK,IAAI,YAAY,EAAE;AAAA,MAC3E;AAAA,MACA;AAAA,IACF;AAEA,WAAOD;AAAA,oBACSA,KAAI,WAAW,cAAc,CAAC,iCAAiC,MAAM;AAAA;AAAA;AAAA,EAGvF;AAAA,EACA,WAAW,CACT,QACA,QACA,YACQ;AACR,UAAM,kBAAkB,qBAAqB,SAAS,eAAe;AACrE,UAAM,wBACJ,mBAAmB,OACfA,WAAUA,KAAI,WAAW,cAAc,CAAC,eAAe,eAAe,KACtEA;AAEN,UAAM,cAAc,MAAM,MAAM,IAAI,SAAS,qBAAqB,MAAM;AACxE,UAAM,cAAc,MAAM,MAAM,IAAI,SAAS,iBAAiB,MAAM;AAEpE,WAAOA;AAAA;AAAA;AAAA,eAGIA,KAAI,WAAW,cAAc,CAAC,IAAI,MAAM,WAAW,CAAC;AAAA;AAAA;AAAA;AAAA,iBAIlDA,KAAI,WAAW,cAAc,CAAC;AAAA;AAAA,mBAE5B,WAAW,iCAAiCA,KAAI,WAAW,cAAc,CAAC;AAAA;AAAA;AAAA,gBAG7EA,KAAI,WAAW,cAAc,CAAC,uBAAuB,qBAAqB;AAAA,oBACtEA,KAAI,WAAW,cAAc,CAAC,SAASA,KAAI,WAAW,cAAc,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUvF;AAAA,EACA,YAAY,CACV,QACA,UACA,YACQ;AACR,UAAM,kBAAkB,qBAAqB,SAAS,eAAe;AACrE,UAAM,wBACJ,mBAAmB,OACfA,WAAUA,KAAI,WAAW,cAAc,CAAC,eAAe,eAAe,KACtEA;AAEN,UAAM,cAAc,MAAM,MAAM,IAAI,SAAS,qBAAqB,MAAM;AAExE,WAAOA;AAAA;AAAA;AAAA,eAGIA,KAAI,WAAW,cAAc,CAAC,IAAI,MAAM,WAAW,CAAC;AAAA;AAAA;AAAA;AAAA,iBAIlDA,KAAI,WAAW,cAAc,CAAC;AAAA;AAAA,mBAE5BC,gBAAe,UAAU,QAAQ,CAAC,iCAAiCD,KAAI,WAAW,cAAc,CAAC;AAAA;AAAA;AAAA,gBAGpGA,KAAI,WAAW,cAAc,CAAC,uBAAuB,qBAAqB;AAAA,oBACtEA,KAAI,WAAW,cAAc,CAAC,SAASA,KAAI,WAAW,cAAc,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUvF;AAAA,EACA,YAAY,CACV,QACA,WACQ;AACR,UAAM,cAAc,MAAM,MAAM,IAAI,SAAS,qBAAqB,MAAM;AACxE,UAAM,cAAc,MAAM,MAAM,IAAI,SAAS,iBAAiB,MAAM;AAEpE,WAAOA;AAAA,eACIA,KAAI,WAAW,cAAc,CAAC;AAAA;AAAA,iBAE5B,WAAW;AAAA;AAAA,QAEpB,MAAM,WAAW,CAAC;AAAA,EACxB;AAAA,EACA,WAAW,CACT,QACA,YACQ;AACR,UAAM,kBAAkB,qBAAqB,SAAS,eAAe;AACrE,UAAM,wBACJ,mBAAmB,OACfA,WAAUA,KAAI,WAAW,cAAc,CAAC,eAAe,eAAe,KACtEA;AAEN,UAAM,cAAc,MAAM,MAAM,IAAI,SAAS,qBAAqB,MAAM;AAExE,WAAOA;AAAA;AAAA;AAAA,eAGIA,KAAI,WAAW,cAAc,CAAC,IAAI,MAAM,WAAW,CAAC;AAAA;AAAA;AAAA;AAAA,sBAI7CA,KAAI,WAAW,cAAc,CAAC;AAAA;AAAA,gBAEpCA,KAAI,WAAW,cAAc,CAAC,uBAAuB,qBAAqB;AAAA,oBACtEA,KAAI,WAAW,cAAc,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAShD;AAAA,EACA,YAAY,CAAI,WAAsC;AACpD,UAAM,cAAc,MAAM,MAAM,IAAI,SAAS,qBAAqB,MAAM;AAExE,WAAOA,mBAAkBA,KAAI,WAAW,cAAc,CAAC,IAAI,MAAM,WAAW,CAAC;AAAA,EAC/E;AAAA,EACA,SAAS,CAAI,WAAsC;AACjD,UAAM,cAAc,MAAM,MAAM,IAAI,SAAS,qBAAqB,MAAM;AAExE,WAAOA,wBAAuBA,KAAI,WAAW,cAAc,CAAC,IAAI,MAAM,WAAW,CAAC;AAAA,EACpF;AAAA,EACA,MAAM,CAAI,QAA8B,YAA+B;AACrE,UAAM,cAAc,MAAM,MAAM,IAAI,S
AAS,qBAAqB,MAAM;AACxE,UAAM,QAAe,CAAC;AAEtB,UAAM,KAAKA,wBAAuBA,KAAI,WAAW,cAAc,CAAC,EAAE;AAElE,UAAM,KAAK,MAAM,WAAW,CAAC;AAE7B,QAAI,SAAS,OAAO;AAClB,YAAM,KAAKA,aAAY,QAAQ,KAAK,EAAE;AAAA,IACxC;AAEA,QAAI,SAAS,MAAM;AACjB,YAAM,KAAKA,cAAa,QAAQ,IAAI,EAAE;AAAA,IACxC;AAEA,WAAOA,KAAI,MAAM,CAAC,GAAG,OAAOA,OAAM,CAAC;AAAA,EACrC;AAAA,EACA,gBAAgB,CAAI,WAAsC;AACxD,UAAM,cAAcA,KAAI,MAAM,MAAM,MAAM,IACtC,SACA,qBAAqB,MAAM;AAC/B,WAAOA,qCAAoCA,KAAI,WAAW,cAAc,CAAC,IAAI,MAAM,WAAW,CAAC;AAAA,EACjG;AAAA,EACA,QAAQ,CAAC,YACPA,mBAAkBA,KAAI,WAAW,cAAc,CAAC,cAAcA,KAAI,WAAW,OAAO,CAAC;AAAA,EACvF,MAAM,CAAC,aAAqB,mBAC1BA,4BAA2BA,KAAI,WAAW,UAAU,CAAC;AACzD;AAEA,IAAM,QAAQ,CAAC,gBACbA,KAAI,MAAM,QAAQ,WAAW,IACzBA,KAAI,QACJA,KAAI,MAAM,CAACA,cAAa,WAAW,CAAC;;;AIvO1C,SAAS,aAAa;AACtB;AAAA,EACE,oBAAoB;AAAA,EACpB;AAAA,EACA;AAAA,OAEK;AACP,OAAe;AAuDf,IAAM,mBAGF;AAAA,EACF,YAAY;AAAA,EACZ,iBAAiB,CAAC,YAAY;AAC5B,UAAM,eACJ,QAAQ,gBACR,yBAAyB,QAAQ,gBAAgB;AAEnD,WAAO,cAAc;AAAA,MACnB,GAAG;AAAA,MACH,MAAM,MAAM;AAAA,QACV,kBAAkB,QAAQ;AAAA,QAC1B,QAAQ;AAAA,QACR,GAAG,QAAQ;AAAA,MACb,CAAC;AAAA,MACD,iBAAiB,6BAA6B;AAAA,QAC5C,YAAY;AAAA,QACZ,mBAAmB,CAAC,WAClB,+BAA+B;AAAA,UAC7B,YAAY;AAAA,UACZ,YAAY;AAAA,UACZ,8BAA8B;AAAA,YAC5B,YAAY,oCAAoC,OAAO,IAAI;AAAA,UAC7D;AAAA,UACA,YAAY,mBAAmB,OAAO,IAAI;AAAA,QAC5C,CAAC;AAAA,QACH,YACE,QAAQ,QAAQ,cAAc,YAAY,GAAG,cAAc,CAAC,CAAC;AAAA,MACjE,CAAC;AAAA,MACD;AAAA,IACF,CAAC;AAAA,EACH;AAAA,EACA,0BAA0B,CAAC,YAAY;AACrC,WACE,QAAQ,gBAAgB,yBAAyB,QAAQ,gBAAgB;AAAA,EAE7E;AAAA,EACA,yBAAyB;AAC3B;AAEO,IAAM,sBAAsB,MAAM;AACvC,8BAA4B;AAAA,IAC1B;AAAA,IACA;AAAA,EACF;AACF;AAEA,oBAAoB;","names":["JSONSerializer","SQL","SQL","SQL","JSONSerializer","SQL","SQL","JSONSerializer","SQL","JSONSerializer"]}
|
|
1
|
+
{"version":3,"sources":["../src/storage/postgresql/core/sqlBuilder/index.ts","../src/storage/postgresql/core/sqlBuilder/filter/index.ts","../src/storage/postgresql/core/sqlBuilder/filter/queryOperators.ts","../src/storage/postgresql/core/sqlBuilder/update/index.ts","../src/storage/postgresql/pg/index.ts"],"sourcesContent":["import {\n isSQL,\n JSONSerializer,\n SQL,\n sqlMigration,\n} from '@event-driven-io/dumbo';\nimport {\n expectedVersionValue,\n type DeleteOneOptions,\n type FindOptions,\n type OptionalUnlessRequiredIdAndVersion,\n type PongoCollectionSQLBuilder,\n type PongoFilter,\n type PongoUpdate,\n type ReplaceOneOptions,\n type UpdateOneOptions,\n type WithoutId,\n} from '../../../../core';\nimport { constructFilterQuery } from './filter';\nimport { buildUpdateQuery } from './update';\n\nconst createCollection = (collectionName: string): SQL =>\n SQL`\n CREATE TABLE IF NOT EXISTS ${SQL.identifier(collectionName)} (\n _id TEXT PRIMARY KEY, \n data JSONB NOT NULL, \n metadata JSONB NOT NULL DEFAULT '{}',\n _version BIGINT NOT NULL DEFAULT 1,\n _partition TEXT NOT NULL DEFAULT 'png_global',\n _archived BOOLEAN NOT NULL DEFAULT FALSE,\n _created TIMESTAMPTZ NOT NULL DEFAULT now(),\n _updated TIMESTAMPTZ NOT NULL DEFAULT now()\n )`;\n\nexport const pongoCollectionPostgreSQLMigrations = (collectionName: string) => [\n sqlMigration(`pongoCollection:${collectionName}:001:createtable`, [\n createCollection(collectionName),\n ]),\n];\n\nexport const postgresSQLBuilder = (\n collectionName: string,\n): PongoCollectionSQLBuilder => ({\n createCollection: (): SQL => createCollection(collectionName),\n insertOne: <T>(document: OptionalUnlessRequiredIdAndVersion<T>): SQL => {\n const serialized = JSONSerializer.serialize(document);\n const id = document._id;\n const version = document._version ?? 1n;\n\n return SQL`\n INSERT INTO ${SQL.identifier(collectionName)} (_id, data, _version) \n VALUES (${id}, ${serialized}, ${version}) ON CONFLICT(_id) DO NOTHING;`;\n },\n insertMany: <T>(documents: OptionalUnlessRequiredIdAndVersion<T>[]): SQL => {\n const values = SQL.merge(\n documents.map(\n (doc) =>\n SQL`(${doc._id}, ${JSONSerializer.serialize(doc)}, ${doc._version ?? 1n})`,\n ),\n ',',\n );\n\n return SQL`\n INSERT INTO ${SQL.identifier(collectionName)} (_id, data, _version) VALUES ${values}\n ON CONFLICT(_id) DO NOTHING\n RETURNING _id;`;\n },\n updateOne: <T>(\n filter: PongoFilter<T> | SQL,\n update: PongoUpdate<T> | SQL,\n options?: UpdateOneOptions,\n ): SQL => {\n const expectedVersion = expectedVersionValue(options?.expectedVersion);\n const expectedVersionUpdate =\n expectedVersion != null\n ? SQL`AND ${SQL.identifier(collectionName)}._version = ${expectedVersion}`\n : SQL``;\n\n const filterQuery = isSQL(filter) ? filter : constructFilterQuery(filter);\n const updateQuery = isSQL(update) ? 
update : buildUpdateQuery(update);\n\n return SQL`\n WITH existing AS (\n SELECT _id, _version as current_version\n FROM ${SQL.identifier(collectionName)} ${where(filterQuery)}\n LIMIT 1\n ),\n updated AS (\n UPDATE ${SQL.identifier(collectionName)} \n SET \n data = ${updateQuery} || jsonb_build_object('_id', ${SQL.identifier(collectionName)}._id) || jsonb_build_object('_version', (_version + 1)::text),\n _version = _version + 1\n FROM existing \n WHERE ${SQL.identifier(collectionName)}._id = existing._id ${expectedVersionUpdate}\n RETURNING ${SQL.identifier(collectionName)}._id, ${SQL.identifier(collectionName)}._version\n )\n SELECT \n existing._id,\n COALESCE(updated._version, existing.current_version) AS version,\n COUNT(existing._id) over() AS matched,\n COUNT(updated._id) over() AS modified\n FROM existing\n LEFT JOIN updated \n ON existing._id = updated._id;`;\n },\n replaceOne: <T>(\n filter: PongoFilter<T> | SQL,\n document: WithoutId<T>,\n options?: ReplaceOneOptions,\n ): SQL => {\n const expectedVersion = expectedVersionValue(options?.expectedVersion);\n const expectedVersionUpdate =\n expectedVersion != null\n ? SQL`AND ${SQL.identifier(collectionName)}._version = ${expectedVersion}`\n : SQL``;\n\n const filterQuery = isSQL(filter) ? filter : constructFilterQuery(filter);\n\n return SQL`\n WITH existing AS (\n SELECT _id, _version as current_version\n FROM ${SQL.identifier(collectionName)} ${where(filterQuery)}\n LIMIT 1\n ),\n updated AS (\n UPDATE ${SQL.identifier(collectionName)} \n SET \n data = ${JSONSerializer.serialize(document)} || jsonb_build_object('_id', ${SQL.identifier(collectionName)}._id) || jsonb_build_object('_version', (_version + 1)::text),\n _version = _version + 1\n FROM existing \n WHERE ${SQL.identifier(collectionName)}._id = existing._id ${expectedVersionUpdate}\n RETURNING ${SQL.identifier(collectionName)}._id, ${SQL.identifier(collectionName)}._version\n )\n SELECT \n existing._id,\n COALESCE(updated._version, existing.current_version) AS version,\n COUNT(existing._id) over() AS matched,\n COUNT(updated._id) over() AS modified\n FROM existing\n LEFT JOIN updated \n ON existing._id = updated._id;`;\n },\n updateMany: <T>(\n filter: PongoFilter<T> | SQL,\n update: PongoUpdate<T> | SQL,\n ): SQL => {\n const filterQuery = isSQL(filter) ? filter : constructFilterQuery(filter);\n const updateQuery = isSQL(update) ? update : buildUpdateQuery(update);\n\n return SQL`\n UPDATE ${SQL.identifier(collectionName)} \n SET \n data = ${updateQuery} || jsonb_build_object('_version', (_version + 1)::text),\n _version = _version + 1\n ${where(filterQuery)};`;\n },\n deleteOne: <T>(\n filter: PongoFilter<T> | SQL,\n options?: DeleteOneOptions,\n ): SQL => {\n const expectedVersion = expectedVersionValue(options?.expectedVersion);\n const expectedVersionUpdate =\n expectedVersion != null\n ? SQL`AND ${SQL.identifier(collectionName)}._version = ${expectedVersion}`\n : SQL``;\n\n const filterQuery = isSQL(filter) ? 
filter : constructFilterQuery(filter);\n\n return SQL`\n WITH existing AS (\n SELECT _id\n FROM ${SQL.identifier(collectionName)} ${where(filterQuery)}\n LIMIT 1\n ),\n deleted AS (\n DELETE FROM ${SQL.identifier(collectionName)}\n USING existing\n WHERE ${SQL.identifier(collectionName)}._id = existing._id ${expectedVersionUpdate}\n RETURNING ${SQL.identifier(collectionName)}._id\n )\n SELECT \n existing._id,\n COUNT(existing._id) over() AS matched,\n COUNT(deleted._id) over() AS deleted\n FROM existing\n LEFT JOIN deleted \n ON existing._id = deleted._id;`;\n },\n deleteMany: <T>(filter: PongoFilter<T> | SQL): SQL => {\n const filterQuery = isSQL(filter) ? filter : constructFilterQuery(filter);\n\n return SQL`DELETE FROM ${SQL.identifier(collectionName)} ${where(filterQuery)}`;\n },\n findOne: <T>(filter: PongoFilter<T> | SQL): SQL => {\n const filterQuery = isSQL(filter) ? filter : constructFilterQuery(filter);\n\n return SQL`SELECT data FROM ${SQL.identifier(collectionName)} ${where(filterQuery)} LIMIT 1;`;\n },\n find: <T>(filter: PongoFilter<T> | SQL, options?: FindOptions): SQL => {\n const filterQuery = isSQL(filter) ? filter : constructFilterQuery(filter);\n const query: SQL[] = [];\n\n query.push(SQL`SELECT data FROM ${SQL.identifier(collectionName)}`);\n\n query.push(where(filterQuery));\n\n if (options?.limit) {\n query.push(SQL`LIMIT ${options.limit}`);\n }\n\n if (options?.skip) {\n query.push(SQL`OFFSET ${options.skip}`);\n }\n\n return SQL.merge([...query, SQL`;`]);\n },\n countDocuments: <T>(filter: PongoFilter<T> | SQL): SQL => {\n const filterQuery = SQL.check.isSQL(filter)\n ? filter\n : constructFilterQuery(filter);\n return SQL`SELECT COUNT(1) as count FROM ${SQL.identifier(collectionName)} ${where(filterQuery)};`;\n },\n rename: (newName: string): SQL =>\n SQL`ALTER TABLE ${SQL.identifier(collectionName)} RENAME TO ${SQL.identifier(newName)};`,\n drop: (targetName: string = collectionName): SQL =>\n SQL`DROP TABLE IF EXISTS ${SQL.identifier(targetName)}`,\n});\n\nconst where = (filterQuery: SQL): SQL =>\n SQL.check.isEmpty(filterQuery)\n ? SQL.EMPTY\n : SQL.merge([SQL`WHERE `, filterQuery]);\n","import { SQL } from '@event-driven-io/dumbo';\nimport {\n hasOperators,\n objectEntries,\n QueryOperators,\n type PongoFilter,\n} from '../../../../../core';\nimport { handleOperator } from './queryOperators';\n\nexport * from './queryOperators';\n\nconst AND = 'AND';\n\nexport const constructFilterQuery = <T>(filter: PongoFilter<T>): SQL =>\n SQL.merge(\n Object.entries(filter).map(([key, value]) =>\n isRecord(value)\n ? constructComplexFilterQuery(key, value)\n : handleOperator(key, '$eq', value),\n ),\n ` ${AND} `,\n );\n\nconst constructComplexFilterQuery = (\n key: string,\n value: Record<string, unknown>,\n): SQL => {\n const isEquality = !hasOperators(value);\n\n return SQL.merge(\n objectEntries(value).map(([nestedKey, val]) =>\n isEquality\n ? 
handleOperator(`${key}.${nestedKey}`, QueryOperators.$eq, val)\n : handleOperator(key, nestedKey, val),\n ),\n ` ${AND} `,\n );\n};\n\nconst isRecord = (value: unknown): value is Record<string, unknown> =>\n value !== null && typeof value === 'object' && !Array.isArray(value);\n","import { JSONSerializer, SQL } from '@event-driven-io/dumbo';\nimport { objectEntries, OperatorMap } from '../../../../../core';\n\nexport const handleOperator = (\n path: string,\n operator: string,\n value: unknown,\n): SQL => {\n if (path === '_id' || path === '_version') {\n return handleMetadataOperator(path, operator, value);\n }\n\n switch (operator) {\n case '$eq': {\n const nestedPath = JSONSerializer.serialize(\n buildNestedObject(path, value),\n );\n const serializedValue = JSONSerializer.serialize(value);\n\n return SQL`(data @> ${nestedPath}::jsonb OR jsonb_path_exists(data, '$.${SQL.plain(path)}[*] ? (@ == ${SQL.plain(serializedValue)})'))`;\n }\n case '$gt':\n case '$gte':\n case '$lt':\n case '$lte':\n case '$ne': {\n const jsonPath = SQL.plain(path.split('.').join(','));\n\n return SQL`data ->> '${jsonPath}' ${SQL.plain(OperatorMap[operator])} ${value}`;\n }\n case '$in': {\n const jsonPath = `{${path.split('.').join(',')}}`;\n\n return SQL`data #>> ${jsonPath} IN ${value as unknown[]}`;\n }\n case '$nin': {\n const jsonPath = `{${path.split('.').join(',')}}`;\n\n return SQL`data #>> ${jsonPath} NOT IN ${value as unknown[]}`;\n }\n case '$elemMatch': {\n const subQuery = objectEntries(value as Record<string, unknown>)\n .map(\n ([subKey, subValue]) =>\n `@.\"${subKey}\" == ${JSONSerializer.serialize(subValue)}`,\n )\n .join(' && ');\n return SQL`jsonb_path_exists(data, '$.${SQL.plain(path)}[*] ? (${SQL.plain(subQuery)})')`;\n }\n case '$all': {\n const nestedPath = JSONSerializer.serialize(\n buildNestedObject(path, value),\n );\n return SQL`data @> ${nestedPath}::jsonb`;\n }\n case '$size': {\n const jsonPath = `{${path.split('.').join(',')}}`;\n\n return SQL`jsonb_array_length(data #> ${jsonPath}) = ${value}`;\n }\n default:\n throw new Error(`Unsupported operator: ${operator}`);\n }\n};\n\nconst handleMetadataOperator = (\n fieldName: string,\n operator: string,\n value: unknown,\n): SQL => {\n switch (operator) {\n case '$eq':\n return SQL`${SQL.plain(fieldName)} = ${value}`;\n case '$gt':\n case '$gte':\n case '$lt':\n case '$lte':\n case '$ne':\n return SQL`${SQL.plain(fieldName)} ${SQL.plain(OperatorMap[operator])} ${value}`;\n case '$in':\n return SQL`${SQL.plain(fieldName)} IN ${value as unknown[]}`;\n case '$nin':\n return SQL`${SQL.plain(fieldName)} NOT IN ${value as unknown[]}`;\n default:\n throw new Error(`Unsupported operator: ${operator}`);\n }\n};\n\nconst buildNestedObject = (\n path: string,\n value: unknown,\n): Record<string, unknown> =>\n path\n .split('.')\n .reverse()\n .reduce((acc, key) => ({ [key]: acc }), value as Record<string, unknown>);\n","import { JSONSerializer, SQL } from '@event-driven-io/dumbo';\nimport {\n objectEntries,\n type $inc,\n type $push,\n type $set,\n type $unset,\n type PongoUpdate,\n} from '../../../../../core';\n\nexport const buildUpdateQuery = <T>(update: PongoUpdate<T>): SQL =>\n objectEntries(update).reduce(\n (currentUpdateQuery, [op, value]) => {\n switch (op) {\n case '$set':\n return buildSetQuery(value, currentUpdateQuery);\n case '$unset':\n return buildUnsetQuery(value, currentUpdateQuery);\n case '$inc':\n return buildIncQuery(value, currentUpdateQuery);\n case '$push':\n return buildPushQuery(value, currentUpdateQuery);\n default:\n 
return currentUpdateQuery;\n }\n },\n SQL`data`,\n );\n\nexport const buildSetQuery = <T>(set: $set<T>, currentUpdateQuery: SQL): SQL =>\n SQL`${currentUpdateQuery} || ${JSONSerializer.serialize(set)}::jsonb`;\n\nexport const buildUnsetQuery = <T>(\n unset: $unset<T>,\n currentUpdateQuery: SQL,\n): SQL =>\n SQL`${currentUpdateQuery} - ${Object.keys(unset)\n .map((k) => `{${k}}`)\n .join(', ')}`;\n\nexport const buildIncQuery = <T>(\n inc: $inc<T>,\n currentUpdateQuery: SQL,\n): SQL => {\n for (const [key, value] of Object.entries(inc)) {\n currentUpdateQuery =\n typeof value === 'bigint'\n ? SQL`jsonb_set(${currentUpdateQuery}, '{${SQL.plain(key)}}', to_jsonb((COALESCE((data->>'${SQL.plain(key)}')::BIGINT, 0) + ${value})::TEXT), true)`\n : SQL`jsonb_set(${currentUpdateQuery}, '{${SQL.plain(key)}}', to_jsonb(COALESCE((data->>'${SQL.plain(key)}')::NUMERIC, 0) + ${value}), true)`;\n }\n return currentUpdateQuery;\n};\n\nexport const buildPushQuery = <T>(\n push: $push<T>,\n currentUpdateQuery: SQL,\n): SQL => {\n for (const [key, value] of Object.entries(push)) {\n const serializedValue = JSONSerializer.serialize([value]);\n currentUpdateQuery = SQL`jsonb_set(${currentUpdateQuery}, '{${SQL.plain(key)}}', (coalesce(data->'${SQL.plain(key)}', '[]'::jsonb) || ${serializedValue}::jsonb), true)`;\n }\n return currentUpdateQuery;\n};\n","import { dumbo } from '@event-driven-io/dumbo';\nimport {\n pgDatabaseDriver as dumboDriver,\n getDatabaseNameOrDefault,\n PgDriverType,\n type PgConnection,\n} from '@event-driven-io/dumbo/pg';\nimport pg from 'pg';\nimport {\n PongoCollectionSchemaComponent,\n PongoDatabase,\n pongoDatabaseDriverRegistry,\n PongoDatabaseSchemaComponent,\n pongoSchema,\n type PongoDatabaseDriver,\n type PongoDatabaseDriverOptions,\n type PongoDb,\n} from '../../../core';\nimport {\n pongoCollectionPostgreSQLMigrations,\n postgresSQLBuilder,\n} from '../core';\n\nexport type PgPongoClientOptions =\n | PooledPongoClientOptions\n | NotPooledPongoOptions;\n\nexport type PooledPongoClientOptions =\n | {\n pool: pg.Pool;\n }\n | {\n pooled: true;\n }\n | {\n pool: pg.Pool;\n pooled: true;\n }\n | object;\n\nexport type NotPooledPongoOptions =\n | {\n client: pg.Client;\n }\n | {\n pooled: false;\n }\n | {\n client: pg.Client;\n pooled: false;\n }\n | {\n connection: PgConnection;\n pooled?: false;\n };\n\ntype PgDatabaseDriverOptions =\n PongoDatabaseDriverOptions<PgPongoClientOptions> & {\n databaseName?: string | undefined;\n connectionString: string;\n };\n\nconst pgDatabaseDriver: PongoDatabaseDriver<\n PongoDb<PgDriverType>,\n PgDatabaseDriverOptions\n> = {\n driverType: PgDriverType,\n databaseFactory: (options) => {\n const databaseName =\n options.databaseName ??\n getDatabaseNameOrDefault(options.connectionString);\n\n return PongoDatabase({\n ...options,\n pool: dumbo({\n connectionString: options.connectionString,\n driver: dumboDriver,\n ...options.connectionOptions,\n }),\n schemaComponent: PongoDatabaseSchemaComponent({\n driverType: PgDriverType,\n collectionFactory: (schema) =>\n PongoCollectionSchemaComponent({\n driverType: PgDriverType,\n definition: schema,\n migrationsOrSchemaComponents: {\n migrations: pongoCollectionPostgreSQLMigrations(schema.name),\n },\n sqlBuilder: postgresSQLBuilder(schema.name),\n }),\n definition:\n options.schema?.definition ?? pongoSchema.db(databaseName, {}),\n }),\n databaseName,\n });\n },\n getDatabaseNameOrDefault: (options) => {\n return (\n options.databaseName ?? 
getDatabaseNameOrDefault(options.connectionString)\n );\n },\n defaultConnectionString: 'postgresql://localhost:5432/postgres',\n};\n\nexport const usePgDatabaseDriver = () => {\n pongoDatabaseDriverRegistry.register(PgDriverType, pgDatabaseDriver);\n};\n\nusePgDatabaseDriver();\n\nexport { pgDatabaseDriver as databaseDriver, pgDatabaseDriver as pgDriver };\n"],"mappings":";;;;;;;;;;;;;;AAAA;AAAA,EACE;AAAA,EACA,kBAAAA;AAAA,EACA,OAAAC;AAAA,EACA;AAAA,OACK;;;ACLP,SAAS,OAAAC,YAAW;;;ACApB,SAAS,gBAAgB,WAAW;AAG7B,IAAM,iBAAiB,CAC5B,MACA,UACA,UACQ;AACR,MAAI,SAAS,SAAS,SAAS,YAAY;AACzC,WAAO,uBAAuB,MAAM,UAAU,KAAK;AAAA,EACrD;AAEA,UAAQ,UAAU;AAAA,IAChB,KAAK,OAAO;AACV,YAAM,aAAa,eAAe;AAAA,QAChC,kBAAkB,MAAM,KAAK;AAAA,MAC/B;AACA,YAAM,kBAAkB,eAAe,UAAU,KAAK;AAEtD,aAAO,eAAe,UAAU,yCAAyC,IAAI,MAAM,IAAI,CAAC,eAAe,IAAI,MAAM,eAAe,CAAC;AAAA,IACnI;AAAA,IACA,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK,OAAO;AACV,YAAM,WAAW,IAAI,MAAM,KAAK,MAAM,GAAG,EAAE,KAAK,GAAG,CAAC;AAEpD,aAAO,gBAAgB,QAAQ,KAAK,IAAI,MAAM,YAAY,QAAQ,CAAC,CAAC,IAAI,KAAK;AAAA,IAC/E;AAAA,IACA,KAAK,OAAO;AACV,YAAM,WAAW,IAAI,KAAK,MAAM,GAAG,EAAE,KAAK,GAAG,CAAC;AAE9C,aAAO,eAAe,QAAQ,OAAO,KAAkB;AAAA,IACzD;AAAA,IACA,KAAK,QAAQ;AACX,YAAM,WAAW,IAAI,KAAK,MAAM,GAAG,EAAE,KAAK,GAAG,CAAC;AAE9C,aAAO,eAAe,QAAQ,WAAW,KAAkB;AAAA,IAC7D;AAAA,IACA,KAAK,cAAc;AACjB,YAAM,WAAW,cAAc,KAAgC,EAC5D;AAAA,QACC,CAAC,CAAC,QAAQ,QAAQ,MAChB,MAAM,MAAM,QAAQ,eAAe,UAAU,QAAQ,CAAC;AAAA,MAC1D,EACC,KAAK,MAAM;AACd,aAAO,iCAAiC,IAAI,MAAM,IAAI,CAAC,UAAU,IAAI,MAAM,QAAQ,CAAC;AAAA,IACtF;AAAA,IACA,KAAK,QAAQ;AACX,YAAM,aAAa,eAAe;AAAA,QAChC,kBAAkB,MAAM,KAAK;AAAA,MAC/B;AACA,aAAO,cAAc,UAAU;AAAA,IACjC;AAAA,IACA,KAAK,SAAS;AACZ,YAAM,WAAW,IAAI,KAAK,MAAM,GAAG,EAAE,KAAK,GAAG,CAAC;AAE9C,aAAO,iCAAiC,QAAQ,OAAO,KAAK;AAAA,IAC9D;AAAA,IACA;AACE,YAAM,IAAI,MAAM,yBAAyB,QAAQ,EAAE;AAAA,EACvD;AACF;AAEA,IAAM,yBAAyB,CAC7B,WACA,UACA,UACQ;AACR,UAAQ,UAAU;AAAA,IAChB,KAAK;AACH,aAAO,MAAM,IAAI,MAAM,SAAS,CAAC,MAAM,KAAK;AAAA,IAC9C,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AAAA,IACL,KAAK;AACH,aAAO,MAAM,IAAI,MAAM,SAAS,CAAC,IAAI,IAAI,MAAM,YAAY,QAAQ,CAAC,CAAC,IAAI,KAAK;AAAA,IAChF,KAAK;AACH,aAAO,MAAM,IAAI,MAAM,SAAS,CAAC,OAAO,KAAkB;AAAA,IAC5D,KAAK;AACH,aAAO,MAAM,IAAI,MAAM,SAAS,CAAC,WAAW,KAAkB;AAAA,IAChE;AACE,YAAM,IAAI,MAAM,yBAAyB,QAAQ,EAAE;AAAA,EACvD;AACF;AAEA,IAAM,oBAAoB,CACxB,MACA,UAEA,KACG,MAAM,GAAG,EACT,QAAQ,EACR,OAAO,CAAC,KAAK,SAAS,EAAE,CAAC,GAAG,GAAG,IAAI,IAAI,KAAgC;;;ADpF5E,IAAM,MAAM;AAEL,IAAM,uBAAuB,CAAI,WACtCC,KAAI;AAAA,EACF,OAAO,QAAQ,MAAM,EAAE;AAAA,IAAI,CAAC,CAAC,KAAK,KAAK,MACrC,SAAS,KAAK,IACV,4BAA4B,KAAK,KAAK,IACtC,eAAe,KAAK,OAAO,KAAK;AAAA,EACtC;AAAA,EACA,IAAI,GAAG;AACT;AAEF,IAAM,8BAA8B,CAClC,KACA,UACQ;AACR,QAAM,aAAa,CAAC,aAAa,KAAK;AAEtC,SAAOA,KAAI;AAAA,IACT,cAAc,KAAK,EAAE;AAAA,MAAI,CAAC,CAAC,WAAW,GAAG,MACvC,aACI,eAAe,GAAG,GAAG,IAAI,SAAS,IAAI,eAAe,KAAK,GAAG,IAC7D,eAAe,KAAK,WAAW,GAAG;AAAA,IACxC;AAAA,IACA,IAAI,GAAG;AAAA,EACT;AACF;AAEA,IAAM,WAAW,CAAC,UAChB,UAAU,QAAQ,OAAO,UAAU,YAAY,CAAC,MAAM,QAAQ,KAAK;;;AExCrE,SAAS,kBAAAC,iBAAgB,OAAAC,YAAW;AAU7B,IAAM,mBAAmB,CAAI,WAClC,cAAc,MAAM,EAAE;AAAA,EACpB,CAAC,oBAAoB,CAAC,IAAI,KAAK,MAAM;AACnC,YAAQ,IAAI;AAAA,MACV,KAAK;AACH,eAAO,cAAc,OAAO,kBAAkB;AAAA,MAChD,KAAK;AACH,eAAO,gBAAgB,OAAO,kBAAkB;AAAA,MAClD,KAAK;AACH,eAAO,cAAc,OAAO,kBAAkB;AAAA,MAChD,KAAK;AACH,eAAO,eAAe,OAAO,kBAAkB;AAAA,MACjD;AACE,eAAO;AAAA,IACX;AAAA,EACF;AAAA,EACAC;AACF;AAEK,IAAM,gBAAgB,CAAI,KAAc,uBAC7CA,OAAM,kBAAkB,OAAOC,gBAAe,UAAU,GAAG,CAAC;AAEvD,IAAM,kBAAkB,CAC7B,OACA,uBAEAD,OAAM,kBAAkB,MAAM,OAAO,KAAK,KAAK,EAC5C,IAAI,CAAC,MAAM,IAAI,CAAC,GAAG,EACnB,KAAK,IAAI,CAAC;AAER,IAAM,gBAAgB,CAC3B,KACA,uBACQ;AACR,aAAW,CAAC,KAAK,KAAK,K
AAK,OAAO,QAAQ,GAAG,GAAG;AAC9C,yBACE,OAAO,UAAU,WACbA,iBAAgB,kBAAkB,OAAOA,KAAI,MAAM,GAAG,CAAC,mCAAmCA,KAAI,MAAM,GAAG,CAAC,oBAAoB,KAAK,oBACjIA,iBAAgB,kBAAkB,OAAOA,KAAI,MAAM,GAAG,CAAC,kCAAkCA,KAAI,MAAM,GAAG,CAAC,qBAAqB,KAAK;AAAA,EACzI;AACA,SAAO;AACT;AAEO,IAAM,iBAAiB,CAC5B,MACA,uBACQ;AACR,aAAW,CAAC,KAAK,KAAK,KAAK,OAAO,QAAQ,IAAI,GAAG;AAC/C,UAAM,kBAAkBC,gBAAe,UAAU,CAAC,KAAK,CAAC;AACxD,yBAAqBD,iBAAgB,kBAAkB,OAAOA,KAAI,MAAM,GAAG,CAAC,wBAAwBA,KAAI,MAAM,GAAG,CAAC,sBAAsB,eAAe;AAAA,EACzJ;AACA,SAAO;AACT;;;AHzCA,IAAM,mBAAmB,CAAC,mBACxBE;AAAA,iCAC+BA,KAAI,WAAW,cAAc,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAWxD,IAAM,sCAAsC,CAAC,mBAA2B;AAAA,EAC7E,aAAa,mBAAmB,cAAc,oBAAoB;AAAA,IAChE,iBAAiB,cAAc;AAAA,EACjC,CAAC;AACH;AAEO,IAAM,qBAAqB,CAChC,oBAC+B;AAAA,EAC/B,kBAAkB,MAAW,iBAAiB,cAAc;AAAA,EAC5D,WAAW,CAAI,aAAyD;AACtE,UAAM,aAAaC,gBAAe,UAAU,QAAQ;AACpD,UAAM,KAAK,SAAS;AACpB,UAAM,UAAU,SAAS,YAAY;AAErC,WAAOD;AAAA,oBACSA,KAAI,WAAW,cAAc,CAAC;AAAA,gBAClC,EAAE,KAAK,UAAU,KAAK,OAAO;AAAA,EAC3C;AAAA,EACA,YAAY,CAAI,cAA4D;AAC1E,UAAM,SAASA,KAAI;AAAA,MACjB,UAAU;AAAA,QACR,CAAC,QACCA,QAAO,IAAI,GAAG,KAAKC,gBAAe,UAAU,GAAG,CAAC,KAAK,IAAI,YAAY,EAAE;AAAA,MAC3E;AAAA,MACA;AAAA,IACF;AAEA,WAAOD;AAAA,oBACSA,KAAI,WAAW,cAAc,CAAC,iCAAiC,MAAM;AAAA;AAAA;AAAA,EAGvF;AAAA,EACA,WAAW,CACT,QACA,QACA,YACQ;AACR,UAAM,kBAAkB,qBAAqB,SAAS,eAAe;AACrE,UAAM,wBACJ,mBAAmB,OACfA,WAAUA,KAAI,WAAW,cAAc,CAAC,eAAe,eAAe,KACtEA;AAEN,UAAM,cAAc,MAAM,MAAM,IAAI,SAAS,qBAAqB,MAAM;AACxE,UAAM,cAAc,MAAM,MAAM,IAAI,SAAS,iBAAiB,MAAM;AAEpE,WAAOA;AAAA;AAAA;AAAA,eAGIA,KAAI,WAAW,cAAc,CAAC,IAAI,MAAM,WAAW,CAAC;AAAA;AAAA;AAAA;AAAA,iBAIlDA,KAAI,WAAW,cAAc,CAAC;AAAA;AAAA,mBAE5B,WAAW,iCAAiCA,KAAI,WAAW,cAAc,CAAC;AAAA;AAAA;AAAA,gBAG7EA,KAAI,WAAW,cAAc,CAAC,uBAAuB,qBAAqB;AAAA,oBACtEA,KAAI,WAAW,cAAc,CAAC,SAASA,KAAI,WAAW,cAAc,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUvF;AAAA,EACA,YAAY,CACV,QACA,UACA,YACQ;AACR,UAAM,kBAAkB,qBAAqB,SAAS,eAAe;AACrE,UAAM,wBACJ,mBAAmB,OACfA,WAAUA,KAAI,WAAW,cAAc,CAAC,eAAe,eAAe,KACtEA;AAEN,UAAM,cAAc,MAAM,MAAM,IAAI,SAAS,qBAAqB,MAAM;AAExE,WAAOA;AAAA;AAAA;AAAA,eAGIA,KAAI,WAAW,cAAc,CAAC,IAAI,MAAM,WAAW,CAAC;AAAA;AAAA;AAAA;AAAA,iBAIlDA,KAAI,WAAW,cAAc,CAAC;AAAA;AAAA,mBAE5BC,gBAAe,UAAU,QAAQ,CAAC,iCAAiCD,KAAI,WAAW,cAAc,CAAC;AAAA;AAAA;AAAA,gBAGpGA,KAAI,WAAW,cAAc,CAAC,uBAAuB,qBAAqB;AAAA,oBACtEA,KAAI,WAAW,cAAc,CAAC,SAASA,KAAI,WAAW,cAAc,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAUvF;AAAA,EACA,YAAY,CACV,QACA,WACQ;AACR,UAAM,cAAc,MAAM,MAAM,IAAI,SAAS,qBAAqB,MAAM;AACxE,UAAM,cAAc,MAAM,MAAM,IAAI,SAAS,iBAAiB,MAAM;AAEpE,WAAOA;AAAA,eACIA,KAAI,WAAW,cAAc,CAAC;AAAA;AAAA,iBAE5B,WAAW;AAAA;AAAA,QAEpB,MAAM,WAAW,CAAC;AAAA,EACxB;AAAA,EACA,WAAW,CACT,QACA,YACQ;AACR,UAAM,kBAAkB,qBAAqB,SAAS,eAAe;AACrE,UAAM,wBACJ,mBAAmB,OACfA,WAAUA,KAAI,WAAW,cAAc,CAAC,eAAe,eAAe,KACtEA;AAEN,UAAM,cAAc,MAAM,MAAM,IAAI,SAAS,qBAAqB,MAAM;AAExE,WAAOA;AAAA;AAAA;AAAA,eAGIA,KAAI,WAAW,cAAc,CAAC,IAAI,MAAM,WAAW,CAAC;AAAA;AAAA;AAAA;AAAA,sBAI7CA,KAAI,WAAW,cAAc,CAAC;AAAA;AAAA,gBAEpCA,KAAI,WAAW,cAAc,CAAC,uBAAuB,qBAAqB;AAAA,oBACtEA,KAAI,WAAW,cAAc,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAShD;AAAA,EACA,YAAY,CAAI,WAAsC;AACpD,UAAM,cAAc,MAAM,MAAM,IAAI,SAAS,qBAAqB,MAAM;AAExE,WAAOA,mBAAkBA,KAAI,WAAW,cAAc,CAAC,IAAI,MAAM,WAAW,CAAC;AAAA,EAC/E;AAAA,EACA,SAAS,CAAI,WAAsC;AACjD,UAAM,cAAc,MAAM,MAAM,IAAI,SAAS,qBAAqB,MAAM;AAExE,WAAOA,wBAAuBA,KAAI,WAAW,cAAc,CAAC,IAAI,MAAM,WAAW,CAAC;AAAA,EACpF;AAAA,EACA,MAAM,CAAI,QAA8B,YAA+B;AACrE,UAAM,cAAc,MAAM,MAAM,IAAI,SAAS,qBAAqB,MAAM;AACxE,UAAM,QAAe,CAAC;AAEtB,UAAM,KAAKA,wBAAuBA,KAAI,WAAW,cAAc,CAAC,EAAE;AAElE,UAAM,KAAK,MAAM,WAAW,CAAC;AAE7B,QAAI,SAAS,OAAO;AAClB,YAAM,KAAKA,aAAY,QAAQ,
KAAK,EAAE;AAAA,IACxC;AAEA,QAAI,SAAS,MAAM;AACjB,YAAM,KAAKA,cAAa,QAAQ,IAAI,EAAE;AAAA,IACxC;AAEA,WAAOA,KAAI,MAAM,CAAC,GAAG,OAAOA,OAAM,CAAC;AAAA,EACrC;AAAA,EACA,gBAAgB,CAAI,WAAsC;AACxD,UAAM,cAAcA,KAAI,MAAM,MAAM,MAAM,IACtC,SACA,qBAAqB,MAAM;AAC/B,WAAOA,qCAAoCA,KAAI,WAAW,cAAc,CAAC,IAAI,MAAM,WAAW,CAAC;AAAA,EACjG;AAAA,EACA,QAAQ,CAAC,YACPA,mBAAkBA,KAAI,WAAW,cAAc,CAAC,cAAcA,KAAI,WAAW,OAAO,CAAC;AAAA,EACvF,MAAM,CAAC,aAAqB,mBAC1BA,4BAA2BA,KAAI,WAAW,UAAU,CAAC;AACzD;AAEA,IAAM,QAAQ,CAAC,gBACbA,KAAI,MAAM,QAAQ,WAAW,IACzBA,KAAI,QACJA,KAAI,MAAM,CAACA,cAAa,WAAW,CAAC;;;AIvO1C,SAAS,aAAa;AACtB;AAAA,EACE,oBAAoB;AAAA,EACpB;AAAA,EACA;AAAA,OAEK;AACP,OAAe;AAuDf,IAAM,mBAGF;AAAA,EACF,YAAY;AAAA,EACZ,iBAAiB,CAAC,YAAY;AAC5B,UAAM,eACJ,QAAQ,gBACR,yBAAyB,QAAQ,gBAAgB;AAEnD,WAAO,cAAc;AAAA,MACnB,GAAG;AAAA,MACH,MAAM,MAAM;AAAA,QACV,kBAAkB,QAAQ;AAAA,QAC1B,QAAQ;AAAA,QACR,GAAG,QAAQ;AAAA,MACb,CAAC;AAAA,MACD,iBAAiB,6BAA6B;AAAA,QAC5C,YAAY;AAAA,QACZ,mBAAmB,CAAC,WAClB,+BAA+B;AAAA,UAC7B,YAAY;AAAA,UACZ,YAAY;AAAA,UACZ,8BAA8B;AAAA,YAC5B,YAAY,oCAAoC,OAAO,IAAI;AAAA,UAC7D;AAAA,UACA,YAAY,mBAAmB,OAAO,IAAI;AAAA,QAC5C,CAAC;AAAA,QACH,YACE,QAAQ,QAAQ,cAAc,YAAY,GAAG,cAAc,CAAC,CAAC;AAAA,MACjE,CAAC;AAAA,MACD;AAAA,IACF,CAAC;AAAA,EACH;AAAA,EACA,0BAA0B,CAAC,YAAY;AACrC,WACE,QAAQ,gBAAgB,yBAAyB,QAAQ,gBAAgB;AAAA,EAE7E;AAAA,EACA,yBAAyB;AAC3B;AAEO,IAAM,sBAAsB,MAAM;AACvC,8BAA4B,SAAS,cAAc,gBAAgB;AACrE;AAEA,oBAAoB;","names":["JSONSerializer","SQL","SQL","SQL","JSONSerializer","SQL","SQL","JSONSerializer","SQL","JSONSerializer"]}
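Note on the updated pg.js.map above: its embedded sources show the node-postgres driver identifiers renamed — NodePostgresDriverType, NodePostgresConnection, NodePostgresPongoClientOptions and NodePostgresDatabaseDriverOptions become PgDriverType, PgConnection, PgPongoClientOptions and PgDatabaseDriverOptions — and the driver is now registered in pongoDatabaseDriverRegistry under PgDriverType. A minimal consumer-side TypeScript sketch follows; the '@event-driven-io/pongo/pg' subpath is an assumption inferred from the dist/pg.js entry point (it is not stated in this diff), and only names visible in the embedded sources are used.

// Sketch only, assuming '@event-driven-io/pongo/pg' is how the dist/pg.js
// entry is exposed to consumers; adjust the subpath to your setup.
import { pgDriver, usePgDatabaseDriver } from '@event-driven-io/pongo/pg';
// PgDriverType is imported from '@event-driven-io/dumbo/pg' in the new
// sources (previously NodePostgresDriverType); dumbo is a peer dependency.
import { PgDriverType } from '@event-driven-io/dumbo/pg';

// dist/pg.js already invokes usePgDatabaseDriver() at module load, which calls
// pongoDatabaseDriverRegistry.register(PgDriverType, pgDatabaseDriver);
// calling it again simply re-registers the same driver.
usePgDatabaseDriver();

// Properties visible on the driver object in the embedded sources:
console.log(pgDriver.driverType === PgDriverType); // true
console.log(pgDriver.defaultConnectionString); // 'postgresql://localhost:5432/postgres'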
|
package/package.json
CHANGED
|
@@ -1,7 +1,7 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@event-driven-io/pongo",
|
|
3
|
-
"version": "0.17.0-beta.
|
|
4
|
-
"description": "Pongo - Mongo with strong consistency on top of
|
|
3
|
+
"version": "0.17.0-beta.9",
|
|
4
|
+
"description": "Pongo - Mongo with strong consistency on top of PostgreSQL",
|
|
5
5
|
"type": "module",
|
|
6
6
|
"scripts": {
|
|
7
7
|
"build": "tsup",
|
|
@@ -134,7 +134,7 @@
|
|
|
134
134
|
"pongo": "./dist/cli.js"
|
|
135
135
|
},
|
|
136
136
|
"peerDependencies": {
|
|
137
|
-
"@event-driven-io/dumbo": "0.13.0-beta.
|
|
137
|
+
"@event-driven-io/dumbo": "0.13.0-beta.9",
|
|
138
138
|
"@cloudflare/workers-types": "^4.20260115.0",
|
|
139
139
|
"@types/mongodb": "^4.0.7",
|
|
140
140
|
"@types/pg": "^8.15.5",
|