@geekmidas/testkit 1.0.1 → 1.0.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +16 -0
- package/dist/{Factory-ClyZLRsO.d.mts → Factory-CVR3GdkW.d.mts} +2 -2
- package/dist/{Factory-ClyZLRsO.d.mts.map → Factory-CVR3GdkW.d.mts.map} +1 -1
- package/dist/Factory.d.mts +2 -2
- package/dist/{KyselyFactory-CTZmiGI9.d.mts → KyselyFactory-DRlMv-WT.d.mts} +3 -3
- package/dist/{KyselyFactory-CTZmiGI9.d.mts.map → KyselyFactory-DRlMv-WT.d.mts.map} +1 -1
- package/dist/KyselyFactory.d.mts +3 -3
- package/dist/{ObjectionFactory-DDrKwAoO.d.mts → ObjectionFactory-DkJUf-uM.d.mts} +3 -3
- package/dist/{ObjectionFactory-DDrKwAoO.d.mts.map → ObjectionFactory-DkJUf-uM.d.mts.map} +1 -1
- package/dist/ObjectionFactory.d.mts +3 -3
- package/dist/{PostgresKyselyMigrator-6sE1KOni.mjs → PostgresKyselyMigrator-B4pScubb.mjs} +2 -2
- package/dist/{PostgresKyselyMigrator-6sE1KOni.mjs.map → PostgresKyselyMigrator-B4pScubb.mjs.map} +1 -1
- package/dist/{PostgresKyselyMigrator-D6IbPq8t.cjs → PostgresKyselyMigrator-C7ljZYvq.cjs} +2 -2
- package/dist/{PostgresKyselyMigrator-D6IbPq8t.cjs.map → PostgresKyselyMigrator-C7ljZYvq.cjs.map} +1 -1
- package/dist/PostgresKyselyMigrator.cjs +2 -2
- package/dist/PostgresKyselyMigrator.mjs +2 -2
- package/dist/PostgresMigrator-Bres0U6E.d.cts.map +1 -1
- package/dist/{PostgresMigrator-D6dQn0x2.cjs → PostgresMigrator-CHiBYEg_.cjs} +7 -2
- package/dist/PostgresMigrator-CHiBYEg_.cjs.map +1 -0
- package/dist/{PostgresMigrator-BjjenqSd.mjs → PostgresMigrator-DcP1o-T6.mjs} +7 -2
- package/dist/PostgresMigrator-DcP1o-T6.mjs.map +1 -0
- package/dist/PostgresMigrator-S-YYosAC.d.mts.map +1 -1
- package/dist/PostgresMigrator.cjs +1 -1
- package/dist/PostgresMigrator.mjs +1 -1
- package/dist/{PostgresObjectionMigrator-D_QxXbIN.mjs → PostgresObjectionMigrator-BJ5X48U8.mjs} +2 -2
- package/dist/{PostgresObjectionMigrator-D_QxXbIN.mjs.map → PostgresObjectionMigrator-BJ5X48U8.mjs.map} +1 -1
- package/dist/{PostgresObjectionMigrator-DK8ODIHQ.cjs → PostgresObjectionMigrator-BXLAVVwm.cjs} +2 -2
- package/dist/{PostgresObjectionMigrator-DK8ODIHQ.cjs.map → PostgresObjectionMigrator-BXLAVVwm.cjs.map} +1 -1
- package/dist/PostgresObjectionMigrator.cjs +2 -2
- package/dist/PostgresObjectionMigrator.mjs +2 -2
- package/dist/better-auth.d.mts +2 -2
- package/dist/{directory-CVrfTq1I.d.mts → directory-DGOcVlKD.d.cts} +1 -1
- package/dist/{directory-CVrfTq1I.d.mts.map → directory-DGOcVlKD.d.cts.map} +1 -1
- package/dist/{directory-DAnMWi50.d.cts → directory-YzQUGC5g.d.mts} +3 -3
- package/dist/{directory-DAnMWi50.d.cts.map → directory-YzQUGC5g.d.mts.map} +1 -1
- package/dist/{faker-BcjUfHxx.d.mts → faker-DsYCplsG.d.mts} +3 -3
- package/dist/{faker-BcjUfHxx.d.mts.map → faker-DsYCplsG.d.mts.map} +1 -1
- package/dist/faker.d.mts +1 -1
- package/dist/initScript.cjs +95 -0
- package/dist/initScript.cjs.map +1 -0
- package/dist/initScript.d.cts +45 -0
- package/dist/initScript.d.cts.map +1 -0
- package/dist/initScript.d.mts +45 -0
- package/dist/initScript.d.mts.map +1 -0
- package/dist/initScript.mjs +93 -0
- package/dist/initScript.mjs.map +1 -0
- package/dist/kysely.cjs +2 -2
- package/dist/kysely.d.mts +3 -3
- package/dist/kysely.mjs +2 -2
- package/dist/objection.cjs +2 -2
- package/dist/objection.d.mts +3 -3
- package/dist/objection.mjs +2 -2
- package/dist/os/directory.d.cts +1 -1
- package/dist/os/directory.d.mts +1 -1
- package/dist/os/index.d.cts +1 -1
- package/dist/os/index.d.mts +1 -1
- package/package.json +9 -4
- package/src/PostgresMigrator.ts +9 -1
- package/src/__tests__/initScript.spec.ts +308 -0
- package/src/initScript.ts +119 -0
- package/dist/PostgresMigrator-BjjenqSd.mjs.map +0 -1
- package/dist/PostgresMigrator-D6dQn0x2.cjs.map +0 -1
|
@@ -0,0 +1,45 @@
|
|
|
1
|
+
//#region src/initScript.d.ts
|
|
2
|
+
/**
|
|
3
|
+
* Parse a shell init script (like docker/postgres/init.sh) and extract
|
|
4
|
+
* SQL blocks from heredoc sections (<<-EOSQL ... EOSQL).
|
|
5
|
+
*
|
|
6
|
+
* @param content - The shell script content
|
|
7
|
+
* @param env - Environment variables to substitute ($VAR_NAME references)
|
|
8
|
+
* @returns Array of SQL strings ready to execute
|
|
9
|
+
* @internal Exported for testing
|
|
10
|
+
*/
|
|
11
|
+
declare function parseInitScript(content: string, env: Record<string, string>): string[];
|
|
12
|
+
/**
|
|
13
|
+
* Read a postgres init script, parse out the SQL blocks,
|
|
14
|
+
* substitute environment variables, and execute against a database.
|
|
15
|
+
*
|
|
16
|
+
* This is intended to run `docker/postgres/init.sh` against a test database
|
|
17
|
+
* so that per-app users and schemas are created (matching what Docker does
|
|
18
|
+
* on first volume initialization).
|
|
19
|
+
*
|
|
20
|
+
* Uses `CREATE ... IF NOT EXISTS` and `DO $$ ... END $$` wrappers where
|
|
21
|
+
* needed so the script is idempotent.
|
|
22
|
+
*
|
|
23
|
+
* @param scriptPath - Path to the init.sh file
|
|
24
|
+
* @param databaseUrl - PostgreSQL connection URL (should point to the test database)
|
|
25
|
+
*
|
|
26
|
+
* @example
|
|
27
|
+
* ```typescript
|
|
28
|
+
* // In your globalSetup.ts
|
|
29
|
+
* import { runInitScript } from '@geekmidas/testkit/postgres';
|
|
30
|
+
* import { Credentials } from '@geekmidas/envkit/credentials';
|
|
31
|
+
*
|
|
32
|
+
* const cleanup = await migrator.start();
|
|
33
|
+
*
|
|
34
|
+
* // Create per-app users in the test database
|
|
35
|
+
* await runInitScript('docker/postgres/init.sh', Credentials.DATABASE_URL, {
|
|
36
|
+
* ...process.env,
|
|
37
|
+
* ...Credentials,
|
|
38
|
+
* });
|
|
39
|
+
* ```
|
|
40
|
+
*/
|
|
41
|
+
declare function runInitScript(scriptPath: string, databaseUrl: string, env?: Record<string, string>): Promise<void>;
|
|
42
|
+
//# sourceMappingURL=initScript.d.ts.map
|
|
43
|
+
//#endregion
|
|
44
|
+
export { parseInitScript, runInitScript };
|
|
45
|
+
//# sourceMappingURL=initScript.d.cts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"initScript.d.cts","names":[],"sources":["../src/initScript.ts"],"sourcesContent":[],"mappings":";;AAcA;AA0EA;;;;AAIU;;;iBA9EM,eAAA,uBAEV;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;iBAwEgB,aAAA,gDAGf,yBACJ"}
|
|
@@ -0,0 +1,45 @@
|
|
|
1
|
+
//#region src/initScript.d.ts
|
|
2
|
+
/**
|
|
3
|
+
* Parse a shell init script (like docker/postgres/init.sh) and extract
|
|
4
|
+
* SQL blocks from heredoc sections (<<-EOSQL ... EOSQL).
|
|
5
|
+
*
|
|
6
|
+
* @param content - The shell script content
|
|
7
|
+
* @param env - Environment variables to substitute ($VAR_NAME references)
|
|
8
|
+
* @returns Array of SQL strings ready to execute
|
|
9
|
+
* @internal Exported for testing
|
|
10
|
+
*/
|
|
11
|
+
declare function parseInitScript(content: string, env: Record<string, string>): string[];
|
|
12
|
+
/**
|
|
13
|
+
* Read a postgres init script, parse out the SQL blocks,
|
|
14
|
+
* substitute environment variables, and execute against a database.
|
|
15
|
+
*
|
|
16
|
+
* This is intended to run `docker/postgres/init.sh` against a test database
|
|
17
|
+
* so that per-app users and schemas are created (matching what Docker does
|
|
18
|
+
* on first volume initialization).
|
|
19
|
+
*
|
|
20
|
+
* Uses `CREATE ... IF NOT EXISTS` and `DO $$ ... END $$` wrappers where
|
|
21
|
+
* needed so the script is idempotent.
|
|
22
|
+
*
|
|
23
|
+
* @param scriptPath - Path to the init.sh file
|
|
24
|
+
* @param databaseUrl - PostgreSQL connection URL (should point to the test database)
|
|
25
|
+
*
|
|
26
|
+
* @example
|
|
27
|
+
* ```typescript
|
|
28
|
+
* // In your globalSetup.ts
|
|
29
|
+
* import { runInitScript } from '@geekmidas/testkit/postgres';
|
|
30
|
+
* import { Credentials } from '@geekmidas/envkit/credentials';
|
|
31
|
+
*
|
|
32
|
+
* const cleanup = await migrator.start();
|
|
33
|
+
*
|
|
34
|
+
* // Create per-app users in the test database
|
|
35
|
+
* await runInitScript('docker/postgres/init.sh', Credentials.DATABASE_URL, {
|
|
36
|
+
* ...process.env,
|
|
37
|
+
* ...Credentials,
|
|
38
|
+
* });
|
|
39
|
+
* ```
|
|
40
|
+
*/
|
|
41
|
+
declare function runInitScript(scriptPath: string, databaseUrl: string, env?: Record<string, string>): Promise<void>;
|
|
42
|
+
//# sourceMappingURL=initScript.d.ts.map
|
|
43
|
+
//#endregion
|
|
44
|
+
export { parseInitScript, runInitScript };
|
|
45
|
+
//# sourceMappingURL=initScript.d.mts.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"initScript.d.mts","names":[],"sources":["../src/initScript.ts"],"sourcesContent":[],"mappings":";;AAcA;AA0EA;;;;AAIU;;;iBA9EM,eAAA,uBAEV;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;iBAwEgB,aAAA,gDAGf,yBACJ"}
|
|
@@ -0,0 +1,93 @@
|
|
|
1
|
+
import pg from "pg";
|
|
2
|
+
import { readFileSync } from "node:fs";
|
|
3
|
+
|
|
4
|
+
//#region src/initScript.ts
|
|
5
|
+
const { Client } = pg;
|
|
6
|
+
/**
|
|
7
|
+
* Parse a shell init script (like docker/postgres/init.sh) and extract
|
|
8
|
+
* SQL blocks from heredoc sections (<<-EOSQL ... EOSQL).
|
|
9
|
+
*
|
|
10
|
+
* @param content - The shell script content
|
|
11
|
+
* @param env - Environment variables to substitute ($VAR_NAME references)
|
|
12
|
+
* @returns Array of SQL strings ready to execute
|
|
13
|
+
* @internal Exported for testing
|
|
14
|
+
*/
|
|
15
|
+
function parseInitScript(content, env) {
|
|
16
|
+
const blocks = [];
|
|
17
|
+
const lines = content.split("\n");
|
|
18
|
+
let inHeredoc = false;
|
|
19
|
+
let currentBlock = [];
|
|
20
|
+
for (const line of lines) if (inHeredoc) if (/^\s*EOSQL\s*$/.test(line)) {
|
|
21
|
+
const sql = substituteEnvVars(currentBlock.join("\n"), env);
|
|
22
|
+
blocks.push(sql);
|
|
23
|
+
currentBlock = [];
|
|
24
|
+
inHeredoc = false;
|
|
25
|
+
} else currentBlock.push(line);
|
|
26
|
+
else if (line.includes("<<-EOSQL") || line.includes("<< EOSQL") || line.includes("<<EOSQL")) {
|
|
27
|
+
inHeredoc = true;
|
|
28
|
+
currentBlock = [];
|
|
29
|
+
}
|
|
30
|
+
return blocks;
|
|
31
|
+
}
|
|
32
|
+
/**
|
|
33
|
+
* Replace shell variable references ($VAR_NAME and ${VAR_NAME})
|
|
34
|
+
* with values from the provided env object.
|
|
35
|
+
*/
|
|
36
|
+
function substituteEnvVars(sql, env) {
|
|
37
|
+
let result = sql.replace(/\$\{(\w+)\}/g, (_, name) => env[name] ?? "");
|
|
38
|
+
result = result.replace(/\$(\w+)/g, (_, name) => env[name] ?? "");
|
|
39
|
+
return result;
|
|
40
|
+
}
|
|
41
|
+
/**
|
|
42
|
+
* Read a postgres init script, parse out the SQL blocks,
|
|
43
|
+
* substitute environment variables, and execute against a database.
|
|
44
|
+
*
|
|
45
|
+
* This is intended to run `docker/postgres/init.sh` against a test database
|
|
46
|
+
* so that per-app users and schemas are created (matching what Docker does
|
|
47
|
+
* on first volume initialization).
|
|
48
|
+
*
|
|
49
|
+
* Uses `CREATE ... IF NOT EXISTS` and `DO $$ ... END $$` wrappers where
|
|
50
|
+
* needed so the script is idempotent.
|
|
51
|
+
*
|
|
52
|
+
* @param scriptPath - Path to the init.sh file
|
|
53
|
+
* @param databaseUrl - PostgreSQL connection URL (should point to the test database)
|
|
54
|
+
*
|
|
55
|
+
* @example
|
|
56
|
+
* ```typescript
|
|
57
|
+
* // In your globalSetup.ts
|
|
58
|
+
* import { runInitScript } from '@geekmidas/testkit/postgres';
|
|
59
|
+
* import { Credentials } from '@geekmidas/envkit/credentials';
|
|
60
|
+
*
|
|
61
|
+
* const cleanup = await migrator.start();
|
|
62
|
+
*
|
|
63
|
+
* // Create per-app users in the test database
|
|
64
|
+
* await runInitScript('docker/postgres/init.sh', Credentials.DATABASE_URL, {
|
|
65
|
+
* ...process.env,
|
|
66
|
+
* ...Credentials,
|
|
67
|
+
* });
|
|
68
|
+
* ```
|
|
69
|
+
*/
|
|
70
|
+
async function runInitScript(scriptPath, databaseUrl, env) {
|
|
71
|
+
const content = readFileSync(scriptPath, "utf-8");
|
|
72
|
+
const resolvedEnv = env ?? { ...process.env };
|
|
73
|
+
const blocks = parseInitScript(content, resolvedEnv);
|
|
74
|
+
if (blocks.length === 0) return;
|
|
75
|
+
const url = new URL(databaseUrl);
|
|
76
|
+
const client = new Client({
|
|
77
|
+
user: url.username,
|
|
78
|
+
password: decodeURIComponent(url.password),
|
|
79
|
+
host: url.hostname,
|
|
80
|
+
port: parseInt(url.port, 10),
|
|
81
|
+
database: url.pathname.slice(1)
|
|
82
|
+
});
|
|
83
|
+
try {
|
|
84
|
+
await client.connect();
|
|
85
|
+
for (const sql of blocks) await client.query(sql);
|
|
86
|
+
} finally {
|
|
87
|
+
await client.end();
|
|
88
|
+
}
|
|
89
|
+
}
|
|
90
|
+
|
|
91
|
+
//#endregion
|
|
92
|
+
export { parseInitScript, runInitScript };
|
|
93
|
+
//# sourceMappingURL=initScript.mjs.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"initScript.mjs","names":["content: string","env: Record<string, string>","blocks: string[]","currentBlock: string[]","sql: string","scriptPath: string","databaseUrl: string","env?: Record<string, string>"],"sources":["../src/initScript.ts"],"sourcesContent":["import { readFileSync } from 'node:fs';\nimport pg from 'pg';\n\nconst { Client } = pg;\n\n/**\n * Parse a shell init script (like docker/postgres/init.sh) and extract\n * SQL blocks from heredoc sections (<<-EOSQL ... EOSQL).\n *\n * @param content - The shell script content\n * @param env - Environment variables to substitute ($VAR_NAME references)\n * @returns Array of SQL strings ready to execute\n * @internal Exported for testing\n */\nexport function parseInitScript(\n\tcontent: string,\n\tenv: Record<string, string>,\n): string[] {\n\tconst blocks: string[] = [];\n\tconst lines = content.split('\\n');\n\tlet inHeredoc = false;\n\tlet currentBlock: string[] = [];\n\n\tfor (const line of lines) {\n\t\tif (inHeredoc) {\n\t\t\t// Check for heredoc terminator (EOSQL at start of line, with optional leading whitespace)\n\t\t\tif (/^\\s*EOSQL\\s*$/.test(line)) {\n\t\t\t\tconst sql = substituteEnvVars(currentBlock.join('\\n'), env);\n\t\t\t\tblocks.push(sql);\n\t\t\t\tcurrentBlock = [];\n\t\t\t\tinHeredoc = false;\n\t\t\t} else {\n\t\t\t\tcurrentBlock.push(line);\n\t\t\t}\n\t\t} else if (\n\t\t\tline.includes('<<-EOSQL') ||\n\t\t\tline.includes('<< EOSQL') ||\n\t\t\tline.includes('<<EOSQL')\n\t\t) {\n\t\t\tinHeredoc = true;\n\t\t\tcurrentBlock = [];\n\t\t}\n\t}\n\n\treturn blocks;\n}\n\n/**\n * Replace shell variable references ($VAR_NAME and ${VAR_NAME})\n * with values from the provided env object.\n */\nfunction substituteEnvVars(sql: string, env: Record<string, string>): string {\n\t// Replace ${VAR_NAME} syntax\n\tlet result = sql.replace(/\\$\\{(\\w+)\\}/g, (_, name) => env[name] ?? 
'');\n\t// Replace $VAR_NAME syntax (word boundary after)\n\tresult = result.replace(/\\$(\\w+)/g, (_, name) => env[name] ?? '');\n\treturn result;\n}\n\n/**\n * Read a postgres init script, parse out the SQL blocks,\n * substitute environment variables, and execute against a database.\n *\n * This is intended to run `docker/postgres/init.sh` against a test database\n * so that per-app users and schemas are created (matching what Docker does\n * on first volume initialization).\n *\n * Uses `CREATE ... IF NOT EXISTS` and `DO $$ ... END $$` wrappers where\n * needed so the script is idempotent.\n *\n * @param scriptPath - Path to the init.sh file\n * @param databaseUrl - PostgreSQL connection URL (should point to the test database)\n *\n * @example\n * ```typescript\n * // In your globalSetup.ts\n * import { runInitScript } from '@geekmidas/testkit/postgres';\n * import { Credentials } from '@geekmidas/envkit/credentials';\n *\n * const cleanup = await migrator.start();\n *\n * // Create per-app users in the test database\n * await runInitScript('docker/postgres/init.sh', Credentials.DATABASE_URL, {\n * ...process.env,\n * ...Credentials,\n * });\n * ```\n */\nexport async function runInitScript(\n\tscriptPath: string,\n\tdatabaseUrl: string,\n\tenv?: Record<string, string>,\n): Promise<void> {\n\tconst content = readFileSync(scriptPath, 'utf-8');\n\tconst resolvedEnv = env ?? 
({ ...process.env } as Record<string, string>);\n\tconst blocks = parseInitScript(content, resolvedEnv);\n\n\tif (blocks.length === 0) {\n\t\treturn;\n\t}\n\n\tconst url = new URL(databaseUrl);\n\tconst client = new Client({\n\t\tuser: url.username,\n\t\tpassword: decodeURIComponent(url.password),\n\t\thost: url.hostname,\n\t\tport: parseInt(url.port, 10),\n\t\tdatabase: url.pathname.slice(1),\n\t});\n\n\ttry {\n\t\tawait client.connect();\n\t\tfor (const sql of blocks) {\n\t\t\tawait client.query(sql);\n\t\t}\n\t} finally {\n\t\tawait client.end();\n\t}\n}\n"],"mappings":";;;;AAGA,MAAM,EAAE,QAAQ,GAAG;;;;;;;;;;AAWnB,SAAgB,gBACfA,SACAC,KACW;CACX,MAAMC,SAAmB,CAAE;CAC3B,MAAM,QAAQ,QAAQ,MAAM,KAAK;CACjC,IAAI,YAAY;CAChB,IAAIC,eAAyB,CAAE;AAE/B,MAAK,MAAM,QAAQ,MAClB,KAAI,UAEH,KAAI,gBAAgB,KAAK,KAAK,EAAE;EAC/B,MAAM,MAAM,kBAAkB,aAAa,KAAK,KAAK,EAAE,IAAI;AAC3D,SAAO,KAAK,IAAI;AAChB,iBAAe,CAAE;AACjB,cAAY;CACZ,MACA,cAAa,KAAK,KAAK;UAGxB,KAAK,SAAS,WAAW,IACzB,KAAK,SAAS,WAAW,IACzB,KAAK,SAAS,UAAU,EACvB;AACD,cAAY;AACZ,iBAAe,CAAE;CACjB;AAGF,QAAO;AACP;;;;;AAMD,SAAS,kBAAkBC,KAAaH,KAAqC;CAE5E,IAAI,SAAS,IAAI,QAAQ,gBAAgB,CAAC,GAAG,SAAS,IAAI,SAAS,GAAG;AAEtE,UAAS,OAAO,QAAQ,YAAY,CAAC,GAAG,SAAS,IAAI,SAAS,GAAG;AACjE,QAAO;AACP;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AA+BD,eAAsB,cACrBI,YACAC,aACAC,KACgB;CAChB,MAAM,UAAU,aAAa,YAAY,QAAQ;CACjD,MAAM,cAAc,OAAQ,EAAE,GAAG,QAAQ,IAAK;CAC9C,MAAM,SAAS,gBAAgB,SAAS,YAAY;AAEpD,KAAI,OAAO,WAAW,EACrB;CAGD,MAAM,MAAM,IAAI,IAAI;CACpB,MAAM,SAAS,IAAI,OAAO;EACzB,MAAM,IAAI;EACV,UAAU,mBAAmB,IAAI,SAAS;EAC1C,MAAM,IAAI;EACV,MAAM,SAAS,IAAI,MAAM,GAAG;EAC5B,UAAU,IAAI,SAAS,MAAM,EAAE;CAC/B;AAED,KAAI;AACH,QAAM,OAAO,SAAS;AACtB,OAAK,MAAM,OAAO,OACjB,OAAM,OAAO,MAAM,IAAI;CAExB,UAAS;AACT,QAAM,OAAO,KAAK;CAClB;AACD"}
|
package/dist/kysely.cjs
CHANGED
|
@@ -1,8 +1,8 @@
|
|
|
1
1
|
require('./Factory-BhjUOBWN.cjs');
|
|
2
2
|
const require_faker = require('./faker-B14IEMIN.cjs');
|
|
3
3
|
const require_KyselyFactory = require('./KyselyFactory-BFqVIn_0.cjs');
|
|
4
|
-
require('./PostgresMigrator-
|
|
5
|
-
const require_PostgresKyselyMigrator = require('./PostgresKyselyMigrator-
|
|
4
|
+
require('./PostgresMigrator-CHiBYEg_.cjs');
|
|
5
|
+
const require_PostgresKyselyMigrator = require('./PostgresKyselyMigrator-C7ljZYvq.cjs');
|
|
6
6
|
const require_VitestTransactionIsolator = require('./VitestTransactionIsolator-CMfJXZP8.cjs');
|
|
7
7
|
const require_VitestKyselyTransactionIsolator = require('./VitestKyselyTransactionIsolator-D7RRXOBa.cjs');
|
|
8
8
|
|
package/dist/kysely.d.mts
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
|
-
import { FakerFactory, faker } from "./faker-
|
|
2
|
-
import { ExtractSeedAttrs, FactorySeed } from "./Factory-
|
|
3
|
-
import { KyselyFactory } from "./KyselyFactory-
|
|
1
|
+
import { FakerFactory, faker } from "./faker-DsYCplsG.mjs";
|
|
2
|
+
import { ExtractSeedAttrs, FactorySeed } from "./Factory-CVR3GdkW.mjs";
|
|
3
|
+
import { KyselyFactory } from "./KyselyFactory-DRlMv-WT.mjs";
|
|
4
4
|
import "./PostgresMigrator-S-YYosAC.mjs";
|
|
5
5
|
import { PostgresKyselyMigrator } from "./PostgresKyselyMigrator-DrVWncqd.mjs";
|
|
6
6
|
import { DatabaseConnection, DatabaseFixtures, ExtendedDatabaseFixtures, FixtureCreators, IsolationLevel, TestWithExtendedFixtures, TransactionWrapperOptions } from "./VitestTransactionIsolator-BNWJqh9f.mjs";
|
package/dist/kysely.mjs
CHANGED
|
@@ -1,8 +1,8 @@
|
|
|
1
1
|
import "./Factory-BFVnMMCC.mjs";
|
|
2
2
|
import { faker } from "./faker-BGKYFoCT.mjs";
|
|
3
3
|
import { KyselyFactory } from "./KyselyFactory-DMswpwji.mjs";
|
|
4
|
-
import "./PostgresMigrator-
|
|
5
|
-
import { PostgresKyselyMigrator } from "./PostgresKyselyMigrator-
|
|
4
|
+
import "./PostgresMigrator-DcP1o-T6.mjs";
|
|
5
|
+
import { PostgresKyselyMigrator } from "./PostgresKyselyMigrator-B4pScubb.mjs";
|
|
6
6
|
import { IsolationLevel, extendWithFixtures } from "./VitestTransactionIsolator-DQ7tLqgV.mjs";
|
|
7
7
|
import { VitestKyselyTransactionIsolator } from "./VitestKyselyTransactionIsolator-DceyIqr4.mjs";
|
|
8
8
|
|
package/dist/objection.cjs
CHANGED
|
@@ -1,8 +1,8 @@
|
|
|
1
1
|
require('./Factory-BhjUOBWN.cjs');
|
|
2
2
|
const require_faker = require('./faker-B14IEMIN.cjs');
|
|
3
3
|
const require_ObjectionFactory = require('./ObjectionFactory-BeFBYcan.cjs');
|
|
4
|
-
require('./PostgresMigrator-
|
|
5
|
-
const require_PostgresObjectionMigrator = require('./PostgresObjectionMigrator-
|
|
4
|
+
require('./PostgresMigrator-CHiBYEg_.cjs');
|
|
5
|
+
const require_PostgresObjectionMigrator = require('./PostgresObjectionMigrator-BXLAVVwm.cjs');
|
|
6
6
|
const require_VitestTransactionIsolator = require('./VitestTransactionIsolator-CMfJXZP8.cjs');
|
|
7
7
|
const require_VitestObjectionTransactionIsolator = require('./VitestObjectionTransactionIsolator-CdLRrzNf.cjs');
|
|
8
8
|
|
package/dist/objection.d.mts
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
|
-
import { FakerFactory, faker } from "./faker-
|
|
2
|
-
import { ExtractSeedAttrs, FactorySeed } from "./Factory-
|
|
3
|
-
import { ObjectionFactory } from "./ObjectionFactory-
|
|
1
|
+
import { FakerFactory, faker } from "./faker-DsYCplsG.mjs";
|
|
2
|
+
import { ExtractSeedAttrs, FactorySeed } from "./Factory-CVR3GdkW.mjs";
|
|
3
|
+
import { ObjectionFactory } from "./ObjectionFactory-DkJUf-uM.mjs";
|
|
4
4
|
import "./PostgresMigrator-S-YYosAC.mjs";
|
|
5
5
|
import { PostgresObjectionMigrator } from "./PostgresObjectionMigrator-DVEqB5tp.mjs";
|
|
6
6
|
import { DatabaseConnection, DatabaseFixtures, ExtendedDatabaseFixtures, FixtureCreators, IsolationLevel, TestWithExtendedFixtures, TransactionWrapperOptions } from "./VitestTransactionIsolator-BNWJqh9f.mjs";
|
package/dist/objection.mjs
CHANGED
|
@@ -1,8 +1,8 @@
|
|
|
1
1
|
import "./Factory-BFVnMMCC.mjs";
|
|
2
2
|
import { faker } from "./faker-BGKYFoCT.mjs";
|
|
3
3
|
import { ObjectionFactory } from "./ObjectionFactory-QCJ7u0Ql.mjs";
|
|
4
|
-
import "./PostgresMigrator-
|
|
5
|
-
import { PostgresObjectionMigrator } from "./PostgresObjectionMigrator-
|
|
4
|
+
import "./PostgresMigrator-DcP1o-T6.mjs";
|
|
5
|
+
import { PostgresObjectionMigrator } from "./PostgresObjectionMigrator-BJ5X48U8.mjs";
|
|
6
6
|
import { IsolationLevel, extendWithFixtures } from "./VitestTransactionIsolator-DQ7tLqgV.mjs";
|
|
7
7
|
import { VitestObjectionTransactionIsolator } from "./VitestObjectionTransactionIsolator-OF2osYY5.mjs";
|
|
8
8
|
|
package/dist/os/directory.d.cts
CHANGED
|
@@ -1,2 +1,2 @@
|
|
|
1
|
-
import { DirectoryFixtures, itWithDir } from "../directory-
|
|
1
|
+
import { DirectoryFixtures, itWithDir } from "../directory-DGOcVlKD.cjs";
|
|
2
2
|
export { DirectoryFixtures, itWithDir };
|
package/dist/os/directory.d.mts
CHANGED
|
@@ -1,2 +1,2 @@
|
|
|
1
|
-
import { DirectoryFixtures, itWithDir } from "../directory-
|
|
1
|
+
import { DirectoryFixtures, itWithDir } from "../directory-YzQUGC5g.mjs";
|
|
2
2
|
export { DirectoryFixtures, itWithDir };
|
package/dist/os/index.d.cts
CHANGED
|
@@ -1,2 +1,2 @@
|
|
|
1
|
-
import { itWithDir } from "../directory-
|
|
1
|
+
import { itWithDir } from "../directory-DGOcVlKD.cjs";
|
|
2
2
|
export { itWithDir };
|
package/dist/os/index.d.mts
CHANGED
|
@@ -1,2 +1,2 @@
|
|
|
1
|
-
import { itWithDir } from "../directory-
|
|
1
|
+
import { itWithDir } from "../directory-YzQUGC5g.mjs";
|
|
2
2
|
export { itWithDir };
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@geekmidas/testkit",
|
|
3
|
-
"version": "1.0.
|
|
3
|
+
"version": "1.0.3",
|
|
4
4
|
"private": false,
|
|
5
5
|
"type": "module",
|
|
6
6
|
"exports": {
|
|
@@ -48,6 +48,11 @@
|
|
|
48
48
|
"types": "./dist/benchmark.d.ts",
|
|
49
49
|
"import": "./dist/benchmark.mjs",
|
|
50
50
|
"require": "./dist/benchmark.cjs"
|
|
51
|
+
},
|
|
52
|
+
"./postgres": {
|
|
53
|
+
"types": "./dist/initScript.d.ts",
|
|
54
|
+
"import": "./dist/initScript.mjs",
|
|
55
|
+
"require": "./dist/initScript.cjs"
|
|
51
56
|
}
|
|
52
57
|
},
|
|
53
58
|
"dependencies": {
|
|
@@ -55,7 +60,7 @@
|
|
|
55
60
|
},
|
|
56
61
|
"devDependencies": {
|
|
57
62
|
"@types/pg": "~8.15.4",
|
|
58
|
-
"@geekmidas/envkit": "^1.0.
|
|
63
|
+
"@geekmidas/envkit": "^1.0.3",
|
|
59
64
|
"@geekmidas/logger": "^1.0.0"
|
|
60
65
|
},
|
|
61
66
|
"repository": {
|
|
@@ -75,8 +80,8 @@
|
|
|
75
80
|
"vitest": "~3.2.4",
|
|
76
81
|
"@types/aws-lambda": ">=8.10.92",
|
|
77
82
|
"better-auth": ">=1.3.34",
|
|
78
|
-
"@geekmidas/
|
|
79
|
-
"@geekmidas/
|
|
83
|
+
"@geekmidas/logger": "^1.0.0",
|
|
84
|
+
"@geekmidas/envkit": "^1.0.3"
|
|
80
85
|
},
|
|
81
86
|
"peerDependenciesMeta": {
|
|
82
87
|
"@geekmidas/envkit": {
|
package/src/PostgresMigrator.ts
CHANGED
|
@@ -98,7 +98,15 @@ export abstract class PostgresMigrator {
|
|
|
98
98
|
);
|
|
99
99
|
|
|
100
100
|
if (result.rowCount === 0) {
|
|
101
|
-
|
|
101
|
+
try {
|
|
102
|
+
await db.query(`CREATE DATABASE "${database}"`);
|
|
103
|
+
} catch (error: any) {
|
|
104
|
+
// 42P04 = duplicate_database — another process created it between our check and create
|
|
105
|
+
if (error?.code === '42P04') {
|
|
106
|
+
return { alreadyExisted: true };
|
|
107
|
+
}
|
|
108
|
+
throw error;
|
|
109
|
+
}
|
|
102
110
|
}
|
|
103
111
|
|
|
104
112
|
return {
|
|
@@ -0,0 +1,308 @@
|
|
|
1
|
+
import { writeFileSync } from 'node:fs';
|
|
2
|
+
import { tmpdir } from 'node:os';
|
|
3
|
+
import { join } from 'node:path';
|
|
4
|
+
import pg from 'pg';
|
|
5
|
+
import { afterAll, beforeAll, describe, expect, it } from 'vitest';
|
|
6
|
+
import { parseInitScript, runInitScript } from '../initScript';
|
|
7
|
+
|
|
8
|
+
describe('parseInitScript', () => {
|
|
9
|
+
it('should extract SQL from EOSQL heredoc blocks', () => {
|
|
10
|
+
const script = `#!/bin/bash
|
|
11
|
+
set -e
|
|
12
|
+
|
|
13
|
+
psql -v ON_ERROR_STOP=1 --username "$POSTGRES_USER" --dbname "$POSTGRES_DB" <<-EOSQL
|
|
14
|
+
CREATE USER api WITH PASSWORD '$API_DB_PASSWORD';
|
|
15
|
+
GRANT ALL ON SCHEMA public TO api;
|
|
16
|
+
EOSQL
|
|
17
|
+
`;
|
|
18
|
+
|
|
19
|
+
const blocks = parseInitScript(script, {
|
|
20
|
+
POSTGRES_USER: 'app',
|
|
21
|
+
POSTGRES_DB: 'mydb',
|
|
22
|
+
API_DB_PASSWORD: 'secret123',
|
|
23
|
+
});
|
|
24
|
+
|
|
25
|
+
expect(blocks).toHaveLength(1);
|
|
26
|
+
expect(blocks[0]).toContain("CREATE USER api WITH PASSWORD 'secret123'");
|
|
27
|
+
expect(blocks[0]).toContain('GRANT ALL ON SCHEMA public TO api');
|
|
28
|
+
});
|
|
29
|
+
|
|
30
|
+
it('should extract multiple heredoc blocks', () => {
|
|
31
|
+
const script = `#!/bin/bash
|
|
32
|
+
set -e
|
|
33
|
+
|
|
34
|
+
psql --username "$POSTGRES_USER" --dbname "$POSTGRES_DB" <<-EOSQL
|
|
35
|
+
CREATE USER api WITH PASSWORD '$API_DB_PASSWORD';
|
|
36
|
+
EOSQL
|
|
37
|
+
|
|
38
|
+
psql --username "$POSTGRES_USER" --dbname "$POSTGRES_DB" <<-EOSQL
|
|
39
|
+
CREATE USER auth WITH PASSWORD '$AUTH_DB_PASSWORD';
|
|
40
|
+
CREATE SCHEMA auth AUTHORIZATION auth;
|
|
41
|
+
EOSQL
|
|
42
|
+
|
|
43
|
+
echo "Done!"
|
|
44
|
+
`;
|
|
45
|
+
|
|
46
|
+
const blocks = parseInitScript(script, {
|
|
47
|
+
POSTGRES_USER: 'app',
|
|
48
|
+
POSTGRES_DB: 'mydb',
|
|
49
|
+
API_DB_PASSWORD: 'apipass',
|
|
50
|
+
AUTH_DB_PASSWORD: 'authpass',
|
|
51
|
+
});
|
|
52
|
+
|
|
53
|
+
expect(blocks).toHaveLength(2);
|
|
54
|
+
expect(blocks[0]).toContain("CREATE USER api WITH PASSWORD 'apipass'");
|
|
55
|
+
expect(blocks[1]).toContain("CREATE USER auth WITH PASSWORD 'authpass'");
|
|
56
|
+
expect(blocks[1]).toContain('CREATE SCHEMA auth AUTHORIZATION auth');
|
|
57
|
+
});
|
|
58
|
+
|
|
59
|
+
it('should substitute $VAR_NAME syntax', () => {
|
|
60
|
+
const script = `psql <<-EOSQL
|
|
61
|
+
CREATE USER $APP_USER WITH PASSWORD '$APP_PASSWORD';
|
|
62
|
+
EOSQL`;
|
|
63
|
+
|
|
64
|
+
const blocks = parseInitScript(script, {
|
|
65
|
+
APP_USER: 'myuser',
|
|
66
|
+
APP_PASSWORD: 'mypass',
|
|
67
|
+
});
|
|
68
|
+
|
|
69
|
+
expect(blocks[0]).toContain("CREATE USER myuser WITH PASSWORD 'mypass'");
|
|
70
|
+
});
|
|
71
|
+
|
|
72
|
+
it('should substitute ${VAR_NAME} syntax', () => {
|
|
73
|
+
const script = `psql <<-EOSQL
|
|
74
|
+
CREATE USER \${APP_USER} WITH PASSWORD '\${APP_PASSWORD}';
|
|
75
|
+
EOSQL`;
|
|
76
|
+
|
|
77
|
+
const blocks = parseInitScript(script, {
|
|
78
|
+
APP_USER: 'myuser',
|
|
79
|
+
APP_PASSWORD: 'mypass',
|
|
80
|
+
});
|
|
81
|
+
|
|
82
|
+
expect(blocks[0]).toContain("CREATE USER myuser WITH PASSWORD 'mypass'");
|
|
83
|
+
});
|
|
84
|
+
|
|
85
|
+
it('should replace missing vars with empty string', () => {
|
|
86
|
+
const script = `psql <<-EOSQL
|
|
87
|
+
CREATE USER api WITH PASSWORD '$MISSING_VAR';
|
|
88
|
+
EOSQL`;
|
|
89
|
+
|
|
90
|
+
const blocks = parseInitScript(script, {});
|
|
91
|
+
|
|
92
|
+
expect(blocks[0]).toContain("CREATE USER api WITH PASSWORD ''");
|
|
93
|
+
});
|
|
94
|
+
|
|
95
|
+
it('should return empty array for script with no heredocs', () => {
|
|
96
|
+
const script = `#!/bin/bash
|
|
97
|
+
echo "Hello world"
|
|
98
|
+
`;
|
|
99
|
+
|
|
100
|
+
const blocks = parseInitScript(script, {});
|
|
101
|
+
|
|
102
|
+
expect(blocks).toEqual([]);
|
|
103
|
+
});
|
|
104
|
+
|
|
105
|
+
it('should handle the full generated init.sh format', () => {
|
|
106
|
+
const script = `#!/bin/bash
|
|
107
|
+
set -e
|
|
108
|
+
|
|
109
|
+
# Auto-generated PostgreSQL init script
|
|
110
|
+
# Creates per-app users with separate schemas in a single database
|
|
111
|
+
# - api: uses public schema
|
|
112
|
+
# - auth: uses auth schema (search_path=auth)
|
|
113
|
+
|
|
114
|
+
# Create api user (uses public schema)
|
|
115
|
+
echo "Creating user api..."
|
|
116
|
+
psql -v ON_ERROR_STOP=1 --username "$POSTGRES_USER" --dbname "$POSTGRES_DB" <<-EOSQL
|
|
117
|
+
CREATE USER api WITH PASSWORD '$API_DB_PASSWORD';
|
|
118
|
+
GRANT ALL ON SCHEMA public TO api;
|
|
119
|
+
ALTER DEFAULT PRIVILEGES IN SCHEMA public GRANT ALL ON TABLES TO api;
|
|
120
|
+
ALTER DEFAULT PRIVILEGES IN SCHEMA public GRANT ALL ON SEQUENCES TO api;
|
|
121
|
+
EOSQL
|
|
122
|
+
|
|
123
|
+
|
|
124
|
+
# Create auth user with dedicated schema
|
|
125
|
+
echo "Creating user auth with schema auth..."
|
|
126
|
+
psql -v ON_ERROR_STOP=1 --username "$POSTGRES_USER" --dbname "$POSTGRES_DB" <<-EOSQL
|
|
127
|
+
CREATE USER auth WITH PASSWORD '$AUTH_DB_PASSWORD';
|
|
128
|
+
CREATE SCHEMA auth AUTHORIZATION auth;
|
|
129
|
+
ALTER USER auth SET search_path TO auth;
|
|
130
|
+
GRANT USAGE ON SCHEMA auth TO auth;
|
|
131
|
+
GRANT ALL ON ALL TABLES IN SCHEMA auth TO auth;
|
|
132
|
+
GRANT ALL ON ALL SEQUENCES IN SCHEMA auth TO auth;
|
|
133
|
+
ALTER DEFAULT PRIVILEGES IN SCHEMA auth GRANT ALL ON TABLES TO auth;
|
|
134
|
+
ALTER DEFAULT PRIVILEGES IN SCHEMA auth GRANT ALL ON SEQUENCES TO auth;
|
|
135
|
+
EOSQL
|
|
136
|
+
|
|
137
|
+
echo "Database initialization complete!"
|
|
138
|
+
`;
|
|
139
|
+
|
|
140
|
+
const blocks = parseInitScript(script, {
|
|
141
|
+
POSTGRES_USER: 'app',
|
|
142
|
+
POSTGRES_DB: 'residentman_dev_test',
|
|
143
|
+
API_DB_PASSWORD: 'apipass123',
|
|
144
|
+
AUTH_DB_PASSWORD: 'authpass456',
|
|
145
|
+
});
|
|
146
|
+
|
|
147
|
+
expect(blocks).toHaveLength(2);
|
|
148
|
+
|
|
149
|
+
// API block
|
|
150
|
+
expect(blocks[0]).toContain("CREATE USER api WITH PASSWORD 'apipass123'");
|
|
151
|
+
expect(blocks[0]).toContain('GRANT ALL ON SCHEMA public TO api');
|
|
152
|
+
expect(blocks[0]).toContain('ALTER DEFAULT PRIVILEGES IN SCHEMA public');
|
|
153
|
+
|
|
154
|
+
// Auth block
|
|
155
|
+
expect(blocks[1]).toContain("CREATE USER auth WITH PASSWORD 'authpass456'");
|
|
156
|
+
expect(blocks[1]).toContain('CREATE SCHEMA auth AUTHORIZATION auth');
|
|
157
|
+
expect(blocks[1]).toContain('ALTER USER auth SET search_path TO auth');
|
|
158
|
+
});
|
|
159
|
+
});
|
|
160
|
+
|
|
161
|
+
const PG_CONFIG = {
|
|
162
|
+
host: 'localhost',
|
|
163
|
+
port: 5432,
|
|
164
|
+
user: 'geekmidas',
|
|
165
|
+
password: 'geekmidas',
|
|
166
|
+
};
|
|
167
|
+
|
|
168
|
+
/**
|
|
169
|
+
* Helper to run queries against the postgres admin database.
|
|
170
|
+
*/
|
|
171
|
+
async function adminQuery(...queries: string[]): Promise<void> {
|
|
172
|
+
const client = new pg.Client({ ...PG_CONFIG, database: 'postgres' });
|
|
173
|
+
try {
|
|
174
|
+
await client.connect();
|
|
175
|
+
for (const sql of queries) {
|
|
176
|
+
await client.query(sql);
|
|
177
|
+
}
|
|
178
|
+
} finally {
|
|
179
|
+
await client.end();
|
|
180
|
+
}
|
|
181
|
+
}
|
|
182
|
+
|
|
183
|
+
/**
|
|
184
|
+
* Force-drop a role by reassigning owned objects and revoking privileges first.
|
|
185
|
+
*/
|
|
186
|
+
async function forceDropRole(role: string): Promise<void> {
|
|
187
|
+
// Find all databases where this role might own objects
|
|
188
|
+
const client = new pg.Client({ ...PG_CONFIG, database: 'postgres' });
|
|
189
|
+
try {
|
|
190
|
+
await client.connect();
|
|
191
|
+
const result = await client.query(
|
|
192
|
+
"SELECT datname FROM pg_database WHERE datistemplate = false AND datname != 'postgres'",
|
|
193
|
+
);
|
|
194
|
+
for (const row of result.rows) {
|
|
195
|
+
const dbClient = new pg.Client({
|
|
196
|
+
...PG_CONFIG,
|
|
197
|
+
database: row.datname,
|
|
198
|
+
});
|
|
199
|
+
try {
|
|
200
|
+
await dbClient.connect();
|
|
201
|
+
await dbClient.query(`REASSIGN OWNED BY ${role} TO ${PG_CONFIG.user}`);
|
|
202
|
+
await dbClient.query(`DROP OWNED BY ${role}`);
|
|
203
|
+
} catch {
|
|
204
|
+
// Role might not exist in this database, ignore
|
|
205
|
+
} finally {
|
|
206
|
+
await dbClient.end();
|
|
207
|
+
}
|
|
208
|
+
}
|
|
209
|
+
await client.query(`DROP ROLE IF EXISTS ${role}`);
|
|
210
|
+
} finally {
|
|
211
|
+
await client.end();
|
|
212
|
+
}
|
|
213
|
+
}
|
|
214
|
+
|
|
215
|
+
/**
 * Integration tests for runInitScript: write a docker-entrypoint-style
 * init script to a temp file, run it against a throwaway database, then
 * verify the roles/schemas it creates by connecting as each new role.
 * NOTE(review): requires a reachable local Postgres (see PG_CONFIG) —
 * these are integration tests, not unit tests.
 */
describe('runInitScript', () => {
  // Unique database name per run so crashed/parallel runs cannot collide.
  const dbName = `test_init_script_${Date.now()}`;
  const dbUrl = `postgresql://${PG_CONFIG.user}:${PG_CONFIG.password}@${PG_CONFIG.host}:${PG_CONFIG.port}/${dbName}`;

  beforeAll(async () => {
    // Clean up stale state from previous failed runs
    await forceDropRole('test_api');
    await forceDropRole('test_auth');
    await adminQuery(
      `DROP DATABASE IF EXISTS "${dbName}"`,
      `CREATE DATABASE "${dbName}"`,
    );
  });

  afterAll(async () => {
    // Drop the database first (removes object dependencies), then roles
    await adminQuery(`DROP DATABASE IF EXISTS "${dbName}"`);
    await forceDropRole('test_api');
    await forceDropRole('test_auth');
  });

  it('should create users and schemas from init script', async () => {
    // Write a test init script to a temp file
    // NOTE(review): the temp script is never deleted — harmless in tmpdir,
    // but consider unlinking it when the test finishes.
    const scriptPath = join(tmpdir(), `init-${Date.now()}.sh`);
    writeFileSync(
      scriptPath,
      `#!/bin/bash
set -e

psql -v ON_ERROR_STOP=1 --username "$POSTGRES_USER" --dbname "$POSTGRES_DB" <<-EOSQL
CREATE USER test_api WITH PASSWORD '$API_DB_PASSWORD';
GRANT ALL ON SCHEMA public TO test_api;
ALTER DEFAULT PRIVILEGES IN SCHEMA public GRANT ALL ON TABLES TO test_api;
ALTER DEFAULT PRIVILEGES IN SCHEMA public GRANT ALL ON SEQUENCES TO test_api;
EOSQL

psql -v ON_ERROR_STOP=1 --username "$POSTGRES_USER" --dbname "$POSTGRES_DB" <<-EOSQL
CREATE USER test_auth WITH PASSWORD '$AUTH_DB_PASSWORD';
CREATE SCHEMA IF NOT EXISTS test_auth AUTHORIZATION test_auth;
ALTER USER test_auth SET search_path TO test_auth;
GRANT USAGE ON SCHEMA test_auth TO test_auth;
GRANT ALL ON ALL TABLES IN SCHEMA test_auth TO test_auth;
GRANT ALL ON ALL SEQUENCES IN SCHEMA test_auth TO test_auth;
ALTER DEFAULT PRIVILEGES IN SCHEMA test_auth GRANT ALL ON TABLES TO test_auth;
ALTER DEFAULT PRIVILEGES IN SCHEMA test_auth GRANT ALL ON SEQUENCES TO test_auth;
EOSQL

echo "Done!"
`,
    );

    // Run the init script against the test database
    await runInitScript(scriptPath, dbUrl, {
      POSTGRES_USER: PG_CONFIG.user,
      POSTGRES_DB: dbName,
      API_DB_PASSWORD: 'apipass',
      AUTH_DB_PASSWORD: 'authpass',
    });

    // Verify: connect as test_api and check access to public schema
    const apiClient = new pg.Client({
      ...PG_CONFIG,
      user: 'test_api',
      password: 'apipass',
      database: dbName,
    });
    await apiClient.connect();
    // Being able to create a table proves the GRANT ALL on schema public
    // from the first heredoc block took effect.
    await apiClient.query(
      'CREATE TABLE IF NOT EXISTS api_test_table (id serial PRIMARY KEY)',
    );
    const apiResult = await apiClient.query(
      "SELECT table_name FROM information_schema.tables WHERE table_schema = 'public' AND table_name = 'api_test_table'",
    );
    expect(apiResult.rowCount).toBe(1);
    await apiClient.end();

    // Verify: connect as test_auth and check dedicated schema
    const authClient = new pg.Client({
      ...PG_CONFIG,
      user: 'test_auth',
      password: 'authpass',
      database: dbName,
    });
    await authClient.connect();
    const schemaResult = await authClient.query(
      "SELECT schema_name FROM information_schema.schemata WHERE schema_name = 'test_auth'",
    );
    expect(schemaResult.rowCount).toBe(1);
    // The script sets search_path to test_auth for this role; the table is
    // still schema-qualified here for clarity.
    await authClient.query(
      'CREATE TABLE IF NOT EXISTS test_auth.auth_test_table (id serial PRIMARY KEY)',
    );
    await authClient.end();
  });
});
|