@stonyx/orm 0.2.1-beta.86 → 0.2.1-beta.87
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/mysql/migration-runner.js +5 -0
- package/dist/postgres/connection.js +3 -1
- package/dist/postgres/migration-runner.js +5 -0
- package/dist/postgres/type-map.js +3 -0
- package/dist/timescale/query-builder.d.ts +2 -0
- package/dist/timescale/query-builder.js +30 -2
- package/package.json +1 -1
- package/src/mysql/migration-runner.ts +5 -0
- package/src/postgres/connection.ts +3 -1
- package/src/postgres/migration-runner.ts +5 -0
- package/src/postgres/type-map.ts +4 -0
- package/src/timescale/query-builder.ts +39 -2
package/dist/mysql/migration-runner.js
CHANGED
@@ -1,6 +1,8 @@
 import { fileExists } from '@stonyx/utils/file';
 import fs from 'fs/promises';
+import { validateIdentifier } from './query-builder.js';
 export async function ensureMigrationsTable(pool, tableName = '__migrations') {
+  validateIdentifier(tableName, 'migration table name');
   await pool.execute(`
     CREATE TABLE IF NOT EXISTS \`${tableName}\` (
       id INT AUTO_INCREMENT PRIMARY KEY,
@@ -10,6 +12,7 @@ export async function ensureMigrationsTable(pool, tableName = '__migrations') {
   `);
 }
 export async function getAppliedMigrations(pool, tableName = '__migrations') {
+  validateIdentifier(tableName, 'migration table name');
   const [rows] = await pool.execute(`SELECT filename FROM \`${tableName}\` ORDER BY id ASC`);
   return rows.map(row => row.filename);
 }
@@ -37,6 +40,7 @@ export function parseMigrationFile(content) {
   return { up, down };
 }
 export async function applyMigration(pool, filename, upSql, tableName = '__migrations') {
+  validateIdentifier(tableName, 'migration table name');
   const connection = await pool.getConnection();
   try {
     await connection.beginTransaction();
@@ -57,6 +61,7 @@ export async function applyMigration(pool, filename, upSql, tableName = '__migra
   }
 }
 export async function rollbackMigration(pool, filename, downSql, tableName = '__migrations') {
+  validateIdentifier(tableName, 'migration table name');
   const connection = await pool.getConnection();
   try {
     await connection.beginTransaction();
package/dist/postgres/connection.js
CHANGED
@@ -1,3 +1,4 @@
+import { validateIdentifier } from './query-builder.js';
 let pool = null;
 /**
  * Create or return the singleton pg Pool.
@@ -18,7 +19,8 @@ export async function getPool(pgConfig, extensions = ['vector']) {
   });
   // Enable requested PostgreSQL extensions
   for (const ext of extensions) {
-
+    validateIdentifier(ext, 'extension name');
+    await pool.query(`CREATE EXTENSION IF NOT EXISTS "${ext}"`);
   }
   return pool;
 }
package/dist/postgres/migration-runner.js
CHANGED
@@ -1,6 +1,8 @@
 import { fileExists } from '@stonyx/utils/file';
 import fs from 'fs/promises';
+import { validateIdentifier } from './query-builder.js';
 export async function ensureMigrationsTable(pool, tableName = '__migrations') {
+  validateIdentifier(tableName, 'migration table name');
   await pool.query(`
     CREATE TABLE IF NOT EXISTS "${tableName}" (
       id INTEGER GENERATED ALWAYS AS IDENTITY PRIMARY KEY,
@@ -10,6 +12,7 @@ export async function ensureMigrationsTable(pool, tableName = '__migrations') {
   `);
 }
 export async function getAppliedMigrations(pool, tableName = '__migrations') {
+  validateIdentifier(tableName, 'migration table name');
   const result = await pool.query(`SELECT filename FROM "${tableName}" ORDER BY id ASC`);
   return result.rows.map(row => row.filename);
 }
@@ -37,6 +40,7 @@ export function parseMigrationFile(content) {
   return { up, down };
 }
 export async function applyMigration(pool, filename, upSql, tableName = '__migrations') {
+  validateIdentifier(tableName, 'migration table name');
   const client = await pool.connect();
   try {
     await client.query('BEGIN');
@@ -56,6 +60,7 @@ export async function applyMigration(pool, filename, upSql, tableName = '__migra
   }
 }
 export async function rollbackMigration(pool, filename, downSql, tableName = '__migrations') {
+  validateIdentifier(tableName, 'migration table name');
   const client = await pool.connect();
   try {
     await client.query('BEGIN');
package/dist/postgres/type-map.js
CHANGED
@@ -36,6 +36,9 @@ export function getPgType(attrType, transformFn) {
  * Returns a vector column type for the given dimensions.
  */
 export function getVectorType(dimensions) {
+  if (!Number.isInteger(dimensions) || dimensions < 1 || dimensions > 16000) {
+    throw new Error(`Invalid vector dimensions: ${dimensions}. Must be an integer between 1 and 16000.`);
+  }
   return `vector(${dimensions})`;
 }
 function mysqlTypeToPg(mysqlType) {
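For context, a minimal sketch of what the new guard changes in practice. The deep import path is an assumption; the function and its error message come from the diff above, and the dimension values are made up:

import { getVectorType } from '@stonyx/orm/postgres/type-map'; // import path assumed

console.log(getVectorType(1536)); // "vector(1536)", unchanged for valid input

// Non-integers, values below 1, and values above 16000 (the upper bound
// pgvector documents for the vector type) now throw before the type string
// is ever built.
for (const bad of [0, 1.5, 999999]) {
  try {
    getVectorType(bad);
  } catch (err) {
    console.error((err as Error).message); // Invalid vector dimensions: ...
  }
}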
package/dist/timescale/query-builder.d.ts
CHANGED
@@ -1,4 +1,6 @@
 export { validateIdentifier, buildInsert, buildUpdate, buildDelete, buildSelect } from '../postgres/query-builder.js';
+export declare function validateInterval(interval: string, context?: string): string;
+export declare function validateAggregate(expr: string, context?: string): string;
 interface QueryResult {
     sql: string;
     values: unknown[];
package/dist/timescale/query-builder.js
CHANGED
@@ -1,6 +1,20 @@
 // Re-export all base PostgreSQL query builders
 export { validateIdentifier, buildInsert, buildUpdate, buildDelete, buildSelect } from '../postgres/query-builder.js';
 import { validateIdentifier } from '../postgres/query-builder.js';
+const SAFE_INTERVAL = /^\d+\s+(microsecond|millisecond|second|minute|hour|day|week|month|year)s?$/i;
+export function validateInterval(interval, context = 'interval') {
+  if (!interval || typeof interval !== 'string' || !SAFE_INTERVAL.test(interval.trim())) {
+    throw new Error(`Invalid SQL ${context}: "${interval}". Intervals must match pattern like "7 days", "1 hour", "30 minutes".`);
+  }
+  return interval.trim();
+}
+const SAFE_AGGREGATE = /^(COUNT|SUM|AVG|MIN|MAX|FIRST|LAST)\s*\(\s*("?[a-zA-Z_][a-zA-Z0-9_]*"?|\*)\s*\)\s*(AS\s+"?[a-zA-Z_][a-zA-Z0-9_]*"?)?$/i;
+export function validateAggregate(expr, context = 'aggregate') {
+  if (!expr || typeof expr !== 'string' || !SAFE_AGGREGATE.test(expr.trim())) {
+    throw new Error(`Invalid SQL ${context}: "${expr}". Aggregates must be simple function calls like "AVG(value) AS avg_value".`);
+  }
+  return expr.trim();
+}
 /**
  * Build a CREATE TABLE + hypertable conversion statement.
  * TimescaleDB hypertables are regular tables converted via create_hypertable().
@@ -9,6 +23,7 @@ export function buildCreateHypertable(table, timeColumn, options = {}) {
   validateIdentifier(table, 'table name');
   validateIdentifier(timeColumn, 'column name');
   const { chunkInterval = '7 days' } = options;
+  validateInterval(chunkInterval, 'chunk interval');
   const sql = `SELECT create_hypertable('"${table}"', '${timeColumn}', chunk_time_interval => INTERVAL '${chunkInterval}', if_not_exists => TRUE)`;
   return { sql, values: [] };
 }
@@ -24,7 +39,7 @@ export function buildTimeBucket(table, timeColumn, bucketSize, options = {}) {
   const selectCols = [`time_bucket($${paramIndex++}, "${timeColumn}") AS bucket`];
   values.push(bucketSize);
   for (const agg of aggregates) {
-    selectCols.push(agg);
+    selectCols.push(validateAggregate(agg));
   }
   const whereClauses = [];
   if (where) {
@@ -35,7 +50,17 @@ export function buildTimeBucket(table, timeColumn, bucketSize, options = {}) {
     }
   }
   const whereStr = whereClauses.length > 0 ? ` WHERE ${whereClauses.join(' AND ')}` : '';
-
+  let orderStr = '';
+  if (orderBy) {
+    const parts = orderBy.trim().split(/\s+/);
+    const col = parts[0];
+    const dir = parts[1]?.toUpperCase();
+    validateIdentifier(col, 'ORDER BY column');
+    if (dir && dir !== 'ASC' && dir !== 'DESC') {
+      throw new Error(`Invalid ORDER BY direction: "${dir}". Must be ASC or DESC.`);
+    }
+    orderStr = ` ORDER BY "${col}"${dir ? ` ${dir}` : ''}`;
+  }
   let limitStr = '';
   if (limit != null) {
     limitStr = ` LIMIT $${paramIndex++}`;
@@ -52,6 +77,8 @@ export function buildContinuousAggregate(viewName, table, timeColumn, bucketSize
   validateIdentifier(table, 'table name');
   validateIdentifier(timeColumn, 'column name');
   const { withNoData = false } = options;
+  validateInterval(bucketSize, 'bucket size');
+  aggregates.forEach(agg => validateAggregate(agg));
   const selectCols = [
     `time_bucket('${bucketSize}', "${timeColumn}") AS bucket`,
     ...aggregates,
@@ -65,6 +92,7 @@ export function buildContinuousAggregate(viewName, table, timeColumn, bucketSize
  */
 export function buildCompressionPolicy(table, compressAfter) {
   validateIdentifier(table, 'table name');
+  validateInterval(compressAfter, 'compress after interval');
   const sql = `SELECT add_compression_policy('"${table}"', INTERVAL '${compressAfter}', if_not_exists => TRUE)`;
   return { sql };
 }
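For context, a minimal sketch of the two validators introduced here. The deep import path is an assumption; the function names, accepted patterns, and error messages come from the diff above, and the sample inputs are made up:

import { validateInterval, validateAggregate } from '@stonyx/orm/timescale/query-builder'; // path assumed

// Intervals must be "<number> <unit>", e.g. "7 days", "1 hour", "30 minutes".
console.log(validateInterval('7 days'));                   // "7 days"
// Aggregates must be a single COUNT/SUM/AVG/MIN/MAX/FIRST/LAST call,
// optionally with an alias.
console.log(validateAggregate('AVG(value) AS avg_value')); // "AVG(value) AS avg_value"

// Anything else throws, which is what keeps interval and aggregate options
// from being interpolated into the generated SQL unchecked.
for (const bad of ["1 day'; DROP TABLE metrics; --", '0.5 days']) {
  try {
    validateInterval(bad);
  } catch (err) {
    console.error((err as Error).message); // Invalid SQL interval: ...
  }
}
try {
  validateAggregate('pg_sleep(10)');
} catch (err) {
  console.error((err as Error).message); // Invalid SQL aggregate: ...
}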
package/package.json
CHANGED
package/src/mysql/migration-runner.ts
CHANGED
@@ -2,6 +2,7 @@ import { readFile, fileExists } from '@stonyx/utils/file';
 import path from 'path';
 import fs from 'fs/promises';
 import type { Pool, PoolConnection } from 'mysql2/promise';
+import { validateIdentifier } from './query-builder.js';

 interface ParsedMigration {
   up: string;
@@ -9,6 +10,7 @@ interface ParsedMigration {
 }

 export async function ensureMigrationsTable(pool: Pool, tableName: string = '__migrations'): Promise<void> {
+  validateIdentifier(tableName, 'migration table name');
   await pool.execute(`
     CREATE TABLE IF NOT EXISTS \`${tableName}\` (
       id INT AUTO_INCREMENT PRIMARY KEY,
@@ -19,6 +21,7 @@ export async function ensureMigrationsTable(pool: Pool, tableName: string = '__m
 }

 export async function getAppliedMigrations(pool: Pool, tableName: string = '__migrations'): Promise<string[]> {
+  validateIdentifier(tableName, 'migration table name');
   const [rows] = await pool.execute(
     `SELECT filename FROM \`${tableName}\` ORDER BY id ASC`
   ) as [Array<{ filename: string }>, unknown];
@@ -56,6 +59,7 @@ export function parseMigrationFile(content: string): ParsedMigration {
 }

 export async function applyMigration(pool: Pool, filename: string, upSql: string, tableName: string = '__migrations'): Promise<void> {
+  validateIdentifier(tableName, 'migration table name');
   const connection = await pool.getConnection();

   try {
@@ -83,6 +87,7 @@ export async function applyMigration(pool: Pool, filename: string, upSql: string
 }

 export async function rollbackMigration(pool: Pool, filename: string, downSql: string, tableName: string = '__migrations'): Promise<void> {
+  validateIdentifier(tableName, 'migration table name');
   const connection = await pool.getConnection();

   try {
package/src/postgres/connection.ts
CHANGED
@@ -1,4 +1,5 @@
 import type { Pool as PgPool } from 'pg';
+import { validateIdentifier } from './query-builder.js';

 interface PgConfig {
   host: string;
@@ -32,7 +33,8 @@ export async function getPool(pgConfig: PgConfig, extensions: string[] = ['vecto

   // Enable requested PostgreSQL extensions
   for (const ext of extensions) {
-
+    validateIdentifier(ext, 'extension name');
+    await pool.query(`CREATE EXTENSION IF NOT EXISTS "${ext}"`);
   }

   return pool;
package/src/postgres/migration-runner.ts
CHANGED
@@ -2,8 +2,10 @@ import { readFile, fileExists } from '@stonyx/utils/file';
 import path from 'path';
 import fs from 'fs/promises';
 import type { Pool, PoolClient } from 'pg';
+import { validateIdentifier } from './query-builder.js';

 export async function ensureMigrationsTable(pool: Pool, tableName: string = '__migrations'): Promise<void> {
+  validateIdentifier(tableName, 'migration table name');
   await pool.query(`
     CREATE TABLE IF NOT EXISTS "${tableName}" (
       id INTEGER GENERATED ALWAYS AS IDENTITY PRIMARY KEY,
@@ -14,6 +16,7 @@ export async function ensureMigrationsTable(pool: Pool, tableName: string = '__m
 }

 export async function getAppliedMigrations(pool: Pool, tableName: string = '__migrations'): Promise<string[]> {
+  validateIdentifier(tableName, 'migration table name');
   const result = await pool.query(
     `SELECT filename FROM "${tableName}" ORDER BY id ASC`
   );
@@ -51,6 +54,7 @@ export function parseMigrationFile(content: string): { up: string; down: string
 }

 export async function applyMigration(pool: Pool, filename: string, upSql: string, tableName: string = '__migrations'): Promise<void> {
+  validateIdentifier(tableName, 'migration table name');
   const client: PoolClient = await pool.connect();

   try {
@@ -77,6 +81,7 @@ export async function applyMigration(pool: Pool, filename: string, upSql: string
 }

 export async function rollbackMigration(pool: Pool, filename: string, downSql: string, tableName: string = '__migrations'): Promise<void> {
+  validateIdentifier(tableName, 'migration table name');
   const client: PoolClient = await pool.connect();

   try {
package/src/postgres/type-map.ts
CHANGED
@@ -42,6 +42,10 @@ export function getPgType(attrType: string, transformFn?: TransformFn): string {
  * Returns a vector column type for the given dimensions.
  */
 export function getVectorType(dimensions: number): string {
+  if (!Number.isInteger(dimensions) || dimensions < 1 || dimensions > 16000) {
+    throw new Error(`Invalid vector dimensions: ${dimensions}. Must be an integer between 1 and 16000.`);
+  }
+
   return `vector(${dimensions})`;
 }

package/src/timescale/query-builder.ts
CHANGED
@@ -3,6 +3,26 @@ export { validateIdentifier, buildInsert, buildUpdate, buildDelete, buildSelect

 import { validateIdentifier } from '../postgres/query-builder.js';

+const SAFE_INTERVAL = /^\d+\s+(microsecond|millisecond|second|minute|hour|day|week|month|year)s?$/i;
+
+export function validateInterval(interval: string, context: string = 'interval'): string {
+  if (!interval || typeof interval !== 'string' || !SAFE_INTERVAL.test(interval.trim())) {
+    throw new Error(`Invalid SQL ${context}: "${interval}". Intervals must match pattern like "7 days", "1 hour", "30 minutes".`);
+  }
+
+  return interval.trim();
+}
+
+const SAFE_AGGREGATE = /^(COUNT|SUM|AVG|MIN|MAX|FIRST|LAST)\s*\(\s*("?[a-zA-Z_][a-zA-Z0-9_]*"?|\*)\s*\)\s*(AS\s+"?[a-zA-Z_][a-zA-Z0-9_]*"?)?$/i;
+
+export function validateAggregate(expr: string, context: string = 'aggregate'): string {
+  if (!expr || typeof expr !== 'string' || !SAFE_AGGREGATE.test(expr.trim())) {
+    throw new Error(`Invalid SQL ${context}: "${expr}". Aggregates must be simple function calls like "AVG(value) AS avg_value".`);
+  }
+
+  return expr.trim();
+}
+
 interface QueryResult {
   sql: string;
   values: unknown[];
@@ -36,6 +56,7 @@ export function buildCreateHypertable(table: string, timeColumn: string, options
   validateIdentifier(timeColumn, 'column name');

   const { chunkInterval = '7 days' } = options;
+  validateInterval(chunkInterval, 'chunk interval');

   const sql = `SELECT create_hypertable('"${table}"', '${timeColumn}', chunk_time_interval => INTERVAL '${chunkInterval}', if_not_exists => TRUE)`;

@@ -57,7 +78,7 @@ export function buildTimeBucket(table: string, timeColumn: string, bucketSize: s
   values.push(bucketSize);

   for (const agg of aggregates) {
-    selectCols.push(agg);
+    selectCols.push(validateAggregate(agg));
   }

   const whereClauses: string[] = [];
@@ -70,7 +91,20 @@ export function buildTimeBucket(table: string, timeColumn: string, bucketSize: s
   }

   const whereStr = whereClauses.length > 0 ? ` WHERE ${whereClauses.join(' AND ')}` : '';
-
+  let orderStr = '';
+  if (orderBy) {
+    const parts = orderBy.trim().split(/\s+/);
+    const col = parts[0];
+    const dir = parts[1]?.toUpperCase();
+
+    validateIdentifier(col, 'ORDER BY column');
+
+    if (dir && dir !== 'ASC' && dir !== 'DESC') {
+      throw new Error(`Invalid ORDER BY direction: "${dir}". Must be ASC or DESC.`);
+    }
+
+    orderStr = ` ORDER BY "${col}"${dir ? ` ${dir}` : ''}`;
+  }
   let limitStr = '';
   if (limit != null) {
     limitStr = ` LIMIT $${paramIndex++}`;
@@ -91,6 +125,8 @@ export function buildContinuousAggregate(viewName: string, table: string, timeCo
   validateIdentifier(timeColumn, 'column name');

   const { withNoData = false } = options;
+  validateInterval(bucketSize, 'bucket size');
+  aggregates.forEach(agg => validateAggregate(agg));

   const selectCols: string[] = [
     `time_bucket('${bucketSize}', "${timeColumn}") AS bucket`,
@@ -109,6 +145,7 @@ export function buildContinuousAggregate(viewName: string, table: string, timeCo
  */
 export function buildCompressionPolicy(table: string, compressAfter: string): SqlResult {
   validateIdentifier(table, 'table name');
+  validateInterval(compressAfter, 'compress after interval');

   const sql = `SELECT add_compression_policy('"${table}"', INTERVAL '${compressAfter}', if_not_exists => TRUE)`;

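Taken together, the ORDER BY handling above means buildTimeBucket no longer splices a caller-supplied ORDER BY clause into the statement verbatim. A minimal usage sketch; the deep import path is an assumption, the option names are inferred from the destructured variables in the diff, and the table, column, and values are made up:

import { buildTimeBucket } from '@stonyx/orm/timescale/query-builder'; // import path assumed

// Accepted: a bare column name, optionally followed by ASC or DESC.
const query = buildTimeBucket('metrics', 'time', '5 minutes', {
  aggregates: ['AVG(value) AS avg_value'],
  orderBy: 'bucket DESC',
  limit: 100,
});
console.log(query.sql, query.values);

// Rejected as of beta.87: anything other than "<column> [ASC|DESC]" throws
// before any SQL string is built.
try {
  buildTimeBucket('metrics', 'time', '5 minutes', {
    orderBy: 'bucket DESC; DROP TABLE metrics',
  });
} catch (err) {
  console.error((err as Error).message); // Invalid ORDER BY direction: ...
}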