@usebetterdev/audit-cli 0.7.0 → 0.8.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/check.d.ts +2 -2
- package/dist/check.js +3 -3
- package/dist/{chunk-6HCQ22YY.js → chunk-AWFOUH4H.js} +30 -5
- package/dist/chunk-AWFOUH4H.js.map +1 -0
- package/dist/chunk-EEYS3G5Y.js +15 -0
- package/dist/chunk-EEYS3G5Y.js.map +1 -0
- package/dist/{chunk-IJDJMOCO.js → chunk-GHOKL227.js} +34 -1
- package/dist/chunk-GHOKL227.js.map +1 -0
- package/dist/{chunk-TLZWAZAJ.js → chunk-O4577NVP.js} +13 -9
- package/dist/chunk-O4577NVP.js.map +1 -0
- package/dist/{chunk-AURUOQDN.js → chunk-PGOZIYS5.js} +12 -8
- package/dist/chunk-PGOZIYS5.js.map +1 -0
- package/dist/chunk-Q23AEDWF.js +231 -0
- package/dist/chunk-Q23AEDWF.js.map +1 -0
- package/dist/{chunk-HV3X7C45.js → chunk-WYLZXWZ4.js} +7 -8
- package/dist/chunk-WYLZXWZ4.js.map +1 -0
- package/dist/cli.js +12 -33
- package/dist/cli.js.map +1 -1
- package/dist/export.d.ts +3 -1
- package/dist/export.js +5 -2
- package/dist/migrate.d.ts +2 -0
- package/dist/migrate.js +3 -3
- package/dist/purge.js +3 -2
- package/package.json +7 -4
- package/dist/chunk-6HCQ22YY.js.map +0 -1
- package/dist/chunk-AURUOQDN.js.map +0 -1
- package/dist/chunk-HV3X7C45.js.map +0 -1
- package/dist/chunk-IJDJMOCO.js.map +0 -1
- package/dist/chunk-O5LHE2AC.js +0 -119
- package/dist/chunk-O5LHE2AC.js.map +0 -1
- package/dist/chunk-TLZWAZAJ.js.map +0 -1
|
@@ -0,0 +1,231 @@
|
|
|
1
|
+
// src/generate-sql.ts
|
|
2
|
+
import {
|
|
3
|
+
AUDIT_LOG_SCHEMA
|
|
4
|
+
} from "@usebetterdev/audit-core";
|
|
5
|
+
// Index definitions for the audit_logs table.
// Kept in sync with the drizzle schema's index declarations; each entry
// pairs an index name with the ordered list of columns it covers.
var INDEX_DEFINITIONS = [
  { name: "audit_logs_table_name_timestamp_idx", columns: ["table_name", "timestamp"] },
  { name: "audit_logs_actor_id_idx", columns: ["actor_id"] },
  { name: "audit_logs_record_id_idx", columns: ["record_id"] },
  { name: "audit_logs_table_name_record_id_idx", columns: ["table_name", "record_id"] },
  { name: "audit_logs_operation_idx", columns: ["operation"] },
  { name: "audit_logs_timestamp_idx", columns: ["timestamp"] },
  { name: "audit_logs_timestamp_id_idx", columns: ["timestamp", "id"] }
];
|
|
20
|
+
// Quote an SQL identifier (table, column, or index name) for the target
// dialect: backticks for MySQL, double quotes otherwise. Any embedded quote
// character is doubled so the name cannot terminate the quoting early.
function escapeIdentifier(name, dialect) {
  const quote = dialect === "mysql" ? "`" : '"';
  const doubled = name.split(quote).join(quote + quote);
  return quote + doubled + quote;
}
|
|
26
|
+
// Translate a core column type to the concrete SQL type for a dialect.
// Lookup order (type first, then dialect) matches the original contract:
// an unknown type throws, an unknown dialect yields undefined.
function sqlType(type, dialect) {
  const TYPE_TABLE = {
    uuid:        { postgres: "UUID",        mysql: "CHAR(36)",    sqlite: "TEXT" },
    timestamptz: { postgres: "TIMESTAMPTZ", mysql: "DATETIME(6)", sqlite: "TEXT" },
    text:        { postgres: "TEXT",        mysql: "TEXT",        sqlite: "TEXT" },
    jsonb:       { postgres: "JSONB",       mysql: "JSON",        sqlite: "TEXT" },
    boolean:     { postgres: "BOOLEAN",     mysql: "BOOLEAN",     sqlite: "INTEGER" },
    integer:     { postgres: "INTEGER",     mysql: "BIGINT",      sqlite: "INTEGER" }
  };
  const perDialect = TYPE_TABLE[type];
  return perDialect[dialect];
}
|
|
41
|
+
// Translate a canonical default expression to its dialect-specific form.
// Known expressions are mapped per dialect (sqlite has no uuid default, so
// gen_random_uuid() yields undefined there). Numeric literals and the
// TRUE/FALSE constants pass through unchanged on every dialect; anything
// else passes through verbatim with a warning.
function defaultExpression(expression, dialect) {
  const KNOWN = {
    "gen_random_uuid()": {
      postgres: "gen_random_uuid()",
      mysql: "(UUID())",
      sqlite: void 0
    },
    "now()": {
      postgres: "now()",
      mysql: "CURRENT_TIMESTAMP(6)",
      sqlite: "(datetime('now'))"
    }
  };
  const translations = KNOWN[expression];
  if (translations !== void 0) {
    return translations[dialect];
  }
  const isUniversalLiteral = /^-?\d+(\.\d+)?$/.test(expression) || expression === "TRUE" || expression === "FALSE";
  if (!isUniversalLiteral) {
    console.warn(
      `[better-audit] Warning: unrecognized default expression "${expression}" \u2014 passing through verbatim for ${dialect}. It may not be dialect-compatible.`
    );
  }
  return expression;
}
|
|
66
|
+
// Render a single column DDL fragment, e.g.
//   "id" UUID NOT NULL DEFAULT gen_random_uuid()
// NOT NULL is appended unless the definition is nullable; the DEFAULT clause
// is appended only when the default translates to something on this dialect.
function columnDdl(name, definition, dialect) {
  const fragments = [
    escapeIdentifier(name, dialect),
    sqlType(definition.type, dialect)
  ];
  if (!definition.nullable) {
    fragments.push("NOT NULL");
  }
  const rawDefault = definition.defaultExpression;
  if (rawDefault !== void 0) {
    const translated = defaultExpression(rawDefault, dialect);
    if (translated !== void 0) {
      fragments.push(`DEFAULT ${translated}`);
    }
  }
  return fragments.join(" ");
}
|
|
81
|
+
// Build one CREATE INDEX statement for the audit_logs table.
// MySQL does not support an IF NOT EXISTS clause on CREATE INDEX (unlike
// Postgres and SQLite), so a plain CREATE INDEX is emitted for that dialect;
// previously the generated MySQL migration was a syntax error.
function indexDdl(indexDef, dialect) {
  const cols = indexDef.columns.map((c) => escapeIdentifier(c, dialect)).join(", ");
  const tableName = escapeIdentifier(AUDIT_LOG_SCHEMA.tableName, dialect);
  const indexName = escapeIdentifier(indexDef.name, dialect);
  if (dialect === "mysql") {
    return `CREATE INDEX ${indexName} ON ${tableName} (${cols});`;
  }
  return `CREATE INDEX IF NOT EXISTS ${indexName} ON ${tableName} (${cols});`;
}
|
|
87
|
+
// Generate the complete migration script for the audit_logs table in the
// requested dialect: a header comment, CREATE TABLE IF NOT EXISTS with every
// column from AUDIT_LOG_SCHEMA (plus the PRIMARY KEY constraint when a column
// is marked primaryKey), followed by all index statements.
function generateMigrationSql(dialect) {
  const { tableName, columns } = AUDIT_LOG_SCHEMA;
  const quotedTable = escapeIdentifier(tableName, dialect);
  const bodyLines = [];
  let pkColumn;
  for (const [columnName, definition] of Object.entries(columns)) {
    bodyLines.push(`  ${columnDdl(columnName, definition, dialect)}`);
    if (definition.primaryKey === true) {
      pkColumn = columnName;
    }
  }
  if (pkColumn !== void 0) {
    bodyLines.push(
      `  PRIMARY KEY (${escapeIdentifier(pkColumn, dialect)})`
    );
  }
  const script = [
    `-- better-audit: ${dialect} migration for ${tableName}`,
    `-- Generated by @usebetterdev/audit-cli`,
    "",
    `CREATE TABLE IF NOT EXISTS ${quotedTable} (`,
    bodyLines.join(",\n"),
    ");",
    "",
    ...INDEX_DEFINITIONS.map((indexDef) => indexDdl(indexDef, dialect)),
    ""
  ];
  return script.join("\n");
}
|
|
118
|
+
// Lookup table from a plugin's high-level field type to core's column type.
var FIELD_TYPE_TO_COLUMN_TYPE = {
  string:  "text",
  number:  "integer",
  boolean: "boolean",
  date:    "timestamptz",
  json:    "jsonb"
};
|
|
125
|
+
// Resolve a plugin field type to the corresponding core column type via the
// module-level lookup table.
function fieldTypeToColumnType(type) {
  const columnType = FIELD_TYPE_TO_COLUMN_TYPE[type];
  return columnType;
}
|
|
128
|
+
// Convert a plugin field definition into a ColumnDefinition consumable by
// columnDdl().
//
// - required: true maps to NOT NULL (nullable: false).
// - Literal (non-null, non-function) defaults become SQL DEFAULT expressions.
//   String defaults are single-quoted with embedded single quotes doubled
//   (SQL-standard escaping); previously a default containing an apostrophe
//   (e.g. "it's") produced broken SQL and an injection vector.
// - A date field with any default — literal or a function such as
//   () => new Date() — maps to the canonical now() expression, which is
//   translated per dialect later by defaultExpression().
function pluginFieldToColumnDef(field) {
  const colType = fieldTypeToColumnType(field.type);
  const def = {
    type: colType,
    nullable: field.required !== true,
    description: ""
  };
  if (field.defaultValue !== void 0 && field.defaultValue !== null && typeof field.defaultValue !== "function") {
    if (field.type === "date") {
      def.defaultExpression = "now()";
    } else if (field.type === "number") {
      def.defaultExpression = String(field.defaultValue);
    } else if (field.type === "string") {
      // Double embedded single quotes so the literal cannot terminate early.
      def.defaultExpression = `'${String(field.defaultValue).replaceAll("'", "''")}'`;
    } else if (field.type === "boolean") {
      def.defaultExpression = field.defaultValue ? "TRUE" : "FALSE";
    }
  }
  if (field.type === "date" && typeof field.defaultValue === "function") {
    def.defaultExpression = "now()";
  }
  return def;
}
|
|
151
|
+
// Generate CREATE TABLE (with inline UNIQUE constraints) plus per-field
// CREATE INDEX statements for a single plugin-declared table.
// MySQL does not support IF NOT EXISTS on CREATE INDEX (unlike Postgres and
// SQLite), so the clause is omitted for that dialect; previously the
// generated MySQL statement was a syntax error.
function generatePluginTableSql(tableName, table, dialect) {
  const quotedTable = escapeIdentifier(tableName, dialect);
  const columnLines = [];
  const uniqueConstraints = [];
  const indexStatements = [];
  // MySQL lacks CREATE INDEX IF NOT EXISTS.
  const ifNotExists = dialect === "mysql" ? "" : "IF NOT EXISTS ";
  for (const [fieldName, field] of Object.entries(table.fields)) {
    const colDef = pluginFieldToColumnDef(field);
    columnLines.push(`  ${columnDdl(fieldName, colDef, dialect)}`);
    if (field.unique === true) {
      uniqueConstraints.push(
        `  UNIQUE (${escapeIdentifier(fieldName, dialect)})`
      );
    }
    if (field.index === true) {
      const idxName = `${tableName}_${fieldName}_idx`;
      indexStatements.push(
        `CREATE INDEX ${ifNotExists}${escapeIdentifier(idxName, dialect)} ON ${quotedTable} (${escapeIdentifier(fieldName, dialect)});`
      );
    }
  }
  const allLines = [...columnLines, ...uniqueConstraints];
  const parts = [
    `CREATE TABLE IF NOT EXISTS ${quotedTable} (`,
    allLines.join(",\n"),
    ");"
  ];
  for (const idx of indexStatements) {
    parts.push(idx);
  }
  return parts.join("\n");
}
|
|
182
|
+
// Emit ALTER TABLE ... ADD COLUMN statements for plugin extensions to an
// existing table. Only Postgres supports ADD COLUMN IF NOT EXISTS; MySQL
// and SQLite get a plain ADD COLUMN.
function generateExtensionSql(tableName, fields, dialect) {
  const quotedTable = escapeIdentifier(tableName, dialect);
  const addClause = dialect === "postgres" ? "ADD COLUMN IF NOT EXISTS" : "ADD COLUMN";
  const statements = [];
  for (const [fieldName, field] of Object.entries(fields)) {
    const columnSql = columnDdl(fieldName, pluginFieldToColumnDef(field), dialect);
    statements.push(`ALTER TABLE ${quotedTable} ${addClause} ${columnSql};`);
  }
  return statements.join("\n");
}
|
|
200
|
+
// Build the combined migration SQL for every plugin-declared table and every
// extension to an existing table. Returns "" when the merged schema declares
// neither tables nor extensions.
function generatePluginMigrationSql(schema, dialect) {
  const declaredTables = Object.keys(schema.tables);
  const extendedTables = Object.keys(schema.extend);
  if (declaredTables.length === 0 && extendedTables.length === 0) {
    return "";
  }
  const chunks = [
    "",
    "-- Plugin tables",
    "-- Generated from better.config.ts plugins",
    ""
  ];
  for (const [tableName, tableDef] of Object.entries(schema.tables)) {
    chunks.push(generatePluginTableSql(tableName, tableDef, dialect), "");
  }
  if (extendedTables.length > 0) {
    chunks.push("-- Extensions to existing tables");
    for (const [tableName, fields] of Object.entries(schema.extend)) {
      chunks.push(generateExtensionSql(tableName, fields, dialect));
    }
    chunks.push("");
  }
  return chunks.join("\n");
}
|
|
225
|
+
|
|
226
|
+
export {
|
|
227
|
+
INDEX_DEFINITIONS,
|
|
228
|
+
generateMigrationSql,
|
|
229
|
+
generatePluginMigrationSql
|
|
230
|
+
};
|
|
231
|
+
//# sourceMappingURL=chunk-Q23AEDWF.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../src/generate-sql.ts"],"sourcesContent":["/**\n * Multi-dialect SQL generation for the `audit_logs` table.\n *\n * Reads the declarative `AUDIT_LOG_SCHEMA` from audit-core and produces\n * dialect-appropriate DDL for Postgres, MySQL, and SQLite.\n */\n\nimport {\n AUDIT_LOG_SCHEMA,\n type ColumnDefinition,\n type ColumnType,\n} from \"@usebetterdev/audit-core\";\nimport type {\n FieldType,\n PluginFieldDefinition,\n PluginTableDefinition,\n} from \"@usebetterdev/plugin\";\nimport type { MergedPluginSchema } from \"@usebetterdev/plugin\";\n\nexport type { DatabaseDialect } from \"./detect-adapter.js\";\nimport type { DatabaseDialect } from \"./detect-adapter.js\";\n\n/**\n * Index definitions matching `packages/audit/drizzle/src/schema.ts`.\n * Each entry has a name and the list of column names it covers.\n */\nexport const INDEX_DEFINITIONS: ReadonlyArray<{\n name: string;\n columns: ReadonlyArray<string>;\n}> = [\n {\n name: \"audit_logs_table_name_timestamp_idx\",\n columns: [\"table_name\", \"timestamp\"],\n },\n { name: \"audit_logs_actor_id_idx\", columns: [\"actor_id\"] },\n { name: \"audit_logs_record_id_idx\", columns: [\"record_id\"] },\n {\n name: \"audit_logs_table_name_record_id_idx\",\n columns: [\"table_name\", \"record_id\"],\n },\n { name: \"audit_logs_operation_idx\", columns: [\"operation\"] },\n { name: \"audit_logs_timestamp_idx\", columns: [\"timestamp\"] },\n { name: \"audit_logs_timestamp_id_idx\", columns: [\"timestamp\", \"id\"] },\n];\n\n/** Escape an identifier (table name, column name, index name) for safe SQL interpolation. */\nfunction escapeIdentifier(name: string, dialect: DatabaseDialect): string {\n if (dialect === \"mysql\") {\n return `\\`${name.replaceAll(\"`\", \"``\")}\\``;\n }\n return `\"${name.replaceAll('\"', '\"\"')}\"`;\n}\n\n/** Map a core ColumnType to a dialect-specific SQL type. 
*/\nfunction sqlType(type: ColumnType, dialect: DatabaseDialect): string {\n const map: Record<ColumnType, Record<DatabaseDialect, string>> = {\n uuid: { postgres: \"UUID\", mysql: \"CHAR(36)\", sqlite: \"TEXT\" },\n timestamptz: {\n postgres: \"TIMESTAMPTZ\",\n mysql: \"DATETIME(6)\",\n sqlite: \"TEXT\",\n },\n text: { postgres: \"TEXT\", mysql: \"TEXT\", sqlite: \"TEXT\" },\n jsonb: { postgres: \"JSONB\", mysql: \"JSON\", sqlite: \"TEXT\" },\n boolean: { postgres: \"BOOLEAN\", mysql: \"BOOLEAN\", sqlite: \"INTEGER\" },\n integer: { postgres: \"INTEGER\", mysql: \"BIGINT\", sqlite: \"INTEGER\" },\n };\n return map[type][dialect];\n}\n\n/** Map a core default expression to a dialect-specific SQL expression. */\nfunction defaultExpression(\n expression: string,\n dialect: DatabaseDialect,\n): string | undefined {\n const map: Record<string, Record<DatabaseDialect, string | undefined>> = {\n \"gen_random_uuid()\": {\n postgres: \"gen_random_uuid()\",\n mysql: \"(UUID())\",\n sqlite: undefined,\n },\n \"now()\": {\n postgres: \"now()\",\n mysql: \"CURRENT_TIMESTAMP(6)\",\n sqlite: \"(datetime('now'))\",\n },\n };\n const entry = map[expression];\n if (entry === undefined) {\n // Numeric literals and simple SQL constants are dialect-universal\n if (/^-?\\d+(\\.\\d+)?$/.test(expression) || expression === \"TRUE\" || expression === \"FALSE\") {\n return expression;\n }\n console.warn(\n `[better-audit] Warning: unrecognized default expression \"${expression}\" — ` +\n `passing through verbatim for ${dialect}. It may not be dialect-compatible.`,\n );\n return expression;\n }\n return entry[dialect];\n}\n\n/** Build a single column DDL fragment (e.g. `\"id\" UUID NOT NULL DEFAULT gen_random_uuid()`). 
*/\nfunction columnDdl(\n name: string,\n definition: ColumnDefinition,\n dialect: DatabaseDialect,\n): string {\n const parts: string[] = [];\n\n parts.push(escapeIdentifier(name, dialect));\n parts.push(sqlType(definition.type, dialect));\n\n if (!definition.nullable) {\n parts.push(\"NOT NULL\");\n }\n\n if (definition.defaultExpression !== undefined) {\n const expr = defaultExpression(definition.defaultExpression, dialect);\n if (expr !== undefined) {\n parts.push(`DEFAULT ${expr}`);\n }\n }\n\n return parts.join(\" \");\n}\n\n/** Build a single index DDL statement. */\nfunction indexDdl(\n indexDef: { name: string; columns: ReadonlyArray<string> },\n dialect: DatabaseDialect,\n): string {\n const cols = indexDef.columns.map((c) => escapeIdentifier(c, dialect)).join(\", \");\n const tableName = escapeIdentifier(AUDIT_LOG_SCHEMA.tableName, dialect);\n const indexName = escapeIdentifier(indexDef.name, dialect);\n\n return `CREATE INDEX IF NOT EXISTS ${indexName} ON ${tableName} (${cols});`;\n}\n\n/**\n * Generate the full migration SQL for the `audit_logs` table in the given dialect.\n *\n * Includes `CREATE TABLE IF NOT EXISTS` and all indexes.\n */\nexport function generateMigrationSql(dialect: DatabaseDialect): string {\n const { tableName, columns } = AUDIT_LOG_SCHEMA;\n const quotedTable = escapeIdentifier(tableName, dialect);\n\n // Column definitions\n const columnLines: string[] = [];\n let primaryKeyColumn: string | undefined;\n\n for (const [name, definition] of Object.entries(columns)) {\n columnLines.push(` ${columnDdl(name, definition, dialect)}`);\n if (definition.primaryKey === true) {\n primaryKeyColumn = name;\n }\n }\n\n // Primary key constraint\n if (primaryKeyColumn !== undefined) {\n columnLines.push(\n ` PRIMARY KEY (${escapeIdentifier(primaryKeyColumn, dialect)})`,\n );\n }\n\n const parts: string[] = [\n `-- better-audit: ${dialect} migration for ${tableName}`,\n `-- Generated by @usebetterdev/audit-cli`,\n \"\",\n `CREATE TABLE IF NOT 
EXISTS ${quotedTable} (`,\n columnLines.join(\",\\n\"),\n \");\",\n \"\",\n ];\n\n // Index definitions\n for (const idx of INDEX_DEFINITIONS) {\n parts.push(indexDdl(idx, dialect));\n }\n\n parts.push(\"\");\n return parts.join(\"\\n\");\n}\n\n// ---------------------------------------------------------------------------\n// Plugin schema → SQL\n// ---------------------------------------------------------------------------\n\nconst FIELD_TYPE_TO_COLUMN_TYPE: Record<FieldType, ColumnType> = {\n string: \"text\",\n number: \"integer\",\n boolean: \"boolean\",\n date: \"timestamptz\",\n json: \"jsonb\",\n};\n\n/** Map a plugin's high-level FieldType to core's ColumnType. */\nexport function fieldTypeToColumnType(type: FieldType): ColumnType {\n return FIELD_TYPE_TO_COLUMN_TYPE[type];\n}\n\n/** Convert a PluginFieldDefinition to a ColumnDefinition suitable for columnDdl(). */\nfunction pluginFieldToColumnDef(field: PluginFieldDefinition): ColumnDefinition {\n const colType = fieldTypeToColumnType(field.type);\n const def: ColumnDefinition = {\n type: colType,\n nullable: field.required !== true,\n description: \"\",\n };\n\n if (field.defaultValue !== undefined && field.defaultValue !== null && typeof field.defaultValue !== \"function\") {\n if (field.type === \"date\") {\n def.defaultExpression = \"now()\";\n } else if (field.type === \"number\") {\n def.defaultExpression = String(field.defaultValue);\n } else if (field.type === \"string\") {\n def.defaultExpression = `'${String(field.defaultValue)}'`;\n } else if (field.type === \"boolean\") {\n def.defaultExpression = field.defaultValue ? \"TRUE\" : \"FALSE\";\n }\n }\n\n // Date fields with a function defaultValue (e.g. 
() => new Date()) map to now()\n if (field.type === \"date\" && typeof field.defaultValue === \"function\") {\n def.defaultExpression = \"now()\";\n }\n\n return def;\n}\n\n/**\n * Generate CREATE TABLE + indexes for a single plugin-declared table.\n */\nexport function generatePluginTableSql(\n tableName: string,\n table: PluginTableDefinition,\n dialect: DatabaseDialect,\n): string {\n const quotedTable = escapeIdentifier(tableName, dialect);\n const columnLines: string[] = [];\n const uniqueConstraints: string[] = [];\n const indexStatements: string[] = [];\n\n for (const [fieldName, field] of Object.entries(table.fields)) {\n const colDef = pluginFieldToColumnDef(field);\n columnLines.push(` ${columnDdl(fieldName, colDef, dialect)}`);\n\n if (field.unique === true) {\n uniqueConstraints.push(\n ` UNIQUE (${escapeIdentifier(fieldName, dialect)})`,\n );\n }\n\n if (field.index === true) {\n const idxName = `${tableName}_${fieldName}_idx`;\n indexStatements.push(\n `CREATE INDEX IF NOT EXISTS ${escapeIdentifier(idxName, dialect)} ON ${quotedTable} (${escapeIdentifier(fieldName, dialect)});`,\n );\n }\n }\n\n const allLines = [...columnLines, ...uniqueConstraints];\n\n const parts: string[] = [\n `CREATE TABLE IF NOT EXISTS ${quotedTable} (`,\n allLines.join(\",\\n\"),\n \");\",\n ];\n\n for (const idx of indexStatements) {\n parts.push(idx);\n }\n\n return parts.join(\"\\n\");\n}\n\n/**\n * Generate ALTER TABLE ... 
ADD COLUMN statements for plugin schema extensions.\n */\nexport function generateExtensionSql(\n tableName: string,\n fields: Record<string, PluginFieldDefinition>,\n dialect: DatabaseDialect,\n): string {\n const quotedTable = escapeIdentifier(tableName, dialect);\n const statements: string[] = [];\n\n for (const [fieldName, field] of Object.entries(fields)) {\n const colDef = pluginFieldToColumnDef(field);\n const colSql = columnDdl(fieldName, colDef, dialect);\n\n if (dialect === \"postgres\") {\n statements.push(\n `ALTER TABLE ${quotedTable} ADD COLUMN IF NOT EXISTS ${colSql};`,\n );\n } else {\n // MySQL and SQLite don't support IF NOT EXISTS on ADD COLUMN\n statements.push(\n `ALTER TABLE ${quotedTable} ADD COLUMN ${colSql};`,\n );\n }\n }\n\n return statements.join(\"\\n\");\n}\n\n/**\n * Generate migration SQL for all plugin-declared tables and extensions.\n * Returns empty string if the merged schema has no tables or extensions.\n */\nexport function generatePluginMigrationSql(\n schema: MergedPluginSchema,\n dialect: DatabaseDialect,\n): string {\n const hasTables = Object.keys(schema.tables).length > 0;\n const hasExtensions = Object.keys(schema.extend).length > 0;\n\n if (!hasTables && !hasExtensions) {\n return \"\";\n }\n\n const parts: string[] = [\n \"\",\n \"-- Plugin tables\",\n \"-- Generated from better.config.ts plugins\",\n \"\",\n ];\n\n for (const [tableName, tableDef] of Object.entries(schema.tables)) {\n parts.push(generatePluginTableSql(tableName, tableDef, dialect));\n parts.push(\"\");\n }\n\n if (hasExtensions) {\n parts.push(\"-- Extensions to existing tables\");\n for (const [tableName, fields] of Object.entries(schema.extend)) {\n parts.push(generateExtensionSql(tableName, fields, dialect));\n }\n parts.push(\"\");\n }\n\n return 
parts.join(\"\\n\");\n}\n"],"mappings":";AAOA;AAAA,EACE;AAAA,OAGK;AAeA,IAAM,oBAGR;AAAA,EACH;AAAA,IACE,MAAM;AAAA,IACN,SAAS,CAAC,cAAc,WAAW;AAAA,EACrC;AAAA,EACA,EAAE,MAAM,2BAA2B,SAAS,CAAC,UAAU,EAAE;AAAA,EACzD,EAAE,MAAM,4BAA4B,SAAS,CAAC,WAAW,EAAE;AAAA,EAC3D;AAAA,IACE,MAAM;AAAA,IACN,SAAS,CAAC,cAAc,WAAW;AAAA,EACrC;AAAA,EACA,EAAE,MAAM,4BAA4B,SAAS,CAAC,WAAW,EAAE;AAAA,EAC3D,EAAE,MAAM,4BAA4B,SAAS,CAAC,WAAW,EAAE;AAAA,EAC3D,EAAE,MAAM,+BAA+B,SAAS,CAAC,aAAa,IAAI,EAAE;AACtE;AAGA,SAAS,iBAAiB,MAAc,SAAkC;AACxE,MAAI,YAAY,SAAS;AACvB,WAAO,KAAK,KAAK,WAAW,KAAK,IAAI,CAAC;AAAA,EACxC;AACA,SAAO,IAAI,KAAK,WAAW,KAAK,IAAI,CAAC;AACvC;AAGA,SAAS,QAAQ,MAAkB,SAAkC;AACnE,QAAM,MAA2D;AAAA,IAC/D,MAAM,EAAE,UAAU,QAAQ,OAAO,YAAY,QAAQ,OAAO;AAAA,IAC5D,aAAa;AAAA,MACX,UAAU;AAAA,MACV,OAAO;AAAA,MACP,QAAQ;AAAA,IACV;AAAA,IACA,MAAM,EAAE,UAAU,QAAQ,OAAO,QAAQ,QAAQ,OAAO;AAAA,IACxD,OAAO,EAAE,UAAU,SAAS,OAAO,QAAQ,QAAQ,OAAO;AAAA,IAC1D,SAAS,EAAE,UAAU,WAAW,OAAO,WAAW,QAAQ,UAAU;AAAA,IACpE,SAAS,EAAE,UAAU,WAAW,OAAO,UAAU,QAAQ,UAAU;AAAA,EACrE;AACA,SAAO,IAAI,IAAI,EAAE,OAAO;AAC1B;AAGA,SAAS,kBACP,YACA,SACoB;AACpB,QAAM,MAAmE;AAAA,IACvE,qBAAqB;AAAA,MACnB,UAAU;AAAA,MACV,OAAO;AAAA,MACP,QAAQ;AAAA,IACV;AAAA,IACA,SAAS;AAAA,MACP,UAAU;AAAA,MACV,OAAO;AAAA,MACP,QAAQ;AAAA,IACV;AAAA,EACF;AACA,QAAM,QAAQ,IAAI,UAAU;AAC5B,MAAI,UAAU,QAAW;AAEvB,QAAI,kBAAkB,KAAK,UAAU,KAAK,eAAe,UAAU,eAAe,SAAS;AACzF,aAAO;AAAA,IACT;AACA,YAAQ;AAAA,MACN,4DAA4D,UAAU,yCACtC,OAAO;AAAA,IACzC;AACA,WAAO;AAAA,EACT;AACA,SAAO,MAAM,OAAO;AACtB;AAGA,SAAS,UACP,MACA,YACA,SACQ;AACR,QAAM,QAAkB,CAAC;AAEzB,QAAM,KAAK,iBAAiB,MAAM,OAAO,CAAC;AAC1C,QAAM,KAAK,QAAQ,WAAW,MAAM,OAAO,CAAC;AAE5C,MAAI,CAAC,WAAW,UAAU;AACxB,UAAM,KAAK,UAAU;AAAA,EACvB;AAEA,MAAI,WAAW,sBAAsB,QAAW;AAC9C,UAAM,OAAO,kBAAkB,WAAW,mBAAmB,OAAO;AACpE,QAAI,SAAS,QAAW;AACtB,YAAM,KAAK,WAAW,IAAI,EAAE;AAAA,IAC9B;AAAA,EACF;AAEA,SAAO,MAAM,KAAK,GAAG;AACvB;AAGA,SAAS,SACP,UACA,SACQ;AACR,QAAM,OAAO,SAAS,QAAQ,IAAI,CAAC,MAAM,iBAAiB,GAAG,OAAO,CAAC,EAAE,KAAK,IAAI;AAChF,QAAM,YAAY,iBAAiB,iBAAiB,WAAW,OAAO;AACtE,QAAM,YAAY,iBAAiB,SAAS,MAAM,OAAO;AAEzD,SAAO,8BAA
8B,SAAS,OAAO,SAAS,KAAK,IAAI;AACzE;AAOO,SAAS,qBAAqB,SAAkC;AACrE,QAAM,EAAE,WAAW,QAAQ,IAAI;AAC/B,QAAM,cAAc,iBAAiB,WAAW,OAAO;AAGvD,QAAM,cAAwB,CAAC;AAC/B,MAAI;AAEJ,aAAW,CAAC,MAAM,UAAU,KAAK,OAAO,QAAQ,OAAO,GAAG;AACxD,gBAAY,KAAK,KAAK,UAAU,MAAM,YAAY,OAAO,CAAC,EAAE;AAC5D,QAAI,WAAW,eAAe,MAAM;AAClC,yBAAmB;AAAA,IACrB;AAAA,EACF;AAGA,MAAI,qBAAqB,QAAW;AAClC,gBAAY;AAAA,MACV,kBAAkB,iBAAiB,kBAAkB,OAAO,CAAC;AAAA,IAC/D;AAAA,EACF;AAEA,QAAM,QAAkB;AAAA,IACtB,oBAAoB,OAAO,kBAAkB,SAAS;AAAA,IACtD;AAAA,IACA;AAAA,IACA,8BAA8B,WAAW;AAAA,IACzC,YAAY,KAAK,KAAK;AAAA,IACtB;AAAA,IACA;AAAA,EACF;AAGA,aAAW,OAAO,mBAAmB;AACnC,UAAM,KAAK,SAAS,KAAK,OAAO,CAAC;AAAA,EACnC;AAEA,QAAM,KAAK,EAAE;AACb,SAAO,MAAM,KAAK,IAAI;AACxB;AAMA,IAAM,4BAA2D;AAAA,EAC/D,QAAQ;AAAA,EACR,QAAQ;AAAA,EACR,SAAS;AAAA,EACT,MAAM;AAAA,EACN,MAAM;AACR;AAGO,SAAS,sBAAsB,MAA6B;AACjE,SAAO,0BAA0B,IAAI;AACvC;AAGA,SAAS,uBAAuB,OAAgD;AAC9E,QAAM,UAAU,sBAAsB,MAAM,IAAI;AAChD,QAAM,MAAwB;AAAA,IAC5B,MAAM;AAAA,IACN,UAAU,MAAM,aAAa;AAAA,IAC7B,aAAa;AAAA,EACf;AAEA,MAAI,MAAM,iBAAiB,UAAa,MAAM,iBAAiB,QAAQ,OAAO,MAAM,iBAAiB,YAAY;AAC/G,QAAI,MAAM,SAAS,QAAQ;AACzB,UAAI,oBAAoB;AAAA,IAC1B,WAAW,MAAM,SAAS,UAAU;AAClC,UAAI,oBAAoB,OAAO,MAAM,YAAY;AAAA,IACnD,WAAW,MAAM,SAAS,UAAU;AAClC,UAAI,oBAAoB,IAAI,OAAO,MAAM,YAAY,CAAC;AAAA,IACxD,WAAW,MAAM,SAAS,WAAW;AACnC,UAAI,oBAAoB,MAAM,eAAe,SAAS;AAAA,IACxD;AAAA,EACF;AAGA,MAAI,MAAM,SAAS,UAAU,OAAO,MAAM,iBAAiB,YAAY;AACrE,QAAI,oBAAoB;AAAA,EAC1B;AAEA,SAAO;AACT;AAKO,SAAS,uBACd,WACA,OACA,SACQ;AACR,QAAM,cAAc,iBAAiB,WAAW,OAAO;AACvD,QAAM,cAAwB,CAAC;AAC/B,QAAM,oBAA8B,CAAC;AACrC,QAAM,kBAA4B,CAAC;AAEnC,aAAW,CAAC,WAAW,KAAK,KAAK,OAAO,QAAQ,MAAM,MAAM,GAAG;AAC7D,UAAM,SAAS,uBAAuB,KAAK;AAC3C,gBAAY,KAAK,KAAK,UAAU,WAAW,QAAQ,OAAO,CAAC,EAAE;AAE7D,QAAI,MAAM,WAAW,MAAM;AACzB,wBAAkB;AAAA,QAChB,aAAa,iBAAiB,WAAW,OAAO,CAAC;AAAA,MACnD;AAAA,IACF;AAEA,QAAI,MAAM,UAAU,MAAM;AACxB,YAAM,UAAU,GAAG,SAAS,IAAI,SAAS;AACzC,sBAAgB;AAAA,QACd,8BAA8B,iBAAiB,SAAS,OAAO,CAAC,OAAO,WAAW,KAAK,iBAAiB,WAAW,OAAO,CAAC;AAAA,MAC7H;AAAA,IACF;AAAA,EACF;AAEA,QAAM,WAAW,CAAC,GAAG,aAAa,GAAG,iBAAiB;AAEtD,QAAM,QAAk
B;AAAA,IACtB,8BAA8B,WAAW;AAAA,IACzC,SAAS,KAAK,KAAK;AAAA,IACnB;AAAA,EACF;AAEA,aAAW,OAAO,iBAAiB;AACjC,UAAM,KAAK,GAAG;AAAA,EAChB;AAEA,SAAO,MAAM,KAAK,IAAI;AACxB;AAKO,SAAS,qBACd,WACA,QACA,SACQ;AACR,QAAM,cAAc,iBAAiB,WAAW,OAAO;AACvD,QAAM,aAAuB,CAAC;AAE9B,aAAW,CAAC,WAAW,KAAK,KAAK,OAAO,QAAQ,MAAM,GAAG;AACvD,UAAM,SAAS,uBAAuB,KAAK;AAC3C,UAAM,SAAS,UAAU,WAAW,QAAQ,OAAO;AAEnD,QAAI,YAAY,YAAY;AAC1B,iBAAW;AAAA,QACT,eAAe,WAAW,6BAA6B,MAAM;AAAA,MAC/D;AAAA,IACF,OAAO;AAEL,iBAAW;AAAA,QACT,eAAe,WAAW,eAAe,MAAM;AAAA,MACjD;AAAA,IACF;AAAA,EACF;AAEA,SAAO,WAAW,KAAK,IAAI;AAC7B;AAMO,SAAS,2BACd,QACA,SACQ;AACR,QAAM,YAAY,OAAO,KAAK,OAAO,MAAM,EAAE,SAAS;AACtD,QAAM,gBAAgB,OAAO,KAAK,OAAO,MAAM,EAAE,SAAS;AAE1D,MAAI,CAAC,aAAa,CAAC,eAAe;AAChC,WAAO;AAAA,EACT;AAEA,QAAM,QAAkB;AAAA,IACtB;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EACF;AAEA,aAAW,CAAC,WAAW,QAAQ,KAAK,OAAO,QAAQ,OAAO,MAAM,GAAG;AACjE,UAAM,KAAK,uBAAuB,WAAW,UAAU,OAAO,CAAC;AAC/D,UAAM,KAAK,EAAE;AAAA,EACf;AAEA,MAAI,eAAe;AACjB,UAAM,KAAK,kCAAkC;AAC7C,eAAW,CAAC,WAAW,MAAM,KAAK,OAAO,QAAQ,OAAO,MAAM,GAAG;AAC/D,YAAM,KAAK,qBAAqB,WAAW,QAAQ,OAAO,CAAC;AAAA,IAC7D;AACA,UAAM,KAAK,EAAE;AAAA,EACf;AAEA,SAAO,MAAM,KAAK,IAAI;AACxB;","names":[]}
|
|
@@ -1,9 +1,13 @@
|
|
|
1
|
+
import {
|
|
2
|
+
ISO_DATE_REGEX,
|
|
3
|
+
parseIsoDate
|
|
4
|
+
} from "./chunk-EEYS3G5Y.js";
|
|
1
5
|
import {
|
|
2
6
|
createKyselyInstance
|
|
3
7
|
} from "./chunk-7GSN73TA.js";
|
|
4
8
|
import {
|
|
5
9
|
detectDialect
|
|
6
|
-
} from "./chunk-
|
|
10
|
+
} from "./chunk-GHOKL227.js";
|
|
7
11
|
|
|
8
12
|
// src/purge.ts
|
|
9
13
|
import pc from "picocolors";
|
|
@@ -13,17 +17,12 @@ import {
|
|
|
13
17
|
ConfigValidationError,
|
|
14
18
|
requireAuditConfig
|
|
15
19
|
} from "@usebetterdev/plugin/config";
|
|
16
|
-
var ISO_DATE_REGEX = /^\d{4}-\d{2}-\d{2}(T[\w:.+-]+)?$/;
|
|
17
20
|
var DURATION_REGEX = /^(\d+)(d|w|m|y)$/i;
|
|
18
21
|
var DEFAULT_BATCH_SIZE = 1e3;
|
|
19
22
|
var MAX_BATCHES = 1e5;
|
|
20
23
|
function parseSinceValue(value) {
|
|
21
24
|
if (ISO_DATE_REGEX.test(value)) {
|
|
22
|
-
|
|
23
|
-
if (Number.isNaN(date.getTime())) {
|
|
24
|
-
throw new Error(`Invalid date "${value}". Expected ISO-8601 format (e.g. "2025-01-01").`);
|
|
25
|
-
}
|
|
26
|
-
return date;
|
|
25
|
+
return parseIsoDate(value);
|
|
27
26
|
}
|
|
28
27
|
const match = DURATION_REGEX.exec(value);
|
|
29
28
|
if (match !== null) {
|
|
@@ -179,4 +178,4 @@ export {
|
|
|
179
178
|
formatDuration,
|
|
180
179
|
purge
|
|
181
180
|
};
|
|
182
|
-
//# sourceMappingURL=chunk-
|
|
181
|
+
//# sourceMappingURL=chunk-WYLZXWZ4.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../src/purge.ts"],"sourcesContent":["/**\n * `better-audit purge` — Delete audit logs older than the configured retention period.\n *\n * Connects to the database directly via Kysely (no ORM adapter needed).\n * Executes batched DELETEs to avoid holding long row-level locks on large tables.\n *\n * Always run with --dry-run first to preview what will be deleted.\n */\n\nimport pc from \"picocolors\";\nimport { createKyselyInstance } from \"./sql-executor.js\";\nimport { detectDialect } from \"./detect-adapter.js\";\nimport {\n loadBetterConfig,\n BetterConfigNotFoundError,\n ConfigValidationError,\n requireAuditConfig,\n} from \"@usebetterdev/plugin/config\";\nimport type { Database } from \"./sql-executor.js\";\nimport type { Kysely } from \"kysely\";\nimport { ISO_DATE_REGEX, parseIsoDate } from \"./parse-since.js\";\n\nconst DURATION_REGEX = /^(\\d+)(d|w|m|y)$/i;\nconst DEFAULT_BATCH_SIZE = 1000;\nconst MAX_BATCHES = 100_000;\n\nexport interface PurgeOptions {\n /** Preview rows to be deleted without deleting them. */\n dryRun?: boolean;\n /** ISO date string (e.g. \"2025-01-01\") or duration shorthand (e.g. \"90d\", \"1y\"). */\n since?: string;\n /** Rows per DELETE batch. Default: 1000. */\n batchSize?: number;\n /** Database URL (default: DATABASE_URL env). */\n databaseUrl?: string;\n /** Skip confirmation prompt (required for live deletion). */\n yes?: boolean;\n}\n\n/**\n * Parse a `--since` value to an absolute cutoff `Date`.\n *\n * Accepts:\n * - ISO-8601 date strings: \"2025-01-01\" or \"2025-01-01T00:00:00Z\"\n * - Duration shorthands: \"90d\", \"4w\", \"3m\", \"1y\"\n *\n * Exported for testing.\n */\nexport function parseSinceValue(value: string): Date {\n if (ISO_DATE_REGEX.test(value)) {\n return parseIsoDate(value);\n }\n\n const match = DURATION_REGEX.exec(value);\n if (match !== null) {\n const amount = parseInt(match[1]!, 10);\n if (amount <= 0) {\n throw new Error(\n `Invalid --since value \"${value}\". 
Duration amount must be greater than zero.`,\n );\n }\n const unit = match[2]!.toLowerCase();\n const now = new Date();\n if (unit === \"d\") {\n now.setDate(now.getDate() - amount);\n } else if (unit === \"w\") {\n now.setDate(now.getDate() - amount * 7);\n } else if (unit === \"m\") {\n now.setMonth(now.getMonth() - amount);\n } else {\n // y\n now.setFullYear(now.getFullYear() - amount);\n }\n return now;\n }\n\n throw new Error(\n `Invalid --since value \"${value}\". ` +\n `Expected an ISO date (e.g. \"2025-01-01\") or duration shorthand (e.g. \"90d\", \"4w\", \"3m\", \"1y\").`,\n );\n}\n\n/**\n * Resolve the cutoff date from options.\n *\n * Priority: `--since` flag > config `retention.days`.\n * Throws if neither is available.\n *\n * Exported for testing.\n */\nexport async function resolveCutoffDate(options: {\n since?: string;\n cwd?: string;\n}): Promise<Date> {\n if (options.since !== undefined) {\n return parseSinceValue(options.since);\n }\n\n try {\n const config = await loadBetterConfig(\n options.cwd !== undefined ? { cwd: options.cwd } : {},\n );\n const auditConfig = requireAuditConfig(config);\n if (auditConfig.retention !== undefined) {\n const cutoff = new Date();\n cutoff.setDate(cutoff.getDate() - auditConfig.retention.days);\n return cutoff;\n }\n } catch (err) {\n if (\n err instanceof BetterConfigNotFoundError ||\n err instanceof ConfigValidationError\n ) {\n // Config file not found or audit section missing/invalid — fall through\n } else {\n throw err;\n }\n }\n\n throw new Error(\n \"No retention policy configured. 
\" +\n \"Pass --since <date|duration> or set audit.retention.days in your better.config file.\",\n );\n}\n\n/**\n * Count rows eligible for deletion (for --dry-run).\n */\nasync function countEligibleRows(\n db: Kysely<Database>,\n before: Date,\n): Promise<number> {\n const result = await db\n .selectFrom(\"audit_logs\")\n .select((eb) => eb.fn.countAll<string>().as(\"count\"))\n .where(\"timestamp\", \"<\", before)\n .executeTakeFirst();\n return result !== undefined ? Number(result.count) : 0;\n}\n\n/**\n * Delete one batch of rows older than `before`.\n *\n * Uses `DELETE … WHERE id IN (SELECT id … LIMIT n)` on all dialects.\n * This avoids the lack of LIMIT support on DELETE in Postgres and SQLite.\n *\n * Returns the number of rows deleted in this batch.\n */\nasync function deleteBatch(\n db: Kysely<Database>,\n before: Date,\n batchSize: number,\n): Promise<number> {\n const subquery = db\n .selectFrom(\"audit_logs\")\n .select(\"id\")\n .where(\"timestamp\", \"<\", before)\n .limit(batchSize);\n\n const result = await db\n .deleteFrom(\"audit_logs\")\n .where(\"id\", \"in\", subquery)\n .executeTakeFirst();\n\n return Number(result.numDeletedRows);\n}\n\n/**\n * Format a number with locale-aware thousands separators.\n */\nfunction formatCount(n: number): string {\n return n.toLocaleString(\"en-US\");\n}\n\n/**\n * Format elapsed milliseconds to a human-readable string.\n *\n * Exported for testing.\n */\nexport function formatDuration(ms: number): string {\n if (ms < 1000) {\n return `${ms}ms`;\n }\n return `${(ms / 1000).toFixed(1)}s`;\n}\n\nexport async function purge(options: PurgeOptions = {}): Promise<void> {\n // 1. Resolve database URL\n const databaseUrl = options.databaseUrl ?? process.env[\"DATABASE_URL\"];\n if (databaseUrl === undefined || databaseUrl === \"\") {\n throw new Error(\n \"DATABASE_URL is required. Set the DATABASE_URL environment variable or pass --database-url.\",\n );\n }\n\n // 2. 
Resolve the cutoff date\n // Only consult config when --since is absent (short-circuit when --since is set)\n const before = await resolveCutoffDate({\n ...(options.since !== undefined ? { since: options.since } : {}),\n cwd: process.cwd(),\n });\n\n // 3. Validate cutoff is in the past\n if (before > new Date()) {\n throw new Error(\n `Cutoff date ${before.toISOString().split(\"T\")[0]!} is in the future. ` +\n `Purge only accepts past dates.`,\n );\n }\n\n const batchSize = options.batchSize ?? DEFAULT_BATCH_SIZE;\n if (!Number.isInteger(batchSize) || batchSize <= 0) {\n throw new Error(`--batch-size must be a positive integer, got ${String(batchSize)}`);\n }\n\n // 4. Connect\n const dialect = detectDialect(databaseUrl);\n const db = await createKyselyInstance(databaseUrl, dialect);\n\n const startTime = Date.now();\n\n try {\n // 4. Dry run — count only, no confirmation needed\n if (options.dryRun) {\n const count = await countEligibleRows(db, before);\n process.stderr.write(\n `${pc.cyan(\"→\")} ${pc.bold(formatCount(count))} rows would be deleted` +\n ` (cutoff: ${pc.dim(before.toISOString().split(\"T\")[0]!)})\\n`,\n );\n process.stderr.write(`${pc.dim(\" Dry run — no changes made.\")}\\n`);\n return;\n }\n\n // 5. Confirmation guard — require --yes for live deletion\n if (!options.yes) {\n const count = await countEligibleRows(db, before);\n process.stderr.write(\n `${pc.yellow(\"!\")} ${pc.bold(formatCount(count))} rows will be deleted` +\n ` (cutoff: ${pc.dim(before.toISOString().split(\"T\")[0]!)})\\n` +\n ` Run with ${pc.bold(\"--yes\")} to confirm deletion, or ${pc.bold(\"--dry-run\")} to preview.\\n`,\n );\n return;\n }\n\n // 6. 
Batched delete loop\n let totalDeleted = 0;\n let batchNumber = 0;\n\n while (batchNumber < MAX_BATCHES) {\n const deleted = await deleteBatch(db, before, batchSize);\n totalDeleted += deleted;\n batchNumber++;\n\n if (batchNumber % 10 === 0) {\n process.stderr.write(\n ` ${pc.dim(`batch ${batchNumber}: ${formatCount(totalDeleted)} rows deleted so far...`)}\\n`,\n );\n }\n\n if (deleted < batchSize) {\n break;\n }\n }\n\n if (batchNumber >= MAX_BATCHES) {\n process.stderr.write(\n pc.yellow(\n ` Warning: reached max batch limit (${MAX_BATCHES.toLocaleString(\"en-US\")}). Some rows may remain.\\n`,\n ),\n );\n }\n\n const elapsed = Date.now() - startTime;\n\n // 7. Summary\n process.stderr.write(`${pc.green(\"✓\")} Purge complete\\n`);\n process.stderr.write(` Rows deleted: ${pc.bold(formatCount(totalDeleted))}\\n`);\n process.stderr.write(` Cutoff date: ${pc.dim(before.toISOString().split(\"T\")[0]!)}\\n`);\n process.stderr.write(` Time taken: ${pc.dim(formatDuration(elapsed))}\\n`);\n } finally {\n await db.destroy();\n 
}\n}\n"],"mappings":";;;;;;;;;;;;AASA,OAAO,QAAQ;AAGf;AAAA,EACE;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,OACK;AAKP,IAAM,iBAAiB;AACvB,IAAM,qBAAqB;AAC3B,IAAM,cAAc;AAwBb,SAAS,gBAAgB,OAAqB;AACnD,MAAI,eAAe,KAAK,KAAK,GAAG;AAC9B,WAAO,aAAa,KAAK;AAAA,EAC3B;AAEA,QAAM,QAAQ,eAAe,KAAK,KAAK;AACvC,MAAI,UAAU,MAAM;AAClB,UAAM,SAAS,SAAS,MAAM,CAAC,GAAI,EAAE;AACrC,QAAI,UAAU,GAAG;AACf,YAAM,IAAI;AAAA,QACR,0BAA0B,KAAK;AAAA,MACjC;AAAA,IACF;AACA,UAAM,OAAO,MAAM,CAAC,EAAG,YAAY;AACnC,UAAM,MAAM,oBAAI,KAAK;AACrB,QAAI,SAAS,KAAK;AAChB,UAAI,QAAQ,IAAI,QAAQ,IAAI,MAAM;AAAA,IACpC,WAAW,SAAS,KAAK;AACvB,UAAI,QAAQ,IAAI,QAAQ,IAAI,SAAS,CAAC;AAAA,IACxC,WAAW,SAAS,KAAK;AACvB,UAAI,SAAS,IAAI,SAAS,IAAI,MAAM;AAAA,IACtC,OAAO;AAEL,UAAI,YAAY,IAAI,YAAY,IAAI,MAAM;AAAA,IAC5C;AACA,WAAO;AAAA,EACT;AAEA,QAAM,IAAI;AAAA,IACR,0BAA0B,KAAK;AAAA,EAEjC;AACF;AAUA,eAAsB,kBAAkB,SAGtB;AAChB,MAAI,QAAQ,UAAU,QAAW;AAC/B,WAAO,gBAAgB,QAAQ,KAAK;AAAA,EACtC;AAEA,MAAI;AACF,UAAM,SAAS,MAAM;AAAA,MACnB,QAAQ,QAAQ,SAAY,EAAE,KAAK,QAAQ,IAAI,IAAI,CAAC;AAAA,IACtD;AACA,UAAM,cAAc,mBAAmB,MAAM;AAC7C,QAAI,YAAY,cAAc,QAAW;AACvC,YAAM,SAAS,oBAAI,KAAK;AACxB,aAAO,QAAQ,OAAO,QAAQ,IAAI,YAAY,UAAU,IAAI;AAC5D,aAAO;AAAA,IACT;AAAA,EACF,SAAS,KAAK;AACZ,QACE,eAAe,6BACf,eAAe,uBACf;AAAA,IAEF,OAAO;AACL,YAAM;AAAA,IACR;AAAA,EACF;AAEA,QAAM,IAAI;AAAA,IACR;AAAA,EAEF;AACF;AAKA,eAAe,kBACb,IACA,QACiB;AACjB,QAAM,SAAS,MAAM,GAClB,WAAW,YAAY,EACvB,OAAO,CAAC,OAAO,GAAG,GAAG,SAAiB,EAAE,GAAG,OAAO,CAAC,EACnD,MAAM,aAAa,KAAK,MAAM,EAC9B,iBAAiB;AACpB,SAAO,WAAW,SAAY,OAAO,OAAO,KAAK,IAAI;AACvD;AAUA,eAAe,YACb,IACA,QACA,WACiB;AACjB,QAAM,WAAW,GACd,WAAW,YAAY,EACvB,OAAO,IAAI,EACX,MAAM,aAAa,KAAK,MAAM,EAC9B,MAAM,SAAS;AAElB,QAAM,SAAS,MAAM,GAClB,WAAW,YAAY,EACvB,MAAM,MAAM,MAAM,QAAQ,EAC1B,iBAAiB;AAEpB,SAAO,OAAO,OAAO,cAAc;AACrC;AAKA,SAAS,YAAY,GAAmB;AACtC,SAAO,EAAE,eAAe,OAAO;AACjC;AAOO,SAAS,eAAe,IAAoB;AACjD,MAAI,KAAK,KAAM;AACb,WAAO,GAAG,EAAE;AAAA,EACd;AACA,SAAO,IAAI,KAAK,KAAM,QAAQ,CAAC,CAAC;AAClC;AAEA,eAAsB,MAAM,UAAwB,CAAC,GAAkB;AAErE,QAAM,cAAc,QAAQ,eAAe,QAAQ,IAAI,cAAc;AACrE,MAAI,gBAAgB,UAAa,gBAAgB,IAAI;AACnD,UAAM,IAAI;AAAA,
MACR;AAAA,IACF;AAAA,EACF;AAIA,QAAM,SAAS,MAAM,kBAAkB;AAAA,IACrC,GAAI,QAAQ,UAAU,SAAY,EAAE,OAAO,QAAQ,MAAM,IAAI,CAAC;AAAA,IAC9D,KAAK,QAAQ,IAAI;AAAA,EACnB,CAAC;AAGD,MAAI,SAAS,oBAAI,KAAK,GAAG;AACvB,UAAM,IAAI;AAAA,MACR,eAAe,OAAO,YAAY,EAAE,MAAM,GAAG,EAAE,CAAC,CAAE;AAAA,IAEpD;AAAA,EACF;AAEA,QAAM,YAAY,QAAQ,aAAa;AACvC,MAAI,CAAC,OAAO,UAAU,SAAS,KAAK,aAAa,GAAG;AAClD,UAAM,IAAI,MAAM,gDAAgD,OAAO,SAAS,CAAC,EAAE;AAAA,EACrF;AAGA,QAAM,UAAU,cAAc,WAAW;AACzC,QAAM,KAAK,MAAM,qBAAqB,aAAa,OAAO;AAE1D,QAAM,YAAY,KAAK,IAAI;AAE3B,MAAI;AAEF,QAAI,QAAQ,QAAQ;AAClB,YAAM,QAAQ,MAAM,kBAAkB,IAAI,MAAM;AAChD,cAAQ,OAAO;AAAA,QACb,GAAG,GAAG,KAAK,QAAG,CAAC,IAAI,GAAG,KAAK,YAAY,KAAK,CAAC,CAAC,mCACjC,GAAG,IAAI,OAAO,YAAY,EAAE,MAAM,GAAG,EAAE,CAAC,CAAE,CAAC;AAAA;AAAA,MAC1D;AACA,cAAQ,OAAO,MAAM,GAAG,GAAG,IAAI,mCAA8B,CAAC;AAAA,CAAI;AAClE;AAAA,IACF;AAGA,QAAI,CAAC,QAAQ,KAAK;AAChB,YAAM,QAAQ,MAAM,kBAAkB,IAAI,MAAM;AAChD,cAAQ,OAAO;AAAA,QACb,GAAG,GAAG,OAAO,GAAG,CAAC,IAAI,GAAG,KAAK,YAAY,KAAK,CAAC,CAAC,kCACnC,GAAG,IAAI,OAAO,YAAY,EAAE,MAAM,GAAG,EAAE,CAAC,CAAE,CAAC;AAAA,aAC1C,GAAG,KAAK,OAAO,CAAC,4BAA4B,GAAG,KAAK,WAAW,CAAC;AAAA;AAAA,MAChF;AACA;AAAA,IACF;AAGA,QAAI,eAAe;AACnB,QAAI,cAAc;AAElB,WAAO,cAAc,aAAa;AAChC,YAAM,UAAU,MAAM,YAAY,IAAI,QAAQ,SAAS;AACvD,sBAAgB;AAChB;AAEA,UAAI,cAAc,OAAO,GAAG;AAC1B,gBAAQ,OAAO;AAAA,UACb,KAAK,GAAG,IAAI,SAAS,WAAW,KAAK,YAAY,YAAY,CAAC,yBAAyB,CAAC;AAAA;AAAA,QAC1F;AAAA,MACF;AAEA,UAAI,UAAU,WAAW;AACvB;AAAA,MACF;AAAA,IACF;AAEA,QAAI,eAAe,aAAa;AAC9B,cAAQ,OAAO;AAAA,QACb,GAAG;AAAA,UACD,uCAAuC,YAAY,eAAe,OAAO,CAAC;AAAA;AAAA,QAC5E;AAAA,MACF;AAAA,IACF;AAEA,UAAM,UAAU,KAAK,IAAI,IAAI;AAG7B,YAAQ,OAAO,MAAM,GAAG,GAAG,MAAM,QAAG,CAAC;AAAA,CAAmB;AACxD,YAAQ,OAAO,MAAM,oBAAoB,GAAG,KAAK,YAAY,YAAY,CAAC,CAAC;AAAA,CAAI;AAC/E,YAAQ,OAAO,MAAM,oBAAoB,GAAG,IAAI,OAAO,YAAY,EAAE,MAAM,GAAG,EAAE,CAAC,CAAE,CAAC;AAAA,CAAI;AACxF,YAAQ,OAAO,MAAM,oBAAoB,GAAG,IAAI,eAAe,OAAO,CAAC,CAAC;AAAA,CAAI;AAAA,EAC9E,UAAE;AACA,UAAM,GAAG,QAAQ;AAAA,EACnB;AACF;","names":[]}
|
package/dist/cli.js
CHANGED
|
@@ -1,29 +1,29 @@
|
|
|
1
1
|
#!/usr/bin/env node
|
|
2
2
|
import {
|
|
3
|
-
CheckFailedError,
|
|
4
3
|
check
|
|
5
|
-
} from "./chunk-
|
|
4
|
+
} from "./chunk-PGOZIYS5.js";
|
|
6
5
|
import {
|
|
7
6
|
exportLogs
|
|
8
|
-
} from "./chunk-
|
|
7
|
+
} from "./chunk-O4577NVP.js";
|
|
9
8
|
import {
|
|
10
9
|
migrate
|
|
11
|
-
} from "./chunk-
|
|
12
|
-
import "./chunk-
|
|
10
|
+
} from "./chunk-AWFOUH4H.js";
|
|
11
|
+
import "./chunk-Q23AEDWF.js";
|
|
13
12
|
import {
|
|
14
13
|
purge
|
|
15
|
-
} from "./chunk-
|
|
14
|
+
} from "./chunk-WYLZXWZ4.js";
|
|
15
|
+
import "./chunk-EEYS3G5Y.js";
|
|
16
16
|
import "./chunk-7GSN73TA.js";
|
|
17
17
|
import {
|
|
18
|
-
readCliVersion
|
|
19
|
-
|
|
18
|
+
readCliVersion,
|
|
19
|
+
runAction
|
|
20
|
+
} from "./chunk-GHOKL227.js";
|
|
20
21
|
import {
|
|
21
22
|
stats
|
|
22
23
|
} from "./chunk-UASMKVFP.js";
|
|
23
24
|
|
|
24
25
|
// src/cli.ts
|
|
25
26
|
import { Command } from "commander";
|
|
26
|
-
import pc from "picocolors";
|
|
27
27
|
var cliVersion = readCliVersion(import.meta.url);
|
|
28
28
|
function parseAdapter(value) {
|
|
29
29
|
if (value === "drizzle" || value === "prisma" || value === void 0) {
|
|
@@ -37,28 +37,6 @@ function parseDialect(value) {
|
|
|
37
37
|
}
|
|
38
38
|
throw new Error("--dialect must be 'postgres', 'mysql', or 'sqlite'");
|
|
39
39
|
}
|
|
40
|
-
function formatErrorMessage(err) {
|
|
41
|
-
if (err instanceof AggregateError && err.errors.length > 0) {
|
|
42
|
-
const nested = err.errors.map((e) => e instanceof Error ? e.message : String(e)).join("; ");
|
|
43
|
-
return err.message !== "" ? `${err.message}: ${nested}` : nested;
|
|
44
|
-
}
|
|
45
|
-
if (err instanceof Error) {
|
|
46
|
-
return err.message;
|
|
47
|
-
}
|
|
48
|
-
return String(err);
|
|
49
|
-
}
|
|
50
|
-
async function runAction(fn) {
|
|
51
|
-
try {
|
|
52
|
-
await fn();
|
|
53
|
-
} catch (err) {
|
|
54
|
-
if (err instanceof CheckFailedError) {
|
|
55
|
-
process.exit(1);
|
|
56
|
-
}
|
|
57
|
-
const message = formatErrorMessage(err);
|
|
58
|
-
console.error(pc.red(message));
|
|
59
|
-
process.exit(1);
|
|
60
|
-
}
|
|
61
|
-
}
|
|
62
40
|
var program = new Command().name("@usebetterdev/audit-cli").description("CLI for @usebetterdev/audit \u2014 compliance-ready audit logging").version(cliVersion);
|
|
63
41
|
program.command("migrate").description("Generate the audit_logs table migration").option(
|
|
64
42
|
"--dry-run",
|
|
@@ -69,14 +47,15 @@ program.command("migrate").description("Generate the audit_logs table migration"
|
|
|
69
47
|
).option(
|
|
70
48
|
"--dialect <dialect>",
|
|
71
49
|
"Database dialect: postgres, mysql, or sqlite (auto-detected from DATABASE_URL)"
|
|
72
|
-
).option("-o, --output <path>", "Output directory or .sql file path").action(
|
|
50
|
+
).option("-o, --output <path>", "Output directory or .sql file path").option("--no-plugins", "Skip plugin table generation").action(
|
|
73
51
|
async (opts) => {
|
|
74
52
|
await runAction(
|
|
75
53
|
() => migrate({
|
|
76
54
|
dryRun: opts.dryRun,
|
|
77
55
|
adapter: parseAdapter(opts.adapter),
|
|
78
56
|
dialect: parseDialect(opts.dialect),
|
|
79
|
-
output: opts.output
|
|
57
|
+
output: opts.output,
|
|
58
|
+
noPlugins: opts.plugins === false
|
|
80
59
|
})
|
|
81
60
|
);
|
|
82
61
|
}
|
package/dist/cli.js.map
CHANGED
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"sources":["../src/cli.ts"],"sourcesContent":["#!/usr/bin/env node\nimport { Command } from \"commander\";\nimport
|
|
1
|
+
{"version":3,"sources":["../src/cli.ts"],"sourcesContent":["#!/usr/bin/env node\nimport { Command } from \"commander\";\nimport { readCliVersion, runAction } from \"@usebetterdev/cli-utils\";\nimport { migrate } from \"./migrate.js\";\nimport { check } from \"./check.js\";\nimport { stats } from \"./stats.js\";\nimport { purge } from \"./purge.js\";\nimport { exportLogs } from \"./export.js\";\n\nconst cliVersion = readCliVersion(import.meta.url);\n\n/** Validate and narrow the --adapter flag value. */\nfunction parseAdapter(\n value: string | undefined,\n): \"drizzle\" | \"prisma\" | undefined {\n if (value === \"drizzle\" || value === \"prisma\" || value === undefined) {\n return value;\n }\n throw new Error(\"--adapter must be 'drizzle' or 'prisma'\");\n}\n\n/** Validate and narrow the --dialect flag value. */\nfunction parseDialect(\n value: string | undefined,\n): \"postgres\" | \"mysql\" | \"sqlite\" | undefined {\n if (\n value === \"postgres\" ||\n value === \"mysql\" ||\n value === \"sqlite\" ||\n value === undefined\n ) {\n return value;\n }\n throw new Error(\"--dialect must be 'postgres', 'mysql', or 'sqlite'\");\n}\n\nconst program = new Command()\n .name(\"@usebetterdev/audit-cli\")\n .description(\"CLI for @usebetterdev/audit — compliance-ready audit logging\")\n .version(cliVersion);\n\nprogram\n .command(\"migrate\")\n .description(\"Generate the audit_logs table migration\")\n .option(\n \"--dry-run\",\n \"Print SQL to stdout without writing a file\",\n )\n .option(\n \"--adapter <adapter>\",\n \"ORM adapter: drizzle or prisma (auto-detected from package.json)\",\n )\n .option(\n \"--dialect <dialect>\",\n \"Database dialect: postgres, mysql, or sqlite (auto-detected from DATABASE_URL)\",\n )\n .option(\"-o, --output <path>\", \"Output directory or .sql file path\")\n .option(\"--no-plugins\", \"Skip plugin table generation\")\n .action(\n async (opts: {\n dryRun?: boolean;\n adapter?: string;\n dialect?: string;\n output?: string;\n plugins?: 
boolean;\n }) => {\n await runAction(() =>\n migrate({\n dryRun: opts.dryRun,\n adapter: parseAdapter(opts.adapter),\n dialect: parseDialect(opts.dialect),\n output: opts.output,\n noPlugins: opts.plugins === false,\n }),\n );\n },\n );\n\nprogram\n .command(\"check\")\n .description(\"Verify the audit_logs table and ORM adapter are working\")\n .option(\"--verbose\", \"Show detailed results for each check\")\n .option(\n \"--database-url <url>\",\n \"Database URL (default: DATABASE_URL env)\",\n )\n .action(async (opts: { verbose?: boolean; databaseUrl?: string }) => {\n await runAction(() =>\n check({ verbose: opts.verbose, databaseUrl: opts.databaseUrl }),\n );\n });\n\nprogram\n .command(\"stats\")\n .description(\"Show audit log statistics\")\n .option(\"--since <period>\", \"Time window (e.g. 30d)\", \"30d\")\n .action(async (opts: { since: string }) => {\n await runAction(() => stats({ since: opts.since }));\n });\n\nprogram\n .command(\"purge\")\n .description(\"Delete audit logs older than the retention period\")\n .option(\"--dry-run\", \"Preview rows to be deleted without deleting them\")\n .option(\n \"--since <value>\",\n 'ISO date (e.g. \"2025-01-01\") or duration shorthand (e.g. 
\"90d\") — overrides config',\n )\n .option(\"--batch-size <n>\", \"Rows per DELETE batch (default: 1000)\")\n .option(\n \"--database-url <url>\",\n \"Database URL (default: DATABASE_URL env)\",\n )\n .option(\"-y, --yes\", \"Skip confirmation prompt (required for live deletion)\")\n .action(\n async (opts: {\n dryRun?: boolean;\n since?: string;\n batchSize?: string;\n databaseUrl?: string;\n yes?: boolean;\n }) => {\n await runAction(async () => {\n const purgeOpts: Parameters<typeof purge>[0] = {};\n if (opts.dryRun !== undefined) {\n purgeOpts.dryRun = opts.dryRun;\n }\n if (opts.since !== undefined) {\n purgeOpts.since = opts.since;\n }\n if (opts.batchSize !== undefined) {\n const n = Number(opts.batchSize);\n if (!Number.isInteger(n) || n <= 0) {\n throw new Error(\n `Invalid --batch-size \"${opts.batchSize}\". Expected a positive integer.`,\n );\n }\n purgeOpts.batchSize = n;\n }\n if (opts.databaseUrl !== undefined) {\n purgeOpts.databaseUrl = opts.databaseUrl;\n }\n if (opts.yes !== undefined) {\n purgeOpts.yes = opts.yes;\n }\n await purge(purgeOpts);\n });\n },\n );\n\nprogram\n .command(\"export\")\n .description(\"Export audit log entries as CSV or JSON\")\n .option(\"--format <format>\", \"Output format: csv or json (default: csv)\")\n .option(\"-o, --output <path>\", \"Output file path (default: stdout)\")\n .option(\n \"--since <value>\",\n 'Duration (e.g. \"90d\") or ISO date (e.g. \"2025-01-01\")',\n )\n .option(\n \"--severity <level>\",\n \"Filter by severity: low, medium, high, or critical\",\n )\n .option(\n \"--compliance <tags>\",\n 'Comma-separated compliance tags (e.g. 
\"soc2:access-control,gdpr\")',\n )\n .option(\"--actor <id>\", \"Filter by actor ID\")\n .option(\"--limit <n>\", \"Maximum number of rows to export\")\n .option(\n \"--database-url <url>\",\n \"Database URL (default: DATABASE_URL env)\",\n )\n .action(\n async (opts: {\n format?: string;\n output?: string;\n since?: string;\n severity?: string;\n compliance?: string;\n actor?: string;\n limit?: string;\n databaseUrl?: string;\n }) => {\n await runAction(() => exportLogs(opts));\n },\n );\n\nprogram.parseAsync(process.argv).catch((err: unknown) => {\n console.error(err);\n process.exit(1);\n});\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;AACA,SAAS,eAAe;AAQxB,IAAM,aAAa,eAAe,YAAY,GAAG;AAGjD,SAAS,aACP,OACkC;AAClC,MAAI,UAAU,aAAa,UAAU,YAAY,UAAU,QAAW;AACpE,WAAO;AAAA,EACT;AACA,QAAM,IAAI,MAAM,yCAAyC;AAC3D;AAGA,SAAS,aACP,OAC6C;AAC7C,MACE,UAAU,cACV,UAAU,WACV,UAAU,YACV,UAAU,QACV;AACA,WAAO;AAAA,EACT;AACA,QAAM,IAAI,MAAM,oDAAoD;AACtE;AAEA,IAAM,UAAU,IAAI,QAAQ,EACzB,KAAK,yBAAyB,EAC9B,YAAY,mEAA8D,EAC1E,QAAQ,UAAU;AAErB,QACG,QAAQ,SAAS,EACjB,YAAY,yCAAyC,EACrD;AAAA,EACC;AAAA,EACA;AACF,EACC;AAAA,EACC;AAAA,EACA;AACF,EACC;AAAA,EACC;AAAA,EACA;AACF,EACC,OAAO,uBAAuB,oCAAoC,EAClE,OAAO,gBAAgB,8BAA8B,EACrD;AAAA,EACC,OAAO,SAMD;AACJ,UAAM;AAAA,MAAU,MACd,QAAQ;AAAA,QACN,QAAQ,KAAK;AAAA,QACb,SAAS,aAAa,KAAK,OAAO;AAAA,QAClC,SAAS,aAAa,KAAK,OAAO;AAAA,QAClC,QAAQ,KAAK;AAAA,QACb,WAAW,KAAK,YAAY;AAAA,MAC9B,CAAC;AAAA,IACH;AAAA,EACF;AACF;AAEF,QACG,QAAQ,OAAO,EACf,YAAY,yDAAyD,EACrE,OAAO,aAAa,sCAAsC,EAC1D;AAAA,EACC;AAAA,EACA;AACF,EACC,OAAO,OAAO,SAAsD;AACnE,QAAM;AAAA,IAAU,MACd,MAAM,EAAE,SAAS,KAAK,SAAS,aAAa,KAAK,YAAY,CAAC;AAAA,EAChE;AACF,CAAC;AAEH,QACG,QAAQ,OAAO,EACf,YAAY,2BAA2B,EACvC,OAAO,oBAAoB,0BAA0B,KAAK,EAC1D,OAAO,OAAO,SAA4B;AACzC,QAAM,UAAU,MAAM,MAAM,EAAE,OAAO,KAAK,MAAM,CAAC,CAAC;AACpD,CAAC;AAEH,QACG,QAAQ,OAAO,EACf,YAAY,mDAAmD,EAC/D,OAAO,aAAa,kDAAkD,EACtE;AAAA,EACC;AAAA,EACA;AACF,EACC,OAAO,oBAAoB,uCAAuC,EAClE;AAAA,EACC;AAAA,EACA;AACF,EACC,OAAO,aAAa,uDAAuD,EAC3E;AAAA,EACC,OAAO,SAMD;AACJ,UAAM,UAAU,YAAY;AAC1B,YAAM,YAA
yC,CAAC;AAChD,UAAI,KAAK,WAAW,QAAW;AAC7B,kBAAU,SAAS,KAAK;AAAA,MAC1B;AACA,UAAI,KAAK,UAAU,QAAW;AAC5B,kBAAU,QAAQ,KAAK;AAAA,MACzB;AACA,UAAI,KAAK,cAAc,QAAW;AAChC,cAAM,IAAI,OAAO,KAAK,SAAS;AAC/B,YAAI,CAAC,OAAO,UAAU,CAAC,KAAK,KAAK,GAAG;AAClC,gBAAM,IAAI;AAAA,YACR,yBAAyB,KAAK,SAAS;AAAA,UACzC;AAAA,QACF;AACA,kBAAU,YAAY;AAAA,MACxB;AACA,UAAI,KAAK,gBAAgB,QAAW;AAClC,kBAAU,cAAc,KAAK;AAAA,MAC/B;AACA,UAAI,KAAK,QAAQ,QAAW;AAC1B,kBAAU,MAAM,KAAK;AAAA,MACvB;AACA,YAAM,MAAM,SAAS;AAAA,IACvB,CAAC;AAAA,EACH;AACF;AAEF,QACG,QAAQ,QAAQ,EAChB,YAAY,yCAAyC,EACrD,OAAO,qBAAqB,2CAA2C,EACvE,OAAO,uBAAuB,oCAAoC,EAClE;AAAA,EACC;AAAA,EACA;AACF,EACC;AAAA,EACC;AAAA,EACA;AACF,EACC;AAAA,EACC;AAAA,EACA;AACF,EACC,OAAO,gBAAgB,oBAAoB,EAC3C,OAAO,eAAe,kCAAkC,EACxD;AAAA,EACC;AAAA,EACA;AACF,EACC;AAAA,EACC,OAAO,SASD;AACJ,UAAM,UAAU,MAAM,WAAW,IAAI,CAAC;AAAA,EACxC;AACF;AAEF,QAAQ,WAAW,QAAQ,IAAI,EAAE,MAAM,CAAC,QAAiB;AACvD,UAAQ,MAAM,GAAG;AACjB,UAAQ,KAAK,CAAC;AAChB,CAAC;","names":[]}
|
package/dist/export.d.ts
CHANGED
|
@@ -19,6 +19,8 @@ interface ExportCommandOptions {
|
|
|
19
19
|
}
|
|
20
20
|
/** Parse `--format` flag, validate and default to "csv". */
|
|
21
21
|
declare function parseFormat(value: string | undefined): "csv" | "json";
|
|
22
|
+
/** Parse `--actor` flag, split comma-separated values and deduplicate. */
|
|
23
|
+
declare function parseActors(value: string): string[];
|
|
22
24
|
/** Parse `--severity` flag, validate against known values. */
|
|
23
25
|
declare function parseSeverity(value: string): AuditSeverity;
|
|
24
26
|
/**
|
|
@@ -35,4 +37,4 @@ declare function createFileWritableStream(path: string): WritableStream<string>;
|
|
|
35
37
|
declare function createStdoutWritableStream(): WritableStream<string>;
|
|
36
38
|
declare function exportLogs(options: ExportCommandOptions): Promise<void>;
|
|
37
39
|
|
|
38
|
-
export { type ExportCommandOptions, createFileWritableStream, createStdoutWritableStream, exportLogs, parseFormat, parseSeverity, parseSinceValue };
|
|
40
|
+
export { type ExportCommandOptions, createFileWritableStream, createStdoutWritableStream, exportLogs, parseActors, parseFormat, parseSeverity, parseSinceValue };
|
package/dist/export.js
CHANGED
|
@@ -2,16 +2,19 @@ import {
|
|
|
2
2
|
createFileWritableStream,
|
|
3
3
|
createStdoutWritableStream,
|
|
4
4
|
exportLogs,
|
|
5
|
+
parseActors,
|
|
5
6
|
parseFormat,
|
|
6
7
|
parseSeverity,
|
|
7
8
|
parseSinceValue
|
|
8
|
-
} from "./chunk-
|
|
9
|
+
} from "./chunk-O4577NVP.js";
|
|
10
|
+
import "./chunk-EEYS3G5Y.js";
|
|
9
11
|
import "./chunk-7GSN73TA.js";
|
|
10
|
-
import "./chunk-
|
|
12
|
+
import "./chunk-GHOKL227.js";
|
|
11
13
|
export {
|
|
12
14
|
createFileWritableStream,
|
|
13
15
|
createStdoutWritableStream,
|
|
14
16
|
exportLogs,
|
|
17
|
+
parseActors,
|
|
15
18
|
parseFormat,
|
|
16
19
|
parseSeverity,
|
|
17
20
|
parseSinceValue
|
package/dist/migrate.d.ts
CHANGED
|
@@ -26,6 +26,8 @@ interface MigrateOptions {
|
|
|
26
26
|
cwd?: string | undefined;
|
|
27
27
|
/** Database connection URL for dialect detection (defaults to DATABASE_URL env) */
|
|
28
28
|
databaseUrl?: string | undefined;
|
|
29
|
+
/** Skip plugin table generation even if plugins are configured */
|
|
30
|
+
noPlugins?: boolean | undefined;
|
|
29
31
|
}
|
|
30
32
|
declare function migrate(options?: MigrateOptions): Promise<void>;
|
|
31
33
|
|
package/dist/migrate.js
CHANGED
package/dist/purge.js
CHANGED
|
@@ -3,9 +3,10 @@ import {
|
|
|
3
3
|
parseSinceValue,
|
|
4
4
|
purge,
|
|
5
5
|
resolveCutoffDate
|
|
6
|
-
} from "./chunk-
|
|
6
|
+
} from "./chunk-WYLZXWZ4.js";
|
|
7
|
+
import "./chunk-EEYS3G5Y.js";
|
|
7
8
|
import "./chunk-7GSN73TA.js";
|
|
8
|
-
import "./chunk-
|
|
9
|
+
import "./chunk-GHOKL227.js";
|
|
9
10
|
export {
|
|
10
11
|
formatDuration,
|
|
11
12
|
parseSinceValue,
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@usebetterdev/audit-cli",
|
|
3
|
-
"version": "0.
|
|
3
|
+
"version": "0.8.1",
|
|
4
4
|
"repository": "github:usebetter-dev/usebetter",
|
|
5
5
|
"bugs": "https://github.com/usebetter-dev/usebetter/issues",
|
|
6
6
|
"homepage": "https://github.com/usebetter-dev/usebetter#readme",
|
|
@@ -22,8 +22,8 @@
|
|
|
22
22
|
"commander": "^12.1.0",
|
|
23
23
|
"kysely": "^0.28.11",
|
|
24
24
|
"picocolors": "^1.1.0",
|
|
25
|
-
"@usebetterdev/audit-core": "0.
|
|
26
|
-
"@usebetterdev/plugin": "0.
|
|
25
|
+
"@usebetterdev/audit-core": "0.8.1",
|
|
26
|
+
"@usebetterdev/plugin": "0.8.1"
|
|
27
27
|
},
|
|
28
28
|
"peerDependencies": {
|
|
29
29
|
"better-sqlite3": ">=11.0.0",
|
|
@@ -42,12 +42,14 @@
|
|
|
42
42
|
}
|
|
43
43
|
},
|
|
44
44
|
"devDependencies": {
|
|
45
|
+
"@testcontainers/postgresql": "^11.11.0",
|
|
46
|
+
"@types/better-sqlite3": "^7.6.0",
|
|
45
47
|
"@types/node": "^22.10.0",
|
|
46
48
|
"tsup": "^8.3.5",
|
|
47
49
|
"typescript": "~5.7.2",
|
|
48
50
|
"vitest": "^2.1.6",
|
|
49
51
|
"@usebetterdev/cli-utils": "0.1.0",
|
|
50
|
-
"@usebetterdev/test-utils": "^0.5.
|
|
52
|
+
"@usebetterdev/test-utils": "^0.5.4"
|
|
51
53
|
},
|
|
52
54
|
"engines": {
|
|
53
55
|
"node": ">=22"
|
|
@@ -57,6 +59,7 @@
|
|
|
57
59
|
"build:types": "tsc --build tsconfig.build.json",
|
|
58
60
|
"test": "vitest run",
|
|
59
61
|
"test:integration": "vitest run -c vitest.integration.config.ts",
|
|
62
|
+
"test:integration:sqlite": "vitest run -c vitest.integration.sqlite.config.ts",
|
|
60
63
|
"typecheck": "tsc --noEmit"
|
|
61
64
|
}
|
|
62
65
|
}
|
|
@@ -1 +0,0 @@
|
|
|
1
|
-
{"version":3,"sources":["../src/migrate.ts"],"sourcesContent":["/**\n * `better-audit migrate` — Generate the audit_logs table migration.\n *\n * Generates appropriate DDL for the configured database:\n * - Postgres: JSONB columns, TIMESTAMPTZ, gen_random_uuid()\n * - MySQL: JSON columns, DATETIME(6), UUID() default\n * - SQLite: TEXT columns for JSON, no UUID default\n *\n * Writes the migration file into the appropriate ORM directory,\n * or prints to stdout with --dry-run.\n */\n\nimport { existsSync, mkdirSync, readdirSync, readFileSync, writeFileSync } from \"node:fs\";\nimport { dirname, join } from \"node:path\";\nimport pc from \"picocolors\";\nimport { generateMigrationSql, type DatabaseDialect } from \"./generate-sql.js\";\nimport { formatTimestamp } from \"@usebetterdev/cli-utils\";\nimport {\n detectAdapter,\n detectDialect,\n findMigrationDirectory,\n type AdapterType,\n} from \"./detect-adapter.js\";\n\nexport interface MigrateOptions {\n /** Output SQL to stdout instead of writing a file */\n dryRun?: boolean | undefined;\n /** Override ORM auto-detection */\n adapter?: AdapterType | undefined;\n /** Override database dialect detection */\n dialect?: DatabaseDialect | undefined;\n /** Override output directory/file */\n output?: string | undefined;\n /** Working directory (defaults to process.cwd()) */\n cwd?: string | undefined;\n /** Database connection URL for dialect detection (defaults to DATABASE_URL env) */\n databaseUrl?: string | undefined;\n}\n\n/** Regex to detect an existing audit_logs CREATE TABLE statement (case-insensitive). */\nconst AUDIT_TABLE_REGEX = /create\\s+table\\b[^;]*\\baudit_logs\\b/i;\n\n/** Maximum directory depth for the idempotency scan. 
*/\nconst MAX_SCAN_DEPTH = 3;\n\n/**\n * Check if a migration for `audit_logs` already exists in the given directory.\n * Scans `.sql` files up to `MAX_SCAN_DEPTH` levels deep, skipping symlinks.\n */\nfunction migrationAlreadyExists(directory: string, depth: number = 0): boolean {\n if (depth > MAX_SCAN_DEPTH) {\n return false;\n }\n if (!existsSync(directory)) {\n return false;\n }\n\n try {\n const entries = readdirSync(directory, { withFileTypes: true });\n for (const entry of entries) {\n if (entry.isSymbolicLink()) {\n continue;\n }\n const fullPath = join(directory, entry.name);\n if (entry.isFile() && entry.name.endsWith(\".sql\")) {\n const content = readFileSync(fullPath, \"utf-8\");\n if (AUDIT_TABLE_REGEX.test(content)) {\n return true;\n }\n }\n if (entry.isDirectory()) {\n if (migrationAlreadyExists(fullPath, depth + 1)) {\n return true;\n }\n }\n }\n } catch {\n // Read error — treat as no existing migration\n }\n\n return false;\n}\n\nexport async function migrate(options: MigrateOptions = {}): Promise<void> {\n const cwd = options.cwd ?? process.cwd();\n\n // 1. Resolve dialect\n const dialect: DatabaseDialect =\n options.dialect ?? detectDialect(options.databaseUrl ?? process.env[\"DATABASE_URL\"]);\n\n // 2. Generate SQL\n const sql = generateMigrationSql(dialect);\n\n // 3. Dry-run: print to stdout and return\n if (options.dryRun === true) {\n process.stdout.write(sql);\n return;\n }\n\n // 4. Resolve adapter\n const adapter: AdapterType | undefined =\n options.adapter ?? detectAdapter(cwd);\n\n if (adapter === undefined) {\n throw new Error(\n \"Could not detect ORM adapter. Install drizzle-orm or @prisma/client, \" +\n \"or pass --adapter drizzle|prisma.\",\n );\n }\n\n // 5. 
Resolve output directory\n let outputPath: string;\n let filePath: string;\n\n if (options.output !== undefined) {\n if (options.output.endsWith(\".sql\")) {\n filePath = options.output;\n outputPath = dirname(filePath);\n } else {\n outputPath = options.output;\n filePath =\n adapter === \"prisma\"\n ? join(outputPath, \"migration.sql\")\n : join(outputPath, `${formatTimestamp(new Date(), \"\")}_audit_logs.sql`);\n }\n } else {\n const migrationDir = findMigrationDirectory(cwd, adapter);\n outputPath = migrationDir;\n filePath =\n adapter === \"prisma\"\n ? join(migrationDir, \"migration.sql\")\n : join(migrationDir, `${formatTimestamp(new Date(), \"\")}_audit_logs.sql`);\n }\n\n // 6. Check idempotency — scan parent migration directory\n const scanDir =\n adapter === \"prisma\"\n ? join(cwd, \"prisma\", \"migrations\")\n : outputPath;\n\n if (migrationAlreadyExists(scanDir)) {\n console.log(\n pc.green(\"✓\") + \" audit_logs migration already exists — up to date.\",\n );\n return;\n }\n\n // 7. Warn if output file already exists (explicit --output path)\n if (options.output !== undefined && existsSync(filePath)) {\n console.warn(pc.yellow(`Warning: overwriting existing file ${filePath}`));\n }\n\n // 8. 
Write migration file\n mkdirSync(outputPath, { recursive: true });\n writeFileSync(filePath, sql, \"utf-8\");\n\n console.log(`${pc.green(\"✓\")} Wrote migration: ${pc.dim(filePath)}`);\n console.log(\"\");\n console.log(pc.bold(\"Next steps:\"));\n\n if (adapter === \"drizzle\") {\n console.log(` ${pc.yellow(\"$\")} npx drizzle-kit migrate`);\n } else {\n console.log(` ${pc.yellow(\"$\")} npx prisma migrate dev`);\n }\n}\n"],"mappings":";;;;;;;;;;;AAYA,SAAS,YAAY,WAAW,aAAa,cAAc,qBAAqB;AAChF,SAAS,SAAS,YAAY;AAC9B,OAAO,QAAQ;AA0Bf,IAAM,oBAAoB;AAG1B,IAAM,iBAAiB;AAMvB,SAAS,uBAAuB,WAAmB,QAAgB,GAAY;AAC7E,MAAI,QAAQ,gBAAgB;AAC1B,WAAO;AAAA,EACT;AACA,MAAI,CAAC,WAAW,SAAS,GAAG;AAC1B,WAAO;AAAA,EACT;AAEA,MAAI;AACF,UAAM,UAAU,YAAY,WAAW,EAAE,eAAe,KAAK,CAAC;AAC9D,eAAW,SAAS,SAAS;AAC3B,UAAI,MAAM,eAAe,GAAG;AAC1B;AAAA,MACF;AACA,YAAM,WAAW,KAAK,WAAW,MAAM,IAAI;AAC3C,UAAI,MAAM,OAAO,KAAK,MAAM,KAAK,SAAS,MAAM,GAAG;AACjD,cAAM,UAAU,aAAa,UAAU,OAAO;AAC9C,YAAI,kBAAkB,KAAK,OAAO,GAAG;AACnC,iBAAO;AAAA,QACT;AAAA,MACF;AACA,UAAI,MAAM,YAAY,GAAG;AACvB,YAAI,uBAAuB,UAAU,QAAQ,CAAC,GAAG;AAC/C,iBAAO;AAAA,QACT;AAAA,MACF;AAAA,IACF;AAAA,EACF,QAAQ;AAAA,EAER;AAEA,SAAO;AACT;AAEA,eAAsB,QAAQ,UAA0B,CAAC,GAAkB;AACzE,QAAM,MAAM,QAAQ,OAAO,QAAQ,IAAI;AAGvC,QAAM,UACJ,QAAQ,WAAW,cAAc,QAAQ,eAAe,QAAQ,IAAI,cAAc,CAAC;AAGrF,QAAM,MAAM,qBAAqB,OAAO;AAGxC,MAAI,QAAQ,WAAW,MAAM;AAC3B,YAAQ,OAAO,MAAM,GAAG;AACxB;AAAA,EACF;AAGA,QAAM,UACJ,QAAQ,WAAW,iBAAc,GAAG;AAEtC,MAAI,YAAY,QAAW;AACzB,UAAM,IAAI;AAAA,MACR;AAAA,IAEF;AAAA,EACF;AAGA,MAAI;AACJ,MAAI;AAEJ,MAAI,QAAQ,WAAW,QAAW;AAChC,QAAI,QAAQ,OAAO,SAAS,MAAM,GAAG;AACnC,iBAAW,QAAQ;AACnB,mBAAa,QAAQ,QAAQ;AAAA,IAC/B,OAAO;AACL,mBAAa,QAAQ;AACrB,iBACE,YAAY,WACR,KAAK,YAAY,eAAe,IAChC,KAAK,YAAY,GAAG,gBAAgB,oBAAI,KAAK,GAAG,EAAE,CAAC,iBAAiB;AAAA,IAC5E;AAAA,EACF,OAAO;AACL,UAAM,eAAe,uBAAuB,KAAK,OAAO;AACxD,iBAAa;AACb,eACE,YAAY,WACR,KAAK,cAAc,eAAe,IAClC,KAAK,cAAc,GAAG,gBAAgB,oBAAI,KAAK,GAAG,EAAE,CAAC,iBAAiB;AAAA,EAC9E;AAGA,QAAM,UACJ,YAAY,WACR,KAAK,KAAK,UAAU,YAAY,IAChC;AAEN,MAAI,uBAAuB,OAAO,GAAG;AACnC,YAAQ;AAAA,MACN,GAAG,MA
AM,QAAG,IAAI;AAAA,IAClB;AACA;AAAA,EACF;AAGA,MAAI,QAAQ,WAAW,UAAa,WAAW,QAAQ,GAAG;AACxD,YAAQ,KAAK,GAAG,OAAO,sCAAsC,QAAQ,EAAE,CAAC;AAAA,EAC1E;AAGA,YAAU,YAAY,EAAE,WAAW,KAAK,CAAC;AACzC,gBAAc,UAAU,KAAK,OAAO;AAEpC,UAAQ,IAAI,GAAG,GAAG,MAAM,QAAG,CAAC,qBAAqB,GAAG,IAAI,QAAQ,CAAC,EAAE;AACnE,UAAQ,IAAI,EAAE;AACd,UAAQ,IAAI,GAAG,KAAK,aAAa,CAAC;AAElC,MAAI,YAAY,WAAW;AACzB,YAAQ,IAAI,KAAK,GAAG,OAAO,GAAG,CAAC,0BAA0B;AAAA,EAC3D,OAAO;AACL,YAAQ,IAAI,KAAK,GAAG,OAAO,GAAG,CAAC,yBAAyB;AAAA,EAC1D;AACF;","names":[]}
|