@usebetterdev/audit-cli 0.5.0-beta.1 → 0.6.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/check.d.ts +64 -0
- package/dist/check.js +20 -0
- package/dist/check.js.map +1 -0
- package/dist/chunk-55KKYFKR.js +408 -0
- package/dist/chunk-55KKYFKR.js.map +1 -0
- package/dist/chunk-7GSN73TA.js +345 -0
- package/dist/chunk-7GSN73TA.js.map +1 -0
- package/dist/chunk-AGFBL646.js +10 -0
- package/dist/chunk-AGFBL646.js.map +1 -0
- package/dist/chunk-HDO5P6X7.js +77 -0
- package/dist/chunk-HDO5P6X7.js.map +1 -0
- package/dist/chunk-M46VJ3FO.js +182 -0
- package/dist/chunk-M46VJ3FO.js.map +1 -0
- package/dist/chunk-O5LHE2AC.js +119 -0
- package/dist/chunk-O5LHE2AC.js.map +1 -0
- package/dist/chunk-SJSGTCG4.js +225 -0
- package/dist/chunk-SJSGTCG4.js.map +1 -0
- package/dist/chunk-WVH5TQ2O.js +101 -0
- package/dist/chunk-WVH5TQ2O.js.map +1 -0
- package/dist/cli.d.ts +1 -0
- package/dist/cli.js +155 -0
- package/dist/cli.js.map +1 -0
- package/dist/detect-adapter-DNHcPCKz.d.ts +7 -0
- package/dist/export.d.ts +38 -0
- package/dist/export.js +19 -0
- package/dist/export.js.map +1 -0
- package/dist/migrate.d.ts +31 -0
- package/dist/migrate.js +9 -0
- package/dist/migrate.js.map +1 -0
- package/dist/purge.d.ts +52 -0
- package/dist/purge.js +15 -0
- package/dist/purge.js.map +1 -0
- package/dist/stats.d.ts +19 -0
- package/dist/stats.js +7 -0
- package/dist/stats.js.map +1 -0
- package/package.json +20 -2
|
@@ -0,0 +1,182 @@
|
|
|
1
|
+
import {
|
|
2
|
+
createKyselyInstance,
|
|
3
|
+
createSqlExecutor
|
|
4
|
+
} from "./chunk-7GSN73TA.js";
|
|
5
|
+
import {
|
|
6
|
+
detectDialect
|
|
7
|
+
} from "./chunk-HDO5P6X7.js";
|
|
8
|
+
|
|
9
|
+
// src/export.ts
|
|
10
|
+
import { createWriteStream } from "fs";
|
|
11
|
+
import pc from "picocolors";
|
|
12
|
+
import { runExport, AuditQueryBuilder, parseDuration } from "@usebetterdev/audit-core";
|
|
13
|
+
// Accepted values for the --format CLI flag.
var VALID_FORMATS = /* @__PURE__ */ new Set(["csv", "json"]);
// Accepted values for the --severity CLI flag.
var VALID_SEVERITIES = /* @__PURE__ */ new Set([
  "low",
  "medium",
  "high",
  "critical"
]);
// Matches "YYYY-MM-DD", optionally followed by an ISO-8601 time component.
var ISO_DATE_REGEX = /^\d{4}-\d{2}-\d{2}(T[\w:.+-]+)?$/;
|
|
21
|
+
/**
 * Resolve the --format flag. Missing values default to "csv";
 * anything outside VALID_FORMATS throws.
 */
function parseFormat(value) {
  if (value === void 0) {
    return "csv";
  }
  if (VALID_FORMATS.has(value)) {
    return value;
  }
  throw new Error(
    `Invalid format "${value}". Expected "csv" or "json".`
  );
}
|
|
32
|
+
/** Validate the --severity flag against the known severity levels; throws on unknown values. */
function parseSeverity(value) {
  if (VALID_SEVERITIES.has(value)) {
    return value;
  }
  throw new Error(
    `Invalid severity "${value}". Expected one of: low, medium, high, critical.`
  );
}
|
|
40
|
+
/**
 * Parse the --since flag. ISO date strings become Date objects;
 * everything else is validated as a duration (parseDuration throws if
 * invalid) and returned unchanged for the query builder to interpret.
 */
function parseSinceValue(value) {
  if (!ISO_DATE_REGEX.test(value)) {
    // Not date-shaped: must be a valid duration string like "90d".
    parseDuration(value);
    return value;
  }
  const parsed = new Date(value);
  if (Number.isNaN(parsed.getTime())) {
    throw new Error(`Invalid date "${value}". Expected ISO-8601 format.`);
  }
  return parsed;
}
|
|
51
|
+
/**
 * Write one chunk to a Node writable stream, resolving once it is safe to
 * write more. Honors backpressure: when write() returns false, waits for
 * "drain". Settles exactly once — whichever of error, drain, or an
 * immediately-accepted write happens first — and removes its listeners.
 */
function writeToNodeStream(nodeStream, chunk) {
  return new Promise((resolve, reject) => {
    let done = false;
    const finish = (action) => {
      if (done) {
        return;
      }
      done = true;
      nodeStream.removeListener("error", onError);
      nodeStream.removeListener("drain", onDrain);
      action();
    };
    const onError = (err) => finish(() => reject(err));
    const onDrain = () => finish(resolve);
    nodeStream.once("error", onError);
    if (nodeStream.write(chunk)) {
      finish(resolve);
    } else {
      nodeStream.once("drain", onDrain);
    }
  });
}
|
|
80
|
+
/**
 * Wrap a Node file write stream in a WHATWG WritableStream<string>.
 * Each chunk is written with backpressure handling via writeToNodeStream;
 * close() flushes and waits for the underlying stream to finish;
 * abort() destroys the underlying stream.
 */
function createFileWritableStream(path) {
  const nodeStream = createWriteStream(path, { encoding: "utf-8" });
  const flushAndClose = () =>
    new Promise((resolve, reject) => {
      const onError = (err) => reject(err);
      nodeStream.once("error", onError);
      nodeStream.end(() => {
        nodeStream.removeListener("error", onError);
        resolve();
      });
    });
  return new WritableStream({
    write: (chunk) => writeToNodeStream(nodeStream, chunk),
    close: flushAndClose,
    abort: () => {
      nodeStream.destroy();
    }
  });
}
|
|
103
|
+
/** Create a WHATWG WritableStream<string> that forwards chunks to process.stdout. */
function createStdoutWritableStream() {
  return new WritableStream({
    write: (chunk) => writeToNodeStream(process.stdout, chunk)
  });
}
|
|
110
|
+
/**
 * Run the `export` command: connect to the database, apply any CLI filter
 * flags via AuditQueryBuilder, and stream matching audit rows (CSV or JSON)
 * to a file or stdout. Throws when DATABASE_URL is missing or a flag fails
 * validation. The Kysely connection is always destroyed on exit.
 */
async function exportLogs(options) {
  const databaseUrl = options.databaseUrl ?? process.env["DATABASE_URL"];
  if (databaseUrl === void 0 || databaseUrl === "") {
    throw new Error(
      "DATABASE_URL is required. Set the DATABASE_URL environment variable or pass --database-url."
    );
  }
  const format = parseFormat(options.format);
  const dialect = detectDialect(databaseUrl);
  const db = await createKyselyInstance(databaseUrl, dialect);
  try {
    const executor = createSqlExecutor(db, dialect);
    const { since, severity, compliance, actor, limit } = options;
    let query;
    // Only build a query object when at least one filter flag was given.
    if (since !== void 0 || severity !== void 0 || compliance !== void 0 || actor !== void 0 || limit !== void 0) {
      let builder = new AuditQueryBuilder(executor);
      if (since !== void 0) {
        builder = builder.since(parseSinceValue(since));
      }
      if (severity !== void 0) {
        builder = builder.severity(parseSeverity(severity));
      }
      if (compliance !== void 0) {
        builder = builder.compliance(...compliance.split(",").map((tag) => tag.trim()));
      }
      if (actor !== void 0) {
        builder = builder.actor(actor);
      }
      if (limit !== void 0) {
        const n = Number(limit);
        if (Number.isNaN(n) || n <= 0 || !Number.isInteger(n)) {
          throw new Error(
            `Invalid limit "${limit}". Expected a positive integer.`
          );
        }
        builder = builder.limit(n);
      }
      query = builder;
    }
    const outputPath = options.output;
    const output = outputPath === void 0 ? createStdoutWritableStream() : createFileWritableStream(outputPath);
    const exportOptions = { format, output };
    if (query !== void 0) {
      exportOptions.query = query;
    }
    const result = await runExport(executor, exportOptions);
    // Summary goes to stderr so piped stdout data stays clean.
    const summary = outputPath === void 0
      ? `${pc.green("\u2713")} Exported ${result.rowCount} rows`
      : `${pc.green("\u2713")} Exported ${result.rowCount} rows to ${pc.dim(outputPath)}`;
    console.error(summary);
  } finally {
    await db.destroy();
  }
}
|
|
173
|
+
|
|
174
|
+
// Public surface of this chunk, re-exported by the dist entrypoints.
export {
  parseFormat,
  parseSeverity,
  parseSinceValue,
  createFileWritableStream,
  createStdoutWritableStream,
  exportLogs
};
|
|
182
|
+
//# sourceMappingURL=chunk-M46VJ3FO.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../src/export.ts"],"sourcesContent":["/**\n * `better-audit export` — Export audit log entries as CSV or JSON.\n *\n * Connects to the database via Kysely (multi-dialect), builds query filters\n * from CLI flags, and streams results to a file or stdout.\n */\n\nimport { createWriteStream } from \"node:fs\";\nimport pc from \"picocolors\";\nimport { runExport, AuditQueryBuilder, parseDuration } from \"@usebetterdev/audit-core\";\nimport type { AuditSeverity, ExportOptions } from \"@usebetterdev/audit-core\";\nimport { createKyselyInstance, createSqlExecutor } from \"./sql-executor.js\";\nimport { detectDialect } from \"./detect-adapter.js\";\n\nconst VALID_FORMATS = new Set([\"csv\", \"json\"]);\nconst VALID_SEVERITIES: ReadonlySet<string> = new Set([\n \"low\",\n \"medium\",\n \"high\",\n \"critical\",\n]);\n\nconst ISO_DATE_REGEX = /^\\d{4}-\\d{2}-\\d{2}(T[\\w:.+-]+)?$/;\n\nexport interface ExportCommandOptions {\n format?: string;\n output?: string;\n since?: string;\n severity?: string;\n compliance?: string;\n actor?: string;\n limit?: string;\n databaseUrl?: string;\n}\n\n/** Parse `--format` flag, validate and default to \"csv\". */\nexport function parseFormat(value: string | undefined): \"csv\" | \"json\" {\n if (value === undefined) {\n return \"csv\";\n }\n if (!VALID_FORMATS.has(value)) {\n throw new Error(\n `Invalid format \"${value}\". Expected \"csv\" or \"json\".`,\n );\n }\n return value as \"csv\" | \"json\";\n}\n\n/** Parse `--severity` flag, validate against known values. */\nexport function parseSeverity(value: string): AuditSeverity {\n if (!VALID_SEVERITIES.has(value)) {\n throw new Error(\n `Invalid severity \"${value}\". Expected one of: low, medium, high, critical.`,\n );\n }\n return value as AuditSeverity;\n}\n\n/**\n * Parse `--since` flag. ISO date strings (e.g. \"2025-01-01\") become Date objects.\n * Duration strings (e.g. 
\"90d\") are returned as-is for the query builder.\n */\nexport function parseSinceValue(value: string): Date | string {\n if (ISO_DATE_REGEX.test(value)) {\n const date = new Date(value);\n if (Number.isNaN(date.getTime())) {\n throw new Error(`Invalid date \"${value}\". Expected ISO-8601 format.`);\n }\n return date;\n }\n // Validate as duration — throws if invalid\n parseDuration(value);\n return value;\n}\n\n/**\n * Bridge a single `nodeStream.write()` call into a Promise that respects\n * backpressure and settles exactly once — whichever of error/drain fires first.\n */\nfunction writeToNodeStream(\n nodeStream: NodeJS.WritableStream,\n chunk: string,\n): Promise<void> {\n return new Promise<void>((resolve, reject) => {\n let settled = false;\n const settle = (fn: () => void) => {\n if (!settled) {\n settled = true;\n cleanup();\n fn();\n }\n };\n const onError = (err: Error) => { settle(() => reject(err)); };\n const onDrain = () => { settle(resolve); };\n const cleanup = () => {\n nodeStream.removeListener(\"error\", onError);\n nodeStream.removeListener(\"drain\", onDrain);\n };\n\n nodeStream.once(\"error\", onError);\n const canContinue = nodeStream.write(chunk);\n if (canContinue) {\n settle(resolve);\n } else {\n nodeStream.once(\"drain\", onDrain);\n }\n });\n}\n\n/**\n * Create a WHATWG WritableStream<string> that writes to a Node.js file.\n * Handles backpressure by awaiting the drain event.\n */\nexport function createFileWritableStream(\n path: string,\n): WritableStream<string> {\n const nodeStream = createWriteStream(path, { encoding: \"utf-8\" });\n\n return new WritableStream<string>({\n write(chunk) {\n return writeToNodeStream(nodeStream, chunk);\n },\n close() {\n return new Promise<void>((resolve, reject) => {\n const onError = (err: Error) => { reject(err); };\n nodeStream.once(\"error\", onError);\n nodeStream.end(() => {\n nodeStream.removeListener(\"error\", onError);\n resolve();\n });\n });\n },\n abort() {\n nodeStream.destroy();\n 
},\n });\n}\n\n/** Create a WHATWG WritableStream<string> that writes to process.stdout. */\nexport function createStdoutWritableStream(): WritableStream<string> {\n return new WritableStream<string>({\n write(chunk) {\n return writeToNodeStream(process.stdout, chunk);\n },\n });\n}\n\nexport async function exportLogs(options: ExportCommandOptions): Promise<void> {\n // 1. Resolve DATABASE_URL\n const databaseUrl = options.databaseUrl ?? process.env[\"DATABASE_URL\"];\n if (databaseUrl === undefined || databaseUrl === \"\") {\n throw new Error(\n \"DATABASE_URL is required. Set the DATABASE_URL environment variable or pass --database-url.\",\n );\n }\n\n // 2. Parse flags\n const format = parseFormat(options.format);\n\n // 3. Detect dialect + create Kysely instance\n const dialect = detectDialect(databaseUrl);\n const db = await createKyselyInstance(databaseUrl, dialect);\n\n try {\n const executor = createSqlExecutor(db, dialect);\n\n // 4. Build query\n let query: AuditQueryBuilder | undefined;\n const hasFilters =\n options.since !== undefined ||\n options.severity !== undefined ||\n options.compliance !== undefined ||\n options.actor !== undefined ||\n options.limit !== undefined;\n\n if (hasFilters) {\n let builder = new AuditQueryBuilder(executor);\n\n if (options.since !== undefined) {\n const sinceValue = parseSinceValue(options.since);\n builder = builder.since(sinceValue);\n }\n\n if (options.severity !== undefined) {\n const severity = parseSeverity(options.severity);\n builder = builder.severity(severity);\n }\n\n if (options.compliance !== undefined) {\n const tags = options.compliance.split(\",\").map((t) => t.trim());\n builder = builder.compliance(...tags);\n }\n\n if (options.actor !== undefined) {\n builder = builder.actor(options.actor);\n }\n\n if (options.limit !== undefined) {\n const n = Number(options.limit);\n if (Number.isNaN(n) || n <= 0 || !Number.isInteger(n)) {\n throw new Error(\n `Invalid limit \"${options.limit}\". 
Expected a positive integer.`,\n );\n }\n builder = builder.limit(n);\n }\n\n query = builder;\n }\n\n // 5. Create output sink\n const outputPath = options.output;\n const output: WritableStream<string> = outputPath !== undefined\n ? createFileWritableStream(outputPath)\n : createStdoutWritableStream();\n\n // 6. Run export\n const exportOptions: ExportOptions = {\n format,\n output,\n ...(query !== undefined && { query }),\n };\n\n const result = await runExport(executor, exportOptions);\n\n // 7. Summary message\n if (outputPath !== undefined) {\n console.error(\n `${pc.green(\"✓\")} Exported ${result.rowCount} rows to ${pc.dim(outputPath)}`,\n );\n } else {\n console.error(\n `${pc.green(\"✓\")} Exported ${result.rowCount} rows`,\n );\n }\n } finally {\n await db.destroy();\n }\n}\n"],"mappings":";;;;;;;;;AAOA,SAAS,yBAAyB;AAClC,OAAO,QAAQ;AACf,SAAS,WAAW,mBAAmB,qBAAqB;AAK5D,IAAM,gBAAgB,oBAAI,IAAI,CAAC,OAAO,MAAM,CAAC;AAC7C,IAAM,mBAAwC,oBAAI,IAAI;AAAA,EACpD;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF,CAAC;AAED,IAAM,iBAAiB;AAchB,SAAS,YAAY,OAA2C;AACrE,MAAI,UAAU,QAAW;AACvB,WAAO;AAAA,EACT;AACA,MAAI,CAAC,cAAc,IAAI,KAAK,GAAG;AAC7B,UAAM,IAAI;AAAA,MACR,mBAAmB,KAAK;AAAA,IAC1B;AAAA,EACF;AACA,SAAO;AACT;AAGO,SAAS,cAAc,OAA8B;AAC1D,MAAI,CAAC,iBAAiB,IAAI,KAAK,GAAG;AAChC,UAAM,IAAI;AAAA,MACR,qBAAqB,KAAK;AAAA,IAC5B;AAAA,EACF;AACA,SAAO;AACT;AAMO,SAAS,gBAAgB,OAA8B;AAC5D,MAAI,eAAe,KAAK,KAAK,GAAG;AAC9B,UAAM,OAAO,IAAI,KAAK,KAAK;AAC3B,QAAI,OAAO,MAAM,KAAK,QAAQ,CAAC,GAAG;AAChC,YAAM,IAAI,MAAM,iBAAiB,KAAK,8BAA8B;AAAA,IACtE;AACA,WAAO;AAAA,EACT;AAEA,gBAAc,KAAK;AACnB,SAAO;AACT;AAMA,SAAS,kBACP,YACA,OACe;AACf,SAAO,IAAI,QAAc,CAAC,SAAS,WAAW;AAC5C,QAAI,UAAU;AACd,UAAM,SAAS,CAAC,OAAmB;AACjC,UAAI,CAAC,SAAS;AACZ,kBAAU;AACV,gBAAQ;AACR,WAAG;AAAA,MACL;AAAA,IACF;AACA,UAAM,UAAU,CAAC,QAAe;AAAE,aAAO,MAAM,OAAO,GAAG,CAAC;AAAA,IAAG;AAC7D,UAAM,UAAU,MAAM;AAAE,aAAO,OAAO;AAAA,IAAG;AACzC,UAAM,UAAU,MAAM;AACpB,iBAAW,eAAe,SAAS,OAAO;AAC1C,iBAAW,eAAe,SAAS,OAAO;AAAA,IAC5C;AAEA,eAAW,KAAK,SAAS,OAAO;AAChC,UAAM,cAAc,WAAW,MAAM,KAAK;AAC1C,Q
AAI,aAAa;AACf,aAAO,OAAO;AAAA,IAChB,OAAO;AACL,iBAAW,KAAK,SAAS,OAAO;AAAA,IAClC;AAAA,EACF,CAAC;AACH;AAMO,SAAS,yBACd,MACwB;AACxB,QAAM,aAAa,kBAAkB,MAAM,EAAE,UAAU,QAAQ,CAAC;AAEhE,SAAO,IAAI,eAAuB;AAAA,IAChC,MAAM,OAAO;AACX,aAAO,kBAAkB,YAAY,KAAK;AAAA,IAC5C;AAAA,IACA,QAAQ;AACN,aAAO,IAAI,QAAc,CAAC,SAAS,WAAW;AAC5C,cAAM,UAAU,CAAC,QAAe;AAAE,iBAAO,GAAG;AAAA,QAAG;AAC/C,mBAAW,KAAK,SAAS,OAAO;AAChC,mBAAW,IAAI,MAAM;AACnB,qBAAW,eAAe,SAAS,OAAO;AAC1C,kBAAQ;AAAA,QACV,CAAC;AAAA,MACH,CAAC;AAAA,IACH;AAAA,IACA,QAAQ;AACN,iBAAW,QAAQ;AAAA,IACrB;AAAA,EACF,CAAC;AACH;AAGO,SAAS,6BAAqD;AACnE,SAAO,IAAI,eAAuB;AAAA,IAChC,MAAM,OAAO;AACX,aAAO,kBAAkB,QAAQ,QAAQ,KAAK;AAAA,IAChD;AAAA,EACF,CAAC;AACH;AAEA,eAAsB,WAAW,SAA8C;AAE7E,QAAM,cAAc,QAAQ,eAAe,QAAQ,IAAI,cAAc;AACrE,MAAI,gBAAgB,UAAa,gBAAgB,IAAI;AACnD,UAAM,IAAI;AAAA,MACR;AAAA,IACF;AAAA,EACF;AAGA,QAAM,SAAS,YAAY,QAAQ,MAAM;AAGzC,QAAM,UAAU,cAAc,WAAW;AACzC,QAAM,KAAK,MAAM,qBAAqB,aAAa,OAAO;AAE1D,MAAI;AACF,UAAM,WAAW,kBAAkB,IAAI,OAAO;AAG9C,QAAI;AACJ,UAAM,aACJ,QAAQ,UAAU,UAClB,QAAQ,aAAa,UACrB,QAAQ,eAAe,UACvB,QAAQ,UAAU,UAClB,QAAQ,UAAU;AAEpB,QAAI,YAAY;AACd,UAAI,UAAU,IAAI,kBAAkB,QAAQ;AAE5C,UAAI,QAAQ,UAAU,QAAW;AAC/B,cAAM,aAAa,gBAAgB,QAAQ,KAAK;AAChD,kBAAU,QAAQ,MAAM,UAAU;AAAA,MACpC;AAEA,UAAI,QAAQ,aAAa,QAAW;AAClC,cAAM,WAAW,cAAc,QAAQ,QAAQ;AAC/C,kBAAU,QAAQ,SAAS,QAAQ;AAAA,MACrC;AAEA,UAAI,QAAQ,eAAe,QAAW;AACpC,cAAM,OAAO,QAAQ,WAAW,MAAM,GAAG,EAAE,IAAI,CAAC,MAAM,EAAE,KAAK,CAAC;AAC9D,kBAAU,QAAQ,WAAW,GAAG,IAAI;AAAA,MACtC;AAEA,UAAI,QAAQ,UAAU,QAAW;AAC/B,kBAAU,QAAQ,MAAM,QAAQ,KAAK;AAAA,MACvC;AAEA,UAAI,QAAQ,UAAU,QAAW;AAC/B,cAAM,IAAI,OAAO,QAAQ,KAAK;AAC9B,YAAI,OAAO,MAAM,CAAC,KAAK,KAAK,KAAK,CAAC,OAAO,UAAU,CAAC,GAAG;AACrD,gBAAM,IAAI;AAAA,YACR,kBAAkB,QAAQ,KAAK;AAAA,UACjC;AAAA,QACF;AACA,kBAAU,QAAQ,MAAM,CAAC;AAAA,MAC3B;AAEA,cAAQ;AAAA,IACV;AAGA,UAAM,aAAa,QAAQ;AAC3B,UAAM,SAAiC,eAAe,SAClD,yBAAyB,UAAU,IACnC,2BAA2B;AAG/B,UAAM,gBAA+B;AAAA,MACnC;AAAA,MACA;AAAA,MACA,GAAI,UAAU,UAAa,EAAE,MAAM;AAAA,IACrC;AAEA,UAAM,SAAS,MAAM,UAAU,UAAU,aAAa;AAGtD,QAAI,eAAe,QAAW;AAC5B,cAAQ;AAAA,QACN,GAAG,GAAG,MAAM,QAAG,CAAC,aAAa,O
AAO,QAAQ,YAAY,GAAG,IAAI,UAAU,CAAC;AAAA,MAC5E;AAAA,IACF,OAAO;AACL,cAAQ;AAAA,QACN,GAAG,GAAG,MAAM,QAAG,CAAC,aAAa,OAAO,QAAQ;AAAA,MAC9C;AAAA,IACF;AAAA,EACF,UAAE;AACA,UAAM,GAAG,QAAQ;AAAA,EACnB;AACF;","names":[]}
|
|
@@ -0,0 +1,119 @@
|
|
|
1
|
+
// src/generate-sql.ts
|
|
2
|
+
import {
|
|
3
|
+
AUDIT_LOG_SCHEMA
|
|
4
|
+
} from "@usebetterdev/audit-core";
|
|
5
|
+
// Secondary indexes for the audit_logs table: each entry names the index
// and lists the columns it covers, in order.
var INDEX_DEFINITIONS = [
  {
    name: "audit_logs_table_name_timestamp_idx",
    columns: ["table_name", "timestamp"]
  },
  { name: "audit_logs_actor_id_idx", columns: ["actor_id"] },
  { name: "audit_logs_record_id_idx", columns: ["record_id"] },
  {
    name: "audit_logs_table_name_record_id_idx",
    columns: ["table_name", "record_id"]
  },
  { name: "audit_logs_operation_idx", columns: ["operation"] },
  { name: "audit_logs_timestamp_idx", columns: ["timestamp"] },
  { name: "audit_logs_timestamp_id_idx", columns: ["timestamp", "id"] }
];
|
|
20
|
+
/**
 * Quote an SQL identifier (table, column, or index name) for the target
 * dialect: backticks for MySQL, double quotes otherwise, doubling any
 * embedded quote characters so the result is safe to interpolate.
 */
function escapeIdentifier(name, dialect) {
  const quote = dialect === "mysql" ? "`" : '"';
  const escaped = name.split(quote).join(quote + quote);
  return quote + escaped + quote;
}
|
|
26
|
+
/** Translate an abstract column type into the concrete SQL type for a dialect. */
function sqlType(type, dialect) {
  const TYPE_TABLE = {
    uuid: { postgres: "UUID", mysql: "CHAR(36)", sqlite: "TEXT" },
    timestamptz: { postgres: "TIMESTAMPTZ", mysql: "DATETIME(6)", sqlite: "TEXT" },
    text: { postgres: "TEXT", mysql: "TEXT", sqlite: "TEXT" },
    jsonb: { postgres: "JSONB", mysql: "JSON", sqlite: "TEXT" },
    boolean: { postgres: "BOOLEAN", mysql: "BOOLEAN", sqlite: "INTEGER" }
  };
  const byDialect = TYPE_TABLE[type];
  return byDialect[dialect];
}
|
|
40
|
+
/**
 * Map a known default expression onto its dialect-specific form.
 * A mapped entry may be undefined (e.g. no UUID default on SQLite), which
 * tells the caller to omit the DEFAULT clause. Unknown expressions are
 * passed through verbatim after logging a warning.
 */
function defaultExpression(expression, dialect) {
  const KNOWN = {
    "gen_random_uuid()": {
      postgres: "gen_random_uuid()",
      mysql: "(UUID())",
      sqlite: void 0
    },
    "now()": {
      postgres: "now()",
      mysql: "CURRENT_TIMESTAMP(6)",
      sqlite: "(datetime('now'))"
    }
  };
  const mapped = KNOWN[expression];
  if (mapped !== void 0) {
    return mapped[dialect];
  }
  console.warn(
    `[better-audit] Warning: unrecognized default expression "${expression}" \u2014 passing through verbatim for ${dialect}. It may not be dialect-compatible.`
  );
  return expression;
}
|
|
62
|
+
/**
 * Render one column definition fragment: quoted name, SQL type,
 * optional NOT NULL, and an optional DEFAULT clause (skipped when the
 * default has no equivalent in this dialect).
 */
function columnDdl(name, definition, dialect) {
  let ddl = `${escapeIdentifier(name, dialect)} ${sqlType(definition.type, dialect)}`;
  if (!definition.nullable) {
    ddl += " NOT NULL";
  }
  if (definition.defaultExpression !== void 0) {
    const expr = defaultExpression(definition.defaultExpression, dialect);
    if (expr !== void 0) {
      ddl += ` DEFAULT ${expr}`;
    }
  }
  return ddl;
}
|
|
77
|
+
/** Render a CREATE INDEX statement for one index definition on the audit table. */
function indexDdl(indexDef, dialect) {
  const table = escapeIdentifier(AUDIT_LOG_SCHEMA.tableName, dialect);
  const index = escapeIdentifier(indexDef.name, dialect);
  const columnList = indexDef.columns
    .map((column) => escapeIdentifier(column, dialect))
    .join(", ");
  return `CREATE INDEX IF NOT EXISTS ${index} ON ${table} (${columnList});`;
}
|
|
83
|
+
/**
 * Generate the full migration script for the audit_logs table in the given
 * dialect: a CREATE TABLE IF NOT EXISTS statement built from the declarative
 * schema, followed by CREATE INDEX statements for every known index.
 */
function generateMigrationSql(dialect) {
  const { tableName, columns } = AUDIT_LOG_SCHEMA;
  const quotedTable = escapeIdentifier(tableName, dialect);
  const columnLines = [];
  let primaryKeyColumn;
  for (const [name, definition] of Object.entries(columns)) {
    columnLines.push(` ${columnDdl(name, definition, dialect)}`);
    if (definition.primaryKey === true) {
      primaryKeyColumn = name;
    }
  }
  // Emit the PRIMARY KEY constraint after all column definitions.
  if (primaryKeyColumn !== void 0) {
    columnLines.push(` PRIMARY KEY (${escapeIdentifier(primaryKeyColumn, dialect)})`);
  }
  const header = [
    `-- better-audit: ${dialect} migration for ${tableName}`,
    `-- Generated by @usebetterdev/audit-cli`,
    "",
    `CREATE TABLE IF NOT EXISTS ${quotedTable} (`,
    columnLines.join(",\n"),
    ");",
    ""
  ];
  const indexStatements = INDEX_DEFINITIONS.map((indexDef) => indexDdl(indexDef, dialect));
  return [...header, ...indexStatements, ""].join("\n");
}
|
|
114
|
+
|
|
115
|
+
// Public surface of this chunk, re-exported by the dist entrypoints.
export {
  INDEX_DEFINITIONS,
  generateMigrationSql
};
|
|
119
|
+
//# sourceMappingURL=chunk-O5LHE2AC.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../src/generate-sql.ts"],"sourcesContent":["/**\n * Multi-dialect SQL generation for the `audit_logs` table.\n *\n * Reads the declarative `AUDIT_LOG_SCHEMA` from audit-core and produces\n * dialect-appropriate DDL for Postgres, MySQL, and SQLite.\n */\n\nimport {\n AUDIT_LOG_SCHEMA,\n type ColumnDefinition,\n type ColumnType,\n} from \"@usebetterdev/audit-core\";\n\nexport type { DatabaseDialect } from \"./detect-adapter.js\";\nimport type { DatabaseDialect } from \"./detect-adapter.js\";\n\n/**\n * Index definitions matching `packages/audit/drizzle/src/schema.ts`.\n * Each entry has a name and the list of column names it covers.\n */\nexport const INDEX_DEFINITIONS: ReadonlyArray<{\n name: string;\n columns: ReadonlyArray<string>;\n}> = [\n {\n name: \"audit_logs_table_name_timestamp_idx\",\n columns: [\"table_name\", \"timestamp\"],\n },\n { name: \"audit_logs_actor_id_idx\", columns: [\"actor_id\"] },\n { name: \"audit_logs_record_id_idx\", columns: [\"record_id\"] },\n {\n name: \"audit_logs_table_name_record_id_idx\",\n columns: [\"table_name\", \"record_id\"],\n },\n { name: \"audit_logs_operation_idx\", columns: [\"operation\"] },\n { name: \"audit_logs_timestamp_idx\", columns: [\"timestamp\"] },\n { name: \"audit_logs_timestamp_id_idx\", columns: [\"timestamp\", \"id\"] },\n];\n\n/** Escape an identifier (table name, column name, index name) for safe SQL interpolation. */\nfunction escapeIdentifier(name: string, dialect: DatabaseDialect): string {\n if (dialect === \"mysql\") {\n return `\\`${name.replaceAll(\"`\", \"``\")}\\``;\n }\n return `\"${name.replaceAll('\"', '\"\"')}\"`;\n}\n\n/** Map a core ColumnType to a dialect-specific SQL type. 
*/\nfunction sqlType(type: ColumnType, dialect: DatabaseDialect): string {\n const map: Record<ColumnType, Record<DatabaseDialect, string>> = {\n uuid: { postgres: \"UUID\", mysql: \"CHAR(36)\", sqlite: \"TEXT\" },\n timestamptz: {\n postgres: \"TIMESTAMPTZ\",\n mysql: \"DATETIME(6)\",\n sqlite: \"TEXT\",\n },\n text: { postgres: \"TEXT\", mysql: \"TEXT\", sqlite: \"TEXT\" },\n jsonb: { postgres: \"JSONB\", mysql: \"JSON\", sqlite: \"TEXT\" },\n boolean: { postgres: \"BOOLEAN\", mysql: \"BOOLEAN\", sqlite: \"INTEGER\" },\n };\n return map[type][dialect];\n}\n\n/** Map a core default expression to a dialect-specific SQL expression. */\nfunction defaultExpression(\n expression: string,\n dialect: DatabaseDialect,\n): string | undefined {\n const map: Record<string, Record<DatabaseDialect, string | undefined>> = {\n \"gen_random_uuid()\": {\n postgres: \"gen_random_uuid()\",\n mysql: \"(UUID())\",\n sqlite: undefined,\n },\n \"now()\": {\n postgres: \"now()\",\n mysql: \"CURRENT_TIMESTAMP(6)\",\n sqlite: \"(datetime('now'))\",\n },\n };\n const entry = map[expression];\n if (entry === undefined) {\n console.warn(\n `[better-audit] Warning: unrecognized default expression \"${expression}\" — ` +\n `passing through verbatim for ${dialect}. It may not be dialect-compatible.`,\n );\n return expression;\n }\n return entry[dialect];\n}\n\n/** Build a single column DDL fragment (e.g. `\"id\" UUID NOT NULL DEFAULT gen_random_uuid()`). 
*/\nfunction columnDdl(\n name: string,\n definition: ColumnDefinition,\n dialect: DatabaseDialect,\n): string {\n const parts: string[] = [];\n\n parts.push(escapeIdentifier(name, dialect));\n parts.push(sqlType(definition.type, dialect));\n\n if (!definition.nullable) {\n parts.push(\"NOT NULL\");\n }\n\n if (definition.defaultExpression !== undefined) {\n const expr = defaultExpression(definition.defaultExpression, dialect);\n if (expr !== undefined) {\n parts.push(`DEFAULT ${expr}`);\n }\n }\n\n return parts.join(\" \");\n}\n\n/** Build a single index DDL statement. */\nfunction indexDdl(\n indexDef: { name: string; columns: ReadonlyArray<string> },\n dialect: DatabaseDialect,\n): string {\n const cols = indexDef.columns.map((c) => escapeIdentifier(c, dialect)).join(\", \");\n const tableName = escapeIdentifier(AUDIT_LOG_SCHEMA.tableName, dialect);\n const indexName = escapeIdentifier(indexDef.name, dialect);\n\n return `CREATE INDEX IF NOT EXISTS ${indexName} ON ${tableName} (${cols});`;\n}\n\n/**\n * Generate the full migration SQL for the `audit_logs` table in the given dialect.\n *\n * Includes `CREATE TABLE IF NOT EXISTS` and all indexes.\n */\nexport function generateMigrationSql(dialect: DatabaseDialect): string {\n const { tableName, columns } = AUDIT_LOG_SCHEMA;\n const quotedTable = escapeIdentifier(tableName, dialect);\n\n // Column definitions\n const columnLines: string[] = [];\n let primaryKeyColumn: string | undefined;\n\n for (const [name, definition] of Object.entries(columns)) {\n columnLines.push(` ${columnDdl(name, definition, dialect)}`);\n if (definition.primaryKey === true) {\n primaryKeyColumn = name;\n }\n }\n\n // Primary key constraint\n if (primaryKeyColumn !== undefined) {\n columnLines.push(\n ` PRIMARY KEY (${escapeIdentifier(primaryKeyColumn, dialect)})`,\n );\n }\n\n const parts: string[] = [\n `-- better-audit: ${dialect} migration for ${tableName}`,\n `-- Generated by @usebetterdev/audit-cli`,\n \"\",\n `CREATE TABLE IF NOT 
EXISTS ${quotedTable} (`,\n columnLines.join(\",\\n\"),\n \");\",\n \"\",\n ];\n\n // Index definitions\n for (const idx of INDEX_DEFINITIONS) {\n parts.push(indexDdl(idx, dialect));\n }\n\n parts.push(\"\");\n return parts.join(\"\\n\");\n}\n"],"mappings":";AAOA;AAAA,EACE;AAAA,OAGK;AASA,IAAM,oBAGR;AAAA,EACH;AAAA,IACE,MAAM;AAAA,IACN,SAAS,CAAC,cAAc,WAAW;AAAA,EACrC;AAAA,EACA,EAAE,MAAM,2BAA2B,SAAS,CAAC,UAAU,EAAE;AAAA,EACzD,EAAE,MAAM,4BAA4B,SAAS,CAAC,WAAW,EAAE;AAAA,EAC3D;AAAA,IACE,MAAM;AAAA,IACN,SAAS,CAAC,cAAc,WAAW;AAAA,EACrC;AAAA,EACA,EAAE,MAAM,4BAA4B,SAAS,CAAC,WAAW,EAAE;AAAA,EAC3D,EAAE,MAAM,4BAA4B,SAAS,CAAC,WAAW,EAAE;AAAA,EAC3D,EAAE,MAAM,+BAA+B,SAAS,CAAC,aAAa,IAAI,EAAE;AACtE;AAGA,SAAS,iBAAiB,MAAc,SAAkC;AACxE,MAAI,YAAY,SAAS;AACvB,WAAO,KAAK,KAAK,WAAW,KAAK,IAAI,CAAC;AAAA,EACxC;AACA,SAAO,IAAI,KAAK,WAAW,KAAK,IAAI,CAAC;AACvC;AAGA,SAAS,QAAQ,MAAkB,SAAkC;AACnE,QAAM,MAA2D;AAAA,IAC/D,MAAM,EAAE,UAAU,QAAQ,OAAO,YAAY,QAAQ,OAAO;AAAA,IAC5D,aAAa;AAAA,MACX,UAAU;AAAA,MACV,OAAO;AAAA,MACP,QAAQ;AAAA,IACV;AAAA,IACA,MAAM,EAAE,UAAU,QAAQ,OAAO,QAAQ,QAAQ,OAAO;AAAA,IACxD,OAAO,EAAE,UAAU,SAAS,OAAO,QAAQ,QAAQ,OAAO;AAAA,IAC1D,SAAS,EAAE,UAAU,WAAW,OAAO,WAAW,QAAQ,UAAU;AAAA,EACtE;AACA,SAAO,IAAI,IAAI,EAAE,OAAO;AAC1B;AAGA,SAAS,kBACP,YACA,SACoB;AACpB,QAAM,MAAmE;AAAA,IACvE,qBAAqB;AAAA,MACnB,UAAU;AAAA,MACV,OAAO;AAAA,MACP,QAAQ;AAAA,IACV;AAAA,IACA,SAAS;AAAA,MACP,UAAU;AAAA,MACV,OAAO;AAAA,MACP,QAAQ;AAAA,IACV;AAAA,EACF;AACA,QAAM,QAAQ,IAAI,UAAU;AAC5B,MAAI,UAAU,QAAW;AACvB,YAAQ;AAAA,MACN,4DAA4D,UAAU,yCACtC,OAAO;AAAA,IACzC;AACA,WAAO;AAAA,EACT;AACA,SAAO,MAAM,OAAO;AACtB;AAGA,SAAS,UACP,MACA,YACA,SACQ;AACR,QAAM,QAAkB,CAAC;AAEzB,QAAM,KAAK,iBAAiB,MAAM,OAAO,CAAC;AAC1C,QAAM,KAAK,QAAQ,WAAW,MAAM,OAAO,CAAC;AAE5C,MAAI,CAAC,WAAW,UAAU;AACxB,UAAM,KAAK,UAAU;AAAA,EACvB;AAEA,MAAI,WAAW,sBAAsB,QAAW;AAC9C,UAAM,OAAO,kBAAkB,WAAW,mBAAmB,OAAO;AACpE,QAAI,SAAS,QAAW;AACtB,YAAM,KAAK,WAAW,IAAI,EAAE;AAAA,IAC9B;AAAA,EACF;AAEA,SAAO,MAAM,KAAK,GAAG;AACvB;AAGA,SAAS,SACP,UACA,SACQ;AACR,QAAM,OAAO,SAAS,QAAQ,IAAI,CAAC,MAAM,iBAAiB,GAAG,OAAO,CAAC,EAAE,KAAK,IAAI
;AAChF,QAAM,YAAY,iBAAiB,iBAAiB,WAAW,OAAO;AACtE,QAAM,YAAY,iBAAiB,SAAS,MAAM,OAAO;AAEzD,SAAO,8BAA8B,SAAS,OAAO,SAAS,KAAK,IAAI;AACzE;AAOO,SAAS,qBAAqB,SAAkC;AACrE,QAAM,EAAE,WAAW,QAAQ,IAAI;AAC/B,QAAM,cAAc,iBAAiB,WAAW,OAAO;AAGvD,QAAM,cAAwB,CAAC;AAC/B,MAAI;AAEJ,aAAW,CAAC,MAAM,UAAU,KAAK,OAAO,QAAQ,OAAO,GAAG;AACxD,gBAAY,KAAK,KAAK,UAAU,MAAM,YAAY,OAAO,CAAC,EAAE;AAC5D,QAAI,WAAW,eAAe,MAAM;AAClC,yBAAmB;AAAA,IACrB;AAAA,EACF;AAGA,MAAI,qBAAqB,QAAW;AAClC,gBAAY;AAAA,MACV,kBAAkB,iBAAiB,kBAAkB,OAAO,CAAC;AAAA,IAC/D;AAAA,EACF;AAEA,QAAM,QAAkB;AAAA,IACtB,oBAAoB,OAAO,kBAAkB,SAAS;AAAA,IACtD;AAAA,IACA;AAAA,IACA,8BAA8B,WAAW;AAAA,IACzC,YAAY,KAAK,KAAK;AAAA,IACtB;AAAA,IACA;AAAA,EACF;AAGA,aAAW,OAAO,mBAAmB;AACnC,UAAM,KAAK,SAAS,KAAK,OAAO,CAAC;AAAA,EACnC;AAEA,QAAM,KAAK,EAAE;AACb,SAAO,MAAM,KAAK,IAAI;AACxB;","names":[]}
|
|
@@ -0,0 +1,225 @@
|
|
|
1
|
+
import {
|
|
2
|
+
createKyselyInstance
|
|
3
|
+
} from "./chunk-7GSN73TA.js";
|
|
4
|
+
import {
|
|
5
|
+
detectDialect
|
|
6
|
+
} from "./chunk-HDO5P6X7.js";
|
|
7
|
+
|
|
8
|
+
// src/purge.ts
|
|
9
|
+
import pc from "picocolors";
|
|
10
|
+
|
|
11
|
+
// src/config-loader.ts
|
|
12
|
+
import { stat } from "fs/promises";
|
|
13
|
+
import { resolve } from "path";
|
|
14
|
+
/**
 * Dynamically import a module from a runtime-computed path.
 * The webpackIgnore hint stops bundlers from trying to resolve the
 * specifier at build time.
 */
function importFile(path) {
  return import(/* webpackIgnore: true */ path);
}
|
|
20
|
+
/**
 * Validate the `retention` section of a loaded config.
 * `days` must be a positive integer; `tables`, when present, must be a
 * non-empty array of non-empty strings. Throws a descriptive Error on
 * any violation; returns nothing on success.
 */
function validateRetention(retention) {
  if (typeof retention !== "object" || retention === null) {
    throw new Error("retention must be an object with a 'days' property");
  }
  const r = retention;
  const days = r["days"];
  if (!Number.isInteger(days) || !Number.isFinite(days) || days <= 0) {
    throw new Error(`retention.days must be a positive integer, got ${String(days)}`);
  }
  const tables = r["tables"];
  if (tables === void 0) {
    return;
  }
  const tablesInvalid =
    !Array.isArray(tables) ||
    tables.length === 0 ||
    tables.some((entry) => typeof entry !== "string" || entry === "");
  if (tablesInvalid) {
    throw new Error("retention.tables must be a non-empty array of non-empty strings");
  }
}
|
|
34
|
+
/**
 * Load a BetterAuditConfig from `configPath`, resolved against the cwd.
 *
 * Outcomes:
 * - File missing            -> resolves to `null` (caller may fall back to CLI flags).
 * - File present but broken -> throws, with a TypeScript-loader hint.
 * - Bad default export      -> throws.
 * - `retention` present     -> validated via validateRetention before returning.
 *
 * @param {string} configPath - Config file path, relative to process.cwd().
 * @returns {Promise<object|null>} The validated config, or null when the file is absent.
 */
async function loadConfig(configPath) {
  const absolutePath = resolve(process.cwd(), configPath);

  let loaded;
  try {
    loaded = await importFile(absolutePath);
  } catch (err) {
    // Classify the failure after the fact: a missing file is a soft miss;
    // any other failure (syntax error, missing TS loader, ...) is fatal.
    const fileExists = await stat(absolutePath).then(
      () => true,
      () => false
    );
    if (!fileExists) {
      return null;
    }
    const message = err instanceof Error ? err.message : String(err);
    throw new Error(
      `Failed to load config file "${configPath}": ${message}
Hint: TypeScript config files require a TypeScript loader. Run with "tsx" or "ts-node", or compile your config first.`
    );
  }

  const exported = loaded.default;
  if (exported === null || typeof exported !== "object") {
    throw new Error(
      `Config file "${configPath}" must export a BetterAuditConfig as the default export.`
    );
  }

  // Only the retention policy is validated here; other keys pass through.
  const asRecord = exported;
  if (asRecord["retention"] !== undefined) {
    validateRetention(asRecord["retention"]);
  }

  return exported;
}
|
|
62
|
+
|
|
63
|
+
// src/purge.ts
|
|
64
|
+
// Accepts "2025-01-01" or a full ISO timestamp like "2025-01-01T00:00:00Z";
// actual calendar validity is checked later via Date().
const ISO_DATE_REGEX = /^\d{4}-\d{2}-\d{2}(T[\w:.+-]+)?$/;
// Duration shorthand: integer amount plus d/w/m/y unit, case-insensitive.
const DURATION_REGEX = /^(\d+)(d|w|m|y)$/i;
// Rows removed per DELETE batch when no --batch-size is given.
const DEFAULT_BATCH_SIZE = 1000;
// Hard ceiling on delete iterations — guards against a runaway loop.
const MAX_BATCHES = 100000;
|
|
68
|
+
/**
 * Parse a `--since` value into an absolute cutoff Date.
 *
 * Accepts:
 * - ISO-8601 dates:      "2025-01-01" or "2025-01-01T00:00:00Z"
 * - duration shorthands: "90d", "4w", "3m", "1y" (case-insensitive)
 *
 * Durations are subtracted from the current time. Month and year
 * subtraction clamps to the last day of the target month instead of
 * letting Date roll into the following month (previously "1m" from
 * Mar 31 produced ~Mar 3, a cutoff LATER than intended — dangerous
 * for a destructive purge).
 *
 * @param {string} value - Raw --since flag value.
 * @returns {Date} Absolute cutoff date.
 * @throws {Error} When value is neither a valid ISO date nor a positive duration.
 */
function parseSinceValue(value) {
  // Shape check only; calendar validity is delegated to Date below.
  if (/^\d{4}-\d{2}-\d{2}(T[\w:.+-]+)?$/.test(value)) {
    const date = new Date(value);
    if (Number.isNaN(date.getTime())) {
      throw new Error(`Invalid date "${value}". Expected ISO-8601 format (e.g. "2025-01-01").`);
    }
    return date;
  }
  const match = /^(\d+)(d|w|m|y)$/i.exec(value);
  if (match !== null) {
    const amount = parseInt(match[1], 10);
    if (amount <= 0) {
      throw new Error(
        `Invalid --since value "${value}". Duration amount must be greater than zero.`
      );
    }
    const unit = match[2].toLowerCase();
    const now = new Date();
    if (unit === "d") {
      now.setDate(now.getDate() - amount);
    } else if (unit === "w") {
      now.setDate(now.getDate() - amount * 7);
    } else {
      // Months and years: remember the day-of-month, shift, then clamp.
      const dayOfMonth = now.getDate();
      if (unit === "m") {
        now.setMonth(now.getMonth() - amount);
      } else {
        now.setFullYear(now.getFullYear() - amount);
      }
      // If the target month is shorter, Date rolls into the next month
      // (the day-of-month changes); setDate(0) steps back to the last
      // day of the intended month.
      if (now.getDate() !== dayOfMonth) {
        now.setDate(0);
      }
    }
    return now;
  }
  throw new Error(
    `Invalid --since value "${value}". Expected an ISO date (e.g. "2025-01-01") or duration shorthand (e.g. "90d", "4w", "3m", "1y").`
  );
}
|
|
101
|
+
/**
 * Resolve the purge cutoff date.
 *
 * Priority: an explicit `since` value beats the config file's
 * `retention.days`. Throws when neither source yields a cutoff.
 *
 * @param {{ since?: string, config?: string }} options
 * @returns {Promise<Date>} Absolute cutoff date.
 * @throws {Error} When no retention policy can be resolved.
 */
async function resolveCutoffDate(options) {
  const { since, config: configPath } = options;

  // Explicit --since always wins.
  if (since !== undefined) {
    return parseSinceValue(since);
  }

  if (configPath !== undefined) {
    const config = await loadConfig(configPath);
    const retention = config?.retention;
    if (retention !== undefined) {
      const cutoff = new Date();
      cutoff.setDate(cutoff.getDate() - retention.days);
      return cutoff;
    }
  }

  throw new Error(
    "No retention policy configured. Pass --since <date|duration> or set retention.days in your config file."
  );
}
|
|
117
|
+
/**
 * Count audit_logs rows whose timestamp is strictly before `before`.
 * Feeds the --dry-run preview and the confirmation message.
 *
 * @param {object} db - Connected Kysely instance (project type).
 * @param {Date} before - Exclusive upper bound on row timestamps.
 * @returns {Promise<number>} Number of eligible rows; 0 when no row comes back.
 */
async function countEligibleRows(db, before) {
  const query = db
    .selectFrom("audit_logs")
    .select((eb) => eb.fn.countAll().as("count"))
    .where("timestamp", "<", before);
  const row = await query.executeTakeFirst();
  // executeTakeFirst resolves to undefined when there is no result row.
  if (row === undefined) {
    return 0;
  }
  // The driver may report the count as a string/BigInt; normalize.
  return Number(row.count);
}
|
|
121
|
+
/**
 * Delete one batch of audit_logs rows older than `before`.
 *
 * Uses `DELETE ... WHERE id IN (SELECT id ... LIMIT n)` because a bare
 * `DELETE ... LIMIT` is not supported across all target dialects.
 *
 * @param {object} db - Connected Kysely instance (project type).
 * @param {Date} before - Exclusive upper bound on row timestamps.
 * @param {number} batchSize - Maximum rows to delete in this batch.
 * @returns {Promise<number>} Rows actually deleted.
 */
async function deleteBatch(db, before, batchSize) {
  const idBatch = db
    .selectFrom("audit_logs")
    .select("id")
    .where("timestamp", "<", before)
    .limit(batchSize);

  const outcome = await db
    .deleteFrom("audit_logs")
    .where("id", "in", idBatch)
    .executeTakeFirst();

  // numDeletedRows may not be a plain number (e.g. a BigInt); coerce it.
  return Number(outcome.numDeletedRows);
}
|
|
126
|
+
/** Format a row count with US-style thousands separators (e.g. "1,234,567"). */
function formatCount(n) {
  return new Intl.NumberFormat("en-US").format(n);
}
|
|
129
|
+
/**
 * Render elapsed milliseconds for humans: "850ms" below one second,
 * otherwise seconds with one decimal place ("1.5s").
 *
 * @param {number} ms - Elapsed time in milliseconds.
 * @returns {string} Human-readable duration.
 */
function formatDuration(ms) {
  const underOneSecond = ms < 1000;
  return underOneSecond ? `${ms}ms` : `${(ms / 1000).toFixed(1)}s`;
}
|
|
135
|
+
/**
 * Delete audit_logs rows older than the retention cutoff, in bounded batches.
 *
 * Safety rails, in order:
 * 1. `dryRun` counts matching rows and returns without deleting.
 * 2. Without `yes`, prints the count plus a confirmation hint and returns
 *    without deleting.
 * 3. Live deletion runs `batchSize` rows per DELETE, capped at MAX_BATCHES
 *    iterations, with a progress line every 10 batches.
 *
 * All human-facing output is written to process.stderr.
 *
 * @param {object} [options]
 * @param {string}  [options.databaseUrl] - Overrides the DATABASE_URL env var.
 * @param {string}  [options.since] - ISO date or duration shorthand (e.g. "90d").
 * @param {string}  [options.config] - Config file path; retention.days fallback.
 * @param {number}  [options.batchSize] - Rows per DELETE batch (default 1000).
 * @param {boolean} [options.dryRun] - Preview only; no rows deleted.
 * @param {boolean} [options.yes] - Required to actually delete rows.
 * @returns {Promise<void>}
 * @throws {Error} On missing DATABASE_URL, unresolvable cutoff, a cutoff in
 *   the future, or a non-positive/non-integer batch size.
 */
async function purge(options = {}) {
  // Connection string: explicit option wins over the environment.
  const databaseUrl = options.databaseUrl ?? process.env["DATABASE_URL"];
  if (databaseUrl === void 0 || databaseUrl === "") {
    throw new Error(
      "DATABASE_URL is required. Set the DATABASE_URL environment variable or pass --database-url."
    );
  }
  // Forward exactly one cutoff source: --since short-circuits the config file
  // (the config file is never even loaded when --since is present).
  const resolvOpts = {};
  if (options.since !== void 0) {
    resolvOpts.since = options.since;
  } else if (options.config !== void 0) {
    resolvOpts.config = options.config;
  }
  const before = await resolveCutoffDate(resolvOpts);
  // A future cutoff would match every row written so far — refuse it.
  if (before > /* @__PURE__ */ new Date()) {
    throw new Error(
      `Cutoff date ${before.toISOString().split("T")[0]} is in the future. Purge only accepts past dates.`
    );
  }
  const batchSize = options.batchSize ?? DEFAULT_BATCH_SIZE;
  if (!Number.isInteger(batchSize) || batchSize <= 0) {
    throw new Error(`--batch-size must be a positive integer, got ${String(batchSize)}`);
  }
  // Connect only after all validation has passed.
  const dialect = detectDialect(databaseUrl);
  const db = await createKyselyInstance(databaseUrl, dialect);
  const startTime = Date.now();
  try {
    // Dry run: count only; no confirmation needed, nothing deleted.
    if (options.dryRun) {
      const count = await countEligibleRows(db, before);
      process.stderr.write(
        `${pc.cyan("\u2192")} ${pc.bold(formatCount(count))} rows would be deleted (cutoff: ${pc.dim(before.toISOString().split("T")[0])})
`
      );
      process.stderr.write(`${pc.dim(" Dry run \u2014 no changes made.")}
`);
      return;
    }
    // Confirmation guard: live deletion requires an explicit --yes.
    if (!options.yes) {
      const count = await countEligibleRows(db, before);
      process.stderr.write(
        `${pc.yellow("!")} ${pc.bold(formatCount(count))} rows will be deleted (cutoff: ${pc.dim(before.toISOString().split("T")[0])})
 Run with ${pc.bold("--yes")} to confirm deletion, or ${pc.bold("--dry-run")} to preview.
`
      );
      return;
    }
    // Batched delete loop: each iteration removes at most batchSize rows;
    // a short batch means the table is drained and the loop can stop.
    let totalDeleted = 0;
    let batchNumber = 0;
    while (batchNumber < MAX_BATCHES) {
      const deleted = await deleteBatch(db, before, batchSize);
      totalDeleted += deleted;
      batchNumber++;
      // Progress heartbeat every 10 batches.
      if (batchNumber % 10 === 0) {
        process.stderr.write(
          ` ${pc.dim(`batch ${batchNumber}: ${formatCount(totalDeleted)} rows deleted so far...`)}
`
        );
      }
      if (deleted < batchSize) {
        break;
      }
    }
    // Hitting the cap means eligible rows may remain — warn, don't fail.
    if (batchNumber >= MAX_BATCHES) {
      process.stderr.write(
        pc.yellow(
          ` Warning: reached max batch limit (${MAX_BATCHES.toLocaleString("en-US")}). Some rows may remain.
`
        )
      );
    }
    const elapsed = Date.now() - startTime;
    // Final summary.
    process.stderr.write(`${pc.green("\u2713")} Purge complete
`);
    process.stderr.write(` Rows deleted: ${pc.bold(formatCount(totalDeleted))}
`);
    process.stderr.write(` Cutoff date: ${pc.dim(before.toISOString().split("T")[0])}
`);
    process.stderr.write(` Time taken: ${pc.dim(formatDuration(elapsed))}
`);
  } finally {
    // Always release the connection, even on error or early return.
    await db.destroy();
  }
}
|
|
218
|
+
|
|
219
|
+
// purge is the command implementation; parseSinceValue, resolveCutoffDate
// and formatDuration are additionally exported for testing (so noted in the
// original src/purge.ts comments embedded in the source map).
export {
  parseSinceValue,
  resolveCutoffDate,
  formatDuration,
  purge
};
//# sourceMappingURL=chunk-SJSGTCG4.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../src/purge.ts","../src/config-loader.ts"],"sourcesContent":["/**\n * `better-audit purge` — Delete audit logs older than the configured retention period.\n *\n * Connects to the database directly via Kysely (no ORM adapter needed).\n * Executes batched DELETEs to avoid holding long row-level locks on large tables.\n *\n * Always run with --dry-run first to preview what will be deleted.\n */\n\nimport pc from \"picocolors\";\nimport { createKyselyInstance } from \"./sql-executor.js\";\nimport { detectDialect } from \"./detect-adapter.js\";\nimport { loadConfig } from \"./config-loader.js\";\nimport type { Database } from \"./sql-executor.js\";\nimport type { Kysely } from \"kysely\";\n\nconst ISO_DATE_REGEX = /^\\d{4}-\\d{2}-\\d{2}(T[\\w:.+-]+)?$/;\nconst DURATION_REGEX = /^(\\d+)(d|w|m|y)$/i;\nconst DEFAULT_BATCH_SIZE = 1000;\nconst MAX_BATCHES = 100_000;\n\nexport interface PurgeOptions {\n config?: string;\n /** Preview rows to be deleted without deleting them. */\n dryRun?: boolean;\n /** ISO date string (e.g. \"2025-01-01\") or duration shorthand (e.g. \"90d\", \"1y\"). */\n since?: string;\n /** Rows per DELETE batch. Default: 1000. */\n batchSize?: number;\n /** Database URL (default: DATABASE_URL env). */\n databaseUrl?: string;\n /** Skip confirmation prompt (required for live deletion). */\n yes?: boolean;\n}\n\n/**\n * Parse a `--since` value to an absolute cutoff `Date`.\n *\n * Accepts:\n * - ISO-8601 date strings: \"2025-01-01\" or \"2025-01-01T00:00:00Z\"\n * - Duration shorthands: \"90d\", \"4w\", \"3m\", \"1y\"\n *\n * Exported for testing.\n */\nexport function parseSinceValue(value: string): Date {\n if (ISO_DATE_REGEX.test(value)) {\n const date = new Date(value);\n if (Number.isNaN(date.getTime())) {\n throw new Error(`Invalid date \"${value}\". Expected ISO-8601 format (e.g. 
\"2025-01-01\").`);\n }\n return date;\n }\n\n const match = DURATION_REGEX.exec(value);\n if (match !== null) {\n const amount = parseInt(match[1]!, 10);\n if (amount <= 0) {\n throw new Error(\n `Invalid --since value \"${value}\". Duration amount must be greater than zero.`,\n );\n }\n const unit = match[2]!.toLowerCase();\n const now = new Date();\n if (unit === \"d\") {\n now.setDate(now.getDate() - amount);\n } else if (unit === \"w\") {\n now.setDate(now.getDate() - amount * 7);\n } else if (unit === \"m\") {\n now.setMonth(now.getMonth() - amount);\n } else {\n // y\n now.setFullYear(now.getFullYear() - amount);\n }\n return now;\n }\n\n throw new Error(\n `Invalid --since value \"${value}\". ` +\n `Expected an ISO date (e.g. \"2025-01-01\") or duration shorthand (e.g. \"90d\", \"4w\", \"3m\", \"1y\").`,\n );\n}\n\n/**\n * Resolve the cutoff date from options.\n *\n * Priority: `--since` flag > config `retention.days`.\n * Throws if neither is available.\n *\n * Exported for testing.\n */\nexport async function resolveCutoffDate(options: {\n since?: string;\n config?: string;\n}): Promise<Date> {\n if (options.since !== undefined) {\n return parseSinceValue(options.since);\n }\n\n if (options.config !== undefined) {\n const config = await loadConfig(options.config);\n if (config?.retention !== undefined) {\n const cutoff = new Date();\n cutoff.setDate(cutoff.getDate() - config.retention.days);\n return cutoff;\n }\n }\n\n throw new Error(\n \"No retention policy configured. \" +\n \"Pass --since <date|duration> or set retention.days in your config file.\",\n );\n}\n\n/**\n * Count rows eligible for deletion (for --dry-run).\n */\nasync function countEligibleRows(\n db: Kysely<Database>,\n before: Date,\n): Promise<number> {\n const result = await db\n .selectFrom(\"audit_logs\")\n .select((eb) => eb.fn.countAll<string>().as(\"count\"))\n .where(\"timestamp\", \"<\", before)\n .executeTakeFirst();\n return result !== undefined ? 
Number(result.count) : 0;\n}\n\n/**\n * Delete one batch of rows older than `before`.\n *\n * Uses `DELETE … WHERE id IN (SELECT id … LIMIT n)` on all dialects.\n * This avoids the lack of LIMIT support on DELETE in Postgres and SQLite.\n *\n * Returns the number of rows deleted in this batch.\n */\nasync function deleteBatch(\n db: Kysely<Database>,\n before: Date,\n batchSize: number,\n): Promise<number> {\n const subquery = db\n .selectFrom(\"audit_logs\")\n .select(\"id\")\n .where(\"timestamp\", \"<\", before)\n .limit(batchSize);\n\n const result = await db\n .deleteFrom(\"audit_logs\")\n .where(\"id\", \"in\", subquery)\n .executeTakeFirst();\n\n return Number(result.numDeletedRows);\n}\n\n/**\n * Format a number with locale-aware thousands separators.\n */\nfunction formatCount(n: number): string {\n return n.toLocaleString(\"en-US\");\n}\n\n/**\n * Format elapsed milliseconds to a human-readable string.\n *\n * Exported for testing.\n */\nexport function formatDuration(ms: number): string {\n if (ms < 1000) {\n return `${ms}ms`;\n }\n return `${(ms / 1000).toFixed(1)}s`;\n}\n\nexport async function purge(options: PurgeOptions = {}): Promise<void> {\n // 1. Resolve database URL\n const databaseUrl = options.databaseUrl ?? process.env[\"DATABASE_URL\"];\n if (databaseUrl === undefined || databaseUrl === \"\") {\n throw new Error(\n \"DATABASE_URL is required. Set the DATABASE_URL environment variable or pass --database-url.\",\n );\n }\n\n // 2. Resolve the cutoff date\n // Only consult config when --since is absent (short-circuit when --since is set)\n const resolvOpts: { since?: string; config?: string } = {};\n if (options.since !== undefined) {\n resolvOpts.since = options.since;\n } else if (options.config !== undefined) {\n resolvOpts.config = options.config;\n }\n const before = await resolveCutoffDate(resolvOpts);\n\n // 3. 
Validate cutoff is in the past\n if (before > new Date()) {\n throw new Error(\n `Cutoff date ${before.toISOString().split(\"T\")[0]!} is in the future. ` +\n `Purge only accepts past dates.`,\n );\n }\n\n const batchSize = options.batchSize ?? DEFAULT_BATCH_SIZE;\n if (!Number.isInteger(batchSize) || batchSize <= 0) {\n throw new Error(`--batch-size must be a positive integer, got ${String(batchSize)}`);\n }\n\n // 4. Connect\n const dialect = detectDialect(databaseUrl);\n const db = await createKyselyInstance(databaseUrl, dialect);\n\n const startTime = Date.now();\n\n try {\n // 4. Dry run — count only, no confirmation needed\n if (options.dryRun) {\n const count = await countEligibleRows(db, before);\n process.stderr.write(\n `${pc.cyan(\"→\")} ${pc.bold(formatCount(count))} rows would be deleted` +\n ` (cutoff: ${pc.dim(before.toISOString().split(\"T\")[0]!)})\\n`,\n );\n process.stderr.write(`${pc.dim(\" Dry run — no changes made.\")}\\n`);\n return;\n }\n\n // 5. Confirmation guard — require --yes for live deletion\n if (!options.yes) {\n const count = await countEligibleRows(db, before);\n process.stderr.write(\n `${pc.yellow(\"!\")} ${pc.bold(formatCount(count))} rows will be deleted` +\n ` (cutoff: ${pc.dim(before.toISOString().split(\"T\")[0]!)})\\n` +\n ` Run with ${pc.bold(\"--yes\")} to confirm deletion, or ${pc.bold(\"--dry-run\")} to preview.\\n`,\n );\n return;\n }\n\n // 6. Batched delete loop\n let totalDeleted = 0;\n let batchNumber = 0;\n\n while (batchNumber < MAX_BATCHES) {\n const deleted = await deleteBatch(db, before, batchSize);\n totalDeleted += deleted;\n batchNumber++;\n\n if (batchNumber % 10 === 0) {\n process.stderr.write(\n ` ${pc.dim(`batch ${batchNumber}: ${formatCount(totalDeleted)} rows deleted so far...`)}\\n`,\n );\n }\n\n if (deleted < batchSize) {\n break;\n }\n }\n\n if (batchNumber >= MAX_BATCHES) {\n process.stderr.write(\n pc.yellow(\n ` Warning: reached max batch limit (${MAX_BATCHES.toLocaleString(\"en-US\")}). 
Some rows may remain.\\n`,\n ),\n );\n }\n\n const elapsed = Date.now() - startTime;\n\n // 7. Summary\n process.stderr.write(`${pc.green(\"✓\")} Purge complete\\n`);\n process.stderr.write(` Rows deleted: ${pc.bold(formatCount(totalDeleted))}\\n`);\n process.stderr.write(` Cutoff date: ${pc.dim(before.toISOString().split(\"T\")[0]!)}\\n`);\n process.stderr.write(` Time taken: ${pc.dim(formatDuration(elapsed))}\\n`);\n } finally {\n await db.destroy();\n }\n}\n","/**\n * Loader for `BetterAuditConfig` from a user-supplied config file.\n *\n * Dynamically imports the config file and validates the `retention` policy.\n * Returns `null` when the file does not exist — callers may fall back to CLI flags.\n *\n * TypeScript config files (`.ts`) require the Node process to be started with\n * `--import tsx` or an equivalent TypeScript loader. Compiled JS config files\n * (`.js`, `.mjs`, `.cjs`) work without any loader.\n */\n\nimport { stat } from \"node:fs/promises\";\nimport { resolve } from \"node:path\";\nimport type { BetterAuditConfig } from \"@usebetterdev/audit-core\";\n\n/**\n * Dynamic module import helper — uses a variable to prevent bundlers from\n * resolving the path at compile time.\n */\nfunction importFile(path: string): Promise<{ default?: unknown }> {\n return import(/* webpackIgnore: true */ path) as Promise<{ default?: unknown }>;\n}\n\nfunction validateRetention(retention: unknown): void {\n if (typeof retention !== \"object\" || retention === null) {\n throw new Error(\"retention must be an object with a 'days' property\");\n }\n const r = retention as Record<string, unknown>;\n if (!Number.isInteger(r[\"days\"]) || !Number.isFinite(r[\"days\"] as number) || (r[\"days\"] as number) <= 0) {\n throw new Error(`retention.days must be a positive integer, got ${String(r[\"days\"])}`);\n }\n if (r[\"tables\"] !== undefined) {\n if (\n !Array.isArray(r[\"tables\"]) ||\n r[\"tables\"].length === 0 ||\n r[\"tables\"].some((t: unknown) => typeof t !== 
\"string\" || t === \"\")\n ) {\n throw new Error(\"retention.tables must be a non-empty array of non-empty strings\");\n }\n }\n}\n\n/**\n * Loads a `BetterAuditConfig` from the given path.\n *\n * Returns `null` if the file does not exist.\n * Throws if the file exists but cannot be loaded or has an invalid shape.\n */\nexport async function loadConfig(configPath: string): Promise<BetterAuditConfig | null> {\n const resolved = resolve(process.cwd(), configPath);\n\n let mod: { default?: unknown };\n try {\n mod = await importFile(resolved);\n } catch (err) {\n // Categorise the error: did the file simply not exist?\n // We stat after the failed import to avoid TOCTOU — the stat result only\n // classifies an error that already occurred; it does not gate the import.\n const exists = await stat(resolved).then(() => true, () => false);\n if (!exists) {\n // File does not exist — caller may fall back to CLI flags\n return null;\n }\n const message = err instanceof Error ? err.message : String(err);\n throw new Error(\n `Failed to load config file \"${configPath}\": ${message}\\n` +\n `Hint: TypeScript config files require a TypeScript loader. 
` +\n `Run with \"tsx\" or \"ts-node\", or compile your config first.`,\n );\n }\n\n const config = mod.default;\n\n if (typeof config !== \"object\" || config === null) {\n throw new Error(\n `Config file \"${configPath}\" must export a BetterAuditConfig as the default export.`,\n );\n }\n\n const configObj = config as Record<string, unknown>;\n if (configObj[\"retention\"] !== undefined) {\n validateRetention(configObj[\"retention\"]);\n }\n\n return config as BetterAuditConfig;\n}\n"],"mappings":";;;;;;;;AASA,OAAO,QAAQ;;;ACEf,SAAS,YAAY;AACrB,SAAS,eAAe;AAOxB,SAAS,WAAW,MAA8C;AAChE,SAAO;AAAA;AAAA,IAAiC;AAAA;AAC1C;AAEA,SAAS,kBAAkB,WAA0B;AACnD,MAAI,OAAO,cAAc,YAAY,cAAc,MAAM;AACvD,UAAM,IAAI,MAAM,oDAAoD;AAAA,EACtE;AACA,QAAM,IAAI;AACV,MAAI,CAAC,OAAO,UAAU,EAAE,MAAM,CAAC,KAAK,CAAC,OAAO,SAAS,EAAE,MAAM,CAAW,KAAM,EAAE,MAAM,KAAgB,GAAG;AACvG,UAAM,IAAI,MAAM,kDAAkD,OAAO,EAAE,MAAM,CAAC,CAAC,EAAE;AAAA,EACvF;AACA,MAAI,EAAE,QAAQ,MAAM,QAAW;AAC7B,QACE,CAAC,MAAM,QAAQ,EAAE,QAAQ,CAAC,KAC1B,EAAE,QAAQ,EAAE,WAAW,KACvB,EAAE,QAAQ,EAAE,KAAK,CAAC,MAAe,OAAO,MAAM,YAAY,MAAM,EAAE,GAClE;AACA,YAAM,IAAI,MAAM,iEAAiE;AAAA,IACnF;AAAA,EACF;AACF;AAQA,eAAsB,WAAW,YAAuD;AACtF,QAAM,WAAW,QAAQ,QAAQ,IAAI,GAAG,UAAU;AAElD,MAAI;AACJ,MAAI;AACF,UAAM,MAAM,WAAW,QAAQ;AAAA,EACjC,SAAS,KAAK;AAIZ,UAAM,SAAS,MAAM,KAAK,QAAQ,EAAE,KAAK,MAAM,MAAM,MAAM,KAAK;AAChE,QAAI,CAAC,QAAQ;AAEX,aAAO;AAAA,IACT;AACA,UAAM,UAAU,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG;AAC/D,UAAM,IAAI;AAAA,MACR,+BAA+B,UAAU,MAAM,OAAO;AAAA;AAAA,IAGxD;AAAA,EACF;AAEA,QAAM,SAAS,IAAI;AAEnB,MAAI,OAAO,WAAW,YAAY,WAAW,MAAM;AACjD,UAAM,IAAI;AAAA,MACR,gBAAgB,UAAU;AAAA,IAC5B;AAAA,EACF;AAEA,QAAM,YAAY;AAClB,MAAI,UAAU,WAAW,MAAM,QAAW;AACxC,sBAAkB,UAAU,WAAW,CAAC;AAAA,EAC1C;AAEA,SAAO;AACT;;;ADrEA,IAAM,iBAAiB;AACvB,IAAM,iBAAiB;AACvB,IAAM,qBAAqB;AAC3B,IAAM,cAAc;AAyBb,SAAS,gBAAgB,OAAqB;AACnD,MAAI,eAAe,KAAK,KAAK,GAAG;AAC9B,UAAM,OAAO,IAAI,KAAK,KAAK;AAC3B,QAAI,OAAO,MAAM,KAAK,QAAQ,CAAC,GAAG;AAChC,YAAM,IAAI,MAAM,iBAAiB,KAAK,kDAAkD;AAAA,IAC1F;AACA,WAAO;AAAA,EACT;AAEA,QAAM,QAAQ,eAAe,KAAK,KAAK;AACvC,MAAI,UAA
U,MAAM;AAClB,UAAM,SAAS,SAAS,MAAM,CAAC,GAAI,EAAE;AACrC,QAAI,UAAU,GAAG;AACf,YAAM,IAAI;AAAA,QACR,0BAA0B,KAAK;AAAA,MACjC;AAAA,IACF;AACA,UAAM,OAAO,MAAM,CAAC,EAAG,YAAY;AACnC,UAAM,MAAM,oBAAI,KAAK;AACrB,QAAI,SAAS,KAAK;AAChB,UAAI,QAAQ,IAAI,QAAQ,IAAI,MAAM;AAAA,IACpC,WAAW,SAAS,KAAK;AACvB,UAAI,QAAQ,IAAI,QAAQ,IAAI,SAAS,CAAC;AAAA,IACxC,WAAW,SAAS,KAAK;AACvB,UAAI,SAAS,IAAI,SAAS,IAAI,MAAM;AAAA,IACtC,OAAO;AAEL,UAAI,YAAY,IAAI,YAAY,IAAI,MAAM;AAAA,IAC5C;AACA,WAAO;AAAA,EACT;AAEA,QAAM,IAAI;AAAA,IACR,0BAA0B,KAAK;AAAA,EAEjC;AACF;AAUA,eAAsB,kBAAkB,SAGtB;AAChB,MAAI,QAAQ,UAAU,QAAW;AAC/B,WAAO,gBAAgB,QAAQ,KAAK;AAAA,EACtC;AAEA,MAAI,QAAQ,WAAW,QAAW;AAChC,UAAM,SAAS,MAAM,WAAW,QAAQ,MAAM;AAC9C,QAAI,QAAQ,cAAc,QAAW;AACnC,YAAM,SAAS,oBAAI,KAAK;AACxB,aAAO,QAAQ,OAAO,QAAQ,IAAI,OAAO,UAAU,IAAI;AACvD,aAAO;AAAA,IACT;AAAA,EACF;AAEA,QAAM,IAAI;AAAA,IACR;AAAA,EAEF;AACF;AAKA,eAAe,kBACb,IACA,QACiB;AACjB,QAAM,SAAS,MAAM,GAClB,WAAW,YAAY,EACvB,OAAO,CAAC,OAAO,GAAG,GAAG,SAAiB,EAAE,GAAG,OAAO,CAAC,EACnD,MAAM,aAAa,KAAK,MAAM,EAC9B,iBAAiB;AACpB,SAAO,WAAW,SAAY,OAAO,OAAO,KAAK,IAAI;AACvD;AAUA,eAAe,YACb,IACA,QACA,WACiB;AACjB,QAAM,WAAW,GACd,WAAW,YAAY,EACvB,OAAO,IAAI,EACX,MAAM,aAAa,KAAK,MAAM,EAC9B,MAAM,SAAS;AAElB,QAAM,SAAS,MAAM,GAClB,WAAW,YAAY,EACvB,MAAM,MAAM,MAAM,QAAQ,EAC1B,iBAAiB;AAEpB,SAAO,OAAO,OAAO,cAAc;AACrC;AAKA,SAAS,YAAY,GAAmB;AACtC,SAAO,EAAE,eAAe,OAAO;AACjC;AAOO,SAAS,eAAe,IAAoB;AACjD,MAAI,KAAK,KAAM;AACb,WAAO,GAAG,EAAE;AAAA,EACd;AACA,SAAO,IAAI,KAAK,KAAM,QAAQ,CAAC,CAAC;AAClC;AAEA,eAAsB,MAAM,UAAwB,CAAC,GAAkB;AAErE,QAAM,cAAc,QAAQ,eAAe,QAAQ,IAAI,cAAc;AACrE,MAAI,gBAAgB,UAAa,gBAAgB,IAAI;AACnD,UAAM,IAAI;AAAA,MACR;AAAA,IACF;AAAA,EACF;AAIA,QAAM,aAAkD,CAAC;AACzD,MAAI,QAAQ,UAAU,QAAW;AAC/B,eAAW,QAAQ,QAAQ;AAAA,EAC7B,WAAW,QAAQ,WAAW,QAAW;AACvC,eAAW,SAAS,QAAQ;AAAA,EAC9B;AACA,QAAM,SAAS,MAAM,kBAAkB,UAAU;AAGjD,MAAI,SAAS,oBAAI,KAAK,GAAG;AACvB,UAAM,IAAI;AAAA,MACR,eAAe,OAAO,YAAY,EAAE,MAAM,GAAG,EAAE,CAAC,CAAE;AAAA,IAEpD;AAAA,EACF;AAEA,QAAM,YAAY,QAAQ,aAAa;AACvC,MAAI,CAAC,OAAO,UAAU,SAAS,KAAK,aAAa,GAAG;AAClD,UAAM,IAAI,MAAM,gDAAgD,OAAO,SAAS,CAAC,EAAE;AAAA
,EACrF;AAGA,QAAM,UAAU,cAAc,WAAW;AACzC,QAAM,KAAK,MAAM,qBAAqB,aAAa,OAAO;AAE1D,QAAM,YAAY,KAAK,IAAI;AAE3B,MAAI;AAEF,QAAI,QAAQ,QAAQ;AAClB,YAAM,QAAQ,MAAM,kBAAkB,IAAI,MAAM;AAChD,cAAQ,OAAO;AAAA,QACb,GAAG,GAAG,KAAK,QAAG,CAAC,IAAI,GAAG,KAAK,YAAY,KAAK,CAAC,CAAC,mCACjC,GAAG,IAAI,OAAO,YAAY,EAAE,MAAM,GAAG,EAAE,CAAC,CAAE,CAAC;AAAA;AAAA,MAC1D;AACA,cAAQ,OAAO,MAAM,GAAG,GAAG,IAAI,mCAA8B,CAAC;AAAA,CAAI;AAClE;AAAA,IACF;AAGA,QAAI,CAAC,QAAQ,KAAK;AAChB,YAAM,QAAQ,MAAM,kBAAkB,IAAI,MAAM;AAChD,cAAQ,OAAO;AAAA,QACb,GAAG,GAAG,OAAO,GAAG,CAAC,IAAI,GAAG,KAAK,YAAY,KAAK,CAAC,CAAC,kCACnC,GAAG,IAAI,OAAO,YAAY,EAAE,MAAM,GAAG,EAAE,CAAC,CAAE,CAAC;AAAA,aAC1C,GAAG,KAAK,OAAO,CAAC,4BAA4B,GAAG,KAAK,WAAW,CAAC;AAAA;AAAA,MAChF;AACA;AAAA,IACF;AAGA,QAAI,eAAe;AACnB,QAAI,cAAc;AAElB,WAAO,cAAc,aAAa;AAChC,YAAM,UAAU,MAAM,YAAY,IAAI,QAAQ,SAAS;AACvD,sBAAgB;AAChB;AAEA,UAAI,cAAc,OAAO,GAAG;AAC1B,gBAAQ,OAAO;AAAA,UACb,KAAK,GAAG,IAAI,SAAS,WAAW,KAAK,YAAY,YAAY,CAAC,yBAAyB,CAAC;AAAA;AAAA,QAC1F;AAAA,MACF;AAEA,UAAI,UAAU,WAAW;AACvB;AAAA,MACF;AAAA,IACF;AAEA,QAAI,eAAe,aAAa;AAC9B,cAAQ,OAAO;AAAA,QACb,GAAG;AAAA,UACD,uCAAuC,YAAY,eAAe,OAAO,CAAC;AAAA;AAAA,QAC5E;AAAA,MACF;AAAA,IACF;AAEA,UAAM,UAAU,KAAK,IAAI,IAAI;AAG7B,YAAQ,OAAO,MAAM,GAAG,GAAG,MAAM,QAAG,CAAC;AAAA,CAAmB;AACxD,YAAQ,OAAO,MAAM,oBAAoB,GAAG,KAAK,YAAY,YAAY,CAAC,CAAC;AAAA,CAAI;AAC/E,YAAQ,OAAO,MAAM,oBAAoB,GAAG,IAAI,OAAO,YAAY,EAAE,MAAM,GAAG,EAAE,CAAC,CAAE,CAAC;AAAA,CAAI;AACxF,YAAQ,OAAO,MAAM,oBAAoB,GAAG,IAAI,eAAe,OAAO,CAAC,CAAC;AAAA,CAAI;AAAA,EAC9E,UAAE;AACA,UAAM,GAAG,QAAQ;AAAA,EACnB;AACF;","names":[]}
|