@noy-db/as-sql 0.1.0-pre.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +33 -0
- package/dist/index.cjs +203 -0
- package/dist/index.cjs.map +1 -0
- package/dist/index.d.cts +49 -0
- package/dist/index.d.ts +49 -0
- package/dist/index.js +166 -0
- package/dist/index.js.map +1 -0
- package/package.json +68 -0
package/LICENSE
ADDED
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2026 vLannaAi
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
package/README.md
ADDED
@@ -0,0 +1,33 @@
+# @noy-db/as-sql
+
+[](https://www.npmjs.com/package/@noy-db/as-sql)
+
+> SQL dump export for noy-db
+
+Part of [**`@noy-db/hub`**](https://www.npmjs.com/package/@noy-db/hub) — the zero-knowledge, offline-first, encrypted document store.
+
+## Install
+
+```bash
+pnpm add @noy-db/hub @noy-db/as-sql
+```
+
+## What it is
+
+SQL dump export for noy-db — decrypts records and emits dialect-aware CREATE TABLE + INSERT statements for postgres / mysql / sqlite. One-way migration helper. Gated by RFC #249 canExportPlaintext.
+
+## Status
+
+**Pre-release** (`0.1.0-pre.3`). API may change before `1.0`.
+
+## Documentation
+
+See the [main repository](https://github.com/vLannaAi/noy-db#readme) for setup, examples, and the full subsystem catalog.
+
+- Source — [`packages/as-sql`](https://github.com/vLannaAi/noy-db/tree/main/packages/as-sql)
+- Issues — [github.com/vLannaAi/noy-db/issues](https://github.com/vLannaAi/noy-db/issues)
+- Spec — [`SPEC.md`](https://github.com/vLannaAi/noy-db/blob/main/SPEC.md)
+
+## License
+
+[MIT](./LICENSE) © vLannaAi
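Before the compiled output, a minimal usage sketch in TypeScript, written against the typings in `dist/index.d.ts` further down this diff. How a `Vault` is obtained is not part of this package, so the `openVault` import below is a hypothetical placeholder, as is its configuration.

```ts
// Minimal sketch; `openVault` is a hypothetical stand-in for however
// @noy-db/hub hands out a Vault instance. The as-sql calls themselves
// match the declarations in dist/index.d.ts.
import { toString as toSql, write } from '@noy-db/as-sql'
import { openVault } from '@noy-db/hub' // hypothetical entry point

const vault = await openVault({ /* credentials, storage, ... */ })

// Tier 1: plaintext SQL as an in-memory string. Throws unless the vault's
// policy allows plaintext export (RFC #249 canExportPlaintext).
const sql = await toSql(vault, { dialect: 'sqlite', mode: 'schema+data' })

// Tier 3: plaintext on disk. The risk flag is required both by the type
// (AsSQLWriteOptions) and by a runtime check inside write().
await write(vault, './dump.sql', { dialect: 'postgres', acknowledgeRisks: true })
```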
package/dist/index.cjs
ADDED
@@ -0,0 +1,203 @@
+"use strict";
+var __create = Object.create;
+var __defProp = Object.defineProperty;
+var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
+var __getOwnPropNames = Object.getOwnPropertyNames;
+var __getProtoOf = Object.getPrototypeOf;
+var __hasOwnProp = Object.prototype.hasOwnProperty;
+var __export = (target, all) => {
+  for (var name in all)
+    __defProp(target, name, { get: all[name], enumerable: true });
+};
+var __copyProps = (to, from, except, desc) => {
+  if (from && typeof from === "object" || typeof from === "function") {
+    for (let key of __getOwnPropNames(from))
+      if (!__hasOwnProp.call(to, key) && key !== except)
+        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
+  }
+  return to;
+};
+var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
+  // If the importer is in node compatibility mode or this is not an ESM
+  // file that has been converted to a CommonJS file using a Babel-
+  // compatible transform (i.e. "__esModule" has not been set), then set
+  // "default" to the CommonJS "module.exports" for node compatibility.
+  isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
+  mod
+));
+var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
+
+// src/index.ts
+var index_exports = {};
+__export(index_exports, {
+  download: () => download,
+  toString: () => toString,
+  write: () => write
+});
+module.exports = __toCommonJS(index_exports);
+async function toString(vault, options = {}) {
+  vault.assertCanExport("plaintext", "sql");
+  const dialect = options.dialect ?? "postgres";
+  const mode = options.mode ?? "schema+data";
+  const tableName = options.tableNames ?? ((c) => c);
+  const includeAll = !options.include || options.include.length === 0;
+  const allowlist = options.include ? new Set(options.include) : null;
+  const buckets = /* @__PURE__ */ new Map();
+  for await (const chunk of vault.exportStream({ granularity: "collection" })) {
+    if (!includeAll && allowlist && !allowlist.has(chunk.collection)) continue;
+    const bucket = buckets.get(chunk.collection) ?? [];
+    for (const r of chunk.records) bucket.push(stripMeta(r));
+    buckets.set(chunk.collection, bucket);
+  }
+  const parts = [];
+  parts.push(`-- Generated by @noy-db/as-sql \xB7 dialect: ${dialect} \xB7 mode: ${mode}`);
+  parts.push(`-- Bundle snapshot \u2014 NOT a live connection. Load with: ${loadCommand(dialect)}`);
+  parts.push("");
+  for (const [collection, records] of buckets) {
+    const table = tableName(collection);
+    const schema = inferSchema(records, options.metadataColumns === true);
+    if (mode !== "data-only") {
+      parts.push(createTable(dialect, table, schema));
+      parts.push("");
+    }
+    if (mode !== "schema-only") {
+      for (const rec of records) {
+        parts.push(insertRow(dialect, table, schema, rec));
+      }
+      parts.push("");
+    }
+  }
+  return parts.join("\n");
+}
+async function download(vault, options = {}) {
+  const sql = await toString(vault, options);
+  const filename = options.filename ?? "vault-export.sql";
+  const blob = new Blob([sql], { type: "application/sql;charset=utf-8" });
+  const url = URL.createObjectURL(blob);
+  const a = document.createElement("a");
+  a.href = url;
+  a.download = filename;
+  a.click();
+  URL.revokeObjectURL(url);
+}
+async function write(vault, path, options) {
+  if (options.acknowledgeRisks !== true) {
+    throw new Error(
+      'as-sql.write: acknowledgeRisks: true is required for on-disk plaintext output. See docs/patterns/as-exports.md \xA7"The three tiers of \\"plaintext out\\""'
+    );
+  }
+  const sql = await toString(vault, options);
+  const { writeFile } = await import("fs/promises");
+  await writeFile(path, sql, "utf-8");
+}
+function inferSchema(records, includeMeta) {
+  const observed = /* @__PURE__ */ new Map();
+  const sample = records.slice(0, 100);
+  for (const rec of sample) {
+    for (const key of Object.keys(rec)) {
+      if (!observed.has(key)) observed.set(key, { types: /* @__PURE__ */ new Set(), nullable: false });
+      const slot = observed.get(key);
+      const value = rec[key];
+      if (value === null || value === void 0) {
+        slot.nullable = true;
+        continue;
+      }
+      slot.types.add(inferType(value));
+    }
+    for (const key of observed.keys()) {
+      if (!(key in rec)) observed.get(key).nullable = true;
+    }
+  }
+  const columns = [];
+  for (const [name, info] of observed) {
+    if (!includeMeta && (name === "_v" || name === "_ts" || name === "_by")) continue;
+    const type = info.types.size === 1 ? [...info.types][0] : "text";
+    columns.push({ name, type, nullable: info.nullable });
+  }
+  if (includeMeta) {
+    columns.push({ name: "_noydb_version", type: "integer", nullable: true });
+    columns.push({ name: "_noydb_ts", type: "timestamp", nullable: true });
+  }
+  return columns;
+}
+function inferType(value) {
+  if (typeof value === "boolean") return "boolean";
+  if (typeof value === "number") return Number.isInteger(value) ? "integer" : "real";
+  if (value instanceof Date) return "timestamp";
+  if (typeof value === "string") {
+    if (/^\d{4}-\d{2}-\d{2}T/.test(value)) return "timestamp";
+    return "text";
+  }
+  return "jsonb";
+}
+function createTable(dialect, table, schema) {
+  const cols = schema.map((c) => `  ${quoteIdent(dialect, c.name)} ${mapType(dialect, c.type)}${c.nullable ? "" : " NOT NULL"}`);
+  return `CREATE TABLE ${quoteIdent(dialect, table)} (
+${cols.join(",\n")}
+);`;
+}
+function mapType(dialect, type) {
+  const map = {
+    postgres: { text: "TEXT", integer: "INTEGER", real: "REAL", boolean: "BOOLEAN", timestamp: "TIMESTAMPTZ", jsonb: "JSONB" },
+    mysql: { text: "TEXT", integer: "BIGINT", real: "DOUBLE", boolean: "TINYINT(1)", timestamp: "DATETIME", jsonb: "JSON" },
+    sqlite: { text: "TEXT", integer: "INTEGER", real: "REAL", boolean: "INTEGER", timestamp: "TEXT", jsonb: "TEXT" }
+  };
+  return map[dialect][type];
+}
+function insertRow(dialect, table, schema, record) {
+  const cols = schema.map((c) => quoteIdent(dialect, c.name)).join(", ");
+  const values = schema.map((c) => formatLiteral(dialect, c.type, record[c.name])).join(", ");
+  return `INSERT INTO ${quoteIdent(dialect, table)} (${cols}) VALUES (${values});`;
+}
+function formatLiteral(dialect, type, value) {
+  if (value === null || value === void 0) return "NULL";
+  if (type === "boolean") {
+    if (dialect === "mysql" || dialect === "sqlite") return value ? "1" : "0";
+    return value ? "TRUE" : "FALSE";
+  }
+  if (type === "integer" || type === "real") return stringifyScalar(value);
+  if (type === "timestamp") {
+    const s = value instanceof Date ? value.toISOString() : stringifyScalar(value);
+    return quoteString(dialect, s);
+  }
+  if (type === "jsonb") {
+    return quoteString(dialect, JSON.stringify(value));
+  }
+  return quoteString(dialect, stringifyScalar(value));
+}
+function stringifyScalar(value) {
+  if (typeof value === "string") return value;
+  if (typeof value === "number" || typeof value === "boolean" || typeof value === "bigint") {
+    return String(value);
+  }
+  if (value === null || value === void 0) return "";
+  return JSON.stringify(value);
+}
+function quoteString(dialect, s) {
+  const escaped = s.replace(/'/g, "''");
+  return `'${escaped}'`;
+}
+function quoteIdent(dialect, name) {
+  if (dialect === "mysql") return `\`${name.replace(/`/g, "``")}\``;
+  return `"${name.replace(/"/g, '""')}"`;
+}
+function loadCommand(dialect) {
+  if (dialect === "postgres") return "psql -f dump.sql";
+  if (dialect === "mysql") return "mysql -u <user> -p < dump.sql";
+  return "sqlite3 <database>.db < dump.sql";
+}
+function stripMeta(record) {
+  const out = {};
+  for (const [key, value] of Object.entries(record)) {
+    if (key === "_iv" || key === "_data" || key === "_noydb") continue;
+    out[key] = value;
+  }
+  return out;
+}
+// Annotate the CommonJS export names for ESM import in node:
+0 && (module.exports = {
+  download,
+  toString,
+  write
+});
+//# sourceMappingURL=index.cjs.map
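To make the compiled formatter concrete: given one record, the functions above produce statements like the following. The record is invented, and the output is traced by hand from inferSchema, createTable, insertRow, and formatLiteral rather than captured from a real run.

```ts
// Invented input record for a "users" collection:
const record = { id: 'u_1', age: 42, admin: true, joined: '2026-01-05T09:00:00Z' }

// Hand-traced output for the default postgres dialect, mode "schema+data"
// (the INSERT is emitted on one line; wrapped here for width):
//
// CREATE TABLE "users" (
//   "id" TEXT NOT NULL,
//   "age" INTEGER NOT NULL,
//   "admin" BOOLEAN NOT NULL,
//   "joined" TIMESTAMPTZ NOT NULL
// );
// INSERT INTO "users" ("id", "age", "admin", "joined")
//   VALUES ('u_1', 42, TRUE, '2026-01-05T09:00:00Z');
```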
package/dist/index.cjs.map
ADDED
@@ -0,0 +1 @@
+[one-line source map: {"version":3,"sources":["../src/index.ts"],"sourcesContent":[inlined src/index.ts],"mappings":"<VLQ data>","names":[]}]
package/dist/index.d.cts
ADDED
@@ -0,0 +1,49 @@
+import { Vault } from '@noy-db/hub';
+
+/**
+ * **@noy-db/as-sql** — SQL dump export for migration.
+ *
+ * One-way, at-export-time string formatter that emits dialect-aware
+ * `CREATE TABLE` + `INSERT INTO` statements. This is NOT a runtime
+ * SQL query frontend — it's the migration bridge for consumers moving
+ * noy-db data into Postgres, MySQL, or SQLite.
+ *
+ * Column types are inferred from value types in the first 100 records
+ * per collection. Every record's row uses parameterless inline-literal
+ * INSERT (no prepared statements) so the dump loads with a single
+ * `psql -f`, `mysql <`, or `sqlite3 < dump.sql` invocation.
+ *
+ * **Zero dependencies** — hand-rolled string formatter, ~300 LoC.
+ *
+ * See `docs/patterns/as-exports.md` for the three-tier egress model.
+ *
+ * @packageDocumentation
+ */
+
+type SqlDialect = 'postgres' | 'mysql' | 'sqlite';
+type SqlMode = 'schema-only' | 'data-only' | 'schema+data';
+interface AsSQLOptions {
+  /** Target dialect. Default `'postgres'`. */
+  readonly dialect?: SqlDialect;
+  /** Collection allowlist. Omit for all. */
+  readonly include?: readonly string[];
+  /** Schema + data, schema only, or data only. Default `'schema+data'`. */
+  readonly mode?: SqlMode;
+  /** Map collection name → table name. Default identity. */
+  readonly tableNames?: (collection: string) => string;
+  /** Include `_noydb_version` / `_noydb_ts` metadata columns. Default `false`. */
+  readonly metadataColumns?: boolean;
+}
+interface AsSQLDownloadOptions extends AsSQLOptions {
+  /** Filename offered to the browser. Default `'vault-export.sql'`. */
+  readonly filename?: string;
+}
+interface AsSQLWriteOptions extends AsSQLOptions {
+  /** Required for Node file-write — Tier 3 risk gate. */
+  readonly acknowledgeRisks: true;
+}
+declare function toString(vault: Vault, options?: AsSQLOptions): Promise<string>;
+declare function download(vault: Vault, options?: AsSQLDownloadOptions): Promise<void>;
+declare function write(vault: Vault, path: string, options: AsSQLWriteOptions): Promise<void>;
+
+export { type AsSQLDownloadOptions, type AsSQLOptions, type AsSQLWriteOptions, type SqlDialect, type SqlMode, download, toString, write };
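A sketch of the options surface these typings define. The collection names are invented, and the commented-out write call shows the literal-type gate failing at compile time:

```ts
import { toString as toSql, write } from '@noy-db/as-sql'
import type { Vault } from '@noy-db/hub'

declare const vault: Vault // assumed to exist; obtained via @noy-db/hub

// Export two collections only (invented names), prefix the table names,
// and keep the _noydb_version / _noydb_ts metadata columns.
const sql = await toSql(vault, {
  dialect: 'mysql',
  include: ['notes', 'tags'],
  tableNames: (c) => `noydb_${c}`,
  metadataColumns: true,
})

// AsSQLWriteOptions declares `acknowledgeRisks: true` as a literal type,
// so omitting it fails the typecheck before the runtime guard ever runs:
// await write(vault, 'dump.sql', { dialect: 'mysql' }) // compile error
```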
package/dist/index.d.ts
ADDED
@@ -0,0 +1,49 @@
+import { Vault } from '@noy-db/hub';
+
+/**
+ * **@noy-db/as-sql** — SQL dump export for migration.
+ *
+ * One-way, at-export-time string formatter that emits dialect-aware
+ * `CREATE TABLE` + `INSERT INTO` statements. This is NOT a runtime
+ * SQL query frontend — it's the migration bridge for consumers moving
+ * noy-db data into Postgres, MySQL, or SQLite.
+ *
+ * Column types are inferred from value types in the first 100 records
+ * per collection. Every record's row uses parameterless inline-literal
+ * INSERT (no prepared statements) so the dump loads with a single
+ * `psql -f`, `mysql <`, or `sqlite3 < dump.sql` invocation.
+ *
+ * **Zero dependencies** — hand-rolled string formatter, ~300 LoC.
+ *
+ * See `docs/patterns/as-exports.md` for the three-tier egress model.
+ *
+ * @packageDocumentation
+ */
+
+type SqlDialect = 'postgres' | 'mysql' | 'sqlite';
+type SqlMode = 'schema-only' | 'data-only' | 'schema+data';
+interface AsSQLOptions {
+  /** Target dialect. Default `'postgres'`. */
+  readonly dialect?: SqlDialect;
+  /** Collection allowlist. Omit for all. */
+  readonly include?: readonly string[];
+  /** Schema + data, schema only, or data only. Default `'schema+data'`. */
+  readonly mode?: SqlMode;
+  /** Map collection name → table name. Default identity. */
+  readonly tableNames?: (collection: string) => string;
+  /** Include `_noydb_version` / `_noydb_ts` metadata columns. Default `false`. */
+  readonly metadataColumns?: boolean;
+}
+interface AsSQLDownloadOptions extends AsSQLOptions {
+  /** Filename offered to the browser. Default `'vault-export.sql'`. */
+  readonly filename?: string;
+}
+interface AsSQLWriteOptions extends AsSQLOptions {
+  /** Required for Node file-write — Tier 3 risk gate. */
+  readonly acknowledgeRisks: true;
+}
+declare function toString(vault: Vault, options?: AsSQLOptions): Promise<string>;
+declare function download(vault: Vault, options?: AsSQLDownloadOptions): Promise<void>;
+declare function write(vault: Vault, path: string, options: AsSQLWriteOptions): Promise<void>;
+
+export { type AsSQLDownloadOptions, type AsSQLOptions, type AsSQLWriteOptions, type SqlDialect, type SqlMode, download, toString, write };
package/dist/index.js
ADDED
@@ -0,0 +1,166 @@
+// src/index.ts
+async function toString(vault, options = {}) {
+  vault.assertCanExport("plaintext", "sql");
+  const dialect = options.dialect ?? "postgres";
+  const mode = options.mode ?? "schema+data";
+  const tableName = options.tableNames ?? ((c) => c);
+  const includeAll = !options.include || options.include.length === 0;
+  const allowlist = options.include ? new Set(options.include) : null;
+  const buckets = /* @__PURE__ */ new Map();
+  for await (const chunk of vault.exportStream({ granularity: "collection" })) {
+    if (!includeAll && allowlist && !allowlist.has(chunk.collection)) continue;
+    const bucket = buckets.get(chunk.collection) ?? [];
+    for (const r of chunk.records) bucket.push(stripMeta(r));
+    buckets.set(chunk.collection, bucket);
+  }
+  const parts = [];
+  parts.push(`-- Generated by @noy-db/as-sql \xB7 dialect: ${dialect} \xB7 mode: ${mode}`);
+  parts.push(`-- Bundle snapshot \u2014 NOT a live connection. Load with: ${loadCommand(dialect)}`);
+  parts.push("");
+  for (const [collection, records] of buckets) {
+    const table = tableName(collection);
+    const schema = inferSchema(records, options.metadataColumns === true);
+    if (mode !== "data-only") {
+      parts.push(createTable(dialect, table, schema));
+      parts.push("");
+    }
+    if (mode !== "schema-only") {
+      for (const rec of records) {
+        parts.push(insertRow(dialect, table, schema, rec));
+      }
+      parts.push("");
+    }
+  }
+  return parts.join("\n");
+}
+async function download(vault, options = {}) {
+  const sql = await toString(vault, options);
+  const filename = options.filename ?? "vault-export.sql";
+  const blob = new Blob([sql], { type: "application/sql;charset=utf-8" });
+  const url = URL.createObjectURL(blob);
+  const a = document.createElement("a");
+  a.href = url;
+  a.download = filename;
+  a.click();
+  URL.revokeObjectURL(url);
+}
+async function write(vault, path, options) {
+  if (options.acknowledgeRisks !== true) {
+    throw new Error(
+      'as-sql.write: acknowledgeRisks: true is required for on-disk plaintext output. See docs/patterns/as-exports.md \xA7"The three tiers of \\"plaintext out\\""'
+    );
+  }
+  const sql = await toString(vault, options);
+  const { writeFile } = await import("fs/promises");
+  await writeFile(path, sql, "utf-8");
+}
+function inferSchema(records, includeMeta) {
+  const observed = /* @__PURE__ */ new Map();
+  const sample = records.slice(0, 100);
+  for (const rec of sample) {
+    for (const key of Object.keys(rec)) {
+      if (!observed.has(key)) observed.set(key, { types: /* @__PURE__ */ new Set(), nullable: false });
+      const slot = observed.get(key);
+      const value = rec[key];
+      if (value === null || value === void 0) {
+        slot.nullable = true;
+        continue;
+      }
+      slot.types.add(inferType(value));
+    }
+    for (const key of observed.keys()) {
+      if (!(key in rec)) observed.get(key).nullable = true;
+    }
+  }
+  const columns = [];
+  for (const [name, info] of observed) {
+    if (!includeMeta && (name === "_v" || name === "_ts" || name === "_by")) continue;
+    const type = info.types.size === 1 ? [...info.types][0] : "text";
+    columns.push({ name, type, nullable: info.nullable });
+  }
+  if (includeMeta) {
+    columns.push({ name: "_noydb_version", type: "integer", nullable: true });
+    columns.push({ name: "_noydb_ts", type: "timestamp", nullable: true });
+  }
+  return columns;
+}
+function inferType(value) {
+  if (typeof value === "boolean") return "boolean";
+  if (typeof value === "number") return Number.isInteger(value) ? "integer" : "real";
+  if (value instanceof Date) return "timestamp";
+  if (typeof value === "string") {
+    if (/^\d{4}-\d{2}-\d{2}T/.test(value)) return "timestamp";
+    return "text";
+  }
+  return "jsonb";
+}
+function createTable(dialect, table, schema) {
+  const cols = schema.map((c) => `  ${quoteIdent(dialect, c.name)} ${mapType(dialect, c.type)}${c.nullable ? "" : " NOT NULL"}`);
+  return `CREATE TABLE ${quoteIdent(dialect, table)} (
+${cols.join(",\n")}
+);`;
+}
+function mapType(dialect, type) {
+  const map = {
+    postgres: { text: "TEXT", integer: "INTEGER", real: "REAL", boolean: "BOOLEAN", timestamp: "TIMESTAMPTZ", jsonb: "JSONB" },
+    mysql: { text: "TEXT", integer: "BIGINT", real: "DOUBLE", boolean: "TINYINT(1)", timestamp: "DATETIME", jsonb: "JSON" },
+    sqlite: { text: "TEXT", integer: "INTEGER", real: "REAL", boolean: "INTEGER", timestamp: "TEXT", jsonb: "TEXT" }
+  };
+  return map[dialect][type];
+}
+function insertRow(dialect, table, schema, record) {
+  const cols = schema.map((c) => quoteIdent(dialect, c.name)).join(", ");
+  const values = schema.map((c) => formatLiteral(dialect, c.type, record[c.name])).join(", ");
+  return `INSERT INTO ${quoteIdent(dialect, table)} (${cols}) VALUES (${values});`;
+}
+function formatLiteral(dialect, type, value) {
+  if (value === null || value === void 0) return "NULL";
+  if (type === "boolean") {
+    if (dialect === "mysql" || dialect === "sqlite") return value ? "1" : "0";
+    return value ? "TRUE" : "FALSE";
+  }
+  if (type === "integer" || type === "real") return stringifyScalar(value);
+  if (type === "timestamp") {
+    const s = value instanceof Date ? value.toISOString() : stringifyScalar(value);
+    return quoteString(dialect, s);
+  }
+  if (type === "jsonb") {
+    return quoteString(dialect, JSON.stringify(value));
+  }
+  return quoteString(dialect, stringifyScalar(value));
+}
+function stringifyScalar(value) {
+  if (typeof value === "string") return value;
+  if (typeof value === "number" || typeof value === "boolean" || typeof value === "bigint") {
+    return String(value);
+  }
+  if (value === null || value === void 0) return "";
+  return JSON.stringify(value);
+}
+function quoteString(dialect, s) {
+  const escaped = s.replace(/'/g, "''");
+  return `'${escaped}'`;
+}
+function quoteIdent(dialect, name) {
+  if (dialect === "mysql") return `\`${name.replace(/`/g, "``")}\``;
+  return `"${name.replace(/"/g, '""')}"`;
+}
+function loadCommand(dialect) {
+  if (dialect === "postgres") return "psql -f dump.sql";
+  if (dialect === "mysql") return "mysql -u <user> -p < dump.sql";
+  return "sqlite3 <database>.db < dump.sql";
+}
+function stripMeta(record) {
+  const out = {};
+  for (const [key, value] of Object.entries(record)) {
+    if (key === "_iv" || key === "_data" || key === "_noydb") continue;
+    out[key] = value;
+  }
+  return out;
+}
+export {
+  download,
+  toString,
+  write
+};
+//# sourceMappingURL=index.js.map
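Two edge rules of the schema inference are easy to miss in the code: a key observed with mixed value types falls back to text, and a key that is null or absent in any sampled record becomes nullable. A hand-traced sketch with invented records:

```ts
// Invented records; expected columns traced by hand from inferSchema above.
const records = [
  { n: 1, tag: 'a' },  // n observed as integer; tag as text
  { n: 2.5 },          // n also observed as real; tag absent -> nullable
  { n: 3, tag: null }, // tag seen as null -> nullable (already was)
]
// inferSchema(records, false) yields, rendered for postgres:
//   "n" TEXT NOT NULL   -- {integer, real} is mixed, so fall back to text
//   "tag" TEXT          -- nullable, hence no NOT NULL
```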
package/dist/index.js.map
ADDED
@@ -0,0 +1 @@
+[one-line source map: {"version":3,"sources":["../src/index.ts"],"sourcesContent":[inlined src/index.ts],"mappings":"<VLQ data>","names":[]}]
package/package.json
ADDED
@@ -0,0 +1,68 @@
+{
+  "name": "@noy-db/as-sql",
+  "version": "0.1.0-pre.3",
+  "description": "SQL dump export for noy-db — decrypts records and emits dialect-aware CREATE TABLE + INSERT statements for postgres / mysql / sqlite. One-way migration helper. Gated by RFC #249 canExportPlaintext.",
+  "license": "MIT",
+  "author": "vLannaAi <vicio@lanna.ai>",
+  "homepage": "https://github.com/vLannaAi/noy-db/tree/main/packages/as-sql#readme",
+  "repository": {
+    "type": "git",
+    "url": "git+https://github.com/vLannaAi/noy-db.git",
+    "directory": "packages/as-sql"
+  },
+  "bugs": {
+    "url": "https://github.com/vLannaAi/noy-db/issues"
+  },
+  "type": "module",
+  "sideEffects": false,
+  "exports": {
+    ".": {
+      "import": {
+        "types": "./dist/index.d.ts",
+        "default": "./dist/index.js"
+      },
+      "require": {
+        "types": "./dist/index.d.cts",
+        "default": "./dist/index.cjs"
+      }
+    }
+  },
+  "main": "./dist/index.cjs",
+  "module": "./dist/index.js",
+  "types": "./dist/index.d.ts",
+  "files": [
+    "dist",
+    "README.md",
+    "LICENSE"
+  ],
+  "engines": {
+    "node": ">=18.0.0"
+  },
+  "peerDependencies": {
+    "@noy-db/hub": "0.1.0-pre.3"
+  },
+  "devDependencies": {
+    "@types/node": "^22.0.0",
+    "@noy-db/hub": "0.1.0-pre.3"
+  },
+  "keywords": [
+    "noy-db",
+    "as-sql",
+    "sql",
+    "export",
+    "migration",
+    "postgres",
+    "mysql",
+    "sqlite"
+  ],
+  "publishConfig": {
+    "access": "public",
+    "tag": "latest"
+  },
+  "scripts": {
+    "build": "tsup",
+    "test": "vitest run",
+    "lint": "eslint src/",
+    "typecheck": "tsc --noEmit"
+  }
+}
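The exports map above ships dual builds behind one specifier, so both module systems load the same API. A sketch of the two consumption paths it resolves:

```ts
// ESM consumers resolve dist/index.js, typed by dist/index.d.ts:
import { download } from '@noy-db/as-sql'

// CommonJS consumers resolve dist/index.cjs, typed by dist/index.d.cts:
// const { toString, write } = require('@noy-db/as-sql')
```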