latticesql 0.18.2 → 0.18.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -2012,6 +2012,14 @@ Three complete, commented examples are in [docs/examples/](./docs/examples/):
2012
2012
 
2013
2013
  ---
2014
2014
 
2015
+ ## Staying up to date
2016
+
2017
+ **CLI users:** The `lattice` CLI checks for new versions automatically and prints a notice when an update is available. Run `lattice update` to upgrade in place. Alternatively, use `npx latticesql` to always run the latest version without a global install.
2018
+
2019
+ **Library consumers:** By default, `npm install latticesql` adds a `^` semver range to your `package.json`, so patch and minor updates are picked up on your next `npm install`. For fully automated dependency updates, set up [Dependabot](https://docs.github.com/en/code-security/dependabot) or [Renovate](https://github.com/renovatebot/renovate) — they'll create PRs in your repo whenever a new version is published.
2020
+
2021
+ ---
2022
+
2015
2023
  ## Contributing
2016
2024
 
2017
2025
  See [CONTRIBUTING.md](./CONTRIBUTING.md) for dev setup, test commands, and contribution guidelines.
package/dist/cli.js CHANGED
@@ -1,8 +1,9 @@
1
1
  #!/usr/bin/env node
2
2
 
3
3
  // src/cli.ts
4
- import { resolve as resolve3, dirname as dirname5 } from "path";
5
- import { readFileSync as readFileSync6 } from "fs";
4
+ import { resolve as resolve4, dirname as dirname5 } from "path";
5
+ import { readFileSync as readFileSync7 } from "fs";
6
+ import { execSync } from "child_process";
6
7
  import { parse as parse2 } from "yaml";
7
8
 
8
9
  // src/codegen/generate.ts
@@ -493,7 +494,7 @@ var SchemaManager = class {
493
494
  applySchema(adapter) {
494
495
  for (const [name, def] of this._tables) {
495
496
  const pkCols = this._tablePK.get(name) ?? ["id"];
496
- let constraints = def.tableConstraints ? [...def.tableConstraints] : [];
497
+ const constraints = def.tableConstraints ? [...def.tableConstraints] : [];
497
498
  if (pkCols.length > 1) {
498
499
  const alreadyHasPK = constraints.some((c) => c.toUpperCase().startsWith("PRIMARY KEY"));
499
500
  if (!alreadyHasPK) {
@@ -537,7 +538,7 @@ var SchemaManager = class {
537
538
  queryTable(adapter, name) {
538
539
  if (this._tables.has(name)) {
539
540
  const def = this._tables.get(name);
540
- if (def.columns && "deleted_at" in def.columns) {
541
+ if (def?.columns && "deleted_at" in def.columns) {
541
542
  return adapter.all(`SELECT * FROM "${name}" WHERE deleted_at IS NULL`);
542
543
  }
543
544
  return adapter.all(`SELECT * FROM "${name}"`);
@@ -634,8 +635,8 @@ var Sanitizer = class {
634
635
  };
635
636
 
636
637
  // src/render/engine.ts
637
- import { join as join5 } from "path";
638
- import { mkdirSync as mkdirSync3 } from "fs";
638
+ import { join as join5, basename, isAbsolute, resolve as resolve3 } from "path";
639
+ import { mkdirSync as mkdirSync3, existsSync as existsSync5, copyFileSync } from "fs";
639
640
 
640
641
  // src/render/entity-query.ts
641
642
  var SAFE_COL_RE = /^[a-zA-Z0-9_]+$/;
@@ -721,7 +722,7 @@ function resolveEntitySource(source, entityRow, entityPk, adapter, protection) {
721
722
  case "self":
722
723
  return [entityRow];
723
724
  case "hasMany": {
724
- if (protection && protection.protectedTables.has(source.table)) {
725
+ if (protection?.protectedTables.has(source.table)) {
725
726
  if (source.table === protection.currentTable) return [entityRow];
726
727
  return [];
727
728
  }
@@ -733,7 +734,7 @@ function resolveEntitySource(source, entityRow, entityPk, adapter, protection) {
733
734
  return adapter.all(sql, params);
734
735
  }
735
736
  case "manyToMany": {
736
- if (protection && protection.protectedTables.has(source.remoteTable)) {
737
+ if (protection?.protectedTables.has(source.remoteTable)) {
737
738
  if (source.remoteTable === protection.currentTable) return [entityRow];
738
739
  return [];
739
740
  }
@@ -759,7 +760,7 @@ function resolveEntitySource(source, entityRow, entityPk, adapter, protection) {
759
760
  return adapter.all(sql, params);
760
761
  }
761
762
  case "belongsTo": {
762
- if (protection && protection.protectedTables.has(source.table)) {
763
+ if (protection?.protectedTables.has(source.table)) {
763
764
  if (source.table === protection.currentTable) return [entityRow];
764
765
  return [];
765
766
  }
@@ -1175,6 +1176,22 @@ var RenderEngine = class {
1175
1176
  const slug = def.slug(entityRow);
1176
1177
  const entityDir = def.directory ? join5(outputDir, def.directory(entityRow)) : join5(outputDir, directoryRoot, slug);
1177
1178
  mkdirSync3(entityDir, { recursive: true });
1179
+ if (def.attachFileColumn) {
1180
+ const filePath = entityRow[def.attachFileColumn];
1181
+ if (filePath && typeof filePath === "string" && filePath.length > 0) {
1182
+ const absPath = isAbsolute(filePath) ? filePath : resolve3(outputDir, filePath);
1183
+ if (existsSync5(absPath)) {
1184
+ const destPath = join5(entityDir, basename(absPath));
1185
+ if (!existsSync5(destPath)) {
1186
+ try {
1187
+ copyFileSync(absPath, destPath);
1188
+ filesWritten.push(destPath);
1189
+ } catch {
1190
+ }
1191
+ }
1192
+ }
1193
+ }
1194
+ }
1178
1195
  const renderedFiles = /* @__PURE__ */ new Map();
1179
1196
  const entityFileHashes = {};
1180
1197
  const protection = protectedTables.size > 0 ? { protectedTables, currentTable: table } : void 0;
@@ -1224,7 +1241,7 @@ var RenderEngine = class {
1224
1241
 
1225
1242
  // src/reverse-sync/engine.ts
1226
1243
  import { join as join6 } from "path";
1227
- import { existsSync as existsSync5, readFileSync as readFileSync4 } from "fs";
1244
+ import { existsSync as existsSync6, readFileSync as readFileSync4 } from "fs";
1228
1245
  var ReverseSyncEngine = class {
1229
1246
  _schema;
1230
1247
  _adapter;
@@ -1276,7 +1293,7 @@ var ReverseSyncEngine = class {
1276
1293
  if (!fileInfo.hash) continue;
1277
1294
  const filePath = join6(entityDir, filename);
1278
1295
  result.filesScanned++;
1279
- if (!existsSync5(filePath)) continue;
1296
+ if (!existsSync6(filePath)) continue;
1280
1297
  let currentContent;
1281
1298
  try {
1282
1299
  currentContent = readFileSync4(filePath, "utf8");
@@ -1376,8 +1393,8 @@ var SyncLoop = class {
1376
1393
  };
1377
1394
 
1378
1395
  // src/writeback/pipeline.ts
1379
- import { readFileSync as readFileSync5, statSync as statSync2, existsSync as existsSync6, readdirSync as readdirSync2 } from "fs";
1380
- import { join as join7, dirname as dirname4, basename } from "path";
1396
+ import { readFileSync as readFileSync5, statSync as statSync2, existsSync as existsSync7, readdirSync as readdirSync2 } from "fs";
1397
+ import { join as join7, dirname as dirname4, basename as basename2 } from "path";
1381
1398
 
1382
1399
  // src/writeback/state-store.ts
1383
1400
  var InMemoryStateStore = class {
@@ -1427,7 +1444,7 @@ var WritebackPipeline = class {
1427
1444
  let processed = 0;
1428
1445
  const store = def.stateStore ?? this._stateStore;
1429
1446
  for (const filePath of paths) {
1430
- if (!existsSync6(filePath)) continue;
1447
+ if (!existsSync7(filePath)) continue;
1431
1448
  const stat = statSync2(filePath);
1432
1449
  const currentSize = stat.size;
1433
1450
  const storedOffset = store.getOffset(filePath);
@@ -1461,8 +1478,8 @@ var WritebackPipeline = class {
1461
1478
  return [pattern];
1462
1479
  }
1463
1480
  const dir = dirname4(pattern);
1464
- const filePattern = basename(pattern);
1465
- if (!existsSync6(dir)) return [];
1481
+ const filePattern = basename2(pattern);
1482
+ if (!existsSync7(dir)) return [];
1466
1483
  const regexStr = filePattern.replace(/[.+^${}()|[\]\\]/g, "\\$&").replace(/\*/g, ".*").replace(/\?/g, ".");
1467
1484
  const regex = new RegExp(`^${regexStr}$`);
1468
1485
  return readdirSync2(dir).filter((f) => regex.test(f)).map((f) => join7(dir, f));
@@ -1774,9 +1791,7 @@ var Lattice = class {
1774
1791
  `Entity context "${table}" has encrypted: true but no encryptionKey was provided in Lattice options`
1775
1792
  );
1776
1793
  }
1777
- if (!this._encryptionKey) {
1778
- this._encryptionKey = deriveKey(this._encryptionKeyRaw);
1779
- }
1794
+ this._encryptionKey ??= deriveKey(this._encryptionKeyRaw);
1780
1795
  const pragmaRows = this._adapter.all(`PRAGMA table_info("${table}")`);
1781
1796
  const allCols = pragmaRows.map((r) => r.name);
1782
1797
  const encCols = resolveEncryptedColumns(def.encrypted, allCols);
@@ -2605,6 +2620,49 @@ var Lattice = class {
2605
2620
  }
2606
2621
  };
2607
2622
 
2623
+ // src/update-check.ts
2624
+ import { readFileSync as readFileSync6, writeFileSync as writeFileSync3, mkdirSync as mkdirSync4, existsSync as existsSync8 } from "fs";
2625
+ import { join as join8 } from "path";
2626
+ import { homedir } from "os";
2627
+ var ONE_DAY_MS = 864e5;
2628
+ function isNewer(latest, current) {
2629
+ const a = latest.split(".").map(Number);
2630
+ const b = current.split(".").map(Number);
2631
+ for (let i = 0; i < Math.max(a.length, b.length); i++) {
2632
+ const av = a[i] ?? 0;
2633
+ const bv = b[i] ?? 0;
2634
+ if (av > bv) return true;
2635
+ if (av < bv) return false;
2636
+ }
2637
+ return false;
2638
+ }
2639
+ async function checkForUpdate(pkgName, currentVersion) {
2640
+ const cacheDir = join8(homedir(), `.${pkgName}`);
2641
+ const cachePath = join8(cacheDir, "update-check.json");
2642
+ try {
2643
+ if (existsSync8(cachePath)) {
2644
+ const cached = JSON.parse(readFileSync6(cachePath, "utf-8"));
2645
+ if (Date.now() - cached.checked < ONE_DAY_MS) {
2646
+ return isNewer(cached.latest, currentVersion) ? cached.latest : null;
2647
+ }
2648
+ }
2649
+ } catch {
2650
+ }
2651
+ const res = await fetch(`https://registry.npmjs.org/${pkgName}/latest`, {
2652
+ headers: { accept: "application/json" },
2653
+ signal: AbortSignal.timeout(5e3)
2654
+ });
2655
+ if (!res.ok) return null;
2656
+ const data = await res.json();
2657
+ const latest = data.version;
2658
+ try {
2659
+ if (!existsSync8(cacheDir)) mkdirSync4(cacheDir, { recursive: true });
2660
+ writeFileSync3(cachePath, JSON.stringify({ latest, checked: Date.now() }));
2661
+ } catch {
2662
+ }
2663
+ return isNewer(latest, currentVersion) ? latest : null;
2664
+ }
2665
+
2608
2666
  // src/cli.ts
2609
2667
  function parseArgs(argv) {
2610
2668
  let command;
@@ -2691,6 +2749,7 @@ function printHelp() {
2691
2749
  " reconcile Render + cleanup orphaned entity directories and files",
2692
2750
  " status Dry-run reconcile \u2014 show what would change without writing",
2693
2751
  " watch Poll for changes and re-render on each cycle",
2752
+ " update Upgrade latticesql to the latest version",
2694
2753
  "",
2695
2754
  "Options (generate):",
2696
2755
  " --config, -c <path> Path to config file (default: ./lattice.config.yml)",
@@ -2728,20 +2787,40 @@ function printHelp() {
2728
2787
  ].join("\n")
2729
2788
  );
2730
2789
  }
2731
- function printVersion() {
2790
+ function getVersion() {
2732
2791
  try {
2733
2792
  const pkgPath = new URL("../package.json", import.meta.url).pathname;
2734
- const pkg = JSON.parse(readFileSync6(pkgPath, "utf-8"));
2735
- console.log(pkg.version);
2793
+ const pkg = JSON.parse(readFileSync7(pkgPath, "utf-8"));
2794
+ return pkg.version;
2795
+ } catch {
2796
+ return "unknown";
2797
+ }
2798
+ }
2799
+ function printVersion() {
2800
+ console.log(getVersion());
2801
+ }
2802
+ async function runUpdate() {
2803
+ const currentVersion = getVersion();
2804
+ console.log(`Current version: ${currentVersion}`);
2805
+ const latest = await checkForUpdate("latticesql", currentVersion);
2806
+ if (!latest) {
2807
+ console.log("Already up to date.");
2808
+ return;
2809
+ }
2810
+ console.log(`Updating to ${latest}...`);
2811
+ try {
2812
+ execSync("npm install -g latticesql@latest", { stdio: "inherit" });
2813
+ console.log(`Updated latticesql ${currentVersion} \u2192 ${latest}`);
2736
2814
  } catch {
2737
- console.log("unknown");
2815
+ console.error("Update failed. Try running manually: npm install -g latticesql@latest");
2816
+ process.exit(1);
2738
2817
  }
2739
2818
  }
2740
2819
  function runGenerate(args) {
2741
- const configPath = resolve3(args.config);
2820
+ const configPath = resolve4(args.config);
2742
2821
  let raw;
2743
2822
  try {
2744
- raw = readFileSync6(configPath, "utf-8");
2823
+ raw = readFileSync7(configPath, "utf-8");
2745
2824
  } catch {
2746
2825
  console.error(`Error: cannot read config file at "${configPath}"`);
2747
2826
  process.exit(1);
@@ -2758,7 +2837,7 @@ function runGenerate(args) {
2758
2837
  process.exit(1);
2759
2838
  }
2760
2839
  const configDir = dirname5(configPath);
2761
- const outDir = resolve3(args.out);
2840
+ const outDir = resolve4(args.out);
2762
2841
  try {
2763
2842
  const result = generateAll({ config, configDir, outDir, scaffold: args.scaffold });
2764
2843
  console.log(`Generated ${String(result.filesWritten.length)} file(s):`);
@@ -2771,15 +2850,15 @@ function runGenerate(args) {
2771
2850
  }
2772
2851
  }
2773
2852
  async function runRender(args) {
2774
- const outputDir = resolve3(args.output);
2853
+ const outputDir = resolve4(args.output);
2775
2854
  let parsed;
2776
2855
  try {
2777
- parsed = parseConfigFile(resolve3(args.config));
2856
+ parsed = parseConfigFile(resolve4(args.config));
2778
2857
  } catch (e) {
2779
2858
  console.error(`Error: ${e.message}`);
2780
2859
  process.exit(1);
2781
2860
  }
2782
- const db = new Lattice({ config: resolve3(args.config) });
2861
+ const db = new Lattice({ config: resolve4(args.config) });
2783
2862
  try {
2784
2863
  await db.init();
2785
2864
  const start = Date.now();
@@ -2798,8 +2877,8 @@ async function runRender(args) {
2798
2877
  void parsed;
2799
2878
  }
2800
2879
  async function runReconcile(args, isDryRun) {
2801
- const outputDir = resolve3(args.output);
2802
- const db = new Lattice({ config: resolve3(args.config) });
2880
+ const outputDir = resolve4(args.output);
2881
+ const db = new Lattice({ config: resolve4(args.config) });
2803
2882
  try {
2804
2883
  await db.init();
2805
2884
  const start = Date.now();
@@ -2858,8 +2937,8 @@ function formatTimestamp() {
2858
2937
  return `${hh}:${mm}:${ss}`;
2859
2938
  }
2860
2939
  async function runWatch(args) {
2861
- const outputDir = resolve3(args.output);
2862
- const db = new Lattice({ config: resolve3(args.config) });
2940
+ const outputDir = resolve4(args.output);
2941
+ const db = new Lattice({ config: resolve4(args.config) });
2863
2942
  try {
2864
2943
  await db.init();
2865
2944
  } catch (e) {
@@ -2908,6 +2987,19 @@ function main() {
2908
2987
  printHelp();
2909
2988
  process.exit(args.command === void 0 && !args.help ? 1 : 0);
2910
2989
  }
2990
+ const version = getVersion();
2991
+ if (version !== "unknown") {
2992
+ checkForUpdate("latticesql", version).then((latest) => {
2993
+ if (latest) {
2994
+ process.on("exit", () => {
2995
+ console.log(
2996
+ `
2997
+ Update available: ${version} \u2192 ${latest} \u2014 run "lattice update" to upgrade`
2998
+ );
2999
+ });
3000
+ }
3001
+ }).catch(() => void 0);
3002
+ }
2911
3003
  switch (args.command) {
2912
3004
  case "generate":
2913
3005
  runGenerate(args);
@@ -2924,6 +3016,9 @@ function main() {
2924
3016
  case "watch":
2925
3017
  void runWatch(args);
2926
3018
  break;
3019
+ case "update":
3020
+ void runUpdate();
3021
+ break;
2927
3022
  default:
2928
3023
  console.error(`Unknown command: ${args.command}`);
2929
3024
  printHelp();
package/dist/index.cjs CHANGED
@@ -246,7 +246,7 @@ var SchemaManager = class {
246
246
  applySchema(adapter) {
247
247
  for (const [name, def] of this._tables) {
248
248
  const pkCols = this._tablePK.get(name) ?? ["id"];
249
- let constraints = def.tableConstraints ? [...def.tableConstraints] : [];
249
+ const constraints = def.tableConstraints ? [...def.tableConstraints] : [];
250
250
  if (pkCols.length > 1) {
251
251
  const alreadyHasPK = constraints.some((c) => c.toUpperCase().startsWith("PRIMARY KEY"));
252
252
  if (!alreadyHasPK) {
@@ -290,7 +290,7 @@ var SchemaManager = class {
290
290
  queryTable(adapter, name) {
291
291
  if (this._tables.has(name)) {
292
292
  const def = this._tables.get(name);
293
- if (def.columns && "deleted_at" in def.columns) {
293
+ if (def?.columns && "deleted_at" in def.columns) {
294
294
  return adapter.all(`SELECT * FROM "${name}" WHERE deleted_at IS NULL`);
295
295
  }
296
296
  return adapter.all(`SELECT * FROM "${name}"`);
@@ -474,7 +474,7 @@ function resolveEntitySource(source, entityRow, entityPk, adapter, protection) {
474
474
  case "self":
475
475
  return [entityRow];
476
476
  case "hasMany": {
477
- if (protection && protection.protectedTables.has(source.table)) {
477
+ if (protection?.protectedTables.has(source.table)) {
478
478
  if (source.table === protection.currentTable) return [entityRow];
479
479
  return [];
480
480
  }
@@ -486,7 +486,7 @@ function resolveEntitySource(source, entityRow, entityPk, adapter, protection) {
486
486
  return adapter.all(sql, params);
487
487
  }
488
488
  case "manyToMany": {
489
- if (protection && protection.protectedTables.has(source.remoteTable)) {
489
+ if (protection?.protectedTables.has(source.remoteTable)) {
490
490
  if (source.remoteTable === protection.currentTable) return [entityRow];
491
491
  return [];
492
492
  }
@@ -512,7 +512,7 @@ function resolveEntitySource(source, entityRow, entityPk, adapter, protection) {
512
512
  return adapter.all(sql, params);
513
513
  }
514
514
  case "belongsTo": {
515
- if (protection && protection.protectedTables.has(source.table)) {
515
+ if (protection?.protectedTables.has(source.table)) {
516
516
  if (source.table === protection.currentTable) return [entityRow];
517
517
  return [];
518
518
  }
@@ -935,6 +935,22 @@ var RenderEngine = class {
935
935
  const slug = def.slug(entityRow);
936
936
  const entityDir = def.directory ? (0, import_node_path4.join)(outputDir, def.directory(entityRow)) : (0, import_node_path4.join)(outputDir, directoryRoot, slug);
937
937
  (0, import_node_fs4.mkdirSync)(entityDir, { recursive: true });
938
+ if (def.attachFileColumn) {
939
+ const filePath = entityRow[def.attachFileColumn];
940
+ if (filePath && typeof filePath === "string" && filePath.length > 0) {
941
+ const absPath = (0, import_node_path4.isAbsolute)(filePath) ? filePath : (0, import_node_path4.resolve)(outputDir, filePath);
942
+ if ((0, import_node_fs4.existsSync)(absPath)) {
943
+ const destPath = (0, import_node_path4.join)(entityDir, (0, import_node_path4.basename)(absPath));
944
+ if (!(0, import_node_fs4.existsSync)(destPath)) {
945
+ try {
946
+ (0, import_node_fs4.copyFileSync)(absPath, destPath);
947
+ filesWritten.push(destPath);
948
+ } catch {
949
+ }
950
+ }
951
+ }
952
+ }
953
+ }
938
954
  const renderedFiles = /* @__PURE__ */ new Map();
939
955
  const entityFileHashes = {};
940
956
  const protection = protectedTables.size > 0 ? { protectedTables, currentTable: table } : void 0;
@@ -1818,9 +1834,7 @@ var Lattice = class {
1818
1834
  `Entity context "${table}" has encrypted: true but no encryptionKey was provided in Lattice options`
1819
1835
  );
1820
1836
  }
1821
- if (!this._encryptionKey) {
1822
- this._encryptionKey = deriveKey(this._encryptionKeyRaw);
1823
- }
1837
+ this._encryptionKey ??= deriveKey(this._encryptionKeyRaw);
1824
1838
  const pragmaRows = this._adapter.all(`PRAGMA table_info("${table}")`);
1825
1839
  const allCols = pragmaRows.map((r) => r.name);
1826
1840
  const encCols = resolveEncryptedColumns(def.encrypted, allCols);
@@ -2661,7 +2675,7 @@ function fixSchemaConflicts(db, checks) {
2661
2675
  }
2662
2676
  if (tableExists(db, "__lattice_migrations")) {
2663
2677
  const versionCol = db.prepare('PRAGMA table_info("__lattice_migrations")').all().find((c) => c.name === "version");
2664
- if (versionCol && versionCol.type.toUpperCase().includes("INTEGER")) {
2678
+ if (versionCol?.type.toUpperCase().includes("INTEGER")) {
2665
2679
  db.exec(
2666
2680
  'ALTER TABLE "__lattice_migrations" RENAME TO "__lattice_migrations_v1"'
2667
2681
  );
package/dist/index.d.cts CHANGED
@@ -534,6 +534,24 @@ interface EntityContextDefinition {
534
534
  encrypted?: boolean | {
535
535
  columns: string[];
536
536
  };
537
+ /**
538
+ * Column name containing a file path. When set, the render pipeline
539
+ * copies the referenced file into each entity's rendered directory.
540
+ *
541
+ * If the path is relative, it's resolved from `outputDir`.
542
+ * If the file doesn't exist, it's silently skipped.
543
+ * If a file with the same name already exists in the entity directory, the existing file is kept and the copy is skipped; copy failures are ignored.
544
+ *
545
+ * @example
546
+ * ```ts
547
+ * db.defineEntityContext('files', {
548
+ * slug: (r) => r.name,
549
+ * attachFileColumn: 'file_path', // copies file at row.file_path into entity dir
550
+ * files: { 'FILE.md': { ... } },
551
+ * });
552
+ * ```
553
+ */
554
+ attachFileColumn?: string;
537
555
  }
538
556
 
539
557
  interface CleanupOptions {
@@ -1521,10 +1539,10 @@ declare function contentHash(content: string): string;
1521
1539
  * // Now safe to call lattice.init()
1522
1540
  * ```
1523
1541
  */
1524
- declare function fixSchemaConflicts(db: Database.Database, checks: Array<{
1542
+ declare function fixSchemaConflicts(db: Database.Database, checks: {
1525
1543
  table: string;
1526
1544
  requiredColumns: string[];
1527
- }>): void;
1545
+ }[]): void;
1528
1546
 
1529
1547
  /**
1530
1548
  * Derive a 256-bit AES key from a master password using scrypt.
package/dist/index.d.ts CHANGED
@@ -534,6 +534,24 @@ interface EntityContextDefinition {
534
534
  encrypted?: boolean | {
535
535
  columns: string[];
536
536
  };
537
+ /**
538
+ * Column name containing a file path. When set, the render pipeline
539
+ * copies the referenced file into each entity's rendered directory.
540
+ *
541
+ * If the path is relative, it's resolved from `outputDir`.
542
+ * If the file doesn't exist, it's silently skipped.
543
+ * If a file with the same name already exists in the entity directory, the existing file is kept and the copy is skipped; copy failures are ignored.
544
+ *
545
+ * @example
546
+ * ```ts
547
+ * db.defineEntityContext('files', {
548
+ * slug: (r) => r.name,
549
+ * attachFileColumn: 'file_path', // copies file at row.file_path into entity dir
550
+ * files: { 'FILE.md': { ... } },
551
+ * });
552
+ * ```
553
+ */
554
+ attachFileColumn?: string;
537
555
  }
538
556
 
539
557
  interface CleanupOptions {
@@ -1521,10 +1539,10 @@ declare function contentHash(content: string): string;
1521
1539
  * // Now safe to call lattice.init()
1522
1540
  * ```
1523
1541
  */
1524
- declare function fixSchemaConflicts(db: Database.Database, checks: Array<{
1542
+ declare function fixSchemaConflicts(db: Database.Database, checks: {
1525
1543
  table: string;
1526
1544
  requiredColumns: string[];
1527
- }>): void;
1545
+ }[]): void;
1528
1546
 
1529
1547
  /**
1530
1548
  * Derive a 256-bit AES key from a master password using scrypt.
package/dist/index.js CHANGED
@@ -179,7 +179,7 @@ var SchemaManager = class {
179
179
  applySchema(adapter) {
180
180
  for (const [name, def] of this._tables) {
181
181
  const pkCols = this._tablePK.get(name) ?? ["id"];
182
- let constraints = def.tableConstraints ? [...def.tableConstraints] : [];
182
+ const constraints = def.tableConstraints ? [...def.tableConstraints] : [];
183
183
  if (pkCols.length > 1) {
184
184
  const alreadyHasPK = constraints.some((c) => c.toUpperCase().startsWith("PRIMARY KEY"));
185
185
  if (!alreadyHasPK) {
@@ -223,7 +223,7 @@ var SchemaManager = class {
223
223
  queryTable(adapter, name) {
224
224
  if (this._tables.has(name)) {
225
225
  const def = this._tables.get(name);
226
- if (def.columns && "deleted_at" in def.columns) {
226
+ if (def?.columns && "deleted_at" in def.columns) {
227
227
  return adapter.all(`SELECT * FROM "${name}" WHERE deleted_at IS NULL`);
228
228
  }
229
229
  return adapter.all(`SELECT * FROM "${name}"`);
@@ -320,8 +320,8 @@ var Sanitizer = class {
320
320
  };
321
321
 
322
322
  // src/render/engine.ts
323
- import { join as join4 } from "path";
324
- import { mkdirSync as mkdirSync2 } from "fs";
323
+ import { join as join4, basename, isAbsolute, resolve } from "path";
324
+ import { mkdirSync as mkdirSync2, existsSync as existsSync4, copyFileSync } from "fs";
325
325
 
326
326
  // src/render/entity-query.ts
327
327
  var SAFE_COL_RE = /^[a-zA-Z0-9_]+$/;
@@ -407,7 +407,7 @@ function resolveEntitySource(source, entityRow, entityPk, adapter, protection) {
407
407
  case "self":
408
408
  return [entityRow];
409
409
  case "hasMany": {
410
- if (protection && protection.protectedTables.has(source.table)) {
410
+ if (protection?.protectedTables.has(source.table)) {
411
411
  if (source.table === protection.currentTable) return [entityRow];
412
412
  return [];
413
413
  }
@@ -419,7 +419,7 @@ function resolveEntitySource(source, entityRow, entityPk, adapter, protection) {
419
419
  return adapter.all(sql, params);
420
420
  }
421
421
  case "manyToMany": {
422
- if (protection && protection.protectedTables.has(source.remoteTable)) {
422
+ if (protection?.protectedTables.has(source.remoteTable)) {
423
423
  if (source.remoteTable === protection.currentTable) return [entityRow];
424
424
  return [];
425
425
  }
@@ -445,7 +445,7 @@ function resolveEntitySource(source, entityRow, entityPk, adapter, protection) {
445
445
  return adapter.all(sql, params);
446
446
  }
447
447
  case "belongsTo": {
448
- if (protection && protection.protectedTables.has(source.table)) {
448
+ if (protection?.protectedTables.has(source.table)) {
449
449
  if (source.table === protection.currentTable) return [entityRow];
450
450
  return [];
451
451
  }
@@ -868,6 +868,22 @@ var RenderEngine = class {
868
868
  const slug = def.slug(entityRow);
869
869
  const entityDir = def.directory ? join4(outputDir, def.directory(entityRow)) : join4(outputDir, directoryRoot, slug);
870
870
  mkdirSync2(entityDir, { recursive: true });
871
+ if (def.attachFileColumn) {
872
+ const filePath = entityRow[def.attachFileColumn];
873
+ if (filePath && typeof filePath === "string" && filePath.length > 0) {
874
+ const absPath = isAbsolute(filePath) ? filePath : resolve(outputDir, filePath);
875
+ if (existsSync4(absPath)) {
876
+ const destPath = join4(entityDir, basename(absPath));
877
+ if (!existsSync4(destPath)) {
878
+ try {
879
+ copyFileSync(absPath, destPath);
880
+ filesWritten.push(destPath);
881
+ } catch {
882
+ }
883
+ }
884
+ }
885
+ }
886
+ }
871
887
  const renderedFiles = /* @__PURE__ */ new Map();
872
888
  const entityFileHashes = {};
873
889
  const protection = protectedTables.size > 0 ? { protectedTables, currentTable: table } : void 0;
@@ -917,7 +933,7 @@ var RenderEngine = class {
917
933
 
918
934
  // src/reverse-sync/engine.ts
919
935
  import { join as join5 } from "path";
920
- import { existsSync as existsSync4, readFileSync as readFileSync3 } from "fs";
936
+ import { existsSync as existsSync5, readFileSync as readFileSync3 } from "fs";
921
937
  var ReverseSyncEngine = class {
922
938
  _schema;
923
939
  _adapter;
@@ -969,7 +985,7 @@ var ReverseSyncEngine = class {
969
985
  if (!fileInfo.hash) continue;
970
986
  const filePath = join5(entityDir, filename);
971
987
  result.filesScanned++;
972
- if (!existsSync4(filePath)) continue;
988
+ if (!existsSync5(filePath)) continue;
973
989
  let currentContent;
974
990
  try {
975
991
  currentContent = readFileSync3(filePath, "utf8");
@@ -1069,8 +1085,8 @@ var SyncLoop = class {
1069
1085
  };
1070
1086
 
1071
1087
  // src/writeback/pipeline.ts
1072
- import { readFileSync as readFileSync4, statSync as statSync2, existsSync as existsSync5, readdirSync as readdirSync2 } from "fs";
1073
- import { join as join6, dirname as dirname2, basename } from "path";
1088
+ import { readFileSync as readFileSync4, statSync as statSync2, existsSync as existsSync6, readdirSync as readdirSync2 } from "fs";
1089
+ import { join as join6, dirname as dirname2, basename as basename2 } from "path";
1074
1090
 
1075
1091
  // src/writeback/state-store.ts
1076
1092
  var InMemoryStateStore = class {
@@ -1176,7 +1192,7 @@ var WritebackPipeline = class {
1176
1192
  let processed = 0;
1177
1193
  const store = def.stateStore ?? this._stateStore;
1178
1194
  for (const filePath of paths) {
1179
- if (!existsSync5(filePath)) continue;
1195
+ if (!existsSync6(filePath)) continue;
1180
1196
  const stat = statSync2(filePath);
1181
1197
  const currentSize = stat.size;
1182
1198
  const storedOffset = store.getOffset(filePath);
@@ -1210,8 +1226,8 @@ var WritebackPipeline = class {
1210
1226
  return [pattern];
1211
1227
  }
1212
1228
  const dir = dirname2(pattern);
1213
- const filePattern = basename(pattern);
1214
- if (!existsSync5(dir)) return [];
1229
+ const filePattern = basename2(pattern);
1230
+ if (!existsSync6(dir)) return [];
1215
1231
  const regexStr = filePattern.replace(/[.+^${}()|[\]\\]/g, "\\$&").replace(/\*/g, ".*").replace(/\?/g, ".");
1216
1232
  const regex = new RegExp(`^${regexStr}$`);
1217
1233
  return readdirSync2(dir).filter((f) => regex.test(f)).map((f) => join6(dir, f));
@@ -1335,10 +1351,10 @@ ${body}`;
1335
1351
 
1336
1352
  // src/config/parser.ts
1337
1353
  import { readFileSync as readFileSync5 } from "fs";
1338
- import { resolve, dirname as dirname3 } from "path";
1354
+ import { resolve as resolve2, dirname as dirname3 } from "path";
1339
1355
  import { parse } from "yaml";
1340
1356
  function parseConfigFile(configPath) {
1341
- const absPath = resolve(configPath);
1357
+ const absPath = resolve2(configPath);
1342
1358
  const configDir = dirname3(absPath);
1343
1359
  let raw;
1344
1360
  try {
@@ -1377,7 +1393,7 @@ function buildParsedConfig(raw, sourceName, configDir) {
1377
1393
  throw new Error(`Lattice: config.entities must be an object`);
1378
1394
  }
1379
1395
  const config = raw;
1380
- const dbPath = resolve(configDir, config.db);
1396
+ const dbPath = resolve2(configDir, config.db);
1381
1397
  const tables = [];
1382
1398
  for (const [entityName, entityDef] of Object.entries(config.entities)) {
1383
1399
  const definition = entityToTableDef(entityName, entityDef);
@@ -1751,9 +1767,7 @@ var Lattice = class {
1751
1767
  `Entity context "${table}" has encrypted: true but no encryptionKey was provided in Lattice options`
1752
1768
  );
1753
1769
  }
1754
- if (!this._encryptionKey) {
1755
- this._encryptionKey = deriveKey(this._encryptionKeyRaw);
1756
- }
1770
+ this._encryptionKey ??= deriveKey(this._encryptionKeyRaw);
1757
1771
  const pragmaRows = this._adapter.all(`PRAGMA table_info("${table}")`);
1758
1772
  const allCols = pragmaRows.map((r) => r.name);
1759
1773
  const encCols = resolveEncryptedColumns(def.encrypted, allCols);
@@ -2594,7 +2608,7 @@ function fixSchemaConflicts(db, checks) {
2594
2608
  }
2595
2609
  if (tableExists(db, "__lattice_migrations")) {
2596
2610
  const versionCol = db.prepare('PRAGMA table_info("__lattice_migrations")').all().find((c) => c.name === "version");
2597
- if (versionCol && versionCol.type.toUpperCase().includes("INTEGER")) {
2611
+ if (versionCol?.type.toUpperCase().includes("INTEGER")) {
2598
2612
  db.exec(
2599
2613
  'ALTER TABLE "__lattice_migrations" RENAME TO "__lattice_migrations_v1"'
2600
2614
  );
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "latticesql",
3
- "version": "0.18.2",
3
+ "version": "0.18.4",
4
4
  "description": "Persistent structured memory for AI agent systems — SQLite ↔ LLM context bridge",
5
5
  "type": "module",
6
6
  "main": "./dist/index.js",