drizzle-orm 0.33.0-b921e79 → 0.33.0-c5d1196

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -26,10 +26,9 @@ var import_react = require("react");
 async function readMigrationFiles({ journal, migrations }) {
   const migrationQueries = [];
   for await (const journalEntry of journal.entries) {
-    const name = `m${journalEntry.idx.toString().padStart(4, "0")}`;
-    const query = migrations[name];
+    const query = migrations[`m${journalEntry.idx.toString().padStart(4, "0")}`];
     if (!query) {
-      throw new Error(`Missing migration: ${name}`);
+      throw new Error(`Missing migration: ${journalEntry.tag}`);
     }
     try {
       const result = query.split("--> statement-breakpoint").map((it) => {
@@ -37,11 +36,12 @@ async function readMigrationFiles({ journal, migrations }) {
       });
       migrationQueries.push({
         sql: result,
+        bps: journalEntry.breakpoints,
         folderMillis: journalEntry.when,
         hash: ""
       });
     } catch {
-      throw new Error(`Failed to parse migration: ${name}`);
+      throw new Error(`Failed to parse migration: ${journalEntry.tag}`);
     }
   }
   return migrationQueries;
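
These two hunks change the compiled CommonJS build of src/expo-sqlite/migrator.ts: the migration lookup is keyed inline by the zero-padded journal index, error messages now report journalEntry.tag instead of the computed key, and each collected migration carries the journal's breakpoints flag as bps. A minimal sketch of the input shape readMigrationFiles expects, based on the MigrationConfig interface visible in the source map below; the concrete values are illustrative only, not taken from this package:

// Illustrative MigrationConfig-shaped input (values are made up).
const config = {
  journal: {
    entries: [
      { idx: 0, when: 1719859200000, tag: "0000_initial", breakpoints: true },
    ],
  },
  migrations: {
    // Keys follow the m${idx.toString().padStart(4, "0")} convention used above.
    m0000: "CREATE TABLE `users` (`id` integer PRIMARY KEY);\n--> statement-breakpoint\nCREATE INDEX `users_idx` ON `users` (`id`);",
  },
};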
@@ -1 +1 @@
- {"version":3,"sources":["../../src/expo-sqlite/migrator.ts"],"sourcesContent":["import { useEffect, useReducer } from 'react';\nimport type { MigrationMeta } from '~/migrator.ts';\nimport type { ExpoSQLiteDatabase } from './driver.ts';\n\ninterface MigrationConfig {\n\tjournal: {\n\t\tentries: { idx: number; when: number; tag: string; breakpoints: boolean }[];\n\t};\n\tmigrations: Record<string, string>;\n}\n\nasync function readMigrationFiles({ journal, migrations }: MigrationConfig): Promise<MigrationMeta[]> {\n\tconst migrationQueries: MigrationMeta[] = [];\n\n\tfor await (const journalEntry of journal.entries) {\n\t\tconst name = `m${journalEntry.idx.toString().padStart(4, '0')}`;\n\t\tconst query = migrations[name];\n\n\t\tif (!query) {\n\t\t\tthrow new Error(`Missing migration: ${name}`);\n\t\t}\n\n\t\ttry {\n\t\t\tconst result = query.split('--> statement-breakpoint').map((it) => {\n\t\t\t\treturn it;\n\t\t\t});\n\n\t\t\tmigrationQueries.push({\n\t\t\t\tsql: result,\n\t\t\t\tfolderMillis: journalEntry.when,\n\t\t\t\thash: '',\n\t\t\t});\n\t\t} catch {\n\t\t\tthrow new Error(`Failed to parse migration: ${name}`);\n\t\t}\n\t}\n\n\treturn migrationQueries;\n}\n\nexport async function migrate<TSchema extends Record<string, unknown>>(\n\tdb: ExpoSQLiteDatabase<TSchema>,\n\tconfig: MigrationConfig,\n) {\n\tconst migrations = await readMigrationFiles(config);\n\treturn db.dialect.migrate(migrations, db.session);\n}\n\ninterface State {\n\tsuccess: boolean;\n\terror?: Error;\n}\n\ntype Action =\n\t| { type: 'migrating' }\n\t| { type: 'migrated'; payload: true }\n\t| { type: 'error'; payload: Error };\n\nexport const useMigrations = (db: ExpoSQLiteDatabase<any>, migrations: {\n\tjournal: {\n\t\tentries: { idx: number; when: number; tag: string; breakpoints: boolean }[];\n\t};\n\tmigrations: Record<string, string>;\n}): State => {\n\tconst initialState: State = {\n\t\tsuccess: false,\n\t\terror: undefined,\n\t};\n\n\tconst fetchReducer = (state: State, action: Action): State => {\n\t\tswitch (action.type) {\n\t\t\tcase 'migrating': {\n\t\t\t\treturn { ...initialState };\n\t\t\t}\n\t\t\tcase 'migrated': {\n\t\t\t\treturn { ...initialState, success: action.payload };\n\t\t\t}\n\t\t\tcase 'error': {\n\t\t\t\treturn { ...initialState, error: action.payload };\n\t\t\t}\n\t\t\tdefault: {\n\t\t\t\treturn state;\n\t\t\t}\n\t\t}\n\t};\n\n\tconst [state, dispatch] = useReducer(fetchReducer, initialState);\n\n\tuseEffect(() => {\n\t\tdispatch({ type: 'migrating' });\n\t\tmigrate(db, migrations as any).then(() => {\n\t\t\tdispatch({ type: 'migrated', payload: true });\n\t\t}).catch((error) => {\n\t\t\tdispatch({ type: 'error', payload: error as Error });\n\t\t});\n\t}, []);\n\n\treturn 
state;\n};\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,mBAAsC;AAWtC,eAAe,mBAAmB,EAAE,SAAS,WAAW,GAA8C;AACrG,QAAM,mBAAoC,CAAC;AAE3C,mBAAiB,gBAAgB,QAAQ,SAAS;AACjD,UAAM,OAAO,IAAI,aAAa,IAAI,SAAS,EAAE,SAAS,GAAG,GAAG,CAAC;AAC7D,UAAM,QAAQ,WAAW,IAAI;AAE7B,QAAI,CAAC,OAAO;AACX,YAAM,IAAI,MAAM,sBAAsB,IAAI,EAAE;AAAA,IAC7C;AAEA,QAAI;AACH,YAAM,SAAS,MAAM,MAAM,0BAA0B,EAAE,IAAI,CAAC,OAAO;AAClE,eAAO;AAAA,MACR,CAAC;AAED,uBAAiB,KAAK;AAAA,QACrB,KAAK;AAAA,QACL,cAAc,aAAa;AAAA,QAC3B,MAAM;AAAA,MACP,CAAC;AAAA,IACF,QAAQ;AACP,YAAM,IAAI,MAAM,8BAA8B,IAAI,EAAE;AAAA,IACrD;AAAA,EACD;AAEA,SAAO;AACR;AAEA,eAAsB,QACrB,IACA,QACC;AACD,QAAM,aAAa,MAAM,mBAAmB,MAAM;AAClD,SAAO,GAAG,QAAQ,QAAQ,YAAY,GAAG,OAAO;AACjD;AAYO,MAAM,gBAAgB,CAAC,IAA6B,eAK9C;AACZ,QAAM,eAAsB;AAAA,IAC3B,SAAS;AAAA,IACT,OAAO;AAAA,EACR;AAEA,QAAM,eAAe,CAACA,QAAc,WAA0B;AAC7D,YAAQ,OAAO,MAAM;AAAA,MACpB,KAAK,aAAa;AACjB,eAAO,EAAE,GAAG,aAAa;AAAA,MAC1B;AAAA,MACA,KAAK,YAAY;AAChB,eAAO,EAAE,GAAG,cAAc,SAAS,OAAO,QAAQ;AAAA,MACnD;AAAA,MACA,KAAK,SAAS;AACb,eAAO,EAAE,GAAG,cAAc,OAAO,OAAO,QAAQ;AAAA,MACjD;AAAA,MACA,SAAS;AACR,eAAOA;AAAA,MACR;AAAA,IACD;AAAA,EACD;AAEA,QAAM,CAAC,OAAO,QAAQ,QAAI,yBAAW,cAAc,YAAY;AAE/D,8BAAU,MAAM;AACf,aAAS,EAAE,MAAM,YAAY,CAAC;AAC9B,YAAQ,IAAI,UAAiB,EAAE,KAAK,MAAM;AACzC,eAAS,EAAE,MAAM,YAAY,SAAS,KAAK,CAAC;AAAA,IAC7C,CAAC,EAAE,MAAM,CAAC,UAAU;AACnB,eAAS,EAAE,MAAM,SAAS,SAAS,MAAe,CAAC;AAAA,IACpD,CAAC;AAAA,EACF,GAAG,CAAC,CAAC;AAEL,SAAO;AACR;","names":["state"]}
+ {"version":3,"sources":["../../src/expo-sqlite/migrator.ts"],"sourcesContent":["import { useEffect, useReducer } from 'react';\nimport type { MigrationMeta } from '~/migrator.ts';\nimport type { ExpoSQLiteDatabase } from './driver.ts';\n\ninterface MigrationConfig {\n\tjournal: {\n\t\tentries: { idx: number; when: number; tag: string; breakpoints: boolean }[];\n\t};\n\tmigrations: Record<string, string>;\n}\n\nasync function readMigrationFiles({ journal, migrations }: MigrationConfig): Promise<MigrationMeta[]> {\n\tconst migrationQueries: MigrationMeta[] = [];\n\n\tfor await (const journalEntry of journal.entries) {\n\t\tconst query = migrations[`m${journalEntry.idx.toString().padStart(4, '0')}`];\n\n\t\tif (!query) {\n\t\t\tthrow new Error(`Missing migration: ${journalEntry.tag}`);\n\t\t}\n\n\t\ttry {\n\t\t\tconst result = query.split('--> statement-breakpoint').map((it) => {\n\t\t\t\treturn it;\n\t\t\t});\n\n\t\t\tmigrationQueries.push({\n\t\t\t\tsql: result,\n\t\t\t\tbps: journalEntry.breakpoints,\n\t\t\t\tfolderMillis: journalEntry.when,\n\t\t\t\thash: '',\n\t\t\t});\n\t\t} catch {\n\t\t\tthrow new Error(`Failed to parse migration: ${journalEntry.tag}`);\n\t\t}\n\t}\n\n\treturn migrationQueries;\n}\n\nexport async function migrate<TSchema extends Record<string, unknown>>(\n\tdb: ExpoSQLiteDatabase<TSchema>,\n\tconfig: MigrationConfig,\n) {\n\tconst migrations = await readMigrationFiles(config);\n\treturn db.dialect.migrate(migrations, db.session);\n}\n\ninterface State {\n\tsuccess: boolean;\n\terror?: Error;\n}\n\ntype Action =\n\t| { type: 'migrating' }\n\t| { type: 'migrated'; payload: true }\n\t| { type: 'error'; payload: Error };\n\nexport const useMigrations = (db: ExpoSQLiteDatabase<any>, migrations: {\n\tjournal: {\n\t\tentries: { idx: number; when: number; tag: string; breakpoints: boolean }[];\n\t};\n\tmigrations: Record<string, string>;\n}): State => {\n\tconst initialState: State = {\n\t\tsuccess: false,\n\t\terror: undefined,\n\t};\n\n\tconst fetchReducer = (state: State, action: Action): State => {\n\t\tswitch (action.type) {\n\t\t\tcase 'migrating': {\n\t\t\t\treturn { ...initialState };\n\t\t\t}\n\t\t\tcase 'migrated': {\n\t\t\t\treturn { ...initialState, success: action.payload };\n\t\t\t}\n\t\t\tcase 'error': {\n\t\t\t\treturn { ...initialState, error: action.payload };\n\t\t\t}\n\t\t\tdefault: {\n\t\t\t\treturn state;\n\t\t\t}\n\t\t}\n\t};\n\n\tconst [state, dispatch] = useReducer(fetchReducer, initialState);\n\n\tuseEffect(() => {\n\t\tdispatch({ type: 'migrating' });\n\t\tmigrate(db, migrations as any).then(() => {\n\t\t\tdispatch({ type: 'migrated', payload: true });\n\t\t}).catch((error) => {\n\t\t\tdispatch({ type: 'error', payload: error as Error });\n\t\t});\n\t}, []);\n\n\treturn 
state;\n};\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,mBAAsC;AAWtC,eAAe,mBAAmB,EAAE,SAAS,WAAW,GAA8C;AACrG,QAAM,mBAAoC,CAAC;AAE3C,mBAAiB,gBAAgB,QAAQ,SAAS;AACjD,UAAM,QAAQ,WAAW,IAAI,aAAa,IAAI,SAAS,EAAE,SAAS,GAAG,GAAG,CAAC,EAAE;AAE3E,QAAI,CAAC,OAAO;AACX,YAAM,IAAI,MAAM,sBAAsB,aAAa,GAAG,EAAE;AAAA,IACzD;AAEA,QAAI;AACH,YAAM,SAAS,MAAM,MAAM,0BAA0B,EAAE,IAAI,CAAC,OAAO;AAClE,eAAO;AAAA,MACR,CAAC;AAED,uBAAiB,KAAK;AAAA,QACrB,KAAK;AAAA,QACL,KAAK,aAAa;AAAA,QAClB,cAAc,aAAa;AAAA,QAC3B,MAAM;AAAA,MACP,CAAC;AAAA,IACF,QAAQ;AACP,YAAM,IAAI,MAAM,8BAA8B,aAAa,GAAG,EAAE;AAAA,IACjE;AAAA,EACD;AAEA,SAAO;AACR;AAEA,eAAsB,QACrB,IACA,QACC;AACD,QAAM,aAAa,MAAM,mBAAmB,MAAM;AAClD,SAAO,GAAG,QAAQ,QAAQ,YAAY,GAAG,OAAO;AACjD;AAYO,MAAM,gBAAgB,CAAC,IAA6B,eAK9C;AACZ,QAAM,eAAsB;AAAA,IAC3B,SAAS;AAAA,IACT,OAAO;AAAA,EACR;AAEA,QAAM,eAAe,CAACA,QAAc,WAA0B;AAC7D,YAAQ,OAAO,MAAM;AAAA,MACpB,KAAK,aAAa;AACjB,eAAO,EAAE,GAAG,aAAa;AAAA,MAC1B;AAAA,MACA,KAAK,YAAY;AAChB,eAAO,EAAE,GAAG,cAAc,SAAS,OAAO,QAAQ;AAAA,MACnD;AAAA,MACA,KAAK,SAAS;AACb,eAAO,EAAE,GAAG,cAAc,OAAO,OAAO,QAAQ;AAAA,MACjD;AAAA,MACA,SAAS;AACR,eAAOA;AAAA,MACR;AAAA,IACD;AAAA,EACD;AAEA,QAAM,CAAC,OAAO,QAAQ,QAAI,yBAAW,cAAc,YAAY;AAE/D,8BAAU,MAAM;AACf,aAAS,EAAE,MAAM,YAAY,CAAC;AAC9B,YAAQ,IAAI,UAAiB,EAAE,KAAK,MAAM;AACzC,eAAS,EAAE,MAAM,YAAY,SAAS,KAAK,CAAC;AAAA,IAC7C,CAAC,EAAE,MAAM,CAAC,UAAU;AACnB,eAAS,EAAE,MAAM,SAAS,SAAS,MAAe,CAAC;AAAA,IACpD,CAAC;AAAA,EACF,GAAG,CAAC,CAAC;AAEL,SAAO;AACR;","names":["state"]}
@@ -2,10 +2,9 @@ import { useEffect, useReducer } from "react";
 async function readMigrationFiles({ journal, migrations }) {
   const migrationQueries = [];
   for await (const journalEntry of journal.entries) {
-    const name = `m${journalEntry.idx.toString().padStart(4, "0")}`;
-    const query = migrations[name];
+    const query = migrations[`m${journalEntry.idx.toString().padStart(4, "0")}`];
     if (!query) {
-      throw new Error(`Missing migration: ${name}`);
+      throw new Error(`Missing migration: ${journalEntry.tag}`);
     }
     try {
       const result = query.split("--> statement-breakpoint").map((it) => {
@@ -13,11 +12,12 @@ async function readMigrationFiles({ journal, migrations }) {
       });
       migrationQueries.push({
         sql: result,
+        bps: journalEntry.breakpoints,
         folderMillis: journalEntry.when,
         hash: ""
       });
     } catch {
-      throw new Error(`Failed to parse migration: ${name}`);
+      throw new Error(`Failed to parse migration: ${journalEntry.tag}`);
     }
   }
   return migrationQueries;
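
The same rewrite is applied to the ESM build. For context, a hedged sketch of how the useMigrations hook exported by this module (visible in the source map) is typically consumed; the expo-sqlite setup and the drizzle-kit generated migrations bundle are assumptions about the consuming app, not part of this diff:

// Hedged usage sketch; openDatabaseSync and "./drizzle/migrations" are assumed app-side details.
import { openDatabaseSync } from "expo-sqlite";
import { drizzle } from "drizzle-orm/expo-sqlite";
import { useMigrations } from "drizzle-orm/expo-sqlite/migrator";
import migrations from "./drizzle/migrations";

const expo = openDatabaseSync("app.db");
const db = drizzle(expo);

export function useAppMigrations() {
  // Returns { success, error } as described by the State interface in the source above.
  return useMigrations(db, migrations);
}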
@@ -1 +1 @@
- {"version":3,"sources":["../../src/expo-sqlite/migrator.ts"],"sourcesContent":["import { useEffect, useReducer } from 'react';\nimport type { MigrationMeta } from '~/migrator.ts';\nimport type { ExpoSQLiteDatabase } from './driver.ts';\n\ninterface MigrationConfig {\n\tjournal: {\n\t\tentries: { idx: number; when: number; tag: string; breakpoints: boolean }[];\n\t};\n\tmigrations: Record<string, string>;\n}\n\nasync function readMigrationFiles({ journal, migrations }: MigrationConfig): Promise<MigrationMeta[]> {\n\tconst migrationQueries: MigrationMeta[] = [];\n\n\tfor await (const journalEntry of journal.entries) {\n\t\tconst name = `m${journalEntry.idx.toString().padStart(4, '0')}`;\n\t\tconst query = migrations[name];\n\n\t\tif (!query) {\n\t\t\tthrow new Error(`Missing migration: ${name}`);\n\t\t}\n\n\t\ttry {\n\t\t\tconst result = query.split('--> statement-breakpoint').map((it) => {\n\t\t\t\treturn it;\n\t\t\t});\n\n\t\t\tmigrationQueries.push({\n\t\t\t\tsql: result,\n\t\t\t\tfolderMillis: journalEntry.when,\n\t\t\t\thash: '',\n\t\t\t});\n\t\t} catch {\n\t\t\tthrow new Error(`Failed to parse migration: ${name}`);\n\t\t}\n\t}\n\n\treturn migrationQueries;\n}\n\nexport async function migrate<TSchema extends Record<string, unknown>>(\n\tdb: ExpoSQLiteDatabase<TSchema>,\n\tconfig: MigrationConfig,\n) {\n\tconst migrations = await readMigrationFiles(config);\n\treturn db.dialect.migrate(migrations, db.session);\n}\n\ninterface State {\n\tsuccess: boolean;\n\terror?: Error;\n}\n\ntype Action =\n\t| { type: 'migrating' }\n\t| { type: 'migrated'; payload: true }\n\t| { type: 'error'; payload: Error };\n\nexport const useMigrations = (db: ExpoSQLiteDatabase<any>, migrations: {\n\tjournal: {\n\t\tentries: { idx: number; when: number; tag: string; breakpoints: boolean }[];\n\t};\n\tmigrations: Record<string, string>;\n}): State => {\n\tconst initialState: State = {\n\t\tsuccess: false,\n\t\terror: undefined,\n\t};\n\n\tconst fetchReducer = (state: State, action: Action): State => {\n\t\tswitch (action.type) {\n\t\t\tcase 'migrating': {\n\t\t\t\treturn { ...initialState };\n\t\t\t}\n\t\t\tcase 'migrated': {\n\t\t\t\treturn { ...initialState, success: action.payload };\n\t\t\t}\n\t\t\tcase 'error': {\n\t\t\t\treturn { ...initialState, error: action.payload };\n\t\t\t}\n\t\t\tdefault: {\n\t\t\t\treturn state;\n\t\t\t}\n\t\t}\n\t};\n\n\tconst [state, dispatch] = useReducer(fetchReducer, initialState);\n\n\tuseEffect(() => {\n\t\tdispatch({ type: 'migrating' });\n\t\tmigrate(db, migrations as any).then(() => {\n\t\t\tdispatch({ type: 'migrated', payload: true });\n\t\t}).catch((error) => {\n\t\t\tdispatch({ type: 'error', payload: error as Error });\n\t\t});\n\t}, []);\n\n\treturn 
state;\n};\n"],"mappings":"AAAA,SAAS,WAAW,kBAAkB;AAWtC,eAAe,mBAAmB,EAAE,SAAS,WAAW,GAA8C;AACrG,QAAM,mBAAoC,CAAC;AAE3C,mBAAiB,gBAAgB,QAAQ,SAAS;AACjD,UAAM,OAAO,IAAI,aAAa,IAAI,SAAS,EAAE,SAAS,GAAG,GAAG,CAAC;AAC7D,UAAM,QAAQ,WAAW,IAAI;AAE7B,QAAI,CAAC,OAAO;AACX,YAAM,IAAI,MAAM,sBAAsB,IAAI,EAAE;AAAA,IAC7C;AAEA,QAAI;AACH,YAAM,SAAS,MAAM,MAAM,0BAA0B,EAAE,IAAI,CAAC,OAAO;AAClE,eAAO;AAAA,MACR,CAAC;AAED,uBAAiB,KAAK;AAAA,QACrB,KAAK;AAAA,QACL,cAAc,aAAa;AAAA,QAC3B,MAAM;AAAA,MACP,CAAC;AAAA,IACF,QAAQ;AACP,YAAM,IAAI,MAAM,8BAA8B,IAAI,EAAE;AAAA,IACrD;AAAA,EACD;AAEA,SAAO;AACR;AAEA,eAAsB,QACrB,IACA,QACC;AACD,QAAM,aAAa,MAAM,mBAAmB,MAAM;AAClD,SAAO,GAAG,QAAQ,QAAQ,YAAY,GAAG,OAAO;AACjD;AAYO,MAAM,gBAAgB,CAAC,IAA6B,eAK9C;AACZ,QAAM,eAAsB;AAAA,IAC3B,SAAS;AAAA,IACT,OAAO;AAAA,EACR;AAEA,QAAM,eAAe,CAACA,QAAc,WAA0B;AAC7D,YAAQ,OAAO,MAAM;AAAA,MACpB,KAAK,aAAa;AACjB,eAAO,EAAE,GAAG,aAAa;AAAA,MAC1B;AAAA,MACA,KAAK,YAAY;AAChB,eAAO,EAAE,GAAG,cAAc,SAAS,OAAO,QAAQ;AAAA,MACnD;AAAA,MACA,KAAK,SAAS;AACb,eAAO,EAAE,GAAG,cAAc,OAAO,OAAO,QAAQ;AAAA,MACjD;AAAA,MACA,SAAS;AACR,eAAOA;AAAA,MACR;AAAA,IACD;AAAA,EACD;AAEA,QAAM,CAAC,OAAO,QAAQ,IAAI,WAAW,cAAc,YAAY;AAE/D,YAAU,MAAM;AACf,aAAS,EAAE,MAAM,YAAY,CAAC;AAC9B,YAAQ,IAAI,UAAiB,EAAE,KAAK,MAAM;AACzC,eAAS,EAAE,MAAM,YAAY,SAAS,KAAK,CAAC;AAAA,IAC7C,CAAC,EAAE,MAAM,CAAC,UAAU;AACnB,eAAS,EAAE,MAAM,SAAS,SAAS,MAAe,CAAC;AAAA,IACpD,CAAC;AAAA,EACF,GAAG,CAAC,CAAC;AAEL,SAAO;AACR;","names":["state"]}
+ {"version":3,"sources":["../../src/expo-sqlite/migrator.ts"],"sourcesContent":["import { useEffect, useReducer } from 'react';\nimport type { MigrationMeta } from '~/migrator.ts';\nimport type { ExpoSQLiteDatabase } from './driver.ts';\n\ninterface MigrationConfig {\n\tjournal: {\n\t\tentries: { idx: number; when: number; tag: string; breakpoints: boolean }[];\n\t};\n\tmigrations: Record<string, string>;\n}\n\nasync function readMigrationFiles({ journal, migrations }: MigrationConfig): Promise<MigrationMeta[]> {\n\tconst migrationQueries: MigrationMeta[] = [];\n\n\tfor await (const journalEntry of journal.entries) {\n\t\tconst query = migrations[`m${journalEntry.idx.toString().padStart(4, '0')}`];\n\n\t\tif (!query) {\n\t\t\tthrow new Error(`Missing migration: ${journalEntry.tag}`);\n\t\t}\n\n\t\ttry {\n\t\t\tconst result = query.split('--> statement-breakpoint').map((it) => {\n\t\t\t\treturn it;\n\t\t\t});\n\n\t\t\tmigrationQueries.push({\n\t\t\t\tsql: result,\n\t\t\t\tbps: journalEntry.breakpoints,\n\t\t\t\tfolderMillis: journalEntry.when,\n\t\t\t\thash: '',\n\t\t\t});\n\t\t} catch {\n\t\t\tthrow new Error(`Failed to parse migration: ${journalEntry.tag}`);\n\t\t}\n\t}\n\n\treturn migrationQueries;\n}\n\nexport async function migrate<TSchema extends Record<string, unknown>>(\n\tdb: ExpoSQLiteDatabase<TSchema>,\n\tconfig: MigrationConfig,\n) {\n\tconst migrations = await readMigrationFiles(config);\n\treturn db.dialect.migrate(migrations, db.session);\n}\n\ninterface State {\n\tsuccess: boolean;\n\terror?: Error;\n}\n\ntype Action =\n\t| { type: 'migrating' }\n\t| { type: 'migrated'; payload: true }\n\t| { type: 'error'; payload: Error };\n\nexport const useMigrations = (db: ExpoSQLiteDatabase<any>, migrations: {\n\tjournal: {\n\t\tentries: { idx: number; when: number; tag: string; breakpoints: boolean }[];\n\t};\n\tmigrations: Record<string, string>;\n}): State => {\n\tconst initialState: State = {\n\t\tsuccess: false,\n\t\terror: undefined,\n\t};\n\n\tconst fetchReducer = (state: State, action: Action): State => {\n\t\tswitch (action.type) {\n\t\t\tcase 'migrating': {\n\t\t\t\treturn { ...initialState };\n\t\t\t}\n\t\t\tcase 'migrated': {\n\t\t\t\treturn { ...initialState, success: action.payload };\n\t\t\t}\n\t\t\tcase 'error': {\n\t\t\t\treturn { ...initialState, error: action.payload };\n\t\t\t}\n\t\t\tdefault: {\n\t\t\t\treturn state;\n\t\t\t}\n\t\t}\n\t};\n\n\tconst [state, dispatch] = useReducer(fetchReducer, initialState);\n\n\tuseEffect(() => {\n\t\tdispatch({ type: 'migrating' });\n\t\tmigrate(db, migrations as any).then(() => {\n\t\t\tdispatch({ type: 'migrated', payload: true });\n\t\t}).catch((error) => {\n\t\t\tdispatch({ type: 'error', payload: error as Error });\n\t\t});\n\t}, []);\n\n\treturn 
state;\n};\n"],"mappings":"AAAA,SAAS,WAAW,kBAAkB;AAWtC,eAAe,mBAAmB,EAAE,SAAS,WAAW,GAA8C;AACrG,QAAM,mBAAoC,CAAC;AAE3C,mBAAiB,gBAAgB,QAAQ,SAAS;AACjD,UAAM,QAAQ,WAAW,IAAI,aAAa,IAAI,SAAS,EAAE,SAAS,GAAG,GAAG,CAAC,EAAE;AAE3E,QAAI,CAAC,OAAO;AACX,YAAM,IAAI,MAAM,sBAAsB,aAAa,GAAG,EAAE;AAAA,IACzD;AAEA,QAAI;AACH,YAAM,SAAS,MAAM,MAAM,0BAA0B,EAAE,IAAI,CAAC,OAAO;AAClE,eAAO;AAAA,MACR,CAAC;AAED,uBAAiB,KAAK;AAAA,QACrB,KAAK;AAAA,QACL,KAAK,aAAa;AAAA,QAClB,cAAc,aAAa;AAAA,QAC3B,MAAM;AAAA,MACP,CAAC;AAAA,IACF,QAAQ;AACP,YAAM,IAAI,MAAM,8BAA8B,aAAa,GAAG,EAAE;AAAA,IACjE;AAAA,EACD;AAEA,SAAO;AACR;AAEA,eAAsB,QACrB,IACA,QACC;AACD,QAAM,aAAa,MAAM,mBAAmB,MAAM;AAClD,SAAO,GAAG,QAAQ,QAAQ,YAAY,GAAG,OAAO;AACjD;AAYO,MAAM,gBAAgB,CAAC,IAA6B,eAK9C;AACZ,QAAM,eAAsB;AAAA,IAC3B,SAAS;AAAA,IACT,OAAO;AAAA,EACR;AAEA,QAAM,eAAe,CAACA,QAAc,WAA0B;AAC7D,YAAQ,OAAO,MAAM;AAAA,MACpB,KAAK,aAAa;AACjB,eAAO,EAAE,GAAG,aAAa;AAAA,MAC1B;AAAA,MACA,KAAK,YAAY;AAChB,eAAO,EAAE,GAAG,cAAc,SAAS,OAAO,QAAQ;AAAA,MACnD;AAAA,MACA,KAAK,SAAS;AACb,eAAO,EAAE,GAAG,cAAc,OAAO,OAAO,QAAQ;AAAA,MACjD;AAAA,MACA,SAAS;AACR,eAAOA;AAAA,MACR;AAAA,IACD;AAAA,EACD;AAEA,QAAM,CAAC,OAAO,QAAQ,IAAI,WAAW,cAAc,YAAY;AAE/D,YAAU,MAAM;AACf,aAAS,EAAE,MAAM,YAAY,CAAC;AAC9B,YAAQ,IAAI,UAAiB,EAAE,KAAK,MAAM;AACzC,eAAS,EAAE,MAAM,YAAY,SAAS,KAAK,CAAC;AAAA,IAC7C,CAAC,EAAE,MAAM,CAAC,UAAU;AACnB,eAAS,EAAE,MAAM,SAAS,SAAS,MAAe,CAAC;AAAA,IACpD,CAAC;AAAA,EACF,GAAG,CAAC,CAAC;AAEL,SAAO;AACR;","names":["state"]}
@@ -51,7 +51,7 @@ async function migrate(db, config) {
       );
     }
   }
-  await db.session.batch(statementToBatch);
+  await db.session.migrate(statementToBatch);
 }
 // Annotate the CommonJS export names for ESM import in node:
 0 && (module.exports = {
@@ -1 +1 @@
- {"version":3,"sources":["../../src/libsql/migrator.ts"],"sourcesContent":["import type { MigrationConfig } from '~/migrator.ts';\nimport { readMigrationFiles } from '~/migrator.ts';\nimport { sql } from '~/sql/sql.ts';\nimport type { LibSQLDatabase } from './driver.ts';\n\nexport async function migrate<TSchema extends Record<string, unknown>>(\n\tdb: LibSQLDatabase<TSchema>,\n\tconfig: MigrationConfig,\n) {\n\tconst migrations = readMigrationFiles(config);\n\tconst migrationsTable = config === undefined\n\t\t? '__drizzle_migrations'\n\t\t: typeof config === 'string'\n\t\t? '__drizzle_migrations'\n\t\t: config.migrationsTable ?? '__drizzle_migrations';\n\n\tconst migrationTableCreate = sql`\n\t\tCREATE TABLE IF NOT EXISTS ${sql.identifier(migrationsTable)} (\n\t\t\tid SERIAL PRIMARY KEY,\n\t\t\thash text NOT NULL,\n\t\t\tcreated_at numeric\n\t\t)\n\t`;\n\tawait db.session.run(migrationTableCreate);\n\n\tconst dbMigrations = await db.values<[number, string, string]>(\n\t\tsql`SELECT id, hash, created_at FROM ${sql.identifier(migrationsTable)} ORDER BY created_at DESC LIMIT 1`,\n\t);\n\n\tconst lastDbMigration = dbMigrations[0] ?? undefined;\n\n\tconst statementToBatch = [];\n\n\tfor (const migration of migrations) {\n\t\tif (!lastDbMigration || Number(lastDbMigration[2])! < migration.folderMillis) {\n\t\t\tfor (const stmt of migration.sql) {\n\t\t\t\tstatementToBatch.push(db.run(sql.raw(stmt)));\n\t\t\t}\n\n\t\t\tstatementToBatch.push(\n\t\t\t\tdb.run(\n\t\t\t\t\tsql`INSERT INTO ${\n\t\t\t\t\t\tsql.identifier(migrationsTable)\n\t\t\t\t\t} (\"hash\", \"created_at\") VALUES(${migration.hash}, ${migration.folderMillis})`,\n\t\t\t\t),\n\t\t\t);\n\t\t}\n\t}\n\n\tawait db.session.batch(statementToBatch);\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AACA,sBAAmC;AACnC,iBAAoB;AAGpB,eAAsB,QACrB,IACA,QACC;AACD,QAAM,iBAAa,oCAAmB,MAAM;AAC5C,QAAM,kBAAkB,WAAW,SAChC,yBACA,OAAO,WAAW,WAClB,yBACA,OAAO,mBAAmB;AAE7B,QAAM,uBAAuB;AAAA,+BACC,eAAI,WAAW,eAAe,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAM7D,QAAM,GAAG,QAAQ,IAAI,oBAAoB;AAEzC,QAAM,eAAe,MAAM,GAAG;AAAA,IAC7B,kDAAuC,eAAI,WAAW,eAAe,CAAC;AAAA,EACvE;AAEA,QAAM,kBAAkB,aAAa,CAAC,KAAK;AAE3C,QAAM,mBAAmB,CAAC;AAE1B,aAAW,aAAa,YAAY;AACnC,QAAI,CAAC,mBAAmB,OAAO,gBAAgB,CAAC,CAAC,IAAK,UAAU,cAAc;AAC7E,iBAAW,QAAQ,UAAU,KAAK;AACjC,yBAAiB,KAAK,GAAG,IAAI,eAAI,IAAI,IAAI,CAAC,CAAC;AAAA,MAC5C;AAEA,uBAAiB;AAAA,QAChB,GAAG;AAAA,UACF,6BACC,eAAI,WAAW,eAAe,CAC/B,kCAAkC,UAAU,IAAI,KAAK,UAAU,YAAY;AAAA,QAC5E;AAAA,MACD;AAAA,IACD;AAAA,EACD;AAEA,QAAM,GAAG,QAAQ,MAAM,gBAAgB;AACxC;","names":[]}
+ {"version":3,"sources":["../../src/libsql/migrator.ts"],"sourcesContent":["import type { MigrationConfig } from '~/migrator.ts';\nimport { readMigrationFiles } from '~/migrator.ts';\nimport { sql } from '~/sql/sql.ts';\nimport type { LibSQLDatabase } from './driver.ts';\n\nexport async function migrate<TSchema extends Record<string, unknown>>(\n\tdb: LibSQLDatabase<TSchema>,\n\tconfig: MigrationConfig,\n) {\n\tconst migrations = readMigrationFiles(config);\n\tconst migrationsTable = config === undefined\n\t\t? '__drizzle_migrations'\n\t\t: typeof config === 'string'\n\t\t? '__drizzle_migrations'\n\t\t: config.migrationsTable ?? '__drizzle_migrations';\n\n\tconst migrationTableCreate = sql`\n\t\tCREATE TABLE IF NOT EXISTS ${sql.identifier(migrationsTable)} (\n\t\t\tid SERIAL PRIMARY KEY,\n\t\t\thash text NOT NULL,\n\t\t\tcreated_at numeric\n\t\t)\n\t`;\n\tawait db.session.run(migrationTableCreate);\n\n\tconst dbMigrations = await db.values<[number, string, string]>(\n\t\tsql`SELECT id, hash, created_at FROM ${sql.identifier(migrationsTable)} ORDER BY created_at DESC LIMIT 1`,\n\t);\n\n\tconst lastDbMigration = dbMigrations[0] ?? undefined;\n\n\tconst statementToBatch = [];\n\n\tfor (const migration of migrations) {\n\t\tif (!lastDbMigration || Number(lastDbMigration[2])! < migration.folderMillis) {\n\t\t\tfor (const stmt of migration.sql) {\n\t\t\t\tstatementToBatch.push(db.run(sql.raw(stmt)));\n\t\t\t}\n\n\t\t\tstatementToBatch.push(\n\t\t\t\tdb.run(\n\t\t\t\t\tsql`INSERT INTO ${\n\t\t\t\t\t\tsql.identifier(migrationsTable)\n\t\t\t\t\t} (\"hash\", \"created_at\") VALUES(${migration.hash}, ${migration.folderMillis})`,\n\t\t\t\t),\n\t\t\t);\n\t\t}\n\t}\n\n\tawait db.session.migrate(statementToBatch);\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AACA,sBAAmC;AACnC,iBAAoB;AAGpB,eAAsB,QACrB,IACA,QACC;AACD,QAAM,iBAAa,oCAAmB,MAAM;AAC5C,QAAM,kBAAkB,WAAW,SAChC,yBACA,OAAO,WAAW,WAClB,yBACA,OAAO,mBAAmB;AAE7B,QAAM,uBAAuB;AAAA,+BACC,eAAI,WAAW,eAAe,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAM7D,QAAM,GAAG,QAAQ,IAAI,oBAAoB;AAEzC,QAAM,eAAe,MAAM,GAAG;AAAA,IAC7B,kDAAuC,eAAI,WAAW,eAAe,CAAC;AAAA,EACvE;AAEA,QAAM,kBAAkB,aAAa,CAAC,KAAK;AAE3C,QAAM,mBAAmB,CAAC;AAE1B,aAAW,aAAa,YAAY;AACnC,QAAI,CAAC,mBAAmB,OAAO,gBAAgB,CAAC,CAAC,IAAK,UAAU,cAAc;AAC7E,iBAAW,QAAQ,UAAU,KAAK;AACjC,yBAAiB,KAAK,GAAG,IAAI,eAAI,IAAI,IAAI,CAAC,CAAC;AAAA,MAC5C;AAEA,uBAAiB;AAAA,QAChB,GAAG;AAAA,UACF,6BACC,eAAI,WAAW,eAAe,CAC/B,kCAAkC,UAAU,IAAI,KAAK,UAAU,YAAY;AAAA,QAC5E;AAAA,MACD;AAAA,IACD;AAAA,EACD;AAEA,QAAM,GAAG,QAAQ,QAAQ,gBAAgB;AAC1C;","names":[]}
@@ -28,7 +28,7 @@ async function migrate(db, config) {
       );
     }
   }
-  await db.session.batch(statementToBatch);
+  await db.session.migrate(statementToBatch);
 }
 export {
   migrate
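
The libsql migrator (both builds) now hands the collected statements to db.session.migrate instead of db.session.batch. A hedged sketch of calling the exported migrate() helper; the database URL and migrationsFolder are illustrative assumptions:

// Hedged usage sketch; url and migrationsFolder are illustrative.
import { createClient } from "@libsql/client";
import { drizzle } from "drizzle-orm/libsql";
import { migrate } from "drizzle-orm/libsql/migrator";

const client = createClient({ url: "file:local.db" });
const db = drizzle(client);

// readMigrationFiles(config) in the hunk above loads the generated SQL and
// records applied entries in the __drizzle_migrations table it creates.
await migrate(db, { migrationsFolder: "./drizzle" });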
@@ -1 +1 @@
- {"version":3,"sources":["../../src/libsql/migrator.ts"],"sourcesContent":["import type { MigrationConfig } from '~/migrator.ts';\nimport { readMigrationFiles } from '~/migrator.ts';\nimport { sql } from '~/sql/sql.ts';\nimport type { LibSQLDatabase } from './driver.ts';\n\nexport async function migrate<TSchema extends Record<string, unknown>>(\n\tdb: LibSQLDatabase<TSchema>,\n\tconfig: MigrationConfig,\n) {\n\tconst migrations = readMigrationFiles(config);\n\tconst migrationsTable = config === undefined\n\t\t? '__drizzle_migrations'\n\t\t: typeof config === 'string'\n\t\t? '__drizzle_migrations'\n\t\t: config.migrationsTable ?? '__drizzle_migrations';\n\n\tconst migrationTableCreate = sql`\n\t\tCREATE TABLE IF NOT EXISTS ${sql.identifier(migrationsTable)} (\n\t\t\tid SERIAL PRIMARY KEY,\n\t\t\thash text NOT NULL,\n\t\t\tcreated_at numeric\n\t\t)\n\t`;\n\tawait db.session.run(migrationTableCreate);\n\n\tconst dbMigrations = await db.values<[number, string, string]>(\n\t\tsql`SELECT id, hash, created_at FROM ${sql.identifier(migrationsTable)} ORDER BY created_at DESC LIMIT 1`,\n\t);\n\n\tconst lastDbMigration = dbMigrations[0] ?? undefined;\n\n\tconst statementToBatch = [];\n\n\tfor (const migration of migrations) {\n\t\tif (!lastDbMigration || Number(lastDbMigration[2])! < migration.folderMillis) {\n\t\t\tfor (const stmt of migration.sql) {\n\t\t\t\tstatementToBatch.push(db.run(sql.raw(stmt)));\n\t\t\t}\n\n\t\t\tstatementToBatch.push(\n\t\t\t\tdb.run(\n\t\t\t\t\tsql`INSERT INTO ${\n\t\t\t\t\t\tsql.identifier(migrationsTable)\n\t\t\t\t\t} (\"hash\", \"created_at\") VALUES(${migration.hash}, ${migration.folderMillis})`,\n\t\t\t\t),\n\t\t\t);\n\t\t}\n\t}\n\n\tawait db.session.batch(statementToBatch);\n}\n"],"mappings":"AACA,SAAS,0BAA0B;AACnC,SAAS,WAAW;AAGpB,eAAsB,QACrB,IACA,QACC;AACD,QAAM,aAAa,mBAAmB,MAAM;AAC5C,QAAM,kBAAkB,WAAW,SAChC,yBACA,OAAO,WAAW,WAClB,yBACA,OAAO,mBAAmB;AAE7B,QAAM,uBAAuB;AAAA,+BACC,IAAI,WAAW,eAAe,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAM7D,QAAM,GAAG,QAAQ,IAAI,oBAAoB;AAEzC,QAAM,eAAe,MAAM,GAAG;AAAA,IAC7B,uCAAuC,IAAI,WAAW,eAAe,CAAC;AAAA,EACvE;AAEA,QAAM,kBAAkB,aAAa,CAAC,KAAK;AAE3C,QAAM,mBAAmB,CAAC;AAE1B,aAAW,aAAa,YAAY;AACnC,QAAI,CAAC,mBAAmB,OAAO,gBAAgB,CAAC,CAAC,IAAK,UAAU,cAAc;AAC7E,iBAAW,QAAQ,UAAU,KAAK;AACjC,yBAAiB,KAAK,GAAG,IAAI,IAAI,IAAI,IAAI,CAAC,CAAC;AAAA,MAC5C;AAEA,uBAAiB;AAAA,QAChB,GAAG;AAAA,UACF,kBACC,IAAI,WAAW,eAAe,CAC/B,kCAAkC,UAAU,IAAI,KAAK,UAAU,YAAY;AAAA,QAC5E;AAAA,MACD;AAAA,IACD;AAAA,EACD;AAEA,QAAM,GAAG,QAAQ,MAAM,gBAAgB;AACxC;","names":[]}
+ {"version":3,"sources":["../../src/libsql/migrator.ts"],"sourcesContent":["import type { MigrationConfig } from '~/migrator.ts';\nimport { readMigrationFiles } from '~/migrator.ts';\nimport { sql } from '~/sql/sql.ts';\nimport type { LibSQLDatabase } from './driver.ts';\n\nexport async function migrate<TSchema extends Record<string, unknown>>(\n\tdb: LibSQLDatabase<TSchema>,\n\tconfig: MigrationConfig,\n) {\n\tconst migrations = readMigrationFiles(config);\n\tconst migrationsTable = config === undefined\n\t\t? '__drizzle_migrations'\n\t\t: typeof config === 'string'\n\t\t? '__drizzle_migrations'\n\t\t: config.migrationsTable ?? '__drizzle_migrations';\n\n\tconst migrationTableCreate = sql`\n\t\tCREATE TABLE IF NOT EXISTS ${sql.identifier(migrationsTable)} (\n\t\t\tid SERIAL PRIMARY KEY,\n\t\t\thash text NOT NULL,\n\t\t\tcreated_at numeric\n\t\t)\n\t`;\n\tawait db.session.run(migrationTableCreate);\n\n\tconst dbMigrations = await db.values<[number, string, string]>(\n\t\tsql`SELECT id, hash, created_at FROM ${sql.identifier(migrationsTable)} ORDER BY created_at DESC LIMIT 1`,\n\t);\n\n\tconst lastDbMigration = dbMigrations[0] ?? undefined;\n\n\tconst statementToBatch = [];\n\n\tfor (const migration of migrations) {\n\t\tif (!lastDbMigration || Number(lastDbMigration[2])! < migration.folderMillis) {\n\t\t\tfor (const stmt of migration.sql) {\n\t\t\t\tstatementToBatch.push(db.run(sql.raw(stmt)));\n\t\t\t}\n\n\t\t\tstatementToBatch.push(\n\t\t\t\tdb.run(\n\t\t\t\t\tsql`INSERT INTO ${\n\t\t\t\t\t\tsql.identifier(migrationsTable)\n\t\t\t\t\t} (\"hash\", \"created_at\") VALUES(${migration.hash}, ${migration.folderMillis})`,\n\t\t\t\t),\n\t\t\t);\n\t\t}\n\t}\n\n\tawait db.session.migrate(statementToBatch);\n}\n"],"mappings":"AACA,SAAS,0BAA0B;AACnC,SAAS,WAAW;AAGpB,eAAsB,QACrB,IACA,QACC;AACD,QAAM,aAAa,mBAAmB,MAAM;AAC5C,QAAM,kBAAkB,WAAW,SAChC,yBACA,OAAO,WAAW,WAClB,yBACA,OAAO,mBAAmB;AAE7B,QAAM,uBAAuB;AAAA,+BACC,IAAI,WAAW,eAAe,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAM7D,QAAM,GAAG,QAAQ,IAAI,oBAAoB;AAEzC,QAAM,eAAe,MAAM,GAAG;AAAA,IAC7B,uCAAuC,IAAI,WAAW,eAAe,CAAC;AAAA,EACvE;AAEA,QAAM,kBAAkB,aAAa,CAAC,KAAK;AAE3C,QAAM,mBAAmB,CAAC;AAE1B,aAAW,aAAa,YAAY;AACnC,QAAI,CAAC,mBAAmB,OAAO,gBAAgB,CAAC,CAAC,IAAK,UAAU,cAAc;AAC7E,iBAAW,QAAQ,UAAU,KAAK;AACjC,yBAAiB,KAAK,GAAG,IAAI,IAAI,IAAI,IAAI,CAAC,CAAC;AAAA,MAC5C;AAEA,uBAAiB;AAAA,QAChB,GAAG;AAAA,UACF,kBACC,IAAI,WAAW,eAAe,CAC/B,kCAAkC,UAAU,IAAI,KAAK,UAAU,YAAY;AAAA,QAC5E;AAAA,MACD;AAAA,IACD;AAAA,EACD;AAEA,QAAM,GAAG,QAAQ,QAAQ,gBAAgB;AAC1C;","names":[]}
@@ -64,6 +64,18 @@ class LibSQLSession extends import_session.SQLiteSession {
     const batchResults = await this.client.batch(builtQueries);
     return batchResults.map((result, i) => preparedQueries[i].mapResult(result, true));
   }
+  async migrate(queries) {
+    const preparedQueries = [];
+    const builtQueries = [];
+    for (const query of queries) {
+      const preparedQuery = query._prepare();
+      const builtQuery = preparedQuery.getQuery();
+      preparedQueries.push(preparedQuery);
+      builtQueries.push({ sql: builtQuery.sql, args: builtQuery.params });
+    }
+    const batchResults = await this.client.migrate(builtQueries);
+    return batchResults.map((result, i) => preparedQueries[i].mapResult(result, true));
+  }
   async transaction(transaction, _config) {
     const libsqlTx = await this.client.transaction();
     const session = new LibSQLSession(
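
The CommonJS session gains a migrate() method that mirrors batch(): it prepares each query, collects { sql, args } pairs, and forwards them to this.client.migrate rather than this.client.batch. A rough sketch of the statement shape the libsql client receives; the client.migrate call is taken from this hunk and the statements themselves are illustrative:

// Sketch of the InStatement[] payload built by the new migrate() method above.
import { createClient, type InStatement } from "@libsql/client";

const client = createClient({ url: "file:local.db" });

const builtQueries: InStatement[] = [
  { sql: "CREATE TABLE IF NOT EXISTS `users` (`id` integer PRIMARY KEY)", args: [] },
  { sql: 'INSERT INTO "__drizzle_migrations" ("hash", "created_at") VALUES (?, ?)', args: ["", Date.now()] },
];

// Each result is mapped back through the corresponding prepared query, as in the hunk.
const results = await client.migrate(builtQueries);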
@@ -1 +1 @@
- {"version":3,"sources":["../../src/libsql/session.ts"],"sourcesContent":["import type { Client, InArgs, InStatement, ResultSet, Transaction } from '@libsql/client';\nimport type { BatchItem as BatchItem } from '~/batch.ts';\nimport { entityKind } from '~/entity.ts';\nimport type { Logger } from '~/logger.ts';\nimport { NoopLogger } from '~/logger.ts';\nimport type { RelationalSchemaConfig, TablesRelationalConfig } from '~/relations.ts';\nimport type { PreparedQuery } from '~/session.ts';\nimport { fillPlaceholders, type Query, sql } from '~/sql/sql.ts';\nimport type { SQLiteAsyncDialect } from '~/sqlite-core/dialect.ts';\nimport { SQLiteTransaction } from '~/sqlite-core/index.ts';\nimport type { SelectedFieldsOrdered } from '~/sqlite-core/query-builders/select.types.ts';\nimport type {\n\tPreparedQueryConfig as PreparedQueryConfigBase,\n\tSQLiteExecuteMethod,\n\tSQLiteTransactionConfig,\n} from '~/sqlite-core/session.ts';\nimport { SQLitePreparedQuery, SQLiteSession } from '~/sqlite-core/session.ts';\nimport { mapResultRow } from '~/utils.ts';\n\nexport interface LibSQLSessionOptions {\n\tlogger?: Logger;\n}\n\ntype PreparedQueryConfig = Omit<PreparedQueryConfigBase, 'statement' | 'run'>;\n\nexport class LibSQLSession<\n\tTFullSchema extends Record<string, unknown>,\n\tTSchema extends TablesRelationalConfig,\n> extends SQLiteSession<'async', ResultSet, TFullSchema, TSchema> {\n\tstatic readonly [entityKind]: string = 'LibSQLSession';\n\n\tprivate logger: Logger;\n\n\tconstructor(\n\t\tprivate client: Client,\n\t\tdialect: SQLiteAsyncDialect,\n\t\tprivate schema: RelationalSchemaConfig<TSchema> | undefined,\n\t\tprivate options: LibSQLSessionOptions,\n\t\tprivate tx: Transaction | undefined,\n\t) {\n\t\tsuper(dialect);\n\t\tthis.logger = options.logger ?? 
new NoopLogger();\n\t}\n\n\tprepareQuery<T extends Omit<PreparedQueryConfig, 'run'>>(\n\t\tquery: Query,\n\t\tfields: SelectedFieldsOrdered | undefined,\n\t\texecuteMethod: SQLiteExecuteMethod,\n\t\tisResponseInArrayMode: boolean,\n\t\tcustomResultMapper?: (rows: unknown[][]) => unknown,\n\t): LibSQLPreparedQuery<T> {\n\t\treturn new LibSQLPreparedQuery(\n\t\t\tthis.client,\n\t\t\tquery,\n\t\t\tthis.logger,\n\t\t\tfields,\n\t\t\tthis.tx,\n\t\t\texecuteMethod,\n\t\t\tisResponseInArrayMode,\n\t\t\tcustomResultMapper,\n\t\t);\n\t}\n\n\tasync batch<T extends BatchItem<'sqlite'>[] | readonly BatchItem<'sqlite'>[]>(queries: T) {\n\t\tconst preparedQueries: PreparedQuery[] = [];\n\t\tconst builtQueries: InStatement[] = [];\n\n\t\tfor (const query of queries) {\n\t\t\tconst preparedQuery = query._prepare();\n\t\t\tconst builtQuery = preparedQuery.getQuery();\n\t\t\tpreparedQueries.push(preparedQuery);\n\t\t\tbuiltQueries.push({ sql: builtQuery.sql, args: builtQuery.params as InArgs });\n\t\t}\n\n\t\tconst batchResults = await this.client.batch(builtQueries);\n\t\treturn batchResults.map((result, i) => preparedQueries[i]!.mapResult(result, true));\n\t}\n\n\toverride async transaction<T>(\n\t\ttransaction: (db: LibSQLTransaction<TFullSchema, TSchema>) => T | Promise<T>,\n\t\t_config?: SQLiteTransactionConfig,\n\t): Promise<T> {\n\t\t// TODO: support transaction behavior\n\t\tconst libsqlTx = await this.client.transaction();\n\t\tconst session = new LibSQLSession<TFullSchema, TSchema>(\n\t\t\tthis.client,\n\t\t\tthis.dialect,\n\t\t\tthis.schema,\n\t\t\tthis.options,\n\t\t\tlibsqlTx,\n\t\t);\n\t\tconst tx = new LibSQLTransaction<TFullSchema, TSchema>('async', this.dialect, session, this.schema);\n\t\ttry {\n\t\t\tconst result = await transaction(tx);\n\t\t\tawait libsqlTx.commit();\n\t\t\treturn result;\n\t\t} catch (err) {\n\t\t\tawait libsqlTx.rollback();\n\t\t\tthrow err;\n\t\t}\n\t}\n\n\toverride extractRawAllValueFromBatchResult(result: unknown): unknown {\n\t\treturn (result as ResultSet).rows;\n\t}\n\n\toverride extractRawGetValueFromBatchResult(result: unknown): unknown {\n\t\treturn (result as ResultSet).rows[0];\n\t}\n\n\toverride extractRawValuesValueFromBatchResult(result: unknown): unknown {\n\t\treturn (result as ResultSet).rows;\n\t}\n}\n\nexport class LibSQLTransaction<\n\tTFullSchema extends Record<string, unknown>,\n\tTSchema extends TablesRelationalConfig,\n> extends SQLiteTransaction<'async', ResultSet, TFullSchema, TSchema> {\n\tstatic readonly [entityKind]: string = 'LibSQLTransaction';\n\n\toverride async transaction<T>(transaction: (tx: LibSQLTransaction<TFullSchema, TSchema>) => Promise<T>): Promise<T> {\n\t\tconst savepointName = `sp${this.nestedIndex}`;\n\t\tconst tx = new LibSQLTransaction('async', this.dialect, this.session, this.schema, this.nestedIndex + 1);\n\t\tawait this.session.run(sql.raw(`savepoint ${savepointName}`));\n\t\ttry {\n\t\t\tconst result = await transaction(tx);\n\t\t\tawait this.session.run(sql.raw(`release savepoint ${savepointName}`));\n\t\t\treturn result;\n\t\t} catch (err) {\n\t\t\tawait this.session.run(sql.raw(`rollback to savepoint ${savepointName}`));\n\t\t\tthrow err;\n\t\t}\n\t}\n}\n\nexport class LibSQLPreparedQuery<T extends PreparedQueryConfig = PreparedQueryConfig> extends SQLitePreparedQuery<\n\t{ type: 'async'; run: ResultSet; all: T['all']; get: T['get']; values: T['values']; execute: T['execute'] }\n> {\n\tstatic readonly [entityKind]: string = 'LibSQLPreparedQuery';\n\n\tconstructor(\n\t\tprivate client: Client,\n\t\tquery: 
Query,\n\t\tprivate logger: Logger,\n\t\t/** @internal */ public fields: SelectedFieldsOrdered | undefined,\n\t\tprivate tx: Transaction | undefined,\n\t\texecuteMethod: SQLiteExecuteMethod,\n\t\tprivate _isResponseInArrayMode: boolean,\n\t\t/** @internal */ public customResultMapper?: (\n\t\t\trows: unknown[][],\n\t\t\tmapColumnValue?: (value: unknown) => unknown,\n\t\t) => unknown,\n\t) {\n\t\tsuper('async', executeMethod, query);\n\t\tthis.customResultMapper = customResultMapper;\n\t\tthis.fields = fields;\n\t}\n\n\trun(placeholderValues?: Record<string, unknown>): Promise<ResultSet> {\n\t\tconst params = fillPlaceholders(this.query.params, placeholderValues ?? {});\n\t\tthis.logger.logQuery(this.query.sql, params);\n\t\tconst stmt: InStatement = { sql: this.query.sql, args: params as InArgs };\n\t\treturn this.tx ? this.tx.execute(stmt) : this.client.execute(stmt);\n\t}\n\n\tasync all(placeholderValues?: Record<string, unknown>): Promise<T['all']> {\n\t\tconst { fields, logger, query, tx, client, customResultMapper } = this;\n\t\tif (!fields && !customResultMapper) {\n\t\t\tconst params = fillPlaceholders(query.params, placeholderValues ?? {});\n\t\t\tlogger.logQuery(query.sql, params);\n\t\t\tconst stmt: InStatement = { sql: query.sql, args: params as InArgs };\n\t\t\treturn (tx ? tx.execute(stmt) : client.execute(stmt)).then(({ rows }) => this.mapAllResult(rows));\n\t\t}\n\n\t\tconst rows = await this.values(placeholderValues) as unknown[][];\n\n\t\treturn this.mapAllResult(rows);\n\t}\n\n\toverride mapAllResult(rows: unknown, isFromBatch?: boolean): unknown {\n\t\tif (isFromBatch) {\n\t\t\trows = (rows as ResultSet).rows;\n\t\t}\n\n\t\tif (!this.fields && !this.customResultMapper) {\n\t\t\treturn (rows as unknown[]).map((row) => normalizeRow(row));\n\t\t}\n\n\t\tif (this.customResultMapper) {\n\t\t\treturn this.customResultMapper(rows as unknown[][], normalizeFieldValue) as T['all'];\n\t\t}\n\n\t\treturn (rows as unknown[]).map((row) => {\n\t\t\treturn mapResultRow(\n\t\t\t\tthis.fields!,\n\t\t\t\tArray.prototype.slice.call(row).map((v) => normalizeFieldValue(v)),\n\t\t\t\tthis.joinsNotNullableMap,\n\t\t\t);\n\t\t});\n\t}\n\n\tasync get(placeholderValues?: Record<string, unknown>): Promise<T['get']> {\n\t\tconst { fields, logger, query, tx, client, customResultMapper } = this;\n\t\tif (!fields && !customResultMapper) {\n\t\t\tconst params = fillPlaceholders(query.params, placeholderValues ?? {});\n\t\t\tlogger.logQuery(query.sql, params);\n\t\t\tconst stmt: InStatement = { sql: query.sql, args: params as InArgs };\n\t\t\treturn (tx ? 
tx.execute(stmt) : client.execute(stmt)).then(({ rows }) => this.mapGetResult(rows));\n\t\t}\n\n\t\tconst rows = await this.values(placeholderValues) as unknown[][];\n\n\t\treturn this.mapGetResult(rows);\n\t}\n\n\toverride mapGetResult(rows: unknown, isFromBatch?: boolean): unknown {\n\t\tif (isFromBatch) {\n\t\t\trows = (rows as ResultSet).rows;\n\t\t}\n\n\t\tconst row = (rows as unknown[])[0];\n\n\t\tif (!this.fields && !this.customResultMapper) {\n\t\t\treturn normalizeRow(row);\n\t\t}\n\n\t\tif (!row) {\n\t\t\treturn undefined;\n\t\t}\n\n\t\tif (this.customResultMapper) {\n\t\t\treturn this.customResultMapper(rows as unknown[][], normalizeFieldValue) as T['get'];\n\t\t}\n\n\t\treturn mapResultRow(\n\t\t\tthis.fields!,\n\t\t\tArray.prototype.slice.call(row).map((v) => normalizeFieldValue(v)),\n\t\t\tthis.joinsNotNullableMap,\n\t\t);\n\t}\n\n\tvalues(placeholderValues?: Record<string, unknown>): Promise<T['values']> {\n\t\tconst params = fillPlaceholders(this.query.params, placeholderValues ?? {});\n\t\tthis.logger.logQuery(this.query.sql, params);\n\t\tconst stmt: InStatement = { sql: this.query.sql, args: params as InArgs };\n\t\treturn (this.tx ? this.tx.execute(stmt) : this.client.execute(stmt)).then(({ rows }) => rows) as Promise<\n\t\t\tT['values']\n\t\t>;\n\t}\n\n\t/** @internal */\n\tisResponseInArrayMode(): boolean {\n\t\treturn this._isResponseInArrayMode;\n\t}\n}\n\nfunction normalizeRow(obj: any) {\n\t// The libSQL node-sqlite3 compatibility wrapper returns rows\n\t// that can be accessed both as objects and arrays. Let's\n\t// turn them into objects what's what other SQLite drivers\n\t// do.\n\treturn Object.keys(obj).reduce((acc: Record<string, any>, key) => {\n\t\tif (Object.prototype.propertyIsEnumerable.call(obj, key)) {\n\t\t\tacc[key] = obj[key];\n\t\t}\n\t\treturn acc;\n\t}, {});\n}\n\nfunction normalizeFieldValue(value: unknown) {\n\tif (typeof ArrayBuffer !== 'undefined' && value instanceof ArrayBuffer) { // eslint-disable-line no-instanceof/no-instanceof\n\t\tif (typeof Buffer !== 'undefined') {\n\t\t\tif (!(value instanceof Buffer)) { // eslint-disable-line no-instanceof/no-instanceof\n\t\t\t\treturn Buffer.from(value);\n\t\t\t}\n\t\t\treturn value;\n\t\t}\n\t\tif (typeof TextDecoder !== 'undefined') {\n\t\t\treturn new TextDecoder().decode(value);\n\t\t}\n\t\tthrow new Error('TextDecoder is not available. 
Please provide either Buffer or TextDecoder polyfill.');\n\t}\n\treturn value;\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAEA,oBAA2B;AAE3B,oBAA2B;AAG3B,iBAAkD;AAElD,yBAAkC;AAOlC,qBAAmD;AACnD,mBAA6B;AAQtB,MAAM,sBAGH,6BAAwD;AAAA,EAKjE,YACS,QACR,SACQ,QACA,SACA,IACP;AACD,UAAM,OAAO;AANL;AAEA;AACA;AACA;AAGR,SAAK,SAAS,QAAQ,UAAU,IAAI,yBAAW;AAAA,EAChD;AAAA,EAbA,QAAiB,wBAAU,IAAY;AAAA,EAE/B;AAAA,EAaR,aACC,OACA,QACA,eACA,uBACA,oBACyB;AACzB,WAAO,IAAI;AAAA,MACV,KAAK;AAAA,MACL;AAAA,MACA,KAAK;AAAA,MACL;AAAA,MACA,KAAK;AAAA,MACL;AAAA,MACA;AAAA,MACA;AAAA,IACD;AAAA,EACD;AAAA,EAEA,MAAM,MAAwE,SAAY;AACzF,UAAM,kBAAmC,CAAC;AAC1C,UAAM,eAA8B,CAAC;AAErC,eAAW,SAAS,SAAS;AAC5B,YAAM,gBAAgB,MAAM,SAAS;AACrC,YAAM,aAAa,cAAc,SAAS;AAC1C,sBAAgB,KAAK,aAAa;AAClC,mBAAa,KAAK,EAAE,KAAK,WAAW,KAAK,MAAM,WAAW,OAAiB,CAAC;AAAA,IAC7E;AAEA,UAAM,eAAe,MAAM,KAAK,OAAO,MAAM,YAAY;AACzD,WAAO,aAAa,IAAI,CAAC,QAAQ,MAAM,gBAAgB,CAAC,EAAG,UAAU,QAAQ,IAAI,CAAC;AAAA,EACnF;AAAA,EAEA,MAAe,YACd,aACA,SACa;AAEb,UAAM,WAAW,MAAM,KAAK,OAAO,YAAY;AAC/C,UAAM,UAAU,IAAI;AAAA,MACnB,KAAK;AAAA,MACL,KAAK;AAAA,MACL,KAAK;AAAA,MACL,KAAK;AAAA,MACL;AAAA,IACD;AACA,UAAM,KAAK,IAAI,kBAAwC,SAAS,KAAK,SAAS,SAAS,KAAK,MAAM;AAClG,QAAI;AACH,YAAM,SAAS,MAAM,YAAY,EAAE;AACnC,YAAM,SAAS,OAAO;AACtB,aAAO;AAAA,IACR,SAAS,KAAK;AACb,YAAM,SAAS,SAAS;AACxB,YAAM;AAAA,IACP;AAAA,EACD;AAAA,EAES,kCAAkC,QAA0B;AACpE,WAAQ,OAAqB;AAAA,EAC9B;AAAA,EAES,kCAAkC,QAA0B;AACpE,WAAQ,OAAqB,KAAK,CAAC;AAAA,EACpC;AAAA,EAES,qCAAqC,QAA0B;AACvE,WAAQ,OAAqB;AAAA,EAC9B;AACD;AAEO,MAAM,0BAGH,qCAA4D;AAAA,EACrE,QAAiB,wBAAU,IAAY;AAAA,EAEvC,MAAe,YAAe,aAAsF;AACnH,UAAM,gBAAgB,KAAK,KAAK,WAAW;AAC3C,UAAM,KAAK,IAAI,kBAAkB,SAAS,KAAK,SAAS,KAAK,SAAS,KAAK,QAAQ,KAAK,cAAc,CAAC;AACvG,UAAM,KAAK,QAAQ,IAAI,eAAI,IAAI,aAAa,aAAa,EAAE,CAAC;AAC5D,QAAI;AACH,YAAM,SAAS,MAAM,YAAY,EAAE;AACnC,YAAM,KAAK,QAAQ,IAAI,eAAI,IAAI,qBAAqB,aAAa,EAAE,CAAC;AACpE,aAAO;AAAA,IACR,SAAS,KAAK;AACb,YAAM,KAAK,QAAQ,IAAI,eAAI,IAAI,yBAAyB,aAAa,EAAE,CAAC;AACxE,YAAM;AAAA,IACP;AAAA,EACD;AACD;AAEO,MAAM,4BAAiF,mCAE5F;AAAA,EAGD,YACS,QACR,OACQ,QACgB,QAChB,IACR,eACQ,wBACgB,oBAIvB;AACD,UAAM,SAAS,eAAe,KAAK;AAZ3B;AAEA;AACgB;AAChB;AAEA;AACgB;AAMxB,SAAK,qBAAqB;AAC1B,SAAK,SAAS;AAAA,EACf;AAAA,EAlBA,QAAiB,wBAAU,IAAY;AAAA,EAoBvC,IAAI,mBAAiE;AACpE,UAAM,aAAS,6BAAiB,KAAK,MAAM,QAAQ,qBAAqB,CAAC,CAAC;AAC1E,SAAK,OAAO,SAAS,KAAK,MAAM,KAAK,MAAM;AAC3C,UAAM,OAAoB,EAAE,KAAK,KAAK,MAAM,KAAK,MAAM,OAAiB;AACxE,WAAO,KAAK,KAAK,KAAK,GAAG,QAAQ,IAAI,IAAI,KAAK,OAAO,QAAQ,IAAI;AAAA,EAClE;AAAA,EAEA,MAAM,IAAI,mBAAgE;AACzE,UAAM,EAAE,QAAQ,QAAQ,OAAO,IAAI,QAAQ,mBAAmB,IAAI;AAClE,QAAI,CAAC,UAAU,CAAC,oBAAoB;AACnC,YAAM,aAAS,6BAAiB,MAAM,QAAQ,qBAAqB,CAAC,CAAC;AACrE,aAAO,SAAS,MAAM,KAAK,MAAM;AACjC,YAAM,OAAoB,EAAE,KAAK,MAAM,KAAK,MAAM,OAAiB;AACnE,cAAQ,KAAK,GAAG,QAAQ,IAAI,IAAI,OAAO,QAAQ,IAAI,GAAG,KAAK,CAAC,EAAE,MAAAA,MAAK,MAAM,KAAK,aAAaA,KAAI,CAAC;AAAA,IACjG;AAEA,UAAM,OAAO,MAAM,KAAK,OAAO,iBAAiB;AAEhD,WAAO,KAAK,aAAa,IAAI;AAAA,EAC9B;AAAA,EAES,aAAa,MAAe,aAAgC;AACpE,QAAI,aAAa;AAChB,aAAQ,KAAmB;AAAA,IAC5B;AAEA,QAAI,CAAC,KAAK,UAAU,CAAC,KAAK,oBAAoB;AAC7C,aAAQ,KAAmB,IAAI,CAAC,QAAQ,aAAa,GAAG,CAAC;AAAA,IAC1D;AAEA,QAAI,KAAK,oBAAoB;AAC5B,aAAO,KAAK,mBAAmB,MAAqB,mBAAmB;AAAA,IACxE;AAEA,WAAQ,KAAmB,IAAI,CAAC,QAAQ;AACvC,iBAAO;AAAA,QACN,KAAK;AAAA,QACL,MAAM,UAAU,MAAM,KAAK,GAAG,EAAE,IAAI,CAAC,MAAM,oBAAoB,CAAC,CAAC;AAAA,QACjE,KAAK;AAAA,MACN;AAAA,IACD,CAAC;AAAA,EACF;AAAA,EAEA,MAAM,IAAI,mBAAgE;AACzE,UAAM,EAAE,QAAQ,QAAQ,OAAO,IAAI,QAAQ,mBAAmB,IAAI;AAClE,QAAI,CAAC,UAAU,CAAC,oBAAoB;AACnC,YAAM,aAAS,6BAAiB,MAAM,QAAQ,qBAAqB,CAAC,CAAC;AACrE,aAAO,SAAS,MAAM,KAAK,MAAM;AACjC,YAAM,OAAoB,EAAE,KAAK,MAAM,KAAK,MAAM,OAAiB;AACnE,cAAQ,KAAK,GAAG,QAAQ,IAAI,IAAI,OAA
O,QAAQ,IAAI,GAAG,KAAK,CAAC,EAAE,MAAAA,MAAK,MAAM,KAAK,aAAaA,KAAI,CAAC;AAAA,IACjG;AAEA,UAAM,OAAO,MAAM,KAAK,OAAO,iBAAiB;AAEhD,WAAO,KAAK,aAAa,IAAI;AAAA,EAC9B;AAAA,EAES,aAAa,MAAe,aAAgC;AACpE,QAAI,aAAa;AAChB,aAAQ,KAAmB;AAAA,IAC5B;AAEA,UAAM,MAAO,KAAmB,CAAC;AAEjC,QAAI,CAAC,KAAK,UAAU,CAAC,KAAK,oBAAoB;AAC7C,aAAO,aAAa,GAAG;AAAA,IACxB;AAEA,QAAI,CAAC,KAAK;AACT,aAAO;AAAA,IACR;AAEA,QAAI,KAAK,oBAAoB;AAC5B,aAAO,KAAK,mBAAmB,MAAqB,mBAAmB;AAAA,IACxE;AAEA,eAAO;AAAA,MACN,KAAK;AAAA,MACL,MAAM,UAAU,MAAM,KAAK,GAAG,EAAE,IAAI,CAAC,MAAM,oBAAoB,CAAC,CAAC;AAAA,MACjE,KAAK;AAAA,IACN;AAAA,EACD;AAAA,EAEA,OAAO,mBAAmE;AACzE,UAAM,aAAS,6BAAiB,KAAK,MAAM,QAAQ,qBAAqB,CAAC,CAAC;AAC1E,SAAK,OAAO,SAAS,KAAK,MAAM,KAAK,MAAM;AAC3C,UAAM,OAAoB,EAAE,KAAK,KAAK,MAAM,KAAK,MAAM,OAAiB;AACxE,YAAQ,KAAK,KAAK,KAAK,GAAG,QAAQ,IAAI,IAAI,KAAK,OAAO,QAAQ,IAAI,GAAG,KAAK,CAAC,EAAE,KAAK,MAAM,IAAI;AAAA,EAG7F;AAAA;AAAA,EAGA,wBAAiC;AAChC,WAAO,KAAK;AAAA,EACb;AACD;AAEA,SAAS,aAAa,KAAU;AAK/B,SAAO,OAAO,KAAK,GAAG,EAAE,OAAO,CAAC,KAA0B,QAAQ;AACjE,QAAI,OAAO,UAAU,qBAAqB,KAAK,KAAK,GAAG,GAAG;AACzD,UAAI,GAAG,IAAI,IAAI,GAAG;AAAA,IACnB;AACA,WAAO;AAAA,EACR,GAAG,CAAC,CAAC;AACN;AAEA,SAAS,oBAAoB,OAAgB;AAC5C,MAAI,OAAO,gBAAgB,eAAe,iBAAiB,aAAa;AACvE,QAAI,OAAO,WAAW,aAAa;AAClC,UAAI,EAAE,iBAAiB,SAAS;AAC/B,eAAO,OAAO,KAAK,KAAK;AAAA,MACzB;AACA,aAAO;AAAA,IACR;AACA,QAAI,OAAO,gBAAgB,aAAa;AACvC,aAAO,IAAI,YAAY,EAAE,OAAO,KAAK;AAAA,IACtC;AACA,UAAM,IAAI,MAAM,qFAAqF;AAAA,EACtG;AACA,SAAO;AACR;","names":["rows"]}
+ {"version":3,"sources":["../../src/libsql/session.ts"],"sourcesContent":["import type { Client, InArgs, InStatement, ResultSet, Transaction } from '@libsql/client';\nimport type { BatchItem as BatchItem } from '~/batch.ts';\nimport { entityKind } from '~/entity.ts';\nimport type { Logger } from '~/logger.ts';\nimport { NoopLogger } from '~/logger.ts';\nimport type { RelationalSchemaConfig, TablesRelationalConfig } from '~/relations.ts';\nimport type { PreparedQuery } from '~/session.ts';\nimport { fillPlaceholders, type Query, sql } from '~/sql/sql.ts';\nimport type { SQLiteAsyncDialect } from '~/sqlite-core/dialect.ts';\nimport { SQLiteTransaction } from '~/sqlite-core/index.ts';\nimport type { SelectedFieldsOrdered } from '~/sqlite-core/query-builders/select.types.ts';\nimport type {\n\tPreparedQueryConfig as PreparedQueryConfigBase,\n\tSQLiteExecuteMethod,\n\tSQLiteTransactionConfig,\n} from '~/sqlite-core/session.ts';\nimport { SQLitePreparedQuery, SQLiteSession } from '~/sqlite-core/session.ts';\nimport { mapResultRow } from '~/utils.ts';\n\nexport interface LibSQLSessionOptions {\n\tlogger?: Logger;\n}\n\ntype PreparedQueryConfig = Omit<PreparedQueryConfigBase, 'statement' | 'run'>;\n\nexport class LibSQLSession<\n\tTFullSchema extends Record<string, unknown>,\n\tTSchema extends TablesRelationalConfig,\n> extends SQLiteSession<'async', ResultSet, TFullSchema, TSchema> {\n\tstatic readonly [entityKind]: string = 'LibSQLSession';\n\n\tprivate logger: Logger;\n\n\tconstructor(\n\t\tprivate client: Client,\n\t\tdialect: SQLiteAsyncDialect,\n\t\tprivate schema: RelationalSchemaConfig<TSchema> | undefined,\n\t\tprivate options: LibSQLSessionOptions,\n\t\tprivate tx: Transaction | undefined,\n\t) {\n\t\tsuper(dialect);\n\t\tthis.logger = options.logger ?? 
new NoopLogger();\n\t}\n\n\tprepareQuery<T extends Omit<PreparedQueryConfig, 'run'>>(\n\t\tquery: Query,\n\t\tfields: SelectedFieldsOrdered | undefined,\n\t\texecuteMethod: SQLiteExecuteMethod,\n\t\tisResponseInArrayMode: boolean,\n\t\tcustomResultMapper?: (rows: unknown[][]) => unknown,\n\t): LibSQLPreparedQuery<T> {\n\t\treturn new LibSQLPreparedQuery(\n\t\t\tthis.client,\n\t\t\tquery,\n\t\t\tthis.logger,\n\t\t\tfields,\n\t\t\tthis.tx,\n\t\t\texecuteMethod,\n\t\t\tisResponseInArrayMode,\n\t\t\tcustomResultMapper,\n\t\t);\n\t}\n\n\tasync batch<T extends BatchItem<'sqlite'>[] | readonly BatchItem<'sqlite'>[]>(queries: T) {\n\t\tconst preparedQueries: PreparedQuery[] = [];\n\t\tconst builtQueries: InStatement[] = [];\n\n\t\tfor (const query of queries) {\n\t\t\tconst preparedQuery = query._prepare();\n\t\t\tconst builtQuery = preparedQuery.getQuery();\n\t\t\tpreparedQueries.push(preparedQuery);\n\t\t\tbuiltQueries.push({ sql: builtQuery.sql, args: builtQuery.params as InArgs });\n\t\t}\n\n\t\tconst batchResults = await this.client.batch(builtQueries);\n\t\treturn batchResults.map((result, i) => preparedQueries[i]!.mapResult(result, true));\n\t}\n\n\tasync migrate<T extends BatchItem<'sqlite'>[] | readonly BatchItem<'sqlite'>[]>(queries: T) {\n\t\tconst preparedQueries: PreparedQuery[] = [];\n\t\tconst builtQueries: InStatement[] = [];\n\n\t\tfor (const query of queries) {\n\t\t\tconst preparedQuery = query._prepare();\n\t\t\tconst builtQuery = preparedQuery.getQuery();\n\t\t\tpreparedQueries.push(preparedQuery);\n\t\t\tbuiltQueries.push({ sql: builtQuery.sql, args: builtQuery.params as InArgs });\n\t\t}\n\n\t\tconst batchResults = await this.client.migrate(builtQueries);\n\t\treturn batchResults.map((result, i) => preparedQueries[i]!.mapResult(result, true));\n\t}\n\n\toverride async transaction<T>(\n\t\ttransaction: (db: LibSQLTransaction<TFullSchema, TSchema>) => T | Promise<T>,\n\t\t_config?: SQLiteTransactionConfig,\n\t): Promise<T> {\n\t\t// TODO: support transaction behavior\n\t\tconst libsqlTx = await this.client.transaction();\n\t\tconst session = new LibSQLSession<TFullSchema, TSchema>(\n\t\t\tthis.client,\n\t\t\tthis.dialect,\n\t\t\tthis.schema,\n\t\t\tthis.options,\n\t\t\tlibsqlTx,\n\t\t);\n\t\tconst tx = new LibSQLTransaction<TFullSchema, TSchema>('async', this.dialect, session, this.schema);\n\t\ttry {\n\t\t\tconst result = await transaction(tx);\n\t\t\tawait libsqlTx.commit();\n\t\t\treturn result;\n\t\t} catch (err) {\n\t\t\tawait libsqlTx.rollback();\n\t\t\tthrow err;\n\t\t}\n\t}\n\n\toverride extractRawAllValueFromBatchResult(result: unknown): unknown {\n\t\treturn (result as ResultSet).rows;\n\t}\n\n\toverride extractRawGetValueFromBatchResult(result: unknown): unknown {\n\t\treturn (result as ResultSet).rows[0];\n\t}\n\n\toverride extractRawValuesValueFromBatchResult(result: unknown): unknown {\n\t\treturn (result as ResultSet).rows;\n\t}\n}\n\nexport class LibSQLTransaction<\n\tTFullSchema extends Record<string, unknown>,\n\tTSchema extends TablesRelationalConfig,\n> extends SQLiteTransaction<'async', ResultSet, TFullSchema, TSchema> {\n\tstatic readonly [entityKind]: string = 'LibSQLTransaction';\n\n\toverride async transaction<T>(transaction: (tx: LibSQLTransaction<TFullSchema, TSchema>) => Promise<T>): Promise<T> {\n\t\tconst savepointName = `sp${this.nestedIndex}`;\n\t\tconst tx = new LibSQLTransaction('async', this.dialect, this.session, this.schema, this.nestedIndex + 1);\n\t\tawait this.session.run(sql.raw(`savepoint ${savepointName}`));\n\t\ttry {\n\t\t\tconst 
result = await transaction(tx);\n\t\t\tawait this.session.run(sql.raw(`release savepoint ${savepointName}`));\n\t\t\treturn result;\n\t\t} catch (err) {\n\t\t\tawait this.session.run(sql.raw(`rollback to savepoint ${savepointName}`));\n\t\t\tthrow err;\n\t\t}\n\t}\n}\n\nexport class LibSQLPreparedQuery<T extends PreparedQueryConfig = PreparedQueryConfig> extends SQLitePreparedQuery<\n\t{ type: 'async'; run: ResultSet; all: T['all']; get: T['get']; values: T['values']; execute: T['execute'] }\n> {\n\tstatic readonly [entityKind]: string = 'LibSQLPreparedQuery';\n\n\tconstructor(\n\t\tprivate client: Client,\n\t\tquery: Query,\n\t\tprivate logger: Logger,\n\t\t/** @internal */ public fields: SelectedFieldsOrdered | undefined,\n\t\tprivate tx: Transaction | undefined,\n\t\texecuteMethod: SQLiteExecuteMethod,\n\t\tprivate _isResponseInArrayMode: boolean,\n\t\t/** @internal */ public customResultMapper?: (\n\t\t\trows: unknown[][],\n\t\t\tmapColumnValue?: (value: unknown) => unknown,\n\t\t) => unknown,\n\t) {\n\t\tsuper('async', executeMethod, query);\n\t\tthis.customResultMapper = customResultMapper;\n\t\tthis.fields = fields;\n\t}\n\n\trun(placeholderValues?: Record<string, unknown>): Promise<ResultSet> {\n\t\tconst params = fillPlaceholders(this.query.params, placeholderValues ?? {});\n\t\tthis.logger.logQuery(this.query.sql, params);\n\t\tconst stmt: InStatement = { sql: this.query.sql, args: params as InArgs };\n\t\treturn this.tx ? this.tx.execute(stmt) : this.client.execute(stmt);\n\t}\n\n\tasync all(placeholderValues?: Record<string, unknown>): Promise<T['all']> {\n\t\tconst { fields, logger, query, tx, client, customResultMapper } = this;\n\t\tif (!fields && !customResultMapper) {\n\t\t\tconst params = fillPlaceholders(query.params, placeholderValues ?? {});\n\t\t\tlogger.logQuery(query.sql, params);\n\t\t\tconst stmt: InStatement = { sql: query.sql, args: params as InArgs };\n\t\t\treturn (tx ? tx.execute(stmt) : client.execute(stmt)).then(({ rows }) => this.mapAllResult(rows));\n\t\t}\n\n\t\tconst rows = await this.values(placeholderValues) as unknown[][];\n\n\t\treturn this.mapAllResult(rows);\n\t}\n\n\toverride mapAllResult(rows: unknown, isFromBatch?: boolean): unknown {\n\t\tif (isFromBatch) {\n\t\t\trows = (rows as ResultSet).rows;\n\t\t}\n\n\t\tif (!this.fields && !this.customResultMapper) {\n\t\t\treturn (rows as unknown[]).map((row) => normalizeRow(row));\n\t\t}\n\n\t\tif (this.customResultMapper) {\n\t\t\treturn this.customResultMapper(rows as unknown[][], normalizeFieldValue) as T['all'];\n\t\t}\n\n\t\treturn (rows as unknown[]).map((row) => {\n\t\t\treturn mapResultRow(\n\t\t\t\tthis.fields!,\n\t\t\t\tArray.prototype.slice.call(row).map((v) => normalizeFieldValue(v)),\n\t\t\t\tthis.joinsNotNullableMap,\n\t\t\t);\n\t\t});\n\t}\n\n\tasync get(placeholderValues?: Record<string, unknown>): Promise<T['get']> {\n\t\tconst { fields, logger, query, tx, client, customResultMapper } = this;\n\t\tif (!fields && !customResultMapper) {\n\t\t\tconst params = fillPlaceholders(query.params, placeholderValues ?? {});\n\t\t\tlogger.logQuery(query.sql, params);\n\t\t\tconst stmt: InStatement = { sql: query.sql, args: params as InArgs };\n\t\t\treturn (tx ? 
tx.execute(stmt) : client.execute(stmt)).then(({ rows }) => this.mapGetResult(rows));\n\t\t}\n\n\t\tconst rows = await this.values(placeholderValues) as unknown[][];\n\n\t\treturn this.mapGetResult(rows);\n\t}\n\n\toverride mapGetResult(rows: unknown, isFromBatch?: boolean): unknown {\n\t\tif (isFromBatch) {\n\t\t\trows = (rows as ResultSet).rows;\n\t\t}\n\n\t\tconst row = (rows as unknown[])[0];\n\n\t\tif (!this.fields && !this.customResultMapper) {\n\t\t\treturn normalizeRow(row);\n\t\t}\n\n\t\tif (!row) {\n\t\t\treturn undefined;\n\t\t}\n\n\t\tif (this.customResultMapper) {\n\t\t\treturn this.customResultMapper(rows as unknown[][], normalizeFieldValue) as T['get'];\n\t\t}\n\n\t\treturn mapResultRow(\n\t\t\tthis.fields!,\n\t\t\tArray.prototype.slice.call(row).map((v) => normalizeFieldValue(v)),\n\t\t\tthis.joinsNotNullableMap,\n\t\t);\n\t}\n\n\tvalues(placeholderValues?: Record<string, unknown>): Promise<T['values']> {\n\t\tconst params = fillPlaceholders(this.query.params, placeholderValues ?? {});\n\t\tthis.logger.logQuery(this.query.sql, params);\n\t\tconst stmt: InStatement = { sql: this.query.sql, args: params as InArgs };\n\t\treturn (this.tx ? this.tx.execute(stmt) : this.client.execute(stmt)).then(({ rows }) => rows) as Promise<\n\t\t\tT['values']\n\t\t>;\n\t}\n\n\t/** @internal */\n\tisResponseInArrayMode(): boolean {\n\t\treturn this._isResponseInArrayMode;\n\t}\n}\n\nfunction normalizeRow(obj: any) {\n\t// The libSQL node-sqlite3 compatibility wrapper returns rows\n\t// that can be accessed both as objects and arrays. Let's\n\t// turn them into objects what's what other SQLite drivers\n\t// do.\n\treturn Object.keys(obj).reduce((acc: Record<string, any>, key) => {\n\t\tif (Object.prototype.propertyIsEnumerable.call(obj, key)) {\n\t\t\tacc[key] = obj[key];\n\t\t}\n\t\treturn acc;\n\t}, {});\n}\n\nfunction normalizeFieldValue(value: unknown) {\n\tif (typeof ArrayBuffer !== 'undefined' && value instanceof ArrayBuffer) { // eslint-disable-line no-instanceof/no-instanceof\n\t\tif (typeof Buffer !== 'undefined') {\n\t\t\tif (!(value instanceof Buffer)) { // eslint-disable-line no-instanceof/no-instanceof\n\t\t\t\treturn Buffer.from(value);\n\t\t\t}\n\t\t\treturn value;\n\t\t}\n\t\tif (typeof TextDecoder !== 'undefined') {\n\t\t\treturn new TextDecoder().decode(value);\n\t\t}\n\t\tthrow new Error('TextDecoder is not available. 
Please provide either Buffer or TextDecoder polyfill.');\n\t}\n\treturn value;\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAEA,oBAA2B;AAE3B,oBAA2B;AAG3B,iBAAkD;AAElD,yBAAkC;AAOlC,qBAAmD;AACnD,mBAA6B;AAQtB,MAAM,sBAGH,6BAAwD;AAAA,EAKjE,YACS,QACR,SACQ,QACA,SACA,IACP;AACD,UAAM,OAAO;AANL;AAEA;AACA;AACA;AAGR,SAAK,SAAS,QAAQ,UAAU,IAAI,yBAAW;AAAA,EAChD;AAAA,EAbA,QAAiB,wBAAU,IAAY;AAAA,EAE/B;AAAA,EAaR,aACC,OACA,QACA,eACA,uBACA,oBACyB;AACzB,WAAO,IAAI;AAAA,MACV,KAAK;AAAA,MACL;AAAA,MACA,KAAK;AAAA,MACL;AAAA,MACA,KAAK;AAAA,MACL;AAAA,MACA;AAAA,MACA;AAAA,IACD;AAAA,EACD;AAAA,EAEA,MAAM,MAAwE,SAAY;AACzF,UAAM,kBAAmC,CAAC;AAC1C,UAAM,eAA8B,CAAC;AAErC,eAAW,SAAS,SAAS;AAC5B,YAAM,gBAAgB,MAAM,SAAS;AACrC,YAAM,aAAa,cAAc,SAAS;AAC1C,sBAAgB,KAAK,aAAa;AAClC,mBAAa,KAAK,EAAE,KAAK,WAAW,KAAK,MAAM,WAAW,OAAiB,CAAC;AAAA,IAC7E;AAEA,UAAM,eAAe,MAAM,KAAK,OAAO,MAAM,YAAY;AACzD,WAAO,aAAa,IAAI,CAAC,QAAQ,MAAM,gBAAgB,CAAC,EAAG,UAAU,QAAQ,IAAI,CAAC;AAAA,EACnF;AAAA,EAEA,MAAM,QAA0E,SAAY;AAC3F,UAAM,kBAAmC,CAAC;AAC1C,UAAM,eAA8B,CAAC;AAErC,eAAW,SAAS,SAAS;AAC5B,YAAM,gBAAgB,MAAM,SAAS;AACrC,YAAM,aAAa,cAAc,SAAS;AAC1C,sBAAgB,KAAK,aAAa;AAClC,mBAAa,KAAK,EAAE,KAAK,WAAW,KAAK,MAAM,WAAW,OAAiB,CAAC;AAAA,IAC7E;AAEA,UAAM,eAAe,MAAM,KAAK,OAAO,QAAQ,YAAY;AAC3D,WAAO,aAAa,IAAI,CAAC,QAAQ,MAAM,gBAAgB,CAAC,EAAG,UAAU,QAAQ,IAAI,CAAC;AAAA,EACnF;AAAA,EAEA,MAAe,YACd,aACA,SACa;AAEb,UAAM,WAAW,MAAM,KAAK,OAAO,YAAY;AAC/C,UAAM,UAAU,IAAI;AAAA,MACnB,KAAK;AAAA,MACL,KAAK;AAAA,MACL,KAAK;AAAA,MACL,KAAK;AAAA,MACL;AAAA,IACD;AACA,UAAM,KAAK,IAAI,kBAAwC,SAAS,KAAK,SAAS,SAAS,KAAK,MAAM;AAClG,QAAI;AACH,YAAM,SAAS,MAAM,YAAY,EAAE;AACnC,YAAM,SAAS,OAAO;AACtB,aAAO;AAAA,IACR,SAAS,KAAK;AACb,YAAM,SAAS,SAAS;AACxB,YAAM;AAAA,IACP;AAAA,EACD;AAAA,EAES,kCAAkC,QAA0B;AACpE,WAAQ,OAAqB;AAAA,EAC9B;AAAA,EAES,kCAAkC,QAA0B;AACpE,WAAQ,OAAqB,KAAK,CAAC;AAAA,EACpC;AAAA,EAES,qCAAqC,QAA0B;AACvE,WAAQ,OAAqB;AAAA,EAC9B;AACD;AAEO,MAAM,0BAGH,qCAA4D;AAAA,EACrE,QAAiB,wBAAU,IAAY;AAAA,EAEvC,MAAe,YAAe,aAAsF;AACnH,UAAM,gBAAgB,KAAK,KAAK,WAAW;AAC3C,UAAM,KAAK,IAAI,kBAAkB,SAAS,KAAK,SAAS,KAAK,SAAS,KAAK,QAAQ,KAAK,cAAc,CAAC;AACvG,UAAM,KAAK,QAAQ,IAAI,eAAI,IAAI,aAAa,aAAa,EAAE,CAAC;AAC5D,QAAI;AACH,YAAM,SAAS,MAAM,YAAY,EAAE;AACnC,YAAM,KAAK,QAAQ,IAAI,eAAI,IAAI,qBAAqB,aAAa,EAAE,CAAC;AACpE,aAAO;AAAA,IACR,SAAS,KAAK;AACb,YAAM,KAAK,QAAQ,IAAI,eAAI,IAAI,yBAAyB,aAAa,EAAE,CAAC;AACxE,YAAM;AAAA,IACP;AAAA,EACD;AACD;AAEO,MAAM,4BAAiF,mCAE5F;AAAA,EAGD,YACS,QACR,OACQ,QACgB,QAChB,IACR,eACQ,wBACgB,oBAIvB;AACD,UAAM,SAAS,eAAe,KAAK;AAZ3B;AAEA;AACgB;AAChB;AAEA;AACgB;AAMxB,SAAK,qBAAqB;AAC1B,SAAK,SAAS;AAAA,EACf;AAAA,EAlBA,QAAiB,wBAAU,IAAY;AAAA,EAoBvC,IAAI,mBAAiE;AACpE,UAAM,aAAS,6BAAiB,KAAK,MAAM,QAAQ,qBAAqB,CAAC,CAAC;AAC1E,SAAK,OAAO,SAAS,KAAK,MAAM,KAAK,MAAM;AAC3C,UAAM,OAAoB,EAAE,KAAK,KAAK,MAAM,KAAK,MAAM,OAAiB;AACxE,WAAO,KAAK,KAAK,KAAK,GAAG,QAAQ,IAAI,IAAI,KAAK,OAAO,QAAQ,IAAI;AAAA,EAClE;AAAA,EAEA,MAAM,IAAI,mBAAgE;AACzE,UAAM,EAAE,QAAQ,QAAQ,OAAO,IAAI,QAAQ,mBAAmB,IAAI;AAClE,QAAI,CAAC,UAAU,CAAC,oBAAoB;AACnC,YAAM,aAAS,6BAAiB,MAAM,QAAQ,qBAAqB,CAAC,CAAC;AACrE,aAAO,SAAS,MAAM,KAAK,MAAM;AACjC,YAAM,OAAoB,EAAE,KAAK,MAAM,KAAK,MAAM,OAAiB;AACnE,cAAQ,KAAK,GAAG,QAAQ,IAAI,IAAI,OAAO,QAAQ,IAAI,GAAG,KAAK,CAAC,EAAE,MAAAA,MAAK,MAAM,KAAK,aAAaA,KAAI,CAAC;AAAA,IACjG;AAEA,UAAM,OAAO,MAAM,KAAK,OAAO,iBAAiB;AAEhD,WAAO,KAAK,aAAa,IAAI;AAAA,EAC9B;AAAA,EAES,aAAa,MAAe,aAAgC;AACpE,QAAI,aAAa;AAChB,aAAQ,KAAmB;AAAA,IAC5B;AAEA,QAAI,CAAC,KAAK,UAAU,CAAC,KAAK,oBAAoB;AAC7C,aAAQ,KAAmB,IAAI,CAAC,QAAQ,aAAa,GAAG,CAAC;AAAA,IAC1D;AAEA,QAAI,KAAK,oBAAoB;AAC5B,aAAO,KAAK,mBAAmB,MAAqB,mBAAmB;AAAA,IACxE;AAEA,WAAQ,KAAmB,IAAI,CAAC,QAAQ;AACvC,iBAAO;AAAA,QACN,KAAK;AAAA,QACL,MAAM,UAAU,MAAM,KAAK,GAAG,EAAE,IA
AI,CAAC,MAAM,oBAAoB,CAAC,CAAC;AAAA,QACjE,KAAK;AAAA,MACN;AAAA,IACD,CAAC;AAAA,EACF;AAAA,EAEA,MAAM,IAAI,mBAAgE;AACzE,UAAM,EAAE,QAAQ,QAAQ,OAAO,IAAI,QAAQ,mBAAmB,IAAI;AAClE,QAAI,CAAC,UAAU,CAAC,oBAAoB;AACnC,YAAM,aAAS,6BAAiB,MAAM,QAAQ,qBAAqB,CAAC,CAAC;AACrE,aAAO,SAAS,MAAM,KAAK,MAAM;AACjC,YAAM,OAAoB,EAAE,KAAK,MAAM,KAAK,MAAM,OAAiB;AACnE,cAAQ,KAAK,GAAG,QAAQ,IAAI,IAAI,OAAO,QAAQ,IAAI,GAAG,KAAK,CAAC,EAAE,MAAAA,MAAK,MAAM,KAAK,aAAaA,KAAI,CAAC;AAAA,IACjG;AAEA,UAAM,OAAO,MAAM,KAAK,OAAO,iBAAiB;AAEhD,WAAO,KAAK,aAAa,IAAI;AAAA,EAC9B;AAAA,EAES,aAAa,MAAe,aAAgC;AACpE,QAAI,aAAa;AAChB,aAAQ,KAAmB;AAAA,IAC5B;AAEA,UAAM,MAAO,KAAmB,CAAC;AAEjC,QAAI,CAAC,KAAK,UAAU,CAAC,KAAK,oBAAoB;AAC7C,aAAO,aAAa,GAAG;AAAA,IACxB;AAEA,QAAI,CAAC,KAAK;AACT,aAAO;AAAA,IACR;AAEA,QAAI,KAAK,oBAAoB;AAC5B,aAAO,KAAK,mBAAmB,MAAqB,mBAAmB;AAAA,IACxE;AAEA,eAAO;AAAA,MACN,KAAK;AAAA,MACL,MAAM,UAAU,MAAM,KAAK,GAAG,EAAE,IAAI,CAAC,MAAM,oBAAoB,CAAC,CAAC;AAAA,MACjE,KAAK;AAAA,IACN;AAAA,EACD;AAAA,EAEA,OAAO,mBAAmE;AACzE,UAAM,aAAS,6BAAiB,KAAK,MAAM,QAAQ,qBAAqB,CAAC,CAAC;AAC1E,SAAK,OAAO,SAAS,KAAK,MAAM,KAAK,MAAM;AAC3C,UAAM,OAAoB,EAAE,KAAK,KAAK,MAAM,KAAK,MAAM,OAAiB;AACxE,YAAQ,KAAK,KAAK,KAAK,GAAG,QAAQ,IAAI,IAAI,KAAK,OAAO,QAAQ,IAAI,GAAG,KAAK,CAAC,EAAE,KAAK,MAAM,IAAI;AAAA,EAG7F;AAAA;AAAA,EAGA,wBAAiC;AAChC,WAAO,KAAK;AAAA,EACb;AACD;AAEA,SAAS,aAAa,KAAU;AAK/B,SAAO,OAAO,KAAK,GAAG,EAAE,OAAO,CAAC,KAA0B,QAAQ;AACjE,QAAI,OAAO,UAAU,qBAAqB,KAAK,KAAK,GAAG,GAAG;AACzD,UAAI,GAAG,IAAI,IAAI,GAAG;AAAA,IACnB;AACA,WAAO;AAAA,EACR,GAAG,CAAC,CAAC;AACN;AAEA,SAAS,oBAAoB,OAAgB;AAC5C,MAAI,OAAO,gBAAgB,eAAe,iBAAiB,aAAa;AACvE,QAAI,OAAO,WAAW,aAAa;AAClC,UAAI,EAAE,iBAAiB,SAAS;AAC/B,eAAO,OAAO,KAAK,KAAK;AAAA,MACzB;AACA,aAAO;AAAA,IACR;AACA,QAAI,OAAO,gBAAgB,aAAa;AACvC,aAAO,IAAI,YAAY,EAAE,OAAO,KAAK;AAAA,IACtC;AACA,UAAM,IAAI,MAAM,qFAAqF;AAAA,EACtG;AACA,SAAO;AACR;","names":["rows"]}
@@ -23,6 +23,7 @@ export declare class LibSQLSession<TFullSchema extends Record<string, unknown>,
23
23
  constructor(client: Client, dialect: SQLiteAsyncDialect, schema: RelationalSchemaConfig<TSchema> | undefined, options: LibSQLSessionOptions, tx: Transaction | undefined);
24
24
  prepareQuery<T extends Omit<PreparedQueryConfig, 'run'>>(query: Query, fields: SelectedFieldsOrdered | undefined, executeMethod: SQLiteExecuteMethod, isResponseInArrayMode: boolean, customResultMapper?: (rows: unknown[][]) => unknown): LibSQLPreparedQuery<T>;
25
25
  batch<T extends BatchItem<'sqlite'>[] | readonly BatchItem<'sqlite'>[]>(queries: T): Promise<unknown[]>;
26
+ migrate<T extends BatchItem<'sqlite'>[] | readonly BatchItem<'sqlite'>[]>(queries: T): Promise<unknown[]>;
26
27
  transaction<T>(transaction: (db: LibSQLTransaction<TFullSchema, TSchema>) => T | Promise<T>, _config?: SQLiteTransactionConfig): Promise<T>;
27
28
  extractRawAllValueFromBatchResult(result: unknown): unknown;
28
29
  extractRawGetValueFromBatchResult(result: unknown): unknown;
@@ -23,6 +23,7 @@ export declare class LibSQLSession<TFullSchema extends Record<string, unknown>,
23
23
  constructor(client: Client, dialect: SQLiteAsyncDialect, schema: RelationalSchemaConfig<TSchema> | undefined, options: LibSQLSessionOptions, tx: Transaction | undefined);
24
24
  prepareQuery<T extends Omit<PreparedQueryConfig, 'run'>>(query: Query, fields: SelectedFieldsOrdered | undefined, executeMethod: SQLiteExecuteMethod, isResponseInArrayMode: boolean, customResultMapper?: (rows: unknown[][]) => unknown): LibSQLPreparedQuery<T>;
25
25
  batch<T extends BatchItem<'sqlite'>[] | readonly BatchItem<'sqlite'>[]>(queries: T): Promise<unknown[]>;
26
+ migrate<T extends BatchItem<'sqlite'>[] | readonly BatchItem<'sqlite'>[]>(queries: T): Promise<unknown[]>;
26
27
  transaction<T>(transaction: (db: LibSQLTransaction<TFullSchema, TSchema>) => T | Promise<T>, _config?: SQLiteTransactionConfig): Promise<T>;
27
28
  extractRawAllValueFromBatchResult(result: unknown): unknown;
28
29
  extractRawGetValueFromBatchResult(result: unknown): unknown;
package/libsql/session.js CHANGED
@@ -39,6 +39,18 @@ class LibSQLSession extends SQLiteSession {
39
39
  const batchResults = await this.client.batch(builtQueries);
40
40
  return batchResults.map((result, i) => preparedQueries[i].mapResult(result, true));
41
41
  }
42
+ async migrate(queries) {
43
+ const preparedQueries = [];
44
+ const builtQueries = [];
45
+ for (const query of queries) {
46
+ const preparedQuery = query._prepare();
47
+ const builtQuery = preparedQuery.getQuery();
48
+ preparedQueries.push(preparedQuery);
49
+ builtQueries.push({ sql: builtQuery.sql, args: builtQuery.params });
50
+ }
51
+ const batchResults = await this.client.migrate(builtQueries);
52
+ return batchResults.map((result, i) => preparedQueries[i].mapResult(result, true));
53
+ }
42
54
  async transaction(transaction, _config) {
43
55
  const libsqlTx = await this.client.transaction();
44
56
  const session = new LibSQLSession(
@@ -1 +1 @@
1
- {"version":3,"sources":["../../src/libsql/session.ts"],"sourcesContent":["import type { Client, InArgs, InStatement, ResultSet, Transaction } from '@libsql/client';\nimport type { BatchItem as BatchItem } from '~/batch.ts';\nimport { entityKind } from '~/entity.ts';\nimport type { Logger } from '~/logger.ts';\nimport { NoopLogger } from '~/logger.ts';\nimport type { RelationalSchemaConfig, TablesRelationalConfig } from '~/relations.ts';\nimport type { PreparedQuery } from '~/session.ts';\nimport { fillPlaceholders, type Query, sql } from '~/sql/sql.ts';\nimport type { SQLiteAsyncDialect } from '~/sqlite-core/dialect.ts';\nimport { SQLiteTransaction } from '~/sqlite-core/index.ts';\nimport type { SelectedFieldsOrdered } from '~/sqlite-core/query-builders/select.types.ts';\nimport type {\n\tPreparedQueryConfig as PreparedQueryConfigBase,\n\tSQLiteExecuteMethod,\n\tSQLiteTransactionConfig,\n} from '~/sqlite-core/session.ts';\nimport { SQLitePreparedQuery, SQLiteSession } from '~/sqlite-core/session.ts';\nimport { mapResultRow } from '~/utils.ts';\n\nexport interface LibSQLSessionOptions {\n\tlogger?: Logger;\n}\n\ntype PreparedQueryConfig = Omit<PreparedQueryConfigBase, 'statement' | 'run'>;\n\nexport class LibSQLSession<\n\tTFullSchema extends Record<string, unknown>,\n\tTSchema extends TablesRelationalConfig,\n> extends SQLiteSession<'async', ResultSet, TFullSchema, TSchema> {\n\tstatic readonly [entityKind]: string = 'LibSQLSession';\n\n\tprivate logger: Logger;\n\n\tconstructor(\n\t\tprivate client: Client,\n\t\tdialect: SQLiteAsyncDialect,\n\t\tprivate schema: RelationalSchemaConfig<TSchema> | undefined,\n\t\tprivate options: LibSQLSessionOptions,\n\t\tprivate tx: Transaction | undefined,\n\t) {\n\t\tsuper(dialect);\n\t\tthis.logger = options.logger ?? 
new NoopLogger();\n\t}\n\n\tprepareQuery<T extends Omit<PreparedQueryConfig, 'run'>>(\n\t\tquery: Query,\n\t\tfields: SelectedFieldsOrdered | undefined,\n\t\texecuteMethod: SQLiteExecuteMethod,\n\t\tisResponseInArrayMode: boolean,\n\t\tcustomResultMapper?: (rows: unknown[][]) => unknown,\n\t): LibSQLPreparedQuery<T> {\n\t\treturn new LibSQLPreparedQuery(\n\t\t\tthis.client,\n\t\t\tquery,\n\t\t\tthis.logger,\n\t\t\tfields,\n\t\t\tthis.tx,\n\t\t\texecuteMethod,\n\t\t\tisResponseInArrayMode,\n\t\t\tcustomResultMapper,\n\t\t);\n\t}\n\n\tasync batch<T extends BatchItem<'sqlite'>[] | readonly BatchItem<'sqlite'>[]>(queries: T) {\n\t\tconst preparedQueries: PreparedQuery[] = [];\n\t\tconst builtQueries: InStatement[] = [];\n\n\t\tfor (const query of queries) {\n\t\t\tconst preparedQuery = query._prepare();\n\t\t\tconst builtQuery = preparedQuery.getQuery();\n\t\t\tpreparedQueries.push(preparedQuery);\n\t\t\tbuiltQueries.push({ sql: builtQuery.sql, args: builtQuery.params as InArgs });\n\t\t}\n\n\t\tconst batchResults = await this.client.batch(builtQueries);\n\t\treturn batchResults.map((result, i) => preparedQueries[i]!.mapResult(result, true));\n\t}\n\n\toverride async transaction<T>(\n\t\ttransaction: (db: LibSQLTransaction<TFullSchema, TSchema>) => T | Promise<T>,\n\t\t_config?: SQLiteTransactionConfig,\n\t): Promise<T> {\n\t\t// TODO: support transaction behavior\n\t\tconst libsqlTx = await this.client.transaction();\n\t\tconst session = new LibSQLSession<TFullSchema, TSchema>(\n\t\t\tthis.client,\n\t\t\tthis.dialect,\n\t\t\tthis.schema,\n\t\t\tthis.options,\n\t\t\tlibsqlTx,\n\t\t);\n\t\tconst tx = new LibSQLTransaction<TFullSchema, TSchema>('async', this.dialect, session, this.schema);\n\t\ttry {\n\t\t\tconst result = await transaction(tx);\n\t\t\tawait libsqlTx.commit();\n\t\t\treturn result;\n\t\t} catch (err) {\n\t\t\tawait libsqlTx.rollback();\n\t\t\tthrow err;\n\t\t}\n\t}\n\n\toverride extractRawAllValueFromBatchResult(result: unknown): unknown {\n\t\treturn (result as ResultSet).rows;\n\t}\n\n\toverride extractRawGetValueFromBatchResult(result: unknown): unknown {\n\t\treturn (result as ResultSet).rows[0];\n\t}\n\n\toverride extractRawValuesValueFromBatchResult(result: unknown): unknown {\n\t\treturn (result as ResultSet).rows;\n\t}\n}\n\nexport class LibSQLTransaction<\n\tTFullSchema extends Record<string, unknown>,\n\tTSchema extends TablesRelationalConfig,\n> extends SQLiteTransaction<'async', ResultSet, TFullSchema, TSchema> {\n\tstatic readonly [entityKind]: string = 'LibSQLTransaction';\n\n\toverride async transaction<T>(transaction: (tx: LibSQLTransaction<TFullSchema, TSchema>) => Promise<T>): Promise<T> {\n\t\tconst savepointName = `sp${this.nestedIndex}`;\n\t\tconst tx = new LibSQLTransaction('async', this.dialect, this.session, this.schema, this.nestedIndex + 1);\n\t\tawait this.session.run(sql.raw(`savepoint ${savepointName}`));\n\t\ttry {\n\t\t\tconst result = await transaction(tx);\n\t\t\tawait this.session.run(sql.raw(`release savepoint ${savepointName}`));\n\t\t\treturn result;\n\t\t} catch (err) {\n\t\t\tawait this.session.run(sql.raw(`rollback to savepoint ${savepointName}`));\n\t\t\tthrow err;\n\t\t}\n\t}\n}\n\nexport class LibSQLPreparedQuery<T extends PreparedQueryConfig = PreparedQueryConfig> extends SQLitePreparedQuery<\n\t{ type: 'async'; run: ResultSet; all: T['all']; get: T['get']; values: T['values']; execute: T['execute'] }\n> {\n\tstatic readonly [entityKind]: string = 'LibSQLPreparedQuery';\n\n\tconstructor(\n\t\tprivate client: Client,\n\t\tquery: 
Query,\n\t\tprivate logger: Logger,\n\t\t/** @internal */ public fields: SelectedFieldsOrdered | undefined,\n\t\tprivate tx: Transaction | undefined,\n\t\texecuteMethod: SQLiteExecuteMethod,\n\t\tprivate _isResponseInArrayMode: boolean,\n\t\t/** @internal */ public customResultMapper?: (\n\t\t\trows: unknown[][],\n\t\t\tmapColumnValue?: (value: unknown) => unknown,\n\t\t) => unknown,\n\t) {\n\t\tsuper('async', executeMethod, query);\n\t\tthis.customResultMapper = customResultMapper;\n\t\tthis.fields = fields;\n\t}\n\n\trun(placeholderValues?: Record<string, unknown>): Promise<ResultSet> {\n\t\tconst params = fillPlaceholders(this.query.params, placeholderValues ?? {});\n\t\tthis.logger.logQuery(this.query.sql, params);\n\t\tconst stmt: InStatement = { sql: this.query.sql, args: params as InArgs };\n\t\treturn this.tx ? this.tx.execute(stmt) : this.client.execute(stmt);\n\t}\n\n\tasync all(placeholderValues?: Record<string, unknown>): Promise<T['all']> {\n\t\tconst { fields, logger, query, tx, client, customResultMapper } = this;\n\t\tif (!fields && !customResultMapper) {\n\t\t\tconst params = fillPlaceholders(query.params, placeholderValues ?? {});\n\t\t\tlogger.logQuery(query.sql, params);\n\t\t\tconst stmt: InStatement = { sql: query.sql, args: params as InArgs };\n\t\t\treturn (tx ? tx.execute(stmt) : client.execute(stmt)).then(({ rows }) => this.mapAllResult(rows));\n\t\t}\n\n\t\tconst rows = await this.values(placeholderValues) as unknown[][];\n\n\t\treturn this.mapAllResult(rows);\n\t}\n\n\toverride mapAllResult(rows: unknown, isFromBatch?: boolean): unknown {\n\t\tif (isFromBatch) {\n\t\t\trows = (rows as ResultSet).rows;\n\t\t}\n\n\t\tif (!this.fields && !this.customResultMapper) {\n\t\t\treturn (rows as unknown[]).map((row) => normalizeRow(row));\n\t\t}\n\n\t\tif (this.customResultMapper) {\n\t\t\treturn this.customResultMapper(rows as unknown[][], normalizeFieldValue) as T['all'];\n\t\t}\n\n\t\treturn (rows as unknown[]).map((row) => {\n\t\t\treturn mapResultRow(\n\t\t\t\tthis.fields!,\n\t\t\t\tArray.prototype.slice.call(row).map((v) => normalizeFieldValue(v)),\n\t\t\t\tthis.joinsNotNullableMap,\n\t\t\t);\n\t\t});\n\t}\n\n\tasync get(placeholderValues?: Record<string, unknown>): Promise<T['get']> {\n\t\tconst { fields, logger, query, tx, client, customResultMapper } = this;\n\t\tif (!fields && !customResultMapper) {\n\t\t\tconst params = fillPlaceholders(query.params, placeholderValues ?? {});\n\t\t\tlogger.logQuery(query.sql, params);\n\t\t\tconst stmt: InStatement = { sql: query.sql, args: params as InArgs };\n\t\t\treturn (tx ? 
tx.execute(stmt) : client.execute(stmt)).then(({ rows }) => this.mapGetResult(rows));\n\t\t}\n\n\t\tconst rows = await this.values(placeholderValues) as unknown[][];\n\n\t\treturn this.mapGetResult(rows);\n\t}\n\n\toverride mapGetResult(rows: unknown, isFromBatch?: boolean): unknown {\n\t\tif (isFromBatch) {\n\t\t\trows = (rows as ResultSet).rows;\n\t\t}\n\n\t\tconst row = (rows as unknown[])[0];\n\n\t\tif (!this.fields && !this.customResultMapper) {\n\t\t\treturn normalizeRow(row);\n\t\t}\n\n\t\tif (!row) {\n\t\t\treturn undefined;\n\t\t}\n\n\t\tif (this.customResultMapper) {\n\t\t\treturn this.customResultMapper(rows as unknown[][], normalizeFieldValue) as T['get'];\n\t\t}\n\n\t\treturn mapResultRow(\n\t\t\tthis.fields!,\n\t\t\tArray.prototype.slice.call(row).map((v) => normalizeFieldValue(v)),\n\t\t\tthis.joinsNotNullableMap,\n\t\t);\n\t}\n\n\tvalues(placeholderValues?: Record<string, unknown>): Promise<T['values']> {\n\t\tconst params = fillPlaceholders(this.query.params, placeholderValues ?? {});\n\t\tthis.logger.logQuery(this.query.sql, params);\n\t\tconst stmt: InStatement = { sql: this.query.sql, args: params as InArgs };\n\t\treturn (this.tx ? this.tx.execute(stmt) : this.client.execute(stmt)).then(({ rows }) => rows) as Promise<\n\t\t\tT['values']\n\t\t>;\n\t}\n\n\t/** @internal */\n\tisResponseInArrayMode(): boolean {\n\t\treturn this._isResponseInArrayMode;\n\t}\n}\n\nfunction normalizeRow(obj: any) {\n\t// The libSQL node-sqlite3 compatibility wrapper returns rows\n\t// that can be accessed both as objects and arrays. Let's\n\t// turn them into objects what's what other SQLite drivers\n\t// do.\n\treturn Object.keys(obj).reduce((acc: Record<string, any>, key) => {\n\t\tif (Object.prototype.propertyIsEnumerable.call(obj, key)) {\n\t\t\tacc[key] = obj[key];\n\t\t}\n\t\treturn acc;\n\t}, {});\n}\n\nfunction normalizeFieldValue(value: unknown) {\n\tif (typeof ArrayBuffer !== 'undefined' && value instanceof ArrayBuffer) { // eslint-disable-line no-instanceof/no-instanceof\n\t\tif (typeof Buffer !== 'undefined') {\n\t\t\tif (!(value instanceof Buffer)) { // eslint-disable-line no-instanceof/no-instanceof\n\t\t\t\treturn Buffer.from(value);\n\t\t\t}\n\t\t\treturn value;\n\t\t}\n\t\tif (typeof TextDecoder !== 'undefined') {\n\t\t\treturn new TextDecoder().decode(value);\n\t\t}\n\t\tthrow new Error('TextDecoder is not available. 
Please provide either Buffer or TextDecoder polyfill.');\n\t}\n\treturn value;\n}\n"],"mappings":"AAEA,SAAS,kBAAkB;AAE3B,SAAS,kBAAkB;AAG3B,SAAS,kBAA8B,WAAW;AAElD,SAAS,yBAAyB;AAOlC,SAAS,qBAAqB,qBAAqB;AACnD,SAAS,oBAAoB;AAQtB,MAAM,sBAGH,cAAwD;AAAA,EAKjE,YACS,QACR,SACQ,QACA,SACA,IACP;AACD,UAAM,OAAO;AANL;AAEA;AACA;AACA;AAGR,SAAK,SAAS,QAAQ,UAAU,IAAI,WAAW;AAAA,EAChD;AAAA,EAbA,QAAiB,UAAU,IAAY;AAAA,EAE/B;AAAA,EAaR,aACC,OACA,QACA,eACA,uBACA,oBACyB;AACzB,WAAO,IAAI;AAAA,MACV,KAAK;AAAA,MACL;AAAA,MACA,KAAK;AAAA,MACL;AAAA,MACA,KAAK;AAAA,MACL;AAAA,MACA;AAAA,MACA;AAAA,IACD;AAAA,EACD;AAAA,EAEA,MAAM,MAAwE,SAAY;AACzF,UAAM,kBAAmC,CAAC;AAC1C,UAAM,eAA8B,CAAC;AAErC,eAAW,SAAS,SAAS;AAC5B,YAAM,gBAAgB,MAAM,SAAS;AACrC,YAAM,aAAa,cAAc,SAAS;AAC1C,sBAAgB,KAAK,aAAa;AAClC,mBAAa,KAAK,EAAE,KAAK,WAAW,KAAK,MAAM,WAAW,OAAiB,CAAC;AAAA,IAC7E;AAEA,UAAM,eAAe,MAAM,KAAK,OAAO,MAAM,YAAY;AACzD,WAAO,aAAa,IAAI,CAAC,QAAQ,MAAM,gBAAgB,CAAC,EAAG,UAAU,QAAQ,IAAI,CAAC;AAAA,EACnF;AAAA,EAEA,MAAe,YACd,aACA,SACa;AAEb,UAAM,WAAW,MAAM,KAAK,OAAO,YAAY;AAC/C,UAAM,UAAU,IAAI;AAAA,MACnB,KAAK;AAAA,MACL,KAAK;AAAA,MACL,KAAK;AAAA,MACL,KAAK;AAAA,MACL;AAAA,IACD;AACA,UAAM,KAAK,IAAI,kBAAwC,SAAS,KAAK,SAAS,SAAS,KAAK,MAAM;AAClG,QAAI;AACH,YAAM,SAAS,MAAM,YAAY,EAAE;AACnC,YAAM,SAAS,OAAO;AACtB,aAAO;AAAA,IACR,SAAS,KAAK;AACb,YAAM,SAAS,SAAS;AACxB,YAAM;AAAA,IACP;AAAA,EACD;AAAA,EAES,kCAAkC,QAA0B;AACpE,WAAQ,OAAqB;AAAA,EAC9B;AAAA,EAES,kCAAkC,QAA0B;AACpE,WAAQ,OAAqB,KAAK,CAAC;AAAA,EACpC;AAAA,EAES,qCAAqC,QAA0B;AACvE,WAAQ,OAAqB;AAAA,EAC9B;AACD;AAEO,MAAM,0BAGH,kBAA4D;AAAA,EACrE,QAAiB,UAAU,IAAY;AAAA,EAEvC,MAAe,YAAe,aAAsF;AACnH,UAAM,gBAAgB,KAAK,KAAK,WAAW;AAC3C,UAAM,KAAK,IAAI,kBAAkB,SAAS,KAAK,SAAS,KAAK,SAAS,KAAK,QAAQ,KAAK,cAAc,CAAC;AACvG,UAAM,KAAK,QAAQ,IAAI,IAAI,IAAI,aAAa,aAAa,EAAE,CAAC;AAC5D,QAAI;AACH,YAAM,SAAS,MAAM,YAAY,EAAE;AACnC,YAAM,KAAK,QAAQ,IAAI,IAAI,IAAI,qBAAqB,aAAa,EAAE,CAAC;AACpE,aAAO;AAAA,IACR,SAAS,KAAK;AACb,YAAM,KAAK,QAAQ,IAAI,IAAI,IAAI,yBAAyB,aAAa,EAAE,CAAC;AACxE,YAAM;AAAA,IACP;AAAA,EACD;AACD;AAEO,MAAM,4BAAiF,oBAE5F;AAAA,EAGD,YACS,QACR,OACQ,QACgB,QAChB,IACR,eACQ,wBACgB,oBAIvB;AACD,UAAM,SAAS,eAAe,KAAK;AAZ3B;AAEA;AACgB;AAChB;AAEA;AACgB;AAMxB,SAAK,qBAAqB;AAC1B,SAAK,SAAS;AAAA,EACf;AAAA,EAlBA,QAAiB,UAAU,IAAY;AAAA,EAoBvC,IAAI,mBAAiE;AACpE,UAAM,SAAS,iBAAiB,KAAK,MAAM,QAAQ,qBAAqB,CAAC,CAAC;AAC1E,SAAK,OAAO,SAAS,KAAK,MAAM,KAAK,MAAM;AAC3C,UAAM,OAAoB,EAAE,KAAK,KAAK,MAAM,KAAK,MAAM,OAAiB;AACxE,WAAO,KAAK,KAAK,KAAK,GAAG,QAAQ,IAAI,IAAI,KAAK,OAAO,QAAQ,IAAI;AAAA,EAClE;AAAA,EAEA,MAAM,IAAI,mBAAgE;AACzE,UAAM,EAAE,QAAQ,QAAQ,OAAO,IAAI,QAAQ,mBAAmB,IAAI;AAClE,QAAI,CAAC,UAAU,CAAC,oBAAoB;AACnC,YAAM,SAAS,iBAAiB,MAAM,QAAQ,qBAAqB,CAAC,CAAC;AACrE,aAAO,SAAS,MAAM,KAAK,MAAM;AACjC,YAAM,OAAoB,EAAE,KAAK,MAAM,KAAK,MAAM,OAAiB;AACnE,cAAQ,KAAK,GAAG,QAAQ,IAAI,IAAI,OAAO,QAAQ,IAAI,GAAG,KAAK,CAAC,EAAE,MAAAA,MAAK,MAAM,KAAK,aAAaA,KAAI,CAAC;AAAA,IACjG;AAEA,UAAM,OAAO,MAAM,KAAK,OAAO,iBAAiB;AAEhD,WAAO,KAAK,aAAa,IAAI;AAAA,EAC9B;AAAA,EAES,aAAa,MAAe,aAAgC;AACpE,QAAI,aAAa;AAChB,aAAQ,KAAmB;AAAA,IAC5B;AAEA,QAAI,CAAC,KAAK,UAAU,CAAC,KAAK,oBAAoB;AAC7C,aAAQ,KAAmB,IAAI,CAAC,QAAQ,aAAa,GAAG,CAAC;AAAA,IAC1D;AAEA,QAAI,KAAK,oBAAoB;AAC5B,aAAO,KAAK,mBAAmB,MAAqB,mBAAmB;AAAA,IACxE;AAEA,WAAQ,KAAmB,IAAI,CAAC,QAAQ;AACvC,aAAO;AAAA,QACN,KAAK;AAAA,QACL,MAAM,UAAU,MAAM,KAAK,GAAG,EAAE,IAAI,CAAC,MAAM,oBAAoB,CAAC,CAAC;AAAA,QACjE,KAAK;AAAA,MACN;AAAA,IACD,CAAC;AAAA,EACF;AAAA,EAEA,MAAM,IAAI,mBAAgE;AACzE,UAAM,EAAE,QAAQ,QAAQ,OAAO,IAAI,QAAQ,mBAAmB,IAAI;AAClE,QAAI,CAAC,UAAU,CAAC,oBAAoB;AACnC,YAAM,SAAS,iBAAiB,MAAM,QAAQ,qBAAqB,CAAC,CAAC;AACrE,aAAO,SAAS,MAAM,KAAK,MAAM;AACjC,YAAM,OAAoB,EAAE,KAAK,MAAM,KAAK,MAAM,OAAiB;AACnE,cAAQ,KAAK,GAAG,QAAQ,IAAI,IAAI,OAAO,QAAQ,IAAI,GAAG,
KAAK,CAAC,EAAE,MAAAA,MAAK,MAAM,KAAK,aAAaA,KAAI,CAAC;AAAA,IACjG;AAEA,UAAM,OAAO,MAAM,KAAK,OAAO,iBAAiB;AAEhD,WAAO,KAAK,aAAa,IAAI;AAAA,EAC9B;AAAA,EAES,aAAa,MAAe,aAAgC;AACpE,QAAI,aAAa;AAChB,aAAQ,KAAmB;AAAA,IAC5B;AAEA,UAAM,MAAO,KAAmB,CAAC;AAEjC,QAAI,CAAC,KAAK,UAAU,CAAC,KAAK,oBAAoB;AAC7C,aAAO,aAAa,GAAG;AAAA,IACxB;AAEA,QAAI,CAAC,KAAK;AACT,aAAO;AAAA,IACR;AAEA,QAAI,KAAK,oBAAoB;AAC5B,aAAO,KAAK,mBAAmB,MAAqB,mBAAmB;AAAA,IACxE;AAEA,WAAO;AAAA,MACN,KAAK;AAAA,MACL,MAAM,UAAU,MAAM,KAAK,GAAG,EAAE,IAAI,CAAC,MAAM,oBAAoB,CAAC,CAAC;AAAA,MACjE,KAAK;AAAA,IACN;AAAA,EACD;AAAA,EAEA,OAAO,mBAAmE;AACzE,UAAM,SAAS,iBAAiB,KAAK,MAAM,QAAQ,qBAAqB,CAAC,CAAC;AAC1E,SAAK,OAAO,SAAS,KAAK,MAAM,KAAK,MAAM;AAC3C,UAAM,OAAoB,EAAE,KAAK,KAAK,MAAM,KAAK,MAAM,OAAiB;AACxE,YAAQ,KAAK,KAAK,KAAK,GAAG,QAAQ,IAAI,IAAI,KAAK,OAAO,QAAQ,IAAI,GAAG,KAAK,CAAC,EAAE,KAAK,MAAM,IAAI;AAAA,EAG7F;AAAA;AAAA,EAGA,wBAAiC;AAChC,WAAO,KAAK;AAAA,EACb;AACD;AAEA,SAAS,aAAa,KAAU;AAK/B,SAAO,OAAO,KAAK,GAAG,EAAE,OAAO,CAAC,KAA0B,QAAQ;AACjE,QAAI,OAAO,UAAU,qBAAqB,KAAK,KAAK,GAAG,GAAG;AACzD,UAAI,GAAG,IAAI,IAAI,GAAG;AAAA,IACnB;AACA,WAAO;AAAA,EACR,GAAG,CAAC,CAAC;AACN;AAEA,SAAS,oBAAoB,OAAgB;AAC5C,MAAI,OAAO,gBAAgB,eAAe,iBAAiB,aAAa;AACvE,QAAI,OAAO,WAAW,aAAa;AAClC,UAAI,EAAE,iBAAiB,SAAS;AAC/B,eAAO,OAAO,KAAK,KAAK;AAAA,MACzB;AACA,aAAO;AAAA,IACR;AACA,QAAI,OAAO,gBAAgB,aAAa;AACvC,aAAO,IAAI,YAAY,EAAE,OAAO,KAAK;AAAA,IACtC;AACA,UAAM,IAAI,MAAM,qFAAqF;AAAA,EACtG;AACA,SAAO;AACR;","names":["rows"]}
1
+ {"version":3,"sources":["../../src/libsql/session.ts"],"sourcesContent":["import type { Client, InArgs, InStatement, ResultSet, Transaction } from '@libsql/client';\nimport type { BatchItem as BatchItem } from '~/batch.ts';\nimport { entityKind } from '~/entity.ts';\nimport type { Logger } from '~/logger.ts';\nimport { NoopLogger } from '~/logger.ts';\nimport type { RelationalSchemaConfig, TablesRelationalConfig } from '~/relations.ts';\nimport type { PreparedQuery } from '~/session.ts';\nimport { fillPlaceholders, type Query, sql } from '~/sql/sql.ts';\nimport type { SQLiteAsyncDialect } from '~/sqlite-core/dialect.ts';\nimport { SQLiteTransaction } from '~/sqlite-core/index.ts';\nimport type { SelectedFieldsOrdered } from '~/sqlite-core/query-builders/select.types.ts';\nimport type {\n\tPreparedQueryConfig as PreparedQueryConfigBase,\n\tSQLiteExecuteMethod,\n\tSQLiteTransactionConfig,\n} from '~/sqlite-core/session.ts';\nimport { SQLitePreparedQuery, SQLiteSession } from '~/sqlite-core/session.ts';\nimport { mapResultRow } from '~/utils.ts';\n\nexport interface LibSQLSessionOptions {\n\tlogger?: Logger;\n}\n\ntype PreparedQueryConfig = Omit<PreparedQueryConfigBase, 'statement' | 'run'>;\n\nexport class LibSQLSession<\n\tTFullSchema extends Record<string, unknown>,\n\tTSchema extends TablesRelationalConfig,\n> extends SQLiteSession<'async', ResultSet, TFullSchema, TSchema> {\n\tstatic readonly [entityKind]: string = 'LibSQLSession';\n\n\tprivate logger: Logger;\n\n\tconstructor(\n\t\tprivate client: Client,\n\t\tdialect: SQLiteAsyncDialect,\n\t\tprivate schema: RelationalSchemaConfig<TSchema> | undefined,\n\t\tprivate options: LibSQLSessionOptions,\n\t\tprivate tx: Transaction | undefined,\n\t) {\n\t\tsuper(dialect);\n\t\tthis.logger = options.logger ?? 
new NoopLogger();\n\t}\n\n\tprepareQuery<T extends Omit<PreparedQueryConfig, 'run'>>(\n\t\tquery: Query,\n\t\tfields: SelectedFieldsOrdered | undefined,\n\t\texecuteMethod: SQLiteExecuteMethod,\n\t\tisResponseInArrayMode: boolean,\n\t\tcustomResultMapper?: (rows: unknown[][]) => unknown,\n\t): LibSQLPreparedQuery<T> {\n\t\treturn new LibSQLPreparedQuery(\n\t\t\tthis.client,\n\t\t\tquery,\n\t\t\tthis.logger,\n\t\t\tfields,\n\t\t\tthis.tx,\n\t\t\texecuteMethod,\n\t\t\tisResponseInArrayMode,\n\t\t\tcustomResultMapper,\n\t\t);\n\t}\n\n\tasync batch<T extends BatchItem<'sqlite'>[] | readonly BatchItem<'sqlite'>[]>(queries: T) {\n\t\tconst preparedQueries: PreparedQuery[] = [];\n\t\tconst builtQueries: InStatement[] = [];\n\n\t\tfor (const query of queries) {\n\t\t\tconst preparedQuery = query._prepare();\n\t\t\tconst builtQuery = preparedQuery.getQuery();\n\t\t\tpreparedQueries.push(preparedQuery);\n\t\t\tbuiltQueries.push({ sql: builtQuery.sql, args: builtQuery.params as InArgs });\n\t\t}\n\n\t\tconst batchResults = await this.client.batch(builtQueries);\n\t\treturn batchResults.map((result, i) => preparedQueries[i]!.mapResult(result, true));\n\t}\n\n\tasync migrate<T extends BatchItem<'sqlite'>[] | readonly BatchItem<'sqlite'>[]>(queries: T) {\n\t\tconst preparedQueries: PreparedQuery[] = [];\n\t\tconst builtQueries: InStatement[] = [];\n\n\t\tfor (const query of queries) {\n\t\t\tconst preparedQuery = query._prepare();\n\t\t\tconst builtQuery = preparedQuery.getQuery();\n\t\t\tpreparedQueries.push(preparedQuery);\n\t\t\tbuiltQueries.push({ sql: builtQuery.sql, args: builtQuery.params as InArgs });\n\t\t}\n\n\t\tconst batchResults = await this.client.migrate(builtQueries);\n\t\treturn batchResults.map((result, i) => preparedQueries[i]!.mapResult(result, true));\n\t}\n\n\toverride async transaction<T>(\n\t\ttransaction: (db: LibSQLTransaction<TFullSchema, TSchema>) => T | Promise<T>,\n\t\t_config?: SQLiteTransactionConfig,\n\t): Promise<T> {\n\t\t// TODO: support transaction behavior\n\t\tconst libsqlTx = await this.client.transaction();\n\t\tconst session = new LibSQLSession<TFullSchema, TSchema>(\n\t\t\tthis.client,\n\t\t\tthis.dialect,\n\t\t\tthis.schema,\n\t\t\tthis.options,\n\t\t\tlibsqlTx,\n\t\t);\n\t\tconst tx = new LibSQLTransaction<TFullSchema, TSchema>('async', this.dialect, session, this.schema);\n\t\ttry {\n\t\t\tconst result = await transaction(tx);\n\t\t\tawait libsqlTx.commit();\n\t\t\treturn result;\n\t\t} catch (err) {\n\t\t\tawait libsqlTx.rollback();\n\t\t\tthrow err;\n\t\t}\n\t}\n\n\toverride extractRawAllValueFromBatchResult(result: unknown): unknown {\n\t\treturn (result as ResultSet).rows;\n\t}\n\n\toverride extractRawGetValueFromBatchResult(result: unknown): unknown {\n\t\treturn (result as ResultSet).rows[0];\n\t}\n\n\toverride extractRawValuesValueFromBatchResult(result: unknown): unknown {\n\t\treturn (result as ResultSet).rows;\n\t}\n}\n\nexport class LibSQLTransaction<\n\tTFullSchema extends Record<string, unknown>,\n\tTSchema extends TablesRelationalConfig,\n> extends SQLiteTransaction<'async', ResultSet, TFullSchema, TSchema> {\n\tstatic readonly [entityKind]: string = 'LibSQLTransaction';\n\n\toverride async transaction<T>(transaction: (tx: LibSQLTransaction<TFullSchema, TSchema>) => Promise<T>): Promise<T> {\n\t\tconst savepointName = `sp${this.nestedIndex}`;\n\t\tconst tx = new LibSQLTransaction('async', this.dialect, this.session, this.schema, this.nestedIndex + 1);\n\t\tawait this.session.run(sql.raw(`savepoint ${savepointName}`));\n\t\ttry {\n\t\t\tconst 
result = await transaction(tx);\n\t\t\tawait this.session.run(sql.raw(`release savepoint ${savepointName}`));\n\t\t\treturn result;\n\t\t} catch (err) {\n\t\t\tawait this.session.run(sql.raw(`rollback to savepoint ${savepointName}`));\n\t\t\tthrow err;\n\t\t}\n\t}\n}\n\nexport class LibSQLPreparedQuery<T extends PreparedQueryConfig = PreparedQueryConfig> extends SQLitePreparedQuery<\n\t{ type: 'async'; run: ResultSet; all: T['all']; get: T['get']; values: T['values']; execute: T['execute'] }\n> {\n\tstatic readonly [entityKind]: string = 'LibSQLPreparedQuery';\n\n\tconstructor(\n\t\tprivate client: Client,\n\t\tquery: Query,\n\t\tprivate logger: Logger,\n\t\t/** @internal */ public fields: SelectedFieldsOrdered | undefined,\n\t\tprivate tx: Transaction | undefined,\n\t\texecuteMethod: SQLiteExecuteMethod,\n\t\tprivate _isResponseInArrayMode: boolean,\n\t\t/** @internal */ public customResultMapper?: (\n\t\t\trows: unknown[][],\n\t\t\tmapColumnValue?: (value: unknown) => unknown,\n\t\t) => unknown,\n\t) {\n\t\tsuper('async', executeMethod, query);\n\t\tthis.customResultMapper = customResultMapper;\n\t\tthis.fields = fields;\n\t}\n\n\trun(placeholderValues?: Record<string, unknown>): Promise<ResultSet> {\n\t\tconst params = fillPlaceholders(this.query.params, placeholderValues ?? {});\n\t\tthis.logger.logQuery(this.query.sql, params);\n\t\tconst stmt: InStatement = { sql: this.query.sql, args: params as InArgs };\n\t\treturn this.tx ? this.tx.execute(stmt) : this.client.execute(stmt);\n\t}\n\n\tasync all(placeholderValues?: Record<string, unknown>): Promise<T['all']> {\n\t\tconst { fields, logger, query, tx, client, customResultMapper } = this;\n\t\tif (!fields && !customResultMapper) {\n\t\t\tconst params = fillPlaceholders(query.params, placeholderValues ?? {});\n\t\t\tlogger.logQuery(query.sql, params);\n\t\t\tconst stmt: InStatement = { sql: query.sql, args: params as InArgs };\n\t\t\treturn (tx ? tx.execute(stmt) : client.execute(stmt)).then(({ rows }) => this.mapAllResult(rows));\n\t\t}\n\n\t\tconst rows = await this.values(placeholderValues) as unknown[][];\n\n\t\treturn this.mapAllResult(rows);\n\t}\n\n\toverride mapAllResult(rows: unknown, isFromBatch?: boolean): unknown {\n\t\tif (isFromBatch) {\n\t\t\trows = (rows as ResultSet).rows;\n\t\t}\n\n\t\tif (!this.fields && !this.customResultMapper) {\n\t\t\treturn (rows as unknown[]).map((row) => normalizeRow(row));\n\t\t}\n\n\t\tif (this.customResultMapper) {\n\t\t\treturn this.customResultMapper(rows as unknown[][], normalizeFieldValue) as T['all'];\n\t\t}\n\n\t\treturn (rows as unknown[]).map((row) => {\n\t\t\treturn mapResultRow(\n\t\t\t\tthis.fields!,\n\t\t\t\tArray.prototype.slice.call(row).map((v) => normalizeFieldValue(v)),\n\t\t\t\tthis.joinsNotNullableMap,\n\t\t\t);\n\t\t});\n\t}\n\n\tasync get(placeholderValues?: Record<string, unknown>): Promise<T['get']> {\n\t\tconst { fields, logger, query, tx, client, customResultMapper } = this;\n\t\tif (!fields && !customResultMapper) {\n\t\t\tconst params = fillPlaceholders(query.params, placeholderValues ?? {});\n\t\t\tlogger.logQuery(query.sql, params);\n\t\t\tconst stmt: InStatement = { sql: query.sql, args: params as InArgs };\n\t\t\treturn (tx ? 
tx.execute(stmt) : client.execute(stmt)).then(({ rows }) => this.mapGetResult(rows));\n\t\t}\n\n\t\tconst rows = await this.values(placeholderValues) as unknown[][];\n\n\t\treturn this.mapGetResult(rows);\n\t}\n\n\toverride mapGetResult(rows: unknown, isFromBatch?: boolean): unknown {\n\t\tif (isFromBatch) {\n\t\t\trows = (rows as ResultSet).rows;\n\t\t}\n\n\t\tconst row = (rows as unknown[])[0];\n\n\t\tif (!this.fields && !this.customResultMapper) {\n\t\t\treturn normalizeRow(row);\n\t\t}\n\n\t\tif (!row) {\n\t\t\treturn undefined;\n\t\t}\n\n\t\tif (this.customResultMapper) {\n\t\t\treturn this.customResultMapper(rows as unknown[][], normalizeFieldValue) as T['get'];\n\t\t}\n\n\t\treturn mapResultRow(\n\t\t\tthis.fields!,\n\t\t\tArray.prototype.slice.call(row).map((v) => normalizeFieldValue(v)),\n\t\t\tthis.joinsNotNullableMap,\n\t\t);\n\t}\n\n\tvalues(placeholderValues?: Record<string, unknown>): Promise<T['values']> {\n\t\tconst params = fillPlaceholders(this.query.params, placeholderValues ?? {});\n\t\tthis.logger.logQuery(this.query.sql, params);\n\t\tconst stmt: InStatement = { sql: this.query.sql, args: params as InArgs };\n\t\treturn (this.tx ? this.tx.execute(stmt) : this.client.execute(stmt)).then(({ rows }) => rows) as Promise<\n\t\t\tT['values']\n\t\t>;\n\t}\n\n\t/** @internal */\n\tisResponseInArrayMode(): boolean {\n\t\treturn this._isResponseInArrayMode;\n\t}\n}\n\nfunction normalizeRow(obj: any) {\n\t// The libSQL node-sqlite3 compatibility wrapper returns rows\n\t// that can be accessed both as objects and arrays. Let's\n\t// turn them into objects what's what other SQLite drivers\n\t// do.\n\treturn Object.keys(obj).reduce((acc: Record<string, any>, key) => {\n\t\tif (Object.prototype.propertyIsEnumerable.call(obj, key)) {\n\t\t\tacc[key] = obj[key];\n\t\t}\n\t\treturn acc;\n\t}, {});\n}\n\nfunction normalizeFieldValue(value: unknown) {\n\tif (typeof ArrayBuffer !== 'undefined' && value instanceof ArrayBuffer) { // eslint-disable-line no-instanceof/no-instanceof\n\t\tif (typeof Buffer !== 'undefined') {\n\t\t\tif (!(value instanceof Buffer)) { // eslint-disable-line no-instanceof/no-instanceof\n\t\t\t\treturn Buffer.from(value);\n\t\t\t}\n\t\t\treturn value;\n\t\t}\n\t\tif (typeof TextDecoder !== 'undefined') {\n\t\t\treturn new TextDecoder().decode(value);\n\t\t}\n\t\tthrow new Error('TextDecoder is not available. 
Please provide either Buffer or TextDecoder polyfill.');\n\t}\n\treturn value;\n}\n"],"mappings":"AAEA,SAAS,kBAAkB;AAE3B,SAAS,kBAAkB;AAG3B,SAAS,kBAA8B,WAAW;AAElD,SAAS,yBAAyB;AAOlC,SAAS,qBAAqB,qBAAqB;AACnD,SAAS,oBAAoB;AAQtB,MAAM,sBAGH,cAAwD;AAAA,EAKjE,YACS,QACR,SACQ,QACA,SACA,IACP;AACD,UAAM,OAAO;AANL;AAEA;AACA;AACA;AAGR,SAAK,SAAS,QAAQ,UAAU,IAAI,WAAW;AAAA,EAChD;AAAA,EAbA,QAAiB,UAAU,IAAY;AAAA,EAE/B;AAAA,EAaR,aACC,OACA,QACA,eACA,uBACA,oBACyB;AACzB,WAAO,IAAI;AAAA,MACV,KAAK;AAAA,MACL;AAAA,MACA,KAAK;AAAA,MACL;AAAA,MACA,KAAK;AAAA,MACL;AAAA,MACA;AAAA,MACA;AAAA,IACD;AAAA,EACD;AAAA,EAEA,MAAM,MAAwE,SAAY;AACzF,UAAM,kBAAmC,CAAC;AAC1C,UAAM,eAA8B,CAAC;AAErC,eAAW,SAAS,SAAS;AAC5B,YAAM,gBAAgB,MAAM,SAAS;AACrC,YAAM,aAAa,cAAc,SAAS;AAC1C,sBAAgB,KAAK,aAAa;AAClC,mBAAa,KAAK,EAAE,KAAK,WAAW,KAAK,MAAM,WAAW,OAAiB,CAAC;AAAA,IAC7E;AAEA,UAAM,eAAe,MAAM,KAAK,OAAO,MAAM,YAAY;AACzD,WAAO,aAAa,IAAI,CAAC,QAAQ,MAAM,gBAAgB,CAAC,EAAG,UAAU,QAAQ,IAAI,CAAC;AAAA,EACnF;AAAA,EAEA,MAAM,QAA0E,SAAY;AAC3F,UAAM,kBAAmC,CAAC;AAC1C,UAAM,eAA8B,CAAC;AAErC,eAAW,SAAS,SAAS;AAC5B,YAAM,gBAAgB,MAAM,SAAS;AACrC,YAAM,aAAa,cAAc,SAAS;AAC1C,sBAAgB,KAAK,aAAa;AAClC,mBAAa,KAAK,EAAE,KAAK,WAAW,KAAK,MAAM,WAAW,OAAiB,CAAC;AAAA,IAC7E;AAEA,UAAM,eAAe,MAAM,KAAK,OAAO,QAAQ,YAAY;AAC3D,WAAO,aAAa,IAAI,CAAC,QAAQ,MAAM,gBAAgB,CAAC,EAAG,UAAU,QAAQ,IAAI,CAAC;AAAA,EACnF;AAAA,EAEA,MAAe,YACd,aACA,SACa;AAEb,UAAM,WAAW,MAAM,KAAK,OAAO,YAAY;AAC/C,UAAM,UAAU,IAAI;AAAA,MACnB,KAAK;AAAA,MACL,KAAK;AAAA,MACL,KAAK;AAAA,MACL,KAAK;AAAA,MACL;AAAA,IACD;AACA,UAAM,KAAK,IAAI,kBAAwC,SAAS,KAAK,SAAS,SAAS,KAAK,MAAM;AAClG,QAAI;AACH,YAAM,SAAS,MAAM,YAAY,EAAE;AACnC,YAAM,SAAS,OAAO;AACtB,aAAO;AAAA,IACR,SAAS,KAAK;AACb,YAAM,SAAS,SAAS;AACxB,YAAM;AAAA,IACP;AAAA,EACD;AAAA,EAES,kCAAkC,QAA0B;AACpE,WAAQ,OAAqB;AAAA,EAC9B;AAAA,EAES,kCAAkC,QAA0B;AACpE,WAAQ,OAAqB,KAAK,CAAC;AAAA,EACpC;AAAA,EAES,qCAAqC,QAA0B;AACvE,WAAQ,OAAqB;AAAA,EAC9B;AACD;AAEO,MAAM,0BAGH,kBAA4D;AAAA,EACrE,QAAiB,UAAU,IAAY;AAAA,EAEvC,MAAe,YAAe,aAAsF;AACnH,UAAM,gBAAgB,KAAK,KAAK,WAAW;AAC3C,UAAM,KAAK,IAAI,kBAAkB,SAAS,KAAK,SAAS,KAAK,SAAS,KAAK,QAAQ,KAAK,cAAc,CAAC;AACvG,UAAM,KAAK,QAAQ,IAAI,IAAI,IAAI,aAAa,aAAa,EAAE,CAAC;AAC5D,QAAI;AACH,YAAM,SAAS,MAAM,YAAY,EAAE;AACnC,YAAM,KAAK,QAAQ,IAAI,IAAI,IAAI,qBAAqB,aAAa,EAAE,CAAC;AACpE,aAAO;AAAA,IACR,SAAS,KAAK;AACb,YAAM,KAAK,QAAQ,IAAI,IAAI,IAAI,yBAAyB,aAAa,EAAE,CAAC;AACxE,YAAM;AAAA,IACP;AAAA,EACD;AACD;AAEO,MAAM,4BAAiF,oBAE5F;AAAA,EAGD,YACS,QACR,OACQ,QACgB,QAChB,IACR,eACQ,wBACgB,oBAIvB;AACD,UAAM,SAAS,eAAe,KAAK;AAZ3B;AAEA;AACgB;AAChB;AAEA;AACgB;AAMxB,SAAK,qBAAqB;AAC1B,SAAK,SAAS;AAAA,EACf;AAAA,EAlBA,QAAiB,UAAU,IAAY;AAAA,EAoBvC,IAAI,mBAAiE;AACpE,UAAM,SAAS,iBAAiB,KAAK,MAAM,QAAQ,qBAAqB,CAAC,CAAC;AAC1E,SAAK,OAAO,SAAS,KAAK,MAAM,KAAK,MAAM;AAC3C,UAAM,OAAoB,EAAE,KAAK,KAAK,MAAM,KAAK,MAAM,OAAiB;AACxE,WAAO,KAAK,KAAK,KAAK,GAAG,QAAQ,IAAI,IAAI,KAAK,OAAO,QAAQ,IAAI;AAAA,EAClE;AAAA,EAEA,MAAM,IAAI,mBAAgE;AACzE,UAAM,EAAE,QAAQ,QAAQ,OAAO,IAAI,QAAQ,mBAAmB,IAAI;AAClE,QAAI,CAAC,UAAU,CAAC,oBAAoB;AACnC,YAAM,SAAS,iBAAiB,MAAM,QAAQ,qBAAqB,CAAC,CAAC;AACrE,aAAO,SAAS,MAAM,KAAK,MAAM;AACjC,YAAM,OAAoB,EAAE,KAAK,MAAM,KAAK,MAAM,OAAiB;AACnE,cAAQ,KAAK,GAAG,QAAQ,IAAI,IAAI,OAAO,QAAQ,IAAI,GAAG,KAAK,CAAC,EAAE,MAAAA,MAAK,MAAM,KAAK,aAAaA,KAAI,CAAC;AAAA,IACjG;AAEA,UAAM,OAAO,MAAM,KAAK,OAAO,iBAAiB;AAEhD,WAAO,KAAK,aAAa,IAAI;AAAA,EAC9B;AAAA,EAES,aAAa,MAAe,aAAgC;AACpE,QAAI,aAAa;AAChB,aAAQ,KAAmB;AAAA,IAC5B;AAEA,QAAI,CAAC,KAAK,UAAU,CAAC,KAAK,oBAAoB;AAC7C,aAAQ,KAAmB,IAAI,CAAC,QAAQ,aAAa,GAAG,CAAC;AAAA,IAC1D;AAEA,QAAI,KAAK,oBAAoB;AAC5B,aAAO,KAAK,mBAAmB,MAAqB,mBAAmB;AAAA,IACxE;AAEA,WAAQ,KAAmB,IAAI,CAAC,QAAQ;AACvC,aAAO;AAAA,QACN,KAAK;AAAA,QACL,MAAM,UAAU,MAAM,KAAK,GAAG,EAAE,IAAI,CAAC,MAAM,oBAA
oB,CAAC,CAAC;AAAA,QACjE,KAAK;AAAA,MACN;AAAA,IACD,CAAC;AAAA,EACF;AAAA,EAEA,MAAM,IAAI,mBAAgE;AACzE,UAAM,EAAE,QAAQ,QAAQ,OAAO,IAAI,QAAQ,mBAAmB,IAAI;AAClE,QAAI,CAAC,UAAU,CAAC,oBAAoB;AACnC,YAAM,SAAS,iBAAiB,MAAM,QAAQ,qBAAqB,CAAC,CAAC;AACrE,aAAO,SAAS,MAAM,KAAK,MAAM;AACjC,YAAM,OAAoB,EAAE,KAAK,MAAM,KAAK,MAAM,OAAiB;AACnE,cAAQ,KAAK,GAAG,QAAQ,IAAI,IAAI,OAAO,QAAQ,IAAI,GAAG,KAAK,CAAC,EAAE,MAAAA,MAAK,MAAM,KAAK,aAAaA,KAAI,CAAC;AAAA,IACjG;AAEA,UAAM,OAAO,MAAM,KAAK,OAAO,iBAAiB;AAEhD,WAAO,KAAK,aAAa,IAAI;AAAA,EAC9B;AAAA,EAES,aAAa,MAAe,aAAgC;AACpE,QAAI,aAAa;AAChB,aAAQ,KAAmB;AAAA,IAC5B;AAEA,UAAM,MAAO,KAAmB,CAAC;AAEjC,QAAI,CAAC,KAAK,UAAU,CAAC,KAAK,oBAAoB;AAC7C,aAAO,aAAa,GAAG;AAAA,IACxB;AAEA,QAAI,CAAC,KAAK;AACT,aAAO;AAAA,IACR;AAEA,QAAI,KAAK,oBAAoB;AAC5B,aAAO,KAAK,mBAAmB,MAAqB,mBAAmB;AAAA,IACxE;AAEA,WAAO;AAAA,MACN,KAAK;AAAA,MACL,MAAM,UAAU,MAAM,KAAK,GAAG,EAAE,IAAI,CAAC,MAAM,oBAAoB,CAAC,CAAC;AAAA,MACjE,KAAK;AAAA,IACN;AAAA,EACD;AAAA,EAEA,OAAO,mBAAmE;AACzE,UAAM,SAAS,iBAAiB,KAAK,MAAM,QAAQ,qBAAqB,CAAC,CAAC;AAC1E,SAAK,OAAO,SAAS,KAAK,MAAM,KAAK,MAAM;AAC3C,UAAM,OAAoB,EAAE,KAAK,KAAK,MAAM,KAAK,MAAM,OAAiB;AACxE,YAAQ,KAAK,KAAK,KAAK,GAAG,QAAQ,IAAI,IAAI,KAAK,OAAO,QAAQ,IAAI,GAAG,KAAK,CAAC,EAAE,KAAK,MAAM,IAAI;AAAA,EAG7F;AAAA;AAAA,EAGA,wBAAiC;AAChC,WAAO,KAAK;AAAA,EACb;AACD;AAEA,SAAS,aAAa,KAAU;AAK/B,SAAO,OAAO,KAAK,GAAG,EAAE,OAAO,CAAC,KAA0B,QAAQ;AACjE,QAAI,OAAO,UAAU,qBAAqB,KAAK,KAAK,GAAG,GAAG;AACzD,UAAI,GAAG,IAAI,IAAI,GAAG;AAAA,IACnB;AACA,WAAO;AAAA,EACR,GAAG,CAAC,CAAC;AACN;AAEA,SAAS,oBAAoB,OAAgB;AAC5C,MAAI,OAAO,gBAAgB,eAAe,iBAAiB,aAAa;AACvE,QAAI,OAAO,WAAW,aAAa;AAClC,UAAI,EAAE,iBAAiB,SAAS;AAC/B,eAAO,OAAO,KAAK,KAAK;AAAA,MACzB;AACA,aAAO;AAAA,IACR;AACA,QAAI,OAAO,gBAAgB,aAAa;AACvC,aAAO,IAAI,YAAY,EAAE,OAAO,KAAK;AAAA,IACtC;AACA,UAAM,IAAI,MAAM,qFAAqF;AAAA,EACtG;AACA,SAAO;AACR;","names":["rows"]}
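The migrate method added to LibSQLSession above follows the same shape as the existing batch: prepare every query, collect the built SQL and params into one statement list, send that list in a single client call (client.migrate rather than client.batch), and map each raw result back through the prepared query that produced it. A compilable sketch of that pattern, using simplified stand-in interfaces rather than drizzle-orm's actual internal types:

// Sketch only: mirrors the prepare -> build -> send -> map shape of
// LibSQLSession.batch and the new LibSQLSession.migrate. The interfaces
// below are simplified stand-ins, not drizzle-orm's real internals.
interface BuiltQuery {
  sql: string;
  params: unknown[];
}

interface PreparedLike {
  getQuery(): BuiltQuery;
  mapResult(result: unknown, isFromBatch: boolean): unknown;
}

interface QueryLike {
  _prepare(): PreparedLike;
}

// Stand-in for client.batch / client.migrate: one call, one result per statement.
type SendStatements = (stmts: { sql: string; args: unknown[] }[]) => Promise<unknown[]>;

async function runAsSingleBatch(queries: QueryLike[], send: SendStatements): Promise<unknown[]> {
  const prepared: PreparedLike[] = [];
  const built: { sql: string; args: unknown[] }[] = [];
  for (const query of queries) {
    const preparedQuery = query._prepare();
    const builtQuery = preparedQuery.getQuery();
    prepared.push(preparedQuery);
    built.push({ sql: builtQuery.sql, args: builtQuery.params });
  }
  const results = await send(built);
  // Each raw result is mapped by the prepared query that produced it.
  return results.map((result, i) => prepared[i]!.mapResult(result, true));
}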
package/migrator.cjs CHANGED
@@ -1,7 +1,9 @@
1
1
  "use strict";
2
+ var __create = Object.create;
2
3
  var __defProp = Object.defineProperty;
3
4
  var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
4
5
  var __getOwnPropNames = Object.getOwnPropertyNames;
6
+ var __getProtoOf = Object.getPrototypeOf;
5
7
  var __hasOwnProp = Object.prototype.hasOwnProperty;
6
8
  var __export = (target, all) => {
7
9
  for (var name in all)
@@ -15,54 +17,58 @@ var __copyProps = (to, from, except, desc) => {
15
17
  }
16
18
  return to;
17
19
  };
20
+ var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
21
+ // If the importer is in node compatibility mode or this is not an ESM
22
+ // file that has been converted to a CommonJS file using a Babel-
23
+ // compatible transform (i.e. "__esModule" has not been set), then set
24
+ // "default" to the CommonJS "module.exports" for node compatibility.
25
+ isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
26
+ mod
27
+ ));
18
28
  var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
19
29
  var migrator_exports = {};
20
30
  __export(migrator_exports, {
21
31
  readMigrationFiles: () => readMigrationFiles
22
32
  });
23
33
  module.exports = __toCommonJS(migrator_exports);
24
- var import_node_crypto = require("node:crypto");
25
- var import_node_fs = require("node:fs");
26
- var import_node_path = require("node:path");
27
- const timestampToMillis = (timestamp) => {
28
- const year = timestamp.slice(0, 4);
29
- const month = timestamp.slice(4, 6);
30
- const day = timestamp.slice(6, 8);
31
- const hr = timestamp.slice(8, 10);
32
- const min = timestamp.slice(10, 12);
33
- const sec = timestamp.slice(12, 14);
34
- const isoString = `${year}-${month}-${day}T${hr}:${min}:${sec}.000Z`;
35
- return +new Date(isoString);
36
- };
34
+ var import_node_crypto = __toESM(require("node:crypto"), 1);
35
+ var import_node_fs = __toESM(require("node:fs"), 1);
36
+ var import_node_path = __toESM(require("node:path"), 1);
37
37
  function readMigrationFiles(config) {
38
- let outFolder;
38
+ let migrationFolderTo;
39
39
  if (typeof config === "string") {
40
- const kitConfig = JSON.parse(
41
- (0, import_node_fs.readFileSync)((0, import_node_path.resolve)(".", config), "utf8")
42
- );
43
- outFolder = kitConfig.out;
40
+ const configAsString = import_node_fs.default.readFileSync(import_node_path.default.resolve(".", config), "utf8");
41
+ const jsonConfig = JSON.parse(configAsString);
42
+ migrationFolderTo = jsonConfig.out;
44
43
  } else {
45
- outFolder = config.migrationsFolder;
44
+ migrationFolderTo = config.migrationsFolder;
46
45
  }
47
- if (!outFolder) {
46
+ if (!migrationFolderTo) {
48
47
  throw new Error("no migration folder defined");
49
48
  }
50
49
  const migrationQueries = [];
51
- const migrationFolders = (0, import_node_fs.readdirSync)(outFolder).filter((it) => (0, import_node_fs.lstatSync)((0, import_node_path.join)(outFolder, it)).isDirectory());
52
- for (const migrationFolder of migrationFolders) {
53
- const sqlMigrationPath = (0, import_node_path.join)(outFolder, migrationFolder, "migration.sql");
54
- if (!(0, import_node_fs.existsSync)(sqlMigrationPath)) {
55
- console.error("SQL migration file does not exist:", sqlMigrationPath);
56
- continue;
50
+ const journalPath = `${migrationFolderTo}/meta/_journal.json`;
51
+ if (!import_node_fs.default.existsSync(journalPath)) {
52
+ throw new Error(`Can't find meta/_journal.json file`);
53
+ }
54
+ const journalAsString = import_node_fs.default.readFileSync(`${migrationFolderTo}/meta/_journal.json`).toString();
55
+ const journal = JSON.parse(journalAsString);
56
+ for (const journalEntry of journal.entries) {
57
+ const migrationPath = `${migrationFolderTo}/${journalEntry.tag}.sql`;
58
+ try {
59
+ const query = import_node_fs.default.readFileSync(`${migrationFolderTo}/${journalEntry.tag}.sql`).toString();
60
+ const result = query.split("--> statement-breakpoint").map((it) => {
61
+ return it;
62
+ });
63
+ migrationQueries.push({
64
+ sql: result,
65
+ bps: journalEntry.breakpoints,
66
+ folderMillis: journalEntry.when,
67
+ hash: import_node_crypto.default.createHash("sha256").update(query).digest("hex")
68
+ });
69
+ } catch {
70
+ throw new Error(`No file ${migrationPath} found in ${migrationFolderTo} folder`);
57
71
  }
58
- const when = timestampToMillis(migrationFolder.slice(0, 14));
59
- const query = (0, import_node_fs.readFileSync)(sqlMigrationPath, "utf8");
60
- const result = query.split("--> statement-breakpoint");
61
- migrationQueries.push({
62
- sql: result,
63
- folderMillis: when,
64
- hash: (0, import_node_crypto.createHash)("sha256").update(query).digest("hex")
65
- });
66
72
  }
67
73
  return migrationQueries;
68
74
  }
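For orientation, the rewritten readMigrationFiles above no longer scans timestamped migration folders; it resolves the output folder, reads meta/_journal.json, and then loads one <tag>.sql file per journal entry. A sketch of the journal shape it parses, with the entry type taken from the updated source and purely illustrative values:

// Entry shape as annotated in the updated src/migrator.ts.
interface JournalEntry {
  idx: number;
  when: number; // epoch millis; copied into MigrationMeta.folderMillis
  tag: string; // the reader loads `${migrationsFolder}/${tag}.sql`
  breakpoints: boolean; // copied into MigrationMeta.bps
}

// Illustrative journal only; real files are generated by drizzle-kit.
const exampleJournal: { entries: JournalEntry[] } = {
  entries: [
    { idx: 0, when: 1704067200000, tag: '0000_example_users', breakpoints: true },
  ],
};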
package/migrator.cjs.map CHANGED
@@ -1 +1 @@
1
- {"version":3,"sources":["../src/migrator.ts"],"sourcesContent":["import { createHash } from 'node:crypto';\nimport { existsSync, lstatSync, readdirSync, readFileSync } from 'node:fs';\nimport { join, resolve } from 'node:path';\n\nexport interface KitConfig {\n\tout: string;\n\tschema: string;\n}\n\nexport interface MigrationConfig {\n\tmigrationsFolder: string;\n\tmigrationsTable?: string;\n\tmigrationsSchema?: string;\n}\n\nexport interface MigrationMeta {\n\tsql: string[];\n\tfolderMillis: number;\n\thash: string;\n}\n\nconst timestampToMillis = (timestamp: string) => {\n\tconst year = timestamp.slice(0, 4);\n\tconst month = timestamp.slice(4, 6);\n\tconst day = timestamp.slice(6, 8);\n\tconst hr = timestamp.slice(8, 10);\n\tconst min = timestamp.slice(10, 12);\n\tconst sec = timestamp.slice(12, 14);\n\tconst isoString = `${year}-${month}-${day}T${hr}:${min}:${sec}.000Z`;\n\treturn +new Date(isoString);\n};\n\nexport function readMigrationFiles(\n\tconfig: string | MigrationConfig,\n): MigrationMeta[] {\n\tlet outFolder: string | undefined;\n\tif (typeof config === 'string') {\n\t\tconst kitConfig = JSON.parse(\n\t\t\treadFileSync(resolve('.', config), 'utf8'),\n\t\t) as KitConfig;\n\t\toutFolder = kitConfig.out;\n\t} else {\n\t\toutFolder = config.migrationsFolder;\n\t}\n\n\tif (!outFolder) {\n\t\tthrow new Error('no migration folder defined');\n\t}\n\n\tconst migrationQueries: MigrationMeta[] = [];\n\tconst migrationFolders = readdirSync(outFolder).filter((it) => lstatSync(join(outFolder, it)).isDirectory());\n\tfor (const migrationFolder of migrationFolders) {\n\t\tconst sqlMigrationPath = join(outFolder, migrationFolder, 'migration.sql');\n\t\tif (!existsSync(sqlMigrationPath)) {\n\t\t\tconsole.error('SQL migration file does not exist:', sqlMigrationPath);\n\t\t\tcontinue;\n\t\t}\n\n\t\tconst when = timestampToMillis(migrationFolder.slice(0, 14));\n\n\t\tconst query = readFileSync(sqlMigrationPath, 'utf8');\n\t\tconst result = query.split('--> statement-breakpoint');\n\n\t\tmigrationQueries.push({\n\t\t\tsql: result,\n\t\t\tfolderMillis: when,\n\t\t\thash: createHash('sha256').update(query).digest('hex'),\n\t\t});\n\t}\n\n\treturn migrationQueries;\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,yBAA2B;AAC3B,qBAAiE;AACjE,uBAA8B;AAmB9B,MAAM,oBAAoB,CAAC,cAAsB;AAChD,QAAM,OAAO,UAAU,MAAM,GAAG,CAAC;AACjC,QAAM,QAAQ,UAAU,MAAM,GAAG,CAAC;AAClC,QAAM,MAAM,UAAU,MAAM,GAAG,CAAC;AAChC,QAAM,KAAK,UAAU,MAAM,GAAG,EAAE;AAChC,QAAM,MAAM,UAAU,MAAM,IAAI,EAAE;AAClC,QAAM,MAAM,UAAU,MAAM,IAAI,EAAE;AAClC,QAAM,YAAY,GAAG,IAAI,IAAI,KAAK,IAAI,GAAG,IAAI,EAAE,IAAI,GAAG,IAAI,GAAG;AAC7D,SAAO,CAAC,IAAI,KAAK,SAAS;AAC3B;AAEO,SAAS,mBACf,QACkB;AAClB,MAAI;AACJ,MAAI,OAAO,WAAW,UAAU;AAC/B,UAAM,YAAY,KAAK;AAAA,UACtB,iCAAa,0BAAQ,KAAK,MAAM,GAAG,MAAM;AAAA,IAC1C;AACA,gBAAY,UAAU;AAAA,EACvB,OAAO;AACN,gBAAY,OAAO;AAAA,EACpB;AAEA,MAAI,CAAC,WAAW;AACf,UAAM,IAAI,MAAM,6BAA6B;AAAA,EAC9C;AAEA,QAAM,mBAAoC,CAAC;AAC3C,QAAM,uBAAmB,4BAAY,SAAS,EAAE,OAAO,CAAC,WAAO,8BAAU,uBAAK,WAAW,EAAE,CAAC,EAAE,YAAY,CAAC;AAC3G,aAAW,mBAAmB,kBAAkB;AAC/C,UAAM,uBAAmB,uBAAK,WAAW,iBAAiB,eAAe;AACzE,QAAI,KAAC,2BAAW,gBAAgB,GAAG;AAClC,cAAQ,MAAM,sCAAsC,gBAAgB;AACpE;AAAA,IACD;AAEA,UAAM,OAAO,kBAAkB,gBAAgB,MAAM,GAAG,EAAE,CAAC;AAE3D,UAAM,YAAQ,6BAAa,kBAAkB,MAAM;AACnD,UAAM,SAAS,MAAM,MAAM,0BAA0B;AAErD,qBAAiB,KAAK;AAAA,MACrB,KAAK;AAAA,MACL,cAAc;AAAA,MACd,UAAM,+BAAW,QAAQ,EAAE,OAAO,KAAK,EAAE,OAAO,KAAK;AAAA,IACtD,CAAC;AAAA,EACF;AAEA,SAAO;AACR;","names":[]}
1
+ {"version":3,"sources":["../src/migrator.ts"],"sourcesContent":["import crypto from 'node:crypto';\nimport fs from 'node:fs';\nimport path from 'node:path';\n\nexport interface KitConfig {\n\tout: string;\n\tschema: string;\n}\n\nexport interface MigrationConfig {\n\tmigrationsFolder: string;\n\tmigrationsTable?: string;\n\tmigrationsSchema?: string;\n}\n\nexport interface MigrationMeta {\n\tsql: string[];\n\tfolderMillis: number;\n\thash: string;\n\tbps: boolean;\n}\n\nexport function readMigrationFiles(config: string | MigrationConfig): MigrationMeta[] {\n\tlet migrationFolderTo: string | undefined;\n\tif (typeof config === 'string') {\n\t\tconst configAsString = fs.readFileSync(path.resolve('.', config), 'utf8');\n\t\tconst jsonConfig = JSON.parse(configAsString) as KitConfig;\n\t\tmigrationFolderTo = jsonConfig.out;\n\t} else {\n\t\tmigrationFolderTo = config.migrationsFolder;\n\t}\n\n\tif (!migrationFolderTo) {\n\t\tthrow new Error('no migration folder defined');\n\t}\n\n\tconst migrationQueries: MigrationMeta[] = [];\n\n\tconst journalPath = `${migrationFolderTo}/meta/_journal.json`;\n\tif (!fs.existsSync(journalPath)) {\n\t\tthrow new Error(`Can't find meta/_journal.json file`);\n\t}\n\n\tconst journalAsString = fs.readFileSync(`${migrationFolderTo}/meta/_journal.json`).toString();\n\n\tconst journal = JSON.parse(journalAsString) as {\n\t\tentries: { idx: number; when: number; tag: string; breakpoints: boolean }[];\n\t};\n\n\tfor (const journalEntry of journal.entries) {\n\t\tconst migrationPath = `${migrationFolderTo}/${journalEntry.tag}.sql`;\n\n\t\ttry {\n\t\t\tconst query = fs.readFileSync(`${migrationFolderTo}/${journalEntry.tag}.sql`).toString();\n\n\t\t\tconst result = query.split('--> statement-breakpoint').map((it) => {\n\t\t\t\treturn it;\n\t\t\t});\n\n\t\t\tmigrationQueries.push({\n\t\t\t\tsql: result,\n\t\t\t\tbps: journalEntry.breakpoints,\n\t\t\t\tfolderMillis: journalEntry.when,\n\t\t\t\thash: crypto.createHash('sha256').update(query).digest('hex'),\n\t\t\t});\n\t\t} catch {\n\t\t\tthrow new Error(`No file ${migrationPath} found in ${migrationFolderTo} folder`);\n\t\t}\n\t}\n\n\treturn migrationQueries;\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,yBAAmB;AACnB,qBAAe;AACf,uBAAiB;AAoBV,SAAS,mBAAmB,QAAmD;AACrF,MAAI;AACJ,MAAI,OAAO,WAAW,UAAU;AAC/B,UAAM,iBAAiB,eAAAA,QAAG,aAAa,iBAAAC,QAAK,QAAQ,KAAK,MAAM,GAAG,MAAM;AACxE,UAAM,aAAa,KAAK,MAAM,cAAc;AAC5C,wBAAoB,WAAW;AAAA,EAChC,OAAO;AACN,wBAAoB,OAAO;AAAA,EAC5B;AAEA,MAAI,CAAC,mBAAmB;AACvB,UAAM,IAAI,MAAM,6BAA6B;AAAA,EAC9C;AAEA,QAAM,mBAAoC,CAAC;AAE3C,QAAM,cAAc,GAAG,iBAAiB;AACxC,MAAI,CAAC,eAAAD,QAAG,WAAW,WAAW,GAAG;AAChC,UAAM,IAAI,MAAM,oCAAoC;AAAA,EACrD;AAEA,QAAM,kBAAkB,eAAAA,QAAG,aAAa,GAAG,iBAAiB,qBAAqB,EAAE,SAAS;AAE5F,QAAM,UAAU,KAAK,MAAM,eAAe;AAI1C,aAAW,gBAAgB,QAAQ,SAAS;AAC3C,UAAM,gBAAgB,GAAG,iBAAiB,IAAI,aAAa,GAAG;AAE9D,QAAI;AACH,YAAM,QAAQ,eAAAA,QAAG,aAAa,GAAG,iBAAiB,IAAI,aAAa,GAAG,MAAM,EAAE,SAAS;AAEvF,YAAM,SAAS,MAAM,MAAM,0BAA0B,EAAE,IAAI,CAAC,OAAO;AAClE,eAAO;AAAA,MACR,CAAC;AAED,uBAAiB,KAAK;AAAA,QACrB,KAAK;AAAA,QACL,KAAK,aAAa;AAAA,QAClB,cAAc,aAAa;AAAA,QAC3B,MAAM,mBAAAE,QAAO,WAAW,QAAQ,EAAE,OAAO,KAAK,EAAE,OAAO,KAAK;AAAA,MAC7D,CAAC;AAAA,IACF,QAAQ;AACP,YAAM,IAAI,MAAM,WAAW,aAAa,aAAa,iBAAiB,SAAS;AAAA,IAChF;AAAA,EACD;AAEA,SAAO;AACR;","names":["fs","path","crypto"]}
package/migrator.d.cts CHANGED
@@ -11,5 +11,6 @@ export interface MigrationMeta {
11
11
  sql: string[];
12
12
  folderMillis: number;
13
13
  hash: string;
14
+ bps: boolean;
14
15
  }
15
16
  export declare function readMigrationFiles(config: string | MigrationConfig): MigrationMeta[];
package/migrator.d.ts CHANGED
@@ -11,5 +11,6 @@ export interface MigrationMeta {
11
11
  sql: string[];
12
12
  folderMillis: number;
13
13
  hash: string;
14
+ bps: boolean;
14
15
  }
15
16
  export declare function readMigrationFiles(config: string | MigrationConfig): MigrationMeta[];
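Both declaration files gain the required bps field on MigrationMeta, so any code that builds migration metadata by hand must now supply it. A minimal hand-written example (all values illustrative; readMigrationFiles normally produces these objects):

// MigrationMeta as declared after this change, restated locally for the sketch.
interface MigrationMeta {
  sql: string[];
  folderMillis: number;
  hash: string;
  bps: boolean;
}

const meta: MigrationMeta = {
  sql: ['CREATE TABLE `users` (`id` integer PRIMARY KEY);'],
  folderMillis: 1704067200000, // illustrative timestamp
  hash: '<sha256 of the .sql file>', // placeholder; real value is a hex digest
  bps: true, // new required field, taken from the journal entry's `breakpoints`
};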
package/migrator.js CHANGED
@@ -1,45 +1,41 @@
1
- import { createHash } from "node:crypto";
2
- import { existsSync, lstatSync, readdirSync, readFileSync } from "node:fs";
3
- import { join, resolve } from "node:path";
4
- const timestampToMillis = (timestamp) => {
5
- const year = timestamp.slice(0, 4);
6
- const month = timestamp.slice(4, 6);
7
- const day = timestamp.slice(6, 8);
8
- const hr = timestamp.slice(8, 10);
9
- const min = timestamp.slice(10, 12);
10
- const sec = timestamp.slice(12, 14);
11
- const isoString = `${year}-${month}-${day}T${hr}:${min}:${sec}.000Z`;
12
- return +new Date(isoString);
13
- };
1
+ import crypto from "node:crypto";
2
+ import fs from "node:fs";
3
+ import path from "node:path";
14
4
  function readMigrationFiles(config) {
15
- let outFolder;
5
+ let migrationFolderTo;
16
6
  if (typeof config === "string") {
17
- const kitConfig = JSON.parse(
18
- readFileSync(resolve(".", config), "utf8")
19
- );
20
- outFolder = kitConfig.out;
7
+ const configAsString = fs.readFileSync(path.resolve(".", config), "utf8");
8
+ const jsonConfig = JSON.parse(configAsString);
9
+ migrationFolderTo = jsonConfig.out;
21
10
  } else {
22
- outFolder = config.migrationsFolder;
11
+ migrationFolderTo = config.migrationsFolder;
23
12
  }
24
- if (!outFolder) {
13
+ if (!migrationFolderTo) {
25
14
  throw new Error("no migration folder defined");
26
15
  }
27
16
  const migrationQueries = [];
28
- const migrationFolders = readdirSync(outFolder).filter((it) => lstatSync(join(outFolder, it)).isDirectory());
29
- for (const migrationFolder of migrationFolders) {
30
- const sqlMigrationPath = join(outFolder, migrationFolder, "migration.sql");
31
- if (!existsSync(sqlMigrationPath)) {
32
- console.error("SQL migration file does not exist:", sqlMigrationPath);
33
- continue;
17
+ const journalPath = `${migrationFolderTo}/meta/_journal.json`;
18
+ if (!fs.existsSync(journalPath)) {
19
+ throw new Error(`Can't find meta/_journal.json file`);
20
+ }
21
+ const journalAsString = fs.readFileSync(`${migrationFolderTo}/meta/_journal.json`).toString();
22
+ const journal = JSON.parse(journalAsString);
23
+ for (const journalEntry of journal.entries) {
24
+ const migrationPath = `${migrationFolderTo}/${journalEntry.tag}.sql`;
25
+ try {
26
+ const query = fs.readFileSync(`${migrationFolderTo}/${journalEntry.tag}.sql`).toString();
27
+ const result = query.split("--> statement-breakpoint").map((it) => {
28
+ return it;
29
+ });
30
+ migrationQueries.push({
31
+ sql: result,
32
+ bps: journalEntry.breakpoints,
33
+ folderMillis: journalEntry.when,
34
+ hash: crypto.createHash("sha256").update(query).digest("hex")
35
+ });
36
+ } catch {
37
+ throw new Error(`No file ${migrationPath} found in ${migrationFolderTo} folder`);
34
38
  }
35
- const when = timestampToMillis(migrationFolder.slice(0, 14));
36
- const query = readFileSync(sqlMigrationPath, "utf8");
37
- const result = query.split("--> statement-breakpoint");
38
- migrationQueries.push({
39
- sql: result,
40
- folderMillis: when,
41
- hash: createHash("sha256").update(query).digest("hex")
42
- });
43
39
  }
44
40
  return migrationQueries;
45
41
  }
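The ESM build mirrors the CJS rewrite above. A hedged usage sketch, assuming the migrator entry point is importable as 'drizzle-orm/migrator' and that ./drizzle is a drizzle-kit output folder containing the meta/_journal.json layout sketched earlier:

import { readMigrationFiles } from 'drizzle-orm/migrator';

// Throws if ./drizzle/meta/_journal.json is missing, or if any <tag>.sql
// listed in the journal cannot be read.
const migrations = readMigrationFiles({ migrationsFolder: './drizzle' });

for (const migration of migrations) {
  // Each entry now carries bps (journal `breakpoints`) alongside sql,
  // folderMillis and the sha256 hash of the file contents.
  console.log(migration.folderMillis, migration.bps, migration.sql.length);
}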
package/migrator.js.map CHANGED
@@ -1 +1 @@
1
- {"version":3,"sources":["../src/migrator.ts"],"sourcesContent":["import { createHash } from 'node:crypto';\nimport { existsSync, lstatSync, readdirSync, readFileSync } from 'node:fs';\nimport { join, resolve } from 'node:path';\n\nexport interface KitConfig {\n\tout: string;\n\tschema: string;\n}\n\nexport interface MigrationConfig {\n\tmigrationsFolder: string;\n\tmigrationsTable?: string;\n\tmigrationsSchema?: string;\n}\n\nexport interface MigrationMeta {\n\tsql: string[];\n\tfolderMillis: number;\n\thash: string;\n}\n\nconst timestampToMillis = (timestamp: string) => {\n\tconst year = timestamp.slice(0, 4);\n\tconst month = timestamp.slice(4, 6);\n\tconst day = timestamp.slice(6, 8);\n\tconst hr = timestamp.slice(8, 10);\n\tconst min = timestamp.slice(10, 12);\n\tconst sec = timestamp.slice(12, 14);\n\tconst isoString = `${year}-${month}-${day}T${hr}:${min}:${sec}.000Z`;\n\treturn +new Date(isoString);\n};\n\nexport function readMigrationFiles(\n\tconfig: string | MigrationConfig,\n): MigrationMeta[] {\n\tlet outFolder: string | undefined;\n\tif (typeof config === 'string') {\n\t\tconst kitConfig = JSON.parse(\n\t\t\treadFileSync(resolve('.', config), 'utf8'),\n\t\t) as KitConfig;\n\t\toutFolder = kitConfig.out;\n\t} else {\n\t\toutFolder = config.migrationsFolder;\n\t}\n\n\tif (!outFolder) {\n\t\tthrow new Error('no migration folder defined');\n\t}\n\n\tconst migrationQueries: MigrationMeta[] = [];\n\tconst migrationFolders = readdirSync(outFolder).filter((it) => lstatSync(join(outFolder, it)).isDirectory());\n\tfor (const migrationFolder of migrationFolders) {\n\t\tconst sqlMigrationPath = join(outFolder, migrationFolder, 'migration.sql');\n\t\tif (!existsSync(sqlMigrationPath)) {\n\t\t\tconsole.error('SQL migration file does not exist:', sqlMigrationPath);\n\t\t\tcontinue;\n\t\t}\n\n\t\tconst when = timestampToMillis(migrationFolder.slice(0, 14));\n\n\t\tconst query = readFileSync(sqlMigrationPath, 'utf8');\n\t\tconst result = query.split('--> statement-breakpoint');\n\n\t\tmigrationQueries.push({\n\t\t\tsql: result,\n\t\t\tfolderMillis: when,\n\t\t\thash: createHash('sha256').update(query).digest('hex'),\n\t\t});\n\t}\n\n\treturn migrationQueries;\n}\n"],"mappings":"AAAA,SAAS,kBAAkB;AAC3B,SAAS,YAAY,WAAW,aAAa,oBAAoB;AACjE,SAAS,MAAM,eAAe;AAmB9B,MAAM,oBAAoB,CAAC,cAAsB;AAChD,QAAM,OAAO,UAAU,MAAM,GAAG,CAAC;AACjC,QAAM,QAAQ,UAAU,MAAM,GAAG,CAAC;AAClC,QAAM,MAAM,UAAU,MAAM,GAAG,CAAC;AAChC,QAAM,KAAK,UAAU,MAAM,GAAG,EAAE;AAChC,QAAM,MAAM,UAAU,MAAM,IAAI,EAAE;AAClC,QAAM,MAAM,UAAU,MAAM,IAAI,EAAE;AAClC,QAAM,YAAY,GAAG,IAAI,IAAI,KAAK,IAAI,GAAG,IAAI,EAAE,IAAI,GAAG,IAAI,GAAG;AAC7D,SAAO,CAAC,IAAI,KAAK,SAAS;AAC3B;AAEO,SAAS,mBACf,QACkB;AAClB,MAAI;AACJ,MAAI,OAAO,WAAW,UAAU;AAC/B,UAAM,YAAY,KAAK;AAAA,MACtB,aAAa,QAAQ,KAAK,MAAM,GAAG,MAAM;AAAA,IAC1C;AACA,gBAAY,UAAU;AAAA,EACvB,OAAO;AACN,gBAAY,OAAO;AAAA,EACpB;AAEA,MAAI,CAAC,WAAW;AACf,UAAM,IAAI,MAAM,6BAA6B;AAAA,EAC9C;AAEA,QAAM,mBAAoC,CAAC;AAC3C,QAAM,mBAAmB,YAAY,SAAS,EAAE,OAAO,CAAC,OAAO,UAAU,KAAK,WAAW,EAAE,CAAC,EAAE,YAAY,CAAC;AAC3G,aAAW,mBAAmB,kBAAkB;AAC/C,UAAM,mBAAmB,KAAK,WAAW,iBAAiB,eAAe;AACzE,QAAI,CAAC,WAAW,gBAAgB,GAAG;AAClC,cAAQ,MAAM,sCAAsC,gBAAgB;AACpE;AAAA,IACD;AAEA,UAAM,OAAO,kBAAkB,gBAAgB,MAAM,GAAG,EAAE,CAAC;AAE3D,UAAM,QAAQ,aAAa,kBAAkB,MAAM;AACnD,UAAM,SAAS,MAAM,MAAM,0BAA0B;AAErD,qBAAiB,KAAK;AAAA,MACrB,KAAK;AAAA,MACL,cAAc;AAAA,MACd,MAAM,WAAW,QAAQ,EAAE,OAAO,KAAK,EAAE,OAAO,KAAK;AAAA,IACtD,CAAC;AAAA,EACF;AAEA,SAAO;AACR;","names":[]}
+ {"version":3,"sources":["../src/migrator.ts"],"sourcesContent":["import crypto from 'node:crypto';\nimport fs from 'node:fs';\nimport path from 'node:path';\n\nexport interface KitConfig {\n\tout: string;\n\tschema: string;\n}\n\nexport interface MigrationConfig {\n\tmigrationsFolder: string;\n\tmigrationsTable?: string;\n\tmigrationsSchema?: string;\n}\n\nexport interface MigrationMeta {\n\tsql: string[];\n\tfolderMillis: number;\n\thash: string;\n\tbps: boolean;\n}\n\nexport function readMigrationFiles(config: string | MigrationConfig): MigrationMeta[] {\n\tlet migrationFolderTo: string | undefined;\n\tif (typeof config === 'string') {\n\t\tconst configAsString = fs.readFileSync(path.resolve('.', config), 'utf8');\n\t\tconst jsonConfig = JSON.parse(configAsString) as KitConfig;\n\t\tmigrationFolderTo = jsonConfig.out;\n\t} else {\n\t\tmigrationFolderTo = config.migrationsFolder;\n\t}\n\n\tif (!migrationFolderTo) {\n\t\tthrow new Error('no migration folder defined');\n\t}\n\n\tconst migrationQueries: MigrationMeta[] = [];\n\n\tconst journalPath = `${migrationFolderTo}/meta/_journal.json`;\n\tif (!fs.existsSync(journalPath)) {\n\t\tthrow new Error(`Can't find meta/_journal.json file`);\n\t}\n\n\tconst journalAsString = fs.readFileSync(`${migrationFolderTo}/meta/_journal.json`).toString();\n\n\tconst journal = JSON.parse(journalAsString) as {\n\t\tentries: { idx: number; when: number; tag: string; breakpoints: boolean }[];\n\t};\n\n\tfor (const journalEntry of journal.entries) {\n\t\tconst migrationPath = `${migrationFolderTo}/${journalEntry.tag}.sql`;\n\n\t\ttry {\n\t\t\tconst query = fs.readFileSync(`${migrationFolderTo}/${journalEntry.tag}.sql`).toString();\n\n\t\t\tconst result = query.split('--> statement-breakpoint').map((it) => {\n\t\t\t\treturn it;\n\t\t\t});\n\n\t\t\tmigrationQueries.push({\n\t\t\t\tsql: result,\n\t\t\t\tbps: journalEntry.breakpoints,\n\t\t\t\tfolderMillis: journalEntry.when,\n\t\t\t\thash: crypto.createHash('sha256').update(query).digest('hex'),\n\t\t\t});\n\t\t} catch {\n\t\t\tthrow new Error(`No file ${migrationPath} found in ${migrationFolderTo} folder`);\n\t\t}\n\t}\n\n\treturn migrationQueries;\n}\n"],"mappings":"AAAA,OAAO,YAAY;AACnB,OAAO,QAAQ;AACf,OAAO,UAAU;AAoBV,SAAS,mBAAmB,QAAmD;AACrF,MAAI;AACJ,MAAI,OAAO,WAAW,UAAU;AAC/B,UAAM,iBAAiB,GAAG,aAAa,KAAK,QAAQ,KAAK,MAAM,GAAG,MAAM;AACxE,UAAM,aAAa,KAAK,MAAM,cAAc;AAC5C,wBAAoB,WAAW;AAAA,EAChC,OAAO;AACN,wBAAoB,OAAO;AAAA,EAC5B;AAEA,MAAI,CAAC,mBAAmB;AACvB,UAAM,IAAI,MAAM,6BAA6B;AAAA,EAC9C;AAEA,QAAM,mBAAoC,CAAC;AAE3C,QAAM,cAAc,GAAG,iBAAiB;AACxC,MAAI,CAAC,GAAG,WAAW,WAAW,GAAG;AAChC,UAAM,IAAI,MAAM,oCAAoC;AAAA,EACrD;AAEA,QAAM,kBAAkB,GAAG,aAAa,GAAG,iBAAiB,qBAAqB,EAAE,SAAS;AAE5F,QAAM,UAAU,KAAK,MAAM,eAAe;AAI1C,aAAW,gBAAgB,QAAQ,SAAS;AAC3C,UAAM,gBAAgB,GAAG,iBAAiB,IAAI,aAAa,GAAG;AAE9D,QAAI;AACH,YAAM,QAAQ,GAAG,aAAa,GAAG,iBAAiB,IAAI,aAAa,GAAG,MAAM,EAAE,SAAS;AAEvF,YAAM,SAAS,MAAM,MAAM,0BAA0B,EAAE,IAAI,CAAC,OAAO;AAClE,eAAO;AAAA,MACR,CAAC;AAED,uBAAiB,KAAK;AAAA,QACrB,KAAK;AAAA,QACL,KAAK,aAAa;AAAA,QAClB,cAAc,aAAa;AAAA,QAC3B,MAAM,OAAO,WAAW,QAAQ,EAAE,OAAO,KAAK,EAAE,OAAO,KAAK;AAAA,MAC7D,CAAC;AAAA,IACF,QAAQ;AACP,YAAM,IAAI,MAAM,WAAW,aAAa,aAAa,iBAAiB,SAAS;AAAA,IAChF;AAAA,EACD;AAEA,SAAO;AACR;","names":[]}
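For orientation, a hedged usage sketch of the reworked Node reader shown in the map above: it now locates meta/_journal.json inside the migrations folder, resolves each journal entry's <tag>.sql file, and returns metadata that includes the new bps flag. The ./drizzle folder and the drizzle-orm/migrator import path are illustrative assumptions, not something this diff asserts.

import { readMigrationFiles } from 'drizzle-orm/migrator';

// Assumed drizzle-kit output folder containing meta/_journal.json
// plus one <tag>.sql file per journal entry.
const metas = readMigrationFiles({ migrationsFolder: './drizzle' });

for (const meta of metas) {
  // sql: statements split on "--> statement-breakpoint"
  // bps: the journal entry's breakpoints flag (new in this version)
  // hash: sha256 of the raw migration file contents
  console.log(meta.folderMillis, meta.bps, meta.sql.length, meta.hash);
}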
@@ -26,10 +26,9 @@ var import_react = require("react");
  async function readMigrationFiles({ journal, migrations }) {
  const migrationQueries = [];
  for await (const journalEntry of journal.entries) {
- const name = `m${journalEntry.idx.toString().padStart(4, "0")}`;
- const query = migrations[name];
+ const query = migrations[`m${journalEntry.idx.toString().padStart(4, "0")}`];
  if (!query) {
- throw new Error(`Missing migration: ${name}`);
+ throw new Error(`Missing migration: ${journalEntry.tag}`);
  }
  try {
  const result = query.split("--> statement-breakpoint").map((it) => {
@@ -37,11 +36,12 @@ async function readMigrationFiles({ journal, migrations }) {
  });
  migrationQueries.push({
  sql: result,
+ bps: journalEntry.breakpoints,
  folderMillis: journalEntry.when,
  hash: ""
  });
  } catch {
- throw new Error(`Failed to parse migration: ${name}`);
+ throw new Error(`Failed to parse migration: ${journalEntry.tag}`);
  }
  }
  return migrationQueries;
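The compiled reader above consumes a bundled { journal, migrations } object rather than the filesystem, looks each migration up by a zero-padded index key, and now reports the journal entry's tag in its errors while carrying the breakpoints flag through as bps. A minimal sketch of that input shape, with purely illustrative tag and SQL values:

// Hypothetical bundle matching the { journal, migrations } shape read above.
const bundledMigrations = {
  journal: {
    entries: [
      { idx: 0, when: 1712345678901, tag: '0000_example_users', breakpoints: true },
    ],
  },
  migrations: {
    // Keyed as migrations[`m${journalEntry.idx.toString().padStart(4, '0')}`].
    m0000: 'CREATE TABLE `users` (`id` integer PRIMARY KEY);--> statement-breakpoint\nCREATE INDEX `users_id_idx` ON `users` (`id`);',
  },
};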
@@ -1 +1 @@
- {"version":3,"sources":["../../src/op-sqlite/migrator.ts"],"sourcesContent":["import { useEffect, useReducer } from 'react';\nimport type { MigrationMeta } from '~/migrator.ts';\nimport type { OPSQLiteDatabase } from './driver.ts';\n\ninterface MigrationConfig {\n\tjournal: {\n\t\tentries: { idx: number; when: number; tag: string; breakpoints: boolean }[];\n\t};\n\tmigrations: Record<string, string>;\n}\n\nasync function readMigrationFiles({ journal, migrations }: MigrationConfig): Promise<MigrationMeta[]> {\n\tconst migrationQueries: MigrationMeta[] = [];\n\n\tfor await (const journalEntry of journal.entries) {\n\t\tconst name = `m${journalEntry.idx.toString().padStart(4, '0')}`;\n\t\tconst query = migrations[name];\n\n\t\tif (!query) {\n\t\t\tthrow new Error(`Missing migration: ${name}`);\n\t\t}\n\n\t\ttry {\n\t\t\tconst result = query.split('--> statement-breakpoint').map((it) => {\n\t\t\t\treturn it;\n\t\t\t});\n\n\t\t\tmigrationQueries.push({\n\t\t\t\tsql: result,\n\t\t\t\tfolderMillis: journalEntry.when,\n\t\t\t\thash: '',\n\t\t\t});\n\t\t} catch {\n\t\t\tthrow new Error(`Failed to parse migration: ${name}`);\n\t\t}\n\t}\n\n\treturn migrationQueries;\n}\n\nexport async function migrate<TSchema extends Record<string, unknown>>(\n\tdb: OPSQLiteDatabase<TSchema>,\n\tconfig: MigrationConfig,\n) {\n\tconst migrations = await readMigrationFiles(config);\n\treturn db.dialect.migrate(migrations, db.session);\n}\n\ninterface State {\n\tsuccess: boolean;\n\terror?: Error;\n}\n\ntype Action =\n\t| { type: 'migrating' }\n\t| { type: 'migrated'; payload: true }\n\t| { type: 'error'; payload: Error };\n\nexport const useMigrations = (db: OPSQLiteDatabase<any>, migrations: {\n\tjournal: {\n\t\tentries: { idx: number; when: number; tag: string; breakpoints: boolean }[];\n\t};\n\tmigrations: Record<string, string>;\n}): State => {\n\tconst initialState: State = {\n\t\tsuccess: false,\n\t\terror: undefined,\n\t};\n\n\tconst fetchReducer = (state: State, action: Action): State => {\n\t\tswitch (action.type) {\n\t\t\tcase 'migrating': {\n\t\t\t\treturn { ...initialState };\n\t\t\t}\n\t\t\tcase 'migrated': {\n\t\t\t\treturn { ...initialState, success: action.payload };\n\t\t\t}\n\t\t\tcase 'error': {\n\t\t\t\treturn { ...initialState, error: action.payload };\n\t\t\t}\n\t\t\tdefault: {\n\t\t\t\treturn state;\n\t\t\t}\n\t\t}\n\t};\n\n\tconst [state, dispatch] = useReducer(fetchReducer, initialState);\n\n\tuseEffect(() => {\n\t\tdispatch({ type: 'migrating' });\n\t\tmigrate(db, migrations).then(() => {\n\t\t\tdispatch({ type: 'migrated', payload: true });\n\t\t}).catch((error) => {\n\t\t\tdispatch({ type: 'error', payload: error as Error });\n\t\t});\n\t}, []);\n\n\treturn 
state;\n};\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,mBAAsC;AAWtC,eAAe,mBAAmB,EAAE,SAAS,WAAW,GAA8C;AACrG,QAAM,mBAAoC,CAAC;AAE3C,mBAAiB,gBAAgB,QAAQ,SAAS;AACjD,UAAM,OAAO,IAAI,aAAa,IAAI,SAAS,EAAE,SAAS,GAAG,GAAG,CAAC;AAC7D,UAAM,QAAQ,WAAW,IAAI;AAE7B,QAAI,CAAC,OAAO;AACX,YAAM,IAAI,MAAM,sBAAsB,IAAI,EAAE;AAAA,IAC7C;AAEA,QAAI;AACH,YAAM,SAAS,MAAM,MAAM,0BAA0B,EAAE,IAAI,CAAC,OAAO;AAClE,eAAO;AAAA,MACR,CAAC;AAED,uBAAiB,KAAK;AAAA,QACrB,KAAK;AAAA,QACL,cAAc,aAAa;AAAA,QAC3B,MAAM;AAAA,MACP,CAAC;AAAA,IACF,QAAQ;AACP,YAAM,IAAI,MAAM,8BAA8B,IAAI,EAAE;AAAA,IACrD;AAAA,EACD;AAEA,SAAO;AACR;AAEA,eAAsB,QACrB,IACA,QACC;AACD,QAAM,aAAa,MAAM,mBAAmB,MAAM;AAClD,SAAO,GAAG,QAAQ,QAAQ,YAAY,GAAG,OAAO;AACjD;AAYO,MAAM,gBAAgB,CAAC,IAA2B,eAK5C;AACZ,QAAM,eAAsB;AAAA,IAC3B,SAAS;AAAA,IACT,OAAO;AAAA,EACR;AAEA,QAAM,eAAe,CAACA,QAAc,WAA0B;AAC7D,YAAQ,OAAO,MAAM;AAAA,MACpB,KAAK,aAAa;AACjB,eAAO,EAAE,GAAG,aAAa;AAAA,MAC1B;AAAA,MACA,KAAK,YAAY;AAChB,eAAO,EAAE,GAAG,cAAc,SAAS,OAAO,QAAQ;AAAA,MACnD;AAAA,MACA,KAAK,SAAS;AACb,eAAO,EAAE,GAAG,cAAc,OAAO,OAAO,QAAQ;AAAA,MACjD;AAAA,MACA,SAAS;AACR,eAAOA;AAAA,MACR;AAAA,IACD;AAAA,EACD;AAEA,QAAM,CAAC,OAAO,QAAQ,QAAI,yBAAW,cAAc,YAAY;AAE/D,8BAAU,MAAM;AACf,aAAS,EAAE,MAAM,YAAY,CAAC;AAC9B,YAAQ,IAAI,UAAU,EAAE,KAAK,MAAM;AAClC,eAAS,EAAE,MAAM,YAAY,SAAS,KAAK,CAAC;AAAA,IAC7C,CAAC,EAAE,MAAM,CAAC,UAAU;AACnB,eAAS,EAAE,MAAM,SAAS,SAAS,MAAe,CAAC;AAAA,IACpD,CAAC;AAAA,EACF,GAAG,CAAC,CAAC;AAEL,SAAO;AACR;","names":["state"]}
+ {"version":3,"sources":["../../src/op-sqlite/migrator.ts"],"sourcesContent":["import { useEffect, useReducer } from 'react';\nimport type { MigrationMeta } from '~/migrator.ts';\nimport type { OPSQLiteDatabase } from './driver.ts';\n\ninterface MigrationConfig {\n\tjournal: {\n\t\tentries: { idx: number; when: number; tag: string; breakpoints: boolean }[];\n\t};\n\tmigrations: Record<string, string>;\n}\n\nasync function readMigrationFiles({ journal, migrations }: MigrationConfig): Promise<MigrationMeta[]> {\n\tconst migrationQueries: MigrationMeta[] = [];\n\n\tfor await (const journalEntry of journal.entries) {\n\t\tconst query = migrations[`m${journalEntry.idx.toString().padStart(4, '0')}`];\n\n\t\tif (!query) {\n\t\t\tthrow new Error(`Missing migration: ${journalEntry.tag}`);\n\t\t}\n\n\t\ttry {\n\t\t\tconst result = query.split('--> statement-breakpoint').map((it) => {\n\t\t\t\treturn it;\n\t\t\t});\n\n\t\t\tmigrationQueries.push({\n\t\t\t\tsql: result,\n\t\t\t\tbps: journalEntry.breakpoints,\n\t\t\t\tfolderMillis: journalEntry.when,\n\t\t\t\thash: '',\n\t\t\t});\n\t\t} catch {\n\t\t\tthrow new Error(`Failed to parse migration: ${journalEntry.tag}`);\n\t\t}\n\t}\n\n\treturn migrationQueries;\n}\n\nexport async function migrate<TSchema extends Record<string, unknown>>(\n\tdb: OPSQLiteDatabase<TSchema>,\n\tconfig: MigrationConfig,\n) {\n\tconst migrations = await readMigrationFiles(config);\n\treturn db.dialect.migrate(migrations, db.session);\n}\n\ninterface State {\n\tsuccess: boolean;\n\terror?: Error;\n}\n\ntype Action =\n\t| { type: 'migrating' }\n\t| { type: 'migrated'; payload: true }\n\t| { type: 'error'; payload: Error };\n\nexport const useMigrations = (db: OPSQLiteDatabase<any>, migrations: {\n\tjournal: {\n\t\tentries: { idx: number; when: number; tag: string; breakpoints: boolean }[];\n\t};\n\tmigrations: Record<string, string>;\n}): State => {\n\tconst initialState: State = {\n\t\tsuccess: false,\n\t\terror: undefined,\n\t};\n\n\tconst fetchReducer = (state: State, action: Action): State => {\n\t\tswitch (action.type) {\n\t\t\tcase 'migrating': {\n\t\t\t\treturn { ...initialState };\n\t\t\t}\n\t\t\tcase 'migrated': {\n\t\t\t\treturn { ...initialState, success: action.payload };\n\t\t\t}\n\t\t\tcase 'error': {\n\t\t\t\treturn { ...initialState, error: action.payload };\n\t\t\t}\n\t\t\tdefault: {\n\t\t\t\treturn state;\n\t\t\t}\n\t\t}\n\t};\n\n\tconst [state, dispatch] = useReducer(fetchReducer, initialState);\n\n\tuseEffect(() => {\n\t\tdispatch({ type: 'migrating' });\n\t\tmigrate(db, migrations).then(() => {\n\t\t\tdispatch({ type: 'migrated', payload: true });\n\t\t}).catch((error) => {\n\t\t\tdispatch({ type: 'error', payload: error as Error });\n\t\t});\n\t}, []);\n\n\treturn 
state;\n};\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,mBAAsC;AAWtC,eAAe,mBAAmB,EAAE,SAAS,WAAW,GAA8C;AACrG,QAAM,mBAAoC,CAAC;AAE3C,mBAAiB,gBAAgB,QAAQ,SAAS;AACjD,UAAM,QAAQ,WAAW,IAAI,aAAa,IAAI,SAAS,EAAE,SAAS,GAAG,GAAG,CAAC,EAAE;AAE3E,QAAI,CAAC,OAAO;AACX,YAAM,IAAI,MAAM,sBAAsB,aAAa,GAAG,EAAE;AAAA,IACzD;AAEA,QAAI;AACH,YAAM,SAAS,MAAM,MAAM,0BAA0B,EAAE,IAAI,CAAC,OAAO;AAClE,eAAO;AAAA,MACR,CAAC;AAED,uBAAiB,KAAK;AAAA,QACrB,KAAK;AAAA,QACL,KAAK,aAAa;AAAA,QAClB,cAAc,aAAa;AAAA,QAC3B,MAAM;AAAA,MACP,CAAC;AAAA,IACF,QAAQ;AACP,YAAM,IAAI,MAAM,8BAA8B,aAAa,GAAG,EAAE;AAAA,IACjE;AAAA,EACD;AAEA,SAAO;AACR;AAEA,eAAsB,QACrB,IACA,QACC;AACD,QAAM,aAAa,MAAM,mBAAmB,MAAM;AAClD,SAAO,GAAG,QAAQ,QAAQ,YAAY,GAAG,OAAO;AACjD;AAYO,MAAM,gBAAgB,CAAC,IAA2B,eAK5C;AACZ,QAAM,eAAsB;AAAA,IAC3B,SAAS;AAAA,IACT,OAAO;AAAA,EACR;AAEA,QAAM,eAAe,CAACA,QAAc,WAA0B;AAC7D,YAAQ,OAAO,MAAM;AAAA,MACpB,KAAK,aAAa;AACjB,eAAO,EAAE,GAAG,aAAa;AAAA,MAC1B;AAAA,MACA,KAAK,YAAY;AAChB,eAAO,EAAE,GAAG,cAAc,SAAS,OAAO,QAAQ;AAAA,MACnD;AAAA,MACA,KAAK,SAAS;AACb,eAAO,EAAE,GAAG,cAAc,OAAO,OAAO,QAAQ;AAAA,MACjD;AAAA,MACA,SAAS;AACR,eAAOA;AAAA,MACR;AAAA,IACD;AAAA,EACD;AAEA,QAAM,CAAC,OAAO,QAAQ,QAAI,yBAAW,cAAc,YAAY;AAE/D,8BAAU,MAAM;AACf,aAAS,EAAE,MAAM,YAAY,CAAC;AAC9B,YAAQ,IAAI,UAAU,EAAE,KAAK,MAAM;AAClC,eAAS,EAAE,MAAM,YAAY,SAAS,KAAK,CAAC;AAAA,IAC7C,CAAC,EAAE,MAAM,CAAC,UAAU;AACnB,eAAS,EAAE,MAAM,SAAS,SAAS,MAAe,CAAC;AAAA,IACpD,CAAC;AAAA,EACF,GAAG,CAAC,CAAC;AAEL,SAAO;AACR;","names":["state"]}
@@ -2,10 +2,9 @@ import { useEffect, useReducer } from "react";
  async function readMigrationFiles({ journal, migrations }) {
  const migrationQueries = [];
  for await (const journalEntry of journal.entries) {
- const name = `m${journalEntry.idx.toString().padStart(4, "0")}`;
- const query = migrations[name];
+ const query = migrations[`m${journalEntry.idx.toString().padStart(4, "0")}`];
  if (!query) {
- throw new Error(`Missing migration: ${name}`);
+ throw new Error(`Missing migration: ${journalEntry.tag}`);
  }
  try {
  const result = query.split("--> statement-breakpoint").map((it) => {
@@ -13,11 +12,12 @@ async function readMigrationFiles({ journal, migrations }) {
  });
  migrationQueries.push({
  sql: result,
+ bps: journalEntry.breakpoints,
  folderMillis: journalEntry.when,
  hash: ""
  });
  } catch {
- throw new Error(`Failed to parse migration: ${name}`);
+ throw new Error(`Failed to parse migration: ${journalEntry.tag}`);
  }
  }
  return migrationQueries;
@@ -1 +1 @@
- {"version":3,"sources":["../../src/op-sqlite/migrator.ts"],"sourcesContent":["import { useEffect, useReducer } from 'react';\nimport type { MigrationMeta } from '~/migrator.ts';\nimport type { OPSQLiteDatabase } from './driver.ts';\n\ninterface MigrationConfig {\n\tjournal: {\n\t\tentries: { idx: number; when: number; tag: string; breakpoints: boolean }[];\n\t};\n\tmigrations: Record<string, string>;\n}\n\nasync function readMigrationFiles({ journal, migrations }: MigrationConfig): Promise<MigrationMeta[]> {\n\tconst migrationQueries: MigrationMeta[] = [];\n\n\tfor await (const journalEntry of journal.entries) {\n\t\tconst name = `m${journalEntry.idx.toString().padStart(4, '0')}`;\n\t\tconst query = migrations[name];\n\n\t\tif (!query) {\n\t\t\tthrow new Error(`Missing migration: ${name}`);\n\t\t}\n\n\t\ttry {\n\t\t\tconst result = query.split('--> statement-breakpoint').map((it) => {\n\t\t\t\treturn it;\n\t\t\t});\n\n\t\t\tmigrationQueries.push({\n\t\t\t\tsql: result,\n\t\t\t\tfolderMillis: journalEntry.when,\n\t\t\t\thash: '',\n\t\t\t});\n\t\t} catch {\n\t\t\tthrow new Error(`Failed to parse migration: ${name}`);\n\t\t}\n\t}\n\n\treturn migrationQueries;\n}\n\nexport async function migrate<TSchema extends Record<string, unknown>>(\n\tdb: OPSQLiteDatabase<TSchema>,\n\tconfig: MigrationConfig,\n) {\n\tconst migrations = await readMigrationFiles(config);\n\treturn db.dialect.migrate(migrations, db.session);\n}\n\ninterface State {\n\tsuccess: boolean;\n\terror?: Error;\n}\n\ntype Action =\n\t| { type: 'migrating' }\n\t| { type: 'migrated'; payload: true }\n\t| { type: 'error'; payload: Error };\n\nexport const useMigrations = (db: OPSQLiteDatabase<any>, migrations: {\n\tjournal: {\n\t\tentries: { idx: number; when: number; tag: string; breakpoints: boolean }[];\n\t};\n\tmigrations: Record<string, string>;\n}): State => {\n\tconst initialState: State = {\n\t\tsuccess: false,\n\t\terror: undefined,\n\t};\n\n\tconst fetchReducer = (state: State, action: Action): State => {\n\t\tswitch (action.type) {\n\t\t\tcase 'migrating': {\n\t\t\t\treturn { ...initialState };\n\t\t\t}\n\t\t\tcase 'migrated': {\n\t\t\t\treturn { ...initialState, success: action.payload };\n\t\t\t}\n\t\t\tcase 'error': {\n\t\t\t\treturn { ...initialState, error: action.payload };\n\t\t\t}\n\t\t\tdefault: {\n\t\t\t\treturn state;\n\t\t\t}\n\t\t}\n\t};\n\n\tconst [state, dispatch] = useReducer(fetchReducer, initialState);\n\n\tuseEffect(() => {\n\t\tdispatch({ type: 'migrating' });\n\t\tmigrate(db, migrations).then(() => {\n\t\t\tdispatch({ type: 'migrated', payload: true });\n\t\t}).catch((error) => {\n\t\t\tdispatch({ type: 'error', payload: error as Error });\n\t\t});\n\t}, []);\n\n\treturn 
state;\n};\n"],"mappings":"AAAA,SAAS,WAAW,kBAAkB;AAWtC,eAAe,mBAAmB,EAAE,SAAS,WAAW,GAA8C;AACrG,QAAM,mBAAoC,CAAC;AAE3C,mBAAiB,gBAAgB,QAAQ,SAAS;AACjD,UAAM,OAAO,IAAI,aAAa,IAAI,SAAS,EAAE,SAAS,GAAG,GAAG,CAAC;AAC7D,UAAM,QAAQ,WAAW,IAAI;AAE7B,QAAI,CAAC,OAAO;AACX,YAAM,IAAI,MAAM,sBAAsB,IAAI,EAAE;AAAA,IAC7C;AAEA,QAAI;AACH,YAAM,SAAS,MAAM,MAAM,0BAA0B,EAAE,IAAI,CAAC,OAAO;AAClE,eAAO;AAAA,MACR,CAAC;AAED,uBAAiB,KAAK;AAAA,QACrB,KAAK;AAAA,QACL,cAAc,aAAa;AAAA,QAC3B,MAAM;AAAA,MACP,CAAC;AAAA,IACF,QAAQ;AACP,YAAM,IAAI,MAAM,8BAA8B,IAAI,EAAE;AAAA,IACrD;AAAA,EACD;AAEA,SAAO;AACR;AAEA,eAAsB,QACrB,IACA,QACC;AACD,QAAM,aAAa,MAAM,mBAAmB,MAAM;AAClD,SAAO,GAAG,QAAQ,QAAQ,YAAY,GAAG,OAAO;AACjD;AAYO,MAAM,gBAAgB,CAAC,IAA2B,eAK5C;AACZ,QAAM,eAAsB;AAAA,IAC3B,SAAS;AAAA,IACT,OAAO;AAAA,EACR;AAEA,QAAM,eAAe,CAACA,QAAc,WAA0B;AAC7D,YAAQ,OAAO,MAAM;AAAA,MACpB,KAAK,aAAa;AACjB,eAAO,EAAE,GAAG,aAAa;AAAA,MAC1B;AAAA,MACA,KAAK,YAAY;AAChB,eAAO,EAAE,GAAG,cAAc,SAAS,OAAO,QAAQ;AAAA,MACnD;AAAA,MACA,KAAK,SAAS;AACb,eAAO,EAAE,GAAG,cAAc,OAAO,OAAO,QAAQ;AAAA,MACjD;AAAA,MACA,SAAS;AACR,eAAOA;AAAA,MACR;AAAA,IACD;AAAA,EACD;AAEA,QAAM,CAAC,OAAO,QAAQ,IAAI,WAAW,cAAc,YAAY;AAE/D,YAAU,MAAM;AACf,aAAS,EAAE,MAAM,YAAY,CAAC;AAC9B,YAAQ,IAAI,UAAU,EAAE,KAAK,MAAM;AAClC,eAAS,EAAE,MAAM,YAAY,SAAS,KAAK,CAAC;AAAA,IAC7C,CAAC,EAAE,MAAM,CAAC,UAAU;AACnB,eAAS,EAAE,MAAM,SAAS,SAAS,MAAe,CAAC;AAAA,IACpD,CAAC;AAAA,EACF,GAAG,CAAC,CAAC;AAEL,SAAO;AACR;","names":["state"]}
+ {"version":3,"sources":["../../src/op-sqlite/migrator.ts"],"sourcesContent":["import { useEffect, useReducer } from 'react';\nimport type { MigrationMeta } from '~/migrator.ts';\nimport type { OPSQLiteDatabase } from './driver.ts';\n\ninterface MigrationConfig {\n\tjournal: {\n\t\tentries: { idx: number; when: number; tag: string; breakpoints: boolean }[];\n\t};\n\tmigrations: Record<string, string>;\n}\n\nasync function readMigrationFiles({ journal, migrations }: MigrationConfig): Promise<MigrationMeta[]> {\n\tconst migrationQueries: MigrationMeta[] = [];\n\n\tfor await (const journalEntry of journal.entries) {\n\t\tconst query = migrations[`m${journalEntry.idx.toString().padStart(4, '0')}`];\n\n\t\tif (!query) {\n\t\t\tthrow new Error(`Missing migration: ${journalEntry.tag}`);\n\t\t}\n\n\t\ttry {\n\t\t\tconst result = query.split('--> statement-breakpoint').map((it) => {\n\t\t\t\treturn it;\n\t\t\t});\n\n\t\t\tmigrationQueries.push({\n\t\t\t\tsql: result,\n\t\t\t\tbps: journalEntry.breakpoints,\n\t\t\t\tfolderMillis: journalEntry.when,\n\t\t\t\thash: '',\n\t\t\t});\n\t\t} catch {\n\t\t\tthrow new Error(`Failed to parse migration: ${journalEntry.tag}`);\n\t\t}\n\t}\n\n\treturn migrationQueries;\n}\n\nexport async function migrate<TSchema extends Record<string, unknown>>(\n\tdb: OPSQLiteDatabase<TSchema>,\n\tconfig: MigrationConfig,\n) {\n\tconst migrations = await readMigrationFiles(config);\n\treturn db.dialect.migrate(migrations, db.session);\n}\n\ninterface State {\n\tsuccess: boolean;\n\terror?: Error;\n}\n\ntype Action =\n\t| { type: 'migrating' }\n\t| { type: 'migrated'; payload: true }\n\t| { type: 'error'; payload: Error };\n\nexport const useMigrations = (db: OPSQLiteDatabase<any>, migrations: {\n\tjournal: {\n\t\tentries: { idx: number; when: number; tag: string; breakpoints: boolean }[];\n\t};\n\tmigrations: Record<string, string>;\n}): State => {\n\tconst initialState: State = {\n\t\tsuccess: false,\n\t\terror: undefined,\n\t};\n\n\tconst fetchReducer = (state: State, action: Action): State => {\n\t\tswitch (action.type) {\n\t\t\tcase 'migrating': {\n\t\t\t\treturn { ...initialState };\n\t\t\t}\n\t\t\tcase 'migrated': {\n\t\t\t\treturn { ...initialState, success: action.payload };\n\t\t\t}\n\t\t\tcase 'error': {\n\t\t\t\treturn { ...initialState, error: action.payload };\n\t\t\t}\n\t\t\tdefault: {\n\t\t\t\treturn state;\n\t\t\t}\n\t\t}\n\t};\n\n\tconst [state, dispatch] = useReducer(fetchReducer, initialState);\n\n\tuseEffect(() => {\n\t\tdispatch({ type: 'migrating' });\n\t\tmigrate(db, migrations).then(() => {\n\t\t\tdispatch({ type: 'migrated', payload: true });\n\t\t}).catch((error) => {\n\t\t\tdispatch({ type: 'error', payload: error as Error });\n\t\t});\n\t}, []);\n\n\treturn 
state;\n};\n"],"mappings":"AAAA,SAAS,WAAW,kBAAkB;AAWtC,eAAe,mBAAmB,EAAE,SAAS,WAAW,GAA8C;AACrG,QAAM,mBAAoC,CAAC;AAE3C,mBAAiB,gBAAgB,QAAQ,SAAS;AACjD,UAAM,QAAQ,WAAW,IAAI,aAAa,IAAI,SAAS,EAAE,SAAS,GAAG,GAAG,CAAC,EAAE;AAE3E,QAAI,CAAC,OAAO;AACX,YAAM,IAAI,MAAM,sBAAsB,aAAa,GAAG,EAAE;AAAA,IACzD;AAEA,QAAI;AACH,YAAM,SAAS,MAAM,MAAM,0BAA0B,EAAE,IAAI,CAAC,OAAO;AAClE,eAAO;AAAA,MACR,CAAC;AAED,uBAAiB,KAAK;AAAA,QACrB,KAAK;AAAA,QACL,KAAK,aAAa;AAAA,QAClB,cAAc,aAAa;AAAA,QAC3B,MAAM;AAAA,MACP,CAAC;AAAA,IACF,QAAQ;AACP,YAAM,IAAI,MAAM,8BAA8B,aAAa,GAAG,EAAE;AAAA,IACjE;AAAA,EACD;AAEA,SAAO;AACR;AAEA,eAAsB,QACrB,IACA,QACC;AACD,QAAM,aAAa,MAAM,mBAAmB,MAAM;AAClD,SAAO,GAAG,QAAQ,QAAQ,YAAY,GAAG,OAAO;AACjD;AAYO,MAAM,gBAAgB,CAAC,IAA2B,eAK5C;AACZ,QAAM,eAAsB;AAAA,IAC3B,SAAS;AAAA,IACT,OAAO;AAAA,EACR;AAEA,QAAM,eAAe,CAACA,QAAc,WAA0B;AAC7D,YAAQ,OAAO,MAAM;AAAA,MACpB,KAAK,aAAa;AACjB,eAAO,EAAE,GAAG,aAAa;AAAA,MAC1B;AAAA,MACA,KAAK,YAAY;AAChB,eAAO,EAAE,GAAG,cAAc,SAAS,OAAO,QAAQ;AAAA,MACnD;AAAA,MACA,KAAK,SAAS;AACb,eAAO,EAAE,GAAG,cAAc,OAAO,OAAO,QAAQ;AAAA,MACjD;AAAA,MACA,SAAS;AACR,eAAOA;AAAA,MACR;AAAA,IACD;AAAA,EACD;AAEA,QAAM,CAAC,OAAO,QAAQ,IAAI,WAAW,cAAc,YAAY;AAE/D,YAAU,MAAM;AACf,aAAS,EAAE,MAAM,YAAY,CAAC;AAC9B,YAAQ,IAAI,UAAU,EAAE,KAAK,MAAM;AAClC,eAAS,EAAE,MAAM,YAAY,SAAS,KAAK,CAAC;AAAA,IAC7C,CAAC,EAAE,MAAM,CAAC,UAAU;AACnB,eAAS,EAAE,MAAM,SAAS,SAAS,MAAe,CAAC;AAAA,IACpD,CAAC;AAAA,EACF,GAAG,CAAC,CAAC;AAEL,SAAO;AACR;","names":["state"]}
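The op-sqlite sources embedded in the maps above also expose the React useMigrations hook that applies these bundled migrations on mount. A hedged wiring sketch — the migrations import path, database name, and the assumption that drizzle-kit has emitted a { journal, migrations } bundle are all illustrative:

import type { ReactNode } from 'react';
import { open } from '@op-engineering/op-sqlite';
import { drizzle } from 'drizzle-orm/op-sqlite';
import { useMigrations } from 'drizzle-orm/op-sqlite/migrator';
// Hypothetical drizzle-kit-generated bundle ({ journal, migrations }).
import migrations from './drizzle/migrations';

const db = drizzle(open({ name: 'app.db' }));

export function MigrationGate({ children }: { children: ReactNode }) {
  // success flips to true once every journal entry has been applied;
  // error carries whatever migrate() rejected with.
  const { success, error } = useMigrations(db, migrations);
  if (error) throw error;
  return success ? children : null;
}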
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "drizzle-orm",
- "version": "0.33.0-b921e79",
+ "version": "0.33.0-c5d1196",
  "description": "Drizzle ORM package for SQL databases",
  "type": "module",
  "scripts": {
@@ -46,7 +46,7 @@
  "@aws-sdk/client-rds-data": ">=3",
  "@cloudflare/workers-types": ">=3",
  "@electric-sql/pglite": ">=0.1.1",
- "@libsql/client": "*",
+ "@libsql/client": ">=0.10.0",
  "@neondatabase/serverless": ">=0.1",
  "@op-engineering/op-sqlite": ">=2",
  "@opentelemetry/api": "^1.4.1",
@@ -161,7 +161,7 @@
  "@aws-sdk/client-rds-data": "^3.549.0",
  "@cloudflare/workers-types": "^4.20230904.0",
  "@electric-sql/pglite": "^0.1.1",
- "@libsql/client": "^0.5.6",
+ "@libsql/client": "^0.10.0",
  "@neondatabase/serverless": "^0.9.0",
  "@op-engineering/op-sqlite": "^2.0.16",
  "@opentelemetry/api": "^1.4.1",
package/version.cjs CHANGED
@@ -26,7 +26,7 @@ __export(version_exports, {
  module.exports = __toCommonJS(version_exports);

  // package.json
- var version = "0.33.0-b921e79";
+ var version = "0.33.0-c5d1196";

  // src/version.ts
  var compatibilityVersion = 8;
package/version.cjs.map CHANGED
@@ -1 +1 @@
- {"version":3,"sources":["../src/version.ts"],"sourcesContent":["// @ts-ignore - imported using Rollup json plugin\nexport { version as npmVersion } from '../package.json';\n\n// In version 7, we changed the PostgreSQL indexes API\n// v8 - new migrations folders structure\nexport const compatibilityVersion = 8;\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AACA,qBAAsC;AAI/B,MAAM,uBAAuB;","names":[]}
+ {"version":3,"sources":["../src/version.ts"],"sourcesContent":["// @ts-ignore - imported using Rollup json plugin\nexport { version as npmVersion } from '../package.json';\n// In version 7, we changed the PostgreSQL indexes API\nexport const compatibilityVersion = 8;\n"],"mappings":";;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AACA,qBAAsC;AAE/B,MAAM,uBAAuB;","names":[]}
package/version.d.cts CHANGED
@@ -1,4 +1,4 @@
- var version = "0.33.0-b921e79";
+ var version = "0.33.0-c5d1196";

  declare const compatibilityVersion = 8;

package/version.d.ts CHANGED
@@ -1,4 +1,4 @@
- var version = "0.33.0-b921e79";
+ var version = "0.33.0-c5d1196";

  declare const compatibilityVersion = 8;

package/version.js CHANGED
@@ -1,5 +1,5 @@
  // package.json
- var version = "0.33.0-b921e79";
+ var version = "0.33.0-c5d1196";

  // src/version.ts
  var compatibilityVersion = 8;
package/version.js.map CHANGED
@@ -1 +1 @@
- {"version":3,"sources":["../src/version.ts"],"sourcesContent":["// @ts-ignore - imported using Rollup json plugin\nexport { version as npmVersion } from '../package.json';\n\n// In version 7, we changed the PostgreSQL indexes API\n// v8 - new migrations folders structure\nexport const compatibilityVersion = 8;\n"],"mappings":"AACA,SAAoB,eAAkB;AAI/B,MAAM,uBAAuB;","names":[]}
+ {"version":3,"sources":["../src/version.ts"],"sourcesContent":["// @ts-ignore - imported using Rollup json plugin\nexport { version as npmVersion } from '../package.json';\n// In version 7, we changed the PostgreSQL indexes API\nexport const compatibilityVersion = 8;\n"],"mappings":"AACA,SAAoB,eAAkB;AAE/B,MAAM,uBAAuB;","names":[]}