@leonardovida-md/drizzle-neo-duckdb 1.0.1 → 1.0.2

This diff shows the changes between publicly released versions of the package as they appear in its public registry. It is provided for informational purposes only.
@@ -747,12 +747,13 @@ import { sql as sql4 } from "drizzle-orm";
  var SYSTEM_SCHEMAS = new Set(["information_schema", "pg_catalog"]);
  var DEFAULT_IMPORT_BASE = "@leonardovida-md/drizzle-neo-duckdb";
  async function introspect(db, opts = {}) {
- const schemas = await resolveSchemas(db, opts.schemas);
+ const database = await resolveDatabase(db, opts.database, opts.allDatabases);
+ const schemas = await resolveSchemas(db, database, opts.schemas);
  const includeViews = opts.includeViews ?? false;
- const tables = await loadTables(db, schemas, includeViews);
- const columns = await loadColumns(db, schemas);
- const constraints = await loadConstraints(db, schemas);
- const indexes = await loadIndexes(db, schemas);
+ const tables = await loadTables(db, database, schemas, includeViews);
+ const columns = await loadColumns(db, database, schemas);
+ const constraints = await loadConstraints(db, database, schemas);
+ const indexes = await loadIndexes(db, database, schemas);
  const grouped = buildTables(tables, columns, constraints, indexes);
  const schemaTs = emitSchema(grouped, {
  useCustomTimeTypes: opts.useCustomTimeTypes ?? true,
@@ -766,27 +767,41 @@ async function introspect(db, opts = {}) {
  }
  };
  }
- async function resolveSchemas(db, targetSchemas) {
+ async function resolveDatabase(db, targetDatabase, allDatabases) {
+ if (allDatabases) {
+ return null;
+ }
+ if (targetDatabase) {
+ return targetDatabase;
+ }
+ const rows = await db.execute(sql4`SELECT current_database() as current_database`);
+ return rows[0]?.current_database ?? null;
+ }
+ async function resolveSchemas(db, database, targetSchemas) {
  if (targetSchemas?.length) {
  return targetSchemas;
  }
- const rows = await db.execute(sql4`select schema_name from information_schema.schemata`);
+ const databaseFilter = database ? sql4`catalog_name = ${database}` : sql4`1 = 1`;
+ const rows = await db.execute(sql4`SELECT schema_name FROM information_schema.schemata WHERE ${databaseFilter}`);
  return rows.map((row) => row.schema_name).filter((name) => !SYSTEM_SCHEMAS.has(name));
  }
- async function loadTables(db, schemas, includeViews) {
+ async function loadTables(db, database, schemas, includeViews) {
  const schemaFragments = schemas.map((schema) => sql4`${schema}`);
+ const databaseFilter = database ? sql4`table_catalog = ${database}` : sql4`1 = 1`;
  return await db.execute(sql4`
- select table_schema as schema_name, table_name, table_type
- from information_schema.tables
- where table_schema in (${sql4.join(schemaFragments, sql4.raw(", "))})
- and ${includeViews ? sql4`1 = 1` : sql4`table_type = 'BASE TABLE'`}
- order by table_schema, table_name
+ SELECT table_schema as schema_name, table_name, table_type
+ FROM information_schema.tables
+ WHERE ${databaseFilter}
+ AND table_schema IN (${sql4.join(schemaFragments, sql4.raw(", "))})
+ AND ${includeViews ? sql4`1 = 1` : sql4`table_type = 'BASE TABLE'`}
+ ORDER BY table_schema, table_name
  `);
  }
- async function loadColumns(db, schemas) {
+ async function loadColumns(db, database, schemas) {
  const schemaFragments = schemas.map((schema) => sql4`${schema}`);
+ const databaseFilter = database ? sql4`database_name = ${database}` : sql4`1 = 1`;
  return await db.execute(sql4`
- select
+ SELECT
  schema_name,
  table_name,
  column_name,
@@ -798,15 +813,17 @@ async function loadColumns(db, schemas) {
  numeric_precision,
  numeric_scale,
  internal
- from duckdb_columns()
- where schema_name in (${sql4.join(schemaFragments, sql4.raw(", "))})
- order by schema_name, table_name, column_index
+ FROM duckdb_columns()
+ WHERE ${databaseFilter}
+ AND schema_name IN (${sql4.join(schemaFragments, sql4.raw(", "))})
+ ORDER BY schema_name, table_name, column_index
  `);
  }
- async function loadConstraints(db, schemas) {
+ async function loadConstraints(db, database, schemas) {
  const schemaFragments = schemas.map((schema) => sql4`${schema}`);
+ const databaseFilter = database ? sql4`database_name = ${database}` : sql4`1 = 1`;
  return await db.execute(sql4`
- select
+ SELECT
  schema_name,
  table_name,
  constraint_name,
@@ -815,23 +832,26 @@ async function loadConstraints(db, schemas) {
  constraint_column_names,
  referenced_table,
  referenced_column_names
- from duckdb_constraints()
- where schema_name in (${sql4.join(schemaFragments, sql4.raw(", "))})
- order by schema_name, table_name, constraint_index
+ FROM duckdb_constraints()
+ WHERE ${databaseFilter}
+ AND schema_name IN (${sql4.join(schemaFragments, sql4.raw(", "))})
+ ORDER BY schema_name, table_name, constraint_index
  `);
  }
- async function loadIndexes(db, schemas) {
+ async function loadIndexes(db, database, schemas) {
  const schemaFragments = schemas.map((schema) => sql4`${schema}`);
+ const databaseFilter = database ? sql4`database_name = ${database}` : sql4`1 = 1`;
  return await db.execute(sql4`
- select
+ SELECT
  schema_name,
  table_name,
  index_name,
  is_unique,
  expressions
- from duckdb_indexes()
- where schema_name in (${sql4.join(schemaFragments, sql4.raw(", "))})
- order by schema_name, table_name, index_name
+ FROM duckdb_indexes()
+ WHERE ${databaseFilter}
+ AND schema_name IN (${sql4.join(schemaFragments, sql4.raw(", "))})
+ ORDER BY schema_name, table_name, index_name
  `);
  }
  function buildTables(tables, columns, constraints, indexes) {
@@ -1278,6 +1298,7 @@ function renderImports(imports, importBasePath) {
  function parseArgs(argv) {
  const options = {
  outFile: path.resolve(process.cwd(), "drizzle/schema.ts"),
+ allDatabases: false,
  includeViews: false,
  useCustomTimeTypes: true
  };
@@ -1287,6 +1308,13 @@ function parseArgs(argv) {
  case "--url":
  options.url = argv[++i];
  break;
+ case "--database":
+ case "--db":
+ options.database = argv[++i];
+ break;
+ case "--all-databases":
+ options.allDatabases = true;
+ break;
  case "--schema":
  case "--schemas":
  options.schemas = argv[++i]?.split(",").map((s) => s.trim()).filter(Boolean);
@@ -1325,11 +1353,27 @@ Usage:

  Options:
  --url DuckDB database path (e.g. :memory:, ./local.duckdb, md:)
+ --database, --db Database/catalog to introspect (default: current database)
+ --all-databases Introspect all attached databases (not just current)
  --schema Comma separated schema list (defaults to all non-system schemas)
  --out Output file (default: ./drizzle/schema.ts)
  --include-views Include views in the generated schema
  --use-pg-time Use pg-core timestamp/date/time instead of DuckDB custom helpers
  --import-base Override import path for duckdb helpers (default: package name)
+
+ Database Filtering:
+ By default, only tables from the current database are introspected. This prevents
+ returning tables from all attached databases in MotherDuck workspaces.
+
+ Use --database to specify a different database, or --all-databases to introspect
+ all attached databases.
+
+ Examples:
+ # Local DuckDB file
+ bun x duckdb-introspect --url ./my-database.duckdb --out ./schema.ts
+
+ # MotherDuck (requires MOTHERDUCK_TOKEN env var)
+ MOTHERDUCK_TOKEN=xxx bun x duckdb-introspect --url md: --database my_cloud_db --out ./schema.ts
  `);
  }
  async function main() {
@@ -1344,6 +1388,8 @@ async function main() {
  const db = drizzle(connection);
  try {
  const result = await introspect(db, {
+ database: options.database,
+ allDatabases: options.allDatabases,
  schemas: options.schemas,
  includeViews: options.includeViews,
  useCustomTimeTypes: options.useCustomTimeTypes,
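
Taken together, the bundle changes above give the introspection a clear precedence for choosing which catalog to scan: --all-databases turns filtering off entirely, an explicit --database/--db value is used next, and otherwise the connection's own current_database() is taken as the default. A minimal TypeScript sketch of that decision, restated outside the bundle (the selectDatabase helper and DatabaseSelection type below are illustrative, not part of the package):

    // Illustrative restatement of the precedence implemented by resolveDatabase().
    // The "all" case corresponds to returning null, which makes every loader fall
    // back to the no-op `1 = 1` filter.
    type DatabaseSelection =
      | { kind: "all" }
      | { kind: "named"; database: string };

    function selectDatabase(
      currentDatabase: string,    // what SELECT current_database() returns in the real code
      explicitDatabase?: string,  // --database / --db
      allDatabases?: boolean,     // --all-databases
    ): DatabaseSelection {
      if (allDatabases) return { kind: "all" };            // checked first, so it overrides --database
      if (explicitDatabase) return { kind: "named", database: explicitDatabase };
      return { kind: "named", database: currentDatabase }; // the new 1.0.2 default
    }

With no flags at all this resolves to the current catalog, which is the 1.0.2 behaviour change that keeps MotherDuck workspaces from sweeping in every attached database.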
package/dist/index.mjs CHANGED
@@ -1012,12 +1012,13 @@ import { sql as sql5 } from "drizzle-orm";
  var SYSTEM_SCHEMAS = new Set(["information_schema", "pg_catalog"]);
  var DEFAULT_IMPORT_BASE = "@leonardovida-md/drizzle-neo-duckdb";
  async function introspect(db, opts = {}) {
- const schemas = await resolveSchemas(db, opts.schemas);
+ const database = await resolveDatabase(db, opts.database, opts.allDatabases);
+ const schemas = await resolveSchemas(db, database, opts.schemas);
  const includeViews = opts.includeViews ?? false;
- const tables = await loadTables(db, schemas, includeViews);
- const columns = await loadColumns(db, schemas);
- const constraints = await loadConstraints(db, schemas);
- const indexes = await loadIndexes(db, schemas);
+ const tables = await loadTables(db, database, schemas, includeViews);
+ const columns = await loadColumns(db, database, schemas);
+ const constraints = await loadConstraints(db, database, schemas);
+ const indexes = await loadIndexes(db, database, schemas);
  const grouped = buildTables(tables, columns, constraints, indexes);
  const schemaTs = emitSchema(grouped, {
  useCustomTimeTypes: opts.useCustomTimeTypes ?? true,
@@ -1031,27 +1032,41 @@ async function introspect(db, opts = {}) {
  }
  };
  }
- async function resolveSchemas(db, targetSchemas) {
+ async function resolveDatabase(db, targetDatabase, allDatabases) {
+ if (allDatabases) {
+ return null;
+ }
+ if (targetDatabase) {
+ return targetDatabase;
+ }
+ const rows = await db.execute(sql5`SELECT current_database() as current_database`);
+ return rows[0]?.current_database ?? null;
+ }
+ async function resolveSchemas(db, database, targetSchemas) {
  if (targetSchemas?.length) {
  return targetSchemas;
  }
- const rows = await db.execute(sql5`select schema_name from information_schema.schemata`);
+ const databaseFilter = database ? sql5`catalog_name = ${database}` : sql5`1 = 1`;
+ const rows = await db.execute(sql5`SELECT schema_name FROM information_schema.schemata WHERE ${databaseFilter}`);
  return rows.map((row) => row.schema_name).filter((name) => !SYSTEM_SCHEMAS.has(name));
  }
- async function loadTables(db, schemas, includeViews) {
+ async function loadTables(db, database, schemas, includeViews) {
  const schemaFragments = schemas.map((schema) => sql5`${schema}`);
+ const databaseFilter = database ? sql5`table_catalog = ${database}` : sql5`1 = 1`;
  return await db.execute(sql5`
- select table_schema as schema_name, table_name, table_type
- from information_schema.tables
- where table_schema in (${sql5.join(schemaFragments, sql5.raw(", "))})
- and ${includeViews ? sql5`1 = 1` : sql5`table_type = 'BASE TABLE'`}
- order by table_schema, table_name
+ SELECT table_schema as schema_name, table_name, table_type
+ FROM information_schema.tables
+ WHERE ${databaseFilter}
+ AND table_schema IN (${sql5.join(schemaFragments, sql5.raw(", "))})
+ AND ${includeViews ? sql5`1 = 1` : sql5`table_type = 'BASE TABLE'`}
+ ORDER BY table_schema, table_name
  `);
  }
- async function loadColumns(db, schemas) {
+ async function loadColumns(db, database, schemas) {
  const schemaFragments = schemas.map((schema) => sql5`${schema}`);
+ const databaseFilter = database ? sql5`database_name = ${database}` : sql5`1 = 1`;
  return await db.execute(sql5`
- select
+ SELECT
  schema_name,
  table_name,
  column_name,
@@ -1063,15 +1078,17 @@ async function loadColumns(db, schemas) {
  numeric_precision,
  numeric_scale,
  internal
- from duckdb_columns()
- where schema_name in (${sql5.join(schemaFragments, sql5.raw(", "))})
- order by schema_name, table_name, column_index
+ FROM duckdb_columns()
+ WHERE ${databaseFilter}
+ AND schema_name IN (${sql5.join(schemaFragments, sql5.raw(", "))})
+ ORDER BY schema_name, table_name, column_index
  `);
  }
- async function loadConstraints(db, schemas) {
+ async function loadConstraints(db, database, schemas) {
  const schemaFragments = schemas.map((schema) => sql5`${schema}`);
+ const databaseFilter = database ? sql5`database_name = ${database}` : sql5`1 = 1`;
  return await db.execute(sql5`
- select
+ SELECT
  schema_name,
  table_name,
  constraint_name,
@@ -1080,23 +1097,26 @@ async function loadConstraints(db, schemas) {
  constraint_column_names,
  referenced_table,
  referenced_column_names
- from duckdb_constraints()
- where schema_name in (${sql5.join(schemaFragments, sql5.raw(", "))})
- order by schema_name, table_name, constraint_index
+ FROM duckdb_constraints()
+ WHERE ${databaseFilter}
+ AND schema_name IN (${sql5.join(schemaFragments, sql5.raw(", "))})
+ ORDER BY schema_name, table_name, constraint_index
  `);
  }
- async function loadIndexes(db, schemas) {
+ async function loadIndexes(db, database, schemas) {
  const schemaFragments = schemas.map((schema) => sql5`${schema}`);
+ const databaseFilter = database ? sql5`database_name = ${database}` : sql5`1 = 1`;
  return await db.execute(sql5`
- select
+ SELECT
  schema_name,
  table_name,
  index_name,
  is_unique,
  expressions
- from duckdb_indexes()
- where schema_name in (${sql5.join(schemaFragments, sql5.raw(", "))})
- order by schema_name, table_name, index_name
+ FROM duckdb_indexes()
+ WHERE ${databaseFilter}
+ AND schema_name IN (${sql5.join(schemaFragments, sql5.raw(", "))})
+ ORDER BY schema_name, table_name, index_name
  `);
  }
  function buildTables(tables, columns, constraints, indexes) {
@@ -1,6 +1,18 @@
  import type { RowData } from './client.ts';
  import type { DuckDBDatabase } from './driver.ts';
  export interface IntrospectOptions {
+ /**
+ * Database/catalog to introspect. If not specified, uses the current database
+ * (via `SELECT current_database()`). This prevents returning tables from all
+ * attached databases in MotherDuck workspaces.
+ */
+ database?: string;
+ /**
+ * When true, introspects all attached databases instead of just the current one.
+ * Ignored if `database` is explicitly set.
+ * @default false
+ */
+ allDatabases?: boolean;
  schemas?: string[];
  includeViews?: boolean;
  useCustomTimeTypes?: boolean;
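
For programmatic callers, the two new IntrospectOptions fields are used as below. Whether introspect and the DuckDBDatabase type are re-exported from the package root is an assumption here, so treat the import line as illustrative:

    import { introspect, type DuckDBDatabase } from "@leonardovida-md/drizzle-neo-duckdb"; // assumed exports

    // Introspect a single catalog, e.g. one MotherDuck database.
    async function introspectOneCatalog(db: DuckDBDatabase) {
      return introspect(db, { database: "my_cloud_db", includeViews: false });
    }

    // Opt back into the pre-1.0.2 behaviour of scanning every attached database.
    async function introspectAllCatalogs(db: DuckDBDatabase) {
      return introspect(db, { allDatabases: true });
    }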
package/package.json CHANGED
@@ -3,7 +3,7 @@
  "module": "./dist/index.mjs",
  "main": "./dist/index.mjs",
  "types": "./dist/index.d.ts",
- "version": "1.0.1",
+ "version": "1.0.2",
  "description": "A drizzle ORM client for use with DuckDB. Based on drizzle's Postgres client.",
  "type": "module",
  "scripts": {
@@ -8,6 +8,8 @@ import { introspect } from '../introspect.ts';

  interface CliOptions {
  url?: string;
+ database?: string;
+ allDatabases: boolean;
  schemas?: string[];
  outFile: string;
  includeViews: boolean;
@@ -18,6 +20,7 @@ interface CliOptions {
  function parseArgs(argv: string[]): CliOptions {
  const options: CliOptions = {
  outFile: path.resolve(process.cwd(), 'drizzle/schema.ts'),
+ allDatabases: false,
  includeViews: false,
  useCustomTimeTypes: true,
  };
@@ -28,6 +31,13 @@ function parseArgs(argv: string[]): CliOptions {
  case '--url':
  options.url = argv[++i];
  break;
+ case '--database':
+ case '--db':
+ options.database = argv[++i];
+ break;
+ case '--all-databases':
+ options.allDatabases = true;
+ break;
  case '--schema':
  case '--schemas':
  options.schemas = argv[++i]?.split(',').map((s) => s.trim()).filter(Boolean);
@@ -68,11 +78,27 @@ Usage:

  Options:
  --url DuckDB database path (e.g. :memory:, ./local.duckdb, md:)
+ --database, --db Database/catalog to introspect (default: current database)
+ --all-databases Introspect all attached databases (not just current)
  --schema Comma separated schema list (defaults to all non-system schemas)
  --out Output file (default: ./drizzle/schema.ts)
  --include-views Include views in the generated schema
  --use-pg-time Use pg-core timestamp/date/time instead of DuckDB custom helpers
  --import-base Override import path for duckdb helpers (default: package name)
+
+ Database Filtering:
+ By default, only tables from the current database are introspected. This prevents
+ returning tables from all attached databases in MotherDuck workspaces.
+
+ Use --database to specify a different database, or --all-databases to introspect
+ all attached databases.
+
+ Examples:
+ # Local DuckDB file
+ bun x duckdb-introspect --url ./my-database.duckdb --out ./schema.ts
+
+ # MotherDuck (requires MOTHERDUCK_TOKEN env var)
+ MOTHERDUCK_TOKEN=xxx bun x duckdb-introspect --url md: --database my_cloud_db --out ./schema.ts
  `);
  }

@@ -94,6 +120,8 @@ async function main() {

  try {
  const result = await introspect(db, {
+ database: options.database,
+ allDatabases: options.allDatabases,
  schemas: options.schemas,
  includeViews: options.includeViews,
  useCustomTimeTypes: options.useCustomTimeTypes,
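
As a quick illustration of the CLI wiring above, this is roughly the options value that parseArgs() produces for one invocation (a sketch; only the switch cases visible in this diff are exercised, and the output path keeps its default):

    // bun x duckdb-introspect --url md: --db my_cloud_db --all-databases
    const parsed = {
      url: "md:",
      database: "my_cloud_db",        // set by the new --database / --db case
      allDatabases: true,             // set by the new --all-databases case
      outFile: "./drizzle/schema.ts", // default, resolved against cwd in the real code
      includeViews: false,
      useCustomTimeTypes: true,
    };
    // When both flags are passed, resolveDatabase() checks allDatabases first, so no
    // catalog filter is applied and every attached database is introspected.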
package/src/introspect.ts CHANGED
@@ -5,6 +5,18 @@ import type { DuckDBDatabase } from './driver.ts';
  const SYSTEM_SCHEMAS = new Set(['information_schema', 'pg_catalog']);

  export interface IntrospectOptions {
+ /**
+ * Database/catalog to introspect. If not specified, uses the current database
+ * (via `SELECT current_database()`). This prevents returning tables from all
+ * attached databases in MotherDuck workspaces.
+ */
+ database?: string;
+ /**
+ * When true, introspects all attached databases instead of just the current one.
+ * Ignored if `database` is explicitly set.
+ * @default false
+ */
+ allDatabases?: boolean;
  schemas?: string[];
  includeViews?: boolean;
  useCustomTimeTypes?: boolean;
@@ -102,13 +114,14 @@ export async function introspect(
  db: DuckDBDatabase,
  opts: IntrospectOptions = {}
  ): Promise<IntrospectResult> {
- const schemas = await resolveSchemas(db, opts.schemas);
+ const database = await resolveDatabase(db, opts.database, opts.allDatabases);
+ const schemas = await resolveSchemas(db, database, opts.schemas);
  const includeViews = opts.includeViews ?? false;

- const tables = await loadTables(db, schemas, includeViews);
- const columns = await loadColumns(db, schemas);
- const constraints = await loadConstraints(db, schemas);
- const indexes = await loadIndexes(db, schemas);
+ const tables = await loadTables(db, database, schemas, includeViews);
+ const columns = await loadColumns(db, database, schemas);
+ const constraints = await loadConstraints(db, database, schemas);
+ const indexes = await loadIndexes(db, database, schemas);

  const grouped = buildTables(tables, columns, constraints, indexes);

@@ -126,16 +139,39 @@ export async function introspect(
  };
  }

+ async function resolveDatabase(
+ db: DuckDBDatabase,
+ targetDatabase?: string,
+ allDatabases?: boolean
+ ): Promise<string | null> {
+ if (allDatabases) {
+ return null;
+ }
+ if (targetDatabase) {
+ return targetDatabase;
+ }
+
+ const rows = await db.execute<{ current_database: string }>(
+ sql`SELECT current_database() as current_database`
+ );
+ return rows[0]?.current_database ?? null;
+ }
+
  async function resolveSchemas(
  db: DuckDBDatabase,
+ database: string | null,
  targetSchemas?: string[]
  ): Promise<string[]> {
  if (targetSchemas?.length) {
  return targetSchemas;
  }

+ const databaseFilter = database
+ ? sql`catalog_name = ${database}`
+ : sql`1 = 1`;
+
  const rows = await db.execute<{ schema_name: string }>(
- sql`select schema_name from information_schema.schemata`
+ sql`SELECT schema_name FROM information_schema.schemata WHERE ${databaseFilter}`
  );

  return rows
@@ -145,30 +181,40 @@ async function resolveSchemas(

  async function loadTables(
  db: DuckDBDatabase,
+ database: string | null,
  schemas: string[],
  includeViews: boolean
  ): Promise<DuckDbTableRow[]> {
  const schemaFragments = schemas.map((schema) => sql`${schema}`);
+ const databaseFilter = database
+ ? sql`table_catalog = ${database}`
+ : sql`1 = 1`;

  return await db.execute<DuckDbTableRow>(
  sql`
- select table_schema as schema_name, table_name, table_type
- from information_schema.tables
- where table_schema in (${sql.join(schemaFragments, sql.raw(', '))})
- and ${includeViews ? sql`1 = 1` : sql`table_type = 'BASE TABLE'`}
- order by table_schema, table_name
+ SELECT table_schema as schema_name, table_name, table_type
+ FROM information_schema.tables
+ WHERE ${databaseFilter}
+ AND table_schema IN (${sql.join(schemaFragments, sql.raw(', '))})
+ AND ${includeViews ? sql`1 = 1` : sql`table_type = 'BASE TABLE'`}
+ ORDER BY table_schema, table_name
  `
  );
  }

  async function loadColumns(
  db: DuckDBDatabase,
+ database: string | null,
  schemas: string[]
  ): Promise<DuckDbColumnRow[]> {
  const schemaFragments = schemas.map((schema) => sql`${schema}`);
+ const databaseFilter = database
+ ? sql`database_name = ${database}`
+ : sql`1 = 1`;
+
  return await db.execute<DuckDbColumnRow>(
  sql`
- select
+ SELECT
  schema_name,
  table_name,
  column_name,
@@ -180,21 +226,27 @@ async function loadColumns(
  numeric_precision,
  numeric_scale,
  internal
- from duckdb_columns()
- where schema_name in (${sql.join(schemaFragments, sql.raw(', '))})
- order by schema_name, table_name, column_index
+ FROM duckdb_columns()
+ WHERE ${databaseFilter}
+ AND schema_name IN (${sql.join(schemaFragments, sql.raw(', '))})
+ ORDER BY schema_name, table_name, column_index
  `
  );
  }

  async function loadConstraints(
  db: DuckDBDatabase,
+ database: string | null,
  schemas: string[]
  ): Promise<DuckDbConstraintRow[]> {
  const schemaFragments = schemas.map((schema) => sql`${schema}`);
+ const databaseFilter = database
+ ? sql`database_name = ${database}`
+ : sql`1 = 1`;
+
  return await db.execute<DuckDbConstraintRow>(
  sql`
- select
+ SELECT
  schema_name,
  table_name,
  constraint_name,
@@ -203,29 +255,36 @@ async function loadConstraints(
  constraint_column_names,
  referenced_table,
  referenced_column_names
- from duckdb_constraints()
- where schema_name in (${sql.join(schemaFragments, sql.raw(', '))})
- order by schema_name, table_name, constraint_index
+ FROM duckdb_constraints()
+ WHERE ${databaseFilter}
+ AND schema_name IN (${sql.join(schemaFragments, sql.raw(', '))})
+ ORDER BY schema_name, table_name, constraint_index
  `
  );
  }

  async function loadIndexes(
  db: DuckDBDatabase,
+ database: string | null,
  schemas: string[]
  ): Promise<DuckDbIndexRow[]> {
  const schemaFragments = schemas.map((schema) => sql`${schema}`);
+ const databaseFilter = database
+ ? sql`database_name = ${database}`
+ : sql`1 = 1`;
+
  return await db.execute<DuckDbIndexRow>(
  sql`
- select
+ SELECT
  schema_name,
  table_name,
  index_name,
  is_unique,
  expressions
- from duckdb_indexes()
- where schema_name in (${sql.join(schemaFragments, sql.raw(', '))})
- order by schema_name, table_name, index_name
+ FROM duckdb_indexes()
+ WHERE ${databaseFilter}
+ AND schema_name IN (${sql.join(schemaFragments, sql.raw(', '))})
+ ORDER BY schema_name, table_name, index_name
  `
  );
  }
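
Every hunk in this file applies the same two-branch filter fragment: when a catalog has been resolved it becomes a bound predicate on the metadata source's catalog column (table_catalog for information_schema.tables, catalog_name for information_schema.schemata, database_name for the duckdb_*() table functions), and when allDatabases is in effect it degenerates to 1 = 1. A condensed sketch of that pattern with drizzle-orm's sql tag (the catalogFilter helper below is illustrative; the package inlines the fragment in each loader):

    import { sql } from "drizzle-orm";

    // One bound predicate per metadata source, or a no-op filter for the all-databases path.
    function catalogFilter(
      column: "table_catalog" | "catalog_name" | "database_name",
      database: string | null,
    ) {
      return database ? sql`${sql.raw(column)} = ${database}` : sql`1 = 1`;
    }

    // Conceptually, loadColumns() now runs:
    //   SELECT ... FROM duckdb_columns() WHERE database_name = ? AND schema_name IN (...)
    // and with allDatabases:
    //   SELECT ... FROM duckdb_columns() WHERE 1 = 1 AND schema_name IN (...)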