rake-db 2.25.9 → 2.25.10

This diff shows the changes between two publicly released versions of this package, as published to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects the differences between package versions as they appear in their respective public registries.
package/dist/index.mjs CHANGED
@@ -1,4 +1,4 @@
1
- import { DomainColumn, escapeForMigration, EnumColumn, defaultSchemaConfig, getColumnTypes, parseTableData, escapeString, tableDataMethods, ColumnType, parseTableDataInput, UnknownColumn, raw, TransactionAdapter, logParamToLogObject, createDb as createDb$1, Adapter, makeColumnTypes, makeColumnsByType, RawSQL, CustomTypeColumn, assignDbDataToColumn, ArrayColumn, PostgisGeographyPointColumn, pushTableDataCode, primaryKeyInnerToCode, indexInnerToCode, excludeInnerToCode, constraintInnerToCode, referencesArgsToCode, TimestampTZColumn, TimestampColumn } from 'pqb';
1
+ import { DomainColumn, escapeForMigration, ArrayColumn, EnumColumn, defaultSchemaConfig, getColumnTypes, parseTableData, escapeString, tableDataMethods, ColumnType, parseTableDataInput, UnknownColumn, raw, TransactionAdapter, logParamToLogObject, createDb as createDb$1, Adapter, makeColumnTypes, makeColumnsByType, RawSQL, CustomTypeColumn, assignDbDataToColumn, PostgisGeographyPointColumn, pushTableDataCode, primaryKeyInnerToCode, indexInnerToCode, excludeInnerToCode, constraintInnerToCode, referencesArgsToCode, TimestampTZColumn, TimestampColumn } from 'pqb';
2
2
  import { singleQuote, toSnakeCase, isRawSQL, toCamelCase, toArray, snakeCaseKey, emptyObject, setCurrentColumnName, consumeColumnName, ColumnTypeBase, setDefaultLanguage, deepCompare, getImportPath, pathToLog, emptyArray, getStackTrace, codeToString, addCode, quoteObjectKey, backtickQuote } from 'orchid-core';
3
3
  import path, { join } from 'path';
4
4
  import { pathToFileURL, fileURLToPath } from 'node:url';
@@ -160,7 +160,7 @@ const encodeColumnDefault = (def, values, column) => {
160
160
  return def.toSQL({ values });
161
161
  } else {
162
162
  return escapeForMigration(
163
- column?.data.encode ? column.data.encode(def) : def
163
+ column instanceof ArrayColumn && Array.isArray(def) ? "{" + (column.data.item.data.encode ? def.map((x) => column.data.item.data.encode(x)) : def).join(",") + "}" : column?.data.encode ? column.data.encode(def) : def
164
164
  );
165
165
  }
166
166
  }
@@ -1177,8 +1177,9 @@ const handleTableItemChange = (key, item, ast, alterTable, renameItems, values,
1177
1177
  if (to.type && (from.type !== to.type || from.collate !== to.collate)) {
1178
1178
  changeType = true;
1179
1179
  const type = !to.column || to.column.data.isOfCustomType ? to.column && to.column instanceof DomainColumn ? quoteNameFromString(to.type) : quoteCustomType(to.type) : to.type;
1180
+ const using = item.using?.usingUp ? ` USING ${item.using.usingUp.toSQL({ values })}` : to.column instanceof EnumColumn ? ` USING "${name}"::text::${type}` : to.column instanceof ArrayColumn ? ` USING "${name}"::text[]::${type}` : "";
1180
1181
  alterTable.push(
1181
- `ALTER COLUMN "${name}" TYPE ${type}${to.collate ? ` COLLATE ${quoteNameFromString(to.collate)}` : ""}${item.using?.usingUp ? ` USING ${item.using.usingUp.toSQL({ values })}` : to.column instanceof EnumColumn ? ` USING "${name}"::text::${type}` : ""}`
1182
+ `ALTER COLUMN "${name}" TYPE ${type}${to.collate ? ` COLLATE ${quoteNameFromString(to.collate)}` : ""}${using}`
1182
1183
  );
1183
1184
  }
1184
1185
  if (typeof from.identity !== typeof to.identity || !deepCompare(from.identity, to.identity)) {
@@ -2481,11 +2482,15 @@ const recreateEnum = async (migration, { schema, name }, values, errorMessage) =
2481
2482
  const { rows: tables } = await migration.adapter.query(
2482
2483
  `SELECT n.nspname AS "schema",
2483
2484
  c.relname AS "table",
2484
- json_agg(a.attname ORDER BY a.attnum) AS "columns"
2485
+ json_agg(
2486
+ json_build_object('name', a.attname, 'arrayDims', a.attndims)
2487
+ ORDER BY a.attnum
2488
+ ) AS "columns"
2485
2489
  FROM pg_class c
2486
2490
  JOIN pg_catalog.pg_namespace n ON n.oid = relnamespace
2487
- JOIN pg_attribute a ON a.attrelid = c.oid
2488
- JOIN pg_type t ON a.atttypid = t.oid AND t.typname = ${singleQuote(name)}
2491
+ JOIN pg_type bt ON bt.typname = ${singleQuote(name)}
2492
+ JOIN pg_type t ON t.oid = bt.oid OR t.typelem = bt.oid
2493
+ JOIN pg_attribute a ON a.attrelid = c.oid AND a.atttypid = t.oid
2489
2494
  JOIN pg_namespace tn ON tn.oid = t.typnamespace AND tn.nspname = ${singleQuote(
2490
2495
  schema ?? defaultSchema
2491
2496
  )}
@@ -2494,7 +2499,9 @@ GROUP BY n.nspname, c.relname`
2494
2499
  );
2495
2500
  const sql = tables.map(
2496
2501
  (t) => `ALTER TABLE ${quoteTable(t.schema, t.table)}
2497
- ${t.columns.map((c) => ` ALTER COLUMN "${c}" TYPE text`).join(",\n")}`
2502
+ ${t.columns.map(
2503
+ (c) => ` ALTER COLUMN "${c.name}" TYPE text${"[]".repeat(c.arrayDims)}`
2504
+ ).join(",\n")}`
2498
2505
  );
2499
2506
  sql.push(
2500
2507
  `DROP TYPE ${quotedName}`,
@@ -2504,14 +2511,17 @@ GROUP BY n.nspname, c.relname`
2504
2511
  for (const t of tables) {
2505
2512
  const table = quoteTable(t.schema, t.table);
2506
2513
  for (const c of t.columns) {
2514
+ const type = quotedName + "[]".repeat(c.arrayDims);
2507
2515
  try {
2508
2516
  await migration.adapter.query(
2509
2517
  `ALTER TABLE ${table}
2510
- ALTER COLUMN "${c}" TYPE ${quotedName} USING "${c}"::${quotedName}`
2518
+ ALTER COLUMN "${c.name}" TYPE ${type} USING "${c.name}"::${type}`
2511
2519
  );
2512
2520
  } catch (err) {
2513
2521
  if (err.code === "22P02") {
2514
- throw new Error(errorMessage(quotedName, table, c), { cause: err });
2522
+ throw new Error(errorMessage(quotedName, table, c.name), {
2523
+ cause: err
2524
+ });
2515
2525
  }
2516
2526
  throw err;
2517
2527
  }
@@ -4238,7 +4248,8 @@ const instantiateDbColumn = (ctx, data, domains, dbColumn) => {
4238
4248
  if (domainColumn) {
4239
4249
  column = new DomainColumn(
4240
4250
  ctx.columnSchemaConfig,
4241
- typeId,
4251
+ typeName,
4252
+ typeSchema,
4242
4253
  dbColumn.extension
4243
4254
  ).as(domainColumn);
4244
4255
  } else {
@@ -4255,7 +4266,8 @@ const instantiateDbColumn = (ctx, data, domains, dbColumn) => {
4255
4266
  } else {
4256
4267
  column = new CustomTypeColumn(
4257
4268
  ctx.columnSchemaConfig,
4258
- typeId,
4269
+ typeName,
4270
+ typeSchema === "pg_catalog" ? void 0 : typeSchema,
4259
4271
  dbColumn.extension
4260
4272
  );
4261
4273
  ((_a = ctx.unsupportedTypes)[_b = dbColumn.type] ?? (_a[_b] = [])).push(
@@ -4939,7 +4951,7 @@ ${group.map(
4939
4951
  return code;
4940
4952
  };
4941
4953
  const astEncoders = {
4942
- table(ast, config) {
4954
+ table(ast, config, currentSchema) {
4943
4955
  let code = [];
4944
4956
  const result = code;
4945
4957
  const hasOptions = Boolean(ast.comment || ast.noPrimaryKey === "ignore");
@@ -4973,6 +4985,7 @@ const astEncoders = {
4973
4985
  const toCodeCtx = {
4974
4986
  t: "t",
4975
4987
  table: ast.name,
4988
+ currentSchema,
4976
4989
  migration: true,
4977
4990
  snakeCase: config.snakeCase
4978
4991
  };
@@ -5028,6 +5041,7 @@ const astEncoders = {
5028
5041
  const toCodeCtx = {
5029
5042
  t: "t",
5030
5043
  table: ast.name,
5044
+ currentSchema,
5031
5045
  migration: true,
5032
5046
  snakeCase: config.snakeCase
5033
5047
  };
@@ -5060,6 +5074,7 @@ const astEncoders = {
5060
5074
  {
5061
5075
  t: "t",
5062
5076
  table: ast.name,
5077
+ currentSchema,
5063
5078
  migration: true,
5064
5079
  snakeCase: config.snakeCase
5065
5080
  },
@@ -5196,11 +5211,11 @@ const astEncoders = {
5196
5211
  ast
5197
5212
  )}, [${ast.fromValues.map(singleQuote).join(", ")}], [${ast.toValues.map(singleQuote).join(", ")}]);`;
5198
5213
  },
5199
- domain(ast) {
5214
+ domain(ast, _, currentSchema) {
5200
5215
  return `await db.${ast.action}Domain(${quoteSchemaTable(
5201
5216
  ast
5202
5217
  )}, (t) => ${ast.baseType.toCode(
5203
- { t: "t", table: ast.name },
5218
+ { t: "t", table: ast.name, currentSchema },
5204
5219
  ast.baseType.data.name ?? ""
5205
5220
  )});`;
5206
5221
  },