knex 3.2.5 → 3.2.7

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md CHANGED
@@ -1,10 +1,33 @@
1
1
  # Master (Unreleased)
2
2
 
3
+ # 3.2.7 - 27 March, 2026
4
+
5
+ ### Bug fixes
6
+
7
+ - fix: sqlite DDL operations failing inside transactions [#6408](https://github.com/knex/knex/issues/6408)
8
+ - fix: handle lowercase INFORMATION_SCHEMA keys in MySQL renameColumn [#6407](https://github.com/knex/knex/issues/6407)
9
+ - fix: clone config in client constructor [#5633](https://github.com/knex/knex/issues/5633)
10
+ - fix: remove \_\_knexTxId from transaction connection on release [#5288](https://github.com/knex/knex/issues/5288)
11
+ - fix: correct binding order in delete with subquery join [#6412](https://github.com/knex/knex/issues/6412)
12
+ - chore: omit ./scripts from published package [#6356](https://github.com/knex/knex/issues/6356)
13
+
14
+ # 3.2.6 - 24 March, 2026
15
+
16
+ ### Bug fixes
17
+
18
+ - Fix module exports [#6406](https://github.com/knex/knex/issues/6406)
19
+
20
+ # 3.2.5 - 23 March, 2026
21
+
22
+ ### Bug fixes
23
+
24
+ - Fix ESM exports [#6405](https://github.com/knex/knex/issues/6405)
25
+
3
26
  # 3.2.4 - 23 March, 2026
4
27
 
5
28
  ### Bug fixes
6
29
 
7
- - Fix ESM type exports
30
+ - Fix ESM type exports [#6404](https://github.com/knex/knex/issues/6404)
8
31
 
9
32
  # 3.2.1 - 22 March, 2026
10
33
 
package/bin/cli.js CHANGED
File without changes
package/lib/client.js CHANGED
@@ -41,11 +41,14 @@ const debug = require('debug')('knex:client');
41
41
  class Client extends EventEmitter {
42
42
  constructor(config = {}) {
43
43
  super();
44
- this.config = config;
45
- this.logger = new Logger(config);
44
+ this.config = { ...config };
45
+ if (config.connection && typeof config.connection === 'object') {
46
+ this.config.connection = { ...config.connection };
47
+ }
48
+ this.logger = new Logger(this.config);
46
49
 
47
- if (this.config.connection && this.config.connection.password) {
48
- setHiddenProperty(this.config.connection);
50
+ if (this.config.connection && config.connection.password) {
51
+ setHiddenProperty(this.config.connection, config.connection);
49
52
  }
50
53
 
51
54
  //Client is a required field, so throw error if it's not supplied.
@@ -70,19 +73,19 @@ class Client extends EventEmitter {
70
73
  }
71
74
 
72
75
  if (config.connection && config.connection instanceof Function) {
73
- this.connectionConfigProvider = config.connection;
76
+ this.connectionConfigProvider = this.config.connection;
74
77
  this.connectionConfigExpirationChecker = () => true; // causes the provider to be called on first use
75
78
  } else {
76
- this.connectionSettings = cloneDeep(config.connection || {});
79
+ this.connectionSettings = cloneDeep(this.config.connection || {});
77
80
  if (config.connection && config.connection.password) {
78
- setHiddenProperty(this.connectionSettings, config.connection);
81
+ setHiddenProperty(this.connectionSettings, this.config.connection);
79
82
  }
80
83
  this.connectionConfigExpirationChecker = null;
81
84
  }
82
85
  if (this.driverName && config.connection) {
83
86
  this.initializeDriver();
84
87
  if (!config.pool || (config.pool && config.pool.max !== 0)) {
85
- this.initializePool(config);
88
+ this.initializePool(this.config);
86
89
  }
87
90
  }
88
91
  this.valueForUndefined = this.raw('DEFAULT');
@@ -125,11 +125,19 @@ class TableCompiler_MySQL extends TableCompiler {
125
125
  return compiler.createFKRefs(
126
126
  runner,
127
127
  refs.map(function (ref) {
128
- if (ref.REFERENCED_COLUMN_NAME === from) {
129
- ref.REFERENCED_COLUMN_NAME = to;
128
+ const refColKey =
129
+ ref.REFERENCED_COLUMN_NAME !== undefined
130
+ ? 'REFERENCED_COLUMN_NAME'
131
+ : 'referenced_column_name';
132
+ const colKey =
133
+ ref.COLUMN_NAME !== undefined
134
+ ? 'COLUMN_NAME'
135
+ : 'column_name';
136
+ if (ref[refColKey] === from) {
137
+ ref[refColKey] = to;
130
138
  }
131
- if (ref.COLUMN_NAME === from) {
132
- ref.COLUMN_NAME = to;
139
+ if (ref[colKey] === from) {
140
+ ref[colKey] = to;
133
141
  }
134
142
  return ref;
135
143
  })
@@ -187,7 +195,7 @@ class TableCompiler_MySQL extends TableCompiler {
187
195
  ' RC.UPDATE_RULE, RC.DELETE_RULE ' +
188
196
  'FROM INFORMATION_SCHEMA.KEY_COLUMN_USAGE AS KCU ' +
189
197
  'JOIN INFORMATION_SCHEMA.REFERENTIAL_CONSTRAINTS AS RC ' +
190
- ' USING(CONSTRAINT_NAME)' +
198
+ ' USING(CONSTRAINT_NAME) ' +
191
199
  'WHERE KCU.REFERENCED_TABLE_NAME = ' +
192
200
  this.client.parameter(
193
201
  this.tableNameRaw,
@@ -220,8 +228,10 @@ class TableCompiler_MySQL extends TableCompiler {
220
228
 
221
229
  return Promise.all(
222
230
  refs.map(function (ref) {
223
- const constraintName = formatter.wrap(ref.CONSTRAINT_NAME);
224
- const tableName = formatter.wrap(ref.TABLE_NAME);
231
+ const constraintName = formatter.wrap(
232
+ ref.CONSTRAINT_NAME || ref.constraint_name
233
+ );
234
+ const tableName = formatter.wrap(ref.TABLE_NAME || ref.table_name);
225
235
  return runner.query({
226
236
  sql: `alter table ${tableName} drop foreign key ${constraintName}`,
227
237
  });
@@ -234,13 +244,19 @@ class TableCompiler_MySQL extends TableCompiler {
234
244
 
235
245
  return Promise.all(
236
246
  refs.map(function (ref) {
237
- const tableName = formatter.wrap(ref.TABLE_NAME);
238
- const keyName = formatter.wrap(ref.CONSTRAINT_NAME);
239
- const column = formatter.columnize(ref.COLUMN_NAME);
240
- const references = formatter.columnize(ref.REFERENCED_COLUMN_NAME);
241
- const inTable = formatter.wrap(ref.REFERENCED_TABLE_NAME);
242
- const onUpdate = ` ON UPDATE ${ref.UPDATE_RULE}`;
243
- const onDelete = ` ON DELETE ${ref.DELETE_RULE}`;
247
+ const tableName = formatter.wrap(ref.TABLE_NAME || ref.table_name);
248
+ const keyName = formatter.wrap(
249
+ ref.CONSTRAINT_NAME || ref.constraint_name
250
+ );
251
+ const column = formatter.columnize(ref.COLUMN_NAME || ref.column_name);
252
+ const references = formatter.columnize(
253
+ ref.REFERENCED_COLUMN_NAME || ref.referenced_column_name
254
+ );
255
+ const inTable = formatter.wrap(
256
+ ref.REFERENCED_TABLE_NAME || ref.referenced_table_name
257
+ );
258
+ const onUpdate = ` ON UPDATE ${ref.UPDATE_RULE || ref.update_rule}`;
259
+ const onDelete = ` ON DELETE ${ref.DELETE_RULE || ref.delete_rule}`;
244
260
 
245
261
  return runner.query({
246
262
  sql:
@@ -88,7 +88,7 @@ class Transaction_Sqlite extends Transaction {
88
88
  if (
89
89
  strictForeignKeyPragma &&
90
90
  hasOuterTransaction &&
91
- restoreForeignCheck !== undefined
91
+ restoreForeignCheck != null
92
92
  ) {
93
93
  throw new Error(
94
94
  `Refusing to create transaction: unable to change \`foreign_keys\` pragma inside a nested transaction`
@@ -334,6 +334,10 @@ class SQLite3_DDL {
334
334
  }
335
335
 
336
336
  async alter(newSql, createIndices, columns) {
337
+ // When already inside a transaction, we cannot change the foreign_keys
338
+ // pragma (SQLite silently ignores pragma changes within transactions).
339
+ // Use `null` to leave it as-is and avoid the nested-transaction guard.
340
+ const enforceForeignCheck = this.client.transacting ? null : false;
337
341
  await this.client.transaction(
338
342
  async (trx) => {
339
343
  await trx.raw(newSql);
@@ -345,7 +349,7 @@ class SQLite3_DDL {
345
349
  await trx.raw(createIndex);
346
350
  }
347
351
  },
348
- { connection: this.connection, enforceForeignCheck: false }
352
+ { connection: this.connection, enforceForeignCheck }
349
353
  );
350
354
  }
351
355
 
@@ -47,7 +47,7 @@ function executeQuery(connection, queryObject, client) {
47
47
  'query-error',
48
48
  err,
49
49
  Object.assign(
50
- { __knexUid: connection.__knexUid, __knexTxId: connection.__knexUid },
50
+ { __knexUid: connection.__knexUid, __knexTxId: connection.__knexTxId },
51
51
  queryObject
52
52
  )
53
53
  );
@@ -271,6 +271,7 @@ class Transaction extends EventEmitter {
271
271
  } finally {
272
272
  if (!configConnection) {
273
273
  debug('%s: releasing connection', this.txid);
274
+ delete connection.__knexTxId;
274
275
  this.client.releaseConnection(connection);
275
276
  } else {
276
277
  debug('%s: not releasing external connection', this.txid);
@@ -865,8 +865,8 @@ class QueryCompiler {
865
865
  // Make sure tableName is processed by the formatter first.
866
866
  const { tableName } = this;
867
867
  const withSQL = this.with();
868
- const wheres = this.where();
869
868
  const joins = this.join();
869
+ const wheres = this.where();
870
870
  // When using joins, delete the "from" table values as a default
871
871
  const deleteSelector = joins ? tableName + ' ' : '';
872
872
  return (
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "knex",
3
- "version": "3.2.5",
3
+ "version": "3.2.7",
4
4
  "description": "A batteries-included SQL query & schema builder for PostgresSQL, MySQL, CockroachDB, MSSQL and SQLite3",
5
5
  "main": "knex.js",
6
6
  "types": "types/index.d.ts",
@@ -15,6 +15,10 @@
15
15
  "default": "./knex.js"
16
16
  }
17
17
  },
18
+ "./bin/*.js": "./bin/*.js",
19
+ "./bin/*": "./bin/*",
20
+ "./types/*.d.ts": "./types/*.d.ts",
21
+ "./types/*": "./types/*.d.ts",
18
22
  "./lib/*.js": "./lib/*.js",
19
23
  "./lib/*": "./lib/*.js",
20
24
  "./knex": "./knex.js",
@@ -33,10 +37,10 @@
33
37
  "format:check": "prettier --list-different .",
34
38
  "debug:test": "mocha --inspect-brk --exit -t 0 test/all-tests-suite.js",
35
39
  "debug:tape": "node --inspect-brk test/tape/index.js",
36
- "coveralls": "nyc report --reporter=lcov",
37
40
  "lint": "eslint --cache .",
38
41
  "lint:fix": "eslint --cache --fix .",
39
- "lint:types": "tsd && tstyche --target 5.4 test-tstyche/esm-types.tst.ts",
42
+ "prelint:types": "cd test-tstyche && npm i",
43
+ "lint:types": "tsd && tstyche --target 5.4",
40
44
  "lint:everything": "npm run lint && npm run lint:types",
41
45
  "lint:fix:everything": "npm run lint:fix && npm run lint:types",
42
46
  "test:unit": "npm run test:unit-only && npm run test:cli",
@@ -254,7 +258,6 @@
254
258
  "!lib/**/*.d.ts",
255
259
  "!lib/**/*.js.map",
256
260
  "!lib/.gitignore",
257
- "scripts/*",
258
261
  "types/index.d.ts",
259
262
  "types/index.d.mts",
260
263
  "types/result.d.ts",
@@ -1,19 +0,0 @@
1
- #!/bin/bash -e
2
-
3
- mkdir -p /tmp/artifacts
4
-
5
- HERE="$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )"
6
-
7
- evtfile=""
8
- if [ -f "$1" ]; then
9
- evtfile="$1"
10
- elif [ -f "$HERE/$1.json" ]; then
11
- evtfile="$HERE/$1.json"
12
- else
13
- echo "Usage: $0 <event jsonfile>"
14
- exit 1
15
- fi
16
-
17
- shift
18
-
19
- act --artifact-server-path /tmp/artifacts -e "$evtfile" -W "$HERE/../../.github/workflows/publish.yml" "$@"
@@ -1,11 +0,0 @@
1
- {
2
- "pull_request": {
3
- "head": {
4
- "ref": "pr-getting-merged"
5
- },
6
- "base": {
7
- "ref": "master"
8
- },
9
- "merged": true
10
- }
11
- }
@@ -1,12 +0,0 @@
1
- {
2
- "pull_request": {
3
- "head": {
4
- "ref": "pr-getting-merged"
5
- },
6
- "base": {
7
- "ref": "master"
8
- },
9
- "merged": true,
10
- "labels": [{ "name": "patch" }]
11
- }
12
- }
@@ -1,12 +0,0 @@
1
- {
2
- "pull_request": {
3
- "head": {
4
- "ref": "pr-getting-merged"
5
- },
6
- "base": {
7
- "ref": "master"
8
- },
9
- "merged": true,
10
- "labels": [{ "name": "skip-release" }]
11
- }
12
- }
@@ -1,12 +0,0 @@
1
- {
2
- "pull_request": {
3
- "head": {
4
- "ref": "pr-getting-merged"
5
- },
6
- "base": {
7
- "ref": "master"
8
- },
9
- "merged": false,
10
- "labels": [{ "name": "patch" }]
11
- }
12
- }
@@ -1,122 +0,0 @@
1
- #!/bin/bash -e
2
-
3
- # context: currently, no package lockfile is utilized in this repository.
4
- # this is so that CI tests run on the ~latest versions of dependencies,
5
- # especially the peer dependencies of database clients, so that our tests
6
- # will more readily surface problems that our users will experience when
7
- # they just do the normal thing.
8
-
9
- # however, for automatic release publishing, we want to be much stricter
10
- # with the dependencies that are involved in the workflow to avoid risks
11
- # from e.g. supply chain attacks.
12
-
13
- # we could use "npm ci" if we had a lockfile, but instead we're going to
14
- # separately maintain pinned versions of the build dependencies and run
15
- # exactly those versions. care should be taken when updating/altering the
16
- # versions to vet them.
17
-
18
- # pinned versions of the dependencies required to perform a release build
19
- declare -A PINNED_VERSIONS=(
20
- [typescript]="5.0.4"
21
- [prettier]="2.8.7"
22
- [@types/node]="20.19.11"
23
- [@tsconfig/node12]="1.0.11"
24
- )
25
-
26
- # validate args
27
- BUMP_TYPE="$1"
28
- case "$BUMP_TYPE" in
29
- major|minor|patch)
30
- # valid
31
- ;;
32
- *)
33
- >&2 echo "Invalid bump type. Use: $0 {major|minor|patch}"
34
- exit 1
35
- ;;
36
- esac
37
-
38
-
39
- # npm 7 doesn't provide a way to install only a specific dependency, it's
40
- # all-or-nothing. so we have to do some shenanigans to validate our pinned
41
- # versions against package.json
42
-
43
- # create a jq expression for a minimal package.json that includes only
44
- # our build dependencies
45
- tmpl='
46
- {
47
- name: "dep-check",
48
- private: true,
49
- version: "0.0.0",
50
- devDependencies: {
51
- '
52
- for pkg in "${!PINNED_VERSIONS[@]}"; do
53
- # for each pinned dependency, add something like:
54
- # pkg: .devDependencies.pkg
55
- tmpl+=" \"${pkg}\": .devDependencies[\"${pkg}\"],
56
- "
57
- done
58
- tmpl+='
59
- }
60
- }'
61
-
62
- PROJECT_DIR="$(pwd)"
63
- TMP_DIR="$(mktemp -d)"
64
-
65
- # render the template to a package.json file in a temp dir
66
- echo
67
- echo "Build dependencies:"
68
- jq "$tmpl" package.json | tee "$TMP_DIR/package.json"
69
-
70
- # install dependencies at the pinned version in the temp dir
71
- # ignore pre/post script hooks
72
- echo
73
- echo "Installing packages"
74
- >/dev/null pushd "$TMP_DIR"
75
-
76
- failed=0
77
- for pkg in "${!PINNED_VERSIONS[@]}"; do
78
- fqpkg="${pkg}@${PINNED_VERSIONS[$pkg]}"
79
- echo "npm install --no-save --ignore-scripts $fqpkg"
80
- >/dev/null 2>/dev/null npm install --no-save --ignore-scripts "$fqpkg"
81
-
82
-
83
- # ensure the pinned version conforms to package.json semver specification
84
- if npm ls 2>/dev/null | grep invalid; then
85
- failed=1
86
- fi
87
- done
88
-
89
- # one or more pins is incorrect, do not publish
90
- if [[ "$failed" = 1 ]]; then
91
- echo
92
- echo "One or more pinned dependencies do not satisfy package.json requirements"
93
- echo "Please update '$0'"
94
- exit 1
95
- fi
96
-
97
- >/dev/null popd
98
-
99
- # move tempdir node_modules to build dir
100
- mv "$TMP_DIR/node_modules" "$PROJECT_DIR/node_modules"
101
- echo
102
- echo "node_modules:"
103
- ls -l node_modules
104
-
105
- echo "Running build steps"
106
-
107
- # run the package.json build script
108
- # currently, this executes typescript and uses
109
- # prettier to format the TS output
110
- npm run build
111
-
112
- # bump the version in package.json
113
- npm version "$BUMP_TYPE" --no-git-tag-version
114
-
115
- # we don't commit here, but we do create the tarball that
116
- # will be published to npm. the dependent job takes the
117
- # tarball and commits the changes + publishes the tarball
118
-
119
- # create the tarball for handoff and record its filename
120
- TARBALL="$(npm pack --silent)"
121
- echo "tarball=$TARBALL" >> "$GITHUB_OUTPUT"
122
- ls -la "$TARBALL"
package/scripts/build.js DELETED
@@ -1,125 +0,0 @@
1
- #!/usr/bin/env node
2
- const fs = require('fs');
3
- const path = require('path');
4
- const child_process = require('child_process');
5
- const _ = require('lodash');
6
-
7
- const exec = function (cmd, args) {
8
- return new Promise(function (resolve, reject) {
9
- // Execute command
10
- const child = child_process.exec(cmd, {
11
- cwd: process.cwd(),
12
- env: process.env,
13
- });
14
-
15
- // Pass stdout and stderr
16
- child.stdout.on('data', function (data) {
17
- process.stdout.write(data.toString());
18
- });
19
- child.stderr.on('data', function (data) {
20
- process.stderr.write(data.toString());
21
- });
22
- // Handle result
23
- child.on('exit', function (code) {
24
- if (code) reject(code);
25
- else resolve();
26
- });
27
- child.on('error', reject);
28
- });
29
- };
30
-
31
- const CWD = process.cwd();
32
- const POSTINSTALL_BUILD_CWD = process.env.POSTINSTALL_BUILD_CWD;
33
-
34
- // If we didn't have this check, then we'd be stuck in an infinite `postinstall`
35
- // loop, since we run `npm install --only=dev` below, triggering another
36
- // `postinstall`. We can't use `--ignore-scripts` because that ignores scripts
37
- // on all the modules that get installed, too, which would break stuff. So
38
- // instead, we set an environment variable, `POSTINSTALL_BUILD_CWD`, that keeps
39
- // track of what we're installing. It's more than just a yes/no flag because
40
- // the dev dependencies we're installing might use `postinstall-build` too, and
41
- // we don't want the flag to prevent them from running.
42
- if (POSTINSTALL_BUILD_CWD !== CWD) {
43
- const BUILD_ARTIFACT = process.argv[2];
44
- const BUILD_COMMAND = process.argv[3];
45
-
46
- fs.stat(BUILD_ARTIFACT, function (err, stats) {
47
- if (err || !(stats.isFile() || stats.isDirectory())) {
48
- // This script will run again after we run `npm install` below. Set an
49
- // environment variable to tell it to skip the check. Really we just want
50
- // the execSync's `env` to be modified, but it's easier just modify and
51
- // pass along the entire `process.env`.
52
- process.env.POSTINSTALL_BUILD_CWD = CWD;
53
- // We already have prod dependencies, that's what triggered `postinstall`
54
- // in the first place. So only install dev.
55
-
56
- // Fetch package.json
57
- const pkgJson = require(path.join(CWD, 'package.json'));
58
- const devDeps = pkgJson.devDependencies;
59
- // Values listed under `buildDependencies` contain the dependency names
60
- // that are required for `lib` building.
61
- const buildDependencies = _.pick(devDeps, pkgJson.buildDependencies);
62
-
63
- // Proceed only if there is something to install
64
- if (!_.isEmpty(buildDependencies)) {
65
- const opts = { env: process.env, stdio: 'inherit' };
66
-
67
- console.log('Building Knex.js');
68
-
69
- // Map all key (dependency) value (semver) pairs to
70
- // "dependency@semver dependency@semver ..." string that can be used
71
- // for `npm install` command
72
- const installArgs = _(buildDependencies)
73
- .pickBy(function (semver, dep) {
74
- // Check if the dependency is already installed
75
- try {
76
- require(dep);
77
- return false;
78
- } catch (err) {
79
- return true;
80
- }
81
- })
82
- .map(function (semver, dep) {
83
- // Format installable dependencies
84
- return dep + '@' + semver;
85
- })
86
- .value()
87
- .join(' ');
88
- const needsDepInstallation = !_.isEmpty(installArgs);
89
- const dependenciesInstalledQ = needsDepInstallation
90
- ? exec('npm install ' + installArgs, opts)
91
- : Promise.resolve();
92
- dependenciesInstalledQ
93
- .then(function () {
94
- console.log('✓');
95
- // Don't need the flag anymore as `postinstall` was already run.
96
- // Change it back so the environment is minimally changed for the
97
- // remaining commands.
98
- process.env.POSTINSTALL_BUILD_CWD = POSTINSTALL_BUILD_CWD;
99
- console.log('Building compiled files (' + BUILD_COMMAND + ')');
100
- return exec(BUILD_COMMAND, opts);
101
- })
102
- .catch(function (err) {
103
- console.error(err);
104
- process.exit(1);
105
- })
106
- .then(function () {
107
- if (process.env.NODE_ENV === 'production') {
108
- console.log('✓');
109
- console.log('Pruning dev dependencies for production build');
110
- return exec('npm prune --production', opts);
111
- } else {
112
- console.log('Skipping npm prune');
113
- }
114
- })
115
- .then(function () {
116
- console.log('✓');
117
- })
118
- .catch(function (err) {
119
- console.error(err);
120
- process.exit(1);
121
- });
122
- }
123
- }
124
- });
125
- }
package/scripts/clean.js DELETED
@@ -1,31 +0,0 @@
1
- #!/usr/bin/env node
2
-
3
- const fs = require('fs');
4
- const path = require('path');
5
- const { execSync } = require('child_process');
6
-
7
- function main() {
8
- const repoDir = path.dirname(__dirname);
9
- const gitDir = path.join(repoDir, '.git');
10
- const gitDirExists = doesDirectoryExist(gitDir);
11
- if (!gitDirExists) {
12
- console.log("No .git directory detected so can not clean 'lib/'. Exiting.");
13
- process.exit(0);
14
- }
15
- console.log(
16
- "Cleaning 'lib/' of outputted files from Typescript compilation ..."
17
- );
18
- const cmd = 'git clean -f -X lib/';
19
- const output = execSync(cmd, { cwd: repoDir });
20
- console.log(output.toString('utf8'));
21
- console.log('Done');
22
- }
23
-
24
- function doesDirectoryExist(p) {
25
- if (fs.existsSync(p)) {
26
- return fs.lstatSync(p).isDirectory();
27
- }
28
- return false;
29
- }
30
-
31
- main();