knex 3.2.6 → 3.2.8
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +16 -0
- package/knex.mjs +0 -1
- package/lib/client.js +11 -8
- package/lib/dialects/mysql/schema/mysql-tablecompiler.js +30 -14
- package/lib/dialects/sqlite3/execution/sqlite-transaction.js +1 -1
- package/lib/dialects/sqlite3/schema/ddl.js +5 -1
- package/lib/execution/internal/query-executioner.js +1 -1
- package/lib/execution/transaction.js +1 -0
- package/lib/query/querycompiler.js +1 -1
- package/package.json +1 -26
- package/types/index.d.ts +5 -3
- package/scripts/act-testing/act.sh +0 -19
- package/scripts/act-testing/merged-no-label.json +0 -11
- package/scripts/act-testing/merged-patch-labeled.json +0 -12
- package/scripts/act-testing/merged-skip-labeled.json +0 -12
- package/scripts/act-testing/not-merged-patch-labeled.json +0 -12
- package/scripts/build-for-release.sh +0 -122
- package/scripts/build.js +0 -125
- package/scripts/clean.js +0 -31
- package/scripts/docker-compose.yml +0 -150
- package/scripts/format-changelog.js +0 -55
- package/scripts/next-release-howto.md +0 -24
- package/scripts/oracledb-install-driver-libs.sh +0 -82
- package/scripts/release.sh +0 -36
- package/scripts/runkit-example.js +0 -35
- package/scripts/stress-test/README.txt +0 -18
- package/scripts/stress-test/docker-compose.yml +0 -55
- package/scripts/stress-test/knex-stress-test.js +0 -212
- package/scripts/stress-test/mysql2-random-hanging-every-now-and-then.js +0 -149
- package/scripts/stress-test/mysql2-sudden-exit-without-error.js +0 -101
- package/scripts/stress-test/reconnect-test-mysql-based-drivers.js +0 -188
- package/types/index.d.mts +0 -14
package/CHANGELOG.md
CHANGED
|
@@ -1,5 +1,21 @@
|
|
|
1
1
|
# Master (Unreleased)
|
|
2
2
|
|
|
3
|
+
### Bug fixes
|
|
4
|
+
|
|
5
|
+
- Reverts the breaking changes added in [#6227](https://github.com/knex/knex/issues/6227). This means that the ESM import of Knex is reverted to `import { knex } from 'knex/knex.mjs'` [#6422](https://github.com/knex/knex/issues/6422)
|
|
6
|
+
- fix(types): allow a `QueryBuilder` type as a value in an `update` [#6419](https://github.com/knex/knex/issues/6419)
|
|
7
|
+
|
|
8
|
+
# 3.2.7 - 27 March, 2026
|
|
9
|
+
|
|
10
|
+
### Bug fixes
|
|
11
|
+
|
|
12
|
+
- fix sqlite DDL operations failing inside transactions [#6408](https://github.com/knex/knex/issues/6408)
|
|
13
|
+
- fix: handle lowercase INFORMATION_SCHEMA keys in MySQL renameColumn [#6407](https://github.com/knex/knex/issues/6407)
|
|
14
|
+
- fix: clone config in client constructor [#5633](https://github.com/knex/knex/issues/5633)
|
|
15
|
+
- fix: remove \_\_knexTxId from transaction connection on release [#5288](https://github.com/knex/knex/issues/5288)
|
|
16
|
+
- fix: correct binding order in delete with subquery join [#6412](https://github.com/knex/knex/issues/6412)
|
|
17
|
+
- chore: omit ./scripts from published package [#6356](https://github.com/knex/knex/issues/6356)
|
|
18
|
+
|
|
3
19
|
# 3.2.6 - 24 March, 2026
|
|
4
20
|
|
|
5
21
|
### Bug fixes
|
package/knex.mjs
CHANGED
package/lib/client.js
CHANGED
|
@@ -41,11 +41,14 @@ const debug = require('debug')('knex:client');
|
|
|
41
41
|
class Client extends EventEmitter {
|
|
42
42
|
constructor(config = {}) {
|
|
43
43
|
super();
|
|
44
|
-
this.config = config;
|
|
45
|
-
|
|
44
|
+
this.config = { ...config };
|
|
45
|
+
if (config.connection && typeof config.connection === 'object') {
|
|
46
|
+
this.config.connection = { ...config.connection };
|
|
47
|
+
}
|
|
48
|
+
this.logger = new Logger(this.config);
|
|
46
49
|
|
|
47
|
-
if (this.config.connection && this.config.connection.password) {
|
|
48
|
-
setHiddenProperty(this.config.connection);
|
|
50
|
+
if (this.config.connection && config.connection.password) {
|
|
51
|
+
setHiddenProperty(this.config.connection, config.connection);
|
|
49
52
|
}
|
|
50
53
|
|
|
51
54
|
//Client is a required field, so throw error if it's not supplied.
|
|
@@ -70,19 +73,19 @@ class Client extends EventEmitter {
|
|
|
70
73
|
}
|
|
71
74
|
|
|
72
75
|
if (config.connection && config.connection instanceof Function) {
|
|
73
|
-
this.connectionConfigProvider = config.connection;
|
|
76
|
+
this.connectionConfigProvider = this.config.connection;
|
|
74
77
|
this.connectionConfigExpirationChecker = () => true; // causes the provider to be called on first use
|
|
75
78
|
} else {
|
|
76
|
-
this.connectionSettings = cloneDeep(config.connection || {});
|
|
79
|
+
this.connectionSettings = cloneDeep(this.config.connection || {});
|
|
77
80
|
if (config.connection && config.connection.password) {
|
|
78
|
-
setHiddenProperty(this.connectionSettings, config.connection);
|
|
81
|
+
setHiddenProperty(this.connectionSettings, this.config.connection);
|
|
79
82
|
}
|
|
80
83
|
this.connectionConfigExpirationChecker = null;
|
|
81
84
|
}
|
|
82
85
|
if (this.driverName && config.connection) {
|
|
83
86
|
this.initializeDriver();
|
|
84
87
|
if (!config.pool || (config.pool && config.pool.max !== 0)) {
|
|
85
|
-
this.initializePool(config);
|
|
88
|
+
this.initializePool(this.config);
|
|
86
89
|
}
|
|
87
90
|
}
|
|
88
91
|
this.valueForUndefined = this.raw('DEFAULT');
|
|
@@ -125,11 +125,19 @@ class TableCompiler_MySQL extends TableCompiler {
|
|
|
125
125
|
return compiler.createFKRefs(
|
|
126
126
|
runner,
|
|
127
127
|
refs.map(function (ref) {
|
|
128
|
-
if (ref.REFERENCED_COLUMN_NAME === from) {
|
|
129
|
-
ref.REFERENCED_COLUMN_NAME
|
|
128
|
+
const refColKey =
|
|
129
|
+
ref.REFERENCED_COLUMN_NAME !== undefined
|
|
130
|
+
? 'REFERENCED_COLUMN_NAME'
|
|
131
|
+
: 'referenced_column_name';
|
|
132
|
+
const colKey =
|
|
133
|
+
ref.COLUMN_NAME !== undefined
|
|
134
|
+
? 'COLUMN_NAME'
|
|
135
|
+
: 'column_name';
|
|
136
|
+
if (ref[refColKey] === from) {
|
|
137
|
+
ref[refColKey] = to;
|
|
130
138
|
}
|
|
131
|
-
if (ref.COLUMN_NAME === from) {
|
|
132
|
-
ref.COLUMN_NAME = to;
|
|
139
|
+
if (ref[colKey] === from) {
|
|
140
|
+
ref[colKey] = to;
|
|
133
141
|
}
|
|
134
142
|
return ref;
|
|
135
143
|
})
|
|
@@ -187,7 +195,7 @@ class TableCompiler_MySQL extends TableCompiler {
|
|
|
187
195
|
' RC.UPDATE_RULE, RC.DELETE_RULE ' +
|
|
188
196
|
'FROM INFORMATION_SCHEMA.KEY_COLUMN_USAGE AS KCU ' +
|
|
189
197
|
'JOIN INFORMATION_SCHEMA.REFERENTIAL_CONSTRAINTS AS RC ' +
|
|
190
|
-
' USING(CONSTRAINT_NAME)' +
|
|
198
|
+
' USING(CONSTRAINT_NAME) ' +
|
|
191
199
|
'WHERE KCU.REFERENCED_TABLE_NAME = ' +
|
|
192
200
|
this.client.parameter(
|
|
193
201
|
this.tableNameRaw,
|
|
@@ -220,8 +228,10 @@ class TableCompiler_MySQL extends TableCompiler {
|
|
|
220
228
|
|
|
221
229
|
return Promise.all(
|
|
222
230
|
refs.map(function (ref) {
|
|
223
|
-
const constraintName = formatter.wrap(ref.CONSTRAINT_NAME);
|
|
224
|
-
|
|
231
|
+
const constraintName = formatter.wrap(
|
|
232
|
+
ref.CONSTRAINT_NAME || ref.constraint_name
|
|
233
|
+
);
|
|
234
|
+
const tableName = formatter.wrap(ref.TABLE_NAME || ref.table_name);
|
|
225
235
|
return runner.query({
|
|
226
236
|
sql: `alter table ${tableName} drop foreign key ${constraintName}`,
|
|
227
237
|
});
|
|
@@ -234,13 +244,19 @@ class TableCompiler_MySQL extends TableCompiler {
|
|
|
234
244
|
|
|
235
245
|
return Promise.all(
|
|
236
246
|
refs.map(function (ref) {
|
|
237
|
-
const tableName = formatter.wrap(ref.TABLE_NAME);
|
|
238
|
-
const keyName = formatter.wrap(ref.CONSTRAINT_NAME);
|
|
239
|
-
|
|
240
|
-
|
|
241
|
-
const inTable = formatter.wrap(ref.REFERENCED_TABLE_NAME);
|
|
242
|
-
const onUpdate = ` ON UPDATE ${ref.UPDATE_RULE}`;
|
|
243
|
-
|
|
247
|
+
const tableName = formatter.wrap(ref.TABLE_NAME || ref.table_name);
|
|
248
|
+
const keyName = formatter.wrap(
|
|
249
|
+
ref.CONSTRAINT_NAME || ref.constraint_name
|
|
250
|
+
);
|
|
251
|
+
const column = formatter.columnize(ref.COLUMN_NAME || ref.column_name);
|
|
252
|
+
const references = formatter.columnize(
|
|
253
|
+
ref.REFERENCED_COLUMN_NAME || ref.referenced_column_name
|
|
254
|
+
);
|
|
255
|
+
const inTable = formatter.wrap(
|
|
256
|
+
ref.REFERENCED_TABLE_NAME || ref.referenced_table_name
|
|
257
|
+
);
|
|
258
|
+
const onUpdate = ` ON UPDATE ${ref.UPDATE_RULE || ref.update_rule}`;
|
|
259
|
+
const onDelete = ` ON DELETE ${ref.DELETE_RULE || ref.delete_rule}`;
|
|
244
260
|
|
|
245
261
|
return runner.query({
|
|
246
262
|
sql:
|
|
@@ -88,7 +88,7 @@ class Transaction_Sqlite extends Transaction {
|
|
|
88
88
|
if (
|
|
89
89
|
strictForeignKeyPragma &&
|
|
90
90
|
hasOuterTransaction &&
|
|
91
|
-
restoreForeignCheck
|
|
91
|
+
restoreForeignCheck != null
|
|
92
92
|
) {
|
|
93
93
|
throw new Error(
|
|
94
94
|
`Refusing to create transaction: unable to change \`foreign_keys\` pragma inside a nested transaction`
|
|
@@ -334,6 +334,10 @@ class SQLite3_DDL {
|
|
|
334
334
|
}
|
|
335
335
|
|
|
336
336
|
async alter(newSql, createIndices, columns) {
|
|
337
|
+
// When already inside a transaction, we cannot change the foreign_keys
|
|
338
|
+
// pragma (SQLite silently ignores pragma changes within transactions).
|
|
339
|
+
// Use `null` to leave it as-is and avoid the nested-transaction guard.
|
|
340
|
+
const enforceForeignCheck = this.client.transacting ? null : false;
|
|
337
341
|
await this.client.transaction(
|
|
338
342
|
async (trx) => {
|
|
339
343
|
await trx.raw(newSql);
|
|
@@ -345,7 +349,7 @@ class SQLite3_DDL {
|
|
|
345
349
|
await trx.raw(createIndex);
|
|
346
350
|
}
|
|
347
351
|
},
|
|
348
|
-
{ connection: this.connection, enforceForeignCheck: false }
|
|
352
|
+
{ connection: this.connection, enforceForeignCheck }
|
|
349
353
|
);
|
|
350
354
|
}
|
|
351
355
|
|
|
@@ -47,7 +47,7 @@ function executeQuery(connection, queryObject, client) {
|
|
|
47
47
|
'query-error',
|
|
48
48
|
err,
|
|
49
49
|
Object.assign(
|
|
50
|
-
{ __knexUid: connection.__knexUid, __knexTxId: connection.
|
|
50
|
+
{ __knexUid: connection.__knexUid, __knexTxId: connection.__knexTxId },
|
|
51
51
|
queryObject
|
|
52
52
|
)
|
|
53
53
|
);
|
|
@@ -271,6 +271,7 @@ class Transaction extends EventEmitter {
|
|
|
271
271
|
} finally {
|
|
272
272
|
if (!configConnection) {
|
|
273
273
|
debug('%s: releasing connection', this.txid);
|
|
274
|
+
delete connection.__knexTxId;
|
|
274
275
|
this.client.releaseConnection(connection);
|
|
275
276
|
} else {
|
|
276
277
|
debug('%s: not releasing external connection', this.txid);
|
|
@@ -865,8 +865,8 @@ class QueryCompiler {
|
|
|
865
865
|
// Make sure tableName is processed by the formatter first.
|
|
866
866
|
const { tableName } = this;
|
|
867
867
|
const withSQL = this.with();
|
|
868
|
-
const wheres = this.where();
|
|
869
868
|
const joins = this.join();
|
|
869
|
+
const wheres = this.where();
|
|
870
870
|
// When using joins, delete the "from" table values as a default
|
|
871
871
|
const deleteSelector = joins ? tableName + ' ' : '';
|
|
872
872
|
return (
|
package/package.json
CHANGED
|
@@ -1,31 +1,9 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "knex",
|
|
3
|
-
"version": "3.2.6",
|
|
3
|
+
"version": "3.2.8",
|
|
4
4
|
"description": "A batteries-included SQL query & schema builder for PostgresSQL, MySQL, CockroachDB, MSSQL and SQLite3",
|
|
5
5
|
"main": "knex.js",
|
|
6
6
|
"types": "types/index.d.ts",
|
|
7
|
-
"exports": {
|
|
8
|
-
".": {
|
|
9
|
-
"import": {
|
|
10
|
-
"types": "./types/index.d.mts",
|
|
11
|
-
"default": "./knex.mjs"
|
|
12
|
-
},
|
|
13
|
-
"require": {
|
|
14
|
-
"types": "./types/index.d.ts",
|
|
15
|
-
"default": "./knex.js"
|
|
16
|
-
}
|
|
17
|
-
},
|
|
18
|
-
"./bin/*.js": "./bin/*.js",
|
|
19
|
-
"./bin/*": "./bin/*",
|
|
20
|
-
"./types/*.d.ts": "./types/*.d.ts",
|
|
21
|
-
"./types/*": "./types/*.d.ts",
|
|
22
|
-
"./lib/*.js": "./lib/*.js",
|
|
23
|
-
"./lib/*": "./lib/*.js",
|
|
24
|
-
"./knex": "./knex.js",
|
|
25
|
-
"./knex.js": "./knex.js",
|
|
26
|
-
"./knex.mjs": "./knex.mjs",
|
|
27
|
-
"./package.json": "./package.json"
|
|
28
|
-
},
|
|
29
7
|
"engines": {
|
|
30
8
|
"node": ">=16"
|
|
31
9
|
},
|
|
@@ -37,7 +15,6 @@
|
|
|
37
15
|
"format:check": "prettier --list-different .",
|
|
38
16
|
"debug:test": "mocha --inspect-brk --exit -t 0 test/all-tests-suite.js",
|
|
39
17
|
"debug:tape": "node --inspect-brk test/tape/index.js",
|
|
40
|
-
"coveralls": "nyc report --reporter=lcov",
|
|
41
18
|
"lint": "eslint --cache .",
|
|
42
19
|
"lint:fix": "eslint --cache --fix .",
|
|
43
20
|
"prelint:types": "cd test-tstyche && npm i",
|
|
@@ -163,7 +140,6 @@
|
|
|
163
140
|
"pg": "^8.20.0",
|
|
164
141
|
"pg-query-stream": "^4.14.0",
|
|
165
142
|
"prettier": "2.8.7",
|
|
166
|
-
"resolve.exports": "^2.0.3",
|
|
167
143
|
"rimraf": "^5.0.5",
|
|
168
144
|
"semver": "^7.7.4",
|
|
169
145
|
"sinon": "^15.0.1",
|
|
@@ -259,7 +235,6 @@
|
|
|
259
235
|
"!lib/**/*.d.ts",
|
|
260
236
|
"!lib/**/*.js.map",
|
|
261
237
|
"!lib/.gitignore",
|
|
262
|
-
"scripts/*",
|
|
263
238
|
"types/index.d.ts",
|
|
264
239
|
"types/index.d.mts",
|
|
265
240
|
"types/result.d.ts",
|
package/types/index.d.ts
CHANGED
|
@@ -504,6 +504,8 @@ declare namespace Knex {
|
|
|
504
504
|
| Record<string, unknown>
|
|
505
505
|
| Knex.Raw;
|
|
506
506
|
|
|
507
|
+
type ValueOrBuilder = Value | Knex.QueryBuilder;
|
|
508
|
+
|
|
507
509
|
interface ValueDict extends Dict<Value | Knex.QueryBuilder> {}
|
|
508
510
|
interface AliasDict extends Dict<string> {}
|
|
509
511
|
|
|
@@ -1018,7 +1020,7 @@ declare namespace Knex {
|
|
|
1018
1020
|
>[]
|
|
1019
1021
|
>(
|
|
1020
1022
|
columnName: K1,
|
|
1021
|
-
value: DbColumn<ResolveTableType<TRecord, 'update'>[K1]>,
|
|
1023
|
+
value: DbColumn<ResolveTableType<TRecord, 'update'>[K1]> | QueryBuilder,
|
|
1022
1024
|
returning: readonly K2[],
|
|
1023
1025
|
options?: DMLOptions
|
|
1024
1026
|
): QueryBuilder<TRecord, TResult2>;
|
|
@@ -1028,7 +1030,7 @@ declare namespace Knex {
|
|
|
1028
1030
|
): QueryBuilder<TRecord, number>;
|
|
1029
1031
|
update<TResult2 = SafePartial<TRecord>[]>(
|
|
1030
1032
|
columnName: string,
|
|
1031
|
-
value: Value,
|
|
1033
|
+
value: ValueOrBuilder,
|
|
1032
1034
|
returning: string | readonly string[],
|
|
1033
1035
|
options?: DMLOptions
|
|
1034
1036
|
): QueryBuilder<TRecord, TResult2>;
|
|
@@ -1101,7 +1103,7 @@ declare namespace Knex {
|
|
|
1101
1103
|
|
|
1102
1104
|
update<TResult2 = number>(
|
|
1103
1105
|
columnName: string,
|
|
1104
|
-
value: Value
|
|
1106
|
+
value: ValueOrBuilder
|
|
1105
1107
|
): QueryBuilder<TRecord, TResult2>;
|
|
1106
1108
|
|
|
1107
1109
|
returning(
|
|
@@ -1,19 +0,0 @@
|
|
|
1
|
-
#!/bin/bash -e
|
|
2
|
-
|
|
3
|
-
mkdir -p /tmp/artifacts
|
|
4
|
-
|
|
5
|
-
HERE="$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )"
|
|
6
|
-
|
|
7
|
-
evtfile=""
|
|
8
|
-
if [ -f "$1" ]; then
|
|
9
|
-
evtfile="$1"
|
|
10
|
-
elif [ -f "$HERE/$1.json" ]; then
|
|
11
|
-
evtfile="$HERE/$1.json"
|
|
12
|
-
else
|
|
13
|
-
echo "Usage: $0 <event jsonfile>"
|
|
14
|
-
exit 1
|
|
15
|
-
fi
|
|
16
|
-
|
|
17
|
-
shift
|
|
18
|
-
|
|
19
|
-
act --artifact-server-path /tmp/artifacts -e "$evtfile" -W "$HERE/../../.github/workflows/publish.yml" "$@"
|
|
@@ -1,122 +0,0 @@
|
|
|
1
|
-
#!/bin/bash -e
|
|
2
|
-
|
|
3
|
-
# context: currently, no package lockfile is utilized in this repository.
|
|
4
|
-
# this is so that CI tests run on the ~latest versions of dependencies,
|
|
5
|
-
# especially the peer dependencies of database clients, so that our tests
|
|
6
|
-
# will more readily surface problems that our users will experience when
|
|
7
|
-
# they just do the normal thing.
|
|
8
|
-
|
|
9
|
-
# however, for automatic release publishing, we want to be much stricter
|
|
10
|
-
# with the dependencies that are involved in the workflow to avoid risks
|
|
11
|
-
# from e.g. supply chain attacks.
|
|
12
|
-
|
|
13
|
-
# we could use "npm ci" if we had a lockfile, but instead we're going to
|
|
14
|
-
# separately maintain pinned versions of the build dependencies and run
|
|
15
|
-
# exactly those versions. care should be taken when updating/altering the
|
|
16
|
-
# versions to vet them.
|
|
17
|
-
|
|
18
|
-
# pinned versions of the dependencies required to perform a release build
|
|
19
|
-
declare -A PINNED_VERSIONS=(
|
|
20
|
-
[typescript]="5.0.4"
|
|
21
|
-
[prettier]="2.8.7"
|
|
22
|
-
[@types/node]="20.19.11"
|
|
23
|
-
[@tsconfig/node12]="1.0.11"
|
|
24
|
-
)
|
|
25
|
-
|
|
26
|
-
# validate args
|
|
27
|
-
BUMP_TYPE="$1"
|
|
28
|
-
case "$BUMP_TYPE" in
|
|
29
|
-
major|minor|patch)
|
|
30
|
-
# valid
|
|
31
|
-
;;
|
|
32
|
-
*)
|
|
33
|
-
>&2 echo "Invalid bump type. Use: $0 {major|minor|patch}"
|
|
34
|
-
exit 1
|
|
35
|
-
;;
|
|
36
|
-
esac
|
|
37
|
-
|
|
38
|
-
|
|
39
|
-
# npm 7 doesn't provide a way to install only a specific dependency, it's
|
|
40
|
-
# all-or-nothing. so we have to do some shenanigans to validate our pinned
|
|
41
|
-
# versions against package.json
|
|
42
|
-
|
|
43
|
-
# create a jq expression for a minimal package.json that includes only
|
|
44
|
-
# our build dependencies
|
|
45
|
-
tmpl='
|
|
46
|
-
{
|
|
47
|
-
name: "dep-check",
|
|
48
|
-
private: true,
|
|
49
|
-
version: "0.0.0",
|
|
50
|
-
devDependencies: {
|
|
51
|
-
'
|
|
52
|
-
for pkg in "${!PINNED_VERSIONS[@]}"; do
|
|
53
|
-
# for each pinned dependency, add something like:
|
|
54
|
-
# pkg: .devDependencies.pkg
|
|
55
|
-
tmpl+=" \"${pkg}\": .devDependencies[\"${pkg}\"],
|
|
56
|
-
"
|
|
57
|
-
done
|
|
58
|
-
tmpl+='
|
|
59
|
-
}
|
|
60
|
-
}'
|
|
61
|
-
|
|
62
|
-
PROJECT_DIR="$(pwd)"
|
|
63
|
-
TMP_DIR="$(mktemp -d)"
|
|
64
|
-
|
|
65
|
-
# render the template to a package.json file in a temp dir
|
|
66
|
-
echo
|
|
67
|
-
echo "Build dependencies:"
|
|
68
|
-
jq "$tmpl" package.json | tee "$TMP_DIR/package.json"
|
|
69
|
-
|
|
70
|
-
# install dependencies at the pinned version in the temp dir
|
|
71
|
-
# ignore pre/post script hooks
|
|
72
|
-
echo
|
|
73
|
-
echo "Installing packages"
|
|
74
|
-
>/dev/null pushd "$TMP_DIR"
|
|
75
|
-
|
|
76
|
-
failed=0
|
|
77
|
-
for pkg in "${!PINNED_VERSIONS[@]}"; do
|
|
78
|
-
fqpkg="${pkg}@${PINNED_VERSIONS[$pkg]}"
|
|
79
|
-
echo "npm install --no-save --ignore-scripts $fqpkg"
|
|
80
|
-
>/dev/null 2>/dev/null npm install --no-save --ignore-scripts "$fqpkg"
|
|
81
|
-
|
|
82
|
-
|
|
83
|
-
# ensure the pinned version conforms to package.json semver specification
|
|
84
|
-
if npm ls 2>/dev/null | grep invalid; then
|
|
85
|
-
failed=1
|
|
86
|
-
fi
|
|
87
|
-
done
|
|
88
|
-
|
|
89
|
-
# one or more pins is incorrect, do not publish
|
|
90
|
-
if [[ "$failed" = 1 ]]; then
|
|
91
|
-
echo
|
|
92
|
-
echo "One or more pinned dependencies do not satisfy package.json requirements"
|
|
93
|
-
echo "Please update '$0'"
|
|
94
|
-
exit 1
|
|
95
|
-
fi
|
|
96
|
-
|
|
97
|
-
>/dev/null popd
|
|
98
|
-
|
|
99
|
-
# move tempdir node_modules to build dir
|
|
100
|
-
mv "$TMP_DIR/node_modules" "$PROJECT_DIR/node_modules"
|
|
101
|
-
echo
|
|
102
|
-
echo "node_modules:"
|
|
103
|
-
ls -l node_modules
|
|
104
|
-
|
|
105
|
-
echo "Running build steps"
|
|
106
|
-
|
|
107
|
-
# run the package.json build script
|
|
108
|
-
# currently, this executes typescript and uses
|
|
109
|
-
# prettier to format the TS output
|
|
110
|
-
npm run build
|
|
111
|
-
|
|
112
|
-
# bump the version in package.json
|
|
113
|
-
npm version "$BUMP_TYPE" --no-git-tag-version
|
|
114
|
-
|
|
115
|
-
# we don't commit here, but we do create the tarball that
|
|
116
|
-
# will be published to npm. the dependent job takes the
|
|
117
|
-
# tarball and commits the changes + publishes the tarball
|
|
118
|
-
|
|
119
|
-
# create the tarball for handoff and record its filename
|
|
120
|
-
TARBALL="$(npm pack --silent)"
|
|
121
|
-
echo "tarball=$TARBALL" >> "$GITHUB_OUTPUT"
|
|
122
|
-
ls -la "$TARBALL"
|
package/scripts/build.js
DELETED
|
@@ -1,125 +0,0 @@
|
|
|
1
|
-
#!/usr/bin/env node
|
|
2
|
-
const fs = require('fs');
|
|
3
|
-
const path = require('path');
|
|
4
|
-
const child_process = require('child_process');
|
|
5
|
-
const _ = require('lodash');
|
|
6
|
-
|
|
7
|
-
const exec = function (cmd, args) {
|
|
8
|
-
return new Promise(function (resolve, reject) {
|
|
9
|
-
// Execute command
|
|
10
|
-
const child = child_process.exec(cmd, {
|
|
11
|
-
cwd: process.cwd(),
|
|
12
|
-
env: process.env,
|
|
13
|
-
});
|
|
14
|
-
|
|
15
|
-
// Pass stdout and stderr
|
|
16
|
-
child.stdout.on('data', function (data) {
|
|
17
|
-
process.stdout.write(data.toString());
|
|
18
|
-
});
|
|
19
|
-
child.stderr.on('data', function (data) {
|
|
20
|
-
process.stderr.write(data.toString());
|
|
21
|
-
});
|
|
22
|
-
// Handle result
|
|
23
|
-
child.on('exit', function (code) {
|
|
24
|
-
if (code) reject(code);
|
|
25
|
-
else resolve();
|
|
26
|
-
});
|
|
27
|
-
child.on('error', reject);
|
|
28
|
-
});
|
|
29
|
-
};
|
|
30
|
-
|
|
31
|
-
const CWD = process.cwd();
|
|
32
|
-
const POSTINSTALL_BUILD_CWD = process.env.POSTINSTALL_BUILD_CWD;
|
|
33
|
-
|
|
34
|
-
// If we didn't have this check, then we'd be stuck in an infinite `postinstall`
|
|
35
|
-
// loop, since we run `npm install --only=dev` below, triggering another
|
|
36
|
-
// `postinstall`. We can't use `--ignore-scripts` because that ignores scripts
|
|
37
|
-
// on all the modules that get installed, too, which would break stuff. So
|
|
38
|
-
// instead, we set an environment variable, `POSTINSTALL_BUILD_CWD`, that keeps
|
|
39
|
-
// track of what we're installing. It's more than just a yes/no flag because
|
|
40
|
-
// the dev dependencies we're installing might use `postinstall-build` too, and
|
|
41
|
-
// we don't want the flag to prevent them from running.
|
|
42
|
-
if (POSTINSTALL_BUILD_CWD !== CWD) {
|
|
43
|
-
const BUILD_ARTIFACT = process.argv[2];
|
|
44
|
-
const BUILD_COMMAND = process.argv[3];
|
|
45
|
-
|
|
46
|
-
fs.stat(BUILD_ARTIFACT, function (err, stats) {
|
|
47
|
-
if (err || !(stats.isFile() || stats.isDirectory())) {
|
|
48
|
-
// This script will run again after we run `npm install` below. Set an
|
|
49
|
-
// environment variable to tell it to skip the check. Really we just want
|
|
50
|
-
// the execSync's `env` to be modified, but it's easier just modify and
|
|
51
|
-
// pass along the entire `process.env`.
|
|
52
|
-
process.env.POSTINSTALL_BUILD_CWD = CWD;
|
|
53
|
-
// We already have prod dependencies, that's what triggered `postinstall`
|
|
54
|
-
// in the first place. So only install dev.
|
|
55
|
-
|
|
56
|
-
// Fetch package.json
|
|
57
|
-
const pkgJson = require(path.join(CWD, 'package.json'));
|
|
58
|
-
const devDeps = pkgJson.devDependencies;
|
|
59
|
-
// Values listed under `buildDependencies` contain the dependency names
|
|
60
|
-
// that are required for `lib` building.
|
|
61
|
-
const buildDependencies = _.pick(devDeps, pkgJson.buildDependencies);
|
|
62
|
-
|
|
63
|
-
// Proceed only if there is something to install
|
|
64
|
-
if (!_.isEmpty(buildDependencies)) {
|
|
65
|
-
const opts = { env: process.env, stdio: 'inherit' };
|
|
66
|
-
|
|
67
|
-
console.log('Building Knex.js');
|
|
68
|
-
|
|
69
|
-
// Map all key (dependency) value (semver) pairs to
|
|
70
|
-
// "dependency@semver dependency@semver ..." string that can be used
|
|
71
|
-
// for `npm install` command
|
|
72
|
-
const installArgs = _(buildDependencies)
|
|
73
|
-
.pickBy(function (semver, dep) {
|
|
74
|
-
// Check if the dependency is already installed
|
|
75
|
-
try {
|
|
76
|
-
require(dep);
|
|
77
|
-
return false;
|
|
78
|
-
} catch (err) {
|
|
79
|
-
return true;
|
|
80
|
-
}
|
|
81
|
-
})
|
|
82
|
-
.map(function (semver, dep) {
|
|
83
|
-
// Format installable dependencies
|
|
84
|
-
return dep + '@' + semver;
|
|
85
|
-
})
|
|
86
|
-
.value()
|
|
87
|
-
.join(' ');
|
|
88
|
-
const needsDepInstallation = !_.isEmpty(installArgs);
|
|
89
|
-
const dependenciesInstalledQ = needsDepInstallation
|
|
90
|
-
? exec('npm install ' + installArgs, opts)
|
|
91
|
-
: Promise.resolve();
|
|
92
|
-
dependenciesInstalledQ
|
|
93
|
-
.then(function () {
|
|
94
|
-
console.log('✓');
|
|
95
|
-
// Don't need the flag anymore as `postinstall` was already run.
|
|
96
|
-
// Change it back so the environment is minimally changed for the
|
|
97
|
-
// remaining commands.
|
|
98
|
-
process.env.POSTINSTALL_BUILD_CWD = POSTINSTALL_BUILD_CWD;
|
|
99
|
-
console.log('Building compiled files (' + BUILD_COMMAND + ')');
|
|
100
|
-
return exec(BUILD_COMMAND, opts);
|
|
101
|
-
})
|
|
102
|
-
.catch(function (err) {
|
|
103
|
-
console.error(err);
|
|
104
|
-
process.exit(1);
|
|
105
|
-
})
|
|
106
|
-
.then(function () {
|
|
107
|
-
if (process.env.NODE_ENV === 'production') {
|
|
108
|
-
console.log('✓');
|
|
109
|
-
console.log('Pruning dev dependencies for production build');
|
|
110
|
-
return exec('npm prune --production', opts);
|
|
111
|
-
} else {
|
|
112
|
-
console.log('Skipping npm prune');
|
|
113
|
-
}
|
|
114
|
-
})
|
|
115
|
-
.then(function () {
|
|
116
|
-
console.log('✓');
|
|
117
|
-
})
|
|
118
|
-
.catch(function (err) {
|
|
119
|
-
console.error(err);
|
|
120
|
-
process.exit(1);
|
|
121
|
-
});
|
|
122
|
-
}
|
|
123
|
-
}
|
|
124
|
-
});
|
|
125
|
-
}
|
package/scripts/clean.js
DELETED
|
@@ -1,31 +0,0 @@
|
|
|
1
|
-
#!/usr/bin/env node
|
|
2
|
-
|
|
3
|
-
const fs = require('fs');
|
|
4
|
-
const path = require('path');
|
|
5
|
-
const { execSync } = require('child_process');
|
|
6
|
-
|
|
7
|
-
function main() {
|
|
8
|
-
const repoDir = path.dirname(__dirname);
|
|
9
|
-
const gitDir = path.join(repoDir, '.git');
|
|
10
|
-
const gitDirExists = doesDirectoryExist(gitDir);
|
|
11
|
-
if (!gitDirExists) {
|
|
12
|
-
console.log("No .git directory detected so can not clean 'lib/'. Exiting.");
|
|
13
|
-
process.exit(0);
|
|
14
|
-
}
|
|
15
|
-
console.log(
|
|
16
|
-
"Cleaning 'lib/' of outputted files from Typescript compilation ..."
|
|
17
|
-
);
|
|
18
|
-
const cmd = 'git clean -f -X lib/';
|
|
19
|
-
const output = execSync(cmd, { cwd: repoDir });
|
|
20
|
-
console.log(output.toString('utf8'));
|
|
21
|
-
console.log('Done');
|
|
22
|
-
}
|
|
23
|
-
|
|
24
|
-
function doesDirectoryExist(p) {
|
|
25
|
-
if (fs.existsSync(p)) {
|
|
26
|
-
return fs.lstatSync(p).isDirectory();
|
|
27
|
-
}
|
|
28
|
-
return false;
|
|
29
|
-
}
|
|
30
|
-
|
|
31
|
-
main();
|