@strapi/database 4.3.4 → 4.3.7
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/jest.config.js +1 -1
- package/lib/__tests__/lifecycles.test.js +1 -1
- package/lib/connection.js +4 -4
- package/lib/dialects/dialect.js +12 -2
- package/lib/dialects/index.js +2 -2
- package/lib/dialects/mysql/index.js +2 -2
- package/lib/dialects/mysql/schema-inspector.js +12 -16
- package/lib/dialects/postgresql/index.js +2 -2
- package/lib/dialects/postgresql/schema-inspector.js +30 -25
- package/lib/dialects/sqlite/index.js +9 -1
- package/lib/dialects/sqlite/schema-inspector.js +5 -5
- package/lib/entity-manager.js +46 -72
- package/lib/entity-repository.js +1 -1
- package/lib/errors/database.js +12 -0
- package/lib/errors/index.js +15 -0
- package/lib/errors/invalid-date.js +14 -0
- package/lib/errors/invalid-datetime.js +14 -0
- package/lib/errors/invalid-time.js +14 -0
- package/lib/errors/not-null.js +15 -0
- package/lib/fields/biginteger.js +17 -0
- package/lib/fields/boolean.js +39 -0
- package/lib/fields/date.js +16 -0
- package/lib/fields/datetime.js +19 -0
- package/lib/fields/field.js +17 -0
- package/lib/{fields.d.ts → fields/index.d.ts} +0 -0
- package/lib/fields/index.js +49 -0
- package/lib/fields/json.js +16 -0
- package/lib/fields/number.js +23 -0
- package/lib/fields/shared/parsers.js +69 -0
- package/lib/fields/string.js +17 -0
- package/lib/fields/time.js +17 -0
- package/lib/fields/timestamp.js +19 -0
- package/lib/index.js +1 -1
- package/lib/lifecycles/index.js +2 -2
- package/lib/lifecycles/subscribers/models-lifecycles.js +1 -1
- package/lib/lifecycles/subscribers/timestamps.js +2 -2
- package/lib/metadata/index.js +2 -2
- package/lib/metadata/relations.js +18 -15
- package/lib/migrations/index.js +5 -5
- package/lib/migrations/storage.js +4 -11
- package/lib/query/helpers/join.js +5 -5
- package/lib/query/helpers/order-by.js +1 -1
- package/lib/query/helpers/populate.js +43 -51
- package/lib/query/helpers/search.js +5 -5
- package/lib/query/helpers/transform.js +2 -2
- package/lib/query/helpers/where.js +17 -17
- package/lib/query/query-builder.js +11 -14
- package/lib/schema/builder.js +28 -17
- package/lib/schema/diff.js +15 -15
- package/lib/schema/index.js +1 -2
- package/lib/schema/schema.js +4 -4
- package/lib/schema/storage.js +3 -6
- package/lib/types/index.js +6 -6
- package/lib/utils/content-types.js +3 -3
- package/package.json +3 -3
- package/lib/errors.js +0 -56
- package/lib/fields.js +0 -231

package/lib/errors/database.js
ADDED
@@ -0,0 +1,12 @@
+'use strict';
+
+class DatabaseError extends Error {
+  constructor(message, details = {}) {
+    super();
+    this.name = 'DatabaseError';
+    this.message = message || 'A database error occured';
+    this.details = details;
+  }
+}
+
+module.exports = DatabaseError;

package/lib/errors/index.js
ADDED
@@ -0,0 +1,15 @@
+'use strict';
+
+const DatabaseError = require('./database');
+const NotNullError = require('./not-null');
+const InvalidTimeError = require('./invalid-time');
+const InvalidDateError = require('./invalid-date');
+const InvalidDateTimeError = require('./invalid-datetime');
+
+module.exports = {
+  DatabaseError,
+  NotNullError,
+  InvalidTimeError,
+  InvalidDateError,
+  InvalidDateTimeError,
+};

package/lib/errors/invalid-date.js
ADDED
@@ -0,0 +1,14 @@
+'use strict';
+
+const DatabaseError = require('./database');
+
+class InvalidDateError extends DatabaseError {
+  constructor(message) {
+    super();
+    this.name = 'InvalidTimeFormat';
+    this.message = message || 'Invalid date format, expected YYYY-MM-DD';
+    this.details = {};
+  }
+}
+
+module.exports = InvalidDateError;

package/lib/errors/invalid-datetime.js
ADDED
@@ -0,0 +1,14 @@
+'use strict';
+
+const DatabaseError = require('./database');
+
+class InvalidDateTimeError extends DatabaseError {
+  constructor(message) {
+    super();
+    this.name = 'InvalidTimeFormat';
+    this.message = message || 'Invalid datetime format, expected a timestamp or an ISO date';
+    this.details = {};
+  }
+}
+
+module.exports = InvalidDateTimeError;

package/lib/errors/invalid-time.js
ADDED
@@ -0,0 +1,14 @@
+'use strict';
+
+const DatabaseError = require('./database');
+
+class InvalidTimeError extends DatabaseError {
+  constructor(message) {
+    super();
+    this.name = 'InvalidTimeFormat';
+    this.message = message || 'Invalid time format, expected HH:mm:ss.SSS';
+    this.details = {};
+  }
+}
+
+module.exports = InvalidTimeError;

package/lib/errors/not-null.js
ADDED
@@ -0,0 +1,15 @@
+'use strict';
+
+const DatabaseError = require('./database');
+
+class NotNullError extends DatabaseError {
+  constructor({ column = '' } = {}) {
+    super();
+    this.name = 'NotNullError';
+    this.message = `Not null constraint violation${column ? ` on column ${column}` : ''}.`;
+    this.details = { column };
+    this.stack = '';
+  }
+}
+
+module.exports = NotNullError;
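
The error files above form a small hierarchy: every specific error extends DatabaseError, so callers can catch narrowly or broadly. A minimal consumption sketch (illustration only, not part of the published diff; the require path assumes the lib/errors layout added above, and `persist` is a hypothetical save function):

'use strict';

const { DatabaseError, NotNullError } = require('./lib/errors');

// `persist` is a placeholder for any function that writes to the database.
const saveEntry = async (persist, data) => {
  try {
    return await persist(data);
  } catch (error) {
    if (error instanceof NotNullError) {
      // details.column is filled in by the NotNullError constructor
      console.error(`Missing required value for column "${error.details.column}"`);
      return null;
    }
    if (error instanceof DatabaseError) {
      // every other error from this hierarchy ends up here
      console.error(error.message, error.details);
      return null;
    }
    throw error; // unrelated errors keep propagating
  }
};

module.exports = saveEntry;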

package/lib/fields/biginteger.js
ADDED
@@ -0,0 +1,17 @@
+'use strict';
+
+const { toString } = require('lodash/fp');
+
+const NumberField = require('./number');
+
+class BigIntegerField extends NumberField {
+  toDB(value) {
+    return toString(value);
+  }
+
+  fromDB(value) {
+    return toString(value);
+  }
+}
+
+module.exports = BigIntegerField;

package/lib/fields/boolean.js
ADDED
@@ -0,0 +1,39 @@
+'use strict';
+
+const { toString } = require('lodash/fp');
+
+const Field = require('./field');
+
+class BooleanField extends Field {
+  toDB(value) {
+    if (typeof value === 'boolean') return value;
+
+    if (['true', 't', '1', 1].includes(value)) {
+      return true;
+    }
+
+    if (['false', 'f', '0', 0].includes(value)) {
+      return false;
+    }
+
+    return Boolean(value);
+  }
+
+  fromDB(value) {
+    if (typeof value === 'boolean') {
+      return value;
+    }
+
+    const strVal = toString(value);
+
+    if (strVal === '1') {
+      return true;
+    }
+    if (strVal === '0') {
+      return false;
+    }
+    return null;
+  }
+}
+
+module.exports = BooleanField;
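
BooleanField normalises the loose truthy/falsy encodings different drivers return. A small behavioural sketch (illustrative; it assumes the file above can be required directly and that the Field constructor accepts a config object, as the factory in fields/index.js does):

'use strict';

const BooleanField = require('./lib/fields/boolean');

const field = new BooleanField({});

// toDB accepts booleans plus common string/number encodings
field.toDB(true);    // true
field.toDB('t');     // true
field.toDB('0');     // false

// fromDB maps driver output ('1'/'0', 1/0) back to booleans
field.fromDB(1);     // true
field.fromDB('0');   // false
field.fromDB('yes'); // null (unrecognised values fall through to null)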

package/lib/fields/date.js
ADDED
@@ -0,0 +1,16 @@
+'use strict';
+
+const { parseDate } = require('./shared/parsers');
+const Field = require('./field');
+
+class DateField extends Field {
+  toDB(value) {
+    return parseDate(value);
+  }
+
+  fromDB(value) {
+    return value;
+  }
+}
+
+module.exports = DateField;

package/lib/fields/datetime.js
ADDED
@@ -0,0 +1,19 @@
+'use strict';
+
+const dateFns = require('date-fns');
+
+const { parseDateTimeOrTimestamp } = require('./shared/parsers');
+const Field = require('./field');
+
+class DatetimeField extends Field {
+  toDB(value) {
+    return parseDateTimeOrTimestamp(value);
+  }
+
+  fromDB(value) {
+    const cast = new Date(value);
+    return dateFns.isValid(cast) ? cast.toISOString() : null;
+  }
+}
+
+module.exports = DatetimeField;

package/lib/{fields.d.ts → fields/index.d.ts}
RENAMED
File without changes

package/lib/fields/index.js
ADDED
@@ -0,0 +1,49 @@
+'use strict';
+
+const _ = require('lodash/fp');
+
+const Field = require('./field');
+const StringField = require('./string');
+const JSONField = require('./json');
+const BigIntegerField = require('./biginteger');
+const NumberField = require('./number');
+const DateField = require('./date');
+const TimeField = require('./time');
+const DatetimeField = require('./datetime');
+const TimestampField = require('./timestamp');
+const BooleanField = require('./boolean');
+
+const typeToFieldMap = {
+  increments: Field,
+  password: StringField,
+  email: StringField,
+  string: StringField,
+  uid: StringField,
+  richtext: StringField,
+  text: StringField,
+  enumeration: StringField,
+  json: JSONField,
+  biginteger: BigIntegerField,
+  integer: NumberField,
+  float: NumberField,
+  decimal: NumberField,
+  date: DateField,
+  time: TimeField,
+  datetime: DatetimeField,
+  timestamp: TimestampField,
+  boolean: BooleanField,
+};
+
+const createField = (attribute) => {
+  const { type } = attribute;
+
+  if (_.has(type, typeToFieldMap)) {
+    return new typeToFieldMap[type]({});
+  }
+
+  throw new Error(`Undefined field for type ${type}`);
+};
+
+module.exports = {
+  createField,
+};
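
createField is the factory that maps an attribute's declared type to one of the Field classes above. A hedged usage sketch (illustrative only; only the `type` property of the attribute is consulted here):

'use strict';

const { createField } = require('./lib/fields');

const dateField = createField({ type: 'date' });
const priceField = createField({ type: 'decimal' });

dateField.toDB('2022-08-01');  // '2022-08-01' (validated by parseDate)
priceField.toDB('19.99');      // 19.99 (coerced to a number)

// Unknown types are rejected up front.
try {
  createField({ type: 'geometry' });
} catch (err) {
  console.error(err.message); // "Undefined field for type geometry"
}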

package/lib/fields/json.js
ADDED
@@ -0,0 +1,16 @@
+'use strict';
+
+const Field = require('./field');
+
+class JSONField extends Field {
+  toDB(value) {
+    return JSON.stringify(value);
+  }
+
+  fromDB(value) {
+    if (typeof value === 'string') return JSON.parse(value);
+    return value;
+  }
+}
+
+module.exports = JSONField;

package/lib/fields/number.js
ADDED
@@ -0,0 +1,23 @@
+'use strict';
+
+const { toNumber } = require('lodash/fp');
+
+const Field = require('./field');
+
+class NumberField extends Field {
+  toDB(value) {
+    const numberValue = toNumber(value);
+
+    if (Number.isNaN(numberValue)) {
+      throw new Error(`Expected a valid Number, got ${value}`);
+    }
+
+    return numberValue;
+  }
+
+  fromDB(value) {
+    return toNumber(value);
+  }
+}
+
+module.exports = NumberField;

package/lib/fields/shared/parsers.js
ADDED
@@ -0,0 +1,69 @@
+'use strict';
+
+const { isString, padCharsEnd } = require('lodash/fp');
+const dateFns = require('date-fns');
+
+const { InvalidDateTimeError, InvalidDateError, InvalidTimeError } = require('../../errors');
+
+const DATE_REGEX = /^\d{4}-(0[1-9]|1[0-2])-(0[1-9]|[12][0-9]|3[01])$/;
+const PARTIAL_DATE_REGEX = /^\d{4}-(0[1-9]|1[0-2])-(0[1-9]|[12][0-9]|3[01])/g;
+const TIME_REGEX = /^(2[0-3]|[01][0-9]):([0-5][0-9]):([0-5][0-9])(.[0-9]{1,3})?$/;
+
+const parseDateTimeOrTimestamp = (value) => {
+  if (dateFns.isDate(value)) return value;
+  try {
+    const date = dateFns.parseISO(value);
+    if (dateFns.isValid(date)) return date;
+
+    const milliUnixDate = dateFns.parse(value, 'T', new Date());
+    if (dateFns.isValid(milliUnixDate)) return milliUnixDate;
+
+    throw new InvalidDateTimeError(`Invalid format, expected a timestamp or an ISO date`);
+  } catch (error) {
+    throw new InvalidDateTimeError(`Invalid format, expected a timestamp or an ISO date`);
+  }
+};
+
+const parseDate = (value) => {
+  const found = isString(value) ? value.match(PARTIAL_DATE_REGEX) || [] : [];
+  const extractedValue = found[0];
+
+  if (extractedValue && !DATE_REGEX.test(value)) {
+    // TODO V5: throw an error when format yyyy-MM-dd is not respected
+    // throw new InvalidDateError(`Invalid format, expected yyyy-MM-dd`);
+    process.emitWarning(
+      `[deprecated] Using a date format other than YYYY-MM-DD will be removed in future versions. Date received: ${value}. Date stored: ${extractedValue}.`
+    );
+  }
+
+  const date = dateFns.parseISO(extractedValue);
+  if (!dateFns.isValid(date)) {
+    throw new InvalidDateError(`Invalid date`);
+  }
+
+  return extractedValue;
+};
+
+const parseTime = (value) => {
+  if (dateFns.isDate(value)) return dateFns.format(value, 'HH:mm:ss.SSS');
+
+  if (typeof value !== 'string') {
+    throw new InvalidTimeError(`Expected a string, got a ${typeof value}`);
+  }
+  const result = value.match(TIME_REGEX);
+
+  if (result === null) {
+    throw new InvalidTimeError('Invalid time format, expected HH:mm:ss.SSS');
+  }
+
+  const [, hours, minutes, seconds, fraction = '.000'] = result;
+  const fractionPart = padCharsEnd('0', 3, fraction.slice(1));
+
+  return `${hours}:${minutes}:${seconds}.${fractionPart}`;
+};
+
+module.exports = {
+  parseDateTimeOrTimestamp,
+  parseDate,
+  parseTime,
+};
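
These shared parsers centralise the date/time validation that the individual field classes now delegate to. A short behavioural sketch (illustrative, assuming the layout added above):

'use strict';

const { parseDate, parseTime, parseDateTimeOrTimestamp } = require('./lib/fields/shared/parsers');

parseTime('14:30:05');    // '14:30:05.000' (a missing fraction is padded to milliseconds)
parseTime('14:30:05.2');  // '14:30:05.200'

parseDate('2022-08-01');  // '2022-08-01' (returned as-is once validated)

parseDateTimeOrTimestamp('2022-08-01T10:00:00.000Z'); // Date instance (parsed as ISO)
parseDateTimeOrTimestamp('1659348000000');            // Date instance (parsed as a millisecond timestamp)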

package/lib/fields/string.js
ADDED
@@ -0,0 +1,17 @@
+'use strict';
+
+const { toString } = require('lodash/fp');
+
+const Field = require('./field');
+
+class StringField extends Field {
+  toDB(value) {
+    return toString(value);
+  }
+
+  fromDB(value) {
+    return toString(value);
+  }
+}
+
+module.exports = StringField;

package/lib/fields/time.js
ADDED
@@ -0,0 +1,17 @@
+'use strict';
+
+const { parseTime } = require('./shared/parsers');
+const Field = require('./field');
+
+class TimeField extends Field {
+  toDB(value) {
+    return parseTime(value);
+  }
+
+  fromDB(value) {
+    // make sure that's a string with valid format ?
+    return value;
+  }
+}
+
+module.exports = TimeField;

package/lib/fields/timestamp.js
ADDED
@@ -0,0 +1,19 @@
+'use strict';
+
+const dateFns = require('date-fns');
+
+const { parseDateTimeOrTimestamp } = require('./shared/parsers');
+const Field = require('./field');
+
+class TimestampField extends Field {
+  toDB(value) {
+    return parseDateTimeOrTimestamp(value);
+  }
+
+  fromDB(value) {
+    const cast = new Date(value);
+    return dateFns.isValid(cast) ? dateFns.format(cast, 'T') : null;
+  }
+}
+
+module.exports = TimestampField;
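
DatetimeField and TimestampField share the same parser on the way in but differ on the way out: one returns ISO strings, the other millisecond timestamps as strings. A hedged round-trip sketch (illustrative; the constructor argument follows the factory convention in fields/index.js):

'use strict';

const DatetimeField = require('./lib/fields/datetime');
const TimestampField = require('./lib/fields/timestamp');

const datetime = new DatetimeField({});
const timestamp = new TimestampField({});

const stored = datetime.toDB('2022-08-01T10:00:00.000Z'); // Date instance

datetime.fromDB(stored);  // '2022-08-01T10:00:00.000Z' (ISO string)
timestamp.fromDB(stored); // '1659348000000' (milliseconds since epoch, as a string)

// Invalid driver values degrade to null rather than throwing.
datetime.fromDB('not-a-date'); // null
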
package/lib/index.js
CHANGED
@@ -74,7 +74,7 @@ class Database {
 
 // TODO: move into strapi
 Database.transformContentTypes = transformContentTypes;
-Database.init = async config => new Database(config);
+Database.init = async (config) => new Database(config);
 
 module.exports = {
   Database,
package/lib/lifecycles/index.js
CHANGED
@@ -5,7 +5,7 @@ const assert = require('assert').strict;
 const timestampsLifecyclesSubscriber = require('./subscribers/timestamps');
 const modelLifecyclesSubscriber = require('./subscribers/models-lifecycles');
 
-const isValidSubscriber = subscriber => {
+const isValidSubscriber = (subscriber) => {
   return (
     typeof subscriber === 'function' || (typeof subscriber === 'object' && subscriber !== null)
   );
@@ -14,7 +14,7 @@ const isValidSubscriber = subscriber => {
 /**
  * @type {import('.').createLifecyclesProvider}
  */
-const createLifecyclesProvider = db => {
+const createLifecyclesProvider = (db) => {
   let subscribers = [timestampsLifecyclesSubscriber, modelLifecyclesSubscriber];
 
   return {

package/lib/lifecycles/subscribers/models-lifecycles.js
CHANGED
@@ -8,7 +8,7 @@
  * For each model try to run it's lifecycles function if any is defined
  * @type {Subscriber}
  */
-const modelsLifecyclesSubscriber = async event => {
+const modelsLifecyclesSubscriber = async (event) => {
   const { model } = event;
 
   if (event.action in model.lifecycles) {

package/lib/lifecycles/subscribers/timestamps.js
CHANGED
@@ -33,7 +33,7 @@ const timestampsLifecyclesSubscriber = {
 
     const now = new Date();
     if (_.isArray(data)) {
-      data.forEach(data => _.defaults(data, { createdAt: now, updatedAt: now }));
+      data.forEach((data) => _.defaults(data, { createdAt: now, updatedAt: now }));
     }
   },
 
@@ -57,7 +57,7 @@ const timestampsLifecyclesSubscriber = {
 
     const now = new Date();
     if (_.isArray(data)) {
-      data.forEach(data => _.assign(data, { updatedAt: now }));
+      data.forEach((data) => _.assign(data, { updatedAt: now }));
     }
   },
 };
package/lib/metadata/index.js
CHANGED
@@ -84,14 +84,14 @@ const createMetadata = (models = []) => {
   return metadata;
 };
 
-const hasComponentsOrDz = model => {
+const hasComponentsOrDz = (model) => {
   return Object.values(model.attributes).some(
     ({ type }) => types.isComponent(type) || types.isDynamicZone(type)
   );
 };
 
 // NOTE: we might just move the compo logic outside this layer too at some point
-const createCompoLinkModelMeta = baseModelMeta => {
+const createCompoLinkModelMeta = (baseModelMeta) => {
   return {
     // TODO: make sure there can't be any conflicts with a prefix
     // singularName: 'compo',

package/lib/metadata/relations.js
CHANGED
@@ -9,10 +9,10 @@ const _ = require('lodash/fp');
 const hasInversedBy = _.has('inversedBy');
 const hasMappedBy = _.has('mappedBy');
 
-const isOneToAny = attribute => ['oneToOne', 'oneToMany'].includes(attribute.relation);
-const isBidirectional = attribute => hasInversedBy(attribute) || hasMappedBy(attribute);
-const isOwner = attribute => !isBidirectional(attribute) || hasInversedBy(attribute);
-const shouldUseJoinTable = attribute => attribute.useJoinTable !== false;
+const isOneToAny = (attribute) => ['oneToOne', 'oneToMany'].includes(attribute.relation);
+const isBidirectional = (attribute) => hasInversedBy(attribute) || hasMappedBy(attribute);
+const isOwner = (attribute) => !isBidirectional(attribute) || hasInversedBy(attribute);
+const shouldUseJoinTable = (attribute) => attribute.useJoinTable !== false;
 
 /**
  * Creates a oneToOne relation metadata
@@ -75,12 +75,8 @@ const createOneToMany = (attributeName, attribute, meta, metadata) => {
       attributeName,
       meta,
     });
-  } else {
-
-    throw new Error(
-      'one side of a oneToMany cannot be the owner side in a bidirectional relation'
-    );
-  }
+  } else if (isOwner(attribute)) {
+    throw new Error('one side of a oneToMany cannot be the owner side in a bidirectional relation');
   }
 };
 
@@ -169,7 +165,7 @@ const createManyToMany = (attributeName, attribute, meta, metadata) => {
  * @param {ModelMetadata} meta
  * @param {Metadata} metadata
 */
-const createMorphToOne = (attributeName, attribute /*meta, metadata*/) => {
+const createMorphToOne = (attributeName, attribute /* meta, metadata */) => {
   const idColumnName = 'target_id';
   const typeColumnName = 'target_type';
 
@@ -210,6 +206,9 @@ const createMorphToMany = (attributeName, attribute, meta, metadata) => {
     uid: joinTableName,
     tableName: joinTableName,
     attributes: {
+      id: {
+        type: 'increments',
+      },
      [joinColumnName]: {
        type: 'integer',
        column: {
@@ -341,9 +340,10 @@ const createRelation = (attributeName, attribute, meta, metadata) => {
      return createMorphOne(attributeName, attribute, meta, metadata);
    case 'morphMany':
      return createMorphMany(attributeName, attribute, meta, metadata);
+    default: {
+      throw new Error(`Unknown relation ${attribute.relation}`);
+    }
   }
-
-  throw new Error(`Unknown relation ${attribute.relation}`);
 };
 
 /**
@@ -354,7 +354,7 @@ const createRelation = (attributeName, attribute, meta, metadata) => {
  * @param {string} param.attributeName name of the associated attribute
  * @param {Object} param.meta model metadata
 */
-const createJoinColum = (metadata, { attribute, attributeName /*meta */ }) => {
+const createJoinColum = (metadata, { attribute, attributeName /* meta */ }) => {
   const targetMeta = metadata.get(attribute.target);
 
   const joinColumnName = _.snakeCase(`${attributeName}_id`);
@@ -395,7 +395,7 @@ const createJoinTable = (metadata, { attributeName, attribute, meta }) => {
 
   const joinTableName = _.snakeCase(`${meta.tableName}_${attributeName}_links`);
 
-
+  const joinColumnName = _.snakeCase(`${meta.singularName}_id`);
   let inverseJoinColumnName = _.snakeCase(`${targetMeta.singularName}_id`);
 
   // if relation is slef referencing
@@ -407,6 +407,9 @@ const createJoinTable = (metadata, { attributeName, attribute, meta }) => {
     uid: joinTableName,
     tableName: joinTableName,
    attributes: {
+      id: {
+        type: 'increments',
+      },
      [joinColumnName]: {
        type: 'integer',
        column: {
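
The relations change adds an explicit `id` attribute of type `increments` to every generated join-table definition (both the morph join tables and the regular link tables). Illustratively, a manyToMany link table now carries roughly this metadata shape; the table and column names below are examples, and the column options are omitted because they are not shown in this diff:

// Approximate shape of a generated link-table definition after this change.
const articlesTagsLinks = {
  uid: 'articles_tags_links',
  tableName: 'articles_tags_links',
  attributes: {
    id: {
      type: 'increments', // newly added primary-key attribute
    },
    article_id: {
      type: 'integer',
      column: {
        // column options as produced by createJoinTable (not shown here)
      },
    },
    tag_id: {
      type: 'integer',
      column: {},
    },
  },
};

module.exports = articlesTagsLinks;
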
package/lib/migrations/index.js
CHANGED
@@ -6,8 +6,8 @@ const { Umzug } = require('umzug');
 
 const createStorage = require('./storage');
 
-const wrapTransaction = db => fn => () =>
-  db.connection.transaction(trx => Promise.resolve(fn(trx)));
+const wrapTransaction = (db) => (fn) => () =>
+  db.connection.transaction((trx) => Promise.resolve(fn(trx)));
 
 // TODO: check multiple commands in one sql statement
 const migrationResolver = ({ name, path, context }) => {
@@ -19,7 +19,7 @@ const migrationResolver = ({ name, path, context }) => {
 
     return {
       name,
-      up: wrapTransaction(db)(knex => knex.raw(sql)),
+      up: wrapTransaction(db)((knex) => knex.raw(sql)),
       down() {},
     };
   }
@@ -33,7 +33,7 @@ const migrationResolver = ({ name, path, context }) => {
   };
 };
 
-const createUmzugProvider = db => {
+const createUmzugProvider = (db) => {
   const migrationDir = path.join(strapi.dirs.app.root, 'database/migrations');
 
   fse.ensureDirSync(migrationDir);
@@ -54,7 +54,7 @@ const createUmzugProvider = db => {
  * Creates migrations provider
  * @type {import('.').createMigrationsProvider}
 */
-const createMigrationsProvider = db => {
+const createMigrationsProvider = (db) => {
   const migrations = createUmzugProvider(db);
 
   return {

package/lib/migrations/storage.js
CHANGED
@@ -6,7 +6,7 @@ const createStorage = (opts = {}) => {
   const hasMigrationTable = () => db.getSchemaConnection().hasTable(tableName);
 
   const createMigrationTable = () => {
-    return db.getSchemaConnection().createTable(tableName, table => {
+    return db.getSchemaConnection().createTable(tableName, (table) => {
       table.increments('id');
       table.string('name');
       table.datetime('time', { useTz: false });
@@ -25,10 +25,7 @@ const createStorage = (opts = {}) => {
     },
 
     async unlogMigration({ name }) {
-      await db
-        .getConnection(tableName)
-        .del()
-        .where({ name });
+      await db.getConnection(tableName).del().where({ name });
     },
 
     async executed() {
@@ -37,13 +34,9 @@ const createStorage = (opts = {}) => {
         return [];
       }
 
-      const logs = await db
-        .getConnection(tableName)
-        .select()
-        .from(tableName)
-        .orderBy('time');
+      const logs = await db.getConnection(tableName).select().from(tableName).orderBy('time');
 
-      return logs.map(log => log.name);
+      return logs.map((log) => log.name);
     },
   };
 };