@bedrockio/model 0.18.0 → 0.18.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.claude/settings.local.json +15 -0
- package/CHANGELOG.md +12 -0
- package/dist/cjs/cache.js +17 -14
- package/dist/cjs/delete-hooks.js +13 -6
- package/dist/cjs/export.js +19 -0
- package/dist/cjs/include.js +10 -5
- package/dist/cjs/reload.js +40 -6
- package/dist/cjs/schema.js +170 -108
- package/package.json +1 -1
- package/src/cache.js +17 -16
- package/src/delete-hooks.js +15 -9
- package/src/export.js +14 -0
- package/src/include.js +10 -5
- package/src/reload.js +52 -7
- package/src/schema.js +186 -130
- package/types/cache.d.ts +1 -0
- package/types/cache.d.ts.map +1 -1
- package/types/delete-hooks.d.ts +1 -0
- package/types/delete-hooks.d.ts.map +1 -1
- package/types/export.d.ts +2 -0
- package/types/export.d.ts.map +1 -0
- package/types/reload.d.ts.map +1 -1
- package/types/schema.d.ts +0 -1
- package/types/schema.d.ts.map +1 -1
package/.claude/settings.local.json
ADDED
@@ -0,0 +1,15 @@
+ {
+   "permissions": {
+     "allow": [
+       "Bash(yarn test:*)",
+       "Bash(yarn list:*)",
+       "WebSearch",
+       "WebFetch(domain:www.npmjs.com)",
+       "WebFetch(domain:gist.github.com)",
+       "WebFetch(domain:mongoosejs.com)",
+       "WebFetch(domain:github.com)"
+     ],
+     "deny": [],
+     "ask": []
+   }
+ }
package/CHANGELOG.md
CHANGED
@@ -1,3 +1,15 @@
+ ## 0.18.2
+
+ - Further fix for `reload` not working with delete hooks.
+ - Better detection of literal `type` fields.
+ - Schema refactor.
+
+ ## 0.18.1
+
+ - Added `export` for dumping documents and to support reload.
+ - Fix for `include` not working on nested virtuals.
+ - Fixes for `reload`.
+
  ## 0.18.0

  - Added `reload`.
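A rough usage sketch of the methods these entries describe; the refreshUser helper, the User model, and its 'profile' field are illustrative assumptions, not part of this package.

// Hypothetical sketch: assumes a model built with createSchema from this package.
async function refreshUser(User, id) {
  const user = await User.findById(id).include('profile');
  // ...the stored document may change elsewhere in the meantime...
  await user.reload(); // re-fetches fields and re-populates 'profile'
  return user.export(); // plain object of every defined schema path
}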
package/dist/cjs/cache.js
CHANGED
@@ -3,6 +3,7 @@
  Object.defineProperty(exports, "__esModule", {
    value: true
  });
+ exports.addCacheFields = addCacheFields;
  exports.applyCache = applyCache;
  var _lodash = require("lodash");
  var _mongoose = _interopRequireDefault(require("mongoose"));
@@ -10,32 +11,34 @@ var _utils = require("./utils");
  function _interopRequireDefault(e) { return e && e.__esModule ? e : { default: e }; }
  const definitionMap = new Map();
  _mongoose.default.plugin(cacheSyncPlugin);
- function
-
-
+ function addCacheFields(definition) {
+   const {
+     cache
+   } = definition;
+   if (!cache) {
      return;
    }
-
-   applyStaticMethods(schema, definition);
-   applyCacheHook(schema, definition);
- }
- function createCacheFields(schema, definition) {
-   for (let [cachedField, def] of Object.entries(definition.cache)) {
+   for (let [cachedField, def] of Object.entries(cache)) {
      const {
        type,
        path,
        ...rest
      } = def;
-
-       [cachedField]: type
-     });
-     schema.obj[cachedField] = {
-       ...rest,
+     definition.attributes[cachedField] = {
        type,
+       ...rest,
        writeAccess: 'none'
      };
    }
  }
+ function applyCache(schema, definition) {
+   definitionMap.set(schema, definition);
+   if (!definition.cache) {
+     return;
+   }
+   applyStaticMethods(schema, definition);
+   applyCacheHook(schema, definition);
+ }
  function applyStaticMethods(schema, definition) {
    schema.static('syncCacheFields', async function syncCacheFields() {
      assertIncludeModule(this);
package/dist/cjs/delete-hooks.js
CHANGED
@@ -3,6 +3,7 @@
  Object.defineProperty(exports, "__esModule", {
    value: true
  });
+ exports.addDeletedFields = addDeletedFields;
  exports.applyDeleteHooks = applyDeleteHooks;
  var _lodash = require("lodash");
  var _mongoose = _interopRequireDefault(require("mongoose"));
@@ -12,6 +13,18 @@ function _interopRequireDefault(e) { return e && e.__esModule ? e : { default: e
  const {
    ObjectId: SchemaObjectId
  } = _mongoose.default.Schema.Types;
+ function addDeletedFields(definition) {
+   let {
+     onDelete: deleteHooks
+   } = definition;
+   if (!deleteHooks) {
+     return;
+   }
+   definition.attributes['deletedRefs'] = [{
+     _id: 'ObjectId',
+     ref: 'String'
+   }];
+ }
  function applyDeleteHooks(schema, definition) {
    let {
      onDelete: deleteHooks
@@ -45,12 +58,6 @@ function applyDeleteHooks(schema, definition) {
      await restoreReferences(this, cleanHooks);
      await restoreFn.apply(this, arguments);
    });
-   schema.add({
-     deletedRefs: [{
-       _id: 'ObjectId',
-       ref: 'String'
-     }]
-   });
  }

  // Clean Hook
package/dist/cjs/export.js
ADDED
@@ -0,0 +1,19 @@
+ "use strict";
+
+ Object.defineProperty(exports, "__esModule", {
+   value: true
+ });
+ exports.applyExport = applyExport;
+ var _lodash = require("lodash");
+ function applyExport(schema) {
+   schema.method('export', function () {
+     const result = {};
+     this.constructor.schema.eachPath(schemaPath => {
+       const value = this.get(schemaPath);
+       if (value !== undefined) {
+         (0, _lodash.set)(result, schemaPath, value);
+       }
+     });
+     return result;
+   });
+ }
package/dist/cjs/include.js
CHANGED
@@ -162,16 +162,21 @@ function getDocumentParams(doc, arg, options = {}) {
  const params = getParams(doc.constructor.modelName, arg);
  if (!options.force) {
    params.populate = params.populate.filter(p => {
-     return !
+     return !isPopulated(doc, p);
    });
  }
  return params;
  }
- function
- if (
-
+ function isPopulated(arg, params) {
+   if (Array.isArray(arg)) {
+     return arg.every(el => {
+       return isPopulated(el, params);
+     });
+   }
+   if (arg.populated(params.path)) {
+     const sub = arg.get(params.path);
      return params.populate.every(p => {
-       return
+       return isPopulated(sub, p);
      });
    } else {
      return false;
package/dist/cjs/reload.js
CHANGED
@@ -12,19 +12,53 @@ function applyReload(schema) {
    const paths = getPopulatedPaths(this);
    const doc = await this.constructor.findById(this.id).include(paths);
    if (!doc) {
-     throw new Error('Document
+     throw new Error('Document does not exist');
+   }
+   this.overwrite(doc.export());
+
+   // Include on the query above will not work
+   // for virtuals so handle separately here.
+   for (const path of getVirtualReferencePaths(doc)) {
+     await doc.include(path);
+     this.set(path, doc[path]);
+   }
+
+   // All data reloaded so mark as unmodified.
+   for (const path of this.modifiedPaths()) {
+     this.unmarkModified(path);
    }
-   this.overwrite(doc);
  });
  }
- function getPopulatedPaths(doc) {
-
-
-   return
+ function getPopulatedPaths(doc, base = []) {
+   const schema = doc.constructor.schema;
+   return getReferencePaths(schema).filter(name => {
+     return doc.populated(name);
+   }).flatMap(name => {
+     const path = [...base, name];
+     const value = doc.get(name);
+     const inner = Array.isArray(value) ? value[0] : value;
+     return [path.join('.'), ...getPopulatedPaths(inner, path)];
  });
  }
  function getReferencePaths(schema) {
+   return [...getRealReferencePaths(schema), ...getVirtualReferencePaths(schema)];
+ }
+ function getRealReferencePaths(schema) {
    return (0, _utils.getSchemaPaths)(schema).filter(path => {
      return (0, _utils.isReferenceField)(schema, path);
    });
+ }
+ function getVirtualReferencePaths(arg) {
+   const schema = resolveSchema(arg);
+   return Object.keys(schema.virtuals).filter(key => {
+     return schema.virtuals[key].options?.ref;
+   });
+ }
+ function resolveSchema(arg) {
+   if (arg instanceof _mongoose.default.Document) {
+     // @ts-ignore
+     return arg.constructor.schema;
+   } else if (arg instanceof _mongoose.default.Schema) {
+     return arg;
+   }
  }
package/dist/cjs/schema.js
CHANGED
@@ -12,6 +12,7 @@ var _cache = require("./cache");
  var _clone = require("./clone");
  var _deleteHooks = require("./delete-hooks");
  var _disallowed = require("./disallowed");
+ var _export = require("./export");
  var _hydrate = require("./hydrate");
  var _include = require("./include");
  var _reload = require("./reload");
@@ -32,7 +33,9 @@ function _interopRequireDefault(e) { return e && e.__esModule ? e : { default: e
   * @returns mongoose.Schema
   */
  function createSchema(definition, options = {}) {
-
+   (0, _cache.addCacheFields)(definition);
+   (0, _deleteHooks.addDeletedFields)(definition);
+   const attributes = normalizeAttributes({
      ...definition.attributes,
      // Although timestamps are being set below, we still need to add
      // them to the schema so that validation can be generated for them,
@@ -44,7 +47,9 @@ function createSchema(definition, options = {}) {
        type: 'Boolean',
        default: false
      }
-   })
+   });
+   applyExtensions(attributes);
+   const schema = new _mongoose.default.Schema(attributes, {
      timestamps: true,
      toJSON: _serialization.serializeOptions,
      toObject: _serialization.serializeOptions,
@@ -60,6 +65,7 @@ function createSchema(definition, options = {}) {
    (0, _cache.applyCache)(schema, definition);
    (0, _clone.applyClone)(schema);
    (0, _reload.applyReload)(schema);
+   (0, _export.applyExport)(schema);
    (0, _disallowed.applyDisallowed)(schema);
    (0, _include.applyInclude)(schema);
    (0, _hydrate.applyHydrate)(schema);
@@ -73,19 +79,9 @@ function normalizeAttributes(arg, path = []) {
      return arg;
    } else if (typeof arg === 'function') {
      throw new Error('Native functions are not allowed as types.');
-   } else if (
-     return
-       type: arg
-     }, path);
-   } else if (Array.isArray(arg)) {
-     return normalizeSchemaTypedef({
-       type: arg
-     }, path);
+   } else if (isTypedefInput(arg)) {
+     return normalizeTypedef(arg, path);
    } else if (typeof arg === 'object') {
-     assertRefs(arg, path);
-     if ((0, _utils.isSchemaTypedef)(arg)) {
-       return normalizeSchemaTypedef(arg, path);
-     }
      const attributes = {};
      for (let [key, val] of Object.entries(arg)) {
        attributes[key] = normalizeAttributes(val, [...path, key]);
@@ -93,68 +89,74 @@ function normalizeAttributes(arg, path = []) {
      return attributes;
    }
  }
- function normalizeSchemaTypedef(typedef, path) {
-   const {
-     type
-   } = typedef;
-   if (Array.isArray(type)) {
-     typedef.type = normalizeArrayAttributes(type, path);
-   } else if (typeof type === 'object') {
-     typedef.type = normalizeAttributes(type, path);
-   } else {
-     assertSchemaType(type, path);
-   }
-   if (typedef.type === 'String') {
-     typedef.trim ??= true;
-   }
-   return typedef;
- }
  function normalizeArrayAttributes(arr, path) {
    return arr.map((el, i) => {
      return normalizeAttributes(el, [...path, i]);
    });
  }
- function
-
-
- }
-
+ function normalizeTypedef(arg, path) {
+   const typedef = arg.type ? arg : {
+     type: arg
+   };
+   if (Array.isArray(typedef.type)) {
+     // Normalize all inner fields.
+     typedef.type = normalizeArrayAttributes(typedef.type, path);
+   } else if (typeof typedef.type === 'object') {
+     // Normalize literal "type" field.
+     typedef.type = normalizeAttributes(typedef.type, path);
+   } else if (isExtendedSyntax(typedef)) {
+     // Normalize extended syntax: type "Object" or "Array".
+     typedef.attributes = normalizeAttributes(typedef.attributes, path);
  }
-
-
-
-
-
-
-
-     val = attributesToMongoose(val);
-   } else if (key === 'match' && type === 'string') {
-     // Convert match field to RegExp that cannot be expressed in JSON.
-     val = parseRegExp(val);
-   } else if (key === 'validate' && type === 'string') {
-     // Allow custom mongoose validation function that derives from the schema.
-     val = (0, _validation.getNamedValidator)(val);
-   } else if (key === 'attributes' && type === 'object') {
-     val = attributesToMongoose(val);
-   }
- } else if (Array.isArray(val)) {
-   val = val.map(attributesToMongoose);
- } else if ((0, _lodash.isPlainObject)(val)) {
-   if (isScopeExtension(val)) {
-     applyScopeExtension(val, definition);
-     continue;
-   } else {
-     val = attributesToMongoose(val);
+ if (typedef.type === 'String') {
+   // Auto-apply trim to string fields.
+   typedef.trim ??= true;
+   if (typeof typedef.match === 'string') {
+     // Convert string RegExp so that
+     // it can be expressed in JSON.
+     typedef.match = parseRegExp(typedef.match);
    }
-   definition[key] = val;
  }
-
-
+ assertSchemaType(typedef, path);
+ assertObjectRefs(typedef, path);
+ return typedef;
+ }
+ function isTypedefInput(arg) {
+   if (typeof arg === 'string') {
+     // "Number" as shorthand for a typedef.
+     return true;
+   } else if (Array.isArray(arg)) {
+     // Array signals an array field with inner schema.
+     return true;
+   } else if (hasLiteralTypeField(arg)) {
+     // An object with a literal "type" field.
+     return false;
  }
- return
+ return (0, _utils.isSchemaTypedef)(arg);
  }
-
+
+ // Detects input like:
+ // {
+ //   "type": "String",
+ //   "name": "String",
+ // }
+ // Which is not intended to be a typedef.
+ function hasLiteralTypeField(arg) {
+   const {
+     type,
+     ...rest
+   } = arg || {};
+   if (!isMongooseType(type)) {
+     return false;
+   }
+   return Object.values(rest).some(key => {
+     return isMongooseType(key);
+   });
+ }
+ function assertSchemaType(typedef, path) {
+   const {
+     type
+   } = typedef;
  if (typeof type === 'string') {
    if (!isMongooseType(type)) {
      const p = path.join('.');
@@ -167,33 +169,59 @@ function assertSchemaType(type, path) {
      }
    }
  }
- function
+ function assertObjectRefs(typedef, path) {
    const {
      type,
-     ref
-
-   } = field;
+     ref
+   } = typedef;
    const p = path.join('.');
-   if (
+   if (requiresRef(typedef, path)) {
      throw new Error(`Ref must be passed for "${p}".`);
+   // TODO: what is the middle part doing here??
    } else if (ref && !isMongooseType(ref) && !isObjectIdType(type)) {
      throw new Error(`Ref field "${p}" must be type "ObjectId".`);
    }
  }
- function
-
-
-
-
- }
-
-
+ function requiresRef(typedef, path) {
+   const {
+     type,
+     ref,
+     refPath
+   } = typedef;
+
+   // Allow "_id" to not have a ref for the
+   // delete hooks module to function.
+   if ((0, _lodash.last)(path) === '_id') {
+     return false;
+   }
+   return isObjectIdType(type) && !ref && !refPath;
  }
-
-
-
-
-
+
+ // Extensions
+
+ function applyExtensions(arg) {
+   if ((0, _utils.isSchemaTypedef)(arg)) {
+     applySyntaxExtensions(arg);
+     applyValidateExtension(arg);
+     applyUniqueExtension(arg);
+     applyTupleExtension(arg);
+     applyDateExtension(arg);
+     if (Array.isArray(arg.type)) {
+       for (let field of arg.type) {
+         applyExtensions(field);
+       }
+       applyArrayValidators(arg);
+       applyOptionHoisting(arg);
+     }
+   } else if ((0, _lodash.isPlainObject)(arg)) {
+     for (let [key, value] of Object.entries(arg)) {
+       if (isScopeExtension(value)) {
+         applyScopeExtension(value, arg, key);
+       } else {
+         applyExtensions(value);
+       }
+     }
+   }
  }
  function applySyntaxExtensions(typedef) {
    const {
@@ -201,14 +229,13 @@ function applySyntaxExtensions(typedef) {
      attributes
    } = typedef;
    if (isExtendedSyntax(typedef)) {
-
+     applyExtensions(attributes);
      if (type === 'Array') {
-       typedef.type = [
+       typedef.type = [attributes];
+     } else if (type === 'Object') {
+       typedef.type = new _mongoose.default.Schema(attributes);
      }
-
-     if (Array.isArray(typedef.type)) {
-       applyArrayValidators(typedef);
-       applyOptionHoisting(typedef);
+     delete typedef['attributes'];
    }
  }
@@ -222,32 +249,42 @@ function isExtendedSyntax(typedef) {
    type,
    attributes
  } = typedef;
-
+ if (!attributes) {
+   return false;
+ }
+ return type === 'Object' || type === 'Array' || type === 'Scope';
  }
  function isScopeExtension(arg) {
    return (0, _utils.isSchemaTypedef)(arg) && arg.type === 'Scope';
  }
- function applyScopeExtension(typedef,
+ function applyScopeExtension(typedef, parent, name) {
  const {
    type,
    attributes,
-   ...
+   ...rest
  } = typedef;
- for (let [key,
- if ((0, _utils.isSchemaTypedef)(
-
-
-
+ for (let [key, value] of Object.entries(attributes)) {
+   if ((0, _utils.isSchemaTypedef)(value)) {
+     // If the child is a typedef then apply
+     // options directly to the field.
+     applyExtensions(value);
+     parent[key] = {
+       ...value,
+       ...rest
      };
    } else {
-
+     // If the child is a nested object then
+     // need to use extended object syntax.
+     const typedef = {
        type: 'Object',
-       attributes:
-       ...
+       attributes: value,
+       ...rest
      };
+     applyExtensions(typedef);
+     parent[key] = typedef;
    }
-   definition[key] = attributesToMongoose(val);
  }
+ delete parent[name];
  }

  // Extended tuple syntax. Return mixed type and set validator.
@@ -279,6 +316,16 @@ function applyDateExtension(typedef) {
    }
  }

+ // Apply custom mongoose validation by name.
+ function applyValidateExtension(typedef) {
+   const {
+     validate
+   } = typedef;
+   if (typeof validate === 'string') {
+     typedef.validate = (0, _validation.getNamedValidator)(typedef.validate);
+   }
+ }
+
  // Intercepts "unique" options and changes to "softUnique".
  function applyUniqueExtension(typedef) {
    if (typedef.unique === true) {
@@ -318,12 +365,9 @@ function validateMaxLength(max) {
      }
    };
  }
-
-
-
-     fn2?.(...args);
-   };
- }
+
+ // Regex Parsing
+
  const REG_MATCH = /^\/(.+)\/(\w+)$/;
  function parseRegExp(str) {
    const match = str.match(REG_MATCH);
@@ -332,4 +376,22 @@ function parseRegExp(str) {
  }
  const [, source, flags] = match;
  return RegExp(source, flags);
+ }
+
+ // Utils
+
+ function camelUpper(str) {
+   return (0, _lodash.capitalize)((0, _lodash.camelCase)(str));
+ }
+ function isObjectIdType(type) {
+   return type === 'ObjectId' || type === _mongoose.default.Schema.Types.ObjectId;
+ }
+ function isMongooseType(type) {
+   return !!_mongoose.default.Schema.Types[type];
+ }
+ function chain(fn1, fn2) {
+   return (...args) => {
+     fn1?.(...args);
+     fn2?.(...args);
+   };
  }
package/package.json
CHANGED
package/src/cache.js
CHANGED
@@ -7,33 +7,34 @@ const definitionMap = new Map();

  mongoose.plugin(cacheSyncPlugin);

- export function
-
+ export function addCacheFields(definition) {
+   const { cache } = definition;

-   if (!
+   if (!cache) {
      return;
    }

-
-   applyStaticMethods(schema, definition);
-   applyCacheHook(schema, definition);
- }
-
- function createCacheFields(schema, definition) {
-   for (let [cachedField, def] of Object.entries(definition.cache)) {
+   for (let [cachedField, def] of Object.entries(cache)) {
      const { type, path, ...rest } = def;
-
-     schema.add({
-       [cachedField]: type,
-     });
-     schema.obj[cachedField] = {
-       ...rest,
+     definition.attributes[cachedField] = {
        type,
+       ...rest,
        writeAccess: 'none',
      };
    }
  }

+ export function applyCache(schema, definition) {
+   definitionMap.set(schema, definition);
+
+   if (!definition.cache) {
+     return;
+   }
+
+   applyStaticMethods(schema, definition);
+   applyCacheHook(schema, definition);
+ }
+
  function applyStaticMethods(schema, definition) {
    schema.static('syncCacheFields', async function syncCacheFields() {
      assertIncludeModule(this);
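A sketch of what the new addCacheFields does to a definition before the schema is built; the owner/ownerName fields and the shape of the cache entry are assumptions for illustration.

const definition = {
  attributes: {
    owner: { type: 'ObjectId', ref: 'User' },
  },
  cache: {
    ownerName: { type: 'String', path: 'owner.name' },
  },
};

addCacheFields(definition);
// definition.attributes.ownerName is now { type: 'String', writeAccess: 'none' },
// so the cached field is declared up front rather than patched onto the schema later.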
package/src/delete-hooks.js
CHANGED
@@ -6,6 +6,21 @@ import { getInnerField } from './utils';

  const { ObjectId: SchemaObjectId } = mongoose.Schema.Types;

+ export function addDeletedFields(definition) {
+   let { onDelete: deleteHooks } = definition;
+
+   if (!deleteHooks) {
+     return;
+   }
+
+   definition.attributes['deletedRefs'] = [
+     {
+       _id: 'ObjectId',
+       ref: 'String',
+     },
+   ];
+ }
+
  export function applyDeleteHooks(schema, definition) {
    let { onDelete: deleteHooks } = definition;

@@ -43,15 +58,6 @@ export function applyDeleteHooks(schema, definition) {
      await restoreReferences(this, cleanHooks);
      await restoreFn.apply(this, arguments);
    });
-
-   schema.add({
-     deletedRefs: [
-       {
-         _id: 'ObjectId',
-         ref: 'String',
-       },
-     ],
-   });
  }

  // Clean Hook
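A sketch of the effect of addDeletedFields; the function only requires onDelete to be truthy, so the hook config shown here is an assumed placeholder.

const definition = {
  attributes: { name: 'String' },
  onDelete: { /* delete hook config, shape assumed */ },
};

addDeletedFields(definition);
// definition.attributes.deletedRefs === [{ _id: 'ObjectId', ref: 'String' }]
// The field is now part of the definition before createSchema runs, instead of
// being appended with schema.add() afterwards.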
package/src/export.js
ADDED
@@ -0,0 +1,14 @@
+ import { set } from 'lodash';
+
+ export function applyExport(schema) {
+   schema.method('export', function () {
+     const result = {};
+     this.constructor.schema.eachPath((schemaPath) => {
+       const value = this.get(schemaPath);
+       if (value !== undefined) {
+         set(result, schemaPath, value);
+       }
+     });
+     return result;
+   });
+ }
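A hypothetical example of calling the new method; the Shop model and its fields are assumptions. export() returns a plain object keyed by schema path, with nested paths expanded by lodash set.

const shop = await Shop.findById(shopId);
const data = shop.export();
// e.g. { name: 'Corner Shop', address: { city: 'Oslo' }, deleted: false, createdAt: ... }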
package/src/include.js
CHANGED
@@ -146,18 +146,23 @@ export function getDocumentParams(doc, arg, options = {}) {

  if (!options.force) {
    params.populate = params.populate.filter((p) => {
-     return !
+     return !isPopulated(doc, p);
    });
  }

  return params;
  }

- function
- if (
-
+ function isPopulated(arg, params) {
+   if (Array.isArray(arg)) {
+     return arg.every((el) => {
+       return isPopulated(el, params);
+     });
+   }
+   if (arg.populated(params.path)) {
+     const sub = arg.get(params.path);
      return params.populate.every((p) => {
-       return
+       return isPopulated(sub, p);
      });
    } else {
      return false;
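With the recursive isPopulated check above, re-including an already populated nested path is skipped; a hypothetical example where Order, products, and shop are assumed names.

const order = await Order.findById(orderId).include('products.shop');
await order.include('products.shop');
// Skipped: every element of "products" already has "shop" populated.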
package/src/reload.js
CHANGED
@@ -6,25 +6,70 @@ import { isReferenceField } from './utils';

  export function applyReload(schema) {
    schema.method('reload', async function reload() {
      const paths = getPopulatedPaths(this);
+
      const doc = await this.constructor.findById(this.id).include(paths);

      if (!doc) {
-       throw new Error('Document
+       throw new Error('Document does not exist');
+     }
+
+     this.overwrite(doc.export());
+
+     // Include on the query above will not work
+     // for virtuals so handle separately here.
+     for (const path of getVirtualReferencePaths(doc)) {
+       await doc.include(path);
+       this.set(path, doc[path]);
      }

-
+     // All data reloaded so mark as unmodified.
+     for (const path of this.modifiedPaths()) {
+       this.unmarkModified(path);
+     }
    });
  }

- function getPopulatedPaths(doc) {
-
-
-
-
+ function getPopulatedPaths(doc, base = []) {
+   const schema = doc.constructor.schema;
+   return getReferencePaths(schema)
+     .filter((name) => {
+       return doc.populated(name);
+     })
+     .flatMap((name) => {
+       const path = [...base, name];
+
+       const value = doc.get(name);
+       const inner = Array.isArray(value) ? value[0] : value;
+
+       return [path.join('.'), ...getPopulatedPaths(inner, path)];
+     });
  }

  function getReferencePaths(schema) {
+   return [
+     ...getRealReferencePaths(schema),
+     ...getVirtualReferencePaths(schema),
+   ];
+ }
+
+ function getRealReferencePaths(schema) {
    return getSchemaPaths(schema).filter((path) => {
      return isReferenceField(schema, path);
    });
  }
+
+ function getVirtualReferencePaths(arg) {
+   const schema = resolveSchema(arg);
+   return Object.keys(schema.virtuals).filter((key) => {
+     return schema.virtuals[key].options?.ref;
+   });
+ }
+
+ function resolveSchema(arg) {
+   if (arg instanceof mongoose.Document) {
+     // @ts-ignore
+     return arg.constructor.schema;
+   } else if (arg instanceof mongoose.Schema) {
+     return arg;
+   }
+ }
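A usage sketch of the reworked reload; the Product model and its fields are assumptions. Populated paths, including virtual refs, are re-fetched and the document is left unmodified.

const product = await Product.findById(productId).include('shop');
product.name = 'changed locally';

await product.reload(); // re-fetches, restoring 'shop' and discarding the local edit

product.isModified(); // false - every reloaded path was unmarked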
package/src/schema.js
CHANGED
@@ -1,11 +1,12 @@
- import { camelCase, capitalize, isPlainObject, pick } from 'lodash';
+ import { camelCase, capitalize, isPlainObject, last, pick } from 'lodash';
  import mongoose from 'mongoose';

  import { applyAssign } from './assign';
- import { applyCache } from './cache';
+ import { addCacheFields, applyCache } from './cache';
  import { applyClone } from './clone';
- import { applyDeleteHooks } from './delete-hooks';
+ import { addDeletedFields, applyDeleteHooks } from './delete-hooks';
  import { applyDisallowed } from './disallowed';
+ import { applyExport } from './export';
  import { applyHydrate } from './hydrate';
  import { applyInclude } from './include';
  import { applyReload } from './reload';
@@ -31,28 +32,32 @@ import {
   * @returns mongoose.Schema
   */
  export function createSchema(definition, options = {}) {
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-   {
-     timestamps: true,
-     toJSON: serializeOptions,
-     toObject: serializeOptions,
-     ...options,
+   addCacheFields(definition);
+   addDeletedFields(definition);
+
+   const attributes = normalizeAttributes({
+     ...definition.attributes,
+
+     // Although timestamps are being set below, we still need to add
+     // them to the schema so that validation can be generated for them,
+     // namely in getSearchValidation.
+     createdAt: 'Date',
+     updatedAt: 'Date',
+     deletedAt: 'Date',
+     deleted: {
+       type: 'Boolean',
+       default: false,
    },
-   );
+   });
+
+   applyExtensions(attributes);
+
+   const schema = new mongoose.Schema(attributes, {
+     timestamps: true,
+     toJSON: serializeOptions,
+     toObject: serializeOptions,
+     ...options,
+   });

  // Soft Delete needs to be applied
  // first for hooks to work correctly.
@@ -63,6 +68,7 @@ export function createSchema(definition, options = {}) {
    applyCache(schema, definition);
    applyClone(schema);
    applyReload(schema);
+   applyExport(schema);
    applyDisallowed(schema);
    applyInclude(schema);
    applyHydrate(schema);
@@ -78,17 +84,9 @@ export function normalizeAttributes(arg, path = []) {
      return arg;
    } else if (typeof arg === 'function') {
      throw new Error('Native functions are not allowed as types.');
-   } else if (
-     return
-   } else if (Array.isArray(arg)) {
-     return normalizeSchemaTypedef({ type: arg }, path);
+   } else if (isTypedefInput(arg)) {
+     return normalizeTypedef(arg, path);
    } else if (typeof arg === 'object') {
-     assertRefs(arg, path);
-
-     if (isSchemaTypedef(arg)) {
-       return normalizeSchemaTypedef(arg, path);
-     }
-
      const attributes = {};
      for (let [key, val] of Object.entries(arg)) {
        attributes[key] = normalizeAttributes(val, [...path, key]);
@@ -97,78 +95,77 @@ export function normalizeAttributes(arg, path = []) {
    }
  }

- function
-
+ function normalizeArrayAttributes(arr, path) {
+   return arr.map((el, i) => {
+     return normalizeAttributes(el, [...path, i]);
+   });
+ }

-
-
-
-
-
-
+ function normalizeTypedef(arg, path) {
+   const typedef = arg.type ? arg : { type: arg };
+
+   if (Array.isArray(typedef.type)) {
+     // Normalize all inner fields.
+     typedef.type = normalizeArrayAttributes(typedef.type, path);
+   } else if (typeof typedef.type === 'object') {
+     // Normalize literal "type" field.
+     typedef.type = normalizeAttributes(typedef.type, path);
+   } else if (isExtendedSyntax(typedef)) {
+     // Normalize extended syntax: type "Object" or "Array".
+     typedef.attributes = normalizeAttributes(typedef.attributes, path);
  }

  if (typedef.type === 'String') {
+   // Auto-apply trim to string fields.
    typedef.trim ??= true;
+
+   if (typeof typedef.match === 'string') {
+     // Convert string RegExp so that
+     // it can be expressed in JSON.
+     typedef.match = parseRegExp(typedef.match);
+   }
  }

-
-
+ assertSchemaType(typedef, path);
+ assertObjectRefs(typedef, path);

-
-   return arr.map((el, i) => {
-     return normalizeAttributes(el, [...path, i]);
-   });
+ return typedef;
  }

- function
- if (typeof
-
-
-
-
-
-
-
-
-   const isTypedef = isSchemaTypedef(attributes);
-
-   for (let [key, val] of Object.entries(attributes)) {
-     const type = typeof val;
-     if (isTypedef) {
-       if (key === 'type' && type !== 'function') {
-         val = attributesToMongoose(val);
-       } else if (key === 'match' && type === 'string') {
-         // Convert match field to RegExp that cannot be expressed in JSON.
-         val = parseRegExp(val);
-       } else if (key === 'validate' && type === 'string') {
-         // Allow custom mongoose validation function that derives from the schema.
-         val = getNamedValidator(val);
-       } else if (key === 'attributes' && type === 'object') {
-         val = attributesToMongoose(val);
-       }
-     } else if (Array.isArray(val)) {
-       val = val.map(attributesToMongoose);
-     } else if (isPlainObject(val)) {
-       if (isScopeExtension(val)) {
-         applyScopeExtension(val, definition);
-         continue;
-       } else {
-         val = attributesToMongoose(val);
-       }
-     }
-     definition[key] = val;
+ function isTypedefInput(arg) {
+   if (typeof arg === 'string') {
+     // "Number" as shorthand for a typedef.
+     return true;
+   } else if (Array.isArray(arg)) {
+     // Array signals an array field with inner schema.
+     return true;
+   } else if (hasLiteralTypeField(arg)) {
+     // An object with a literal "type" field.
+     return false;
    }
+   return isSchemaTypedef(arg);
+ }

-
-
+ // Detects input like:
+ // {
+ //   "type": "String",
+ //   "name": "String",
+ // }
+ // Which is not intended to be a typedef.
+ function hasLiteralTypeField(arg) {
+   const { type, ...rest } = arg || {};
+
+   if (!isMongooseType(type)) {
+     return false;
  }

-   return
+   return Object.values(rest).some((key) => {
+     return isMongooseType(key);
+   });
  }

- function assertSchemaType(
+ function assertSchemaType(typedef, path) {
+   const { type } = typedef;
  if (typeof type === 'string') {
    if (!isMongooseType(type)) {
      const p = path.join('.');
@@ -182,46 +179,68 @@ function assertSchemaType(type, path) {
    }
  }

- function
- const { type, ref
+ function assertObjectRefs(typedef, path) {
+   const { type, ref } = typedef;
  const p = path.join('.');
-
+
+ if (requiresRef(typedef, path)) {
    throw new Error(`Ref must be passed for "${p}".`);
+   // TODO: what is the middle part doing here??
  } else if (ref && !isMongooseType(ref) && !isObjectIdType(type)) {
    throw new Error(`Ref field "${p}" must be type "ObjectId".`);
  }
  }

- function
-
- }
+ function requiresRef(typedef, path) {
+   const { type, ref, refPath } = typedef;

-
-
-
+   // Allow "_id" to not have a ref for the
+   // delete hooks module to function.
+   if (last(path) === '_id') {
+     return false;
+   }

-
-   return !!mongoose.Schema.Types[type];
+   return isObjectIdType(type) && !ref && !refPath;
  }

-
-
-
-
-
+ // Extensions
+
+ function applyExtensions(arg) {
+   if (isSchemaTypedef(arg)) {
+     applySyntaxExtensions(arg);
+     applyValidateExtension(arg);
+     applyUniqueExtension(arg);
+     applyTupleExtension(arg);
+     applyDateExtension(arg);
+
+     if (Array.isArray(arg.type)) {
+       for (let field of arg.type) {
+         applyExtensions(field);
+       }
+       applyArrayValidators(arg);
+       applyOptionHoisting(arg);
+     }
+   } else if (isPlainObject(arg)) {
+     for (let [key, value] of Object.entries(arg)) {
+       if (isScopeExtension(value)) {
+         applyScopeExtension(value, arg, key);
+       } else {
+         applyExtensions(value);
+       }
+     }
+   }
  }

  function applySyntaxExtensions(typedef) {
    const { type, attributes } = typedef;
    if (isExtendedSyntax(typedef)) {
-
+     applyExtensions(attributes);
      if (type === 'Array') {
-       typedef.type = [
+       typedef.type = [attributes];
+     } else if (type === 'Object') {
+       typedef.type = new mongoose.Schema(attributes);
      }
-
-     if (Array.isArray(typedef.type)) {
-       applyArrayValidators(typedef);
-       applyOptionHoisting(typedef);
+     delete typedef['attributes'];
    }
  }
@@ -233,30 +252,42 @@ function applyOptionHoisting(typedef) {

  function isExtendedSyntax(typedef) {
    const { type, attributes } = typedef;
-
+   if (!attributes) {
+     return false;
+   }
+   return type === 'Object' || type === 'Array' || type === 'Scope';
  }

  function isScopeExtension(arg) {
    return isSchemaTypedef(arg) && arg.type === 'Scope';
  }

- function applyScopeExtension(typedef,
-   const { type, attributes, ...
-
-
-
-
-
+ function applyScopeExtension(typedef, parent, name) {
+   const { type, attributes, ...rest } = typedef;
+
+   for (let [key, value] of Object.entries(attributes)) {
+     if (isSchemaTypedef(value)) {
+       // If the child is a typedef then apply
+       // options directly to the field.
+       applyExtensions(value);
+       parent[key] = {
+         ...value,
+         ...rest,
        };
      } else {
-
+       // If the child is a nested object then
+       // need to use extended object syntax.
+       const typedef = {
          type: 'Object',
-         attributes:
-         ...
+         attributes: value,
+         ...rest,
        };
+       applyExtensions(typedef);
+       parent[key] = typedef;
      }
-     definition[key] = attributesToMongoose(val);
    }
+
+   delete parent[name];
  }

  // Extended tuple syntax. Return mixed type and set validator.
@@ -284,6 +315,15 @@ function applyDateExtension(typedef) {
    }
  }

+ // Apply custom mongoose validation by name.
+ function applyValidateExtension(typedef) {
+   const { validate } = typedef;
+
+   if (typeof validate === 'string') {
+     typedef.validate = getNamedValidator(typedef.validate);
+   }
+ }
+
  // Intercepts "unique" options and changes to "softUnique".
  function applyUniqueExtension(typedef) {
    if (typedef.unique === true) {
@@ -323,12 +363,7 @@ function validateMaxLength(max) {
    };
  }

-
-   return (...args) => {
-     fn1?.(...args);
-     fn2?.(...args);
-   };
- }
+ // Regex Parsing

  const REG_MATCH = /^\/(.+)\/(\w+)$/;

@@ -340,3 +375,24 @@ function parseRegExp(str) {
  const [, source, flags] = match;
  return RegExp(source, flags);
  }
+
+ // Utils
+
+ function camelUpper(str) {
+   return capitalize(camelCase(str));
+ }
+
+ function isObjectIdType(type) {
+   return type === 'ObjectId' || type === mongoose.Schema.Types.ObjectId;
+ }
+
+ function isMongooseType(type) {
+   return !!mongoose.Schema.Types[type];
+ }
+
+ function chain(fn1, fn2) {
+   return (...args) => {
+     fn1?.(...args);
+     fn2?.(...args);
+   };
+ }
package/types/cache.d.ts
CHANGED
package/types/cache.d.ts.map
CHANGED
@@ -1 +1 @@
- {"version":3,"file":"cache.d.ts","sourceRoot":"","sources":["../src/cache.js"],"names":[],"mappings":"AASA,+
+ {"version":3,"file":"cache.d.ts","sourceRoot":"","sources":["../src/cache.js"],"names":[],"mappings":"AASA,sDAeC;AAED,+DASC"}
package/types/delete-hooks.d.ts
CHANGED
package/types/delete-hooks.d.ts.map
CHANGED
@@ -1 +1 @@
- {"version":3,"file":"delete-hooks.d.ts","sourceRoot":"","sources":["../src/delete-hooks.js"],"names":[],"mappings":"AAQA,
+ {"version":3,"file":"delete-hooks.d.ts","sourceRoot":"","sources":["../src/delete-hooks.js"],"names":[],"mappings":"AAQA,wDAaC;AAED,qEAqCC"}
package/types/export.d.ts.map
ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"export.d.ts","sourceRoot":"","sources":["../src/export.js"],"names":[],"mappings":"AAEA,+CAWC"}
package/types/reload.d.ts.map
CHANGED
@@ -1 +1 @@
- {"version":3,"file":"reload.d.ts","sourceRoot":"","sources":["../src/reload.js"],"names":[],"mappings":"AAKA,+
+ {"version":3,"file":"reload.d.ts","sourceRoot":"","sources":["../src/reload.js"],"names":[],"mappings":"AAKA,+CAwBC"}
package/types/schema.d.ts
CHANGED
@@ -19,7 +19,6 @@ export function createSchema(definition: object, options?: mongoose.SchemaOption
  };
  collation?: mongoose.mongo.CollationOptions;
  collectionOptions?: mongoose.mongo.CreateCollectionOptions;
- lean?: boolean | mongoose.LeanOptions;
  timeseries?: mongoose.mongo.TimeSeriesCollectionOptions;
  expireAfterSeconds?: number;
  expires?: number | string;
package/types/schema.d.ts.map
CHANGED
@@ -1 +1 @@
- {"version":3,"file":"schema.d.ts","sourceRoot":"","sources":["../src/schema.js"],"names":[],"mappings":"
+ {"version":3,"file":"schema.d.ts","sourceRoot":"","sources":["../src/schema.js"],"names":[],"mappings":"AAyBA;;;;;;;GAOG;AACH,yCAJW,MAAM,YACN,QAAQ,CAAC,aAAa;;;;;;;YA2C7B,CAAC;WAAa,CAAC;mBACH,CAAC;;;;;;;;;;;;;;;;;;;;;;;SAgIb,CAAC;gBAA4B,CAAA;SAAW,CAAC;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;aA3H5C;AAED,iEAcC;qBA9FoB,UAAU"}