@coderich/autograph 0.12.0 → 0.13.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (56)
  1. package/index.js +4 -6
  2. package/package.json +30 -44
  3. package/src/data/DataLoader.js +77 -70
  4. package/src/data/Emitter.js +89 -0
  5. package/src/data/Loader.js +33 -0
  6. package/src/data/Pipeline.js +84 -101
  7. package/src/data/Resolver.js +304 -0
  8. package/src/data/Transaction.js +49 -0
  9. package/src/query/Query.js +159 -335
  10. package/src/query/QueryBuilder.js +228 -114
  11. package/src/query/QueryResolver.js +110 -205
  12. package/src/query/QueryResolverTransaction.js +16 -0
  13. package/src/schema/Schema.js +602 -0
  14. package/src/service/AppService.js +38 -0
  15. package/src/service/ErrorService.js +7 -0
  16. package/CHANGELOG.md +0 -41
  17. package/LICENSE +0 -21
  18. package/README.md +0 -76
  19. package/src/.DS_Store +0 -0
  20. package/src/core/.DS_Store +0 -0
  21. package/src/core/Boom.js +0 -9
  22. package/src/core/EventEmitter.js +0 -95
  23. package/src/core/Resolver.js +0 -124
  24. package/src/core/Schema.js +0 -55
  25. package/src/core/ServerResolver.js +0 -15
  26. package/src/data/.DS_Store +0 -0
  27. package/src/data/DataService.js +0 -120
  28. package/src/data/DataTransaction.js +0 -161
  29. package/src/data/Field.js +0 -83
  30. package/src/data/Model.js +0 -214
  31. package/src/data/TreeMap.js +0 -78
  32. package/src/data/Type.js +0 -50
  33. package/src/driver/.DS_Store +0 -0
  34. package/src/driver/MongoDriver.js +0 -227
  35. package/src/driver/index.js +0 -11
  36. package/src/graphql/.DS_Store +0 -0
  37. package/src/graphql/ast/.DS_Store +0 -0
  38. package/src/graphql/ast/Field.js +0 -206
  39. package/src/graphql/ast/Model.js +0 -145
  40. package/src/graphql/ast/Node.js +0 -291
  41. package/src/graphql/ast/Schema.js +0 -133
  42. package/src/graphql/ast/Type.js +0 -26
  43. package/src/graphql/ast/TypeDefApi.js +0 -93
  44. package/src/graphql/extension/.DS_Store +0 -0
  45. package/src/graphql/extension/api.js +0 -193
  46. package/src/graphql/extension/framework.js +0 -71
  47. package/src/graphql/extension/type.js +0 -34
  48. package/src/query/.DS_Store +0 -0
  49. package/src/query/QueryBuilderTransaction.js +0 -26
  50. package/src/query/QueryService.js +0 -111
  51. package/src/service/.DS_Store +0 -0
  52. package/src/service/app.service.js +0 -319
  53. package/src/service/decorator.service.js +0 -114
  54. package/src/service/event.service.js +0 -66
  55. package/src/service/graphql.service.js +0 -92
  56. package/src/service/schema.service.js +0 -95
package/index.js CHANGED
@@ -1,13 +1,11 @@
1
- const Schema = require('./src/core/Schema');
2
- const Resolver = require('./src/core/Resolver');
1
+ const Schema = require('./src/schema/Schema');
2
+ const Resolver = require('./src/data/Resolver');
3
3
  const Pipeline = require('./src/data/Pipeline');
4
- const Driver = require('./src/driver');
5
- const { eventEmitter: Emitter } = require('./src/service/event.service');
4
+ const Emitter = require('./src/data/Emitter');
6
5
 
7
6
  module.exports = {
8
7
  Schema,
9
8
  Resolver,
10
- Driver,
11
- Emitter,
12
9
  Pipeline,
10
+ Emitter,
13
11
  };
package/package.json CHANGED
@@ -1,59 +1,45 @@
1
1
  {
2
2
  "name": "@coderich/autograph",
3
- "author": "Richard Livolsi (coderich)",
4
- "version": "0.12.0",
5
- "description": "AutoGraph",
6
- "keywords": [
7
- "graphql",
8
- "mongo",
9
- "neo4j",
10
- "redis",
11
- "dataloader",
12
- "resolver",
13
- "orm"
14
- ],
15
3
  "main": "index.js",
16
- "license": "MIT",
4
+ "version": "0.13.1",
5
+ "publishConfig": {
6
+ "access": "public"
7
+ },
17
8
  "files": [
18
- "src/",
19
- "index.js"
9
+ "src",
10
+ "!__mocks__"
20
11
  ],
21
- "engines": {
22
- "node": ">=14.17.0"
23
- },
24
12
  "scripts": {
25
- "start": "APP_ROOT_PATH=$(pwd) node ./test/server",
26
- "test": "APP_ROOT_PATH=$(pwd) ratchet test",
27
- "test:debug": "APP_ROOT_PATH=$(pwd) node --inspect-brk ./node_modules/jest/bin/jest.js --watch --runInBand --logHeapUsage",
28
- "lint": "APP_ROOT_PATH=$(pwd) ratchet lint",
29
- "inspect": "APP_ROOT_PATH=$(pwd) node --expose-gc --inspect=9222 ./src/server",
30
- "ratchet": "ratchet"
13
+ "test": "NODE_OPTIONS=\"--stack-trace-limit=1000\" jest --config=jest.config.js",
14
+ "lint": "eslint --config=.eslintrc ./",
15
+ "dev": "coderich-dev"
31
16
  },
32
17
  "dependencies": {
33
- "@hapi/boom": "^9.1.0",
34
- "dataloader": "^2.0.0",
35
- "deepmerge": "^4.2.2",
36
- "fill-range": "^7.0.1",
37
- "flat": "^5.0.2",
38
- "glob": "^7.1.6",
39
- "graphql-fields": "^2.0.3",
40
- "lodash": "^4.17.21",
41
- "mongodb": "4.8.1",
42
- "object-hash": "^2.0.1",
43
- "picomatch": "^2.1.1"
18
+ "@coderich/util": "0.1.4",
19
+ "@graphql-tools/merge": "9.0.0",
20
+ "@graphql-tools/resolvers-composition": "7.0.0",
21
+ "@hapi/boom": "10.0.1",
22
+ "bson-objectid": "2.0.4",
23
+ "dataloader": "2.2.2",
24
+ "deepmerge": "4.3.1",
25
+ "fill-range": "7.0.1",
26
+ "lodash.get": "4.4.2",
27
+ "lodash.merge": "4.6.2",
28
+ "lodash.uniqwith": "4.5.0",
29
+ "object-hash": "3.0.0",
30
+ "picomatch": "2.3.1"
44
31
  },
45
32
  "devDependencies": {
46
- "@coderich/ratchet": "^1.5.8",
47
- "@graphql-tools/schema": "^9.0.1",
48
- "graphql": "^15.5.0",
49
- "mongodb-memory-server": "^8.7.2",
50
- "validator": "^13.7.0"
33
+ "@apollo/server": "4.9.5",
34
+ "@coderich/autograph-mongodb": "0.0.1",
35
+ "@coderich/dev": "0.1.0",
36
+ "@graphql-tools/schema": "10.0.0",
37
+ "graphql": "16.6.0",
38
+ "mongodb": "5.7.0",
39
+ "mongodb-memory-server": "8.13.0",
40
+ "validator": "13.9.0"
51
41
  },
52
42
  "peerDependencies": {
53
43
  "graphql": "*"
54
- },
55
- "repository": {
56
- "type": "git",
57
- "url": "git@github.com:coderich/autograph.git"
58
44
  }
59
45
  }
@@ -1,82 +1,89 @@
1
- const FBDataLoader = require('dataloader');
2
- const { map, ensureArray, hashObject } = require('../service/app.service');
3
- const Query = require('../query/Query');
1
+ const get = require('lodash.get');
2
+ const Util = require('@coderich/util');
3
+ const DataLoader = require('dataloader');
4
+ const { hashObject } = require('../service/AppService');
4
5
 
5
- const handleData = (data, model, query) => {
6
- if (data == null || typeof data !== 'object') return data;
7
- return model.deserialize(data, query);
8
- };
9
-
10
- module.exports = class DataLoader extends FBDataLoader {
11
- constructor(resolver, model) {
12
- const driver = model.getDriver();
6
+ module.exports = class Loader {
7
+ #model;
8
+ #loader;
13
9
 
14
- return new FBDataLoader((queries) => {
15
- let performBatchQuery = false; // If we don't have to batch it's faster to resolve normal
16
- const defaultBatchName = '__default__'; // Something that won't collide with an actual field name
10
+ constructor(model) {
11
+ this.#model = model;
12
+ model.loader.cacheKeyFn ??= (query => hashObject(query.toCacheKey()));
13
+ this.#loader = new DataLoader(keys => this.#resolve(keys), model.loader);
14
+ }
17
15
 
18
- /**
19
- * Batch queries can save resources and network round-trip latency. However, we have to be careful to
20
- * preserve the order and adhere to the DataLoader API. This step simply creates a map of batch
21
- * queries to run; saving the order ("i") along with useful meta information
22
- */
23
- const batchQueries = queries.reduce((prev, query, i) => {
24
- const { batch = defaultBatchName, where, cmd } = query.toObject();
25
- const key = batch && (cmd === 'one' || cmd === 'many') ? batch : defaultBatchName;
26
- if (key !== defaultBatchName) performBatchQuery = true;
27
- prev[key] = prev[key] || [];
28
- prev[key].push({ query, where, cmd, i });
29
- return prev;
30
- }, {});
16
+ clearAll() {
17
+ return this.#loader.clearAll();
18
+ }
31
19
 
32
- // Don't batch unless it's worth it!
33
- if (!performBatchQuery) {
34
- return Promise.all(queries.map((query) => {
35
- return driver.resolve(query.toDriver()).then(data => handleData(data, model, query));
36
- }));
37
- }
20
+ resolve(query) {
21
+ return this.#loader.load(query);
22
+ }
38
23
 
39
- /**
40
- * We have reduced the number of queries down to a smaller set of batch queries to run. The dance
41
- * performed below retreives the data and then expands the results back into the original queries
42
- */
43
- const whereShape = model.getShape('create', 'where');
24
+ #resolve(queries) {
25
+ return Promise.all(queries.map((query) => {
26
+ const dquery = query.toDriver();
27
+ const $query = dquery.toObject();
44
28
 
45
- return Promise.all(Object.entries(batchQueries).map(([key, values]) => {
46
- switch (key) {
47
- case defaultBatchName: {
48
- return values.map(({ query, i }) => driver.resolve(query.toDriver()).then(data => handleData(data, model, query)).then(data => ({ data, i })));
49
- }
50
- default: {
51
- const keys = Array.from(new Set(values.map(({ where }) => map(where[key], el => `${el}`)).flat()));
52
- const batchQuery = new Query({ resolver, model, method: 'findMany', crud: 'read' });
53
- const batchWhere = model.shapeObject(whereShape, { ...values[0].where, [key]: keys }, batchQuery); // All where's should be the same - this is for idKey on keys etc
29
+ return this.#model.source.client.resolve($query).then((data) => {
30
+ if (data == null) return null; // Explicit return null;
31
+ if ($query.isCursorPaging && Array.isArray(data)) return Loader.#paginateResults(data, query.toObject());
32
+ return data;
33
+ });
34
+ }));
35
+ }
54
36
 
55
- return driver.resolve(batchQuery.where(batchWhere).toDriver()).then(data => handleData(data, model, batchQuery)).then((results) => {
56
- // One-time data transformation on results to make matching back faster (below)
57
- const resultsByKey = results.reduce((prev, row) => {
58
- ensureArray(row[key]).forEach((id) => {
59
- prev[id] = prev[id] || [];
60
- prev[id].push(row);
61
- });
62
- return prev;
63
- }, {});
37
+ static #paginateResults(rs, query) {
38
+ let hasNextPage = false;
39
+ let hasPreviousPage = false;
40
+ const { first, after, last, before, sort = {} } = query;
41
+ const sortPaths = Object.keys(Util.flatten(sort, { safe: true }));
42
+ const limiter = first || last;
64
43
 
65
- // Match back
66
- return values.map(({ where, cmd, i }) => {
67
- const targets = ensureArray(where[key]).map(t => `${t}`);
68
- const data = targets.map(t => resultsByKey[t] || null).flat();
69
- return { i, data: cmd === 'many' ? data.filter(d => d != null) : data[0] };
70
- });
71
- });
72
- }
73
- }
74
- }).flat()).then((results) => {
75
- return results.flat().sort((a, b) => a.i - b.i).map(({ data }) => data);
44
+ // Add $cursor data (but only if sort is defined!)
45
+ if (sortPaths.length) {
46
+ Util.map(rs, (doc) => {
47
+ const sortValues = sortPaths.reduce((prev, path) => Object.assign(prev, { [path]: get(doc, path) }), {});
48
+ Object.defineProperty(doc, '$cursor', { value: Buffer.from(JSON.stringify(sortValues)).toString('base64') });
76
49
  });
77
- }, {
78
- cache: true,
79
- cacheKeyFn: query => hashObject(query.getCacheKey()),
50
+ }
51
+
52
+ // First try to take off the "bookends" ($gte | $lte)
53
+ if (rs.length && rs[0].$cursor === after) {
54
+ rs.shift();
55
+ hasPreviousPage = true;
56
+ }
57
+
58
+ if (rs.length && rs[rs.length - 1].$cursor === before) {
59
+ rs.pop();
60
+ hasNextPage = true;
61
+ }
62
+
63
+ // Next, remove any overage
64
+ const overage = rs.length - (limiter - 2);
65
+
66
+ if (overage > 0) {
67
+ if (first) {
68
+ rs.splice(-overage);
69
+ hasNextPage = true;
70
+ } else if (last) {
71
+ rs.splice(0, overage);
72
+ hasPreviousPage = true;
73
+ } else {
74
+ rs.splice(-overage);
75
+ hasNextPage = true;
76
+ }
77
+ }
78
+
79
+ // Add $pageInfo
80
+ return Object.defineProperty(rs, '$pageInfo', {
81
+ value: {
82
+ startCursor: get(rs, '0.$cursor', ''),
83
+ endCursor: get(rs, `${rs.length - 1}.$cursor`, ''),
84
+ hasPreviousPage,
85
+ hasNextPage,
86
+ },
80
87
  });
81
88
  }
82
89
  };
@@ -0,0 +1,89 @@
1
+ const EventEmitter = require('events');
2
+ const Util = require('@coderich/util');
3
+ const { AbortEarlyError } = require('../service/ErrorService');
4
+
5
+ /**
6
+ * EventEmitter.
7
+ *
8
+ * The difference is that I'm looking at each raw listeners to determine how many arguments it's expecting.
9
+ * If it expects more than 1 we block and wait for it to finish.
10
+ */
11
+ class Emitter extends EventEmitter {
12
+ emit(event, data) {
13
+ // Here we pull out functions with "next" vs those without
14
+ const [basicFuncs, nextFuncs] = this.rawListeners(event).reduce((prev, wrapper) => {
15
+ const listener = wrapper.listener || wrapper;
16
+ const isBasic = listener.length < 2;
17
+ return prev[isBasic ? 0 : 1].push(wrapper) && prev;
18
+ }, [[], []]);
19
+
20
+ return new Promise((resolve, reject) => {
21
+ // Basic functions run first; if they return a value they abort the flow of execution
22
+ basicFuncs.forEach((fn) => {
23
+ const value = fn(data);
24
+ if (value !== undefined && !(value instanceof Promise)) throw new AbortEarlyError(value);
25
+ });
26
+
27
+ // Next functions are async and control the timing of the next phase
28
+ Promise.all(nextFuncs.map((fn) => {
29
+ return new Promise((next) => {
30
+ Promise.resolve(fn(data, next));
31
+ }).then((result) => {
32
+ if (result !== undefined) throw new AbortEarlyError(result);
33
+ }).catch(reject);
34
+ })).then(() => resolve()); // Resolve to undefined
35
+ }).catch((e) => {
36
+ if (e instanceof AbortEarlyError) return e.data;
37
+ throw e;
38
+ });
39
+ }
40
+
41
+ /**
42
+ * Syntactic sugar to listen on query keys
43
+ */
44
+ onKeys(...args) {
45
+ return this.#createWrapper(...args, 'key');
46
+ }
47
+
48
+ /**
49
+ * Syntactic sugar to listen once on query keys
50
+ */
51
+ onceKeys(...args) {
52
+ return this.#createWrapper(...args, 'key', true);
53
+ }
54
+
55
+ /**
56
+ * Syntactic sugar to listen on query models
57
+ */
58
+ onModels(...args) {
59
+ return this.#createWrapper(...args, 'model');
60
+ }
61
+
62
+ /**
63
+ * Syntactic sugar to listen once on query models
64
+ */
65
+ onceModels(...args) {
66
+ return this.#createWrapper(...args, 'model', true);
67
+ }
68
+
69
+ #createWrapper(eventName, arr, listener, prop, once) {
70
+ arr = Util.ensureArray(arr);
71
+
72
+ const wrapper = listener.length < 2 ? (event) => {
73
+ if (arr.includes(`${event.query[prop]}`)) {
74
+ if (once) this.removeListener(eventName, wrapper);
75
+ return listener(event);
76
+ }
77
+ return undefined;
78
+ } : (event, next) => {
79
+ if (arr.includes(`${event.query[prop]}`)) {
80
+ if (once) this.removeListener(eventName, wrapper);
81
+ next(listener(event, next));
82
+ }
83
+ };
84
+
85
+ return this.on(eventName, wrapper);
86
+ }
87
+ }
88
+
89
+ module.exports = new Emitter().setMaxListeners(100);
@@ -0,0 +1,33 @@
1
+ const DataLoader = require('dataloader');
2
+ const { hashObject } = require('../service/AppService');
3
+
4
+ module.exports = class Loader {
5
+ #loader;
6
+ #resolver;
7
+
8
+ constructor(resolver, config = {}) {
9
+ config.cacheKeyFn ??= event => hashObject(event.args);
10
+ this.#loader = new DataLoader(events => this.#resolve(events), config);
11
+ this.#resolver = resolver;
12
+ }
13
+
14
+ load(args, context) {
15
+ return this.#loader.load({ args, context });
16
+ }
17
+
18
+ prime(args, value) {
19
+ return this.#loader.prime({ args }, value);
20
+ }
21
+
22
+ clear(args) {
23
+ return this.#loader.clear({ args });
24
+ }
25
+
26
+ clearAll() {
27
+ return this.#loader.clearAll();
28
+ }
29
+
30
+ #resolve(events) {
31
+ return Promise.all(events.map(event => this.#resolver(event.args, event.context)));
32
+ }
33
+ };
@@ -1,6 +1,8 @@
1
- const { uniqWith } = require('lodash');
2
- const { map, ensureArray, hashObject } = require('../service/app.service');
3
- const Boom = require('../core/Boom');
1
+ const Boom = require('@hapi/boom');
2
+ const get = require('lodash.get');
3
+ const Util = require('@coderich/util');
4
+ const uniqWith = require('lodash.uniqwith');
5
+ const { hashObject } = require('../service/AppService');
4
6
 
5
7
  module.exports = class Pipeline {
6
8
  constructor() {
@@ -12,20 +14,25 @@ module.exports = class Pipeline {
12
14
  if (typeof factory !== 'function') throw new Error(`Pipeline definition for "${name}" must be a function`);
13
15
 
14
16
  // Determine options; which may come from the factory function
15
- const { ignoreNull = true, itemize = true, configurable = false } = Object.assign({}, factory.options, options);
17
+ const { ignoreNull = true, itemize = true, configurable = false } = { ...factory.options, ...options };
16
18
 
17
19
  const wrapper = Object.defineProperty((args) => {
18
- if (ignoreNull && args.value == null) return args.value;
20
+ try {
21
+ if (ignoreNull && args.value == null) return args.value;
22
+
23
+ if (ignoreNull && itemize) {
24
+ return Util.map(args.value, (value) => {
25
+ const v = factory({ ...args, value });
26
+ return v === undefined ? value : v;
27
+ });
28
+ }
19
29
 
20
- if (ignoreNull && itemize) {
21
- return map(args.value, (val, index) => {
22
- const v = factory({ ...args, value: val, index });
23
- return v === undefined ? val : v;
24
- });
30
+ const value = factory(args);
31
+ return value === undefined ? args.value : value;
32
+ } catch (e) {
33
+ const { data = {} } = e;
34
+ throw Boom.boomify(e, { data: { ...args, ...data } });
25
35
  }
26
-
27
- const val = factory(args);
28
- return val === undefined ? args.value : val;
29
36
  }, 'name', { value: name });
30
37
 
31
38
  // Attach enumerable method to the Pipeline
@@ -42,22 +49,6 @@ module.exports = class Pipeline {
42
49
  return Object.defineProperty(Pipeline, name, { value: (...args) => Object.defineProperty(thunk(...args), 'options', { value: options }) })[name];
43
50
  }
44
51
 
45
- // static wrapper(name, factory, { ignoreNull, itemize }) {
46
- // return Object.defineProperty((args) => {
47
- // if (ignoreNull && args.value == null) return args.value;
48
-
49
- // if (ignoreNull && itemize) {
50
- // return map(args.value, (val, index) => {
51
- // const v = factory({ ...args, value: val, index });
52
- // return v === undefined ? val : v;
53
- // });
54
- // }
55
-
56
- // const val = factory(args);
57
- // return val === undefined ? args.value : val;
58
- // }, 'name', { value: name });
59
- // }
60
-
61
52
  static createPresets() {
62
53
  // Built-In Javascript String Transformers
63
54
  const jsStringTransformers = ['toLowerCase', 'toUpperCase', 'toString', 'trim', 'trimEnd', 'trimStart'];
@@ -66,76 +57,78 @@ module.exports = class Pipeline {
66
57
  // Additional Transformers
67
58
  Pipeline.define('toTitleCase', ({ value }) => value.replace(/\w\S*/g, w => w.charAt(0).toUpperCase() + w.slice(1).toLowerCase()));
68
59
  Pipeline.define('toSentenceCase', ({ value }) => value.charAt(0).toUpperCase() + value.slice(1));
69
- Pipeline.define('toId', ({ model, value }) => model.idValue(value));
70
60
  Pipeline.define('toArray', ({ value }) => (Array.isArray(value) ? value : [value]), { itemize: false });
71
61
  Pipeline.define('toDate', ({ value }) => new Date(value), { configurable: true });
72
- Pipeline.define('timestamp', ({ value }) => Date.now(), { ignoreNull: false });
73
- Pipeline.define('createdAt', ({ value }) => value || Date.now(), { ignoreNull: false });
62
+ Pipeline.define('updatedAt', () => new Date(), { ignoreNull: false });
63
+ Pipeline.define('createdAt', ({ value }) => value || new Date(), { ignoreNull: false });
64
+ Pipeline.define('timestamp', () => Date.now(), { ignoreNull: false });
74
65
  Pipeline.define('dedupe', ({ value }) => uniqWith(value, (b, c) => hashObject(b) === hashObject(c)), { itemize: false });
75
- Pipeline.define('idKey', ({ model, value }) => (value == null ? model.idValue() : value), { ignoreNull: false });
76
- Pipeline.define('idField', ({ model, field, value }) => field.getIdModel().idValue(value.id || value));
77
- Pipeline.define('ensureArrayValue', ({ field, value }) => (field.toObject().isArray && !Array.isArray(value) ? [value] : value), { itemize: false });
78
-
79
- Pipeline.define('ensureId', ({ resolver, field, value }) => {
80
- const { type } = field.toObject();
81
- const ids = Array.from(new Set(ensureArray(value).map(v => `${v}`)));
82
-
83
- return resolver.match(type).where({ id: ids }).count().then((count) => {
84
- if (count !== ids.length) throw Boom.notFound(`${type} Not Found`);
85
- });
86
- }, { itemize: false });
87
-
88
- Pipeline.define('defaultValue', ({ field, value }) => {
89
- const { defaultValue } = field.toObject();
90
- return value === undefined ? defaultValue : value;
91
- }, { ignoreNull: false });
92
-
93
- Pipeline.define('castValue', ({ field, value }) => {
94
- const { type, isEmbedded } = field.toObject();
95
66
 
67
+ // Structures
68
+ Pipeline.define('$instruct', params => Pipeline.resolve(params, 'instruct'), { ignoreNull: false });
69
+ Pipeline.define('$normalize', params => Pipeline.resolve(params, 'normalize'), { ignoreNull: false });
70
+ Pipeline.define('$construct', params => Pipeline.resolve(params, 'construct'), { ignoreNull: false });
71
+ Pipeline.define('$restruct', params => Pipeline.resolve(params, 'restruct'), { ignoreNull: false });
72
+ Pipeline.define('$serialize', params => Pipeline.resolve(params, 'serialize'), { ignoreNull: false });
73
+ Pipeline.define('$finalize', params => Pipeline.resolve(params, 'finalize'), { ignoreNull: false });
74
+
75
+ //
76
+ Pipeline.define('$pk', ({ query, model, value, path }) => model.source.idValue(get(query.doc, path) || value?.id || value), { ignoreNull: false });
77
+ Pipeline.define('$fk', ({ model, value }) => model.source.idValue(value.id || value));
78
+ Pipeline.define('$default', ({ field: { defaultValue }, value }) => (value === undefined ? defaultValue : value), { ignoreNull: false });
79
+
80
+ //
81
+ Pipeline.define('$cast', ({ field, value }) => {
82
+ const { type, isEmbedded } = field;
96
83
  if (isEmbedded) return value;
97
84
 
98
- return map(value, (v) => {
99
- switch (type) {
100
- case 'String': {
101
- return `${v}`;
102
- }
103
- case 'Float': case 'Number': {
104
- const num = Number(v);
105
- if (!Number.isNaN(num)) return num;
106
- return v;
107
- }
108
- case 'Int': {
109
- const num = Number(v);
110
- if (!Number.isNaN(num)) return parseInt(v, 10);
111
- return v;
112
- }
113
- case 'Boolean': {
114
- if (v === 'true') return true;
115
- if (v === 'false') return false;
116
- return v;
117
- }
118
- default: {
119
- return v;
120
- }
85
+ switch (type.toLowerCase()) {
86
+ case 'string': {
87
+ return `${value}`;
88
+ }
89
+ case 'float': case 'number': {
90
+ const num = Number(value);
91
+ if (!Number.isNaN(num)) return num;
92
+ return value;
93
+ }
94
+ case 'int': {
95
+ const num = Number(value);
96
+ if (!Number.isNaN(num)) return parseInt(value, 10);
97
+ return value;
121
98
  }
99
+ case 'boolean': {
100
+ if (value === 'true') return true;
101
+ if (value === 'false') return false;
102
+ return value;
103
+ }
104
+ default: {
105
+ return value;
106
+ }
107
+ }
108
+ });
109
+
110
+ //
111
+ Pipeline.define('ensureId', ({ query, resolver, model, field, value }) => {
112
+ const { type } = field;
113
+ const ids = Util.filterBy(Util.ensureArray(value), (a, b) => `${a}` === `${b}`);
114
+ return resolver.match(type).flags(query.flags).where({ id: ids }).count().then((count) => {
115
+ if (count !== ids.length) throw Boom.notFound(`${type} Not Found`);
122
116
  });
123
117
  }, { itemize: false });
124
118
 
125
119
  // Required fields
126
- Pipeline.define('required', ({ model, field, value }) => {
127
- if (value == null) throw Boom.badRequest(`${model}.${field} is required`);
120
+ Pipeline.define('required', ({ query, model, field, value }) => {
121
+ if ((query.crud === 'create' && value == null) || (query.crud === 'update' && value === null)) throw Boom.badRequest(`${model.name}.${field.name} is required`);
128
122
  }, { ignoreNull: false });
129
123
 
130
124
  // A field cannot hold a reference to itself
131
- Pipeline.define('selfless', ({ model, field, parent, parentPath, value }) => {
132
- if (`${value}` === `${parentPath('id')}`) throw Boom.badRequest(`${model}.${field} cannot hold a reference to itself`);
125
+ Pipeline.define('selfless', ({ query, model, field, value }) => {
126
+ if (`${value}` === `${query.doc?.id}`) throw Boom.badRequest(`${model}.${field} cannot hold a reference to itself`);
133
127
  });
134
128
 
135
129
  // Once set it cannot be changed
136
- Pipeline.define('immutable', ({ model, field, docPath, parentPath, path, value }) => {
137
- const hint = { id: parentPath('id') };
138
- const oldVal = docPath(path, hint);
130
+ Pipeline.define('immutable', ({ query, model, field, value, path }) => {
131
+ const oldVal = get(query.doc, path);
139
132
  if (oldVal !== undefined && value !== undefined && `${hashObject(oldVal)}` !== `${hashObject(value)}`) throw Boom.badRequest(`${model}.${field} is immutable; cannot be changed once set ${oldVal} -> ${value}`);
140
133
  });
141
134
 
@@ -161,24 +154,14 @@ module.exports = class Pipeline {
161
154
  };
162
155
  }, { itemize: false });
163
156
  }
157
+
158
+ static resolve(params, pipeline) {
159
+ const transformers = params.field.pipelines[pipeline] || [];
160
+
161
+ return Util.pipeline(transformers.map(t => async (value) => {
162
+ return Pipeline[t]({ ...params, value });
163
+ }), params.value);
164
+ }
164
165
  };
165
166
 
166
- // const jsStringMethods = [
167
- // 'charAt', 'charCodeAt', 'codePointAt', 'concat', 'indexOf', 'lastIndexOf', 'localeCompare',
168
- // 'normalize', 'padEnd', 'padStart', 'repeat', 'replace', 'search', 'slice', 'split', 'substr', 'substring',
169
- // 'toLocaleLowerCase', 'toLocaleUpperCase', 'toLowerCase', 'toString', 'toUpperCase', 'trim', 'trimEnd', 'trimStart', 'raw',
170
- // ];
171
-
172
- // Transformer.factory('toTitleCase', () => ({ value }) => value.replace(/\w\S*/g, w => w.charAt(0).toUpperCase() + w.slice(1).toLowerCase()), { enumerable: true });
173
- // Transformer.factory('toLocaleTitleCase', (...args) => ({ value }) => value.replace(/\w\S*/g, w => w.charAt(0).toLocaleUpperCase(...args) + w.slice(1).toLocaleLowerCase()));
174
- // Transformer.factory('toSentenceCase', () => ({ value }) => value.charAt(0).toUpperCase() + value.slice(1), { enumerable: true });
175
- // Transformer.factory('toLocaleSentenceCase', (...args) => ({ value }) => value.charAt(0).toLocaleUpperCase(...args) + value.slice(1));
176
- // Transformer.factory('toArray', () => ({ value }) => (Array.isArray(value) ? value : [value]), { itemize: false, enumerable: true });
177
- // Transformer.factory('toDate', () => ({ value }) => new Date(value), { enumerable: true, writable: true });
178
- // Transformer.factory('dedupe', () => ({ value }) => uniqWith(value, (b, c) => hashObject(b) === hashObject(c)), { ignoreNull: false, enumerable: true });
179
- // Transformer.factory('dedupeBy', key => ({ value }) => uniqWith(value, (b, c) => hashObject(b[key]) === hashObject(c[key])), { ignoreNull: false, enumerable: true });
180
- // Transformer.factory('timestamp', () => () => Date.now(), { enumerable: true, ignoreNull: false });
181
- // Transformer.factory('createdAt', () => ({ value }) => value || Date.now(), { enumerable: true, ignoreNull: false });
182
- // Transformer.factory('first', () => ({ value }) => (Array.isArray(value) ? value[0] : value), { enumerable: true });
183
- // Transformer.factory('get', path => ({ value }) => get(value, path), { enumerable: true });
184
- // Transformer.factory('set', path => ({ value }) => set({}, path, value), { enumerable: true });
167
+ module.exports.createPresets();