nodester 0.6.3 → 0.6.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
|
@@ -18,7 +18,7 @@ module.exports = function initNodesterQL() {
|
|
|
18
18
|
return nqlHandle;
|
|
19
19
|
};
|
|
20
20
|
|
|
21
|
-
function nqlHandle(req, res, next) {
|
|
21
|
+
async function nqlHandle(req, res, next) {
|
|
22
22
|
// Object, which will be populated with parsed query.
|
|
23
23
|
req.nquery = {};
|
|
24
24
|
|
|
@@ -37,9 +37,10 @@ function nqlHandle(req, res, next) {
|
|
|
37
37
|
|
|
38
38
|
const decoded = decodeQueryString(queryString);
|
|
39
39
|
const lexer = new QueryLexer(decoded);
|
|
40
|
+
const nquery = await lexer.parse();
|
|
40
41
|
|
|
41
42
|
// Go on!
|
|
42
|
-
req.nquery =
|
|
43
|
+
req.nquery = nquery;
|
|
43
44
|
return next();
|
|
44
45
|
}
|
|
45
46
|
catch(error) {
|
|
@@ -74,27 +74,24 @@ const FN_TOKENS = new Enum({
|
|
|
74
74
|
module.exports = class QueryLexer {
|
|
75
75
|
constructor(queryString='') {
|
|
76
76
|
this.tree = new ModelsTree();
|
|
77
|
-
this.
|
|
78
|
-
this.parse(queryString).toObject()
|
|
79
|
-
:
|
|
80
|
-
{};
|
|
77
|
+
this.queryString = queryString;
|
|
81
78
|
}
|
|
82
79
|
|
|
83
|
-
parse(queryString=
|
|
80
|
+
async parse(queryString=this.queryString, tree=this.tree) {
|
|
84
81
|
if (typeof queryString !== 'string') {
|
|
85
82
|
const err = new TypeError(`Invalid 'queryString'.`);
|
|
86
|
-
|
|
83
|
+
return Promise.reject(err);
|
|
87
84
|
}
|
|
88
85
|
|
|
89
86
|
// You never know if it's encoded or not.
|
|
90
87
|
const decoded = decodeURI(queryString);
|
|
91
88
|
|
|
92
|
-
this.parseIsolatedQuery(decoded, 0, this.tree);
|
|
89
|
+
await this.parseIsolatedQuery(decoded, 0, this.tree);
|
|
93
90
|
|
|
94
|
-
return this.tree.root;
|
|
91
|
+
return Promise.resolve(this.tree.root.toObject());
|
|
95
92
|
}
|
|
96
93
|
|
|
97
|
-
parseIsolatedQuery(queryString='', startAt=0, tree) {
|
|
94
|
+
async parseIsolatedQuery(queryString='', startAt=0, tree) {
|
|
98
95
|
const isSubQuery = tree.node.model !== 'root';
|
|
99
96
|
|
|
100
97
|
// Token is a String, accumulated char-by-char.
|
|
@@ -137,7 +134,7 @@ module.exports = class QueryLexer {
|
|
|
137
134
|
|
|
138
135
|
// Process subquery:
|
|
139
136
|
i++;
|
|
140
|
-
const [ charsCount ] = this.parseIsolatedQuery(queryString, i, tree);
|
|
137
|
+
const [ charsCount ] = await this.parseIsolatedQuery(queryString, i, tree);
|
|
141
138
|
i += charsCount;
|
|
142
139
|
|
|
143
140
|
previousActive = model;
|
|
@@ -159,7 +156,7 @@ module.exports = class QueryLexer {
|
|
|
159
156
|
// If token is empty, error:
|
|
160
157
|
if (token === '') {
|
|
161
158
|
const err = UnexpectedCharError(i, char);
|
|
162
|
-
|
|
159
|
+
return Promise.reject(err);
|
|
163
160
|
}
|
|
164
161
|
|
|
165
162
|
// Structure of a value depends on OP:
|
|
@@ -183,18 +180,18 @@ module.exports = class QueryLexer {
|
|
|
183
180
|
// If this param is already in WHERE,
|
|
184
181
|
// treat it like SQL AND:
|
|
185
182
|
if (tree.node.where[tree.node.activeParam]) {
|
|
186
|
-
// If this OP is already in the set
|
|
183
|
+
// If this OP is already in the set,
|
|
184
|
+
// concatenate arrays:
|
|
187
185
|
if (tree.node.where[tree.node.activeParam][tree.node.op]) {
|
|
188
|
-
|
|
189
|
-
|
|
190
|
-
|
|
191
|
-
|
|
186
|
+
Array.prototype.push.apply(
|
|
187
|
+
tree.node.where[tree.node.activeParam][tree.node.op],
|
|
188
|
+
fullOp[tree.node.activeParam][tree.node.op]
|
|
189
|
+
);
|
|
192
190
|
}
|
|
193
191
|
else {
|
|
194
|
-
tree.node.where[tree.node.activeParam]
|
|
195
|
-
|
|
196
|
-
|
|
197
|
-
}
|
|
192
|
+
const existingOps = tree.node.where[tree.node.activeParam];
|
|
193
|
+
const newOps = fullOp[tree.node.activeParam];
|
|
194
|
+
tree.node.where[tree.node.activeParam] = Object.assign({}, existingOps, newOps);
|
|
198
195
|
}
|
|
199
196
|
}
|
|
200
197
|
else {
|
|
@@ -311,7 +308,7 @@ module.exports = class QueryLexer {
|
|
|
311
308
|
}
|
|
312
309
|
|
|
313
310
|
const err = UnexpectedCharError(i, char);
|
|
314
|
-
|
|
311
|
+
return Promise.reject(err);
|
|
315
312
|
}
|
|
316
313
|
|
|
317
314
|
// . can mean:
|
|
@@ -342,7 +339,7 @@ module.exports = class QueryLexer {
|
|
|
342
339
|
}
|
|
343
340
|
|
|
344
341
|
const err = UnexpectedCharError(i, char);
|
|
345
|
-
|
|
342
|
+
return Promise.reject(err);
|
|
346
343
|
}
|
|
347
344
|
|
|
348
345
|
// + can only mean horizontal include:
|
|
@@ -365,7 +362,7 @@ module.exports = class QueryLexer {
|
|
|
365
362
|
|
|
366
363
|
if (tree.node.hasParent === false) {
|
|
367
364
|
const err = UnexpectedCharError(i, char);
|
|
368
|
-
|
|
365
|
+
return Promise.reject(err);
|
|
369
366
|
}
|
|
370
367
|
|
|
371
368
|
tree.up();
|
|
@@ -383,7 +380,7 @@ module.exports = class QueryLexer {
|
|
|
383
380
|
// If any OP at all:
|
|
384
381
|
if (!!tree.node.op) {
|
|
385
382
|
const err = MissingCharError(i+1, ')');
|
|
386
|
-
|
|
383
|
+
return Promise.reject(err);
|
|
387
384
|
}
|
|
388
385
|
|
|
389
386
|
// If end of a key=value pair:
|
|
@@ -407,7 +404,7 @@ module.exports = class QueryLexer {
|
|
|
407
404
|
// then it's a syntactic error:
|
|
408
405
|
if (token.length > 0) {
|
|
409
406
|
const err = new NodesterQueryError(`unrecognized char at position ${ i }: Unknown token '${ token }'`);
|
|
410
|
-
|
|
407
|
+
return Promise.reject(err);
|
|
411
408
|
}
|
|
412
409
|
|
|
413
410
|
// Reset:
|
|
@@ -455,7 +452,7 @@ module.exports = class QueryLexer {
|
|
|
455
452
|
|
|
456
453
|
// Unknown case:
|
|
457
454
|
const err = UnexpectedCharError(i, char);
|
|
458
|
-
|
|
455
|
+
return Promise.reject(err);
|
|
459
456
|
}
|
|
460
457
|
|
|
461
458
|
// [ can mean:
|
|
@@ -468,7 +465,7 @@ module.exports = class QueryLexer {
|
|
|
468
465
|
}
|
|
469
466
|
else {
|
|
470
467
|
const err = UnexpectedCharError(i - token.length, token);
|
|
471
|
-
|
|
468
|
+
return Promise.reject(err);
|
|
472
469
|
}
|
|
473
470
|
}
|
|
474
471
|
|
|
@@ -487,7 +484,7 @@ module.exports = class QueryLexer {
|
|
|
487
484
|
tree.node.op !== OP_TOKENS.NOT_IN
|
|
488
485
|
) {
|
|
489
486
|
const err = UnexpectedCharError(i, char);
|
|
490
|
-
|
|
487
|
+
return Promise.reject(err);
|
|
491
488
|
}
|
|
492
489
|
|
|
493
490
|
// Token is the last element in this array:
|
|
@@ -515,7 +512,7 @@ module.exports = class QueryLexer {
|
|
|
515
512
|
|
|
516
513
|
if (isSubQuery === true && param === PARAM_TOKENS.INCLUDES) {
|
|
517
514
|
const err = new NodesterQueryError(`'include' is forbidden inside subquery (position ${ i }). Use: 'model.submodel' or 'model.submodel1+submodel2'.`);
|
|
518
|
-
|
|
515
|
+
return Promise.reject(err);
|
|
519
516
|
}
|
|
520
517
|
|
|
521
518
|
tree.node.activeParam = param;
|
|
@@ -533,19 +530,19 @@ module.exports = class QueryLexer {
|
|
|
533
530
|
// haven't up from 'in':
|
|
534
531
|
if (tree.node.op === 'in') {
|
|
535
532
|
const err = MissingCharError(i+1, ']');
|
|
536
|
-
|
|
533
|
+
return Promise.reject(err);
|
|
537
534
|
}
|
|
538
535
|
|
|
539
536
|
// If any Function:
|
|
540
537
|
if (!!tree.node.fn) {
|
|
541
538
|
const err = MissingCharError(i+1, ')');
|
|
542
|
-
|
|
539
|
+
return Promise.reject(err);
|
|
543
540
|
}
|
|
544
541
|
|
|
545
542
|
// If any OP at all:
|
|
546
543
|
if (!!tree.node.op) {
|
|
547
544
|
const err = MissingCharError(i+1, ')');
|
|
548
|
-
|
|
545
|
+
return Promise.reject(err);
|
|
549
546
|
}
|
|
550
547
|
|
|
551
548
|
this.setNodeParam(tree.node, token, value);
|
|
@@ -558,7 +555,8 @@ module.exports = class QueryLexer {
|
|
|
558
555
|
}
|
|
559
556
|
}
|
|
560
557
|
|
|
561
|
-
return
|
|
558
|
+
// Must return its portion of chars.
|
|
559
|
+
return Promise.resolve([ queryString.length - startAt ]);
|
|
562
560
|
}
|
|
563
561
|
|
|
564
562
|
parseParamFromToken(token) {
|
package/lib/models/mixins.js
CHANGED
|
@@ -212,10 +212,8 @@ async function _updateOne(
|
|
|
212
212
|
|
|
213
213
|
// Will contain data from parent instance and associations.
|
|
214
214
|
const fullInstanceData = instance.toJSON();
|
|
215
|
+
const parentData = { ...data };
|
|
215
216
|
|
|
216
|
-
const parentData = {
|
|
217
|
-
...data
|
|
218
|
-
}
|
|
219
217
|
for (let includeConfig of include) {
|
|
220
218
|
const { association } = includeConfig;
|
|
221
219
|
|
|
@@ -223,6 +221,7 @@ async function _updateOne(
|
|
|
223
221
|
continue;
|
|
224
222
|
}
|
|
225
223
|
|
|
224
|
+
// Remove association from parentData (handled separately)
|
|
226
225
|
delete parentData[association];
|
|
227
226
|
|
|
228
227
|
const associationDefinition = this.associations[association];
|
|
@@ -246,6 +245,15 @@ async function _updateOne(
|
|
|
246
245
|
// If association type is HasMany or HasOne (We don't work with any other):
|
|
247
246
|
switch(associationType) {
|
|
248
247
|
case 'HasMany': {
|
|
248
|
+
// Handle empty array (remove all old associations):
|
|
249
|
+
if (Array.isArray(includeData) && includeData.length === 0) {
|
|
250
|
+
const where = {
|
|
251
|
+
[foreignKey]: instance.id
|
|
252
|
+
}
|
|
253
|
+
await associatedModel.destroy({ where });
|
|
254
|
+
fullInstanceData[association] = [];
|
|
255
|
+
}
|
|
256
|
+
|
|
249
257
|
const promises = includeData.map(singleData => {
|
|
250
258
|
// Note: for now we are only able to work with a model with single PrimaryKey:
|
|
251
259
|
const where = {
|
|
@@ -263,16 +271,25 @@ async function _updateOne(
|
|
|
263
271
|
}
|
|
264
272
|
|
|
265
273
|
case 'HasOne': {
|
|
266
|
-
//
|
|
267
|
-
|
|
268
|
-
|
|
274
|
+
// Handle null case (remove old association)
|
|
275
|
+
if (includeData === null) {
|
|
276
|
+
const where = {
|
|
277
|
+
[foreignKey]: instance.id
|
|
278
|
+
}
|
|
279
|
+
await associatedModel.destroy({ where });
|
|
280
|
+
fullInstanceData[association] = null;
|
|
281
|
+
}
|
|
282
|
+
else {
|
|
283
|
+
// Note: for now we are only able to work with a model with single PrimaryKey:
|
|
284
|
+
const where = {
|
|
285
|
+
[pkField]: includeData[pkField]
|
|
286
|
+
}
|
|
287
|
+
fullInstanceData[association] = await associatedModel.updateOne(
|
|
288
|
+
where,
|
|
289
|
+
includeData,
|
|
290
|
+
associationUpdateOpts
|
|
291
|
+
);
|
|
269
292
|
}
|
|
270
|
-
fullInstanceData[association] = await associatedModel.updateOne(
|
|
271
|
-
where,
|
|
272
|
-
includeData,
|
|
273
|
-
associationUpdateOpts
|
|
274
|
-
);
|
|
275
|
-
|
|
276
293
|
continue;
|
|
277
294
|
}
|
|
278
295
|
|
package/package.json
CHANGED
package/tests/nql.test.js
CHANGED
|
@@ -23,9 +23,9 @@ describe('nodester Query Language', () => {
|
|
|
23
23
|
'id=10&position=4&limit=3&skip=10&order=desc&order_by=index&a=id,content,position,created_at',
|
|
24
24
|
];
|
|
25
25
|
|
|
26
|
-
it('Simple where', () => {
|
|
26
|
+
it('Simple where', async () => {
|
|
27
27
|
const lexer = new QueryLexer( queryStrings[0] );
|
|
28
|
-
const result = lexer.
|
|
28
|
+
const result = await lexer.parse();
|
|
29
29
|
|
|
30
30
|
const tree = new ModelsTree();
|
|
31
31
|
tree.node.addWhere({ id: ['10'] });
|
|
@@ -34,9 +34,9 @@ describe('nodester Query Language', () => {
|
|
|
34
34
|
expect(result).toMatchObject(expected);
|
|
35
35
|
});
|
|
36
36
|
|
|
37
|
-
it('Only certain attributes', () => {
|
|
37
|
+
it('Only certain attributes', async () => {
|
|
38
38
|
const lexer = new QueryLexer( queryStrings[1] );
|
|
39
|
-
const result = lexer.
|
|
39
|
+
const result = await lexer.parse();
|
|
40
40
|
|
|
41
41
|
const tree = new ModelsTree();
|
|
42
42
|
tree.node.attributes = [ 'id', 'text' ];
|
|
@@ -45,9 +45,9 @@ describe('nodester Query Language', () => {
|
|
|
45
45
|
expect(result).toMatchObject(expected);
|
|
46
46
|
});
|
|
47
47
|
|
|
48
|
-
test('All possible params', () => {
|
|
48
|
+
test('All possible params', async () => {
|
|
49
49
|
const lexer = new QueryLexer( queryStrings[2] );
|
|
50
|
-
const result = lexer.
|
|
50
|
+
const result = await lexer.parse();
|
|
51
51
|
|
|
52
52
|
|
|
53
53
|
const tree = new ModelsTree();
|
|
@@ -86,9 +86,9 @@ describe('nodester Query Language', () => {
|
|
|
86
86
|
'separated-includes': 'includes=comments(order=rand)&id=7&limit=3&includes=users(a=id,content)',
|
|
87
87
|
};
|
|
88
88
|
|
|
89
|
-
test('Simple includes', () => {
|
|
89
|
+
test('Simple includes', async () => {
|
|
90
90
|
const lexer = new QueryLexer( queryStrings['simple-includes'] );
|
|
91
|
-
const result = lexer.
|
|
91
|
+
const result = await lexer.parse();
|
|
92
92
|
|
|
93
93
|
|
|
94
94
|
const tree = new ModelsTree();
|
|
@@ -99,9 +99,9 @@ describe('nodester Query Language', () => {
|
|
|
99
99
|
expect(result).toMatchObject(expected);
|
|
100
100
|
});
|
|
101
101
|
|
|
102
|
-
test('Include with all possible params', () => {
|
|
102
|
+
test('Include with all possible params', async () => {
|
|
103
103
|
const lexer = new QueryLexer( queryStrings['include-with-params'] );
|
|
104
|
-
const result = lexer.
|
|
104
|
+
const result = await lexer.parse();
|
|
105
105
|
|
|
106
106
|
const tree = new ModelsTree();
|
|
107
107
|
tree.include('comments').use('comments');
|
|
@@ -116,9 +116,9 @@ describe('nodester Query Language', () => {
|
|
|
116
116
|
expect(result).toMatchObject(expected);
|
|
117
117
|
});
|
|
118
118
|
|
|
119
|
-
test('2 horizontals', () => {
|
|
119
|
+
test('2 horizontals', async () => {
|
|
120
120
|
const lexer = new QueryLexer( queryStrings['2-horizontals'] );
|
|
121
|
-
const result = lexer.
|
|
121
|
+
const result = await lexer.parse();
|
|
122
122
|
|
|
123
123
|
|
|
124
124
|
const tree = new ModelsTree();
|
|
@@ -130,11 +130,11 @@ describe('nodester Query Language', () => {
|
|
|
130
130
|
expect(result).toMatchObject(expected);
|
|
131
131
|
});
|
|
132
132
|
|
|
133
|
-
test('4 horizontals', () => {
|
|
133
|
+
test('4 horizontals', async () => {
|
|
134
134
|
// in=categories,replies.users,comments(order_by=position&order=desc),users.avatars
|
|
135
135
|
|
|
136
136
|
const lexer = new QueryLexer( queryStrings['4-horizontals'] );
|
|
137
|
-
const result = lexer.
|
|
137
|
+
const result = await lexer.parse();
|
|
138
138
|
|
|
139
139
|
|
|
140
140
|
const tree = new ModelsTree();
|
|
@@ -161,9 +161,9 @@ describe('nodester Query Language', () => {
|
|
|
161
161
|
expect(result).toMatchObject(expected);
|
|
162
162
|
});
|
|
163
163
|
|
|
164
|
-
test('Horizontals queried', () => {
|
|
164
|
+
test('Horizontals queried', async () => {
|
|
165
165
|
const lexer = new QueryLexer( queryStrings['horizontals-queried'] );
|
|
166
|
-
const result = lexer.
|
|
166
|
+
const result = await lexer.parse();
|
|
167
167
|
|
|
168
168
|
|
|
169
169
|
const tree = new ModelsTree();
|
|
@@ -181,9 +181,9 @@ describe('nodester Query Language', () => {
|
|
|
181
181
|
expect(result).toMatchObject(expected);
|
|
182
182
|
});
|
|
183
183
|
|
|
184
|
-
test('Horizontals queried №2', () => {
|
|
184
|
+
test('Horizontals queried №2', async () => {
|
|
185
185
|
const lexer = new QueryLexer( queryStrings['horizontals-queried-2'] );
|
|
186
|
-
const result = lexer.
|
|
186
|
+
const result = await lexer.parse();
|
|
187
187
|
|
|
188
188
|
const tree = new ModelsTree();
|
|
189
189
|
tree.include('comments').use('comments');
|
|
@@ -199,9 +199,9 @@ describe('nodester Query Language', () => {
|
|
|
199
199
|
expect(result).toMatchObject(expected);
|
|
200
200
|
});
|
|
201
201
|
|
|
202
|
-
test('Horizontals queried №3', () => {
|
|
202
|
+
test('Horizontals queried №3', async () => {
|
|
203
203
|
const lexer = new QueryLexer( queryStrings['horizontals-queried-3'] );
|
|
204
|
-
const result = lexer.
|
|
204
|
+
const result = await lexer.parse();
|
|
205
205
|
|
|
206
206
|
const tree = new ModelsTree();
|
|
207
207
|
tree.include('reactions');
|
|
@@ -226,9 +226,9 @@ describe('nodester Query Language', () => {
|
|
|
226
226
|
expect(result).toMatchObject(expected);
|
|
227
227
|
});
|
|
228
228
|
|
|
229
|
-
test('Separated includes"', () => {
|
|
229
|
+
test('Separated includes"', async () => {
|
|
230
230
|
const lexer = new QueryLexer( queryStrings['separated-includes'] );
|
|
231
|
-
const result = lexer.
|
|
231
|
+
const result = await lexer.parse();
|
|
232
232
|
|
|
233
233
|
const tree = new ModelsTree();
|
|
234
234
|
tree.node.addWhere({ id: ['7'] });
|
|
@@ -262,9 +262,9 @@ describe('nodester Query Language', () => {
|
|
|
262
262
|
'includes=comments(order=desc).users+likes(order=rand&order_by=position)&id=1000',
|
|
263
263
|
];
|
|
264
264
|
|
|
265
|
-
test('Simple subinclude', () => {
|
|
265
|
+
test('Simple subinclude', async () => {
|
|
266
266
|
const lexer = new QueryLexer( queryStrings[0] );
|
|
267
|
-
const result = lexer.
|
|
267
|
+
const result = await lexer.parse();
|
|
268
268
|
|
|
269
269
|
|
|
270
270
|
const tree = new ModelsTree();
|
|
@@ -275,9 +275,9 @@ describe('nodester Query Language', () => {
|
|
|
275
275
|
expect(result).toMatchObject(expected);
|
|
276
276
|
});
|
|
277
277
|
|
|
278
|
-
test('Deep subincludes', () => {
|
|
278
|
+
test('Deep subincludes', async () => {
|
|
279
279
|
const lexer = new QueryLexer( queryStrings[1] );
|
|
280
|
-
const result = lexer.
|
|
280
|
+
const result = await lexer.parse();
|
|
281
281
|
|
|
282
282
|
|
|
283
283
|
const tree = new ModelsTree();
|
|
@@ -291,9 +291,9 @@ describe('nodester Query Language', () => {
|
|
|
291
291
|
expect(result).toMatchObject(expected);
|
|
292
292
|
});
|
|
293
293
|
|
|
294
|
-
test('Simple horizontal subinclude, "+" syntaxis"', () => {
|
|
294
|
+
test('Simple horizontal subinclude, "+" syntaxis"', async () => {
|
|
295
295
|
const lexer = new QueryLexer( queryStrings[2] );
|
|
296
|
-
const result = lexer.
|
|
296
|
+
const result = await lexer.parse();
|
|
297
297
|
|
|
298
298
|
|
|
299
299
|
const tree = new ModelsTree();
|
|
@@ -305,9 +305,9 @@ describe('nodester Query Language', () => {
|
|
|
305
305
|
expect(result).toMatchObject(expected);
|
|
306
306
|
});
|
|
307
307
|
|
|
308
|
-
test('Subinclude query', () => {
|
|
308
|
+
test('Subinclude query', async () => {
|
|
309
309
|
const lexer = new QueryLexer( queryStrings[3] );
|
|
310
|
-
const result = lexer.
|
|
310
|
+
const result = await lexer.parse();
|
|
311
311
|
|
|
312
312
|
|
|
313
313
|
const tree = new ModelsTree();
|
|
@@ -320,9 +320,9 @@ describe('nodester Query Language', () => {
|
|
|
320
320
|
expect(result).toMatchObject(expected);
|
|
321
321
|
});
|
|
322
322
|
|
|
323
|
-
test('Complex subincludes query, "+" syntaxis', () => {
|
|
323
|
+
test('Complex subincludes query, "+" syntaxis', async () => {
|
|
324
324
|
const lexer = new QueryLexer( queryStrings[4] );
|
|
325
|
-
const result = lexer.
|
|
325
|
+
const result = await lexer.parse();
|
|
326
326
|
|
|
327
327
|
|
|
328
328
|
const tree = new ModelsTree();
|
|
@@ -348,9 +348,9 @@ describe('nodester Query Language', () => {
|
|
|
348
348
|
'|(index=2,position=5)',
|
|
349
349
|
];
|
|
350
350
|
|
|
351
|
-
test('"OR" simple', () => {
|
|
351
|
+
test('"OR" simple', async () => {
|
|
352
352
|
const lexer = new QueryLexer( queryStrings[0] );
|
|
353
|
-
const result = lexer.
|
|
353
|
+
const result = await lexer.parse();
|
|
354
354
|
|
|
355
355
|
const tree = new ModelsTree();
|
|
356
356
|
tree.node.addWhere({ or: [ { index: ['2'] }, { position: ['5'] } ] });
|
|
@@ -359,9 +359,9 @@ describe('nodester Query Language', () => {
|
|
|
359
359
|
expect(result).toMatchObject(expected);
|
|
360
360
|
});
|
|
361
361
|
|
|
362
|
-
test('"OR" short', () => {
|
|
362
|
+
test('"OR" short', async () => {
|
|
363
363
|
const lexer = new QueryLexer( queryStrings[1] );
|
|
364
|
-
const result = lexer.
|
|
364
|
+
const result = await lexer.parse();
|
|
365
365
|
|
|
366
366
|
const tree = new ModelsTree();
|
|
367
367
|
tree.node.addWhere({ or: [ { index: ['2'] }, { position: ['5'] } ] });
|
|
@@ -381,9 +381,9 @@ describe('nodester Query Language', () => {
|
|
|
381
381
|
'includes=comments(id=not(7))'
|
|
382
382
|
];
|
|
383
383
|
|
|
384
|
-
test('"NOT" simple', () => {
|
|
384
|
+
test('"NOT" simple', async () => {
|
|
385
385
|
const lexer = new QueryLexer( queryStrings[0] );
|
|
386
|
-
const result = lexer.
|
|
386
|
+
const result = await lexer.parse();
|
|
387
387
|
|
|
388
388
|
const tree = new ModelsTree();
|
|
389
389
|
tree.node.addWhere({ key: { not: ['main'] } });
|
|
@@ -392,9 +392,9 @@ describe('nodester Query Language', () => {
|
|
|
392
392
|
expect(result).toMatchObject(expected);
|
|
393
393
|
});
|
|
394
394
|
|
|
395
|
-
test('"NOT" short', () => {
|
|
395
|
+
test('"NOT" short', async () => {
|
|
396
396
|
const lexer = new QueryLexer( queryStrings[1] );
|
|
397
|
-
const result = lexer.
|
|
397
|
+
const result = await lexer.parse();
|
|
398
398
|
|
|
399
399
|
const tree = new ModelsTree();
|
|
400
400
|
tree.node.addWhere({ key: { not: ['main'] } });
|
|
@@ -403,9 +403,9 @@ describe('nodester Query Language', () => {
|
|
|
403
403
|
expect(result).toMatchObject(expected);
|
|
404
404
|
});
|
|
405
405
|
|
|
406
|
-
test('"NOT" inside includes', () => {
|
|
406
|
+
test('"NOT" inside includes', async () => {
|
|
407
407
|
const lexer = new QueryLexer( queryStrings[2] );
|
|
408
|
-
const result = lexer.
|
|
408
|
+
const result = await lexer.parse();
|
|
409
409
|
|
|
410
410
|
const tree = new ModelsTree();
|
|
411
411
|
tree.include('comments').use('comments');
|
|
@@ -428,9 +428,9 @@ describe('nodester Query Language', () => {
|
|
|
428
428
|
'title=!like(some_text)',
|
|
429
429
|
];
|
|
430
430
|
|
|
431
|
-
test('"Like" simple', () => {
|
|
431
|
+
test('"Like" simple', async () => {
|
|
432
432
|
const lexer = new QueryLexer( queryStrings[0] );
|
|
433
|
-
const result = lexer.
|
|
433
|
+
const result = await lexer.parse();
|
|
434
434
|
|
|
435
435
|
const tree = new ModelsTree();
|
|
436
436
|
tree.node.addWhere({ title: { like: ['some_text'] }});
|
|
@@ -439,9 +439,9 @@ describe('nodester Query Language', () => {
|
|
|
439
439
|
expect(result).toMatchObject(expected);
|
|
440
440
|
});
|
|
441
441
|
|
|
442
|
-
test('"NotLike" simple', () => {
|
|
442
|
+
test('"NotLike" simple', async () => {
|
|
443
443
|
const lexer = new QueryLexer( queryStrings[1] );
|
|
444
|
-
const result = lexer.
|
|
444
|
+
const result = await lexer.parse();
|
|
445
445
|
|
|
446
446
|
const tree = new ModelsTree();
|
|
447
447
|
tree.node.addWhere({ title: { notLike: ['some_text'] }});
|
|
@@ -450,9 +450,9 @@ describe('nodester Query Language', () => {
|
|
|
450
450
|
expect(result).toMatchObject(expected);
|
|
451
451
|
});
|
|
452
452
|
|
|
453
|
-
test('"NotLike" short', () => {
|
|
453
|
+
test('"NotLike" short', async () => {
|
|
454
454
|
const lexer = new QueryLexer( queryStrings[2] );
|
|
455
|
-
const result = lexer.
|
|
455
|
+
const result = await lexer.parse();
|
|
456
456
|
|
|
457
457
|
const tree = new ModelsTree();
|
|
458
458
|
tree.node.addWhere({ title: { notLike: ['some_text'] }});
|
|
@@ -471,9 +471,9 @@ describe('nodester Query Language', () => {
|
|
|
471
471
|
'status=[REVIEWED,ANSWERED]&limit=3',
|
|
472
472
|
];
|
|
473
473
|
|
|
474
|
-
test('"IN" simple', () => {
|
|
474
|
+
test('"IN" simple', async () => {
|
|
475
475
|
const lexer = new QueryLexer( queryStrings[0] );
|
|
476
|
-
const result = lexer.
|
|
476
|
+
const result = await lexer.parse();
|
|
477
477
|
|
|
478
478
|
const tree = new ModelsTree();
|
|
479
479
|
tree.node.addWhere({ status: { in: ['REVIEWED', 'ANSWERED'] }});
|
|
@@ -482,9 +482,9 @@ describe('nodester Query Language', () => {
|
|
|
482
482
|
expect(result).toMatchObject(expected);
|
|
483
483
|
});
|
|
484
484
|
|
|
485
|
-
test('"IN" and "limit" clause', () => {
|
|
485
|
+
test('"IN" and "limit" clause', async () => {
|
|
486
486
|
const lexer = new QueryLexer( queryStrings[1] );
|
|
487
|
-
const result = lexer.
|
|
487
|
+
const result = await lexer.parse();
|
|
488
488
|
|
|
489
489
|
const tree = new ModelsTree();
|
|
490
490
|
tree.node.limit = 3;
|
|
@@ -513,9 +513,9 @@ describe('nodester Query Language', () => {
|
|
|
513
513
|
'in=comments.likes(index=gt(60))'
|
|
514
514
|
];
|
|
515
515
|
|
|
516
|
-
test('Greater than', () => {
|
|
516
|
+
test('Greater than', async () => {
|
|
517
517
|
const lexer = new QueryLexer( queryStrings[0] );
|
|
518
|
-
const result = lexer.
|
|
518
|
+
const result = await lexer.parse();
|
|
519
519
|
|
|
520
520
|
|
|
521
521
|
const tree = new ModelsTree();
|
|
@@ -525,9 +525,9 @@ describe('nodester Query Language', () => {
|
|
|
525
525
|
expect(result).toMatchObject(expected);
|
|
526
526
|
});
|
|
527
527
|
|
|
528
|
-
test('Greater than or equal to', () => {
|
|
528
|
+
test('Greater than or equal to', async () => {
|
|
529
529
|
const lexer = new QueryLexer( queryStrings[1] );
|
|
530
|
-
const result = lexer.
|
|
530
|
+
const result = await lexer.parse();
|
|
531
531
|
|
|
532
532
|
|
|
533
533
|
const tree = new ModelsTree();
|
|
@@ -537,9 +537,9 @@ describe('nodester Query Language', () => {
|
|
|
537
537
|
expect(result).toMatchObject(expected);
|
|
538
538
|
});
|
|
539
539
|
|
|
540
|
-
test('Lower than', () => {
|
|
540
|
+
test('Lower than', async () => {
|
|
541
541
|
const lexer = new QueryLexer( queryStrings[2] );
|
|
542
|
-
const result = lexer.
|
|
542
|
+
const result = await lexer.parse();
|
|
543
543
|
|
|
544
544
|
|
|
545
545
|
const tree = new ModelsTree();
|
|
@@ -549,9 +549,9 @@ describe('nodester Query Language', () => {
|
|
|
549
549
|
expect(result).toMatchObject(expected);
|
|
550
550
|
});
|
|
551
551
|
|
|
552
|
-
test('Lower than or equal to', () => {
|
|
552
|
+
test('Lower than or equal to', async () => {
|
|
553
553
|
const lexer = new QueryLexer( queryStrings[3] );
|
|
554
|
-
const result = lexer.
|
|
554
|
+
const result = await lexer.parse();
|
|
555
555
|
|
|
556
556
|
|
|
557
557
|
const tree = new ModelsTree();
|
|
@@ -561,9 +561,9 @@ describe('nodester Query Language', () => {
|
|
|
561
561
|
expect(result).toMatchObject(expected);
|
|
562
562
|
});
|
|
563
563
|
|
|
564
|
-
test('Greater than in subinclude', () => {
|
|
564
|
+
test('Greater than in subinclude', async () => {
|
|
565
565
|
const lexer = new QueryLexer( queryStrings[4] );
|
|
566
|
-
const result = lexer.
|
|
566
|
+
const result = await lexer.parse();
|
|
567
567
|
|
|
568
568
|
|
|
569
569
|
const tree = new ModelsTree();
|
|
@@ -586,9 +586,9 @@ describe('nodester Query Language', () => {
|
|
|
586
586
|
and_in_subincludes_2: 'title=like(book),notLike(book #3)&in=comments(text=like(hi),notLike(hi!))',
|
|
587
587
|
}
|
|
588
588
|
|
|
589
|
-
test('AND (simple)', () => {
|
|
589
|
+
test('AND (simple)', async () => {
|
|
590
590
|
const lexer = new QueryLexer( queryStrings.and_simple );
|
|
591
|
-
const result = lexer.
|
|
591
|
+
const result = await lexer.parse();
|
|
592
592
|
|
|
593
593
|
const tree = new ModelsTree();
|
|
594
594
|
tree.node.addWhere({ id: { gte: ['2'], lt: ['5'] }});
|
|
@@ -597,9 +597,9 @@ describe('nodester Query Language', () => {
|
|
|
597
597
|
expect(result).toMatchObject(expected);
|
|
598
598
|
});
|
|
599
599
|
|
|
600
|
-
test('AND (more OP)', () => {
|
|
600
|
+
test('AND (more OP)', async () => {
|
|
601
601
|
const lexer = new QueryLexer( queryStrings.and_more_op );
|
|
602
|
-
const result = lexer.
|
|
602
|
+
const result = await lexer.parse();
|
|
603
603
|
|
|
604
604
|
const tree = new ModelsTree();
|
|
605
605
|
tree.node.addWhere({
|
|
@@ -610,9 +610,9 @@ describe('nodester Query Language', () => {
|
|
|
610
610
|
expect(result).toMatchObject(expected);
|
|
611
611
|
});
|
|
612
612
|
|
|
613
|
-
test('AND (in subincludes #0)', () => {
|
|
613
|
+
test('AND (in subincludes #0)', async () => {
|
|
614
614
|
const lexer = new QueryLexer( queryStrings.and_in_subincludes_0 );
|
|
615
|
-
const result = lexer.
|
|
615
|
+
const result = await lexer.parse();
|
|
616
616
|
|
|
617
617
|
const tree = new ModelsTree();
|
|
618
618
|
tree.include('comments').use('comments');
|
|
@@ -624,9 +624,9 @@ describe('nodester Query Language', () => {
|
|
|
624
624
|
expect(result).toMatchObject(expected);
|
|
625
625
|
});
|
|
626
626
|
|
|
627
|
-
test('AND (in subincludes #1)', () => {
|
|
627
|
+
test('AND (in subincludes #1)', async () => {
|
|
628
628
|
const lexer = new QueryLexer( queryStrings.and_in_subincludes_1 );
|
|
629
|
-
const result = lexer.
|
|
629
|
+
const result = await lexer.parse();
|
|
630
630
|
|
|
631
631
|
const tree = new ModelsTree();
|
|
632
632
|
tree.node.addWhere({
|
|
@@ -641,9 +641,9 @@ describe('nodester Query Language', () => {
|
|
|
641
641
|
expect(result).toMatchObject(expected);
|
|
642
642
|
});
|
|
643
643
|
|
|
644
|
-
test('AND (in subincludes #2)', () => {
|
|
644
|
+
test('AND (in subincludes #2)', async () => {
|
|
645
645
|
const lexer = new QueryLexer( queryStrings.and_in_subincludes_2 );
|
|
646
|
-
const result = lexer.
|
|
646
|
+
const result = await lexer.parse();
|
|
647
647
|
|
|
648
648
|
const tree = new ModelsTree();
|
|
649
649
|
tree.node.addWhere({
|
|
@@ -668,9 +668,9 @@ describe('nodester Query Language', () => {
|
|
|
668
668
|
count_and_includes: 'fn=count(comments)&in=comments',
|
|
669
669
|
}
|
|
670
670
|
|
|
671
|
-
test('Count (full key name)', () => {
|
|
671
|
+
test('Count (full key name)', async () => {
|
|
672
672
|
const lexer = new QueryLexer( queryStrings.count_long );
|
|
673
|
-
const result = lexer.
|
|
673
|
+
const result = await lexer.parse();
|
|
674
674
|
|
|
675
675
|
|
|
676
676
|
const tree = new ModelsTree();
|
|
@@ -683,9 +683,9 @@ describe('nodester Query Language', () => {
|
|
|
683
683
|
expect(result).toMatchObject(expected);
|
|
684
684
|
});
|
|
685
685
|
|
|
686
|
-
test('Count (short key name)', () => {
|
|
686
|
+
test('Count (short key name)', async () => {
|
|
687
687
|
const lexer = new QueryLexer( queryStrings.count_long );
|
|
688
|
-
const result = lexer.
|
|
688
|
+
const result = await lexer.parse();
|
|
689
689
|
|
|
690
690
|
|
|
691
691
|
const tree = new ModelsTree();
|
|
@@ -698,9 +698,9 @@ describe('nodester Query Language', () => {
|
|
|
698
698
|
expect(result).toMatchObject(expected);
|
|
699
699
|
});
|
|
700
700
|
|
|
701
|
-
test('Count and includes', () => {
|
|
701
|
+
test('Count and includes', async () => {
|
|
702
702
|
const lexer = new QueryLexer( queryStrings.count_and_includes );
|
|
703
|
-
const result = lexer.
|
|
703
|
+
const result = await lexer.parse();
|
|
704
704
|
|
|
705
705
|
|
|
706
706
|
const tree = new ModelsTree();
|