@labdigital/commercetools-mock 1.3.0 → 1.3.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.mjs CHANGED
@@ -75,7 +75,7 @@ var parseExpandClause = (clause) => {
  };
 
  // src/lib/predicateParser.ts
- import perplex from "perplex";
+ import Lexer from "perplex";
  import { Parser } from "pratt";
 
  // src/lib/haversine.ts
@@ -137,7 +137,7 @@ var resolveValue = (obj, val) => {
  }
  return obj[val.value];
  };
- var getLexer = (value) => new perplex(value).token("AND", /and(?![-_a-z0-9]+)/i).token("OR", /or(?![-_a-z0-9]+)/i).token("NOT", /not(?![-_a-z0-9]+)/i).token("WITHIN", /within(?![-_a-z0-9]+)/i).token("IN", /in(?![-_a-z0-9]+)/i).token("MATCHES_IGNORE_CASE", /matches\s+ignore\s+case(?![-_a-z0-9]+)/i).token("CONTAINS", /contains(?![-_a-z0-9]+)/i).token("ALL", /all(?![-_a-z0-9]+)/i).token("ANY", /any(?![-_a-z0-9]+)/i).token("EMPTY", /empty(?![-_a-z0-9]+)/i).token("IS", /is(?![-_a-z0-9]+)/i).token("DEFINED", /defined(?![-_a-z0-9]+)/i).token("FLOAT", /\d+\.\d+/).token("INT", /\d+/).token("VARIABLE", /:([-_A-Za-z0-9]+)/).token("BOOLEAN", /(true|false)/).token("IDENTIFIER", /[-_A-Za-z0-9]+/).token("STRING", /"((?:\\.|[^"\\])*)"/).token("STRING", /'((?:\\.|[^'\\])*)'/).token("COMMA", ",").token("(", "(").token(")", ")").token(">=", ">=").token("<=", "<=").token(">", ">").token("<", "<").token("!=", "!=").token("=", "=").token('"', '"').token("WS", /\s+/, true);
+ var getLexer = (value) => new Lexer(value).token("AND", /and(?![-_a-z0-9]+)/i).token("OR", /or(?![-_a-z0-9]+)/i).token("NOT", /not(?![-_a-z0-9]+)/i).token("WITHIN", /within(?![-_a-z0-9]+)/i).token("IN", /in(?![-_a-z0-9]+)/i).token("MATCHES_IGNORE_CASE", /matches\s+ignore\s+case(?![-_a-z0-9]+)/i).token("CONTAINS", /contains(?![-_a-z0-9]+)/i).token("ALL", /all(?![-_a-z0-9]+)/i).token("ANY", /any(?![-_a-z0-9]+)/i).token("EMPTY", /empty(?![-_a-z0-9]+)/i).token("IS", /is(?![-_a-z0-9]+)/i).token("DEFINED", /defined(?![-_a-z0-9]+)/i).token("FLOAT", /\d+\.\d+/).token("INT", /\d+/).token("VARIABLE", /:([-_A-Za-z0-9]+)/).token("BOOLEAN", /(true|false)/).token("IDENTIFIER", /[-_A-Za-z0-9]+/).token("STRING", /"((?:\\.|[^"\\])*)"/).token("STRING", /'((?:\\.|[^'\\])*)'/).token("COMMA", ",").token("(", "(").token(")", ")").token(">=", ">=").token("<=", "<=").token(">", ">").token("<", "<").token("!=", "!=").token("=", "=").token('"', '"').token("WS", /\s+/, true);
  var generateMatchFunc = (predicate) => {
  const lexer = getLexer(predicate);
  const parser = new Parser(lexer).builder().nud(
@@ -1254,12 +1254,11 @@ var CartRepository = class extends AbstractResourceRepository {
  (x) => x.productId === product?.id && x.variant.id === variant?.id
  );
  if (alreadyAdded) {
- resource.lineItems.map((x) => {
+ resource.lineItems.forEach((x) => {
  if (x.productId === product?.id && x.variant.id === variant?.id) {
  x.quantity += quantity;
  x.totalPrice.centAmount = calculateLineItemTotalPrice(x);
  }
- return x;
  });
  } else {
  if (!variant.prices?.length) {
@@ -1328,18 +1327,16 @@ var CartRepository = class extends AbstractResourceRepository {
  message: `Either lineItemid or lineItemKey needs to be provided.`
  });
  }
- const shouldDelete = !quantity || quantity >= lineItem.quantity;
- if (shouldDelete) {
+ if (quantity === 0) {
  resource.lineItems = resource.lineItems.filter(
  (x) => x.id !== lineItemId
  );
  } else {
- resource.lineItems.map((x) => {
+ resource.lineItems.forEach((x) => {
  if (x.id === lineItemId && quantity) {
- x.quantity -= quantity;
+ x.quantity = quantity;
  x.totalPrice.centAmount = calculateLineItemTotalPrice(x);
  }
- return x;
  });
  }
  resource.totalPrice.centAmount = calculateCartTotalPrice(resource);
@@ -1358,12 +1355,11 @@ var CartRepository = class extends AbstractResourceRepository {
  (x) => x.id !== lineItemId
  );
  } else {
- resource.lineItems.map((x) => {
+ resource.lineItems.forEach((x) => {
  if (x.id === lineItemId && quantity) {
  x.quantity -= quantity;
  x.totalPrice.centAmount = calculateLineItemTotalPrice(x);
  }
- return x;
  });
  }
  resource.totalPrice.centAmount = calculateCartTotalPrice(resource);
@@ -3230,7 +3226,7 @@ var ProductDiscountRepository = class extends AbstractResourceRepository {
  };
 
  // src/lib/projectionSearchFilter.ts
- import perplex2 from "perplex";
+ import Lexer2 from "perplex";
  import Parser2 from "pratt";
  var parseFilterExpression = (filter) => {
  const exprFunc = generateMatchFunc2(filter);
@@ -3240,7 +3236,7 @@ var parseFilterExpression = (filter) => {
  }
  return filterProduct(source, exprFunc);
  };
- var getLexer2 = (value) => new perplex2(value).token("MISSING", /missing(?![-_a-z0-9]+)/i).token("EXISTS", /exists(?![-_a-z0-9]+)/i).token("RANGE", /range(?![-_a-z0-9]+)/i).token("TO", /to(?![-_a-z0-9]+)/i).token("IDENTIFIER", /[-_.a-z]+/i).token("FLOAT", /\d+\.\d+/).token("INT", /\d+/).token("STRING", /"((?:\\.|[^"\\])*)"/).token("STRING", /'((?:\\.|[^'\\])*)'/).token("COMMA", ",").token("STAR", "*").token("(", "(").token(":", ":").token(")", ")").token('"', '"').token("WS", /\s+/, true);
+ var getLexer2 = (value) => new Lexer2(value).token("MISSING", /missing(?![-_a-z0-9]+)/i).token("EXISTS", /exists(?![-_a-z0-9]+)/i).token("RANGE", /range(?![-_a-z0-9]+)/i).token("TO", /to(?![-_a-z0-9]+)/i).token("IDENTIFIER", /[-_.a-z]+/i).token("FLOAT", /\d+\.\d+/).token("INT", /\d+/).token("STRING", /"((?:\\.|[^"\\])*)"/).token("STRING", /'((?:\\.|[^'\\])*)'/).token("COMMA", ",").token("STAR", "*").token("(", "(").token(":", ":").token(")", ")").token('"', '"').token("WS", /\s+/, true);
  var parseFilter = (filter) => {
  const lexer = getLexer2(filter);
  const parser = new Parser2(lexer).builder().nud("IDENTIFIER", 100, (t) => t.token.match).led(":", 100, ({ left, bp }) => {