@valbuild/server 0.67.0 → 0.68.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -8,10 +8,10 @@ import fsPath__default from 'path';
8
8
  import fs, { promises } from 'fs';
9
9
  import { transform } from 'sucrase';
10
10
  import { VAL_CSS_PATH, VAL_APP_ID, VAL_OVERLAY_ID } from '@valbuild/ui';
11
- import { VAL_ENABLE_COOKIE_NAME, VAL_STATE_COOKIE, VAL_SESSION_COOKIE, Api } from '@valbuild/shared/internal';
11
+ import { Patch, ParentRef, VAL_ENABLE_COOKIE_NAME, VAL_STATE_COOKIE, VAL_SESSION_COOKIE, Api } from '@valbuild/shared/internal';
12
12
  import { createUIRequestHandler } from '@valbuild/ui/server';
13
13
  import crypto$1 from 'crypto';
14
- import z$1, { z } from 'zod';
14
+ import { z } from 'zod';
15
15
  import sizeOf from 'image-size';
16
16
  import { fromError, fromZodError } from 'zod-validation-error';
17
17
 
@@ -1353,11 +1353,11 @@ const tsOps = new TSOps(document => {
1353
1353
  class ValOps {
1354
1354
  /** Sources from val modules, immutable (without patches or anything) */
1355
1355
 
1356
- /** The sha265 / hash of sources + schema + config */
1356
+ /** The sha256 / hash of sources + schema + config */
1357
1357
 
1358
1358
  /** Schema from val modules, immutable */
1359
1359
 
1360
- /** The sha265 / hash of schema + config - if this changes users needs to reload */
1360
+ /** The sha256 / hash of schema + config - if this changes users needs to reload */
1361
1361
 
1362
1362
  constructor(valModules, options) {
1363
1363
  this.valModules = valModules;
@@ -1369,8 +1369,46 @@ class ValOps {
1369
1369
  this.modulesErrors = null;
1370
1370
  }
1371
1371
  hash(input) {
1372
+ if (typeof input === "object") {
1373
+ return this.hashObject(input);
1374
+ }
1372
1375
  return Internal.getSHA256Hash(textEncoder$1.encode(input));
1373
1376
  }
1377
+ hashObject(obj) {
1378
+ const collector = [];
1379
+ this.collectObjectRecursive(obj, collector);
1380
+ return Internal.getSHA256Hash(textEncoder$1.encode(collector.join("")));
1381
+ }
1382
+ collectObjectRecursive(item, collector) {
1383
+ if (typeof item === "string") {
1384
+ collector.push(`"`, item, `"`);
1385
+ return;
1386
+ } else if (typeof item === "number") {
1387
+ collector.push(item.toString());
1388
+ return;
1389
+ } else if (typeof item === "object") {
1390
+ if (Array.isArray(item)) {
1391
+ collector.push("[");
1392
+ for (let i = 0; i < item.length; i++) {
1393
+ this.collectObjectRecursive(item[i], collector);
1394
+ i !== item.length - 1 && collector.push(",");
1395
+ }
1396
+ collector.push("]");
1397
+ } else {
1398
+ collector.push("{");
1399
+ const keys = Object.keys(item).sort();
1400
+ keys.forEach((key, i) => {
1401
+ collector.push(`"${key}":`);
1402
+ this.collectObjectRecursive(item[key], collector);
1403
+ i !== keys.length - 1 && collector.push(",");
1404
+ });
1405
+ collector.push("}");
1406
+ }
1407
+ return;
1408
+ } else {
1409
+ console.warn("Unknown type encountered when hashing object", typeof item, item);
1410
+ }
1411
+ }
1374
1412
 
1375
1413
  // #region stat
1376
1414
  /**
@@ -1384,7 +1422,7 @@ class ValOps {
1384
1422
  */
1385
1423
 
1386
1424
  // #region initTree
1387
- async initTree() {
1425
+ async initSources() {
1388
1426
  if (this.baseSha === null || this.schemaSha === null || this.sources === null || this.schemas === null || this.modulesErrors === null) {
1389
1427
  const currentModulesErrors = [];
1390
1428
  const addModuleError = (message, index, path) => {
@@ -1478,50 +1516,43 @@ class ValOps {
1478
1516
  const {
1479
1517
  baseSha,
1480
1518
  schemaSha
1481
- } = await this.initTree();
1519
+ } = await this.initSources();
1482
1520
  await this.onInit(baseSha, schemaSha);
1483
1521
  }
1484
1522
  async getBaseSources() {
1485
- return this.initTree().then(result => result.sources);
1523
+ return this.initSources().then(result => result.sources);
1486
1524
  }
1487
1525
  async getSchemas() {
1488
- return this.initTree().then(result => result.schemas);
1526
+ return this.initSources().then(result => result.schemas);
1489
1527
  }
1490
1528
  async getModuleErrors() {
1491
- return this.initTree().then(result => result.moduleErrors);
1529
+ return this.initSources().then(result => result.moduleErrors);
1492
1530
  }
1493
1531
  async getBaseSha() {
1494
- return this.initTree().then(result => result.baseSha);
1532
+ return this.initSources().then(result => result.baseSha);
1495
1533
  }
1496
1534
  async getSchemaSha() {
1497
- return this.initTree().then(result => result.schemaSha);
1535
+ return this.initSources().then(result => result.schemaSha);
1498
1536
  }
1499
1537
 
1500
1538
  // #region analyzePatches
1501
- analyzePatches(patchesById) {
1539
+ analyzePatches(sortedPatches) {
1502
1540
  const patchesByModule = {};
1503
1541
  const fileLastUpdatedByPatchId = {};
1504
- for (const [patchIdS, {
1505
- path,
1506
- patch,
1507
- createdAt: created_at
1508
- }] of Object.entries(patchesById)) {
1509
- const patchId = patchIdS;
1510
- for (const op of patch) {
1542
+ for (const patch of sortedPatches) {
1543
+ for (const op of patch.patch) {
1511
1544
  if (op.op === "file") {
1512
- fileLastUpdatedByPatchId[op.filePath] = patchId;
1545
+ const filePath = op.filePath;
1546
+ fileLastUpdatedByPatchId[filePath] = patch.patchId;
1513
1547
  }
1548
+ const path = patch.path;
1549
+ if (!patchesByModule[path]) {
1550
+ patchesByModule[path] = [];
1551
+ }
1552
+ patchesByModule[path].push({
1553
+ patchId: patch.patchId
1554
+ });
1514
1555
  }
1515
- if (!patchesByModule[path]) {
1516
- patchesByModule[path] = [];
1517
- }
1518
- patchesByModule[path].push({
1519
- patchId,
1520
- createdAt: created_at
1521
- });
1522
- }
1523
- for (const path in patchesByModule) {
1524
- patchesByModule[path].sort((a, b) => a.createdAt.localeCompare(b.createdAt));
1525
1556
  }
1526
1557
  return {
1527
1558
  patchesByModule,
@@ -1530,11 +1561,11 @@ class ValOps {
1530
1561
  }
1531
1562
 
1532
1563
  // #region getTree
1533
- async getTree(analysis) {
1564
+ async getSources(analysis) {
1534
1565
  if (!analysis) {
1535
1566
  const {
1536
1567
  sources
1537
- } = await this.initTree();
1568
+ } = await this.initSources();
1538
1569
  return {
1539
1570
  sources,
1540
1571
  errors: {}
@@ -1542,76 +1573,72 @@ class ValOps {
1542
1573
  }
1543
1574
  const {
1544
1575
  sources
1545
- } = await this.initTree();
1576
+ } = await this.initSources();
1546
1577
  const patchedSources = {};
1547
1578
  const errors = {};
1548
- for (const [pathS, patches] of Object.entries(analysis.patchesByModule)) {
1549
- const path = pathS;
1579
+ for (const patchData of analysis.patches) {
1580
+ const path = patchData.path;
1550
1581
  if (!sources[path]) {
1551
1582
  if (!errors[path]) {
1552
1583
  errors[path] = [];
1553
1584
  }
1554
- errors[path].push(...patches.map(({
1555
- patchId
1556
- }) => ({
1557
- patchId,
1558
- invalidPath: true,
1585
+ console.error("Module not found", path);
1586
+ errors[path].push({
1587
+ patchId: patchData.patchId,
1559
1588
  skipped: true,
1560
- error: new PatchError(`Module at path: '${path}' not found`)
1561
- })));
1589
+ error: new PatchError(`Module not found`)
1590
+ });
1591
+ continue;
1562
1592
  }
1563
- patchedSources[path] = sources[path];
1564
- for (const {
1565
- patchId
1566
- } of patches) {
1567
- if (errors[path]) {
1593
+ if (!patchedSources[path]) {
1594
+ patchedSources[path] = sources[path];
1595
+ }
1596
+ const patchId = patchData.patchId;
1597
+ if (errors[path]) {
1598
+ console.error("Cannot apply patch: previous errors exists", path, errors[path]);
1599
+ errors[path].push({
1600
+ patchId: patchId,
1601
+ skipped: true,
1602
+ error: new PatchError(`Cannot apply patch: previous errors exists`)
1603
+ });
1604
+ } else {
1605
+ const applicableOps = [];
1606
+ const fileFixOps = {};
1607
+ for (const op of patchData.patch) {
1608
+ if (op.op === "file") {
1609
+ // NOTE: We insert the last patch_id that modify a file
1610
+ // when constructing the url we use the patch id (and the file path)
1611
+ // to fetch the right file
1612
+ // NOTE: overwrite and use last patch_id if multiple patches modify the same file
1613
+ fileFixOps[op.path.join("/")] = [{
1614
+ op: "add",
1615
+ path: op.path.concat(...(op.nestedFilePath || [])).concat("patch_id"),
1616
+ value: patchId
1617
+ }];
1618
+ } else {
1619
+ applicableOps.push(op);
1620
+ }
1621
+ }
1622
+ const patchRes = applyPatch(deepClone(patchedSources[path]),
1623
+ // applyPatch mutates the source. On add operations it adds more than once? There is something strange going on... deepClone seems to fix, but is that the right solution?
1624
+ jsonOps, applicableOps.concat(...Object.values(fileFixOps)));
1625
+ if (result.isErr(patchRes)) {
1626
+ console.error("Could not apply patch", JSON.stringify({
1627
+ path,
1628
+ patchId,
1629
+ error: patchRes.error,
1630
+ applicableOps
1631
+ }, null, 2));
1632
+ if (!errors[path]) {
1633
+ errors[path] = [];
1634
+ }
1568
1635
  errors[path].push({
1569
1636
  patchId: patchId,
1570
- skipped: true,
1571
- error: new PatchError(`Cannot apply patch: previous errors exists`)
1637
+ skipped: false,
1638
+ error: patchRes.error
1572
1639
  });
1573
1640
  } else {
1574
- const patchData = analysis.patches[patchId];
1575
- if (!patchData) {
1576
- errors[path] = [{
1577
- patchId: patchId,
1578
- skipped: false,
1579
- error: new PatchError(`Patch not found`)
1580
- }];
1581
- continue;
1582
- }
1583
- const applicableOps = [];
1584
- const fileFixOps = {};
1585
- for (const op of patchData.patch) {
1586
- if (op.op === "file") {
1587
- // NOTE: We insert the last patch_id that modify a file
1588
- // when constructing the url we use the patch id (and the file path)
1589
- // to fetch the right file
1590
- // NOTE: overwrite and use last patch_id if multiple patches modify the same file
1591
- fileFixOps[op.path.join("/")] = [{
1592
- op: "add",
1593
- path: op.path.concat(...(op.nestedFilePath || [])).concat("patch_id"),
1594
- value: patchId
1595
- }];
1596
- } else {
1597
- applicableOps.push(op);
1598
- }
1599
- }
1600
- const patchRes = applyPatch(deepClone(patchedSources[path]),
1601
- // applyPatch mutates the source. On add operations it will add multiple items? There is something strange going on. DeepClone seems to fix, but is that the right?
1602
- jsonOps, applicableOps.concat(...Object.values(fileFixOps)));
1603
- if (result.isErr(patchRes)) {
1604
- if (!errors[path]) {
1605
- errors[path] = [];
1606
- }
1607
- errors[path].push({
1608
- patchId: patchId,
1609
- skipped: false,
1610
- error: patchRes.error
1611
- });
1612
- } else {
1613
- patchedSources[path] = patchRes.value;
1614
- }
1641
+ patchedSources[path] = patchRes.value;
1615
1642
  }
1616
1643
  }
1617
1644
  }
@@ -1806,7 +1833,7 @@ class ValOps {
1806
1833
  actual: currentValueMetadata[field],
1807
1834
  expected: fieldMetadata
1808
1835
  },
1809
- fixes: ["image:replace-metadata"]
1836
+ fixes: ["image:check-metadata"]
1810
1837
  }];
1811
1838
  }
1812
1839
  }
@@ -1849,14 +1876,14 @@ class ValOps {
1849
1876
  for (const {
1850
1877
  patchId
1851
1878
  } of patches) {
1852
- var _patchAnalysis$patche;
1853
- const patch = (_patchAnalysis$patche = patchAnalysis.patches) === null || _patchAnalysis$patche === void 0 || (_patchAnalysis$patche = _patchAnalysis$patche[patchId]) === null || _patchAnalysis$patche === void 0 ? void 0 : _patchAnalysis$patche.patch;
1854
- if (!patch) {
1879
+ const patchData = patchAnalysis.patches.find(p => p.patchId === patchId);
1880
+ if (!patchData) {
1855
1881
  errors.push({
1856
1882
  message: `Analysis required non-existing patch: ${patchId}`
1857
1883
  });
1858
1884
  break;
1859
1885
  }
1886
+ const patch = patchData.patch;
1860
1887
  const sourceFileOps = patch.filter(op => op.op !== "file"); // file is not a valid source file op
1861
1888
  const patchRes = applyPatch(tsSourceFile, tsOps, sourceFileOps);
1862
1889
  if (result.isErr(patchRes)) {
@@ -1970,13 +1997,21 @@ class ValOps {
1970
1997
  }
1971
1998
 
1972
1999
  // #region createPatch
1973
- async createPatch(path, patchAnalysis, patch, authorId) {
1974
- const initTree = await this.initTree();
2000
+ async createPatch(path, patch, parentRef, authorId) {
2001
+ const initTree = await this.initSources();
1975
2002
  const schemas = initTree.schemas;
1976
2003
  const moduleErrors = initTree.moduleErrors;
1977
2004
  let sources = initTree.sources;
1978
- if (patchAnalysis) {
1979
- const tree = await this.getTree(patchAnalysis);
2005
+ if (parentRef.type !== "head") {
2006
+ // There's room for some optimizations here: we could do this once, then re-use every time we create a patch, then again we only create one patch at a time
2007
+ const patchOps = await this.fetchPatches({
2008
+ omitPatch: false
2009
+ });
2010
+ const patchAnalysis = this.analyzePatches(patchOps.patches);
2011
+ const tree = await this.getSources({
2012
+ ...patchAnalysis,
2013
+ ...patchOps
2014
+ });
1980
2015
  sources = {
1981
2016
  ...sources,
1982
2017
  ...tree.sources
@@ -1987,27 +2022,30 @@ class ValOps {
1987
2022
  const moduleError = moduleErrors.find(e => e.path === path);
1988
2023
  if (moduleError) {
1989
2024
  console.error(`Cannot patch. Module at path: '${path}' has fatal errors: "${moduleError.message}"`);
1990
- return {
2025
+ return result.err({
2026
+ errorType: "other",
1991
2027
  error: {
1992
2028
  message: `Cannot patch. Module at path: '${path}' has fatal errors: ` + moduleErrors.map(m => `"${m.message}"`).join(" and ")
1993
2029
  }
1994
- };
2030
+ });
1995
2031
  }
1996
2032
  if (!source) {
1997
2033
  console.error(`Cannot patch. Module source at path: '${path}' does not exist`);
1998
- return {
2034
+ return result.err({
2035
+ errorType: "other",
1999
2036
  error: {
2000
2037
  message: `Cannot patch. Module source at path: '${path}' does not exist`
2001
2038
  }
2002
- };
2039
+ });
2003
2040
  }
2004
2041
  if (!schema) {
2005
2042
  console.error(`Cannot patch. Module schema at path: '${path}' does not exist`);
2006
- return {
2043
+ return result.err({
2044
+ errorType: "other",
2007
2045
  error: {
2008
2046
  message: `Cannot patch. Module schema at path: '${path}' does not exist`
2009
2047
  }
2010
- };
2048
+ });
2011
2049
  }
2012
2050
  const sourceFileOps = [];
2013
2051
  const files = {};
@@ -2046,14 +2084,20 @@ class ValOps {
2046
2084
  }
2047
2085
  }
2048
2086
  }
2049
- const saveRes = await this.saveSourceFilePatch(path, sourceFileOps, authorId);
2050
- if (saveRes.error) {
2051
- console.error(`Could not save source file patch at path: '${path}'. Error: ${saveRes.error.message}`);
2052
- return {
2087
+ const saveRes = await this.saveSourceFilePatch(path, patch, parentRef, authorId);
2088
+ if (result.isErr(saveRes)) {
2089
+ console.error(`Could not save source patch at path: '${path}'. Error: ${saveRes.error.errorType === "other" ? saveRes.error.message : saveRes.error.errorType}`);
2090
+ if (saveRes.error.errorType === "patch-head-conflict") {
2091
+ return result.err({
2092
+ errorType: "patch-head-conflict"
2093
+ });
2094
+ }
2095
+ return result.err({
2096
+ errorType: "other",
2053
2097
  error: saveRes.error
2054
- };
2098
+ });
2055
2099
  }
2056
- const patchId = saveRes.patchId;
2100
+ const patchId = saveRes.value.patchId;
2057
2101
  const saveFileRes = await Promise.all(Object.entries(files).map(async ([filePath, data]) => {
2058
2102
  if (data.error) {
2059
2103
  return {
@@ -2118,7 +2162,7 @@ class ValOps {
2118
2162
  const MaxRetries = 3;
2119
2163
  let lastRes;
2120
2164
  for (let i = 0; i < MaxRetries; i++) {
2121
- lastRes = await this.saveBase64EncodedBinaryFileFromPatch(filePath, patchId, data.value, type, metadataOps.metadata);
2165
+ lastRes = await this.saveBase64EncodedBinaryFileFromPatch(filePath, parentRef, patchId, data.value, type, metadataOps.metadata);
2122
2166
  if (!lastRes.error) {
2123
2167
  return {
2124
2168
  filePath
@@ -2133,24 +2177,25 @@ class ValOps {
2133
2177
  }));
2134
2178
  const errors = saveFileRes.filter(f => !!f.error);
2135
2179
  if (errors.length > 0) {
2136
- return {
2180
+ return result.err({
2181
+ errorType: "other",
2137
2182
  error: {
2138
2183
  message: "Could not save patch: " + errors.map(e => e.error.message).join(", ")
2139
2184
  }
2140
- };
2185
+ });
2141
2186
  }
2142
- return {
2187
+ return result.ok({
2143
2188
  patchId,
2144
2189
  files: saveFileRes,
2145
2190
  createdAt: new Date().toISOString()
2146
- };
2191
+ });
2147
2192
  }
2148
2193
 
2149
2194
  // #region abstract ops
2150
2195
  }
2151
2196
  function isOnlyFileCheckValidationError(validationError) {
2152
2197
  var _validationError$fixe;
2153
- if ((_validationError$fixe = validationError.fixes) !== null && _validationError$fixe !== void 0 && _validationError$fixe.every(f => f === "file:check-metadata" || f === "image:replace-metadata")) {
2198
+ if ((_validationError$fixe = validationError.fixes) !== null && _validationError$fixe !== void 0 && _validationError$fixe.every(f => f === "file:check-metadata" || f === "image:check-metadata")) {
2154
2199
  return true;
2155
2200
  }
2156
2201
  return false;
@@ -2240,81 +2285,51 @@ function bufferFromDataUrl(dataUrl) {
2240
2285
  }
2241
2286
  }
2242
2287
 
2243
- const JSONValueT = z$1.lazy(() => z$1.union([z$1.string(), z$1.number(), z$1.boolean(), z$1.null(), z$1.array(JSONValueT), z$1.record(JSONValueT)]));
2244
-
2245
- /**
2246
- * Raw JSON patch operation.
2247
- */
2248
- const OperationJSONT = z$1.discriminatedUnion("op", [z$1.object({
2249
- op: z$1.literal("add"),
2250
- path: z$1.string(),
2251
- value: JSONValueT
2252
- }).strict(), z$1.object({
2253
- op: z$1.literal("remove"),
2254
- /**
2255
- * Must be non-root
2256
- */
2257
- path: z$1.string()
2258
- }).strict(), z$1.object({
2259
- op: z$1.literal("replace"),
2260
- path: z$1.string(),
2261
- value: JSONValueT
2262
- }).strict(), z$1.object({
2263
- op: z$1.literal("move"),
2264
- /**
2265
- * Must be non-root and not a proper prefix of "path".
2266
- */
2267
- from: z$1.string(),
2268
- path: z$1.string()
2269
- }).strict(), z$1.object({
2270
- op: z$1.literal("copy"),
2271
- from: z$1.string(),
2272
- path: z$1.string()
2273
- }).strict(), z$1.object({
2274
- op: z$1.literal("test"),
2275
- path: z$1.string(),
2276
- value: JSONValueT
2277
- }).strict(), z$1.object({
2278
- op: z$1.literal("file"),
2279
- path: z$1.string(),
2280
- filePath: z$1.string(),
2281
- value: z$1.string()
2282
- }).strict()]);
2283
- const PatchJSON = z$1.array(OperationJSONT);
2284
2288
  /**
2285
- * Raw JSON patch operation.
2289
+ * Computes the changed patch parent references based on the current patches and the patch IDs to be deleted.
2290
+ *
2291
+ * NOTE: patches that will be deleted are not included in the changed patches, since they will be deleted any how.
2292
+ *
2293
+ * @param currentPatches - The array of current patches.
2294
+ * @param deletePatchIds - The array of patch IDs to be deleted.
2295
+ * @returns An object containing the changed patches with their corresponding parent references.
2286
2296
  */
2287
- const OperationT = z$1.discriminatedUnion("op", [z$1.object({
2288
- op: z$1.literal("add"),
2289
- path: z$1.array(z$1.string()),
2290
- value: JSONValueT
2291
- }).strict(), z$1.object({
2292
- op: z$1.literal("remove"),
2293
- path: z$1.array(z$1.string()).nonempty()
2294
- }).strict(), z$1.object({
2295
- op: z$1.literal("replace"),
2296
- path: z$1.array(z$1.string()),
2297
- value: JSONValueT
2298
- }).strict(), z$1.object({
2299
- op: z$1.literal("move"),
2300
- from: z$1.array(z$1.string()).nonempty(),
2301
- path: z$1.array(z$1.string())
2302
- }).strict(), z$1.object({
2303
- op: z$1.literal("copy"),
2304
- from: z$1.array(z$1.string()),
2305
- path: z$1.array(z$1.string())
2306
- }).strict(), z$1.object({
2307
- op: z$1.literal("test"),
2308
- path: z$1.array(z$1.string()),
2309
- value: JSONValueT
2310
- }).strict(), z$1.object({
2311
- op: z$1.literal("file"),
2312
- path: z$1.array(z$1.string()),
2313
- filePath: z$1.string(),
2314
- nestedFilePath: z$1.array(z$1.string()).optional(),
2315
- value: z$1.string()
2316
- }).strict()]);
2317
- const Patch = z$1.array(OperationT);
2297
+ function computeChangedPatchParentRefs(currentPatches, deletePatchIds) {
2298
+ let lastNonDeletedPatchIndex = -1;
2299
+ const changedPatches = {};
2300
+ for (let i = 0; i < currentPatches.length; i++) {
2301
+ const current = currentPatches[i];
2302
+ if (
2303
+ // skip all patches that will be deleted:
2304
+ deletePatchIds.includes(current.patchId)) {
2305
+ var _currentPatches;
2306
+ if (
2307
+ // skip change if the patch after is deleted anyway:
2308
+ !deletePatchIds.includes((_currentPatches = currentPatches[i + 1]) === null || _currentPatches === void 0 ? void 0 : _currentPatches.patchId)) {
2309
+ if (
2310
+ // set next patch to point to head if it exists:
2311
+ lastNonDeletedPatchIndex === -1 && currentPatches[i + 1]) {
2312
+ changedPatches[currentPatches[i + 1].patchId] = {
2313
+ type: "head",
2314
+ headBaseSha: current.baseSha
2315
+ };
2316
+ } else if (
2317
+ // set next patch to point to the last non-deleted patch:
2318
+ currentPatches[lastNonDeletedPatchIndex] && currentPatches[i + 1]) {
2319
+ changedPatches[currentPatches[i + 1].patchId] = {
2320
+ type: "patch",
2321
+ patchId: currentPatches[lastNonDeletedPatchIndex].patchId
2322
+ };
2323
+ }
2324
+ }
2325
+ } else {
2326
+ lastNonDeletedPatchIndex = i;
2327
+ }
2328
+ }
2329
+ return {
2330
+ changedPatches
2331
+ };
2332
+ }
2318
2333
 
2319
2334
  class ValOpsFS extends ValOps {
2320
2335
  static VAL_DIR = ".val";
@@ -2488,32 +2503,31 @@ class ValOpsFS extends ValOps {
2488
2503
  patchJsonFiles = this.host.readDirectory(patchesCacheDir, ["patch.json"], [], []);
2489
2504
  }
2490
2505
  const patches = {};
2491
- const errors = {};
2492
- const parsedPatchIds = patchJsonFiles.map(file => parseInt(fsPath__default.basename(fsPath__default.dirname(file)), 10)).sort();
2493
- for (const patchIdNum of parsedPatchIds) {
2494
- if (Number.isNaN(patchIdNum)) {
2495
- throw new Error("Could not parse patch id from file name. Files found: " + patchJsonFiles.join(", "));
2496
- }
2497
- const patchId = patchIdNum.toString();
2498
- if (includes && includes.length > 0 && !includes.includes(patchId)) {
2499
- continue;
2500
- }
2501
- const parsedFSPatchRes = this.parseJsonFile(this.getPatchFilePath(patchId), FSPatch);
2502
- let parsedFSPatchBaseRes = undefined;
2503
- if (this.host.fileExists(this.getPatchBaseFile(patchId))) {
2504
- parsedFSPatchBaseRes = this.parseJsonFile(this.getPatchBaseFile(patchId), FSPatchBase);
2505
- }
2506
- if (parsedFSPatchRes.error) {
2507
- errors[patchId] = parsedFSPatchRes.error;
2508
- } else if (parsedFSPatchBaseRes && parsedFSPatchBaseRes.error) {
2509
- errors[patchId] = parsedFSPatchBaseRes.error;
2506
+ const errors = [];
2507
+ const parsedUnsortedFsPatches = patchJsonFiles.map(file => fsPath__default.basename(fsPath__default.dirname(file))).map(patchDir => [patchDir, this.parseJsonFile(this.getPatchFilePath(patchDir), FSPatch), this.host.fileExists(this.getPatchBaseFile(patchDir)) ? this.parseJsonFile(this.getPatchBaseFile(patchDir), FSPatchBase) : undefined]);
2508
+ parsedUnsortedFsPatches.forEach(([dir, parsedPatch, parsedBase]) => {
2509
+ if (parsedPatch.error) {
2510
+ errors.push({
2511
+ ...parsedPatch.error,
2512
+ parentPatchId: dir
2513
+ });
2514
+ } else if (parsedBase && parsedBase.error) {
2515
+ errors.push({
2516
+ ...parsedBase.error,
2517
+ parentPatchId: dir
2518
+ });
2510
2519
  } else {
2511
- patches[patchId] = {
2512
- ...parsedFSPatchRes.data,
2513
- appliedAt: parsedFSPatchBaseRes ? parsedFSPatchBaseRes.data : null
2520
+ if (includes && includes.length > 0 && !includes.includes(parsedPatch.data.patchId)) {
2521
+ return;
2522
+ }
2523
+ patches[parsedPatch.data.patchId] = {
2524
+ ...parsedPatch.data,
2525
+ appliedAt: parsedBase ? parsedBase.data : null
2514
2526
  };
2515
2527
  }
2516
- }
2528
+ });
2529
+
2530
+ // If there are patches, but no head. error
2517
2531
  if (Object.keys(errors).length > 0) {
2518
2532
  return {
2519
2533
  patches,
@@ -2524,36 +2538,52 @@ class ValOpsFS extends ValOps {
2524
2538
  patches
2525
2539
  };
2526
2540
  }
2541
+ getParentPatchIdFromParentRef(parentRef) {
2542
+ return parentRef.type === "head" ? "head" : parentRef.patchId;
2543
+ }
2527
2544
  async fetchPatches(filters) {
2545
+ const fetchPatchesRes = await this.fetchPatchesFromFS(!!filters.omitPatch);
2546
+ const sortedPatches = this.createPatchChain(fetchPatchesRes.patches).filter(patchData => {
2547
+ if (filters.authors && !(patchData.authorId === null || filters.authors.includes(patchData.authorId))) {
2548
+ return false;
2549
+ }
2550
+ if (filters.moduleFilePaths && !filters.moduleFilePaths.includes(patchData.path)) {
2551
+ return false;
2552
+ }
2553
+ return true;
2554
+ }).map(patchData => {
2555
+ if (filters.omitPatch) {
2556
+ return {
2557
+ ...patchData,
2558
+ patch: undefined
2559
+ };
2560
+ }
2561
+ return patchData;
2562
+ });
2563
+ return {
2564
+ patches: sortedPatches,
2565
+ errors: fetchPatchesRes.errors
2566
+ };
2567
+ }
2568
+ async fetchPatchesFromFS(omitPath) {
2528
2569
  const patches = {};
2529
- const errors = {};
2530
2570
  const {
2531
- errors: allErrors,
2571
+ errors,
2532
2572
  patches: allPatches
2533
- } = await this.readPatches(filters.patchIds);
2534
- if (allErrors && Object.keys(allErrors).length > 0) {
2535
- for (const [patchId, error] of Object.entries(allErrors)) {
2536
- console.error("Error reading patch", patchId, error);
2537
- errors[patchId] = error;
2538
- }
2539
- }
2573
+ } = await this.readPatches();
2540
2574
  for (const [patchIdS, patch] of Object.entries(allPatches)) {
2541
2575
  const patchId = patchIdS;
2542
- if (filters.authors && !(patch.authorId === null || filters.authors.includes(patch.authorId))) {
2543
- continue;
2544
- }
2545
- if (filters.moduleFilePaths && !filters.moduleFilePaths.includes(patch.path)) {
2546
- continue;
2547
- }
2548
2576
  patches[patchId] = {
2549
- patch: filters.omitPatch ? undefined : patch.patch,
2577
+ patch: omitPath ? undefined : patch.patch,
2578
+ parentRef: patch.parentRef,
2550
2579
  path: patch.path,
2580
+ baseSha: patch.baseSha,
2551
2581
  createdAt: patch.createdAt,
2552
2582
  authorId: patch.authorId,
2553
2583
  appliedAt: patch.appliedAt
2554
2584
  };
2555
2585
  }
2556
- if (errors && Object.keys(errors).length > 0) {
2586
+ if (errors && errors.length > 0) {
2557
2587
  return {
2558
2588
  patches,
2559
2589
  errors
@@ -2564,6 +2594,33 @@ class ValOpsFS extends ValOps {
2564
2594
  };
2565
2595
  }
2566
2596
 
2597
+ // #region createPatchChain
2598
+ createPatchChain(unsortedPatchRecord) {
2599
+ var _Object$entries$find;
2600
+ // TODO: Error handling
2601
+ const nextPatch = {};
2602
+ Object.keys(unsortedPatchRecord).forEach(patchId => {
2603
+ const patch = unsortedPatchRecord[patchId];
2604
+ if (patch.parentRef.type === "head") {
2605
+ nextPatch["head"] = patchId;
2606
+ } else {
2607
+ nextPatch[patch.parentRef.patchId] = patchId;
2608
+ }
2609
+ });
2610
+ const sortedPatches = [];
2611
+ let nextPatchId = (_Object$entries$find = Object.entries(unsortedPatchRecord).find(([, patch]) => patch.parentRef.type === "head")) === null || _Object$entries$find === void 0 ? void 0 : _Object$entries$find[0];
2612
+ while (!!nextPatchId && nextPatchId in unsortedPatchRecord) {
2613
+ const patch = unsortedPatchRecord[nextPatchId];
2614
+ delete patch["parentRef"];
2615
+ sortedPatches.push({
2616
+ ...patch,
2617
+ patchId: nextPatchId
2618
+ });
2619
+ nextPatchId = nextPatch[nextPatchId];
2620
+ }
2621
+ return sortedPatches;
2622
+ }
2623
+
2567
2624
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
2568
2625
  parseJsonFile(filePath, parser) {
2569
2626
  if (!this.host.fileExists(filePath)) {
@@ -2638,38 +2695,47 @@ class ValOpsFS extends ValOps {
2638
2695
  };
2639
2696
  }
2640
2697
  }
2641
- async saveSourceFilePatch(path, patch, authorId) {
2642
- let fileId = Date.now();
2698
+ async saveSourceFilePatch(path, patch, parentRef, authorId) {
2699
+ const patchDir = this.getParentPatchIdFromParentRef(parentRef);
2643
2700
  try {
2644
- while (this.host.fileExists(this.getPatchFilePath(fileId.toString()))) {
2645
- // ensure unique file / patch id
2646
- fileId++;
2647
- }
2648
- const patchId = fileId.toString();
2701
+ const baseSha = await this.getBaseSha();
2702
+ const patchId = crypto.randomUUID();
2649
2703
  const data = {
2650
2704
  patch,
2705
+ patchId,
2706
+ parentRef,
2651
2707
  path,
2652
2708
  authorId,
2709
+ baseSha,
2653
2710
  coreVersion: Internal.VERSION.core,
2654
2711
  createdAt: new Date().toISOString()
2655
2712
  };
2656
- this.host.writeUf8File(this.getPatchFilePath(patchId), JSON.stringify(data));
2657
- return {
2713
+ const writeRes = this.host.tryWriteUf8File(this.getPatchFilePath(patchDir), JSON.stringify(data));
2714
+ if (writeRes.type === "error") {
2715
+ return writeRes.errorType === "dir-already-exists" ? result.err({
2716
+ errorType: "patch-head-conflict"
2717
+ }) : result.err({
2718
+ errorType: "other",
2719
+ error: writeRes.error,
2720
+ message: "Failed to write patch file"
2721
+ });
2722
+ }
2723
+ return result.ok({
2658
2724
  patchId
2659
- };
2725
+ });
2660
2726
  } catch (err) {
2661
2727
  if (err instanceof Error) {
2662
- return {
2663
- error: {
2664
- message: err.message
2665
- }
2666
- };
2728
+ return result.err({
2729
+ errorType: "other",
2730
+ error: err,
2731
+ message: err.message
2732
+ });
2667
2733
  }
2668
- return {
2669
- error: {
2670
- message: "Unknown error"
2671
- }
2672
- };
2734
+ return result.err({
2735
+ errorType: "other",
2736
+ error: err,
2737
+ message: "Unknown error"
2738
+ });
2673
2739
  }
2674
2740
  }
2675
2741
  async getSourceFile(path) {
@@ -2707,9 +2773,10 @@ class ValOpsFS extends ValOps {
2707
2773
  };
2708
2774
  }
2709
2775
  }
2710
- async saveBase64EncodedBinaryFileFromPatch(filePath, patchId, data, _type, metadata) {
2711
- const patchFilePath = this.getBinaryFilePath(filePath, patchId);
2712
- const metadataFilePath = this.getBinaryFileMetadataPath(filePath, patchId);
2776
+ async saveBase64EncodedBinaryFileFromPatch(filePath, parentRef, patchId, data, _type, metadata) {
2777
+ const patchDir = this.getParentPatchIdFromParentRef(parentRef);
2778
+ const patchFilePath = this.getBinaryFilePath(filePath, patchDir);
2779
+ const metadataFilePath = this.getBinaryFileMetadataPath(filePath, patchDir);
2713
2780
  try {
2714
2781
  const buffer = bufferFromDataUrl(data);
2715
2782
  if (!buffer) {
@@ -2741,7 +2808,15 @@ class ValOpsFS extends ValOps {
2741
2808
  }
2742
2809
  }
2743
2810
  async getBase64EncodedBinaryFileMetadataFromPatch(filePath, type, patchId) {
2744
- const metadataFilePath = this.getBinaryFileMetadataPath(filePath, patchId);
2811
+ const patchDirRes = await this.getParentPatchIdFromPatchId(patchId);
2812
+ if (result.isErr(patchDirRes)) {
2813
+ return {
2814
+ errors: [{
2815
+ message: "Failed to get patch dir from patch id"
2816
+ }]
2817
+ };
2818
+ }
2819
+ const metadataFilePath = this.getBinaryFileMetadataPath(filePath, patchDirRes.value);
2745
2820
  if (!this.host.fileExists(metadataFilePath)) {
2746
2821
  return {
2747
2822
  errors: [{
@@ -2777,7 +2852,11 @@ class ValOpsFS extends ValOps {
2777
2852
  };
2778
2853
  }
2779
2854
  async getBase64EncodedBinaryFileFromPatch(filePath, patchId) {
2780
- const absPath = this.getBinaryFilePath(filePath, patchId);
2855
+ const patchDirRes = await this.getParentPatchIdFromPatchId(patchId);
2856
+ if (!result.isOk(patchDirRes)) {
2857
+ return null;
2858
+ }
2859
+ const absPath = this.getBinaryFilePath(filePath, patchDirRes.value);
2781
2860
  if (!this.host.fileExists(absPath)) {
2782
2861
  return null;
2783
2862
  }
@@ -2785,30 +2864,62 @@ class ValOpsFS extends ValOps {
2785
2864
  }
2786
2865
  async deletePatches(patchIds) {
2787
2866
  const deleted = [];
2788
- let errors = null;
2789
- for (const patchId of patchIds) {
2790
- try {
2791
- this.host.deleteDir(this.getPatchDir(patchId));
2792
- deleted.push(patchId);
2793
- } catch (err) {
2794
- if (!errors) {
2795
- errors = {};
2796
- }
2797
- errors[patchId] = {
2798
- message: err instanceof Error ? err.message : "Unknown error"
2799
- };
2800
- }
2801
- }
2802
- if (errors) {
2867
+ const patchDirMapRes = await this.getParentPatchIdFromPatchIdMap();
2868
+ if (result.isErr(patchDirMapRes)) {
2803
2869
  return {
2804
- deleted,
2805
- errors
2870
+ error: {
2871
+ message: "Failed to get patch dir map"
2872
+ }
2806
2873
  };
2807
2874
  }
2875
+ const currentPatches = this.createPatchChain((await this.fetchPatchesFromFS(false)).patches);
2876
+ this.updateOrderedPatches(computeChangedPatchParentRefs(currentPatches, patchIds), patchDirMapRes.value, patchIds);
2808
2877
  return {
2809
2878
  deleted
2810
2879
  };
2811
2880
  }
2881
+ updateOrderedPatches(updates, patchDirMap, deletePatchIds) {
2882
+ for (const patchId of deletePatchIds) {
2883
+ const patchDir = patchDirMap[patchId];
2884
+ if (!patchDir) {
2885
+ console.error("Could not find patch dir for patch id scheduled for deletion: ", patchId);
2886
+ continue;
2887
+ }
2888
+ try {
2889
+ this.host.deleteDir(this.getFullPatchDir(patchDir));
2890
+ } catch (err) {
2891
+ console.error("Failed to delete patch dir", err);
2892
+ }
2893
+ }
2894
+ for (const [patchIdS, parentRef] of Object.entries(updates.changedPatches)) {
2895
+ const prevParentPatchId = patchDirMap[patchIdS];
2896
+ if (!prevParentPatchId) {
2897
+ console.error("Could not find previous parent patch id for deleted patch id: ", patchIdS);
2898
+ continue;
2899
+ }
2900
+ const newParentPatchId = parentRef.type === "head" ? "head" : parentRef.patchId;
2901
+ const currentPatchDataRes = this.parseJsonFile(this.getPatchFilePath(prevParentPatchId), FSPatch);
2902
+ if (currentPatchDataRes.error) {
2903
+ console.error("Failed to parse patch file while fixing patch chain after deleted patch", {
2904
+ updates
2905
+ }, currentPatchDataRes.error);
2906
+ continue;
2907
+ }
2908
+ const newPatchData = currentPatchDataRes.data;
2909
+ newPatchData.parentRef = parentRef;
2910
+ try {
2911
+ this.host.writeUf8File(this.getPatchFilePath(prevParentPatchId), JSON.stringify(newPatchData));
2912
+ if (this.host.directoryExists(this.getFullPatchDir(newParentPatchId))) {
2913
+ this.host.deleteDir(this.getFullPatchDir(newParentPatchId));
2914
+ }
2915
+ this.host.moveDir(this.getFullPatchDir(prevParentPatchId), this.getFullPatchDir(newParentPatchId));
2916
+ } catch (err) {
2917
+ console.error("Failed fix patch chain after deleted patch", {
2918
+ updates
2919
+ }, err);
2920
+ }
2921
+ }
2922
+ }
2812
2923
  async saveFiles(preparedCommit) {
2813
2924
  const updatedFiles = [];
2814
2925
  const errors = {};
@@ -2824,12 +2935,28 @@ class ValOpsFS extends ValOps {
2824
2935
  };
2825
2936
  }
2826
2937
  }
2938
+ const patchIdToPatchDirMapRes = await this.getParentPatchIdFromPatchIdMap();
2939
+ if (result.isErr(patchIdToPatchDirMapRes)) {
2940
+ return {
2941
+ updatedFiles,
2942
+ errors
2943
+ };
2944
+ }
2945
+ const patchIdToPatchDirMap = patchIdToPatchDirMapRes.value;
2827
2946
  for (const [filePath, {
2828
2947
  patchId
2829
2948
  }] of Object.entries(preparedCommit.patchedBinaryFilesDescriptors)) {
2830
2949
  const absPath = fsPath__default.join(this.rootDir, ...filePath.split("/"));
2831
2950
  try {
2832
- this.host.copyFile(this.getBinaryFilePath(filePath, patchId), absPath);
2951
+ const patchDir = patchIdToPatchDirMap[patchId];
2952
+ if (!patchDir) {
2953
+ errors[absPath] = {
2954
+ message: "Failed to find PatchDir for PatchId " + patchId,
2955
+ filePath
2956
+ };
2957
+ continue;
2958
+ }
2959
+ this.host.copyFile(this.getBinaryFilePath(filePath, patchDir), absPath);
2833
2960
  updatedFiles.push(absPath);
2834
2961
  } catch (err) {
2835
2962
  errors[absPath] = {
@@ -2843,7 +2970,14 @@ class ValOpsFS extends ValOps {
2843
2970
  baseSha: await this.getBaseSha(),
2844
2971
  timestamp: new Date().toISOString()
2845
2972
  };
2846
- const absPath = this.getPatchBaseFile(patchId);
2973
+ const patchDir = patchIdToPatchDirMap[patchId];
2974
+ if (!patchDir) {
2975
+ errors[`patchId:${patchId}`] = {
2976
+ message: "Failed to find PatchDir for PatchId " + patchId
2977
+ };
2978
+ continue;
2979
+ }
2980
+ const absPath = this.getPatchBaseFile(patchDir);
2847
2981
  try {
2848
2982
  this.host.writeUf8File(absPath, JSON.stringify(appliedAt));
2849
2983
  } catch (err) {
@@ -2887,25 +3021,47 @@ class ValOpsFS extends ValOps {
2887
3021
  }
2888
3022
  return createMetadataFromBuffer(type, mimeType, buffer);
2889
3023
  }
3024
+ async getParentPatchIdFromPatchId(patchId) {
3025
+ // This is not great. If needed we should find a better way
3026
+ const patches = await this.readPatches();
3027
+ if (patches.errors || patches.error) {
3028
+ console.error("Failed to read patches", JSON.stringify(patches));
3029
+ return result.err("failed-to-read-patches");
3030
+ }
3031
+ const patch = patches.patches[patchId];
3032
+ if (!patch) {
3033
+ console.error("Could not find patch with patchId: ", patchId);
3034
+ return result.err("patch-not-found");
3035
+ }
3036
+ return result.ok(this.getParentPatchIdFromParentRef(patch.parentRef));
3037
+ }
3038
+ async getParentPatchIdFromPatchIdMap() {
3039
+ const patches = await this.readPatches();
3040
+ if (patches.errors || patches.error) {
3041
+ console.error("Failed to read patches", JSON.stringify(patches));
3042
+ return result.err("failed-to-read-patches");
3043
+ }
3044
+ return result.ok(Object.fromEntries(Object.entries(patches.patches).map(([patchId, value]) => [patchId, this.getParentPatchIdFromParentRef(value.parentRef)])));
3045
+ }
2890
3046
 
2891
3047
  // #region fs file path helpers
2892
3048
  getPatchesDir() {
2893
3049
  return fsPath__default.join(this.rootDir, ValOpsFS.VAL_DIR, "patches");
2894
3050
  }
2895
- getPatchDir(patchId) {
2896
- return fsPath__default.join(this.getPatchesDir(), patchId);
3051
+ getFullPatchDir(patchDir) {
3052
+ return fsPath__default.join(this.getPatchesDir(), patchDir);
2897
3053
  }
2898
- getBinaryFilePath(filePath, patchId) {
2899
- return fsPath__default.join(this.getPatchDir(patchId), "files", filePath, fsPath__default.basename(filePath));
3054
+ getBinaryFilePath(filePath, patchDir) {
3055
+ return fsPath__default.join(this.getFullPatchDir(patchDir), "files", filePath, fsPath__default.basename(filePath));
2900
3056
  }
2901
- getBinaryFileMetadataPath(filePath, patchId) {
2902
- return fsPath__default.join(this.getPatchDir(patchId), "files", filePath, "metadata.json");
3057
+ getBinaryFileMetadataPath(filePath, patchDir) {
3058
+ return fsPath__default.join(this.getFullPatchDir(patchDir), "files", filePath, "metadata.json");
2903
3059
  }
2904
- getPatchFilePath(patchId) {
2905
- return fsPath__default.join(this.getPatchDir(patchId), "patch.json");
3060
+ getPatchFilePath(patchDir) {
3061
+ return fsPath__default.join(this.getFullPatchDir(patchDir), "patch.json");
2906
3062
  }
2907
- getPatchBaseFile(patchId) {
2908
- return fsPath__default.join(this.getPatchDir(patchId), "base.json");
3063
+ getPatchBaseFile(patchDir) {
3064
+ return fsPath__default.join(this.getFullPatchDir(patchDir), "base.json");
2909
3065
  }
2910
3066
  }
2911
3067
  class FSOpsHost {
@@ -2919,6 +3075,9 @@ class FSOpsHost {
2919
3075
  });
2920
3076
  }
2921
3077
  }
3078
+ moveDir(from, to) {
3079
+ fs.renameSync(from, to);
3080
+ }
2922
3081
  directoryExists(path) {
2923
3082
  return ts.sys.directoryExists(path);
2924
3083
  }
@@ -2940,6 +3099,37 @@ class FSOpsHost {
2940
3099
  });
2941
3100
  fs.writeFileSync(path, data, "utf-8");
2942
3101
  }
3102
+ tryWriteUf8File(path, data) {
3103
+ try {
3104
+ const parentDir = fsPath__default.join(fsPath__default.dirname(path), "../");
3105
+ fs.mkdirSync(parentDir, {
3106
+ recursive: true
3107
+ });
3108
+ // Make the parent dir separately. This is because we need mkdir to throw
3109
+ // if the directory already exists. If we use recursive: true, it doesn't
3110
+ fs.mkdirSync(fsPath__default.dirname(path), {
3111
+ recursive: false
3112
+ });
3113
+ } catch (e) {
3114
+ return {
3115
+ type: "error",
3116
+ errorType: "dir-already-exists",
3117
+ error: e
3118
+ };
3119
+ }
3120
+ try {
3121
+ fs.writeFileSync(path, data, "utf-8");
3122
+ } catch (e) {
3123
+ return {
3124
+ type: "error",
3125
+ errorType: "failed-to-write-file",
3126
+ error: e
3127
+ };
3128
+ }
3129
+ return {
3130
+ type: "success"
3131
+ };
3132
+ }
2943
3133
  writeBinaryFile(path, data) {
2944
3134
  fs.mkdirSync(fsPath__default.dirname(path), {
2945
3135
  recursive: true
@@ -2956,6 +3146,9 @@ class FSOpsHost {
2956
3146
  const FSPatch = z.object({
2957
3147
  path: z.string().refine(p => p.startsWith("/") && p.includes(".val."), "Path is not valid. Must start with '/' and include '.val.'"),
2958
3148
  patch: Patch,
3149
+ patchId: z.string(),
3150
+ baseSha: z.string(),
3151
+ parentRef: ParentRef,
2959
3152
  authorId: z.string().refine(p => true).nullable(),
2960
3153
  createdAt: z.string().datetime(),
2961
3154
  coreVersion: z.string().nullable() // TODO: use this to check if patch is compatible with current core version?
@@ -2968,7 +3161,7 @@ const FSPatchBase = z.object({
2968
3161
  const textEncoder = new TextEncoder();
2969
3162
  const PatchId = z.string().refine(s => !!s); // TODO: validate
2970
3163
  const CommitSha = z.string().refine(s => !!s); // TODO: validate
2971
- const BaseSha = z.string().refine(s => !!s); // TODO: validate
3164
+ z.string().refine(s => !!s); // TODO: validate
2972
3165
  const AuthorId = z.string().refine(s => !!s); // TODO: validate
2973
3166
  const ModuleFilePath = z.string().refine(s => !!s); // TODO: validate
2974
3167
  const Metadata = z.union([z.object({
@@ -2988,11 +3181,7 @@ const BasePatchResponse = z.object({
2988
3181
  patchId: PatchId,
2989
3182
  authorId: AuthorId.nullable(),
2990
3183
  createdAt: z.string().datetime(),
2991
- applied: z.object({
2992
- baseSha: BaseSha,
2993
- commitSha: CommitSha,
2994
- appliedAt: z.string().datetime()
2995
- }).nullable()
3184
+ baseSha: z.string()
2996
3185
  });
2997
3186
  const GetPatches = z.object({
2998
3187
  patches: z.array(z.intersection(z.object({
@@ -3037,7 +3226,7 @@ const DeletePatchesResponse = z.object({
3037
3226
  patchId: PatchId
3038
3227
  })).optional()
3039
3228
  });
3040
- const SavePatchFileResponse = z.object({
3229
+ z.object({
3041
3230
  patchId: PatchId,
3042
3231
  filePath: ModuleFilePath
3043
3232
  });
@@ -3074,18 +3263,35 @@ class ValOpsHttp extends ValOps {
3074
3263
  }
3075
3264
  const currentBaseSha = await this.getBaseSha();
3076
3265
  const currentSchemaSha = await this.getSchemaSha();
3077
- const patchData = await this.fetchPatches({
3266
+ const allPatchData = await this.fetchPatches({
3078
3267
  omitPatch: true,
3079
3268
  authors: undefined,
3080
3269
  patchIds: undefined,
3081
3270
  moduleFilePaths: undefined
3082
3271
  });
3272
+ // We think these errors will be picked up else where (?), so we only return an error here if there are no patches
3273
+ if (allPatchData.patches.length === 0) {
3274
+ let message;
3275
+ if (allPatchData.error) {
3276
+ message = allPatchData.error.message;
3277
+ } else if (allPatchData.errors && allPatchData.errors.length > 0) {
3278
+ const errors = allPatchData.errors;
3279
+ message = errors.map(error => error.message).join("");
3280
+ }
3281
+ if (message) {
3282
+ message = `Could not get patches: ${message}`;
3283
+ console.error(message);
3284
+ return {
3285
+ type: "error",
3286
+ error: {
3287
+ message
3288
+ }
3289
+ };
3290
+ }
3291
+ }
3083
3292
  const patches = [];
3084
- // TODO: use proper patch sequences when available:
3085
- for (const [patchId] of Object.entries(patchData.patches).sort(([, a], [, b]) => {
3086
- return a.createdAt.localeCompare(b.createdAt, undefined);
3087
- })) {
3088
- patches.push(patchId);
3293
+ for (const patchData of allPatchData.patches) {
3294
+ patches.push(patchData.patchId);
3089
3295
  }
3090
3296
  const webSocketNonceRes = await this.getWebSocketNonce(params.profileId);
3091
3297
  if (webSocketNonceRes.status === "error") {
@@ -3146,6 +3352,16 @@ class ValOpsHttp extends ValOps {
3146
3352
  }
3147
3353
  };
3148
3354
  }
3355
+ const contentType = res.headers.get("Content-Type") || "";
3356
+ if (contentType.startsWith("application/json")) {
3357
+ const json = await res.json();
3358
+ return {
3359
+ status: "error",
3360
+ error: {
3361
+ message: "Could not get nonce." + (json.message || "Unexpected error (no error message). Status: " + res.status)
3362
+ }
3363
+ };
3364
+ }
3149
3365
  return {
3150
3366
  status: "error",
3151
3367
  error: {
@@ -3171,8 +3387,8 @@ class ValOpsHttp extends ValOps {
3171
3387
  for (let i = 0; i < patchIds.length; i += chunkSize) {
3172
3388
  patchIdChunks.push(patchIds.slice(i, i + chunkSize));
3173
3389
  }
3174
- let allPatches = {};
3175
- let allErrors = {};
3390
+ let allPatches = [];
3391
+ let allErrors = [];
3176
3392
  if (patchIds === undefined || patchIds.length === 0) {
3177
3393
  return this.fetchPatchesInternal({
3178
3394
  patchIds: patchIds,
@@ -3195,10 +3411,7 @@ class ValOpsHttp extends ValOps {
3195
3411
  ...res.patches
3196
3412
  };
3197
3413
  if (res.errors) {
3198
- allErrors = {
3199
- ...allErrors,
3200
- ...res.errors
3201
- };
3414
+ allErrors = [...allErrors, ...res.errors];
3202
3415
  }
3203
3416
  }
3204
3417
  return {
@@ -3234,27 +3447,23 @@ class ValOpsHttp extends ValOps {
3234
3447
  "Content-Type": "application/json"
3235
3448
  }
3236
3449
  }).then(async res => {
3237
- const patches = {};
3450
+ const patches = [];
3238
3451
  if (res.ok) {
3239
3452
  const json = await res.json();
3240
3453
  const parsed = GetPatches.safeParse(json);
3241
3454
  if (parsed.success) {
3455
+ const errors = [];
3242
3456
  const data = parsed.data;
3243
- const errors = {};
3244
3457
  for (const patchesRes of data.patches) {
3245
- patches[patchesRes.patchId] = {
3246
- path: patchesRes.path,
3458
+ patches.push({
3247
3459
  authorId: patchesRes.authorId,
3248
3460
  createdAt: patchesRes.createdAt,
3249
- appliedAt: patchesRes.applied && {
3250
- baseSha: patchesRes.applied.baseSha,
3251
- timestamp: patchesRes.applied.appliedAt,
3252
- git: {
3253
- commitSha: patchesRes.applied.commitSha
3254
- }
3255
- },
3256
- patch: patchesRes.patch
3257
- };
3461
+ appliedAt: null,
3462
+ patchId: patchesRes.patchId,
3463
+ path: patchesRes.path,
3464
+ baseSha: patchesRes.baseSha,
3465
+ patch: filters.omitPatch ? undefined : patchesRes.patch
3466
+ });
3258
3467
  }
3259
3468
  return {
3260
3469
  patches,
@@ -3276,7 +3485,8 @@ class ValOpsHttp extends ValOps {
3276
3485
  };
3277
3486
  });
3278
3487
  }
3279
- async saveSourceFilePatch(path, patch, authorId) {
3488
+ async saveSourceFilePatch(path, patch, parentRef, authorId) {
3489
+ const baseSha = await this.getBaseSha();
3280
3490
  return fetch(`${this.hostUrl}/v1/${this.project}/patches`, {
3281
3491
  method: "POST",
3282
3492
  headers: {
@@ -3287,78 +3497,53 @@ class ValOpsHttp extends ValOps {
3287
3497
  path,
3288
3498
  patch,
3289
3499
  authorId,
3500
+ parentPatchId: parentRef.type === "patch" ? parentRef.patchId : null,
3501
+ baseSha,
3290
3502
  commit: this.commitSha,
3291
3503
  branch: this.branch,
3292
3504
  coreVersion: Internal.VERSION.core
3293
3505
  })
3294
3506
  }).then(async res => {
3507
+ var _res$headers$get;
3295
3508
  if (res.ok) {
3296
3509
  const parsed = SavePatchResponse.safeParse(await res.json());
3297
3510
  if (parsed.success) {
3298
- return {
3511
+ return result.ok({
3299
3512
  patchId: parsed.data.patchId
3300
- };
3513
+ });
3301
3514
  }
3302
- return {
3303
- error: {
3304
- message: `Could not parse save patch response. Error: ${fromError(parsed.error)}`
3305
- }
3306
- };
3515
+ return result.err({
3516
+ errorType: "other",
3517
+ message: `Could not parse save patch response. Error: ${fromError(parsed.error)}`
3518
+ });
3307
3519
  }
3308
- return {
3309
- error: {
3310
- message: "Could not save patch. HTTP error: " + res.status + " " + res.statusText
3311
- }
3312
- };
3313
- }).catch(e => {
3314
- return {
3315
- error: {
3316
- message: `Could save source file patch (connection error?): ${e instanceof Error ? e.message : e.toString()}`
3317
- }
3318
- };
3319
- });
3320
- }
3321
- async saveBase64EncodedBinaryFileFromPatch(filePath, patchId, data, type, metadata) {
3322
- return fetch(`${this.hostUrl}/v1/${this.project}/patches/${patchId}/files`, {
3323
- method: "POST",
3324
- headers: {
3325
- ...this.authHeaders,
3326
- "Content-Type": "application/json"
3327
- },
3328
- body: JSON.stringify({
3329
- filePath: filePath,
3330
- data,
3331
- type,
3332
- metadata
3333
- })
3334
- }).then(async res => {
3335
- if (res.ok) {
3336
- const parsed = SavePatchFileResponse.safeParse(await res.json());
3337
- if (parsed.success) {
3338
- return {
3339
- patchId: parsed.data.patchId,
3340
- filePath: parsed.data.filePath
3341
- };
3342
- }
3343
- return {
3344
- error: {
3345
- message: `Could not parse save patch file response. Error: ${fromError(parsed.error)}`
3346
- }
3347
- };
3520
+ if (res.status === 409) {
3521
+ return result.err({
3522
+ errorType: "patch-head-conflict",
3523
+ message: "Conflict: " + (await res.text())
3524
+ });
3348
3525
  }
3349
- return {
3350
- error: {
3351
- message: "Could not save patch file. HTTP error: " + res.status + " " + res.statusText
3352
- }
3353
- };
3526
+ if ((_res$headers$get = res.headers.get("Content-Type")) !== null && _res$headers$get !== void 0 && _res$headers$get.includes("application/json")) {
3527
+ const json = await res.json();
3528
+ return result.err({
3529
+ errorType: "other",
3530
+ message: json.message || "Unknown error"
3531
+ });
3532
+ }
3533
+ return result.err({
3534
+ errorType: "other",
3535
+ message: "Could not save patch. HTTP error: " + res.status + " " + res.statusText
3536
+ });
3354
3537
  }).catch(e => {
3355
- return {
3356
- error: {
3357
- message: `Could save source binary file in patch (connection error?): ${e.toString()}`
3358
- }
3359
- };
3538
+ return result.err({
3539
+ errorType: "other",
3540
+ message: `Could save source file patch (connection error?): ${e instanceof Error ? e.message : e.toString()}`
3541
+ });
3360
3542
  });
3361
3543
  }
3544
+ async saveBase64EncodedBinaryFileFromPatch(filePath, parentRef, patchId, data, type, metadata) {
3545
+ throw Error("TODO: implement");
3546
+ }
3362
3547
  async getHttpFiles(files) {
3363
3548
  const params = new URLSearchParams();
3364
3549
  const stringifiedFiles = JSON.stringify({
@@ -3461,7 +3646,7 @@ class ValOpsHttp extends ValOps {
3461
3646
  const params = new URLSearchParams();
3462
3647
  params.set("file_path", filePath);
3463
3648
  try {
3464
- const metadataRes = await fetch(`${this.hostUrl}/v1/${this.project}/patches/${patchId}/metadata?${params}`, {
3649
+ const metadataRes = await fetch(`${this.hostUrl}/v1/${this.project}/patches/${patchId}/files?${params}`, {
3465
3650
  headers: {
3466
3651
  ...this.authHeaders,
3467
3652
  "Content-Type": "application/json"
@@ -3563,7 +3748,7 @@ class ValOpsHttp extends ValOps {
3563
3748
  }
3564
3749
  async commit(prepared, message, committer, newBranch) {
3565
3750
  try {
3566
- var _res$headers$get;
3751
+ var _res$headers$get2;
3567
3752
  const existingBranch = this.branch;
3568
3753
  const res = await fetch(`${this.hostUrl}/v1/${this.project}/commit`, {
3569
3754
  method: "POST",
@@ -3599,7 +3784,7 @@ class ValOpsHttp extends ValOps {
3599
3784
  }
3600
3785
  };
3601
3786
  }
3602
- if ((_res$headers$get = res.headers.get("Content-Type")) !== null && _res$headers$get !== void 0 && _res$headers$get.includes("application/json")) {
3787
+ if ((_res$headers$get2 = res.headers.get("Content-Type")) !== null && _res$headers$get2 !== void 0 && _res$headers$get2.includes("application/json")) {
3603
3788
  const json = await res.json();
3604
3789
  if (json.isNotFastForward) {
3605
3790
  return {
@@ -4138,8 +4323,74 @@ const ValServer = (valModules, options, callbacks) => {
4138
4323
  }
4139
4324
  },
4140
4325
  //#region patches
4141
- "/patches/~": {
4326
+ "/patches": {
4327
+ PUT: async req => {
4328
+ const cookies = req.cookies;
4329
+ const auth = getAuth(cookies);
4330
+ if (auth.error) {
4331
+ return {
4332
+ status: 401,
4333
+ json: {
4334
+ message: auth.error
4335
+ }
4336
+ };
4337
+ }
4338
+ if (serverOps instanceof ValOpsHttp && !("id" in auth)) {
4339
+ return {
4340
+ status: 401,
4341
+ json: {
4342
+ message: "Unauthorized"
4343
+ }
4344
+ };
4345
+ }
4346
+ const patches = req.body.patches;
4347
+ const parentRef = req.body.parentRef;
4348
+ const authorId = "id" in auth ? auth.id : null;
4349
+ const newPatchIds = [];
4350
+ for (const patch of patches) {
4351
+ const createPatchRes = await serverOps.createPatch(patch.path, patch.patch, parentRef, authorId);
4352
+ if (result.isErr(createPatchRes)) {
4353
+ if (createPatchRes.error.errorType === "patch-head-conflict") {
4354
+ return {
4355
+ status: 409,
4356
+ json: {
4357
+ type: "patch-head-conflict",
4358
+ message: "Patch id conflict"
4359
+ }
4360
+ };
4361
+ } else {
4362
+ return {
4363
+ status: 400,
4364
+ json: {
4365
+ type: "patch-error",
4366
+ message: "Could not create patch",
4367
+ errors: {
4368
+ [patch.path]: [{
4369
+ error: {
4370
+ message: createPatchRes.error.error.message
4371
+ }
4372
+ }]
4373
+ }
4374
+ }
4375
+ };
4376
+ }
4377
+ } else {
4378
+ newPatchIds.push(createPatchRes.value.patchId);
4379
+ }
4380
+ }
4381
+ return {
4382
+ status: 200,
4383
+ json: {
4384
+ newPatchIds,
4385
+ parentRef: {
4386
+ type: "patch",
4387
+ patchId: newPatchIds[newPatchIds.length - 1]
4388
+ }
4389
+ }
4390
+ };
4391
+ },
4142
4392
  GET: async req => {
4393
+ // TODO: Fix type error patchId is string somewhere and PatchId somewhere else
4143
4394
  const query = req.query;
4144
4395
  const cookies = req.cookies;
4145
4396
  const auth = getAuth(cookies);
@@ -4159,38 +4410,52 @@ const ValServer = (valModules, options, callbacks) => {
4159
4410
  }
4160
4411
  };
4161
4412
  }
4413
+ const omit_patch = query.omit_patch === true;
4162
4414
  const authors = query.author;
4163
- const patches = await serverOps.fetchPatches({
4415
+ const fetchedPatchesRes = await serverOps.fetchPatches({
4164
4416
  authors,
4165
4417
  patchIds: query.patch_id,
4166
- omitPatch: query.omit_patch === true,
4418
+ omitPatch: omit_patch,
4167
4419
  moduleFilePaths: query.module_file_path
4168
4420
  });
4169
- if (patches.error) {
4421
+ if (fetchedPatchesRes.error) {
4170
4422
  // Error is singular
4171
- console.error("Val: Failed to get patches", patches.errors);
4423
+ console.error("Val: Failed to get patches", fetchedPatchesRes.error);
4172
4424
  return {
4173
4425
  status: 500,
4174
4426
  json: {
4175
- message: patches.error.message,
4176
- details: patches.error
4427
+ message: fetchedPatchesRes.error.message,
4428
+ error: fetchedPatchesRes.error
4177
4429
  }
4178
4430
  };
4179
4431
  }
4180
- if (patches.errors && Object.keys(patches.errors).length > 0) {
4432
+ if (fetchedPatchesRes.errors && Object.keys(fetchedPatchesRes.errors).length > 0) {
4181
4433
  // Errors is plural. Different property than above.
4182
- console.error("Val: Failed to get patches", patches.errors);
4434
+ console.error("Val: Failed to get patches", fetchedPatchesRes.errors);
4183
4435
  return {
4184
4436
  status: 500,
4185
4437
  json: {
4186
4438
  message: "Failed to get patches",
4187
- details: patches.errors
4439
+ patchErrors: fetchedPatchesRes.errors
4188
4440
  }
4189
4441
  };
4190
4442
  }
4443
+ const patches = [];
4444
+ for (const [patchIdS, patchData] of Object.entries(fetchedPatchesRes.patches)) {
4445
+ const patchId = patchIdS;
4446
+ patches.push({
4447
+ patchId,
4448
+ ...patchData
4449
+ });
4450
+ }
4451
+ // TODO: we should sort by parentRef instead:
4452
+ patches.sort((a, b) => a.createdAt.localeCompare(b.createdAt));
4191
4453
  return {
4192
4454
  status: 200,
4193
- json: patches
4455
+ json: {
4456
+ patches,
4457
+ baseSha: await serverOps.getBaseSha()
4458
+ }
4194
4459
  };
4195
4460
  },
4196
4461
  DELETE: async req => {
@@ -4221,7 +4486,10 @@ const ValServer = (valModules, options, callbacks) => {
4221
4486
  status: 500,
4222
4487
  json: {
4223
4488
  message: "Failed to delete patches",
4224
- details: deleteRes.errors
4489
+ errors: Object.entries(deleteRes.errors).map(([id, error]) => ({
4490
+ patchId: id,
4491
+ ...error
4492
+ }))
4225
4493
  }
4226
4494
  };
4227
4495
  }
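Delete failures now surface as an errors array with one entry per failing patch id, instead of an id-keyed object under details. A minimal sketch of reading the reworked 500 payload, assuming each underlying error carries a message field (request construction is not shown in this hunk):

async function deletePatches(url) {
  // url construction (route path, patch id query parameters) is not shown in this hunk
  const res = await fetch(url, { method: "DELETE" });
  const json = await res.json();
  if (res.status === 500) {
    // new shape: an array of { patchId, ...error } rather than an id-keyed object
    for (const err of json.errors) {
      console.error("failed to delete", err.patchId, err.message);
    }
  }
  return json;
}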
@@ -4293,13 +4561,12 @@ const ValServer = (valModules, options, callbacks) => {
4293
4561
  }
4294
4562
  },
4295
4563
  // #region sources
4296
- "/sources": {
4564
+ "/sources/~": {
4297
4565
  PUT: async req => {
4298
- var _body$patchIds;
4299
4566
  const query = req.query;
4300
4567
  const cookies = req.cookies;
4301
- const body = req.body;
4302
- const treePath = req.path || "";
4568
+ // TODO: filter results by moduleFilePath
4569
+ // const moduleFilePath = req.path || "";
4303
4570
  const auth = getAuth(cookies);
4304
4571
  if (auth.error) {
4305
4572
  return {
@@ -4328,149 +4595,81 @@ const ValServer = (valModules, options, callbacks) => {
4328
4595
  }
4329
4596
  };
4330
4597
  }
4331
- let tree;
4332
- let patchAnalysis = null;
4333
- let newPatchIds = undefined;
4334
- if (body !== null && body !== void 0 && body.patchIds && (body === null || body === void 0 || (_body$patchIds = body.patchIds) === null || _body$patchIds === void 0 ? void 0 : _body$patchIds.length) > 0 || body !== null && body !== void 0 && body.addPatches) {
4335
- // TODO: validate patches_sha
4336
- const patchIds = body === null || body === void 0 ? void 0 : body.patchIds;
4337
- const patchOps = patchIds && patchIds.length > 0 ? await serverOps.fetchPatches({
4338
- patchIds,
4339
- omitPatch: false
4340
- }) : {
4341
- patches: {}
4342
- };
4343
- if (patchOps.error) {
4344
- return {
4345
- status: 400,
4346
- json: {
4347
- message: "Failed to fetch patches: " + patchOps.error.message,
4348
- details: []
4349
- }
4350
- };
4351
- }
4352
- let patchErrors = undefined;
4353
- for (const [patchIdS, error] of Object.entries(patchOps.errors || {})) {
4354
- const patchId = patchIdS;
4355
- if (!patchErrors) {
4356
- patchErrors = {};
4357
- }
4358
- patchErrors[patchId] = {
4359
- message: error.message
4360
- };
4361
- }
4362
- // TODO: errors
4363
- patchAnalysis = serverOps.analyzePatches(patchOps.patches);
4364
- if (body !== null && body !== void 0 && body.addPatches) {
4365
- for (const addPatch of body.addPatches) {
4366
- const newPatchModuleFilePath = addPatch.path;
4367
- const newPatchOps = addPatch.patch;
4368
- const authorId = "id" in auth ? auth.id : null;
4369
- const createPatchRes = await serverOps.createPatch(newPatchModuleFilePath, {
4370
- ...patchAnalysis,
4371
- ...patchOps
4372
- }, newPatchOps, authorId);
4373
- if (createPatchRes.error) {
4374
- return {
4375
- status: 500,
4376
- json: {
4377
- message: "Failed to create patch: " + createPatchRes.error.message,
4378
- details: createPatchRes.error
4379
- }
4380
- };
4381
- }
4382
- if (!newPatchIds) {
4383
- newPatchIds = [createPatchRes.patchId];
4384
- } else {
4385
- newPatchIds.push(createPatchRes.patchId);
4386
- }
4387
- patchOps.patches[createPatchRes.patchId] = {
4388
- path: newPatchModuleFilePath,
4389
- patch: newPatchOps,
4390
- authorId,
4391
- createdAt: createPatchRes.createdAt,
4392
- appliedAt: null
4393
- };
4394
- patchAnalysis.patchesByModule[newPatchModuleFilePath] = [...(patchAnalysis.patchesByModule[newPatchModuleFilePath] || []), {
4395
- patchId: createPatchRes.patchId
4396
- }];
4397
- }
4398
- }
4399
- tree = {
4400
- ...(await serverOps.getTree({
4401
- ...patchAnalysis,
4402
- ...patchOps
4403
- }))
4404
- };
4405
- if (query.validate_all) {
4406
- const allTree = await serverOps.getTree();
4407
- tree = {
4408
- sources: {
4409
- ...allTree.sources,
4410
- ...tree.sources
4411
- },
4412
- errors: {
4413
- ...allTree.errors,
4414
- ...tree.errors
4415
- }
4416
- };
4598
+ const patchOps = await serverOps.fetchPatches({
4599
+ patchIds: undefined,
4600
+ omitPatch: false
4601
+ });
4602
+ const patchAnalysis = serverOps.analyzePatches(patchOps.patches);
4603
+ let sourcesRes = await serverOps.getSources();
4604
+ const onlyPatchedTreeModules = await serverOps.getSources({
4605
+ ...patchAnalysis,
4606
+ ...patchOps
4607
+ });
4608
+ sourcesRes = {
4609
+ sources: {
4610
+ ...sourcesRes.sources,
4611
+ ...(onlyPatchedTreeModules.sources || {})
4612
+ },
4613
+ errors: {
4614
+ ...sourcesRes.errors,
4615
+ ...(onlyPatchedTreeModules.errors || {})
4417
4616
  }
4418
- } else {
4419
- tree = await serverOps.getTree();
4420
- }
4617
+ };
4421
4618
  let sourcesValidation = {
4422
4619
  errors: {},
4423
4620
  files: {}
4424
4621
  };
4425
4622
  if (query.validate_sources || query.validate_binary_files) {
4426
4623
  const schemas = await serverOps.getSchemas();
4427
- sourcesValidation = await serverOps.validateSources(schemas, tree.sources);
4624
+ sourcesValidation = await serverOps.validateSources(schemas, sourcesRes.sources);
4428
4625
 
4429
- // TODO: send validation errors
4626
+ // TODO: send binary files validation errors
4430
4627
  if (query.validate_binary_files) {
4431
- await serverOps.validateFiles(schemas, tree.sources, sourcesValidation.files);
4628
+ await serverOps.validateFiles(schemas, sourcesRes.sources, sourcesValidation.files);
4432
4629
  }
4433
4630
  }
4434
4631
  const schemaSha = await serverOps.getSchemaSha();
4435
4632
  const modules = {};
4436
- for (const [moduleFilePathS, module] of Object.entries(tree.sources)) {
4633
+ for (const [moduleFilePathS, module] of Object.entries(sourcesRes.sources)) {
4437
4634
  const moduleFilePath = moduleFilePathS;
4438
- if (moduleFilePath.startsWith(treePath)) {
4439
- var _sourcesValidation$er;
4635
+ if (moduleFilePath.startsWith(moduleFilePath)) {
4636
+ var _patchAnalysis$patche, _sourcesValidation$er;
4637
+ const skippedPatches = [];
4638
+ const patchErrors = {};
4639
+ const appliedPatches = ((_patchAnalysis$patche = patchAnalysis.patchesByModule[moduleFilePath]) === null || _patchAnalysis$patche === void 0 ? void 0 : _patchAnalysis$patche.map(p => p.patchId)) || [];
4640
+ for (const {
4641
+ patchId,
4642
+ skipped,
4643
+ error
4644
+ } of ((_sourcesRes$errors = sourcesRes.errors) === null || _sourcesRes$errors === void 0 ? void 0 : _sourcesRes$errors[moduleFilePath]) || []) {
4645
+ var _sourcesRes$errors;
4646
+ if (skipped) {
4647
+ skippedPatches.push(patchId);
4648
+ } else if (error) {
4649
+ patchErrors[patchId] = {
4650
+ message: error.message
4651
+ };
4652
+ } else {
4653
+ // neither skipped nor errored: treat the patch as applied (unclear if this is the right default)
4654
+ appliedPatches.push(patchId);
4655
+ }
4656
+ }
4440
4657
  modules[moduleFilePath] = {
4441
4658
  source: module,
4442
- patches: patchAnalysis && patchAnalysis.patchesByModule[moduleFilePath] ? {
4443
- applied: patchAnalysis.patchesByModule[moduleFilePath].map(p => p.patchId)
4659
+ patches: appliedPatches.length > 0 || skippedPatches.length > 0 || Object.keys(patchErrors).length > 0 ? {
4660
+ applied: appliedPatches,
4661
+ skipped: skippedPatches.length > 0 ? skippedPatches : undefined,
4662
+ errors: Object.keys(patchErrors).length > 0 ? patchErrors : undefined
4444
4663
  } : undefined,
4445
4664
  validationErrors: (_sourcesValidation$er = sourcesValidation.errors[moduleFilePath]) === null || _sourcesValidation$er === void 0 ? void 0 : _sourcesValidation$er.validations
4446
4665
  };
4447
4666
  }
4448
4667
  }
4449
- if (tree.errors && Object.keys(tree.errors).length > 0) {
4450
- const res = {
4451
- status: 400,
4452
- json: {
4453
- type: "patch-error",
4454
- schemaSha,
4455
- modules,
4456
- errors: Object.fromEntries(Object.entries(tree.errors).map(([key, value]) => [key, value.map(error => ({
4457
- patchId: error.patchId,
4458
- skipped: error.skipped,
4459
- error: {
4460
- message: error.error.message
4461
- }
4462
- }))])),
4463
- message: "One or more patches failed to be applied"
4464
- }
4465
- };
4466
- return res;
4467
- }
4468
4668
  const res = {
4469
4669
  status: 200,
4470
4670
  json: {
4471
4671
  schemaSha,
4472
- modules,
4473
- newPatchIds
4672
+ modules
4474
4673
  }
4475
4674
  };
4476
4675
  return res;
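Taken together, the /sources/~ rewrite drops the request body (patchIds / addPatches) and the treePath-based filtering: the handler now always fetches every patch, overlays the patched modules onto the base sources, and reports per-module patch status as applied / skipped / errors instead of failing the whole request with a 400 "patch-error"; newPatchIds and the validate_all branch disappear as a result. Note that the remaining path check compares moduleFilePath against itself, so it is always true and the filtering hinted at by the TODO is effectively not applied in this version. A minimal sketch of reading the new response (the base URL and query-flag serialization are assumptions; the PUT method and the validate_sources / validate_binary_files flags are taken from the handler above):

async function fetchSources(baseUrl) {
  const res = await fetch(`${baseUrl}/sources/~?validate_sources=true`, { method: "PUT" });
  const { schemaSha, modules } = await res.json();
  for (const [moduleFilePath, mod] of Object.entries(modules)) {
    const applied = mod.patches?.applied ?? [];    // patch ids applied to this module
    const skipped = mod.patches?.skipped ?? [];    // patch ids skipped while patching
    const patchErrors = mod.patches?.errors ?? {}; // patchId -> { message }
    // mod.source is the (patched) module source; mod.validationErrors is only
    // populated when source validation was requested and found problems
    console.log(moduleFilePath, schemaSha, applied.length, skipped.length, Object.keys(patchErrors).length);
  }
}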
@@ -5422,7 +5621,7 @@ async function createFixPatch(config, apply, sourcePath, validationError) {
5422
5621
  const remainingErrors = [];
5423
5622
  const patch = [];
5424
5623
  for (const fix of validationError.fixes || []) {
5425
- if (fix === "image:replace-metadata" || fix === "image:add-metadata") {
5624
+ if (fix === "image:check-metadata" || fix === "image:add-metadata") {
5426
5625
  const imageMetadata = await getImageMetadata();
5427
5626
  if (imageMetadata.width === undefined || imageMetadata.height === undefined) {
5428
5627
  remainingErrors.push({
@@ -5430,7 +5629,7 @@ async function createFixPatch(config, apply, sourcePath, validationError) {
5430
5629
  message: "Failed to get image metadata",
5431
5630
  fixes: undefined
5432
5631
  });
5433
- } else if (fix === "image:replace-metadata") {
5632
+ } else if (fix === "image:check-metadata") {
5434
5633
  const currentValue = validationError.value;
5435
5634
  const metadataIsCorrect =
5436
5635
  // metadata is a prop that is an object
@@ -5441,7 +5640,6 @@ async function createFixPatch(config, apply, sourcePath, validationError) {
5441
5640
  "height" in currentValue.metadata && currentValue.metadata.height === imageMetadata.height &&
5442
5641
  // mimeType is correct
5443
5642
  "mimeType" in currentValue.metadata && currentValue.metadata.mimeType === imageMetadata.mimeType;
5444
-
5445
5643
  // skips if the metadata is already correct
5446
5644
  if (!metadataIsCorrect) {
5447
5645
  if (apply) {
@@ -5449,6 +5647,8 @@ async function createFixPatch(config, apply, sourcePath, validationError) {
5449
5647
  op: "replace",
5450
5648
  path: sourceToPatchPath(sourcePath).concat("metadata"),
5451
5649
  value: {
5650
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
5651
+ ...currentValue.metadata,
5452
5652
  width: imageMetadata.width,
5453
5653
  height: imageMetadata.height,
5454
5654
  mimeType: imageMetadata.mimeType
@@ -5517,6 +5717,8 @@ async function createFixPatch(config, apply, sourcePath, validationError) {
5517
5717
  op: "replace",
5518
5718
  path: sourceToPatchPath(sourcePath).concat("metadata"),
5519
5719
  value: {
5720
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
5721
+ ...currentValue.metadata,
5520
5722
  ...(fileMetadata.mimeType ? {
5521
5723
  mimeType: fileMetadata.mimeType
5522
5724
  } : {})
@@ -5550,21 +5752,6 @@ async function createFixPatch(config, apply, sourcePath, validationError) {
5550
5752
  }
5551
5753
  });
5552
5754
  }
5553
- } else if (fix === "fix:deprecated-richtext") {
5554
- if (!validationError.value) {
5555
- throw Error("Cannot fix richtext without a value");
5556
- }
5557
- patch.push({
5558
- op: "replace",
5559
- path: sourceToPatchPath(sourcePath),
5560
- value: validationError.value
5561
- });
5562
- } else {
5563
- remainingErrors.push({
5564
- ...validationError,
5565
- message: `Unknown fix: ${fix}`,
5566
- fixes: undefined
5567
- });
5568
5755
  }
5569
5756
  }
5570
5757
  if (!validationError.fixes || validationError.fixes.length === 0) {
@@ -5576,4 +5763,4 @@ async function createFixPatch(config, apply, sourcePath, validationError) {
5576
5763
  };
5577
5764
  }
5578
5765
 
5579
- export { Patch, PatchJSON, Service, ValFSHost, ValModuleLoader, ValSourceFileHandler, createFixPatch, createService, createValApiRouter, createValServer, decodeJwt, encodeJwt, formatSyntaxErrorTree, getCompilerOptions, getExpire, patchSourceFile, safeReadGit };
5766
+ export { Service, ValFSHost, ValModuleLoader, ValSourceFileHandler, createFixPatch, createService, createValApiRouter, createValServer, decodeJwt, encodeJwt, formatSyntaxErrorTree, getCompilerOptions, getExpire, patchSourceFile, safeReadGit };
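Finally, the package export list drops Patch and PatchJSON. A minimal illustration of the kind of import that resolves against 0.67.0 but not against 0.68.0 (whether a replacement entry point is offered is not visible in this part of the diff):

// compiles against 0.67.0; no longer exported by 0.68.0
import { Patch, PatchJSON } from "@valbuild/server";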