@valbuild/server 0.67.1 → 0.68.1

This diff shows the changes between publicly released versions of the package as they appear in their public registry, and is provided for informational purposes only.
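At a glance, this release reworks patch storage and ordering: each stored patch now carries a patchId, a baseSha and a parentRef that points either at "head" or at the previous patch, so patches form an explicit chain instead of being ordered by createdAt. Around that change, initTree/getTree become initSources/getSources, createPatch and saveSourceFilePatch return result.ok/result.err values with a new patch-head-conflict error, the locally defined zod Patch schema is replaced by Patch and ParentRef imports from @valbuild/shared/internal, and the server gains a PUT /patches route and profile endpoints. A hedged sketch of what a stored patch looks like after this release (values are invented; the head variant's headBaseSha field is inferred from computeChangedPatchParentRefs further down in this diff):

const examplePatchJson = {
  path: "/content/pages.val.ts",
  patch: [{ op: "replace", path: ["title"], value: "New title" }],
  patchId: "4f9c0d7e-0000-0000-0000-000000000000", // now a crypto.randomUUID()
  baseSha: "<sha256 of sources + schema + config>",
  parentRef: { type: "head", headBaseSha: "<sha256>" }, // or { type: "patch", patchId: "<previous patch id>" }
  authorId: null,
  createdAt: "2024-01-01T00:00:00.000Z",
  coreVersion: "<@valbuild/core version>"
};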
@@ -8,10 +8,10 @@ import fsPath__default from 'path';
8
8
  import fs, { promises } from 'fs';
9
9
  import { transform } from 'sucrase';
10
10
  import { VAL_CSS_PATH, VAL_APP_ID, VAL_OVERLAY_ID } from '@valbuild/ui';
11
- import { VAL_ENABLE_COOKIE_NAME, VAL_STATE_COOKIE, VAL_SESSION_COOKIE, Api } from '@valbuild/shared/internal';
11
+ import { Patch, ParentRef, VAL_ENABLE_COOKIE_NAME, VAL_STATE_COOKIE, VAL_SESSION_COOKIE, Api } from '@valbuild/shared/internal';
12
12
  import { createUIRequestHandler } from '@valbuild/ui/server';
13
13
  import crypto$1 from 'crypto';
14
- import z$1, { z } from 'zod';
14
+ import { z } from 'zod';
15
15
  import sizeOf from 'image-size';
16
16
  import { fromError, fromZodError } from 'zod-validation-error';
17
17
 
@@ -1353,11 +1353,11 @@ const tsOps = new TSOps(document => {
1353
1353
  class ValOps {
1354
1354
  /** Sources from val modules, immutable (without patches or anything) */
1355
1355
 
1356
- /** The sha265 / hash of sources + schema + config */
1356
+ /** The sha256 / hash of sources + schema + config */
1357
1357
 
1358
1358
  /** Schema from val modules, immutable */
1359
1359
 
1360
- /** The sha265 / hash of schema + config - if this changes users needs to reload */
1360
+ /** The sha256 / hash of schema + config - if this changes users needs to reload */
1361
1361
 
1362
1362
  constructor(valModules, options) {
1363
1363
  this.valModules = valModules;
@@ -1369,8 +1369,46 @@ class ValOps {
1369
1369
  this.modulesErrors = null;
1370
1370
  }
1371
1371
  hash(input) {
1372
+ if (typeof input === "object") {
1373
+ return this.hashObject(input);
1374
+ }
1372
1375
  return Internal.getSHA256Hash(textEncoder$1.encode(input));
1373
1376
  }
1377
+ hashObject(obj) {
1378
+ const collector = [];
1379
+ this.collectObjectRecursive(obj, collector);
1380
+ return Internal.getSHA256Hash(textEncoder$1.encode(collector.join("")));
1381
+ }
1382
+ collectObjectRecursive(item, collector) {
1383
+ if (typeof item === "string") {
1384
+ collector.push(`"`, item, `"`);
1385
+ return;
1386
+ } else if (typeof item === "number") {
1387
+ collector.push(item.toString());
1388
+ return;
1389
+ } else if (typeof item === "object") {
1390
+ if (Array.isArray(item)) {
1391
+ collector.push("[");
1392
+ for (let i = 0; i < item.length; i++) {
1393
+ this.collectObjectRecursive(item[i], collector);
1394
+ i !== item.length - 1 && collector.push(",");
1395
+ }
1396
+ collector.push("]");
1397
+ } else {
1398
+ collector.push("{");
1399
+ const keys = Object.keys(item).sort();
1400
+ keys.forEach((key, i) => {
1401
+ collector.push(`"${key}":`);
1402
+ this.collectObjectRecursive(item[key], collector);
1403
+ i !== keys.length - 1 && collector.push(",");
1404
+ });
1405
+ collector.push("}");
1406
+ }
1407
+ return;
1408
+ } else {
1409
+ console.warn("Unknown type encountered when hashing object", typeof item, item);
1410
+ }
1411
+ }
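The new hashObject/collectObjectRecursive pair canonicalizes a value before hashing: object keys are sorted, array order is preserved, strings are quoted and numbers stringified, so structurally equal objects hash to the same sha256 regardless of key order. Illustration with invented values:

// collectObjectRecursive({ b: 1, a: ["x", 2] }, out) and
// collectObjectRecursive({ a: ["x", 2], b: 1 }, out) both collect
//   {"a":["x",2],"b":1}
// so hashObject returns the same sha256 for both. Booleans and undefined
// currently hit the console.warn fallback and contribute nothing to the hash.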
1374
1412
 
1375
1413
  // #region stat
1376
1414
  /**
@@ -1384,7 +1422,7 @@ class ValOps {
1384
1422
  */
1385
1423
 
1386
1424
  // #region initTree
1387
- async initTree() {
1425
+ async initSources() {
1388
1426
  if (this.baseSha === null || this.schemaSha === null || this.sources === null || this.schemas === null || this.modulesErrors === null) {
1389
1427
  const currentModulesErrors = [];
1390
1428
  const addModuleError = (message, index, path) => {
@@ -1478,50 +1516,43 @@ class ValOps {
1478
1516
  const {
1479
1517
  baseSha,
1480
1518
  schemaSha
1481
- } = await this.initTree();
1519
+ } = await this.initSources();
1482
1520
  await this.onInit(baseSha, schemaSha);
1483
1521
  }
1484
1522
  async getBaseSources() {
1485
- return this.initTree().then(result => result.sources);
1523
+ return this.initSources().then(result => result.sources);
1486
1524
  }
1487
1525
  async getSchemas() {
1488
- return this.initTree().then(result => result.schemas);
1526
+ return this.initSources().then(result => result.schemas);
1489
1527
  }
1490
1528
  async getModuleErrors() {
1491
- return this.initTree().then(result => result.moduleErrors);
1529
+ return this.initSources().then(result => result.moduleErrors);
1492
1530
  }
1493
1531
  async getBaseSha() {
1494
- return this.initTree().then(result => result.baseSha);
1532
+ return this.initSources().then(result => result.baseSha);
1495
1533
  }
1496
1534
  async getSchemaSha() {
1497
- return this.initTree().then(result => result.schemaSha);
1535
+ return this.initSources().then(result => result.schemaSha);
1498
1536
  }
1499
1537
 
1500
1538
  // #region analyzePatches
1501
- analyzePatches(patchesById) {
1539
+ analyzePatches(sortedPatches) {
1502
1540
  const patchesByModule = {};
1503
1541
  const fileLastUpdatedByPatchId = {};
1504
- for (const [patchIdS, {
1505
- path,
1506
- patch,
1507
- createdAt: created_at
1508
- }] of Object.entries(patchesById)) {
1509
- const patchId = patchIdS;
1510
- for (const op of patch) {
1542
+ for (const patch of sortedPatches) {
1543
+ for (const op of patch.patch) {
1511
1544
  if (op.op === "file") {
1512
- fileLastUpdatedByPatchId[op.filePath] = patchId;
1545
+ const filePath = op.filePath;
1546
+ fileLastUpdatedByPatchId[filePath] = patch.patchId;
1513
1547
  }
1548
+ const path = patch.path;
1549
+ if (!patchesByModule[path]) {
1550
+ patchesByModule[path] = [];
1551
+ }
1552
+ patchesByModule[path].push({
1553
+ patchId: patch.patchId
1554
+ });
1514
1555
  }
1515
- if (!patchesByModule[path]) {
1516
- patchesByModule[path] = [];
1517
- }
1518
- patchesByModule[path].push({
1519
- patchId,
1520
- createdAt: created_at
1521
- });
1522
- }
1523
- for (const path in patchesByModule) {
1524
- patchesByModule[path].sort((a, b) => a.createdAt.localeCompare(b.createdAt));
1525
1556
  }
1526
1557
  return {
1527
1558
  patchesByModule,
@@ -1530,11 +1561,11 @@ class ValOps {
1530
1561
  }
1531
1562
 
1532
1563
  // #region getTree
1533
- async getTree(analysis) {
1564
+ async getSources(analysis) {
1534
1565
  if (!analysis) {
1535
1566
  const {
1536
1567
  sources
1537
- } = await this.initTree();
1568
+ } = await this.initSources();
1538
1569
  return {
1539
1570
  sources,
1540
1571
  errors: {}
@@ -1542,76 +1573,72 @@ class ValOps {
1542
1573
  }
1543
1574
  const {
1544
1575
  sources
1545
- } = await this.initTree();
1576
+ } = await this.initSources();
1546
1577
  const patchedSources = {};
1547
1578
  const errors = {};
1548
- for (const [pathS, patches] of Object.entries(analysis.patchesByModule)) {
1549
- const path = pathS;
1579
+ for (const patchData of analysis.patches) {
1580
+ const path = patchData.path;
1550
1581
  if (!sources[path]) {
1551
1582
  if (!errors[path]) {
1552
1583
  errors[path] = [];
1553
1584
  }
1554
- errors[path].push(...patches.map(({
1555
- patchId
1556
- }) => ({
1557
- patchId,
1558
- invalidPath: true,
1585
+ console.error("Module not found", path);
1586
+ errors[path].push({
1587
+ patchId: patchData.patchId,
1559
1588
  skipped: true,
1560
- error: new PatchError(`Module at path: '${path}' not found`)
1561
- })));
1589
+ error: new PatchError(`Module not found`)
1590
+ });
1591
+ continue;
1562
1592
  }
1563
- patchedSources[path] = sources[path];
1564
- for (const {
1565
- patchId
1566
- } of patches) {
1567
- if (errors[path]) {
1593
+ if (!patchedSources[path]) {
1594
+ patchedSources[path] = sources[path];
1595
+ }
1596
+ const patchId = patchData.patchId;
1597
+ if (errors[path]) {
1598
+ console.error("Cannot apply patch: previous errors exists", path, errors[path]);
1599
+ errors[path].push({
1600
+ patchId: patchId,
1601
+ skipped: true,
1602
+ error: new PatchError(`Cannot apply patch: previous errors exists`)
1603
+ });
1604
+ } else {
1605
+ const applicableOps = [];
1606
+ const fileFixOps = {};
1607
+ for (const op of patchData.patch) {
1608
+ if (op.op === "file") {
1609
+ // NOTE: We insert the last patch_id that modify a file
1610
+ // when constructing the url we use the patch id (and the file path)
1611
+ // to fetch the right file
1612
+ // NOTE: overwrite and use last patch_id if multiple patches modify the same file
1613
+ fileFixOps[op.path.join("/")] = [{
1614
+ op: "add",
1615
+ path: op.path.concat(...(op.nestedFilePath || [])).concat("patch_id"),
1616
+ value: patchId
1617
+ }];
1618
+ } else {
1619
+ applicableOps.push(op);
1620
+ }
1621
+ }
1622
+ const patchRes = applyPatch(deepClone(patchedSources[path]),
1623
+ // applyPatch mutates the source. On add operations it adds more than once? There is something strange going on... deepClone seems to fix, but is that the right solution?
1624
+ jsonOps, applicableOps.concat(...Object.values(fileFixOps)));
1625
+ if (result.isErr(patchRes)) {
1626
+ console.error("Could not apply patch", JSON.stringify({
1627
+ path,
1628
+ patchId,
1629
+ error: patchRes.error,
1630
+ applicableOps
1631
+ }, null, 2));
1632
+ if (!errors[path]) {
1633
+ errors[path] = [];
1634
+ }
1568
1635
  errors[path].push({
1569
1636
  patchId: patchId,
1570
- skipped: true,
1571
- error: new PatchError(`Cannot apply patch: previous errors exists`)
1637
+ skipped: false,
1638
+ error: patchRes.error
1572
1639
  });
1573
1640
  } else {
1574
- const patchData = analysis.patches[patchId];
1575
- if (!patchData) {
1576
- errors[path] = [{
1577
- patchId: patchId,
1578
- skipped: false,
1579
- error: new PatchError(`Patch not found`)
1580
- }];
1581
- continue;
1582
- }
1583
- const applicableOps = [];
1584
- const fileFixOps = {};
1585
- for (const op of patchData.patch) {
1586
- if (op.op === "file") {
1587
- // NOTE: We insert the last patch_id that modify a file
1588
- // when constructing the url we use the patch id (and the file path)
1589
- // to fetch the right file
1590
- // NOTE: overwrite and use last patch_id if multiple patches modify the same file
1591
- fileFixOps[op.path.join("/")] = [{
1592
- op: "add",
1593
- path: op.path.concat(...(op.nestedFilePath || [])).concat("patch_id"),
1594
- value: patchId
1595
- }];
1596
- } else {
1597
- applicableOps.push(op);
1598
- }
1599
- }
1600
- const patchRes = applyPatch(deepClone(patchedSources[path]),
1601
- // applyPatch mutates the source. On add operations it will add multiple items? There is something strange going on. DeepClone seems to fix, but is that the right?
1602
- jsonOps, applicableOps.concat(...Object.values(fileFixOps)));
1603
- if (result.isErr(patchRes)) {
1604
- if (!errors[path]) {
1605
- errors[path] = [];
1606
- }
1607
- errors[path].push({
1608
- patchId: patchId,
1609
- skipped: false,
1610
- error: patchRes.error
1611
- });
1612
- } else {
1613
- patchedSources[path] = patchRes.value;
1614
- }
1641
+ patchedSources[path] = patchRes.value;
1615
1642
  }
1616
1643
  }
1617
1644
  }
@@ -1849,14 +1876,14 @@ class ValOps {
1849
1876
  for (const {
1850
1877
  patchId
1851
1878
  } of patches) {
1852
- var _patchAnalysis$patche;
1853
- const patch = (_patchAnalysis$patche = patchAnalysis.patches) === null || _patchAnalysis$patche === void 0 || (_patchAnalysis$patche = _patchAnalysis$patche[patchId]) === null || _patchAnalysis$patche === void 0 ? void 0 : _patchAnalysis$patche.patch;
1854
- if (!patch) {
1879
+ const patchData = patchAnalysis.patches.find(p => p.patchId === patchId);
1880
+ if (!patchData) {
1855
1881
  errors.push({
1856
1882
  message: `Analysis required non-existing patch: ${patchId}`
1857
1883
  });
1858
1884
  break;
1859
1885
  }
1886
+ const patch = patchData.patch;
1860
1887
  const sourceFileOps = patch.filter(op => op.op !== "file"); // file is not a valid source file op
1861
1888
  const patchRes = applyPatch(tsSourceFile, tsOps, sourceFileOps);
1862
1889
  if (result.isErr(patchRes)) {
@@ -1970,13 +1997,21 @@ class ValOps {
1970
1997
  }
1971
1998
 
1972
1999
  // #region createPatch
1973
- async createPatch(path, patchAnalysis, patch, authorId) {
1974
- const initTree = await this.initTree();
2000
+ async createPatch(path, patch, parentRef, authorId) {
2001
+ const initTree = await this.initSources();
1975
2002
  const schemas = initTree.schemas;
1976
2003
  const moduleErrors = initTree.moduleErrors;
1977
2004
  let sources = initTree.sources;
1978
- if (patchAnalysis) {
1979
- const tree = await this.getTree(patchAnalysis);
2005
+ if (parentRef.type !== "head") {
2006
+ // There's room for some optimizations here: we could do this once, then re-use every time we create a patch, then again we only create one patch at a time
2007
+ const patchOps = await this.fetchPatches({
2008
+ omitPatch: false
2009
+ });
2010
+ const patchAnalysis = this.analyzePatches(patchOps.patches);
2011
+ const tree = await this.getSources({
2012
+ ...patchAnalysis,
2013
+ ...patchOps
2014
+ });
1980
2015
  sources = {
1981
2016
  ...sources,
1982
2017
  ...tree.sources
@@ -1987,27 +2022,30 @@ class ValOps {
1987
2022
  const moduleError = moduleErrors.find(e => e.path === path);
1988
2023
  if (moduleError) {
1989
2024
  console.error(`Cannot patch. Module at path: '${path}' has fatal errors: "${moduleError.message}"`);
1990
- return {
2025
+ return result.err({
2026
+ errorType: "other",
1991
2027
  error: {
1992
2028
  message: `Cannot patch. Module at path: '${path}' has fatal errors: ` + moduleErrors.map(m => `"${m.message}"`).join(" and ")
1993
2029
  }
1994
- };
2030
+ });
1995
2031
  }
1996
2032
  if (!source) {
1997
2033
  console.error(`Cannot patch. Module source at path: '${path}' does not exist`);
1998
- return {
2034
+ return result.err({
2035
+ errorType: "other",
1999
2036
  error: {
2000
2037
  message: `Cannot patch. Module source at path: '${path}' does not exist`
2001
2038
  }
2002
- };
2039
+ });
2003
2040
  }
2004
2041
  if (!schema) {
2005
2042
  console.error(`Cannot patch. Module schema at path: '${path}' does not exist`);
2006
- return {
2043
+ return result.err({
2044
+ errorType: "other",
2007
2045
  error: {
2008
2046
  message: `Cannot patch. Module schema at path: '${path}' does not exist`
2009
2047
  }
2010
- };
2048
+ });
2011
2049
  }
2012
2050
  const sourceFileOps = [];
2013
2051
  const files = {};
@@ -2046,14 +2084,20 @@ class ValOps {
2046
2084
  }
2047
2085
  }
2048
2086
  }
2049
- const saveRes = await this.saveSourceFilePatch(path, sourceFileOps, authorId);
2050
- if (saveRes.error) {
2051
- console.error(`Could not save source file patch at path: '${path}'. Error: ${saveRes.error.message}`);
2052
- return {
2087
+ const saveRes = await this.saveSourceFilePatch(path, patch, parentRef, authorId);
2088
+ if (result.isErr(saveRes)) {
2089
+ console.error(`Could not save source patch at path: '${path}'. Error: ${saveRes.error.errorType === "other" ? saveRes.error.message : saveRes.error.errorType}`);
2090
+ if (saveRes.error.errorType === "patch-head-conflict") {
2091
+ return result.err({
2092
+ errorType: "patch-head-conflict"
2093
+ });
2094
+ }
2095
+ return result.err({
2096
+ errorType: "other",
2053
2097
  error: saveRes.error
2054
- };
2098
+ });
2055
2099
  }
2056
- const patchId = saveRes.patchId;
2100
+ const patchId = saveRes.value.patchId;
2057
2101
  const saveFileRes = await Promise.all(Object.entries(files).map(async ([filePath, data]) => {
2058
2102
  if (data.error) {
2059
2103
  return {
@@ -2118,7 +2162,7 @@ class ValOps {
2118
2162
  const MaxRetries = 3;
2119
2163
  let lastRes;
2120
2164
  for (let i = 0; i < MaxRetries; i++) {
2121
- lastRes = await this.saveBase64EncodedBinaryFileFromPatch(filePath, patchId, data.value, type, metadataOps.metadata);
2165
+ lastRes = await this.saveBase64EncodedBinaryFileFromPatch(filePath, parentRef, patchId, data.value, type, metadataOps.metadata);
2122
2166
  if (!lastRes.error) {
2123
2167
  return {
2124
2168
  filePath
@@ -2133,17 +2177,18 @@ class ValOps {
2133
2177
  }));
2134
2178
  const errors = saveFileRes.filter(f => !!f.error);
2135
2179
  if (errors.length > 0) {
2136
- return {
2180
+ return result.err({
2181
+ errorType: "other",
2137
2182
  error: {
2138
2183
  message: "Could not save patch: " + errors.map(e => e.error.message).join(", ")
2139
2184
  }
2140
- };
2185
+ });
2141
2186
  }
2142
- return {
2187
+ return result.ok({
2143
2188
  patchId,
2144
2189
  files: saveFileRes,
2145
2190
  createdAt: new Date().toISOString()
2146
- };
2191
+ });
2147
2192
  }
2148
2193
 
2149
2194
  // #region abstract ops
@@ -2240,81 +2285,51 @@ function bufferFromDataUrl(dataUrl) {
2240
2285
  }
2241
2286
  }
2242
2287
 
2243
- const JSONValueT = z$1.lazy(() => z$1.union([z$1.string(), z$1.number(), z$1.boolean(), z$1.null(), z$1.array(JSONValueT), z$1.record(JSONValueT)]));
2244
-
2245
- /**
2246
- * Raw JSON patch operation.
2247
- */
2248
- const OperationJSONT = z$1.discriminatedUnion("op", [z$1.object({
2249
- op: z$1.literal("add"),
2250
- path: z$1.string(),
2251
- value: JSONValueT
2252
- }).strict(), z$1.object({
2253
- op: z$1.literal("remove"),
2254
- /**
2255
- * Must be non-root
2256
- */
2257
- path: z$1.string()
2258
- }).strict(), z$1.object({
2259
- op: z$1.literal("replace"),
2260
- path: z$1.string(),
2261
- value: JSONValueT
2262
- }).strict(), z$1.object({
2263
- op: z$1.literal("move"),
2264
- /**
2265
- * Must be non-root and not a proper prefix of "path".
2266
- */
2267
- from: z$1.string(),
2268
- path: z$1.string()
2269
- }).strict(), z$1.object({
2270
- op: z$1.literal("copy"),
2271
- from: z$1.string(),
2272
- path: z$1.string()
2273
- }).strict(), z$1.object({
2274
- op: z$1.literal("test"),
2275
- path: z$1.string(),
2276
- value: JSONValueT
2277
- }).strict(), z$1.object({
2278
- op: z$1.literal("file"),
2279
- path: z$1.string(),
2280
- filePath: z$1.string(),
2281
- value: z$1.string()
2282
- }).strict()]);
2283
- const PatchJSON = z$1.array(OperationJSONT);
2284
2288
  /**
2285
- * Raw JSON patch operation.
2289
+ * Computes the changed patch parent references based on the current patches and the patch IDs to be deleted.
2290
+ *
2291
+ * NOTE: patches that will be deleted are not included in the changed patches, since they will be deleted any how.
2292
+ *
2293
+ * @param currentPatches - The array of current patches.
2294
+ * @param deletePatchIds - The array of patch IDs to be deleted.
2295
+ * @returns An object containing the changed patches with their corresponding parent references.
2286
2296
  */
2287
- const OperationT = z$1.discriminatedUnion("op", [z$1.object({
2288
- op: z$1.literal("add"),
2289
- path: z$1.array(z$1.string()),
2290
- value: JSONValueT
2291
- }).strict(), z$1.object({
2292
- op: z$1.literal("remove"),
2293
- path: z$1.array(z$1.string()).nonempty()
2294
- }).strict(), z$1.object({
2295
- op: z$1.literal("replace"),
2296
- path: z$1.array(z$1.string()),
2297
- value: JSONValueT
2298
- }).strict(), z$1.object({
2299
- op: z$1.literal("move"),
2300
- from: z$1.array(z$1.string()).nonempty(),
2301
- path: z$1.array(z$1.string())
2302
- }).strict(), z$1.object({
2303
- op: z$1.literal("copy"),
2304
- from: z$1.array(z$1.string()),
2305
- path: z$1.array(z$1.string())
2306
- }).strict(), z$1.object({
2307
- op: z$1.literal("test"),
2308
- path: z$1.array(z$1.string()),
2309
- value: JSONValueT
2310
- }).strict(), z$1.object({
2311
- op: z$1.literal("file"),
2312
- path: z$1.array(z$1.string()),
2313
- filePath: z$1.string(),
2314
- nestedFilePath: z$1.array(z$1.string()).optional(),
2315
- value: z$1.string()
2316
- }).strict()]);
2317
- const Patch = z$1.array(OperationT);
2297
+ function computeChangedPatchParentRefs(currentPatches, deletePatchIds) {
2298
+ let lastNonDeletedPatchIndex = -1;
2299
+ const changedPatches = {};
2300
+ for (let i = 0; i < currentPatches.length; i++) {
2301
+ const current = currentPatches[i];
2302
+ if (
2303
+ // skip all patches that will be deleted:
2304
+ deletePatchIds.includes(current.patchId)) {
2305
+ var _currentPatches;
2306
+ if (
2307
+ // skip change if the patch after is deleted anyway:
2308
+ !deletePatchIds.includes((_currentPatches = currentPatches[i + 1]) === null || _currentPatches === void 0 ? void 0 : _currentPatches.patchId)) {
2309
+ if (
2310
+ // set next patch to point to head if it exists:
2311
+ lastNonDeletedPatchIndex === -1 && currentPatches[i + 1]) {
2312
+ changedPatches[currentPatches[i + 1].patchId] = {
2313
+ type: "head",
2314
+ headBaseSha: current.baseSha
2315
+ };
2316
+ } else if (
2317
+ // set next patch to point to the last non-deleted patch:
2318
+ currentPatches[lastNonDeletedPatchIndex] && currentPatches[i + 1]) {
2319
+ changedPatches[currentPatches[i + 1].patchId] = {
2320
+ type: "patch",
2321
+ patchId: currentPatches[lastNonDeletedPatchIndex].patchId
2322
+ };
2323
+ }
2324
+ }
2325
+ } else {
2326
+ lastNonDeletedPatchIndex = i;
2327
+ }
2328
+ }
2329
+ return {
2330
+ changedPatches
2331
+ };
2332
+ }
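In effect, deleting patches from the middle of the chain re-points the first surviving successor. A worked example with invented ids, assuming a current chain head -> p1 -> p2 -> p3:

// computeChangedPatchParentRefs(currentPatches, ["p2"])
//   -> { changedPatches: { p3: { type: "patch", patchId: "p1" } } }
// computeChangedPatchParentRefs(currentPatches, ["p1"])
//   -> { changedPatches: { p2: { type: "head", headBaseSha: p1.baseSha } } }
// computeChangedPatchParentRefs(currentPatches, ["p2", "p3"])
//   -> { changedPatches: {} }  // p3 is deleted anyway, so nothing needs re-pointing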
2318
2333
 
2319
2334
  class ValOpsFS extends ValOps {
2320
2335
  static VAL_DIR = ".val";
@@ -2488,32 +2503,31 @@ class ValOpsFS extends ValOps {
2488
2503
  patchJsonFiles = this.host.readDirectory(patchesCacheDir, ["patch.json"], [], []);
2489
2504
  }
2490
2505
  const patches = {};
2491
- const errors = {};
2492
- const parsedPatchIds = patchJsonFiles.map(file => parseInt(fsPath__default.basename(fsPath__default.dirname(file)), 10)).sort();
2493
- for (const patchIdNum of parsedPatchIds) {
2494
- if (Number.isNaN(patchIdNum)) {
2495
- throw new Error("Could not parse patch id from file name. Files found: " + patchJsonFiles.join(", "));
2496
- }
2497
- const patchId = patchIdNum.toString();
2498
- if (includes && includes.length > 0 && !includes.includes(patchId)) {
2499
- continue;
2500
- }
2501
- const parsedFSPatchRes = this.parseJsonFile(this.getPatchFilePath(patchId), FSPatch);
2502
- let parsedFSPatchBaseRes = undefined;
2503
- if (this.host.fileExists(this.getPatchBaseFile(patchId))) {
2504
- parsedFSPatchBaseRes = this.parseJsonFile(this.getPatchBaseFile(patchId), FSPatchBase);
2505
- }
2506
- if (parsedFSPatchRes.error) {
2507
- errors[patchId] = parsedFSPatchRes.error;
2508
- } else if (parsedFSPatchBaseRes && parsedFSPatchBaseRes.error) {
2509
- errors[patchId] = parsedFSPatchBaseRes.error;
2506
+ const errors = [];
2507
+ const parsedUnsortedFsPatches = patchJsonFiles.map(file => fsPath__default.basename(fsPath__default.dirname(file))).map(patchDir => [patchDir, this.parseJsonFile(this.getPatchFilePath(patchDir), FSPatch), this.host.fileExists(this.getPatchBaseFile(patchDir)) ? this.parseJsonFile(this.getPatchBaseFile(patchDir), FSPatchBase) : undefined]);
2508
+ parsedUnsortedFsPatches.forEach(([dir, parsedPatch, parsedBase]) => {
2509
+ if (parsedPatch.error) {
2510
+ errors.push({
2511
+ ...parsedPatch.error,
2512
+ parentPatchId: dir
2513
+ });
2514
+ } else if (parsedBase && parsedBase.error) {
2515
+ errors.push({
2516
+ ...parsedBase.error,
2517
+ parentPatchId: dir
2518
+ });
2510
2519
  } else {
2511
- patches[patchId] = {
2512
- ...parsedFSPatchRes.data,
2513
- appliedAt: parsedFSPatchBaseRes ? parsedFSPatchBaseRes.data : null
2520
+ if (includes && includes.length > 0 && !includes.includes(parsedPatch.data.patchId)) {
2521
+ return;
2522
+ }
2523
+ patches[parsedPatch.data.patchId] = {
2524
+ ...parsedPatch.data,
2525
+ appliedAt: parsedBase ? parsedBase.data : null
2514
2526
  };
2515
2527
  }
2516
- }
2528
+ });
2529
+
2530
+ // If there are patches, but no head. error
2517
2531
  if (Object.keys(errors).length > 0) {
2518
2532
  return {
2519
2533
  patches,
@@ -2524,36 +2538,52 @@ class ValOpsFS extends ValOps {
2524
2538
  patches
2525
2539
  };
2526
2540
  }
2541
+ getParentPatchIdFromParentRef(parentRef) {
2542
+ return parentRef.type === "head" ? "head" : parentRef.patchId;
2543
+ }
2527
2544
  async fetchPatches(filters) {
2545
+ const fetchPatchesRes = await this.fetchPatchesFromFS(!!filters.omitPatch);
2546
+ const sortedPatches = this.createPatchChain(fetchPatchesRes.patches).filter(patchData => {
2547
+ if (filters.authors && !(patchData.authorId === null || filters.authors.includes(patchData.authorId))) {
2548
+ return false;
2549
+ }
2550
+ if (filters.moduleFilePaths && !filters.moduleFilePaths.includes(patchData.path)) {
2551
+ return false;
2552
+ }
2553
+ return true;
2554
+ }).map(patchData => {
2555
+ if (filters.omitPatch) {
2556
+ return {
2557
+ ...patchData,
2558
+ patch: undefined
2559
+ };
2560
+ }
2561
+ return patchData;
2562
+ });
2563
+ return {
2564
+ patches: sortedPatches,
2565
+ errors: fetchPatchesRes.errors
2566
+ };
2567
+ }
2568
+ async fetchPatchesFromFS(omitPath) {
2528
2569
  const patches = {};
2529
- const errors = {};
2530
2570
  const {
2531
- errors: allErrors,
2571
+ errors,
2532
2572
  patches: allPatches
2533
- } = await this.readPatches(filters.patchIds);
2534
- if (allErrors && Object.keys(allErrors).length > 0) {
2535
- for (const [patchId, error] of Object.entries(allErrors)) {
2536
- console.error("Error reading patch", patchId, error);
2537
- errors[patchId] = error;
2538
- }
2539
- }
2573
+ } = await this.readPatches();
2540
2574
  for (const [patchIdS, patch] of Object.entries(allPatches)) {
2541
2575
  const patchId = patchIdS;
2542
- if (filters.authors && !(patch.authorId === null || filters.authors.includes(patch.authorId))) {
2543
- continue;
2544
- }
2545
- if (filters.moduleFilePaths && !filters.moduleFilePaths.includes(patch.path)) {
2546
- continue;
2547
- }
2548
2576
  patches[patchId] = {
2549
- patch: filters.omitPatch ? undefined : patch.patch,
2577
+ patch: omitPath ? undefined : patch.patch,
2578
+ parentRef: patch.parentRef,
2550
2579
  path: patch.path,
2580
+ baseSha: patch.baseSha,
2551
2581
  createdAt: patch.createdAt,
2552
2582
  authorId: patch.authorId,
2553
2583
  appliedAt: patch.appliedAt
2554
2584
  };
2555
2585
  }
2556
- if (errors && Object.keys(errors).length > 0) {
2586
+ if (errors && errors.length > 0) {
2557
2587
  return {
2558
2588
  patches,
2559
2589
  errors
@@ -2564,6 +2594,33 @@ class ValOpsFS extends ValOps {
2564
2594
  };
2565
2595
  }
2566
2596
 
2597
+ // #region createPatchChain
2598
+ createPatchChain(unsortedPatchRecord) {
2599
+ var _Object$entries$find;
2600
+ // TODO: Error handling
2601
+ const nextPatch = {};
2602
+ Object.keys(unsortedPatchRecord).forEach(patchId => {
2603
+ const patch = unsortedPatchRecord[patchId];
2604
+ if (patch.parentRef.type === "head") {
2605
+ nextPatch["head"] = patchId;
2606
+ } else {
2607
+ nextPatch[patch.parentRef.patchId] = patchId;
2608
+ }
2609
+ });
2610
+ const sortedPatches = [];
2611
+ let nextPatchId = (_Object$entries$find = Object.entries(unsortedPatchRecord).find(([, patch]) => patch.parentRef.type === "head")) === null || _Object$entries$find === void 0 ? void 0 : _Object$entries$find[0];
2612
+ while (!!nextPatchId && nextPatchId in unsortedPatchRecord) {
2613
+ const patch = unsortedPatchRecord[nextPatchId];
2614
+ delete patch["parentRef"];
2615
+ sortedPatches.push({
2616
+ ...patch,
2617
+ patchId: nextPatchId
2618
+ });
2619
+ nextPatchId = nextPatch[nextPatchId];
2620
+ }
2621
+ return sortedPatches;
2622
+ }
2623
+
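createPatchChain recovers creation order by following parentRef links, starting from the patch whose parent is "head". A sketched input/output with invented ids:

// unsortedPatchRecord = {
//   p2: { parentRef: { type: "patch", patchId: "p1" }, path: "/content.val.ts", ... },
//   p1: { parentRef: { type: "head", ... }, path: "/content.val.ts", ... },
//   p3: { parentRef: { type: "patch", patchId: "p2" }, path: "/content.val.ts", ... }
// }
// createPatchChain(unsortedPatchRecord)
//   -> [ { patchId: "p1", ... }, { patchId: "p2", ... }, { patchId: "p3", ... } ]
// (other fields are kept; parentRef is deleted from each entry while walking the chain)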
2567
2624
  // eslint-disable-next-line @typescript-eslint/no-explicit-any
2568
2625
  parseJsonFile(filePath, parser) {
2569
2626
  if (!this.host.fileExists(filePath)) {
@@ -2638,38 +2695,47 @@ class ValOpsFS extends ValOps {
2638
2695
  };
2639
2696
  }
2640
2697
  }
2641
- async saveSourceFilePatch(path, patch, authorId) {
2642
- let fileId = Date.now();
2698
+ async saveSourceFilePatch(path, patch, parentRef, authorId) {
2699
+ const patchDir = this.getParentPatchIdFromParentRef(parentRef);
2643
2700
  try {
2644
- while (this.host.fileExists(this.getPatchFilePath(fileId.toString()))) {
2645
- // ensure unique file / patch id
2646
- fileId++;
2647
- }
2648
- const patchId = fileId.toString();
2701
+ const baseSha = await this.getBaseSha();
2702
+ const patchId = crypto.randomUUID();
2649
2703
  const data = {
2650
2704
  patch,
2705
+ patchId,
2706
+ parentRef,
2651
2707
  path,
2652
2708
  authorId,
2709
+ baseSha,
2653
2710
  coreVersion: Internal.VERSION.core,
2654
2711
  createdAt: new Date().toISOString()
2655
2712
  };
2656
- this.host.writeUf8File(this.getPatchFilePath(patchId), JSON.stringify(data));
2657
- return {
2713
+ const writeRes = this.host.tryWriteUf8File(this.getPatchFilePath(patchDir), JSON.stringify(data));
2714
+ if (writeRes.type === "error") {
2715
+ return writeRes.errorType === "dir-already-exists" ? result.err({
2716
+ errorType: "patch-head-conflict"
2717
+ }) : result.err({
2718
+ errorType: "other",
2719
+ error: writeRes.error,
2720
+ message: "Failed to write patch file"
2721
+ });
2722
+ }
2723
+ return result.ok({
2658
2724
  patchId
2659
- };
2725
+ });
2660
2726
  } catch (err) {
2661
2727
  if (err instanceof Error) {
2662
- return {
2663
- error: {
2664
- message: err.message
2665
- }
2666
- };
2728
+ return result.err({
2729
+ errorType: "other",
2730
+ error: err,
2731
+ message: err.message
2732
+ });
2667
2733
  }
2668
- return {
2669
- error: {
2670
- message: "Unknown error"
2671
- }
2672
- };
2734
+ return result.err({
2735
+ errorType: "other",
2736
+ error: err,
2737
+ message: "Unknown error"
2738
+ });
2673
2739
  }
2674
2740
  }
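On disk, the directory a patch lives in is now named after its parent rather than the patch itself (see getParentPatchIdFromParentRef above), which is what makes concurrent writes against the same parent collide. A sketch of the resulting layout (paths illustrative):

// .val/patches/head/patch.json                        first patch in the chain (parentRef.type === "head")
// .val/patches/<patchId of that patch>/patch.json     the patch built on top of it
// .val/patches/<parent patch id>/files/<filePath>/... binary files saved alongside a patch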
2675
2741
  async getSourceFile(path) {
@@ -2707,9 +2773,10 @@ class ValOpsFS extends ValOps {
2707
2773
  };
2708
2774
  }
2709
2775
  }
2710
- async saveBase64EncodedBinaryFileFromPatch(filePath, patchId, data, _type, metadata) {
2711
- const patchFilePath = this.getBinaryFilePath(filePath, patchId);
2712
- const metadataFilePath = this.getBinaryFileMetadataPath(filePath, patchId);
2776
+ async saveBase64EncodedBinaryFileFromPatch(filePath, parentRef, patchId, data, _type, metadata) {
2777
+ const patchDir = this.getParentPatchIdFromParentRef(parentRef);
2778
+ const patchFilePath = this.getBinaryFilePath(filePath, patchDir);
2779
+ const metadataFilePath = this.getBinaryFileMetadataPath(filePath, patchDir);
2713
2780
  try {
2714
2781
  const buffer = bufferFromDataUrl(data);
2715
2782
  if (!buffer) {
@@ -2741,7 +2808,15 @@ class ValOpsFS extends ValOps {
2741
2808
  }
2742
2809
  }
2743
2810
  async getBase64EncodedBinaryFileMetadataFromPatch(filePath, type, patchId) {
2744
- const metadataFilePath = this.getBinaryFileMetadataPath(filePath, patchId);
2811
+ const patchDirRes = await this.getParentPatchIdFromPatchId(patchId);
2812
+ if (result.isErr(patchDirRes)) {
2813
+ return {
2814
+ errors: [{
2815
+ message: "Failed to get patch dir from patch id"
2816
+ }]
2817
+ };
2818
+ }
2819
+ const metadataFilePath = this.getBinaryFileMetadataPath(filePath, patchDirRes.value);
2745
2820
  if (!this.host.fileExists(metadataFilePath)) {
2746
2821
  return {
2747
2822
  errors: [{
@@ -2777,7 +2852,11 @@ class ValOpsFS extends ValOps {
2777
2852
  };
2778
2853
  }
2779
2854
  async getBase64EncodedBinaryFileFromPatch(filePath, patchId) {
2780
- const absPath = this.getBinaryFilePath(filePath, patchId);
2855
+ const patchDirRes = await this.getParentPatchIdFromPatchId(patchId);
2856
+ if (!result.isOk(patchDirRes)) {
2857
+ return null;
2858
+ }
2859
+ const absPath = this.getBinaryFilePath(filePath, patchDirRes.value);
2781
2860
  if (!this.host.fileExists(absPath)) {
2782
2861
  return null;
2783
2862
  }
@@ -2785,30 +2864,62 @@ class ValOpsFS extends ValOps {
2785
2864
  }
2786
2865
  async deletePatches(patchIds) {
2787
2866
  const deleted = [];
2788
- let errors = null;
2789
- for (const patchId of patchIds) {
2790
- try {
2791
- this.host.deleteDir(this.getPatchDir(patchId));
2792
- deleted.push(patchId);
2793
- } catch (err) {
2794
- if (!errors) {
2795
- errors = {};
2796
- }
2797
- errors[patchId] = {
2798
- message: err instanceof Error ? err.message : "Unknown error"
2799
- };
2800
- }
2801
- }
2802
- if (errors) {
2867
+ const patchDirMapRes = await this.getParentPatchIdFromPatchIdMap();
2868
+ if (result.isErr(patchDirMapRes)) {
2803
2869
  return {
2804
- deleted,
2805
- errors
2870
+ error: {
2871
+ message: "Failed to get patch dir map"
2872
+ }
2806
2873
  };
2807
2874
  }
2875
+ const currentPatches = this.createPatchChain((await this.fetchPatchesFromFS(false)).patches);
2876
+ this.updateOrderedPatches(computeChangedPatchParentRefs(currentPatches, patchIds), patchDirMapRes.value, patchIds);
2808
2877
  return {
2809
2878
  deleted
2810
2879
  };
2811
2880
  }
2881
+ updateOrderedPatches(updates, patchDirMap, deletePatchIds) {
2882
+ for (const patchId of deletePatchIds) {
2883
+ const patchDir = patchDirMap[patchId];
2884
+ if (!patchDir) {
2885
+ console.error("Could not find patch dir for patch id scheduled for deletion: ", patchId);
2886
+ continue;
2887
+ }
2888
+ try {
2889
+ this.host.deleteDir(this.getFullPatchDir(patchDir));
2890
+ } catch (err) {
2891
+ console.error("Failed to delete patch dir", err);
2892
+ }
2893
+ }
2894
+ for (const [patchIdS, parentRef] of Object.entries(updates.changedPatches)) {
2895
+ const prevParentPatchId = patchDirMap[patchIdS];
2896
+ if (!prevParentPatchId) {
2897
+ console.error("Could not find previous parent patch id for deleted patch id: ", patchIdS);
2898
+ continue;
2899
+ }
2900
+ const newParentPatchId = parentRef.type === "head" ? "head" : parentRef.patchId;
2901
+ const currentPatchDataRes = this.parseJsonFile(this.getPatchFilePath(prevParentPatchId), FSPatch);
2902
+ if (currentPatchDataRes.error) {
2903
+ console.error("Failed to parse patch file while fixing patch chain after deleted patch", {
2904
+ updates
2905
+ }, currentPatchDataRes.error);
2906
+ continue;
2907
+ }
2908
+ const newPatchData = currentPatchDataRes.data;
2909
+ newPatchData.parentRef = parentRef;
2910
+ try {
2911
+ this.host.writeUf8File(this.getPatchFilePath(prevParentPatchId), JSON.stringify(newPatchData));
2912
+ if (this.host.directoryExists(this.getFullPatchDir(newParentPatchId))) {
2913
+ this.host.deleteDir(this.getFullPatchDir(newParentPatchId));
2914
+ }
2915
+ this.host.moveDir(this.getFullPatchDir(prevParentPatchId), this.getFullPatchDir(newParentPatchId));
2916
+ } catch (err) {
2917
+ console.error("Failed fix patch chain after deleted patch", {
2918
+ updates
2919
+ }, err);
2920
+ }
2921
+ }
2922
+ }
2812
2923
  async saveFiles(preparedCommit) {
2813
2924
  const updatedFiles = [];
2814
2925
  const errors = {};
@@ -2824,12 +2935,28 @@ class ValOpsFS extends ValOps {
2824
2935
  };
2825
2936
  }
2826
2937
  }
2938
+ const patchIdToPatchDirMapRes = await this.getParentPatchIdFromPatchIdMap();
2939
+ if (result.isErr(patchIdToPatchDirMapRes)) {
2940
+ return {
2941
+ updatedFiles,
2942
+ errors
2943
+ };
2944
+ }
2945
+ const patchIdToPatchDirMap = patchIdToPatchDirMapRes.value;
2827
2946
  for (const [filePath, {
2828
2947
  patchId
2829
2948
  }] of Object.entries(preparedCommit.patchedBinaryFilesDescriptors)) {
2830
2949
  const absPath = fsPath__default.join(this.rootDir, ...filePath.split("/"));
2831
2950
  try {
2832
- this.host.copyFile(this.getBinaryFilePath(filePath, patchId), absPath);
2951
+ const patchDir = patchIdToPatchDirMap[patchId];
2952
+ if (!patchDir) {
2953
+ errors[absPath] = {
2954
+ message: "Failed to find PatchDir for PatchId " + patchId,
2955
+ filePath
2956
+ };
2957
+ continue;
2958
+ }
2959
+ this.host.copyFile(this.getBinaryFilePath(filePath, patchDir), absPath);
2833
2960
  updatedFiles.push(absPath);
2834
2961
  } catch (err) {
2835
2962
  errors[absPath] = {
@@ -2843,7 +2970,14 @@ class ValOpsFS extends ValOps {
2843
2970
  baseSha: await this.getBaseSha(),
2844
2971
  timestamp: new Date().toISOString()
2845
2972
  };
2846
- const absPath = this.getPatchBaseFile(patchId);
2973
+ const patchDir = patchIdToPatchDirMap[patchId];
2974
+ if (!patchDir) {
2975
+ errors[`patchId:${patchId}`] = {
2976
+ message: "Failed to find PatchDir for PatchId " + patchId
2977
+ };
2978
+ continue;
2979
+ }
2980
+ const absPath = this.getPatchBaseFile(patchDir);
2847
2981
  try {
2848
2982
  this.host.writeUf8File(absPath, JSON.stringify(appliedAt));
2849
2983
  } catch (err) {
@@ -2887,25 +3021,52 @@ class ValOpsFS extends ValOps {
2887
3021
  }
2888
3022
  return createMetadataFromBuffer(type, mimeType, buffer);
2889
3023
  }
3024
+ async getParentPatchIdFromPatchId(patchId) {
3025
+ // This is not great. If needed we should find a better way
3026
+ const patches = await this.readPatches();
3027
+ if (patches.errors || patches.error) {
3028
+ console.error("Failed to read patches", JSON.stringify(patches));
3029
+ return result.err("failed-to-read-patches");
3030
+ }
3031
+ const patch = patches.patches[patchId];
3032
+ if (!patch) {
3033
+ console.error("Could not find patch with patchId: ", patchId);
3034
+ return result.err("patch-not-found");
3035
+ }
3036
+ return result.ok(this.getParentPatchIdFromParentRef(patch.parentRef));
3037
+ }
3038
+ async getParentPatchIdFromPatchIdMap() {
3039
+ const patches = await this.readPatches();
3040
+ if (patches.errors || patches.error) {
3041
+ console.error("Failed to read patches", JSON.stringify(patches));
3042
+ return result.err("failed-to-read-patches");
3043
+ }
3044
+ return result.ok(Object.fromEntries(Object.entries(patches.patches).map(([patchId, value]) => [patchId, this.getParentPatchIdFromParentRef(value.parentRef)])));
3045
+ }
3046
+
3047
+ // #region profiles
3048
+ async getProfiles() {
3049
+ throw new Error("Configuration error: cannot get profiles in local / development file system mode");
3050
+ }
2890
3051
 
2891
3052
  // #region fs file path helpers
2892
3053
  getPatchesDir() {
2893
3054
  return fsPath__default.join(this.rootDir, ValOpsFS.VAL_DIR, "patches");
2894
3055
  }
2895
- getPatchDir(patchId) {
2896
- return fsPath__default.join(this.getPatchesDir(), patchId);
3056
+ getFullPatchDir(patchDir) {
3057
+ return fsPath__default.join(this.getPatchesDir(), patchDir);
2897
3058
  }
2898
- getBinaryFilePath(filePath, patchId) {
2899
- return fsPath__default.join(this.getPatchDir(patchId), "files", filePath, fsPath__default.basename(filePath));
3059
+ getBinaryFilePath(filePath, patchDir) {
3060
+ return fsPath__default.join(this.getFullPatchDir(patchDir), "files", filePath, fsPath__default.basename(filePath));
2900
3061
  }
2901
- getBinaryFileMetadataPath(filePath, patchId) {
2902
- return fsPath__default.join(this.getPatchDir(patchId), "files", filePath, "metadata.json");
3062
+ getBinaryFileMetadataPath(filePath, patchDir) {
3063
+ return fsPath__default.join(this.getFullPatchDir(patchDir), "files", filePath, "metadata.json");
2903
3064
  }
2904
- getPatchFilePath(patchId) {
2905
- return fsPath__default.join(this.getPatchDir(patchId), "patch.json");
3065
+ getPatchFilePath(patchDir) {
3066
+ return fsPath__default.join(this.getFullPatchDir(patchDir), "patch.json");
2906
3067
  }
2907
- getPatchBaseFile(patchId) {
2908
- return fsPath__default.join(this.getPatchDir(patchId), "base.json");
3068
+ getPatchBaseFile(patchDir) {
3069
+ return fsPath__default.join(this.getFullPatchDir(patchDir), "base.json");
2909
3070
  }
2910
3071
  }
2911
3072
  class FSOpsHost {
@@ -2919,6 +3080,9 @@ class FSOpsHost {
2919
3080
  });
2920
3081
  }
2921
3082
  }
3083
+ moveDir(from, to) {
3084
+ fs.renameSync(from, to);
3085
+ }
2922
3086
  directoryExists(path) {
2923
3087
  return ts.sys.directoryExists(path);
2924
3088
  }
@@ -2940,6 +3104,37 @@ class FSOpsHost {
2940
3104
  });
2941
3105
  fs.writeFileSync(path, data, "utf-8");
2942
3106
  }
3107
+ tryWriteUf8File(path, data) {
3108
+ try {
3109
+ const parentDir = fsPath__default.join(fsPath__default.dirname(path), "../");
3110
+ fs.mkdirSync(parentDir, {
3111
+ recursive: true
3112
+ });
3113
+ // Make the parent dir separately. This is because we need mkdir to throw
3114
+ // if the directory already exists. If we use recursive: true, it doesn't
3115
+ fs.mkdirSync(fsPath__default.dirname(path), {
3116
+ recursive: false
3117
+ });
3118
+ } catch (e) {
3119
+ return {
3120
+ type: "error",
3121
+ errorType: "dir-already-exists",
3122
+ error: e
3123
+ };
3124
+ }
3125
+ try {
3126
+ fs.writeFileSync(path, data, "utf-8");
3127
+ } catch (e) {
3128
+ return {
3129
+ type: "error",
3130
+ errorType: "failed-to-write-file",
3131
+ error: e
3132
+ };
3133
+ }
3134
+ return {
3135
+ type: "success"
3136
+ };
3137
+ }
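tryWriteUf8File is what turns two writers racing to create a patch on the same parent into a conflict: the parent of the target directory is created with recursive: true, but the final directory is created without recursive so that mkdirSync throws if it already exists, and saveSourceFilePatch maps that dir-already-exists error to patch-head-conflict. Roughly (path invented):

// fs.mkdirSync(".val/patches", { recursive: true });  // fine whether or not it exists
// fs.mkdirSync(".val/patches/head");                  // throws if another patch already claimed "head"
// fs.writeFileSync(".val/patches/head/patch.json", data, "utf-8");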
2943
3138
  writeBinaryFile(path, data) {
2944
3139
  fs.mkdirSync(fsPath__default.dirname(path), {
2945
3140
  recursive: true
@@ -2956,6 +3151,9 @@ class FSOpsHost {
2956
3151
  const FSPatch = z.object({
2957
3152
  path: z.string().refine(p => p.startsWith("/") && p.includes(".val."), "Path is not valid. Must start with '/' and include '.val.'"),
2958
3153
  patch: Patch,
3154
+ patchId: z.string(),
3155
+ baseSha: z.string(),
3156
+ parentRef: ParentRef,
2959
3157
  authorId: z.string().refine(p => true).nullable(),
2960
3158
  createdAt: z.string().datetime(),
2961
3159
  coreVersion: z.string().nullable() // TODO: use this to check if patch is compatible with current core version?
@@ -2968,7 +3166,7 @@ const FSPatchBase = z.object({
2968
3166
  const textEncoder = new TextEncoder();
2969
3167
  const PatchId = z.string().refine(s => !!s); // TODO: validate
2970
3168
  const CommitSha = z.string().refine(s => !!s); // TODO: validate
2971
- const BaseSha = z.string().refine(s => !!s); // TODO: validate
3169
+ z.string().refine(s => !!s); // TODO: validate
2972
3170
  const AuthorId = z.string().refine(s => !!s); // TODO: validate
2973
3171
  const ModuleFilePath = z.string().refine(s => !!s); // TODO: validate
2974
3172
  const Metadata = z.union([z.object({
@@ -2988,11 +3186,7 @@ const BasePatchResponse = z.object({
2988
3186
  patchId: PatchId,
2989
3187
  authorId: AuthorId.nullable(),
2990
3188
  createdAt: z.string().datetime(),
2991
- applied: z.object({
2992
- baseSha: BaseSha,
2993
- commitSha: CommitSha,
2994
- appliedAt: z.string().datetime()
2995
- }).nullable()
3189
+ baseSha: z.string()
2996
3190
  });
2997
3191
  const GetPatches = z.object({
2998
3192
  patches: z.array(z.intersection(z.object({
@@ -3037,7 +3231,7 @@ const DeletePatchesResponse = z.object({
3037
3231
  patchId: PatchId
3038
3232
  })).optional()
3039
3233
  });
3040
- const SavePatchFileResponse = z.object({
3234
+ z.object({
3041
3235
  patchId: PatchId,
3042
3236
  filePath: ModuleFilePath
3043
3237
  });
@@ -3046,6 +3240,15 @@ const CommitResponse = z.object({
3046
3240
  commit: CommitSha,
3047
3241
  branch: z.string()
3048
3242
  });
3243
+ const ProfilesResponse = z.object({
3244
+ profiles: z.array(z.object({
3245
+ profileId: z.string(),
3246
+ fullName: z.string(),
3247
+ avatar: z.object({
3248
+ url: z.string()
3249
+ }).nullable()
3250
+ }))
3251
+ });
3049
3252
  class ValOpsHttp extends ValOps {
3050
3253
  constructor(hostUrl, project, commitSha,
3051
3254
  // TODO: CommitSha
@@ -3074,18 +3277,35 @@ class ValOpsHttp extends ValOps {
3074
3277
  }
3075
3278
  const currentBaseSha = await this.getBaseSha();
3076
3279
  const currentSchemaSha = await this.getSchemaSha();
3077
- const patchData = await this.fetchPatches({
3280
+ const allPatchData = await this.fetchPatches({
3078
3281
  omitPatch: true,
3079
3282
  authors: undefined,
3080
3283
  patchIds: undefined,
3081
3284
  moduleFilePaths: undefined
3082
3285
  });
3286
+ // We think these errors will be picked up else where (?), so we only return an error here if there are no patches
3287
+ if (allPatchData.patches.length === 0) {
3288
+ let message;
3289
+ if (allPatchData.error) {
3290
+ message = allPatchData.error.message;
3291
+ } else if (allPatchData.errors && allPatchData.errors.length > 0) {
3292
+ const errors = allPatchData.errors;
3293
+ message = errors.map(error => error.message).join("");
3294
+ }
3295
+ if (message) {
3296
+ message = `Could not get patches: ${message}`;
3297
+ console.error(message);
3298
+ return {
3299
+ type: "error",
3300
+ error: {
3301
+ message
3302
+ }
3303
+ };
3304
+ }
3305
+ }
3083
3306
  const patches = [];
3084
- // TODO: use proper patch sequences when available:
3085
- for (const [patchId] of Object.entries(patchData.patches).sort(([, a], [, b]) => {
3086
- return a.createdAt.localeCompare(b.createdAt, undefined);
3087
- })) {
3088
- patches.push(patchId);
3307
+ for (const patchData of allPatchData.patches) {
3308
+ patches.push(patchData.patchId);
3089
3309
  }
3090
3310
  const webSocketNonceRes = await this.getWebSocketNonce(params.profileId);
3091
3311
  if (webSocketNonceRes.status === "error") {
@@ -3146,6 +3366,16 @@ class ValOpsHttp extends ValOps {
3146
3366
  }
3147
3367
  };
3148
3368
  }
3369
+ const contentType = res.headers.get("Content-Type") || "";
3370
+ if (contentType.startsWith("application/json")) {
3371
+ const json = await res.json();
3372
+ return {
3373
+ status: "error",
3374
+ error: {
3375
+ message: "Could not get nonce." + (json.message || "Unexpected error (no error message). Status: " + res.status)
3376
+ }
3377
+ };
3378
+ }
3149
3379
  return {
3150
3380
  status: "error",
3151
3381
  error: {
@@ -3171,8 +3401,8 @@ class ValOpsHttp extends ValOps {
3171
3401
  for (let i = 0; i < patchIds.length; i += chunkSize) {
3172
3402
  patchIdChunks.push(patchIds.slice(i, i + chunkSize));
3173
3403
  }
3174
- let allPatches = {};
3175
- let allErrors = {};
3404
+ const allPatches = [];
3405
+ const allErrors = [];
3176
3406
  if (patchIds === undefined || patchIds.length === 0) {
3177
3407
  return this.fetchPatchesInternal({
3178
3408
  patchIds: patchIds,
@@ -3190,15 +3420,9 @@ class ValOpsHttp extends ValOps {
3190
3420
  if ("error" in res) {
3191
3421
  return res;
3192
3422
  }
3193
- allPatches = {
3194
- ...allPatches,
3195
- ...res.patches
3196
- };
3423
+ allPatches.push(...res.patches);
3197
3424
  if (res.errors) {
3198
- allErrors = {
3199
- ...allErrors,
3200
- ...res.errors
3201
- };
3425
+ allErrors.push(...res.errors);
3202
3426
  }
3203
3427
  }
3204
3428
  return {
@@ -3234,27 +3458,23 @@ class ValOpsHttp extends ValOps {
3234
3458
  "Content-Type": "application/json"
3235
3459
  }
3236
3460
  }).then(async res => {
3237
- const patches = {};
3461
+ const patches = [];
3238
3462
  if (res.ok) {
3239
3463
  const json = await res.json();
3240
3464
  const parsed = GetPatches.safeParse(json);
3241
3465
  if (parsed.success) {
3466
+ const errors = [];
3242
3467
  const data = parsed.data;
3243
- const errors = {};
3244
3468
  for (const patchesRes of data.patches) {
3245
- patches[patchesRes.patchId] = {
3246
- path: patchesRes.path,
3469
+ patches.push({
3247
3470
  authorId: patchesRes.authorId,
3248
3471
  createdAt: patchesRes.createdAt,
3249
- appliedAt: patchesRes.applied && {
3250
- baseSha: patchesRes.applied.baseSha,
3251
- timestamp: patchesRes.applied.appliedAt,
3252
- git: {
3253
- commitSha: patchesRes.applied.commitSha
3254
- }
3255
- },
3256
- patch: patchesRes.patch
3257
- };
3472
+ appliedAt: null,
3473
+ patchId: patchesRes.patchId,
3474
+ path: patchesRes.path,
3475
+ baseSha: patchesRes.baseSha,
3476
+ patch: filters.omitPatch ? undefined : patchesRes.patch
3477
+ });
3258
3478
  }
3259
3479
  return {
3260
3480
  patches,
@@ -3276,7 +3496,8 @@ class ValOpsHttp extends ValOps {
3276
3496
  };
3277
3497
  });
3278
3498
  }
3279
- async saveSourceFilePatch(path, patch, authorId) {
3499
+ async saveSourceFilePatch(path, patch, parentRef, authorId) {
3500
+ const baseSha = await this.getBaseSha();
3280
3501
  return fetch(`${this.hostUrl}/v1/${this.project}/patches`, {
3281
3502
  method: "POST",
3282
3503
  headers: {
@@ -3287,78 +3508,53 @@ class ValOpsHttp extends ValOps {
3287
3508
  path,
3288
3509
  patch,
3289
3510
  authorId,
3511
+ parentPatchId: parentRef.type === "patch" ? parentRef.patchId : null,
3512
+ baseSha,
3290
3513
  commit: this.commitSha,
3291
3514
  branch: this.branch,
3292
3515
  coreVersion: Internal.VERSION.core
3293
3516
  })
3294
3517
  }).then(async res => {
3518
+ var _res$headers$get;
3295
3519
  if (res.ok) {
3296
3520
  const parsed = SavePatchResponse.safeParse(await res.json());
3297
3521
  if (parsed.success) {
3298
- return {
3522
+ return result.ok({
3299
3523
  patchId: parsed.data.patchId
3300
- };
3524
+ });
3301
3525
  }
3302
- return {
3303
- error: {
3304
- message: `Could not parse save patch response. Error: ${fromError(parsed.error)}`
3305
- }
3306
- };
3526
+ return result.err({
3527
+ errorType: "other",
3528
+ message: `Could not parse save patch response. Error: ${fromError(parsed.error)}`
3529
+ });
3307
3530
  }
3308
- return {
3309
- error: {
3310
- message: "Could not save patch. HTTP error: " + res.status + " " + res.statusText
3311
- }
3312
- };
3313
- }).catch(e => {
3314
- return {
3315
- error: {
3316
- message: `Could save source file patch (connection error?): ${e instanceof Error ? e.message : e.toString()}`
3317
- }
3318
- };
3319
- });
3320
- }
3321
- async saveBase64EncodedBinaryFileFromPatch(filePath, patchId, data, type, metadata) {
3322
- return fetch(`${this.hostUrl}/v1/${this.project}/patches/${patchId}/files`, {
3323
- method: "POST",
3324
- headers: {
3325
- ...this.authHeaders,
3326
- "Content-Type": "application/json"
3327
- },
3328
- body: JSON.stringify({
3329
- filePath: filePath,
3330
- data,
3331
- type,
3332
- metadata
3333
- })
3334
- }).then(async res => {
3335
- if (res.ok) {
3336
- const parsed = SavePatchFileResponse.safeParse(await res.json());
3337
- if (parsed.success) {
3338
- return {
3339
- patchId: parsed.data.patchId,
3340
- filePath: parsed.data.filePath
3341
- };
3342
- }
3343
- return {
3344
- error: {
3345
- message: `Could not parse save patch file response. Error: ${fromError(parsed.error)}`
3346
- }
3347
- };
3531
+ if (res.status === 409) {
3532
+ return result.err({
3533
+ errorType: "patch-head-conflict",
3534
+ message: "Conflict: " + (await res.text())
3535
+ });
3348
3536
  }
3349
- return {
3350
- error: {
3351
- message: "Could not save patch file. HTTP error: " + res.status + " " + res.statusText
3352
- }
3353
- };
3537
+ if ((_res$headers$get = res.headers.get("Content-Type")) !== null && _res$headers$get !== void 0 && _res$headers$get.includes("application/json")) {
3538
+ const json = await res.json();
3539
+ return result.err({
3540
+ errorType: "other",
3541
+ message: json.message || "Unknown error"
3542
+ });
3543
+ }
3544
+ return result.err({
3545
+ errorType: "other",
3546
+ message: "Could not save patch. HTTP error: " + res.status + " " + res.statusText
3547
+ });
3354
3548
  }).catch(e => {
3355
- return {
3356
- error: {
3357
- message: `Could save source binary file in patch (connection error?): ${e.toString()}`
3358
- }
3359
- };
3549
+ return result.err({
3550
+ errorType: "other",
3551
+ message: `Could save source file patch (connection error?): ${e instanceof Error ? e.message : e.toString()}`
3552
+ });
3360
3553
  });
3361
3554
  }
3555
+ async saveBase64EncodedBinaryFileFromPatch(filePath, parentRef, patchId, data, type, metadata) {
3556
+ throw Error("TODO: implement");
3557
+ }
3362
3558
  async getHttpFiles(files) {
3363
3559
  const params = new URLSearchParams();
3364
3560
  const stringifiedFiles = JSON.stringify({
@@ -3461,7 +3657,7 @@ class ValOpsHttp extends ValOps {
3461
3657
  const params = new URLSearchParams();
3462
3658
  params.set("file_path", filePath);
3463
3659
  try {
3464
- const metadataRes = await fetch(`${this.hostUrl}/v1/${this.project}/patches/${patchId}/metadata?${params}`, {
3660
+ const metadataRes = await fetch(`${this.hostUrl}/v1/${this.project}/patches/${patchId}/files?${params}`, {
3465
3661
  headers: {
3466
3662
  ...this.authHeaders,
3467
3663
  "Content-Type": "application/json"
@@ -3563,7 +3759,7 @@ class ValOpsHttp extends ValOps {
3563
3759
  }
3564
3760
  async commit(prepared, message, committer, newBranch) {
3565
3761
  try {
3566
- var _res$headers$get;
3762
+ var _res$headers$get2;
3567
3763
  const existingBranch = this.branch;
3568
3764
  const res = await fetch(`${this.hostUrl}/v1/${this.project}/commit`, {
3569
3765
  method: "POST",
@@ -3599,7 +3795,7 @@ class ValOpsHttp extends ValOps {
3599
3795
  }
3600
3796
  };
3601
3797
  }
3602
- if ((_res$headers$get = res.headers.get("Content-Type")) !== null && _res$headers$get !== void 0 && _res$headers$get.includes("application/json")) {
3798
+ if ((_res$headers$get2 = res.headers.get("Content-Type")) !== null && _res$headers$get2 !== void 0 && _res$headers$get2.includes("application/json")) {
3603
3799
  const json = await res.json();
3604
3800
  if (json.isNotFastForward) {
3605
3801
  return {
@@ -3628,6 +3824,30 @@ class ValOpsHttp extends ValOps {
3628
3824
  };
3629
3825
  }
3630
3826
  }
3827
+
3828
+ // #region profiles
3829
+ async getProfiles() {
3830
+ var _res$headers$get3;
3831
+ const res = await fetch(`${this.hostUrl}/v1/${this.project}/profiles`, {
3832
+ headers: {
3833
+ ...this.authHeaders,
3834
+ "Content-Type": "application/json"
3835
+ }
3836
+ });
3837
+ if (res.ok) {
3838
+ const parsed = ProfilesResponse.safeParse(await res.json());
3839
+ if (parsed.error) {
3840
+ console.error("Could not parse profiles response", parsed.error);
3841
+ throw Error(`Could not get profiles from remote server: wrong format. You might need to upgrade Val.`);
3842
+ }
3843
+ return parsed.data.profiles;
3844
+ }
3845
+ if ((_res$headers$get3 = res.headers.get("Content-Type")) !== null && _res$headers$get3 !== void 0 && _res$headers$get3.includes("application/json")) {
3846
+ const json = await res.json();
3847
+ throw Error(`Could not get profiles (status: ${res.status}): ${"message" in json ? json.message : "Unknown error"}`);
3848
+ }
3849
+ throw Error(`Could not get profiles. Got status: ${res.status}`);
3850
+ }
3631
3851
  }
3632
3852
 
3633
3853
  /* eslint-disable @typescript-eslint/no-unused-vars */
@@ -4138,8 +4358,74 @@ const ValServer = (valModules, options, callbacks) => {
4138
4358
  }
4139
4359
  },
4140
4360
  //#region patches
4141
- "/patches/~": {
4361
+ "/patches": {
4362
+ PUT: async req => {
4363
+ const cookies = req.cookies;
4364
+ const auth = getAuth(cookies);
4365
+ if (auth.error) {
4366
+ return {
4367
+ status: 401,
4368
+ json: {
4369
+ message: auth.error
4370
+ }
4371
+ };
4372
+ }
4373
+ if (serverOps instanceof ValOpsHttp && !("id" in auth)) {
4374
+ return {
4375
+ status: 401,
4376
+ json: {
4377
+ message: "Unauthorized"
4378
+ }
4379
+ };
4380
+ }
4381
+ const patches = req.body.patches;
4382
+ const parentRef = req.body.parentRef;
4383
+ const authorId = "id" in auth ? auth.id : null;
4384
+ const newPatchIds = [];
4385
+ for (const patch of patches) {
4386
+ const createPatchRes = await serverOps.createPatch(patch.path, patch.patch, parentRef, authorId);
4387
+ if (result.isErr(createPatchRes)) {
4388
+ if (createPatchRes.error.errorType === "patch-head-conflict") {
4389
+ return {
4390
+ status: 409,
4391
+ json: {
4392
+ type: "patch-head-conflict",
4393
+ message: "Patch id conflict"
4394
+ }
4395
+ };
4396
+ } else {
4397
+ return {
4398
+ status: 400,
4399
+ json: {
4400
+ type: "patch-error",
4401
+ message: "Could not create patch",
4402
+ errors: {
4403
+ [patch.path]: [{
4404
+ error: {
4405
+ message: createPatchRes.error.error.message
4406
+ }
4407
+ }]
4408
+ }
4409
+ }
4410
+ };
4411
+ }
4412
+ } else {
4413
+ newPatchIds.push(createPatchRes.value.patchId);
4414
+ }
4415
+ }
4416
+ return {
4417
+ status: 200,
4418
+ json: {
4419
+ newPatchIds,
4420
+ parentRef: {
4421
+ type: "patch",
4422
+ patchId: newPatchIds[newPatchIds.length - 1]
4423
+ }
4424
+ }
4425
+ };
4426
+ },
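A sketch of the PUT contract this handler implements (the URL prefix depends on how the Val server is mounted; ids and shas invented):

// PUT <val api prefix>/patches
// body: {
//   parentRef: { type: "head", headBaseSha: "<sha>" },      // or { type: "patch", patchId: "<id>" }
//   patches: [{ path: "/content/pages.val.ts", patch: [ /* ops */ ] }]
// }
// 200 -> { newPatchIds: ["<id>", ...], parentRef: { type: "patch", patchId: "<last new id>" } }
// 409 -> { type: "patch-head-conflict", message: "Patch id conflict" }  // someone else already built on that parent
// 400 -> { type: "patch-error", message: "Could not create patch", errors: { "<path>": [ ... ] } }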
4142
4427
  GET: async req => {
4428
+ // TODO: Fix type error patchId is string somewhere and PatchId somewhere else
4143
4429
  const query = req.query;
4144
4430
  const cookies = req.cookies;
4145
4431
  const auth = getAuth(cookies);
@@ -4159,38 +4445,52 @@ const ValServer = (valModules, options, callbacks) => {
4159
4445
  }
4160
4446
  };
4161
4447
  }
4448
+ const omit_patch = query.omit_patch === true;
4162
4449
  const authors = query.author;
4163
- const patches = await serverOps.fetchPatches({
4450
+ const fetchedPatchesRes = await serverOps.fetchPatches({
4164
4451
  authors,
4165
4452
  patchIds: query.patch_id,
4166
- omitPatch: query.omit_patch === true,
4453
+ omitPatch: omit_patch,
4167
4454
  moduleFilePaths: query.module_file_path
4168
4455
  });
4169
- if (patches.error) {
4456
+ if (fetchedPatchesRes.error) {
4170
4457
  // Error is singular
4171
- console.error("Val: Failed to get patches", patches.errors);
4458
+ console.error("Val: Failed to get patches", fetchedPatchesRes.error);
4172
4459
  return {
4173
4460
  status: 500,
4174
4461
  json: {
4175
- message: patches.error.message,
4176
- details: patches.error
4462
+ message: fetchedPatchesRes.error.message,
4463
+ error: fetchedPatchesRes.error
4177
4464
  }
4178
4465
  };
4179
4466
  }
4180
- if (patches.errors && Object.keys(patches.errors).length > 0) {
4467
+ if (fetchedPatchesRes.errors && Object.keys(fetchedPatchesRes.errors).length > 0) {
4181
4468
  // Errors is plural. Different property than above.
4182
- console.error("Val: Failed to get patches", patches.errors);
4469
+ console.error("Val: Failed to get patches", fetchedPatchesRes.errors);
4183
4470
  return {
4184
4471
  status: 500,
4185
4472
  json: {
4186
4473
  message: "Failed to get patches",
4187
- details: patches.errors
4474
+ patchErrors: fetchedPatchesRes.errors
4188
4475
  }
4189
4476
  };
4190
4477
  }
4478
+ const patches = [];
4479
+ for (const [patchIdS, patchData] of Object.entries(fetchedPatchesRes.patches)) {
4480
+ const patchId = patchIdS;
4481
+ patches.push({
4482
+ patchId,
4483
+ ...patchData
4484
+ });
4485
+ }
4486
+ // TODO: we should sort by parentRef instead:
4487
+ patches.sort((a, b) => a.createdAt.localeCompare(b.createdAt));
4191
4488
  return {
4192
4489
  status: 200,
4193
- json: patches
4490
+ json: {
4491
+ patches,
4492
+ baseSha: await serverOps.getBaseSha()
4493
+ }
4194
4494
  };
4195
4495
  },
4196
4496
  DELETE: async req => {
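The GET handler above now names its fetch result fetchedPatchesRes, flattens the keyed patch map into an array of entries that each carry their own patchId, sorts ascending by createdAt (with a TODO to sort by parentRef instead), and wraps the 200 payload as { patches, baseSha } rather than returning the raw map. A small consumer sketch of that reshaped response, assuming only the fields visible in the hunk:

type GetPatchesResponse = {
  patches: { patchId: string; createdAt: string; [key: string]: unknown }[];
  baseSha: string;
};

function newestPatchId(res: GetPatchesResponse): string | undefined {
  // The handler sorts ascending by createdAt, so the last entry is the newest.
  return res.patches[res.patches.length - 1]?.patchId;
}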
@@ -4221,7 +4521,10 @@ const ValServer = (valModules, options, callbacks) => {
4221
4521
  status: 500,
4222
4522
  json: {
4223
4523
  message: "Failed to delete patches",
4224
- details: deleteRes.errors
4524
+ errors: Object.entries(deleteRes.errors).map(([id, error]) => ({
4525
+ patchId: id,
4526
+ ...error
4527
+ }))
4225
4528
  }
4226
4529
  };
4227
4530
  }
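The DELETE handler above keeps its 500 status but flattens the keyed error map into an array where each entry carries its patchId. A small illustration of the same reshaping, assuming error values that carry a message (the patch id in the example is made up):

const deleteErrors: Record<string, { message: string }> = {
  "patch-1": { message: "not found" },
};
const errors = Object.entries(deleteErrors).map(([id, error]) => ({
  patchId: id,
  ...error,
}));
// => [{ patchId: "patch-1", message: "not found" }]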
@@ -4293,13 +4596,12 @@ const ValServer = (valModules, options, callbacks) => {
4293
4596
  }
4294
4597
  },
4295
4598
  // #region sources
4296
- "/sources": {
4599
+ "/sources/~": {
4297
4600
  PUT: async req => {
4298
- var _body$patchIds;
4299
4601
  const query = req.query;
4300
4602
  const cookies = req.cookies;
4301
- const body = req.body;
4302
- const treePath = req.path || "";
4603
+ // TODO: filter results by moduleFilePath
4604
+ // const moduleFilePath = req.path || "";
4303
4605
  const auth = getAuth(cookies);
4304
4606
  if (auth.error) {
4305
4607
  return {
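The hunk above renames the sources route from "/sources" to "/sources/~" and drops the request body and tree-path handling from the PUT handler; filtering results by module file path is left as a TODO. A hedged sketch of calling the renamed route; the base URL and the way the validation flags are encoded on the wire are assumptions:

async function fetchAllSources(baseUrl: string) {
  // The patchIds/addPatches body accepted by the old "/sources" route is gone;
  // the handler now resolves patches itself (see the next hunk).
  const res = await fetch(`${baseUrl}/sources/~?validate_sources=true`, {
    method: "PUT",
  });
  if (!res.ok) {
    throw Error(`PUT /sources/~ failed with status ${res.status}`);
  }
  return res.json();
}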
@@ -4328,154 +4630,108 @@ const ValServer = (valModules, options, callbacks) => {
4328
4630
  }
4329
4631
  };
4330
4632
  }
4331
- let tree;
4332
- let patchAnalysis = null;
4333
- let newPatchIds = undefined;
4334
- if (body !== null && body !== void 0 && body.patchIds && (body === null || body === void 0 || (_body$patchIds = body.patchIds) === null || _body$patchIds === void 0 ? void 0 : _body$patchIds.length) > 0 || body !== null && body !== void 0 && body.addPatches) {
4335
- // TODO: validate patches_sha
4336
- const patchIds = body === null || body === void 0 ? void 0 : body.patchIds;
4337
- const patchOps = patchIds && patchIds.length > 0 ? await serverOps.fetchPatches({
4338
- patchIds,
4339
- omitPatch: false
4340
- }) : {
4341
- patches: {}
4342
- };
4343
- if (patchOps.error) {
4344
- return {
4345
- status: 400,
4346
- json: {
4347
- message: "Failed to fetch patches: " + patchOps.error.message,
4348
- details: []
4349
- }
4350
- };
4351
- }
4352
- let patchErrors = undefined;
4353
- for (const [patchIdS, error] of Object.entries(patchOps.errors || {})) {
4354
- const patchId = patchIdS;
4355
- if (!patchErrors) {
4356
- patchErrors = {};
4357
- }
4358
- patchErrors[patchId] = {
4359
- message: error.message
4360
- };
4361
- }
4362
- // TODO: errors
4363
- patchAnalysis = serverOps.analyzePatches(patchOps.patches);
4364
- if (body !== null && body !== void 0 && body.addPatches) {
4365
- for (const addPatch of body.addPatches) {
4366
- const newPatchModuleFilePath = addPatch.path;
4367
- const newPatchOps = addPatch.patch;
4368
- const authorId = "id" in auth ? auth.id : null;
4369
- const createPatchRes = await serverOps.createPatch(newPatchModuleFilePath, {
4370
- ...patchAnalysis,
4371
- ...patchOps
4372
- }, newPatchOps, authorId);
4373
- if (createPatchRes.error) {
4374
- return {
4375
- status: 500,
4376
- json: {
4377
- message: "Failed to create patch: " + createPatchRes.error.message,
4378
- details: createPatchRes.error
4379
- }
4380
- };
4381
- }
4382
- if (!newPatchIds) {
4383
- newPatchIds = [createPatchRes.patchId];
4384
- } else {
4385
- newPatchIds.push(createPatchRes.patchId);
4386
- }
4387
- patchOps.patches[createPatchRes.patchId] = {
4388
- path: newPatchModuleFilePath,
4389
- patch: newPatchOps,
4390
- authorId,
4391
- createdAt: createPatchRes.createdAt,
4392
- appliedAt: null
4393
- };
4394
- patchAnalysis.patchesByModule[newPatchModuleFilePath] = [...(patchAnalysis.patchesByModule[newPatchModuleFilePath] || []), {
4395
- patchId: createPatchRes.patchId
4396
- }];
4397
- }
4398
- }
4399
- tree = {
4400
- ...(await serverOps.getTree({
4401
- ...patchAnalysis,
4402
- ...patchOps
4403
- }))
4404
- };
4405
- if (query.validate_all) {
4406
- const allTree = await serverOps.getTree();
4407
- tree = {
4408
- sources: {
4409
- ...allTree.sources,
4410
- ...tree.sources
4411
- },
4412
- errors: {
4413
- ...allTree.errors,
4414
- ...tree.errors
4415
- }
4416
- };
4633
+ const patchOps = await serverOps.fetchPatches({
4634
+ patchIds: undefined,
4635
+ omitPatch: false
4636
+ });
4637
+ const patchAnalysis = serverOps.analyzePatches(patchOps.patches);
4638
+ let sourcesRes = await serverOps.getSources();
4639
+ const onlyPatchedTreeModules = await serverOps.getSources({
4640
+ ...patchAnalysis,
4641
+ ...patchOps
4642
+ });
4643
+ sourcesRes = {
4644
+ sources: {
4645
+ ...sourcesRes.sources,
4646
+ ...(onlyPatchedTreeModules.sources || {})
4647
+ },
4648
+ errors: {
4649
+ ...sourcesRes.errors,
4650
+ ...(onlyPatchedTreeModules.errors || {})
4417
4651
  }
4418
- } else {
4419
- tree = await serverOps.getTree();
4420
- }
4652
+ };
4421
4653
  let sourcesValidation = {
4422
4654
  errors: {},
4423
4655
  files: {}
4424
4656
  };
4425
4657
  if (query.validate_sources || query.validate_binary_files) {
4426
4658
  const schemas = await serverOps.getSchemas();
4427
- sourcesValidation = await serverOps.validateSources(schemas, tree.sources);
4659
+ sourcesValidation = await serverOps.validateSources(schemas, sourcesRes.sources);
4428
4660
 
4429
- // TODO: send validation errors
4661
+ // TODO: send binary files validation errors
4430
4662
  if (query.validate_binary_files) {
4431
- await serverOps.validateFiles(schemas, tree.sources, sourcesValidation.files);
4663
+ await serverOps.validateFiles(schemas, sourcesRes.sources, sourcesValidation.files);
4432
4664
  }
4433
4665
  }
4434
4666
  const schemaSha = await serverOps.getSchemaSha();
4435
4667
  const modules = {};
4436
- for (const [moduleFilePathS, module] of Object.entries(tree.sources)) {
4668
+ for (const [moduleFilePathS, module] of Object.entries(sourcesRes.sources)) {
4437
4669
  const moduleFilePath = moduleFilePathS;
4438
- if (moduleFilePath.startsWith(treePath)) {
4439
- var _sourcesValidation$er;
4670
+ if (moduleFilePath.startsWith(moduleFilePath)) {
4671
+ var _patchAnalysis$patche, _sourcesValidation$er;
4672
+ const skippedPatches = [];
4673
+ const patchErrors = {};
4674
+ const appliedPatches = ((_patchAnalysis$patche = patchAnalysis.patchesByModule[moduleFilePath]) === null || _patchAnalysis$patche === void 0 ? void 0 : _patchAnalysis$patche.map(p => p.patchId)) || [];
4675
+ for (const {
4676
+ patchId,
4677
+ skipped,
4678
+ error
4679
+ } of ((_sourcesRes$errors = sourcesRes.errors) === null || _sourcesRes$errors === void 0 ? void 0 : _sourcesRes$errors[moduleFilePath]) || []) {
4680
+ var _sourcesRes$errors;
4681
+ if (skipped) {
4682
+ skippedPatches.push(patchId);
4683
+ } else if (error) {
4684
+ patchErrors[patchId] = {
4685
+ message: error.message
4686
+ };
4687
+ } else {
4688
+ // unclear which case this is; treat the patch as applied for now
4689
+ appliedPatches.push(patchId);
4690
+ }
4691
+ }
4440
4692
  modules[moduleFilePath] = {
4441
4693
  source: module,
4442
- patches: patchAnalysis && patchAnalysis.patchesByModule[moduleFilePath] ? {
4443
- applied: patchAnalysis.patchesByModule[moduleFilePath].map(p => p.patchId)
4694
+ patches: appliedPatches.length > 0 || skippedPatches.length > 0 || Object.keys(patchErrors).length > 0 ? {
4695
+ applied: appliedPatches,
4696
+ skipped: skippedPatches.length > 0 ? skippedPatches : undefined,
4697
+ errors: Object.keys(patchErrors).length > 0 ? patchErrors : undefined
4444
4698
  } : undefined,
4445
4699
  validationErrors: (_sourcesValidation$er = sourcesValidation.errors[moduleFilePath]) === null || _sourcesValidation$er === void 0 ? void 0 : _sourcesValidation$er.validations
4446
4700
  };
4447
4701
  }
4448
4702
  }
4449
- if (tree.errors && Object.keys(tree.errors).length > 0) {
4450
- const res = {
4451
- status: 400,
4452
- json: {
4453
- type: "patch-error",
4454
- schemaSha,
4455
- modules,
4456
- errors: Object.fromEntries(Object.entries(tree.errors).map(([key, value]) => [key, value.map(error => ({
4457
- patchId: error.patchId,
4458
- skipped: error.skipped,
4459
- error: {
4460
- message: error.error.message
4461
- }
4462
- }))])),
4463
- message: "One or more patches failed to be applied"
4464
- }
4465
- };
4466
- return res;
4467
- }
4468
4703
  const res = {
4469
4704
  status: 200,
4470
4705
  json: {
4471
4706
  schemaSha,
4472
- modules,
4473
- newPatchIds
4707
+ modules
4474
4708
  }
4475
4709
  };
4476
4710
  return res;
4477
4711
  }
4478
4712
  },
4713
+ "/profiles": {
4714
+ GET: async req => {
4715
+ // const cookies = req.cookies;
4716
+ // const auth = getAuth(cookies);
4717
+ // if (auth.error) {
4718
+ // return {
4719
+ // status: 401,
4720
+ // json: {
4721
+ // message: auth.error,
4722
+ // },
4723
+ // };
4724
+ // }
4725
+
4726
+ const profiles = await serverOps.getProfiles();
4727
+ return {
4728
+ status: 200,
4729
+ json: {
4730
+ profiles
4731
+ }
4732
+ };
4733
+ }
4734
+ },
4479
4735
  "/save": {
4480
4736
  POST: async req => {
4481
4737
  const cookies = req.cookies;
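The large hunk above reworks the PUT "/sources/~" body: instead of applying only the patch ids sent by the client, the handler now fetches all patches itself, merges the patched modules over the plain sources, and reports per-module applied, skipped, and errored patch ids. The 400 "patch-error" short-circuit and the newPatchIds field are gone from the response, and a new GET "/profiles" route returns { profiles } with its auth checks currently commented out. Note that the per-module guard compares moduleFilePath.startsWith(moduleFilePath), which is always true, so no path filtering happens yet, consistent with the TODO in the previous hunk.

A hedged description of the 200 payload assembled above, written as a type; the unknown placeholders stand in for source and validation shapes not shown in this diff:

type SourcesResponse = {
  schemaSha: string;
  modules: Record<
    string, // module file path
    {
      source: unknown;
      patches?: {
        applied: string[];
        skipped?: string[];
        errors?: Record<string, { message: string }>;
      };
      validationErrors?: unknown;
    }
  >;
};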
@@ -5564,4 +5820,4 @@ async function createFixPatch(config, apply, sourcePath, validationError) {
5564
5820
  };
5565
5821
  }
5566
5822
 
5567
- export { Patch, PatchJSON, Service, ValFSHost, ValModuleLoader, ValSourceFileHandler, createFixPatch, createService, createValApiRouter, createValServer, decodeJwt, encodeJwt, formatSyntaxErrorTree, getCompilerOptions, getExpire, patchSourceFile, safeReadGit };
5823
+ export { Service, ValFSHost, ValModuleLoader, ValSourceFileHandler, createFixPatch, createService, createValApiRouter, createValServer, decodeJwt, encodeJwt, formatSyntaxErrorTree, getCompilerOptions, getExpire, patchSourceFile, safeReadGit };
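The final hunk trims the public export list: Patch and PatchJSON are no longer exported from @valbuild/server, while the remaining exports are unchanged. Illustrative only; where these types now live is not shown in this hunk:

// Compiled against 0.67.1, but breaks against the export list above in 0.68.1:
// import { Patch, PatchJSON } from "@valbuild/server";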