@valbuild/server 0.67.0 → 0.68.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/declarations/src/ValFS.d.ts +2 -0
- package/dist/declarations/src/ValFSHost.d.ts +2 -0
- package/dist/declarations/src/ValServer.d.ts +2 -0
- package/dist/declarations/src/index.d.ts +0 -1
- package/dist/valbuild-server.cjs.dev.js +791 -607
- package/dist/valbuild-server.cjs.prod.js +791 -607
- package/dist/valbuild-server.esm.js +716 -529
- package/package.json +4 -4
- package/dist/declarations/src/patch/validation.d.ts +0 -6
@@ -14,7 +14,7 @@ var ui = require('@valbuild/ui');
 var internal = require('@valbuild/shared/internal');
 var server = require('@valbuild/ui/server');
 var crypto$1 = require('crypto');
-var
+var zod = require('zod');
 var sizeOf = require('image-size');
 var zodValidationError = require('zod-validation-error');

@@ -42,7 +42,6 @@ var ts__default = /*#__PURE__*/_interopDefault(ts);
 var fsPath__namespace = /*#__PURE__*/_interopNamespace(fsPath);
 var fs__default = /*#__PURE__*/_interopDefault(fs);
 var crypto__default = /*#__PURE__*/_interopDefault(crypto$1);
-var z__default = /*#__PURE__*/_interopDefault(z);
 var sizeOf__default = /*#__PURE__*/_interopDefault(sizeOf);

 class ValSyntaxError {
@@ -1356,9 +1355,9 @@ function decodeJwt(token, secretKey) {
 function getExpire() {
 return Math.floor(Date.now() / 1000) + 60 * 60 * 24 * 4; // 4 days
 }
-const JwtHeaderSchema =
-alg:
-typ:
+const JwtHeaderSchema = zod.z.object({
+alg: zod.z.literal("HS256"),
+typ: zod.z.literal("JWT")
 });
 const jwtHeader = {
 alg: "HS256",
@@ -1383,11 +1382,11 @@ const tsOps = new TSOps(document => {
 class ValOps {
 /** Sources from val modules, immutable (without patches or anything) */

-/** The
+/** The sha256 / hash of sources + schema + config */

 /** Schema from val modules, immutable */

-/** The
+/** The sha256 / hash of schema + config - if this changes users needs to reload */

 constructor(valModules, options) {
 this.valModules = valModules;
@@ -1399,8 +1398,46 @@ class ValOps {
 this.modulesErrors = null;
 }
 hash(input) {
+if (typeof input === "object") {
+return this.hashObject(input);
+}
 return core.Internal.getSHA256Hash(textEncoder$1.encode(input));
 }
+hashObject(obj) {
+const collector = [];
+this.collectObjectRecursive(obj, collector);
+return core.Internal.getSHA256Hash(textEncoder$1.encode(collector.join("")));
+}
+collectObjectRecursive(item, collector) {
+if (typeof item === "string") {
+collector.push(`"`, item, `"`);
+return;
+} else if (typeof item === "number") {
+collector.push(item.toString());
+return;
+} else if (typeof item === "object") {
+if (Array.isArray(item)) {
+collector.push("[");
+for (let i = 0; i < item.length; i++) {
+this.collectObjectRecursive(item[i], collector);
+i !== item.length - 1 && collector.push(",");
+}
+collector.push("]");
+} else {
+collector.push("{");
+const keys = Object.keys(item).sort();
+keys.forEach((key, i) => {
+collector.push(`"${key}":`);
+this.collectObjectRecursive(item[key], collector);
+i !== keys.length - 1 && collector.push(",");
+});
+collector.push("}");
+}
+return;
+} else {
+console.warn("Unknown type encountered when hashing object", typeof item, item);
+}
+}

 // #region stat
 /**
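The new hash()/hashObject() pair above derives the baseSha/schemaSha by serializing objects with their keys sorted before hashing, so logically equal sources and schemas hash identically regardless of key insertion order. A minimal standalone sketch of the same idea follows; the names stableStringify/hashObject are illustrative, and it assumes Node's crypto module rather than the package's core.Internal.getSHA256Hash.

import { createHash } from "crypto";

// Serialize with object keys sorted so that logically equal values
// always produce the same string (and therefore the same hash).
function stableStringify(item: unknown): string {
  if (typeof item === "string") return `"${item}"`;
  if (typeof item === "number") return item.toString();
  if (Array.isArray(item)) return `[${item.map(stableStringify).join(",")}]`;
  if (typeof item === "object" && item !== null) {
    const keys = Object.keys(item).sort();
    return `{${keys
      .map((k) => `"${k}":` + stableStringify((item as Record<string, unknown>)[k]))
      .join(",")}}`;
  }
  // Other types (booleans, null, undefined) are not expected here.
  return "";
}

function hashObject(obj: unknown): string {
  return createHash("sha256").update(stableStringify(obj)).digest("hex");
}

// Key order does not matter:
console.log(hashObject({ a: 1, b: [2, 3] }) === hashObject({ b: [2, 3], a: 1 })); // true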
@@ -1414,7 +1451,7 @@ class ValOps {
 */

 // #region initTree
-async
+async initSources() {
 if (this.baseSha === null || this.schemaSha === null || this.sources === null || this.schemas === null || this.modulesErrors === null) {
 const currentModulesErrors = [];
 const addModuleError = (message, index, path) => {
@@ -1508,50 +1545,43 @@ class ValOps {
 const {
 baseSha,
 schemaSha
-} = await this.
+} = await this.initSources();
 await this.onInit(baseSha, schemaSha);
 }
 async getBaseSources() {
-return this.
+return this.initSources().then(result => result.sources);
 }
 async getSchemas() {
-return this.
+return this.initSources().then(result => result.schemas);
 }
 async getModuleErrors() {
-return this.
+return this.initSources().then(result => result.moduleErrors);
 }
 async getBaseSha() {
-return this.
+return this.initSources().then(result => result.baseSha);
 }
 async getSchemaSha() {
-return this.
+return this.initSources().then(result => result.schemaSha);
 }

 // #region analyzePatches
-analyzePatches(
+analyzePatches(sortedPatches) {
 const patchesByModule = {};
 const fileLastUpdatedByPatchId = {};
-for (const
-
-patch,
-createdAt: created_at
-}] of Object.entries(patchesById)) {
-const patchId = patchIdS;
-for (const op of patch) {
+for (const patch of sortedPatches) {
+for (const op of patch.patch) {
 if (op.op === "file") {
-
+const filePath = op.filePath;
+fileLastUpdatedByPatchId[filePath] = patch.patchId;
 }
+const path = patch.path;
+if (!patchesByModule[path]) {
+patchesByModule[path] = [];
+}
+patchesByModule[path].push({
+patchId: patch.patchId
+});
 }
-if (!patchesByModule[path]) {
-patchesByModule[path] = [];
-}
-patchesByModule[path].push({
-patchId,
-createdAt: created_at
-});
-}
-for (const path in patchesByModule) {
-patchesByModule[path].sort((a, b) => a.createdAt.localeCompare(b.createdAt));
 }
 return {
 patchesByModule,
@@ -1560,11 +1590,11 @@ class ValOps {
 }

 // #region getTree
-async
+async getSources(analysis) {
 if (!analysis) {
 const {
 sources
-} = await this.
+} = await this.initSources();
 return {
 sources,
 errors: {}
@@ -1572,76 +1602,72 @@ class ValOps {
 }
 const {
 sources
-} = await this.
+} = await this.initSources();
 const patchedSources = {};
 const errors = {};
-for (const
-const path =
+for (const patchData of analysis.patches) {
+const path = patchData.path;
 if (!sources[path]) {
 if (!errors[path]) {
 errors[path] = [];
 }
-
-
-
-patchId,
-invalidPath: true,
+console.error("Module not found", path);
+errors[path].push({
+patchId: patchData.patchId,
 skipped: true,
-error: new patch.PatchError(`Module
-})
+error: new patch.PatchError(`Module not found`)
+});
+continue;
 }
-patchedSources[path]
-
-
-
-
+if (!patchedSources[path]) {
+patchedSources[path] = sources[path];
+}
+const patchId = patchData.patchId;
+if (errors[path]) {
+console.error("Cannot apply patch: previous errors exists", path, errors[path]);
+errors[path].push({
+patchId: patchId,
+skipped: true,
+error: new patch.PatchError(`Cannot apply patch: previous errors exists`)
+});
+} else {
+const applicableOps = [];
+const fileFixOps = {};
+for (const op of patchData.patch) {
+if (op.op === "file") {
+// NOTE: We insert the last patch_id that modify a file
+// when constructing the url we use the patch id (and the file path)
+// to fetch the right file
+// NOTE: overwrite and use last patch_id if multiple patches modify the same file
+fileFixOps[op.path.join("/")] = [{
+op: "add",
+path: op.path.concat(...(op.nestedFilePath || [])).concat("patch_id"),
+value: patchId
+}];
+} else {
+applicableOps.push(op);
+}
+}
+const patchRes = patch.applyPatch(patch.deepClone(patchedSources[path]),
+// applyPatch mutates the source. On add operations it adds more than once? There is something strange going on... deepClone seems to fix, but is that the right solution?
+jsonOps, applicableOps.concat(...Object.values(fileFixOps)));
+if (fp.result.isErr(patchRes)) {
+console.error("Could not apply patch", JSON.stringify({
+path,
+patchId,
+error: patchRes.error,
+applicableOps
+}, null, 2));
+if (!errors[path]) {
+errors[path] = [];
+}
 errors[path].push({
 patchId: patchId,
-skipped:
-error:
+skipped: false,
+error: patchRes.error
 });
 } else {
-
-if (!patchData) {
-errors[path] = [{
-patchId: patchId,
-skipped: false,
-error: new patch.PatchError(`Patch not found`)
-}];
-continue;
-}
-const applicableOps = [];
-const fileFixOps = {};
-for (const op of patchData.patch) {
-if (op.op === "file") {
-// NOTE: We insert the last patch_id that modify a file
-// when constructing the url we use the patch id (and the file path)
-// to fetch the right file
-// NOTE: overwrite and use last patch_id if multiple patches modify the same file
-fileFixOps[op.path.join("/")] = [{
-op: "add",
-path: op.path.concat(...(op.nestedFilePath || [])).concat("patch_id"),
-value: patchId
-}];
-} else {
-applicableOps.push(op);
-}
-}
-const patchRes = patch.applyPatch(patch.deepClone(patchedSources[path]),
-// applyPatch mutates the source. On add operations it will add multiple items? There is something strange going on. DeepClone seems to fix, but is that the right?
-jsonOps, applicableOps.concat(...Object.values(fileFixOps)));
-if (fp.result.isErr(patchRes)) {
-if (!errors[path]) {
-errors[path] = [];
-}
-errors[path].push({
-patchId: patchId,
-skipped: false,
-error: patchRes.error
-});
-} else {
-patchedSources[path] = patchRes.value;
-}
+patchedSources[path] = patchRes.value;
 }
 }
 }
@@ -1836,7 +1862,7 @@ class ValOps {
 actual: currentValueMetadata[field],
 expected: fieldMetadata
 },
-fixes: ["image:
+fixes: ["image:check-metadata"]
 }];
 }
 }
@@ -1879,14 +1905,14 @@ class ValOps {
 for (const {
 patchId
 } of patches) {
-
-
-if (!patch$1) {
+const patchData = patchAnalysis.patches.find(p => p.patchId === patchId);
+if (!patchData) {
 errors.push({
 message: `Analysis required non-existing patch: ${patchId}`
 });
 break;
 }
+const patch$1 = patchData.patch;
 const sourceFileOps = patch$1.filter(op => op.op !== "file"); // file is not a valid source file op
 const patchRes = patch.applyPatch(tsSourceFile, tsOps, sourceFileOps);
 if (fp.result.isErr(patchRes)) {
@@ -2000,13 +2026,21 @@ class ValOps {
 }

 // #region createPatch
-async createPatch(path,
-const initTree = await this.
+async createPatch(path, patch$1, parentRef, authorId) {
+const initTree = await this.initSources();
 const schemas = initTree.schemas;
 const moduleErrors = initTree.moduleErrors;
 let sources = initTree.sources;
-if (
-
+if (parentRef.type !== "head") {
+// There's room for some optimizations here: we could do this once, then re-use every time we create a patch, then again we only create one patch at a time
+const patchOps = await this.fetchPatches({
+omitPatch: false
+});
+const patchAnalysis = this.analyzePatches(patchOps.patches);
+const tree = await this.getSources({
+...patchAnalysis,
+...patchOps
+});
 sources = {
 ...sources,
 ...tree.sources
@@ -2017,27 +2051,30 @@ class ValOps {
 const moduleError = moduleErrors.find(e => e.path === path);
 if (moduleError) {
 console.error(`Cannot patch. Module at path: '${path}' has fatal errors: "${moduleError.message}"`);
-return {
+return fp.result.err({
+errorType: "other",
 error: {
 message: `Cannot patch. Module at path: '${path}' has fatal errors: ` + moduleErrors.map(m => `"${m.message}"`).join(" and ")
 }
-};
+});
 }
 if (!source) {
 console.error(`Cannot patch. Module source at path: '${path}' does not exist`);
-return {
+return fp.result.err({
+errorType: "other",
 error: {
 message: `Cannot patch. Module source at path: '${path}' does not exist`
 }
-};
+});
 }
 if (!schema) {
 console.error(`Cannot patch. Module schema at path: '${path}' does not exist`);
-return {
+return fp.result.err({
+errorType: "other",
 error: {
 message: `Cannot patch. Module schema at path: '${path}' does not exist`
 }
-};
+});
 }
 const sourceFileOps = [];
 const files = {};
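From this version createPatch and the save/patch helpers report failures as result values with a discriminated errorType ("patch-head-conflict" vs "other") instead of plain { error } objects. Below is a small sketch of how a caller might branch on that shape; the Result type and names are illustrative, not the actual @valbuild/core fp API.

// Minimal illustrative result shape, mirroring the errorType discrimination above.
type Ok<T> = { kind: "ok"; value: T };
type Err<E> = { kind: "err"; error: E };
type Result<T, E> = Ok<T> | Err<E>;

type CreatePatchError =
  | { errorType: "patch-head-conflict" }
  | { errorType: "other"; error: { message: string } };

function handleCreatePatch(res: Result<{ patchId: string }, CreatePatchError>): number {
  if (res.kind === "err") {
    // A conflict means someone else already wrote a patch on top of the same
    // parent; the caller is expected to refetch and retry against the new head.
    return res.error.errorType === "patch-head-conflict" ? 409 : 400;
  }
  console.log("created patch", res.value.patchId);
  return 200;
}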
@@ -2076,14 +2113,20 @@ class ValOps {
 }
 }
 }
-const saveRes = await this.saveSourceFilePatch(path,
-if (saveRes
-console.error(`Could not save source
-
+const saveRes = await this.saveSourceFilePatch(path, patch$1, parentRef, authorId);
+if (fp.result.isErr(saveRes)) {
+console.error(`Could not save source patch at path: '${path}'. Error: ${saveRes.error.errorType === "other" ? saveRes.error.message : saveRes.error.errorType}`);
+if (saveRes.error.errorType === "patch-head-conflict") {
+return fp.result.err({
+errorType: "patch-head-conflict"
+});
+}
+return fp.result.err({
+errorType: "other",
 error: saveRes.error
-};
+});
 }
-const patchId = saveRes.patchId;
+const patchId = saveRes.value.patchId;
 const saveFileRes = await Promise.all(Object.entries(files).map(async ([filePath, data]) => {
 if (data.error) {
 return {
@@ -2148,7 +2191,7 @@ class ValOps {
 const MaxRetries = 3;
 let lastRes;
 for (let i = 0; i < MaxRetries; i++) {
-lastRes = await this.saveBase64EncodedBinaryFileFromPatch(filePath, patchId, data.value, type, metadataOps.metadata);
+lastRes = await this.saveBase64EncodedBinaryFileFromPatch(filePath, parentRef, patchId, data.value, type, metadataOps.metadata);
 if (!lastRes.error) {
 return {
 filePath
@@ -2163,24 +2206,25 @@ class ValOps {
 }));
 const errors = saveFileRes.filter(f => !!f.error);
 if (errors.length > 0) {
-return {
+return fp.result.err({
+errorType: "other",
 error: {
 message: "Could not save patch: " + errors.map(e => e.error.message).join(", ")
 }
-};
+});
 }
-return {
+return fp.result.ok({
 patchId,
 files: saveFileRes,
 createdAt: new Date().toISOString()
-};
+});
 }

 // #region abstract ops
 }
 function isOnlyFileCheckValidationError(validationError) {
 var _validationError$fixe;
-if ((_validationError$fixe = validationError.fixes) !== null && _validationError$fixe !== void 0 && _validationError$fixe.every(f => f === "file:check-metadata" || f === "image:
+if ((_validationError$fixe = validationError.fixes) !== null && _validationError$fixe !== void 0 && _validationError$fixe.every(f => f === "file:check-metadata" || f === "image:check-metadata")) {
 return true;
 }
 return false;
@@ -2270,81 +2314,51 @@ function bufferFromDataUrl(dataUrl) {
 }
 }

-const JSONValueT = z__default["default"].lazy(() => z__default["default"].union([z__default["default"].string(), z__default["default"].number(), z__default["default"].boolean(), z__default["default"].null(), z__default["default"].array(JSONValueT), z__default["default"].record(JSONValueT)]));
-
-/**
-* Raw JSON patch operation.
-*/
-const OperationJSONT = z__default["default"].discriminatedUnion("op", [z__default["default"].object({
-op: z__default["default"].literal("add"),
-path: z__default["default"].string(),
-value: JSONValueT
-}).strict(), z__default["default"].object({
-op: z__default["default"].literal("remove"),
-/**
-* Must be non-root
-*/
-path: z__default["default"].string()
-}).strict(), z__default["default"].object({
-op: z__default["default"].literal("replace"),
-path: z__default["default"].string(),
-value: JSONValueT
-}).strict(), z__default["default"].object({
-op: z__default["default"].literal("move"),
-/**
-* Must be non-root and not a proper prefix of "path".
-*/
-from: z__default["default"].string(),
-path: z__default["default"].string()
-}).strict(), z__default["default"].object({
-op: z__default["default"].literal("copy"),
-from: z__default["default"].string(),
-path: z__default["default"].string()
-}).strict(), z__default["default"].object({
-op: z__default["default"].literal("test"),
-path: z__default["default"].string(),
-value: JSONValueT
-}).strict(), z__default["default"].object({
-op: z__default["default"].literal("file"),
-path: z__default["default"].string(),
-filePath: z__default["default"].string(),
-value: z__default["default"].string()
-}).strict()]);
-const PatchJSON = z__default["default"].array(OperationJSONT);
 /**
-*
+* Computes the changed patch parent references based on the current patches and the patch IDs to be deleted.
+*
+* NOTE: patches that will be deleted are not included in the changed patches, since they will be deleted any how.
+*
+* @param currentPatches - The array of current patches.
+* @param deletePatchIds - The array of patch IDs to be deleted.
+* @returns An object containing the changed patches with their corresponding parent references.
 */
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-}
-
-
-
-
-
-
-
-
-
-
-
+function computeChangedPatchParentRefs(currentPatches, deletePatchIds) {
+let lastNonDeletedPatchIndex = -1;
+const changedPatches = {};
+for (let i = 0; i < currentPatches.length; i++) {
+const current = currentPatches[i];
+if (
+// skip all patches that will be deleted:
+deletePatchIds.includes(current.patchId)) {
+var _currentPatches;
+if (
+// skip change if the patch after is deleted anyway:
+!deletePatchIds.includes((_currentPatches = currentPatches[i + 1]) === null || _currentPatches === void 0 ? void 0 : _currentPatches.patchId)) {
+if (
+// set next patch to point to head if it exists:
+lastNonDeletedPatchIndex === -1 && currentPatches[i + 1]) {
+changedPatches[currentPatches[i + 1].patchId] = {
+type: "head",
+headBaseSha: current.baseSha
+};
+} else if (
+// set next patch to point to the last non-deleted patch:
+currentPatches[lastNonDeletedPatchIndex] && currentPatches[i + 1]) {
+changedPatches[currentPatches[i + 1].patchId] = {
+type: "patch",
+patchId: currentPatches[lastNonDeletedPatchIndex].patchId
+};
+}
+}
+} else {
+lastNonDeletedPatchIndex = i;
+}
+}
+return {
+changedPatches
+};
+}

 class ValOpsFS extends ValOps {
 static VAL_DIR = ".val";
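computeChangedPatchParentRefs treats the stored patches as a singly linked list (each patch points at its parent, or at "head") and computes the re-pointing needed when some patches are deleted: the first surviving successor of a deleted run is re-parented onto the nearest surviving ancestor, or onto head. The following is a simplified sketch of that repair step; it omits headBaseSha and uses illustrative names.

// Sketch of the chain-repair idea behind computeChangedPatchParentRefs.
type ParentRef = { type: "head" } | { type: "patch"; patchId: string };
type PatchNode = { patchId: string };

function repairChain(
  ordered: PatchNode[], // oldest ("head"-parented) patch first
  deleteIds: Set<string>
): Record<string, ParentRef> {
  const changed: Record<string, ParentRef> = {};
  let lastSurvivorIdx = -1;
  ordered.forEach((node, i) => {
    if (!deleteIds.has(node.patchId)) {
      lastSurvivorIdx = i;
      return;
    }
    const next = ordered[i + 1];
    if (!next || deleteIds.has(next.patchId)) return; // successor is deleted anyway
    changed[next.patchId] =
      lastSurvivorIdx === -1
        ? { type: "head" }
        : { type: "patch", patchId: ordered[lastSurvivorIdx].patchId };
  });
  return changed;
}

// Deleting B from A -> B -> C re-parents C onto A:
console.log(repairChain([{ patchId: "A" }, { patchId: "B" }, { patchId: "C" }], new Set(["B"])));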
@@ -2518,32 +2532,31 @@ class ValOpsFS extends ValOps {
 patchJsonFiles = this.host.readDirectory(patchesCacheDir, ["patch.json"], [], []);
 }
 const patches = {};
-const errors =
-const
-
-if (
-
-
-
-
-
-
-
-
-
-parsedFSPatchBaseRes = this.parseJsonFile(this.getPatchBaseFile(patchId), FSPatchBase);
-}
-if (parsedFSPatchRes.error) {
-errors[patchId] = parsedFSPatchRes.error;
-} else if (parsedFSPatchBaseRes && parsedFSPatchBaseRes.error) {
-errors[patchId] = parsedFSPatchBaseRes.error;
+const errors = [];
+const parsedUnsortedFsPatches = patchJsonFiles.map(file => fsPath__namespace["default"].basename(fsPath__namespace["default"].dirname(file))).map(patchDir => [patchDir, this.parseJsonFile(this.getPatchFilePath(patchDir), FSPatch), this.host.fileExists(this.getPatchBaseFile(patchDir)) ? this.parseJsonFile(this.getPatchBaseFile(patchDir), FSPatchBase) : undefined]);
+parsedUnsortedFsPatches.forEach(([dir, parsedPatch, parsedBase]) => {
+if (parsedPatch.error) {
+errors.push({
+...parsedPatch.error,
+parentPatchId: dir
+});
+} else if (parsedBase && parsedBase.error) {
+errors.push({
+...parsedBase.error,
+parentPatchId: dir
+});
 } else {
-
-
-
+if (includes && includes.length > 0 && !includes.includes(parsedPatch.data.patchId)) {
+return;
+}
+patches[parsedPatch.data.patchId] = {
+...parsedPatch.data,
+appliedAt: parsedBase ? parsedBase.data : null
 };
 }
-}
+});
+
+// If there are patches, but no head. error
 if (Object.keys(errors).length > 0) {
 return {
 patches,
@@ -2554,36 +2567,52 @@ class ValOpsFS extends ValOps {
 patches
 };
 }
+getParentPatchIdFromParentRef(parentRef) {
+return parentRef.type === "head" ? "head" : parentRef.patchId;
+}
 async fetchPatches(filters) {
+const fetchPatchesRes = await this.fetchPatchesFromFS(!!filters.omitPatch);
+const sortedPatches = this.createPatchChain(fetchPatchesRes.patches).filter(patchData => {
+if (filters.authors && !(patchData.authorId === null || filters.authors.includes(patchData.authorId))) {
+return false;
+}
+if (filters.moduleFilePaths && !filters.moduleFilePaths.includes(patchData.path)) {
+return false;
+}
+return true;
+}).map(patchData => {
+if (filters.omitPatch) {
+return {
+...patchData,
+patch: undefined
+};
+}
+return patchData;
+});
+return {
+patches: sortedPatches,
+errors: fetchPatchesRes.errors
+};
+}
+async fetchPatchesFromFS(omitPath) {
 const patches = {};
-const errors = {};
 const {
-errors
+errors,
 patches: allPatches
-} = await this.readPatches(
-if (allErrors && Object.keys(allErrors).length > 0) {
-for (const [patchId, error] of Object.entries(allErrors)) {
-console.error("Error reading patch", patchId, error);
-errors[patchId] = error;
-}
-}
+} = await this.readPatches();
 for (const [patchIdS, patch] of Object.entries(allPatches)) {
 const patchId = patchIdS;
-if (filters.authors && !(patch.authorId === null || filters.authors.includes(patch.authorId))) {
-continue;
-}
-if (filters.moduleFilePaths && !filters.moduleFilePaths.includes(patch.path)) {
-continue;
-}
 patches[patchId] = {
-patch:
+patch: omitPath ? undefined : patch.patch,
+parentRef: patch.parentRef,
 path: patch.path,
+baseSha: patch.baseSha,
 createdAt: patch.createdAt,
 authorId: patch.authorId,
 appliedAt: patch.appliedAt
 };
 }
-if (errors &&
+if (errors && errors.length > 0) {
 return {
 patches,
 errors
@@ -2594,6 +2623,33 @@ class ValOpsFS extends ValOps {
 };
 }

+// #region createPatchChain
+createPatchChain(unsortedPatchRecord) {
+var _Object$entries$find;
+// TODO: Error handling
+const nextPatch = {};
+Object.keys(unsortedPatchRecord).forEach(patchId => {
+const patch = unsortedPatchRecord[patchId];
+if (patch.parentRef.type === "head") {
+nextPatch["head"] = patchId;
+} else {
+nextPatch[patch.parentRef.patchId] = patchId;
+}
+});
+const sortedPatches = [];
+let nextPatchId = (_Object$entries$find = Object.entries(unsortedPatchRecord).find(([, patch]) => patch.parentRef.type === "head")) === null || _Object$entries$find === void 0 ? void 0 : _Object$entries$find[0];
+while (!!nextPatchId && nextPatchId in unsortedPatchRecord) {
+const patch = unsortedPatchRecord[nextPatchId];
+delete patch["parentRef"];
+sortedPatches.push({
+...patch,
+patchId: nextPatchId
+});
+nextPatchId = nextPatch[nextPatchId];
+}
+return sortedPatches;
+}
+
 // eslint-disable-next-line @typescript-eslint/no-explicit-any
 parseJsonFile(filePath, parser) {
 if (!this.host.fileExists(filePath)) {
|
|
2668
2724
|
};
|
2669
2725
|
}
|
2670
2726
|
}
|
2671
|
-
async saveSourceFilePatch(path, patch, authorId) {
|
2672
|
-
|
2727
|
+
async saveSourceFilePatch(path, patch, parentRef, authorId) {
|
2728
|
+
const patchDir = this.getParentPatchIdFromParentRef(parentRef);
|
2673
2729
|
try {
|
2674
|
-
|
2675
|
-
|
2676
|
-
fileId++;
|
2677
|
-
}
|
2678
|
-
const patchId = fileId.toString();
|
2730
|
+
const baseSha = await this.getBaseSha();
|
2731
|
+
const patchId = crypto.randomUUID();
|
2679
2732
|
const data = {
|
2680
2733
|
patch,
|
2734
|
+
patchId,
|
2735
|
+
parentRef,
|
2681
2736
|
path,
|
2682
2737
|
authorId,
|
2738
|
+
baseSha,
|
2683
2739
|
coreVersion: core.Internal.VERSION.core,
|
2684
2740
|
createdAt: new Date().toISOString()
|
2685
2741
|
};
|
2686
|
-
this.host.
|
2687
|
-
|
2742
|
+
const writeRes = this.host.tryWriteUf8File(this.getPatchFilePath(patchDir), JSON.stringify(data));
|
2743
|
+
if (writeRes.type === "error") {
|
2744
|
+
return writeRes.errorType === "dir-already-exists" ? fp.result.err({
|
2745
|
+
errorType: "patch-head-conflict"
|
2746
|
+
}) : fp.result.err({
|
2747
|
+
errorType: "other",
|
2748
|
+
error: writeRes.error,
|
2749
|
+
message: "Failed to write patch file"
|
2750
|
+
});
|
2751
|
+
}
|
2752
|
+
return fp.result.ok({
|
2688
2753
|
patchId
|
2689
|
-
};
|
2754
|
+
});
|
2690
2755
|
} catch (err) {
|
2691
2756
|
if (err instanceof Error) {
|
2692
|
-
return {
|
2693
|
-
|
2694
|
-
|
2695
|
-
|
2696
|
-
};
|
2757
|
+
return fp.result.err({
|
2758
|
+
errorType: "other",
|
2759
|
+
error: err,
|
2760
|
+
message: err.message
|
2761
|
+
});
|
2697
2762
|
}
|
2698
|
-
return {
|
2699
|
-
|
2700
|
-
|
2701
|
-
|
2702
|
-
};
|
2763
|
+
return fp.result.err({
|
2764
|
+
errorType: "other",
|
2765
|
+
error: err,
|
2766
|
+
message: "Unknown error"
|
2767
|
+
});
|
2703
2768
|
}
|
2704
2769
|
}
|
2705
2770
|
async getSourceFile(path) {
|
@@ -2737,9 +2802,10 @@ class ValOpsFS extends ValOps {
|
|
2737
2802
|
};
|
2738
2803
|
}
|
2739
2804
|
}
|
2740
|
-
async saveBase64EncodedBinaryFileFromPatch(filePath, patchId, data, _type, metadata) {
|
2741
|
-
const
|
2742
|
-
const
|
2805
|
+
async saveBase64EncodedBinaryFileFromPatch(filePath, parentRef, patchId, data, _type, metadata) {
|
2806
|
+
const patchDir = this.getParentPatchIdFromParentRef(parentRef);
|
2807
|
+
const patchFilePath = this.getBinaryFilePath(filePath, patchDir);
|
2808
|
+
const metadataFilePath = this.getBinaryFileMetadataPath(filePath, patchDir);
|
2743
2809
|
try {
|
2744
2810
|
const buffer = bufferFromDataUrl(data);
|
2745
2811
|
if (!buffer) {
|
@@ -2771,7 +2837,15 @@ class ValOpsFS extends ValOps {
|
|
2771
2837
|
}
|
2772
2838
|
}
|
2773
2839
|
async getBase64EncodedBinaryFileMetadataFromPatch(filePath, type, patchId) {
|
2774
|
-
const
|
2840
|
+
const patchDirRes = await this.getParentPatchIdFromPatchId(patchId);
|
2841
|
+
if (fp.result.isErr(patchDirRes)) {
|
2842
|
+
return {
|
2843
|
+
errors: [{
|
2844
|
+
message: "Failed to get patch dir from patch id"
|
2845
|
+
}]
|
2846
|
+
};
|
2847
|
+
}
|
2848
|
+
const metadataFilePath = this.getBinaryFileMetadataPath(filePath, patchDirRes.value);
|
2775
2849
|
if (!this.host.fileExists(metadataFilePath)) {
|
2776
2850
|
return {
|
2777
2851
|
errors: [{
|
@@ -2780,7 +2854,7 @@ class ValOpsFS extends ValOps {
|
|
2780
2854
|
}]
|
2781
2855
|
};
|
2782
2856
|
}
|
2783
|
-
const metadataParseRes = this.parseJsonFile(metadataFilePath,
|
2857
|
+
const metadataParseRes = this.parseJsonFile(metadataFilePath, zod.z.record(zod.z.union([zod.z.string(), zod.z.number()])));
|
2784
2858
|
if (metadataParseRes.error) {
|
2785
2859
|
return {
|
2786
2860
|
errors: [metadataParseRes.error]
|
@@ -2807,7 +2881,11 @@ class ValOpsFS extends ValOps {
|
|
2807
2881
|
};
|
2808
2882
|
}
|
2809
2883
|
async getBase64EncodedBinaryFileFromPatch(filePath, patchId) {
|
2810
|
-
const
|
2884
|
+
const patchDirRes = await this.getParentPatchIdFromPatchId(patchId);
|
2885
|
+
if (!fp.result.isOk(patchDirRes)) {
|
2886
|
+
return null;
|
2887
|
+
}
|
2888
|
+
const absPath = this.getBinaryFilePath(filePath, patchDirRes.value);
|
2811
2889
|
if (!this.host.fileExists(absPath)) {
|
2812
2890
|
return null;
|
2813
2891
|
}
|
@@ -2815,30 +2893,62 @@ class ValOpsFS extends ValOps {
|
|
2815
2893
|
}
|
2816
2894
|
async deletePatches(patchIds) {
|
2817
2895
|
const deleted = [];
|
2818
|
-
|
2819
|
-
|
2820
|
-
try {
|
2821
|
-
this.host.deleteDir(this.getPatchDir(patchId));
|
2822
|
-
deleted.push(patchId);
|
2823
|
-
} catch (err) {
|
2824
|
-
if (!errors) {
|
2825
|
-
errors = {};
|
2826
|
-
}
|
2827
|
-
errors[patchId] = {
|
2828
|
-
message: err instanceof Error ? err.message : "Unknown error"
|
2829
|
-
};
|
2830
|
-
}
|
2831
|
-
}
|
2832
|
-
if (errors) {
|
2896
|
+
const patchDirMapRes = await this.getParentPatchIdFromPatchIdMap();
|
2897
|
+
if (fp.result.isErr(patchDirMapRes)) {
|
2833
2898
|
return {
|
2834
|
-
|
2835
|
-
|
2899
|
+
error: {
|
2900
|
+
message: "Failed to get patch dir map"
|
2901
|
+
}
|
2836
2902
|
};
|
2837
2903
|
}
|
2904
|
+
const currentPatches = this.createPatchChain((await this.fetchPatchesFromFS(false)).patches);
|
2905
|
+
this.updateOrderedPatches(computeChangedPatchParentRefs(currentPatches, patchIds), patchDirMapRes.value, patchIds);
|
2838
2906
|
return {
|
2839
2907
|
deleted
|
2840
2908
|
};
|
2841
2909
|
}
|
2910
|
+
updateOrderedPatches(updates, patchDirMap, deletePatchIds) {
|
2911
|
+
for (const patchId of deletePatchIds) {
|
2912
|
+
const patchDir = patchDirMap[patchId];
|
2913
|
+
if (!patchDir) {
|
2914
|
+
console.error("Could not find patch dir for patch id scheduled for deletion: ", patchId);
|
2915
|
+
continue;
|
2916
|
+
}
|
2917
|
+
try {
|
2918
|
+
this.host.deleteDir(this.getFullPatchDir(patchDir));
|
2919
|
+
} catch (err) {
|
2920
|
+
console.error("Failed to delete patch dir", err);
|
2921
|
+
}
|
2922
|
+
}
|
2923
|
+
for (const [patchIdS, parentRef] of Object.entries(updates.changedPatches)) {
|
2924
|
+
const prevParentPatchId = patchDirMap[patchIdS];
|
2925
|
+
if (!prevParentPatchId) {
|
2926
|
+
console.error("Could not find previous parent patch id for deleted patch id: ", patchIdS);
|
2927
|
+
continue;
|
2928
|
+
}
|
2929
|
+
const newParentPatchId = parentRef.type === "head" ? "head" : parentRef.patchId;
|
2930
|
+
const currentPatchDataRes = this.parseJsonFile(this.getPatchFilePath(prevParentPatchId), FSPatch);
|
2931
|
+
if (currentPatchDataRes.error) {
|
2932
|
+
console.error("Failed to parse patch file while fixing patch chain after deleted patch", {
|
2933
|
+
updates
|
2934
|
+
}, currentPatchDataRes.error);
|
2935
|
+
continue;
|
2936
|
+
}
|
2937
|
+
const newPatchData = currentPatchDataRes.data;
|
2938
|
+
newPatchData.parentRef = parentRef;
|
2939
|
+
try {
|
2940
|
+
this.host.writeUf8File(this.getPatchFilePath(prevParentPatchId), JSON.stringify(newPatchData));
|
2941
|
+
if (this.host.directoryExists(this.getFullPatchDir(newParentPatchId))) {
|
2942
|
+
this.host.deleteDir(this.getFullPatchDir(newParentPatchId));
|
2943
|
+
}
|
2944
|
+
this.host.moveDir(this.getFullPatchDir(prevParentPatchId), this.getFullPatchDir(newParentPatchId));
|
2945
|
+
} catch (err) {
|
2946
|
+
console.error("Failed fix patch chain after deleted patch", {
|
2947
|
+
updates
|
2948
|
+
}, err);
|
2949
|
+
}
|
2950
|
+
}
|
2951
|
+
}
|
2842
2952
|
async saveFiles(preparedCommit) {
|
2843
2953
|
const updatedFiles = [];
|
2844
2954
|
const errors = {};
|
@@ -2854,12 +2964,28 @@ class ValOpsFS extends ValOps {
|
|
2854
2964
|
};
|
2855
2965
|
}
|
2856
2966
|
}
|
2967
|
+
const patchIdToPatchDirMapRes = await this.getParentPatchIdFromPatchIdMap();
|
2968
|
+
if (fp.result.isErr(patchIdToPatchDirMapRes)) {
|
2969
|
+
return {
|
2970
|
+
updatedFiles,
|
2971
|
+
errors
|
2972
|
+
};
|
2973
|
+
}
|
2974
|
+
const patchIdToPatchDirMap = patchIdToPatchDirMapRes.value;
|
2857
2975
|
for (const [filePath, {
|
2858
2976
|
patchId
|
2859
2977
|
}] of Object.entries(preparedCommit.patchedBinaryFilesDescriptors)) {
|
2860
2978
|
const absPath = fsPath__namespace["default"].join(this.rootDir, ...filePath.split("/"));
|
2861
2979
|
try {
|
2862
|
-
|
2980
|
+
const patchDir = patchIdToPatchDirMap[patchId];
|
2981
|
+
if (!patchDir) {
|
2982
|
+
errors[absPath] = {
|
2983
|
+
message: "Failed to find PatchDir for PatchId " + patchId,
|
2984
|
+
filePath
|
2985
|
+
};
|
2986
|
+
continue;
|
2987
|
+
}
|
2988
|
+
this.host.copyFile(this.getBinaryFilePath(filePath, patchDir), absPath);
|
2863
2989
|
updatedFiles.push(absPath);
|
2864
2990
|
} catch (err) {
|
2865
2991
|
errors[absPath] = {
|
@@ -2873,7 +2999,14 @@ class ValOpsFS extends ValOps {
|
|
2873
2999
|
baseSha: await this.getBaseSha(),
|
2874
3000
|
timestamp: new Date().toISOString()
|
2875
3001
|
};
|
2876
|
-
const
|
3002
|
+
const patchDir = patchIdToPatchDirMap[patchId];
|
3003
|
+
if (!patchDir) {
|
3004
|
+
errors[`patchId:${patchId}`] = {
|
3005
|
+
message: "Failed to find PatchDir for PatchId " + patchId
|
3006
|
+
};
|
3007
|
+
continue;
|
3008
|
+
}
|
3009
|
+
const absPath = this.getPatchBaseFile(patchDir);
|
2877
3010
|
try {
|
2878
3011
|
this.host.writeUf8File(absPath, JSON.stringify(appliedAt));
|
2879
3012
|
} catch (err) {
|
@@ -2917,25 +3050,47 @@ class ValOpsFS extends ValOps {
|
|
2917
3050
|
}
|
2918
3051
|
return createMetadataFromBuffer(type, mimeType, buffer);
|
2919
3052
|
}
|
3053
|
+
async getParentPatchIdFromPatchId(patchId) {
|
3054
|
+
// This is not great. If needed we should find a better way
|
3055
|
+
const patches = await this.readPatches();
|
3056
|
+
if (patches.errors || patches.error) {
|
3057
|
+
console.error("Failed to read patches", JSON.stringify(patches));
|
3058
|
+
return fp.result.err("failed-to-read-patches");
|
3059
|
+
}
|
3060
|
+
const patch = patches.patches[patchId];
|
3061
|
+
if (!patch) {
|
3062
|
+
console.error("Could not find patch with patchId: ", patchId);
|
3063
|
+
return fp.result.err("patch-not-found");
|
3064
|
+
}
|
3065
|
+
return fp.result.ok(this.getParentPatchIdFromParentRef(patch.parentRef));
|
3066
|
+
}
|
3067
|
+
async getParentPatchIdFromPatchIdMap() {
|
3068
|
+
const patches = await this.readPatches();
|
3069
|
+
if (patches.errors || patches.error) {
|
3070
|
+
console.error("Failed to read patches", JSON.stringify(patches));
|
3071
|
+
return fp.result.err("failed-to-read-patches");
|
3072
|
+
}
|
3073
|
+
return fp.result.ok(Object.fromEntries(Object.entries(patches.patches).map(([patchId, value]) => [patchId, this.getParentPatchIdFromParentRef(value.parentRef)])));
|
3074
|
+
}
|
2920
3075
|
|
2921
3076
|
// #region fs file path helpers
|
2922
3077
|
getPatchesDir() {
|
2923
3078
|
return fsPath__namespace["default"].join(this.rootDir, ValOpsFS.VAL_DIR, "patches");
|
2924
3079
|
}
|
2925
|
-
|
2926
|
-
return fsPath__namespace["default"].join(this.getPatchesDir(),
|
3080
|
+
getFullPatchDir(patchDir) {
|
3081
|
+
return fsPath__namespace["default"].join(this.getPatchesDir(), patchDir);
|
2927
3082
|
}
|
2928
|
-
getBinaryFilePath(filePath,
|
2929
|
-
return fsPath__namespace["default"].join(this.
|
3083
|
+
getBinaryFilePath(filePath, patchDir) {
|
3084
|
+
return fsPath__namespace["default"].join(this.getFullPatchDir(patchDir), "files", filePath, fsPath__namespace["default"].basename(filePath));
|
2930
3085
|
}
|
2931
|
-
getBinaryFileMetadataPath(filePath,
|
2932
|
-
return fsPath__namespace["default"].join(this.
|
3086
|
+
getBinaryFileMetadataPath(filePath, patchDir) {
|
3087
|
+
return fsPath__namespace["default"].join(this.getFullPatchDir(patchDir), "files", filePath, "metadata.json");
|
2933
3088
|
}
|
2934
|
-
getPatchFilePath(
|
2935
|
-
return fsPath__namespace["default"].join(this.
|
3089
|
+
getPatchFilePath(patchDir) {
|
3090
|
+
return fsPath__namespace["default"].join(this.getFullPatchDir(patchDir), "patch.json");
|
2936
3091
|
}
|
2937
|
-
getPatchBaseFile(
|
2938
|
-
return fsPath__namespace["default"].join(this.
|
3092
|
+
getPatchBaseFile(patchDir) {
|
3093
|
+
return fsPath__namespace["default"].join(this.getFullPatchDir(patchDir), "base.json");
|
2939
3094
|
}
|
2940
3095
|
}
|
2941
3096
|
class FSOpsHost {
|
@@ -2949,6 +3104,9 @@ class FSOpsHost {
|
|
2949
3104
|
});
|
2950
3105
|
}
|
2951
3106
|
}
|
3107
|
+
moveDir(from, to) {
|
3108
|
+
fs__default["default"].renameSync(from, to);
|
3109
|
+
}
|
2952
3110
|
directoryExists(path) {
|
2953
3111
|
return ts__default["default"].sys.directoryExists(path);
|
2954
3112
|
}
|
@@ -2970,6 +3128,37 @@ class FSOpsHost {
|
|
2970
3128
|
});
|
2971
3129
|
fs__default["default"].writeFileSync(path, data, "utf-8");
|
2972
3130
|
}
|
3131
|
+
tryWriteUf8File(path, data) {
|
3132
|
+
try {
|
3133
|
+
const parentDir = fsPath__namespace["default"].join(fsPath__namespace["default"].dirname(path), "../");
|
3134
|
+
fs__default["default"].mkdirSync(parentDir, {
|
3135
|
+
recursive: true
|
3136
|
+
});
|
3137
|
+
// Make the parent dir separately. This is because we need mkdir to throw
|
3138
|
+
// if the directory already exists. If we use recursive: true, it doesn't
|
3139
|
+
fs__default["default"].mkdirSync(fsPath__namespace["default"].dirname(path), {
|
3140
|
+
recursive: false
|
3141
|
+
});
|
3142
|
+
} catch (e) {
|
3143
|
+
return {
|
3144
|
+
type: "error",
|
3145
|
+
errorType: "dir-already-exists",
|
3146
|
+
error: e
|
3147
|
+
};
|
3148
|
+
}
|
3149
|
+
try {
|
3150
|
+
fs__default["default"].writeFileSync(path, data, "utf-8");
|
3151
|
+
} catch (e) {
|
3152
|
+
return {
|
3153
|
+
type: "error",
|
3154
|
+
errorType: "failed-to-write-file",
|
3155
|
+
error: e
|
3156
|
+
};
|
3157
|
+
}
|
3158
|
+
return {
|
3159
|
+
type: "success"
|
3160
|
+
};
|
3161
|
+
}
|
2973
3162
|
writeBinaryFile(path, data) {
|
2974
3163
|
fs__default["default"].mkdirSync(fsPath__namespace["default"].dirname(path), {
|
2975
3164
|
recursive: true
|
@@ -2983,98 +3172,97 @@ class FSOpsHost {
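tryWriteUf8File doubles as a concurrency guard: each patch is written into a directory named after its parent patch, and that directory is created with recursive: false so a second writer racing for the same parent hits an already-exists error, which the caller maps to a patch-head-conflict. A sketch of that claim-then-write pattern in TypeScript (the function name is illustrative):

import * as fs from "fs";
import * as path from "path";

// Create the patch's directory non-recursively so that an existing directory
// (another patch already claimed this parent) surfaces as an error.
function tryClaimAndWrite(filePath: string, data: string):
  | { type: "success" }
  | { type: "error"; errorType: "dir-already-exists" | "failed-to-write-file"; error: unknown } {
  try {
    fs.mkdirSync(path.join(path.dirname(filePath), ".."), { recursive: true });
    fs.mkdirSync(path.dirname(filePath), { recursive: false }); // throws if already claimed
  } catch (e) {
    return { type: "error", errorType: "dir-already-exists", error: e };
  }
  try {
    fs.writeFileSync(filePath, data, "utf-8");
  } catch (e) {
    return { type: "error", errorType: "failed-to-write-file", error: e };
  }
  return { type: "success" };
}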
|
|
2983
3172
|
fs__default["default"].copyFileSync(from, to);
|
2984
3173
|
}
|
2985
3174
|
}
|
2986
|
-
const FSPatch =
|
2987
|
-
path:
|
2988
|
-
patch: Patch,
|
2989
|
-
|
2990
|
-
|
2991
|
-
|
3175
|
+
const FSPatch = zod.z.object({
|
3176
|
+
path: zod.z.string().refine(p => p.startsWith("/") && p.includes(".val."), "Path is not valid. Must start with '/' and include '.val.'"),
|
3177
|
+
patch: internal.Patch,
|
3178
|
+
patchId: zod.z.string(),
|
3179
|
+
baseSha: zod.z.string(),
|
3180
|
+
parentRef: internal.ParentRef,
|
3181
|
+
authorId: zod.z.string().refine(p => true).nullable(),
|
3182
|
+
createdAt: zod.z.string().datetime(),
|
3183
|
+
coreVersion: zod.z.string().nullable() // TODO: use this to check if patch is compatible with current core version?
|
2992
3184
|
});
|
2993
|
-
const FSPatchBase =
|
2994
|
-
baseSha:
|
2995
|
-
timestamp:
|
3185
|
+
const FSPatchBase = zod.z.object({
|
3186
|
+
baseSha: zod.z.string().refine(p => true),
|
3187
|
+
timestamp: zod.z.string().datetime()
|
2996
3188
|
});
|
2997
3189
|
|
2998
3190
|
const textEncoder = new TextEncoder();
|
2999
|
-
const PatchId =
|
3000
|
-
const CommitSha =
|
3001
|
-
|
3002
|
-
const AuthorId =
|
3003
|
-
const ModuleFilePath =
|
3004
|
-
const Metadata =
|
3005
|
-
mimeType:
|
3006
|
-
width:
|
3007
|
-
height:
|
3008
|
-
}),
|
3009
|
-
mimeType:
|
3191
|
+
const PatchId = zod.z.string().refine(s => !!s); // TODO: validate
|
3192
|
+
const CommitSha = zod.z.string().refine(s => !!s); // TODO: validate
|
3193
|
+
zod.z.string().refine(s => !!s); // TODO: validate
|
3194
|
+
const AuthorId = zod.z.string().refine(s => !!s); // TODO: validate
|
3195
|
+
const ModuleFilePath = zod.z.string().refine(s => !!s); // TODO: validate
|
3196
|
+
const Metadata = zod.z.union([zod.z.object({
|
3197
|
+
mimeType: zod.z.string(),
|
3198
|
+
width: zod.z.number(),
|
3199
|
+
height: zod.z.number()
|
3200
|
+
}), zod.z.object({
|
3201
|
+
mimeType: zod.z.string()
|
3010
3202
|
})]);
|
3011
|
-
const MetadataRes =
|
3203
|
+
const MetadataRes = zod.z.object({
|
3012
3204
|
filePath: ModuleFilePath,
|
3013
3205
|
metadata: Metadata,
|
3014
|
-
type:
|
3206
|
+
type: zod.z.union([zod.z.literal("file"), zod.z.literal("image")]).nullable()
|
3015
3207
|
});
|
3016
|
-
const BasePatchResponse =
|
3208
|
+
const BasePatchResponse = zod.z.object({
|
3017
3209
|
path: ModuleFilePath,
|
3018
3210
|
patchId: PatchId,
|
3019
3211
|
authorId: AuthorId.nullable(),
|
3020
|
-
createdAt:
|
3021
|
-
|
3022
|
-
baseSha: BaseSha,
|
3023
|
-
commitSha: CommitSha,
|
3024
|
-
appliedAt: z.z.string().datetime()
|
3025
|
-
}).nullable()
|
3212
|
+
createdAt: zod.z.string().datetime(),
|
3213
|
+
baseSha: zod.z.string()
|
3026
3214
|
});
|
3027
|
-
const GetPatches =
|
3028
|
-
patches:
|
3029
|
-
patch: Patch.optional()
|
3215
|
+
const GetPatches = zod.z.object({
|
3216
|
+
patches: zod.z.array(zod.z.intersection(zod.z.object({
|
3217
|
+
patch: internal.Patch.optional()
|
3030
3218
|
}), BasePatchResponse)),
|
3031
|
-
errors:
|
3219
|
+
errors: zod.z.array(zod.z.object({
|
3032
3220
|
patchId: PatchId.optional(),
|
3033
|
-
message:
|
3221
|
+
message: zod.z.string()
|
3034
3222
|
})).optional()
|
3035
3223
|
});
|
3036
|
-
const FilesResponse =
|
3037
|
-
files:
|
3038
|
-
filePath:
|
3039
|
-
location:
|
3224
|
+
const FilesResponse = zod.z.object({
|
3225
|
+
files: zod.z.array(zod.z.union([zod.z.object({
|
3226
|
+
filePath: zod.z.string(),
|
3227
|
+
location: zod.z.literal("patch"),
|
3040
3228
|
patchId: PatchId,
|
3041
|
-
value:
|
3042
|
-
}),
|
3043
|
-
filePath:
|
3044
|
-
location:
|
3229
|
+
value: zod.z.string()
|
3230
|
+
}), zod.z.object({
|
3231
|
+
filePath: zod.z.string(),
|
3232
|
+
location: zod.z.literal("repo"),
|
3045
3233
|
commitSha: CommitSha,
|
3046
|
-
value:
|
3234
|
+
value: zod.z.string()
|
3047
3235
|
})])),
|
3048
|
-
errors:
|
3049
|
-
filePath:
|
3050
|
-
location:
|
3236
|
+
errors: zod.z.array(zod.z.union([zod.z.object({
|
3237
|
+
filePath: zod.z.string(),
|
3238
|
+
location: zod.z.literal("patch"),
|
3051
3239
|
patchId: PatchId,
|
3052
|
-
message:
|
3053
|
-
}),
|
3054
|
-
filePath:
|
3055
|
-
location:
|
3240
|
+
message: zod.z.string()
|
3241
|
+
}), zod.z.object({
|
3242
|
+
filePath: zod.z.string(),
|
3243
|
+
location: zod.z.literal("repo"),
|
3056
3244
|
commitSha: CommitSha,
|
3057
|
-
message:
|
3245
|
+
message: zod.z.string()
|
3058
3246
|
})])).optional()
|
3059
3247
|
});
|
3060
|
-
const SavePatchResponse =
|
3248
|
+
const SavePatchResponse = zod.z.object({
|
3061
3249
|
patchId: PatchId
|
3062
3250
|
});
|
3063
|
-
const DeletePatchesResponse =
|
3064
|
-
deleted:
|
3065
|
-
errors:
|
3066
|
-
message:
|
3251
|
+
const DeletePatchesResponse = zod.z.object({
|
3252
|
+
deleted: zod.z.array(PatchId),
|
3253
|
+
errors: zod.z.array(zod.z.object({
|
3254
|
+
message: zod.z.string(),
|
3067
3255
|
patchId: PatchId
|
3068
3256
|
})).optional()
|
3069
3257
|
});
|
3070
|
-
|
3258
|
+
zod.z.object({
|
3071
3259
|
patchId: PatchId,
|
3072
3260
|
filePath: ModuleFilePath
|
3073
3261
|
});
|
3074
|
-
const CommitResponse =
|
3075
|
-
updatedFiles:
|
3262
|
+
const CommitResponse = zod.z.object({
|
3263
|
+
updatedFiles: zod.z.array(zod.z.string()),
|
3076
3264
|
commit: CommitSha,
|
3077
|
-
branch:
|
3265
|
+
branch: zod.z.string()
|
3078
3266
|
});
|
3079
3267
|
class ValOpsHttp extends ValOps {
|
3080
3268
|
constructor(hostUrl, project, commitSha,
|
@@ -3104,18 +3292,35 @@ class ValOpsHttp extends ValOps {
|
|
3104
3292
|
}
|
3105
3293
|
const currentBaseSha = await this.getBaseSha();
|
3106
3294
|
const currentSchemaSha = await this.getSchemaSha();
|
3107
|
-
const
|
3295
|
+
const allPatchData = await this.fetchPatches({
|
3108
3296
|
omitPatch: true,
|
3109
3297
|
authors: undefined,
|
3110
3298
|
patchIds: undefined,
|
3111
3299
|
moduleFilePaths: undefined
|
3112
3300
|
});
|
3301
|
+
// We think these errors will be picked up else where (?), so we only return an error here if there are no patches
|
3302
|
+
if (allPatchData.patches.length === 0) {
|
3303
|
+
let message;
|
3304
|
+
if (allPatchData.error) {
|
3305
|
+
message = allPatchData.error.message;
|
3306
|
+
} else if (allPatchData.errors && allPatchData.errors.length > 0) {
|
3307
|
+
const errors = allPatchData.errors;
|
3308
|
+
message = errors.map(error => error.message).join("");
|
3309
|
+
}
|
3310
|
+
if (message) {
|
3311
|
+
message = `Could not get patches: ${message}`;
|
3312
|
+
console.error(message);
|
3313
|
+
return {
|
3314
|
+
type: "error",
|
3315
|
+
error: {
|
3316
|
+
message
|
3317
|
+
}
|
3318
|
+
};
|
3319
|
+
}
|
3320
|
+
}
|
3113
3321
|
const patches = [];
|
3114
|
-
|
3115
|
-
|
3116
|
-
return a.createdAt.localeCompare(b.createdAt, undefined);
|
3117
|
-
})) {
|
3118
|
-
patches.push(patchId);
|
3322
|
+
for (const patchData of allPatchData.patches) {
|
3323
|
+
patches.push(patchData.patchId);
|
3119
3324
|
}
|
3120
3325
|
const webSocketNonceRes = await this.getWebSocketNonce(params.profileId);
|
3121
3326
|
if (webSocketNonceRes.status === "error") {
|
@@ -3176,6 +3381,16 @@ class ValOpsHttp extends ValOps {
|
|
3176
3381
|
}
|
3177
3382
|
};
|
3178
3383
|
}
|
3384
|
+
const contentType = res.headers.get("Content-Type") || "";
|
3385
|
+
if (contentType.startsWith("application/json")) {
|
3386
|
+
const json = await res.json();
|
3387
|
+
return {
|
3388
|
+
status: "error",
|
3389
|
+
error: {
|
3390
|
+
message: "Could not get nonce." + (json.message || "Unexpected error (no error message). Status: " + res.status)
|
3391
|
+
}
|
3392
|
+
};
|
3393
|
+
}
|
3179
3394
|
return {
|
3180
3395
|
status: "error",
|
3181
3396
|
error: {
|
@@ -3201,8 +3416,8 @@ class ValOpsHttp extends ValOps {
|
|
3201
3416
|
for (let i = 0; i < patchIds.length; i += chunkSize) {
|
3202
3417
|
patchIdChunks.push(patchIds.slice(i, i + chunkSize));
|
3203
3418
|
}
|
3204
|
-
let allPatches =
|
3205
|
-
let allErrors =
|
3419
|
+
let allPatches = [];
|
3420
|
+
let allErrors = [];
|
3206
3421
|
if (patchIds === undefined || patchIds.length === 0) {
|
3207
3422
|
return this.fetchPatchesInternal({
|
3208
3423
|
patchIds: patchIds,
|
@@ -3225,10 +3440,7 @@ class ValOpsHttp extends ValOps {
|
|
3225
3440
|
...res.patches
|
3226
3441
|
};
|
3227
3442
|
if (res.errors) {
|
3228
|
-
allErrors =
|
3229
|
-
...allErrors,
|
3230
|
-
...res.errors
|
3231
|
-
};
|
3443
|
+
allErrors = [...allErrors, ...res.errors];
|
3232
3444
|
}
|
3233
3445
|
}
|
3234
3446
|
return {
|
@@ -3264,27 +3476,23 @@ class ValOpsHttp extends ValOps {
|
|
3264
3476
|
"Content-Type": "application/json"
|
3265
3477
|
}
|
3266
3478
|
}).then(async res => {
|
3267
|
-
const patches =
|
3479
|
+
const patches = [];
|
3268
3480
|
if (res.ok) {
|
3269
3481
|
const json = await res.json();
|
3270
3482
|
const parsed = GetPatches.safeParse(json);
|
3271
3483
|
if (parsed.success) {
|
3484
|
+
const errors = [];
|
3272
3485
|
const data = parsed.data;
|
3273
|
-
const errors = {};
|
3274
3486
|
for (const patchesRes of data.patches) {
|
3275
|
-
patches
|
3276
|
-
path: patchesRes.path,
|
3487
|
+
patches.push({
|
3277
3488
|
authorId: patchesRes.authorId,
|
3278
3489
|
createdAt: patchesRes.createdAt,
|
3279
|
-
appliedAt:
|
3280
|
-
|
3281
|
-
|
3282
|
-
|
3283
|
-
|
3284
|
-
|
3285
|
-
},
|
3286
|
-
patch: patchesRes.patch
|
3287
|
-
};
|
3490
|
+
appliedAt: null,
|
3491
|
+
patchId: patchesRes.patchId,
|
3492
|
+
path: patchesRes.path,
|
3493
|
+
baseSha: patchesRes.baseSha,
|
3494
|
+
patch: filters.omitPatch ? undefined : patchesRes.patch
|
3495
|
+
});
|
3288
3496
|
}
|
3289
3497
|
return {
|
3290
3498
|
patches,
|
@@ -3306,7 +3514,8 @@ class ValOpsHttp extends ValOps {
|
|
3306
3514
|
};
|
3307
3515
|
});
|
3308
3516
|
}
|
3309
|
-
async saveSourceFilePatch(path, patch, authorId) {
|
3517
|
+
async saveSourceFilePatch(path, patch, parentRef, authorId) {
|
3518
|
+
const baseSha = await this.getBaseSha();
|
3310
3519
|
return fetch(`${this.hostUrl}/v1/${this.project}/patches`, {
|
3311
3520
|
method: "POST",
|
3312
3521
|
headers: {
|
@@ -3317,78 +3526,53 @@ class ValOpsHttp extends ValOps {
 path,
 patch,
 authorId,
+parentPatchId: parentRef.type === "patch" ? parentRef.patchId : null,
+baseSha,
 commit: this.commitSha,
 branch: this.branch,
 coreVersion: core.Internal.VERSION.core
 })
 }).then(async res => {
+var _res$headers$get;
 if (res.ok) {
 const parsed = SavePatchResponse.safeParse(await res.json());
 if (parsed.success) {
-return {
+return fp.result.ok({
 patchId: parsed.data.patchId
-};
+});
 }
-return {
-
-
-
-};
+return fp.result.err({
+errorType: "other",
+message: `Could not parse save patch response. Error: ${zodValidationError.fromError(parsed.error)}`
+});
 }
-
-
-
-
-
-}).catch(e => {
-return {
-error: {
-message: `Could save source file patch (connection error?): ${e instanceof Error ? e.message : e.toString()}`
-}
-};
-});
-}
-async saveBase64EncodedBinaryFileFromPatch(filePath, patchId, data, type, metadata) {
-return fetch(`${this.hostUrl}/v1/${this.project}/patches/${patchId}/files`, {
-method: "POST",
-headers: {
-...this.authHeaders,
-"Content-Type": "application/json"
-},
-body: JSON.stringify({
-filePath: filePath,
-data,
-type,
-metadata
-})
-}).then(async res => {
-if (res.ok) {
-const parsed = SavePatchFileResponse.safeParse(await res.json());
-if (parsed.success) {
-return {
-patchId: parsed.data.patchId,
-filePath: parsed.data.filePath
-};
-}
-return {
-error: {
-message: `Could not parse save patch file response. Error: ${zodValidationError.fromError(parsed.error)}`
-}
-};
+if (res.status === 409) {
+return fp.result.err({
+errorType: "patch-head-conflict",
+message: "Conflict: " + (await res.text())
+});
 }
-
-
-
-
-
+if ((_res$headers$get = res.headers.get("Content-Type")) !== null && _res$headers$get !== void 0 && _res$headers$get.includes("application/json")) {
+const json = await res.json();
+return fp.result.err({
+errorType: "other",
+message: json.message || "Unknown error"
+});
+}
+return fp.result.err({
+errorType: "other",
+message: "Could not save patch. HTTP error: " + res.status + " " + res.statusText
+});
 }).catch(e => {
-return {
-
-
-
-};
+return fp.result.err({
+errorType: "other",
+message: `Could save source file patch (connection error?): ${e instanceof Error ? e.message : e.toString()}`
+});
 });
 }
+async saveBase64EncodedBinaryFileFromPatch(filePath, parentRef, patchId, data, type, metadata) {
+throw Error("TODO: implement");
+}
 async getHttpFiles(files) {
 const params = new URLSearchParams();
 const stringifiedFiles = JSON.stringify({
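The hunk above switches ValOpsHttp's save-patch calls from returning plain `{ patchId }` / `{ error }` objects to `fp.result.ok` / `fp.result.err` values with an `errorType` discriminator, and maps an HTTP 409 to "patch-head-conflict". A minimal TypeScript sketch of how a caller could branch on that result; the `SavePatchResult` type and `describeSaveResult` helper are illustrative names, not part of the package:

// Illustrative sketch (not from the package): modelling the result-based
// return value of the save-patch call shown in the hunk above.
type SavePatchResult =
  | { kind: "ok"; value: { patchId: string } }
  | { kind: "err"; error: { errorType: "patch-head-conflict" | "other"; message: string } };

function describeSaveResult(res: SavePatchResult): string {
  if (res.kind === "err") {
    // A 409 from the host is surfaced as "patch-head-conflict" so callers can
    // refetch the patch head and retry instead of treating it as a hard failure.
    if (res.error.errorType === "patch-head-conflict") {
      return "stale parentRef, refetch patches and retry";
    }
    return "failed: " + res.error.message;
  }
  return "saved patch " + res.value.patchId;
}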
@@ -3491,7 +3675,7 @@ class ValOpsHttp extends ValOps {
 const params = new URLSearchParams();
 params.set("file_path", filePath);
 try {
-const metadataRes = await fetch(`${this.hostUrl}/v1/${this.project}/patches/${patchId}/
+const metadataRes = await fetch(`${this.hostUrl}/v1/${this.project}/patches/${patchId}/files?${params}`, {
 headers: {
 ...this.authHeaders,
 "Content-Type": "application/json"
@@ -3593,7 +3777,7 @@ class ValOpsHttp extends ValOps {
 }
 async commit(prepared, message, committer, newBranch) {
 try {
-var _res$headers$
+var _res$headers$get2;
 const existingBranch = this.branch;
 const res = await fetch(`${this.hostUrl}/v1/${this.project}/commit`, {
 method: "POST",
@@ -3629,7 +3813,7 @@ class ValOpsHttp extends ValOps {
 }
 };
 }
-if ((_res$headers$
+if ((_res$headers$get2 = res.headers.get("Content-Type")) !== null && _res$headers$get2 !== void 0 && _res$headers$get2.includes("application/json")) {
 const json = await res.json();
 if (json.isNotFastForward) {
 return {
@@ -4168,8 +4352,74 @@ const ValServer = (valModules, options, callbacks) => {
 }
 },
 //#region patches
-"/patches
+"/patches": {
+PUT: async req => {
+const cookies = req.cookies;
+const auth = getAuth(cookies);
+if (auth.error) {
+return {
+status: 401,
+json: {
+message: auth.error
+}
+};
+}
+if (serverOps instanceof ValOpsHttp && !("id" in auth)) {
+return {
+status: 401,
+json: {
+message: "Unauthorized"
+}
+};
+}
+const patches = req.body.patches;
+const parentRef = req.body.parentRef;
+const authorId = "id" in auth ? auth.id : null;
+const newPatchIds = [];
+for (const patch of patches) {
+const createPatchRes = await serverOps.createPatch(patch.path, patch.patch, parentRef, authorId);
+if (fp.result.isErr(createPatchRes)) {
+if (createPatchRes.error.errorType === "patch-head-conflict") {
+return {
+status: 409,
+json: {
+type: "patch-head-conflict",
+message: "Patch id conflict"
+}
+};
+} else {
+return {
+status: 400,
+json: {
+type: "patch-error",
+message: "Could not create patch",
+errors: {
+[patch.path]: [{
+error: {
+message: createPatchRes.error.error.message
+}
+}]
+}
+}
+};
+}
+} else {
+newPatchIds.push(createPatchRes.value.patchId);
+}
+}
+return {
+status: 200,
+json: {
+newPatchIds,
+parentRef: {
+type: "patch",
+patchId: newPatchIds[newPatchIds.length - 1]
+}
+}
+};
+},
 GET: async req => {
+// TODO: Fix type error patchId is string somewhere and PatchId somewhere else
 const query = req.query;
 const cookies = req.cookies;
 const auth = getAuth(cookies);
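From the handler above one can infer the wire format of the new PUT /patches endpoint: the body carries a parentRef plus a list of { path, patch } entries, a success response returns the created patch ids together with the new head parentRef, and a stale parentRef yields a 409. A hedged TypeScript sketch of a client call; the route prefix, the non-"patch" parentRef variant, and the patch operation type are assumptions, not taken from the package:

// Illustrative client-side sketch (not from the package); the "/api/val" prefix,
// the { type: "base"; baseSha } variant and the unknown[] patch type are guesses.
type ParentRef =
  | { type: "patch"; patchId: string }
  | { type: "base"; baseSha: string };

type PutPatchesBody = {
  parentRef: ParentRef;
  patches: { path: string; patch: unknown[] }[];
};

type PutPatchesOk = {
  newPatchIds: string[];
  parentRef: { type: "patch"; patchId: string };
};

async function putPatches(body: PutPatchesBody): Promise<PutPatchesOk> {
  const res = await fetch("/api/val/patches", {
    method: "PUT",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify(body),
  });
  if (res.status === 409) {
    // parentRef no longer points at the current patch head
    throw new Error("patch-head-conflict");
  }
  if (!res.ok) {
    throw new Error("Could not create patches: " + res.status);
  }
  return (await res.json()) as PutPatchesOk;
}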
@@ -4189,38 +4439,52 @@ const ValServer = (valModules, options, callbacks) => {
 }
 };
 }
+const omit_patch = query.omit_patch === true;
 const authors = query.author;
-const
+const fetchedPatchesRes = await serverOps.fetchPatches({
 authors,
 patchIds: query.patch_id,
-omitPatch:
+omitPatch: omit_patch,
 moduleFilePaths: query.module_file_path
 });
-if (
+if (fetchedPatchesRes.error) {
 // Error is singular
-console.error("Val: Failed to get patches",
+console.error("Val: Failed to get patches", fetchedPatchesRes.error);
 return {
 status: 500,
 json: {
-message:
-
+message: fetchedPatchesRes.error.message,
+error: fetchedPatchesRes.error
 }
 };
 }
-if (
+if (fetchedPatchesRes.errors && Object.keys(fetchedPatchesRes.errors).length > 0) {
 // Errors is plural. Different property than above.
-console.error("Val: Failed to get patches",
+console.error("Val: Failed to get patches", fetchedPatchesRes.errors);
 return {
 status: 500,
 json: {
 message: "Failed to get patches",
-
+patchErrors: fetchedPatchesRes.errors
 }
 };
 }
+const patches = [];
+for (const [patchIdS, patchData] of Object.entries(fetchedPatchesRes.patches)) {
+const patchId = patchIdS;
+patches.push({
+patchId,
+...patchData
+});
+}
+// TODO: we should sort by parentRef instead:
+patches.sort((a, b) => a.createdAt.localeCompare(b.createdAt));
 return {
 status: 200,
-json:
+json: {
+patches,
+baseSha: await serverOps.getBaseSha()
+}
 };
 },
 DELETE: async req => {
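The GET handler above now returns the patch list wrapped together with the current baseSha, sorted by createdAt. A rough sketch of the response shape; field types beyond what the handler shows (ISO timestamps, nullable authorId) are assumptions:

// Illustrative response shape for GET /patches (not from the package).
type GetPatchesResponse = {
  patches: {
    patchId: string;
    path: string;
    createdAt: string; // compared with localeCompare above, assumed ISO-8601
    authorId: string | null;
    patch?: unknown[]; // omitted when omit_patch=true
  }[];
  baseSha: string;
};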
@@ -4251,7 +4515,10 @@ const ValServer = (valModules, options, callbacks) => {
 status: 500,
 json: {
 message: "Failed to delete patches",
-
+errors: Object.entries(deleteRes.errors).map(([id, error]) => ({
+patchId: id,
+...error
+}))
 }
 };
 }
@@ -4323,13 +4590,12 @@ const ValServer = (valModules, options, callbacks) => {
 }
 },
 // #region sources
-"/sources": {
+"/sources/~": {
 PUT: async req => {
-var _body$patchIds;
 const query = req.query;
 const cookies = req.cookies;
-
-const
+// TODO: filter results by moduleFilePath
+// const moduleFilePath = req.path || "";
 const auth = getAuth(cookies);
 if (auth.error) {
 return {
@@ -4358,149 +4624,81 @@ const ValServer = (valModules, options, callbacks) => {
 }
 };
 }
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-}
-};
-}
-let patchErrors = undefined;
-for (const [patchIdS, error] of Object.entries(patchOps.errors || {})) {
-const patchId = patchIdS;
-if (!patchErrors) {
-patchErrors = {};
-}
-patchErrors[patchId] = {
-message: error.message
-};
-}
-// TODO: errors
-patchAnalysis = serverOps.analyzePatches(patchOps.patches);
-if (body !== null && body !== void 0 && body.addPatches) {
-for (const addPatch of body.addPatches) {
-const newPatchModuleFilePath = addPatch.path;
-const newPatchOps = addPatch.patch;
-const authorId = "id" in auth ? auth.id : null;
-const createPatchRes = await serverOps.createPatch(newPatchModuleFilePath, {
-...patchAnalysis,
-...patchOps
-}, newPatchOps, authorId);
-if (createPatchRes.error) {
-return {
-status: 500,
-json: {
-message: "Failed to create patch: " + createPatchRes.error.message,
-details: createPatchRes.error
-}
-};
-}
-if (!newPatchIds) {
-newPatchIds = [createPatchRes.patchId];
-} else {
-newPatchIds.push(createPatchRes.patchId);
-}
-patchOps.patches[createPatchRes.patchId] = {
-path: newPatchModuleFilePath,
-patch: newPatchOps,
-authorId,
-createdAt: createPatchRes.createdAt,
-appliedAt: null
-};
-patchAnalysis.patchesByModule[newPatchModuleFilePath] = [...(patchAnalysis.patchesByModule[newPatchModuleFilePath] || []), {
-patchId: createPatchRes.patchId
-}];
-}
-}
-tree = {
-...(await serverOps.getTree({
-...patchAnalysis,
-...patchOps
-}))
-};
-if (query.validate_all) {
-const allTree = await serverOps.getTree();
-tree = {
-sources: {
-...allTree.sources,
-...tree.sources
-},
-errors: {
-...allTree.errors,
-...tree.errors
-}
-};
+const patchOps = await serverOps.fetchPatches({
+patchIds: undefined,
+omitPatch: false
+});
+const patchAnalysis = serverOps.analyzePatches(patchOps.patches);
+let sourcesRes = await serverOps.getSources();
+const onlyPatchedTreeModules = await serverOps.getSources({
+...patchAnalysis,
+...patchOps
+});
+sourcesRes = {
+sources: {
+...sourcesRes.sources,
+...(onlyPatchedTreeModules.sources || {})
+},
+errors: {
+...sourcesRes.errors,
+...(onlyPatchedTreeModules.errors || {})
 }
-}
-tree = await serverOps.getTree();
-}
+};
 let sourcesValidation = {
 errors: {},
 files: {}
 };
 if (query.validate_sources || query.validate_binary_files) {
 const schemas = await serverOps.getSchemas();
-sourcesValidation = await serverOps.validateSources(schemas,
+sourcesValidation = await serverOps.validateSources(schemas, sourcesRes.sources);

-// TODO: send validation errors
+// TODO: send binary files validation errors
 if (query.validate_binary_files) {
-await serverOps.validateFiles(schemas,
+await serverOps.validateFiles(schemas, sourcesRes.sources, sourcesValidation.files);
 }
 }
 const schemaSha = await serverOps.getSchemaSha();
 const modules = {};
-for (const [moduleFilePathS, module] of Object.entries(
+for (const [moduleFilePathS, module] of Object.entries(sourcesRes.sources)) {
 const moduleFilePath = moduleFilePathS;
-if (moduleFilePath.startsWith(
-var _sourcesValidation$er;
+if (moduleFilePath.startsWith(moduleFilePath)) {
+var _patchAnalysis$patche, _sourcesValidation$er;
+const skippedPatches = [];
+const patchErrors = {};
+const appliedPatches = ((_patchAnalysis$patche = patchAnalysis.patchesByModule[moduleFilePath]) === null || _patchAnalysis$patche === void 0 ? void 0 : _patchAnalysis$patche.map(p => p.patchId)) || [];
+for (const {
+patchId,
+skipped,
+error
+} of ((_sourcesRes$errors = sourcesRes.errors) === null || _sourcesRes$errors === void 0 ? void 0 : _sourcesRes$errors[moduleFilePath]) || []) {
+var _sourcesRes$errors;
+if (skipped) {
+skippedPatches.push(patchId);
+} else if (error) {
+patchErrors[patchId] = {
+message: error.message
+};
+} else {
+// unsure what makes sense here
+appliedPatches.push(patchId);
+}
+}
 modules[moduleFilePath] = {
 source: module,
-patches:
-applied:
+patches: appliedPatches.length > 0 || skippedPatches.length > 0 || Object.keys(patchErrors).length > 0 ? {
+applied: appliedPatches,
+skipped: skippedPatches.length > 0 ? skippedPatches : undefined,
+errors: Object.keys(patchErrors).length > 0 ? patchErrors : undefined
 } : undefined,
 validationErrors: (_sourcesValidation$er = sourcesValidation.errors[moduleFilePath]) === null || _sourcesValidation$er === void 0 ? void 0 : _sourcesValidation$er.validations
 };
 }
 }
-if (tree.errors && Object.keys(tree.errors).length > 0) {
-const res = {
-status: 400,
-json: {
-type: "patch-error",
-schemaSha,
-modules,
-errors: Object.fromEntries(Object.entries(tree.errors).map(([key, value]) => [key, value.map(error => ({
-patchId: error.patchId,
-skipped: error.skipped,
-error: {
-message: error.error.message
-}
-}))])),
-message: "One or more patches failed to be applied"
-}
-};
-return res;
-}
 const res = {
 status: 200,
 json: {
 schemaSha,
-modules
-newPatchIds
+modules
 }
 };
 return res;
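The rewritten /sources/~ PUT handler above fetches all patches, analyzes them, merges the unpatched and patched sources, optionally validates them, and then builds one entry per module. A sketch of the per-module payload it assembles; the names come from the handler above, while the exact source and validation-error types are assumptions:

// Illustrative sketch (not from the package): the per-module entry built by the
// /sources/~ PUT handler.
type SourcesModuleEntry = {
  source: unknown; // the (patched) module source
  patches?: {
    applied: string[]; // patch ids applied to this module
    skipped?: string[]; // patch ids that could not be applied and were skipped
    errors?: Record<string, { message: string }>; // per-patch apply errors
  };
  validationErrors?: unknown; // present when validate_sources / validate_binary_files is set
};

type SourcesResponse = {
  schemaSha: string;
  modules: Record<string, SourcesModuleEntry>;
};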
@@ -4519,8 +4717,8 @@ const ValServer = (valModules, options, callbacks) => {
 }
 };
 }
-const PostSaveBody =
-patchIds:
+const PostSaveBody = zod.z.object({
+patchIds: zod.z.array(zod.z.string().refine(id => true // TODO:
 ))
 });
 const bodyRes = PostSaveBody.safeParse(body);
@@ -4768,18 +4966,18 @@ function getStateFromCookie(stateCookie) {
 function createStateCookie(state) {
 return Buffer.from(JSON.stringify(state), "utf8").toString("base64");
 }
-const ValAppJwtPayload =
-sub:
-exp:
-project:
-org:
+const ValAppJwtPayload = zod.z.object({
+sub: zod.z.string(),
+exp: zod.z.number(),
+project: zod.z.string(),
+org: zod.z.string()
 });
-const IntegratedServerJwtPayload =
-sub:
-exp:
-token:
-org:
-project:
+const IntegratedServerJwtPayload = zod.z.object({
+sub: zod.z.string(),
+exp: zod.z.number(),
+token: zod.z.string(),
+org: zod.z.string(),
+project: zod.z.string()
 });
 async function withAuth(secret, cookies, errorMessageType, handler) {
 const cookie = cookies[internal.VAL_SESSION_COOKIE];
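The JWT payload schemas above now go through the namespaced zod import. A small standalone sketch of how such a schema validates a decoded token payload; the parse helper is an illustrative name, and only the schema fields come from the diff:

// Illustrative sketch (not from the package): validating a decoded JWT payload
// with a schema equivalent to ValAppJwtPayload above.
import { z } from "zod";

const ValAppJwtPayload = z.object({
  sub: z.string(),
  exp: z.number(),
  project: z.string(),
  org: z.string(),
});

function parseValAppJwtPayload(payload: unknown) {
  const parsed = ValAppJwtPayload.safeParse(payload);
  if (!parsed.success) {
    return null; // invalid or unexpected token payload
  }
  return parsed.data; // { sub, exp, project, org }
}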
@@ -5236,21 +5434,21 @@ function createValApiRouter(route, valServerPromise, convert) {
 let isOptional = false;
 let isArray = false;
 // extract inner types:
-if (innerType instanceof
+if (innerType instanceof zod.z.ZodOptional) {
 isOptional = true;
 innerType = innerType.unwrap();
 }
-if (innerType instanceof
+if (innerType instanceof zod.z.ZodArray) {
 isArray = true;
 innerType = innerType.element;
 }
 // convert boolean to union of literals true and false so we can parse it as a string
-if (innerType instanceof
-innerType =
+if (innerType instanceof zod.z.ZodBoolean) {
+innerType = zod.z.union([zod.z.literal("true"), zod.z.literal("false")]).transform(arg => arg === "true");
 }
 // re-build rules:
 let arrayCompatibleRule = innerType;
-arrayCompatibleRule =
+arrayCompatibleRule = zod.z.array(innerType); // we always want to parse an array because we group the query params by into an array
 if (isOptional) {
 arrayCompatibleRule = arrayCompatibleRule.optional();
 }
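The query-rule rewrite above unwraps ZodOptional/ZodArray, coerces booleans from the strings "true"/"false", and wraps every rule in an array because repeated query params are grouped. A standalone sketch of the boolean coercion under those assumptions; the rule name is illustrative:

// Illustrative sketch (not from the package): query params arrive as strings,
// so booleans are parsed as the literals "true" / "false" and transformed back.
import { z } from "zod";

const booleanParam = z
  .union([z.literal("true"), z.literal("false")])
  .transform((arg) => arg === "true");

// Every query rule is wrapped in an array because repeated params are grouped:
const validateSourcesRule = z.array(booleanParam).optional();

// e.g. ?validate_sources=true  ->  ["true"]  ->  [true]
console.log(validateSourcesRule.parse(["true"])); // [ true ]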
@@ -5259,7 +5457,7 @@ function createValApiRouter(route, valServerPromise, convert) {
 }
 queryRules[key] = arrayCompatibleRule;
 }
-const queryRes =
+const queryRes = zod.z.object(queryRules).safeParse(actualQueryParams);
 if (!queryRes.success) {
 return zodErrorResult(queryRes.error, `invalid query params: (${JSON.stringify(actualQueryParams)})`);
 }
@@ -5333,7 +5531,7 @@ function getCookies(req, cookiesDef) {
 input[name.trim()] = value;
 }
 }
-return
+return zod.z.object(cookiesDef).safeParse(input);
 }

 /**
@@ -5452,7 +5650,7 @@ async function createFixPatch(config, apply, sourcePath, validationError) {
 const remainingErrors = [];
 const patch$1 = [];
 for (const fix of validationError.fixes || []) {
-if (fix === "image:
+if (fix === "image:check-metadata" || fix === "image:add-metadata") {
 const imageMetadata = await getImageMetadata();
 if (imageMetadata.width === undefined || imageMetadata.height === undefined) {
 remainingErrors.push({
@@ -5460,7 +5658,7 @@ async function createFixPatch(config, apply, sourcePath, validationError) {
 message: "Failed to get image metadata",
 fixes: undefined
 });
-} else if (fix === "image:
+} else if (fix === "image:check-metadata") {
 const currentValue = validationError.value;
 const metadataIsCorrect =
 // metadata is a prop that is an object
@@ -5471,7 +5669,6 @@ async function createFixPatch(config, apply, sourcePath, validationError) {
 "height" in currentValue.metadata && currentValue.metadata.height === imageMetadata.height &&
 // mimeType is correct
 "mimeType" in currentValue.metadata && currentValue.metadata.mimeType === imageMetadata.mimeType;
-
 // skips if the metadata is already correct
 if (!metadataIsCorrect) {
 if (apply) {
@@ -5479,6 +5676,8 @@ async function createFixPatch(config, apply, sourcePath, validationError) {
 op: "replace",
 path: patch.sourceToPatchPath(sourcePath).concat("metadata"),
 value: {
+// eslint-disable-next-line @typescript-eslint/no-explicit-any
+...currentValue.metadata,
 width: imageMetadata.width,
 height: imageMetadata.height,
 mimeType: imageMetadata.mimeType
@@ -5547,6 +5746,8 @@ async function createFixPatch(config, apply, sourcePath, validationError) {
 op: "replace",
 path: patch.sourceToPatchPath(sourcePath).concat("metadata"),
 value: {
+// eslint-disable-next-line @typescript-eslint/no-explicit-any
+...currentValue.metadata,
 ...(fileMetadata.mimeType ? {
 mimeType: fileMetadata.mimeType
 } : {})
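The two hunks above spread `...currentValue.metadata` into the replacement value, so createFixPatch preserves any existing metadata fields and only overwrites the measured ones. An illustrative example of the resulting replace operation; the path and values below are made up:

// Illustrative sketch (not from the package): the kind of replace op emitted for
// image:check-metadata, with existing metadata preserved and measured values winning.
const exampleFixOp = {
  op: "replace" as const,
  path: ["content", "hero", "image", "metadata"], // hypothetical source path
  value: {
    ...{ alt: "existing alt text" }, // existing metadata is preserved ...
    width: 1200, // ... while measured width/height/mimeType overwrite stale values
    height: 800,
    mimeType: "image/png",
  },
};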
@@ -5580,21 +5781,6 @@ async function createFixPatch(config, apply, sourcePath, validationError) {
 }
 });
 }
-} else if (fix === "fix:deprecated-richtext") {
-if (!validationError.value) {
-throw Error("Cannot fix richtext without a value");
-}
-patch$1.push({
-op: "replace",
-path: patch.sourceToPatchPath(sourcePath),
-value: validationError.value
-});
-} else {
-remainingErrors.push({
-...validationError,
-message: `Unknown fix: ${fix}`,
-fixes: undefined
-});
 }
 }
 if (!validationError.fixes || validationError.fixes.length === 0) {
@@ -5606,8 +5792,6 @@ async function createFixPatch(config, apply, sourcePath, validationError) {
 };
 }

-exports.Patch = Patch;
-exports.PatchJSON = PatchJSON;
 exports.Service = Service;
 exports.ValFSHost = ValFSHost;
 exports.ValModuleLoader = ValModuleLoader;