@valbuild/server 0.67.1 → 0.68.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/declarations/src/ValFS.d.ts +2 -0
- package/dist/declarations/src/ValFSHost.d.ts +2 -0
- package/dist/declarations/src/ValServer.d.ts +2 -0
- package/dist/declarations/src/index.d.ts +0 -1
- package/dist/valbuild-server.cjs.dev.js +844 -591
- package/dist/valbuild-server.cjs.prod.js +844 -591
- package/dist/valbuild-server.esm.js +769 -513
- package/package.json +4 -4
- package/dist/declarations/src/patch/validation.d.ts +0 -6
@@ -14,7 +14,7 @@ var ui = require('@valbuild/ui');
var internal = require('@valbuild/shared/internal');
var server = require('@valbuild/ui/server');
var crypto$1 = require('crypto');
- var
+ var zod = require('zod');
var sizeOf = require('image-size');
var zodValidationError = require('zod-validation-error');

@@ -42,7 +42,6 @@ var ts__default = /*#__PURE__*/_interopDefault(ts);
var fsPath__namespace = /*#__PURE__*/_interopNamespace(fsPath);
var fs__default = /*#__PURE__*/_interopDefault(fs);
var crypto__default = /*#__PURE__*/_interopDefault(crypto$1);
- var z__default = /*#__PURE__*/_interopDefault(z);
var sizeOf__default = /*#__PURE__*/_interopDefault(sizeOf);

class ValSyntaxError {
@@ -1356,9 +1355,9 @@ function decodeJwt(token, secretKey) {
function getExpire() {
return Math.floor(Date.now() / 1000) + 60 * 60 * 24 * 4; // 4 days
}
- const JwtHeaderSchema =
- alg:
- typ:
+ const JwtHeaderSchema = zod.z.object({
+ alg: zod.z.literal("HS256"),
+ typ: zod.z.literal("JWT")
});
const jwtHeader = {
alg: "HS256",
@@ -1383,11 +1382,11 @@ const tsOps = new TSOps(document => {
class ValOps {
/** Sources from val modules, immutable (without patches or anything) */

- /** The
+ /** The sha256 / hash of sources + schema + config */

/** Schema from val modules, immutable */

- /** The
+ /** The sha256 / hash of schema + config - if this changes users needs to reload */

constructor(valModules, options) {
this.valModules = valModules;
@@ -1399,8 +1398,46 @@ class ValOps {
this.modulesErrors = null;
}
hash(input) {
+ if (typeof input === "object") {
+ return this.hashObject(input);
+ }
return core.Internal.getSHA256Hash(textEncoder$1.encode(input));
}
+ hashObject(obj) {
+ const collector = [];
+ this.collectObjectRecursive(obj, collector);
+ return core.Internal.getSHA256Hash(textEncoder$1.encode(collector.join("")));
+ }
+ collectObjectRecursive(item, collector) {
+ if (typeof item === "string") {
+ collector.push(`"`, item, `"`);
+ return;
+ } else if (typeof item === "number") {
+ collector.push(item.toString());
+ return;
+ } else if (typeof item === "object") {
+ if (Array.isArray(item)) {
+ collector.push("[");
+ for (let i = 0; i < item.length; i++) {
+ this.collectObjectRecursive(item[i], collector);
+ i !== item.length - 1 && collector.push(",");
+ }
+ collector.push("]");
+ } else {
+ collector.push("{");
+ const keys = Object.keys(item).sort();
+ keys.forEach((key, i) => {
+ collector.push(`"${key}":`);
+ this.collectObjectRecursive(item[key], collector);
+ i !== keys.length - 1 && collector.push(",");
+ });
+ collector.push("}");
+ }
+ return;
+ } else {
+ console.warn("Unknown type encountered when hashing object", typeof item, item);
+ }
+ }

// #region stat
/**
@@ -1414,7 +1451,7 @@ class ValOps {
*/

// #region initTree
- async
+ async initSources() {
if (this.baseSha === null || this.schemaSha === null || this.sources === null || this.schemas === null || this.modulesErrors === null) {
const currentModulesErrors = [];
const addModuleError = (message, index, path) => {
@@ -1508,50 +1545,43 @@ class ValOps {
const {
baseSha,
schemaSha
- } = await this.
+ } = await this.initSources();
await this.onInit(baseSha, schemaSha);
}
async getBaseSources() {
- return this.
+ return this.initSources().then(result => result.sources);
}
async getSchemas() {
- return this.
+ return this.initSources().then(result => result.schemas);
}
async getModuleErrors() {
- return this.
+ return this.initSources().then(result => result.moduleErrors);
}
async getBaseSha() {
- return this.
+ return this.initSources().then(result => result.baseSha);
}
async getSchemaSha() {
- return this.
+ return this.initSources().then(result => result.schemaSha);
}

// #region analyzePatches
- analyzePatches(
+ analyzePatches(sortedPatches) {
const patchesByModule = {};
const fileLastUpdatedByPatchId = {};
- for (const
-
- patch,
- createdAt: created_at
- }] of Object.entries(patchesById)) {
- const patchId = patchIdS;
- for (const op of patch) {
+ for (const patch of sortedPatches) {
+ for (const op of patch.patch) {
if (op.op === "file") {
-
+ const filePath = op.filePath;
+ fileLastUpdatedByPatchId[filePath] = patch.patchId;
}
+ const path = patch.path;
+ if (!patchesByModule[path]) {
+ patchesByModule[path] = [];
+ }
+ patchesByModule[path].push({
+ patchId: patch.patchId
+ });
}
- if (!patchesByModule[path]) {
- patchesByModule[path] = [];
- }
- patchesByModule[path].push({
- patchId,
- createdAt: created_at
- });
- }
- for (const path in patchesByModule) {
- patchesByModule[path].sort((a, b) => a.createdAt.localeCompare(b.createdAt));
}
return {
patchesByModule,
@@ -1560,11 +1590,11 @@ class ValOps {
}

// #region getTree
- async
+ async getSources(analysis) {
if (!analysis) {
const {
sources
- } = await this.
+ } = await this.initSources();
return {
sources,
errors: {}
@@ -1572,76 +1602,72 @@ class ValOps {
}
const {
sources
- } = await this.
+ } = await this.initSources();
const patchedSources = {};
const errors = {};
- for (const
- const path =
+ for (const patchData of analysis.patches) {
+ const path = patchData.path;
if (!sources[path]) {
if (!errors[path]) {
errors[path] = [];
}
-
-
-
- patchId,
- invalidPath: true,
+ console.error("Module not found", path);
+ errors[path].push({
+ patchId: patchData.patchId,
skipped: true,
- error: new patch.PatchError(`Module
- })
+ error: new patch.PatchError(`Module not found`)
+ });
+ continue;
}
- patchedSources[path]
-
-
-
-
+ if (!patchedSources[path]) {
+ patchedSources[path] = sources[path];
+ }
+ const patchId = patchData.patchId;
+ if (errors[path]) {
+ console.error("Cannot apply patch: previous errors exists", path, errors[path]);
+ errors[path].push({
+ patchId: patchId,
+ skipped: true,
+ error: new patch.PatchError(`Cannot apply patch: previous errors exists`)
+ });
+ } else {
+ const applicableOps = [];
+ const fileFixOps = {};
+ for (const op of patchData.patch) {
+ if (op.op === "file") {
+ // NOTE: We insert the last patch_id that modify a file
+ // when constructing the url we use the patch id (and the file path)
+ // to fetch the right file
+ // NOTE: overwrite and use last patch_id if multiple patches modify the same file
+ fileFixOps[op.path.join("/")] = [{
+ op: "add",
+ path: op.path.concat(...(op.nestedFilePath || [])).concat("patch_id"),
+ value: patchId
+ }];
+ } else {
+ applicableOps.push(op);
+ }
+ }
+ const patchRes = patch.applyPatch(patch.deepClone(patchedSources[path]),
+ // applyPatch mutates the source. On add operations it adds more than once? There is something strange going on... deepClone seems to fix, but is that the right solution?
+ jsonOps, applicableOps.concat(...Object.values(fileFixOps)));
+ if (fp.result.isErr(patchRes)) {
+ console.error("Could not apply patch", JSON.stringify({
+ path,
+ patchId,
+ error: patchRes.error,
+ applicableOps
+ }, null, 2));
+ if (!errors[path]) {
+ errors[path] = [];
+ }
errors[path].push({
patchId: patchId,
- skipped:
- error:
+ skipped: false,
+ error: patchRes.error
});
} else {
-
- if (!patchData) {
- errors[path] = [{
- patchId: patchId,
- skipped: false,
- error: new patch.PatchError(`Patch not found`)
- }];
- continue;
- }
- const applicableOps = [];
- const fileFixOps = {};
- for (const op of patchData.patch) {
- if (op.op === "file") {
- // NOTE: We insert the last patch_id that modify a file
- // when constructing the url we use the patch id (and the file path)
- // to fetch the right file
- // NOTE: overwrite and use last patch_id if multiple patches modify the same file
- fileFixOps[op.path.join("/")] = [{
- op: "add",
- path: op.path.concat(...(op.nestedFilePath || [])).concat("patch_id"),
- value: patchId
- }];
- } else {
- applicableOps.push(op);
- }
- }
- const patchRes = patch.applyPatch(patch.deepClone(patchedSources[path]),
- // applyPatch mutates the source. On add operations it will add multiple items? There is something strange going on. DeepClone seems to fix, but is that the right?
- jsonOps, applicableOps.concat(...Object.values(fileFixOps)));
- if (fp.result.isErr(patchRes)) {
- if (!errors[path]) {
- errors[path] = [];
- }
- errors[path].push({
- patchId: patchId,
- skipped: false,
- error: patchRes.error
- });
- } else {
- patchedSources[path] = patchRes.value;
- }
+ patchedSources[path] = patchRes.value;
}
}
}
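The hash/hashObject change above makes baseSha and schemaSha content-addressed: objects are serialized with sorted keys before hashing, so the same sources, schema and config always produce the same sha regardless of property order. A minimal standalone sketch of that idea (stableStringify, hashObject and the use of node:crypto are illustrative assumptions, not the package's API):

    import { createHash } from "node:crypto";

    // Serialize a JSON-like value deterministically: object keys are sorted,
    // arrays keep their order, strings are quoted, numbers use toString().
    function stableStringify(value: unknown): string {
      if (typeof value === "string") return `"${value}"`;
      if (typeof value === "number") return value.toString();
      if (Array.isArray(value)) return `[${value.map(stableStringify).join(",")}]`;
      if (value && typeof value === "object") {
        const record = value as Record<string, unknown>;
        const keys = Object.keys(record).sort();
        return `{${keys.map((k) => `"${k}":${stableStringify(record[k])}`).join(",")}}`;
      }
      // booleans/null fall through; the bundled code above is similarly narrow
      return String(value);
    }

    // Equal content gives an equal hash, regardless of key order.
    function hashObject(obj: unknown): string {
      return createHash("sha256").update(stableStringify(obj)).digest("hex");
    }

    // hashObject({ a: 1, b: "x" }) === hashObject({ b: "x", a: 1 })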
@@ -1879,14 +1905,14 @@ class ValOps {
for (const {
patchId
} of patches) {
-
-
- if (!patch$1) {
+ const patchData = patchAnalysis.patches.find(p => p.patchId === patchId);
+ if (!patchData) {
errors.push({
message: `Analysis required non-existing patch: ${patchId}`
});
break;
}
+ const patch$1 = patchData.patch;
const sourceFileOps = patch$1.filter(op => op.op !== "file"); // file is not a valid source file op
const patchRes = patch.applyPatch(tsSourceFile, tsOps, sourceFileOps);
if (fp.result.isErr(patchRes)) {
@@ -2000,13 +2026,21 @@ class ValOps {
}

// #region createPatch
- async createPatch(path,
- const initTree = await this.
+ async createPatch(path, patch$1, parentRef, authorId) {
+ const initTree = await this.initSources();
const schemas = initTree.schemas;
const moduleErrors = initTree.moduleErrors;
let sources = initTree.sources;
- if (
-
+ if (parentRef.type !== "head") {
+ // There's room for some optimizations here: we could do this once, then re-use every time we create a patch, then again we only create one patch at a time
+ const patchOps = await this.fetchPatches({
+ omitPatch: false
+ });
+ const patchAnalysis = this.analyzePatches(patchOps.patches);
+ const tree = await this.getSources({
+ ...patchAnalysis,
+ ...patchOps
+ });
sources = {
...sources,
...tree.sources
@@ -2017,27 +2051,30 @@ class ValOps {
const moduleError = moduleErrors.find(e => e.path === path);
if (moduleError) {
console.error(`Cannot patch. Module at path: '${path}' has fatal errors: "${moduleError.message}"`);
- return {
+ return fp.result.err({
+ errorType: "other",
error: {
message: `Cannot patch. Module at path: '${path}' has fatal errors: ` + moduleErrors.map(m => `"${m.message}"`).join(" and ")
}
- };
+ });
}
if (!source) {
console.error(`Cannot patch. Module source at path: '${path}' does not exist`);
- return {
+ return fp.result.err({
+ errorType: "other",
error: {
message: `Cannot patch. Module source at path: '${path}' does not exist`
}
- };
+ });
}
if (!schema) {
console.error(`Cannot patch. Module schema at path: '${path}' does not exist`);
- return {
+ return fp.result.err({
+ errorType: "other",
error: {
message: `Cannot patch. Module schema at path: '${path}' does not exist`
}
- };
+ });
}
const sourceFileOps = [];
const files = {};
@@ -2076,14 +2113,20 @@ class ValOps {
}
}
}
- const saveRes = await this.saveSourceFilePatch(path,
- if (saveRes
- console.error(`Could not save source
-
+ const saveRes = await this.saveSourceFilePatch(path, patch$1, parentRef, authorId);
+ if (fp.result.isErr(saveRes)) {
+ console.error(`Could not save source patch at path: '${path}'. Error: ${saveRes.error.errorType === "other" ? saveRes.error.message : saveRes.error.errorType}`);
+ if (saveRes.error.errorType === "patch-head-conflict") {
+ return fp.result.err({
+ errorType: "patch-head-conflict"
+ });
+ }
+ return fp.result.err({
+ errorType: "other",
error: saveRes.error
- };
+ });
}
- const patchId = saveRes.patchId;
+ const patchId = saveRes.value.patchId;
const saveFileRes = await Promise.all(Object.entries(files).map(async ([filePath, data]) => {
if (data.error) {
return {
@@ -2148,7 +2191,7 @@ class ValOps {
const MaxRetries = 3;
let lastRes;
for (let i = 0; i < MaxRetries; i++) {
- lastRes = await this.saveBase64EncodedBinaryFileFromPatch(filePath, patchId, data.value, type, metadataOps.metadata);
+ lastRes = await this.saveBase64EncodedBinaryFileFromPatch(filePath, parentRef, patchId, data.value, type, metadataOps.metadata);
if (!lastRes.error) {
return {
filePath
@@ -2163,17 +2206,18 @@ class ValOps {
}));
const errors = saveFileRes.filter(f => !!f.error);
if (errors.length > 0) {
- return {
+ return fp.result.err({
+ errorType: "other",
error: {
message: "Could not save patch: " + errors.map(e => e.error.message).join(", ")
}
- };
+ });
}
- return {
+ return fp.result.ok({
patchId,
files: saveFileRes,
createdAt: new Date().toISOString()
- };
+ });
}

// #region abstract ops
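createPatch and its helpers above now return fp.result values carrying an errorType discriminator ("patch-head-conflict" vs "other") instead of bare error objects. A rough sketch of how such a result type can be modelled and consumed on the caller side (the type and helper names below are stand-ins for illustration; the real helpers live in the package's fp module):

    // Minimal ok/err result type with a discriminated error payload,
    // mirroring the errorType split used in the hunks above.
    type Ok<T> = { kind: "ok"; value: T };
    type Err<E> = { kind: "err"; error: E };
    type Result<T, E> = Ok<T> | Err<E>;

    type CreatePatchError =
      | { errorType: "patch-head-conflict" }
      | { errorType: "other"; error?: unknown; message?: string };

    const ok = <T>(value: T): Ok<T> => ({ kind: "ok", value });
    const err = <E>(error: E): Err<E> => ({ kind: "err", error });
    const isErr = <T, E>(r: Result<T, E>): r is Err<E> => r.kind === "err";

    // Callers can branch on the conflict case and retry against the new head:
    function handle(res: Result<{ patchId: string }, CreatePatchError>): string {
      if (isErr(res)) {
        if (res.error.errorType === "patch-head-conflict") {
          // refetch patches and retry with an updated parentRef
          return "retry";
        }
        return "fail";
      }
      return res.value.patchId;
    }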
@@ -2270,81 +2314,51 @@ function bufferFromDataUrl(dataUrl) {
}
}

- const JSONValueT = z__default["default"].lazy(() => z__default["default"].union([z__default["default"].string(), z__default["default"].number(), z__default["default"].boolean(), z__default["default"].null(), z__default["default"].array(JSONValueT), z__default["default"].record(JSONValueT)]));
-
- /**
- * Raw JSON patch operation.
- */
- const OperationJSONT = z__default["default"].discriminatedUnion("op", [z__default["default"].object({
- op: z__default["default"].literal("add"),
- path: z__default["default"].string(),
- value: JSONValueT
- }).strict(), z__default["default"].object({
- op: z__default["default"].literal("remove"),
- /**
- * Must be non-root
- */
- path: z__default["default"].string()
- }).strict(), z__default["default"].object({
- op: z__default["default"].literal("replace"),
- path: z__default["default"].string(),
- value: JSONValueT
- }).strict(), z__default["default"].object({
- op: z__default["default"].literal("move"),
- /**
- * Must be non-root and not a proper prefix of "path".
- */
- from: z__default["default"].string(),
- path: z__default["default"].string()
- }).strict(), z__default["default"].object({
- op: z__default["default"].literal("copy"),
- from: z__default["default"].string(),
- path: z__default["default"].string()
- }).strict(), z__default["default"].object({
- op: z__default["default"].literal("test"),
- path: z__default["default"].string(),
- value: JSONValueT
- }).strict(), z__default["default"].object({
- op: z__default["default"].literal("file"),
- path: z__default["default"].string(),
- filePath: z__default["default"].string(),
- value: z__default["default"].string()
- }).strict()]);
- const PatchJSON = z__default["default"].array(OperationJSONT);
/**
- *
+ * Computes the changed patch parent references based on the current patches and the patch IDs to be deleted.
+ *
+ * NOTE: patches that will be deleted are not included in the changed patches, since they will be deleted any how.
+ *
+ * @param currentPatches - The array of current patches.
+ * @param deletePatchIds - The array of patch IDs to be deleted.
+ * @returns An object containing the changed patches with their corresponding parent references.
*/
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- }
-
-
-
-
-
-
-
-
-
-
-
+ function computeChangedPatchParentRefs(currentPatches, deletePatchIds) {
+ let lastNonDeletedPatchIndex = -1;
+ const changedPatches = {};
+ for (let i = 0; i < currentPatches.length; i++) {
+ const current = currentPatches[i];
+ if (
+ // skip all patches that will be deleted:
+ deletePatchIds.includes(current.patchId)) {
+ var _currentPatches;
+ if (
+ // skip change if the patch after is deleted anyway:
+ !deletePatchIds.includes((_currentPatches = currentPatches[i + 1]) === null || _currentPatches === void 0 ? void 0 : _currentPatches.patchId)) {
+ if (
+ // set next patch to point to head if it exists:
+ lastNonDeletedPatchIndex === -1 && currentPatches[i + 1]) {
+ changedPatches[currentPatches[i + 1].patchId] = {
+ type: "head",
+ headBaseSha: current.baseSha
+ };
+ } else if (
+ // set next patch to point to the last non-deleted patch:
+ currentPatches[lastNonDeletedPatchIndex] && currentPatches[i + 1]) {
+ changedPatches[currentPatches[i + 1].patchId] = {
+ type: "patch",
+ patchId: currentPatches[lastNonDeletedPatchIndex].patchId
+ };
+ }
+ }
+ } else {
+ lastNonDeletedPatchIndex = i;
+ }
+ }
+ return {
+ changedPatches
+ };
+ }

class ValOpsFS extends ValOps {
static VAL_DIR = ".val";
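computeChangedPatchParentRefs re-links the patch chain when patches are deleted: the first surviving patch after a deleted run is re-pointed at the closest surviving predecessor, or at head if nothing before it survives. A simplified, self-contained re-statement of that rule under assumed types (not the bundled implementation):

    // Assumed shapes for illustration; the bundled function works on
    // { patchId, baseSha } entries ordered head-first.
    type ParentRef =
      | { type: "head"; headBaseSha: string }
      | { type: "patch"; patchId: string };

    interface OrderedPatch {
      patchId: string;
      baseSha: string;
    }

    function relinkAfterDelete(
      chain: OrderedPatch[],
      deleteIds: string[]
    ): Record<string, ParentRef> {
      const changed: Record<string, ParentRef> = {};
      let lastSurvivor: OrderedPatch | null = null;
      for (let i = 0; i < chain.length; i++) {
        const current = chain[i];
        if (deleteIds.includes(current.patchId)) {
          const next = chain[i + 1];
          // only the first surviving patch after a deleted run needs a new parent
          if (next && !deleteIds.includes(next.patchId)) {
            changed[next.patchId] = lastSurvivor
              ? { type: "patch", patchId: lastSurvivor.patchId }
              : { type: "head", headBaseSha: current.baseSha };
          }
        } else {
          lastSurvivor = current;
        }
      }
      return changed;
    }

    // Deleting "B" from A -> B -> C re-points C at A;
    // deleting "A" re-points B at head.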
@@ -2518,32 +2532,31 @@ class ValOpsFS extends ValOps {
patchJsonFiles = this.host.readDirectory(patchesCacheDir, ["patch.json"], [], []);
}
const patches = {};
- const errors =
- const
-
- if (
-
-
-
-
-
-
-
-
-
- parsedFSPatchBaseRes = this.parseJsonFile(this.getPatchBaseFile(patchId), FSPatchBase);
- }
- if (parsedFSPatchRes.error) {
- errors[patchId] = parsedFSPatchRes.error;
- } else if (parsedFSPatchBaseRes && parsedFSPatchBaseRes.error) {
- errors[patchId] = parsedFSPatchBaseRes.error;
+ const errors = [];
+ const parsedUnsortedFsPatches = patchJsonFiles.map(file => fsPath__namespace["default"].basename(fsPath__namespace["default"].dirname(file))).map(patchDir => [patchDir, this.parseJsonFile(this.getPatchFilePath(patchDir), FSPatch), this.host.fileExists(this.getPatchBaseFile(patchDir)) ? this.parseJsonFile(this.getPatchBaseFile(patchDir), FSPatchBase) : undefined]);
+ parsedUnsortedFsPatches.forEach(([dir, parsedPatch, parsedBase]) => {
+ if (parsedPatch.error) {
+ errors.push({
+ ...parsedPatch.error,
+ parentPatchId: dir
+ });
+ } else if (parsedBase && parsedBase.error) {
+ errors.push({
+ ...parsedBase.error,
+ parentPatchId: dir
+ });
} else {
-
-
-
+ if (includes && includes.length > 0 && !includes.includes(parsedPatch.data.patchId)) {
+ return;
+ }
+ patches[parsedPatch.data.patchId] = {
+ ...parsedPatch.data,
+ appliedAt: parsedBase ? parsedBase.data : null
};
}
- }
+ });
+
+ // If there are patches, but no head. error
if (Object.keys(errors).length > 0) {
return {
patches,
@@ -2554,36 +2567,52 @@ class ValOpsFS extends ValOps {
patches
};
}
+ getParentPatchIdFromParentRef(parentRef) {
+ return parentRef.type === "head" ? "head" : parentRef.patchId;
+ }
async fetchPatches(filters) {
+ const fetchPatchesRes = await this.fetchPatchesFromFS(!!filters.omitPatch);
+ const sortedPatches = this.createPatchChain(fetchPatchesRes.patches).filter(patchData => {
+ if (filters.authors && !(patchData.authorId === null || filters.authors.includes(patchData.authorId))) {
+ return false;
+ }
+ if (filters.moduleFilePaths && !filters.moduleFilePaths.includes(patchData.path)) {
+ return false;
+ }
+ return true;
+ }).map(patchData => {
+ if (filters.omitPatch) {
+ return {
+ ...patchData,
+ patch: undefined
+ };
+ }
+ return patchData;
+ });
+ return {
+ patches: sortedPatches,
+ errors: fetchPatchesRes.errors
+ };
+ }
+ async fetchPatchesFromFS(omitPath) {
const patches = {};
- const errors = {};
const {
- errors
+ errors,
patches: allPatches
- } = await this.readPatches(
- if (allErrors && Object.keys(allErrors).length > 0) {
- for (const [patchId, error] of Object.entries(allErrors)) {
- console.error("Error reading patch", patchId, error);
- errors[patchId] = error;
- }
- }
+ } = await this.readPatches();
for (const [patchIdS, patch] of Object.entries(allPatches)) {
const patchId = patchIdS;
- if (filters.authors && !(patch.authorId === null || filters.authors.includes(patch.authorId))) {
- continue;
- }
- if (filters.moduleFilePaths && !filters.moduleFilePaths.includes(patch.path)) {
- continue;
- }
patches[patchId] = {
- patch:
+ patch: omitPath ? undefined : patch.patch,
+ parentRef: patch.parentRef,
path: patch.path,
+ baseSha: patch.baseSha,
createdAt: patch.createdAt,
authorId: patch.authorId,
appliedAt: patch.appliedAt
};
}
- if (errors &&
+ if (errors && errors.length > 0) {
return {
patches,
errors
@@ -2594,6 +2623,33 @@ class ValOpsFS extends ValOps {
};
}

+ // #region createPatchChain
+ createPatchChain(unsortedPatchRecord) {
+ var _Object$entries$find;
+ // TODO: Error handling
+ const nextPatch = {};
+ Object.keys(unsortedPatchRecord).forEach(patchId => {
+ const patch = unsortedPatchRecord[patchId];
+ if (patch.parentRef.type === "head") {
+ nextPatch["head"] = patchId;
+ } else {
+ nextPatch[patch.parentRef.patchId] = patchId;
+ }
+ });
+ const sortedPatches = [];
+ let nextPatchId = (_Object$entries$find = Object.entries(unsortedPatchRecord).find(([, patch]) => patch.parentRef.type === "head")) === null || _Object$entries$find === void 0 ? void 0 : _Object$entries$find[0];
+ while (!!nextPatchId && nextPatchId in unsortedPatchRecord) {
+ const patch = unsortedPatchRecord[nextPatchId];
+ delete patch["parentRef"];
+ sortedPatches.push({
+ ...patch,
+ patchId: nextPatchId
+ });
+ nextPatchId = nextPatch[nextPatchId];
+ }
+ return sortedPatches;
+ }
+
// eslint-disable-next-line @typescript-eslint/no-explicit-any
parseJsonFile(filePath, parser) {
if (!this.host.fileExists(filePath)) {
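createPatchChain above treats stored patches as a singly linked list: exactly one patch has parentRef.type === "head", every other patch points at its predecessor's patchId, and the sorted order falls out of walking the next-pointers from head. A compact sketch of the same traversal with assumed, simplified types:

    // Assumed shapes for illustration only.
    type ParentRef = { type: "head" } | { type: "patch"; patchId: string };

    interface StoredPatch {
      parentRef: ParentRef;
      path: string;
    }

    function orderByParentRef(
      patches: Record<string, StoredPatch>
    ): Array<StoredPatch & { patchId: string }> {
      // child lookup: parent patchId (or "head") -> patchId of the patch that follows it
      const next: Record<string, string> = {};
      for (const [patchId, patch] of Object.entries(patches)) {
        next[patch.parentRef.type === "head" ? "head" : patch.parentRef.patchId] = patchId;
      }
      const sorted: Array<StoredPatch & { patchId: string }> = [];
      let current: string | undefined = next["head"];
      while (current && patches[current]) {
        sorted.push({ ...patches[current], patchId: current });
        current = next[current];
      }
      return sorted;
    }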
@@ -2668,38 +2724,47 @@ class ValOpsFS extends ValOps {
};
}
}
- async saveSourceFilePatch(path, patch, authorId) {
-
+ async saveSourceFilePatch(path, patch, parentRef, authorId) {
+ const patchDir = this.getParentPatchIdFromParentRef(parentRef);
try {
-
-
- fileId++;
- }
- const patchId = fileId.toString();
+ const baseSha = await this.getBaseSha();
+ const patchId = crypto.randomUUID();
const data = {
patch,
+ patchId,
+ parentRef,
path,
authorId,
+ baseSha,
coreVersion: core.Internal.VERSION.core,
createdAt: new Date().toISOString()
};
- this.host.
-
+ const writeRes = this.host.tryWriteUf8File(this.getPatchFilePath(patchDir), JSON.stringify(data));
+ if (writeRes.type === "error") {
+ return writeRes.errorType === "dir-already-exists" ? fp.result.err({
+ errorType: "patch-head-conflict"
+ }) : fp.result.err({
+ errorType: "other",
+ error: writeRes.error,
+ message: "Failed to write patch file"
+ });
+ }
+ return fp.result.ok({
patchId
- };
+ });
} catch (err) {
if (err instanceof Error) {
- return {
-
-
-
- };
+ return fp.result.err({
+ errorType: "other",
+ error: err,
+ message: err.message
+ });
}
- return {
-
-
-
- };
+ return fp.result.err({
+ errorType: "other",
+ error: err,
+ message: "Unknown error"
+ });
}
}
async getSourceFile(path) {
@@ -2737,9 +2802,10 @@ class ValOpsFS extends ValOps {
};
}
}
- async saveBase64EncodedBinaryFileFromPatch(filePath, patchId, data, _type, metadata) {
- const
- const
+ async saveBase64EncodedBinaryFileFromPatch(filePath, parentRef, patchId, data, _type, metadata) {
+ const patchDir = this.getParentPatchIdFromParentRef(parentRef);
+ const patchFilePath = this.getBinaryFilePath(filePath, patchDir);
+ const metadataFilePath = this.getBinaryFileMetadataPath(filePath, patchDir);
try {
const buffer = bufferFromDataUrl(data);
if (!buffer) {
@@ -2771,7 +2837,15 @@ class ValOpsFS extends ValOps {
}
}
async getBase64EncodedBinaryFileMetadataFromPatch(filePath, type, patchId) {
- const
+ const patchDirRes = await this.getParentPatchIdFromPatchId(patchId);
+ if (fp.result.isErr(patchDirRes)) {
+ return {
+ errors: [{
+ message: "Failed to get patch dir from patch id"
+ }]
+ };
+ }
+ const metadataFilePath = this.getBinaryFileMetadataPath(filePath, patchDirRes.value);
if (!this.host.fileExists(metadataFilePath)) {
return {
errors: [{
@@ -2780,7 +2854,7 @@ class ValOpsFS extends ValOps {
}]
};
}
- const metadataParseRes = this.parseJsonFile(metadataFilePath,
+ const metadataParseRes = this.parseJsonFile(metadataFilePath, zod.z.record(zod.z.union([zod.z.string(), zod.z.number()])));
if (metadataParseRes.error) {
return {
errors: [metadataParseRes.error]
@@ -2807,7 +2881,11 @@ class ValOpsFS extends ValOps {
};
}
async getBase64EncodedBinaryFileFromPatch(filePath, patchId) {
- const
+ const patchDirRes = await this.getParentPatchIdFromPatchId(patchId);
+ if (!fp.result.isOk(patchDirRes)) {
+ return null;
+ }
+ const absPath = this.getBinaryFilePath(filePath, patchDirRes.value);
if (!this.host.fileExists(absPath)) {
return null;
}
@@ -2815,30 +2893,62 @@ class ValOpsFS extends ValOps {
}
async deletePatches(patchIds) {
const deleted = [];
-
-
- try {
- this.host.deleteDir(this.getPatchDir(patchId));
- deleted.push(patchId);
- } catch (err) {
- if (!errors) {
- errors = {};
- }
- errors[patchId] = {
- message: err instanceof Error ? err.message : "Unknown error"
- };
- }
- }
- if (errors) {
+ const patchDirMapRes = await this.getParentPatchIdFromPatchIdMap();
+ if (fp.result.isErr(patchDirMapRes)) {
return {
-
-
+ error: {
+ message: "Failed to get patch dir map"
+ }
};
}
+ const currentPatches = this.createPatchChain((await this.fetchPatchesFromFS(false)).patches);
+ this.updateOrderedPatches(computeChangedPatchParentRefs(currentPatches, patchIds), patchDirMapRes.value, patchIds);
return {
deleted
};
}
+ updateOrderedPatches(updates, patchDirMap, deletePatchIds) {
+ for (const patchId of deletePatchIds) {
+ const patchDir = patchDirMap[patchId];
+ if (!patchDir) {
+ console.error("Could not find patch dir for patch id scheduled for deletion: ", patchId);
+ continue;
+ }
+ try {
+ this.host.deleteDir(this.getFullPatchDir(patchDir));
+ } catch (err) {
+ console.error("Failed to delete patch dir", err);
+ }
+ }
+ for (const [patchIdS, parentRef] of Object.entries(updates.changedPatches)) {
+ const prevParentPatchId = patchDirMap[patchIdS];
+ if (!prevParentPatchId) {
+ console.error("Could not find previous parent patch id for deleted patch id: ", patchIdS);
+ continue;
+ }
+ const newParentPatchId = parentRef.type === "head" ? "head" : parentRef.patchId;
+ const currentPatchDataRes = this.parseJsonFile(this.getPatchFilePath(prevParentPatchId), FSPatch);
+ if (currentPatchDataRes.error) {
+ console.error("Failed to parse patch file while fixing patch chain after deleted patch", {
+ updates
+ }, currentPatchDataRes.error);
+ continue;
+ }
+ const newPatchData = currentPatchDataRes.data;
+ newPatchData.parentRef = parentRef;
+ try {
+ this.host.writeUf8File(this.getPatchFilePath(prevParentPatchId), JSON.stringify(newPatchData));
+ if (this.host.directoryExists(this.getFullPatchDir(newParentPatchId))) {
+ this.host.deleteDir(this.getFullPatchDir(newParentPatchId));
+ }
+ this.host.moveDir(this.getFullPatchDir(prevParentPatchId), this.getFullPatchDir(newParentPatchId));
+ } catch (err) {
+ console.error("Failed fix patch chain after deleted patch", {
+ updates
+ }, err);
+ }
+ }
+ }
async saveFiles(preparedCommit) {
const updatedFiles = [];
const errors = {};
@@ -2854,12 +2964,28 @@ class ValOpsFS extends ValOps {
};
}
}
+ const patchIdToPatchDirMapRes = await this.getParentPatchIdFromPatchIdMap();
+ if (fp.result.isErr(patchIdToPatchDirMapRes)) {
+ return {
+ updatedFiles,
+ errors
+ };
+ }
+ const patchIdToPatchDirMap = patchIdToPatchDirMapRes.value;
for (const [filePath, {
patchId
}] of Object.entries(preparedCommit.patchedBinaryFilesDescriptors)) {
const absPath = fsPath__namespace["default"].join(this.rootDir, ...filePath.split("/"));
try {
-
+ const patchDir = patchIdToPatchDirMap[patchId];
+ if (!patchDir) {
+ errors[absPath] = {
+ message: "Failed to find PatchDir for PatchId " + patchId,
+ filePath
+ };
+ continue;
+ }
+ this.host.copyFile(this.getBinaryFilePath(filePath, patchDir), absPath);
updatedFiles.push(absPath);
} catch (err) {
errors[absPath] = {
@@ -2873,7 +2999,14 @@ class ValOpsFS extends ValOps {
baseSha: await this.getBaseSha(),
timestamp: new Date().toISOString()
};
- const
+ const patchDir = patchIdToPatchDirMap[patchId];
+ if (!patchDir) {
+ errors[`patchId:${patchId}`] = {
+ message: "Failed to find PatchDir for PatchId " + patchId
+ };
+ continue;
+ }
+ const absPath = this.getPatchBaseFile(patchDir);
try {
this.host.writeUf8File(absPath, JSON.stringify(appliedAt));
} catch (err) {
@@ -2917,25 +3050,52 @@ class ValOpsFS extends ValOps {
}
return createMetadataFromBuffer(type, mimeType, buffer);
}
+ async getParentPatchIdFromPatchId(patchId) {
+ // This is not great. If needed we should find a better way
+ const patches = await this.readPatches();
+ if (patches.errors || patches.error) {
+ console.error("Failed to read patches", JSON.stringify(patches));
+ return fp.result.err("failed-to-read-patches");
+ }
+ const patch = patches.patches[patchId];
+ if (!patch) {
+ console.error("Could not find patch with patchId: ", patchId);
+ return fp.result.err("patch-not-found");
+ }
+ return fp.result.ok(this.getParentPatchIdFromParentRef(patch.parentRef));
+ }
+ async getParentPatchIdFromPatchIdMap() {
+ const patches = await this.readPatches();
+ if (patches.errors || patches.error) {
+ console.error("Failed to read patches", JSON.stringify(patches));
+ return fp.result.err("failed-to-read-patches");
+ }
+ return fp.result.ok(Object.fromEntries(Object.entries(patches.patches).map(([patchId, value]) => [patchId, this.getParentPatchIdFromParentRef(value.parentRef)])));
+ }
+
+ // #region profiles
+ async getProfiles() {
+ throw new Error("Configuration error: cannot get profiles in local / development file system mode");
+ }

// #region fs file path helpers
getPatchesDir() {
return fsPath__namespace["default"].join(this.rootDir, ValOpsFS.VAL_DIR, "patches");
}
-
- return fsPath__namespace["default"].join(this.getPatchesDir(),
+ getFullPatchDir(patchDir) {
+ return fsPath__namespace["default"].join(this.getPatchesDir(), patchDir);
}
- getBinaryFilePath(filePath,
- return fsPath__namespace["default"].join(this.
+ getBinaryFilePath(filePath, patchDir) {
+ return fsPath__namespace["default"].join(this.getFullPatchDir(patchDir), "files", filePath, fsPath__namespace["default"].basename(filePath));
}
- getBinaryFileMetadataPath(filePath,
- return fsPath__namespace["default"].join(this.
+ getBinaryFileMetadataPath(filePath, patchDir) {
+ return fsPath__namespace["default"].join(this.getFullPatchDir(patchDir), "files", filePath, "metadata.json");
}
- getPatchFilePath(
- return fsPath__namespace["default"].join(this.
+ getPatchFilePath(patchDir) {
+ return fsPath__namespace["default"].join(this.getFullPatchDir(patchDir), "patch.json");
}
- getPatchBaseFile(
- return fsPath__namespace["default"].join(this.
+ getPatchBaseFile(patchDir) {
+ return fsPath__namespace["default"].join(this.getFullPatchDir(patchDir), "base.json");
}
}
class FSOpsHost {
@@ -2949,6 +3109,9 @@ class FSOpsHost {
});
}
}
+ moveDir(from, to) {
+ fs__default["default"].renameSync(from, to);
+ }
directoryExists(path) {
return ts__default["default"].sys.directoryExists(path);
}
@@ -2970,6 +3133,37 @@ class FSOpsHost {
});
fs__default["default"].writeFileSync(path, data, "utf-8");
}
+ tryWriteUf8File(path, data) {
+ try {
+ const parentDir = fsPath__namespace["default"].join(fsPath__namespace["default"].dirname(path), "../");
+ fs__default["default"].mkdirSync(parentDir, {
+ recursive: true
+ });
+ // Make the parent dir separately. This is because we need mkdir to throw
+ // if the directory already exists. If we use recursive: true, it doesn't
+ fs__default["default"].mkdirSync(fsPath__namespace["default"].dirname(path), {
+ recursive: false
+ });
+ } catch (e) {
+ return {
+ type: "error",
+ errorType: "dir-already-exists",
+ error: e
+ };
+ }
+ try {
+ fs__default["default"].writeFileSync(path, data, "utf-8");
+ } catch (e) {
+ return {
+ type: "error",
+ errorType: "failed-to-write-file",
+ error: e
+ };
+ }
+ return {
+ type: "success"
+ };
+ }
writeBinaryFile(path, data) {
fs__default["default"].mkdirSync(fsPath__namespace["default"].dirname(path), {
recursive: true
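tryWriteUf8File above detects two patches racing for the same parent: the grandparent directory is created recursively, but the final patch directory is created with mkdirSync({ recursive: false }), which throws if it already exists; ValOpsFS.saveSourceFilePatch then maps that dir-already-exists error to a patch-head-conflict. A minimal sketch of the same trick, with assumed names:

    import * as fs from "node:fs";
    import * as fsPath from "node:path";

    type TryWriteResult =
      | { type: "success" }
      | { type: "error"; errorType: "dir-already-exists" | "failed-to-write-file"; error: unknown };

    function tryExclusiveWrite(filePath: string, data: string): TryWriteResult {
      try {
        // grandparent may be created freely...
        fs.mkdirSync(fsPath.join(fsPath.dirname(filePath), ".."), { recursive: true });
        // ...but the final directory must not exist yet: non-recursive mkdir throws if it does
        fs.mkdirSync(fsPath.dirname(filePath), { recursive: false });
      } catch (error) {
        return { type: "error", errorType: "dir-already-exists", error };
      }
      try {
        fs.writeFileSync(filePath, data, "utf-8");
      } catch (error) {
        return { type: "error", errorType: "failed-to-write-file", error };
      }
      return { type: "success" };
    }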
@@ -2983,98 +3177,106 @@ class FSOpsHost {
fs__default["default"].copyFileSync(from, to);
}
}
- const FSPatch =
- path:
- patch: Patch,
-
-
-
+ const FSPatch = zod.z.object({
+ path: zod.z.string().refine(p => p.startsWith("/") && p.includes(".val."), "Path is not valid. Must start with '/' and include '.val.'"),
+ patch: internal.Patch,
+ patchId: zod.z.string(),
+ baseSha: zod.z.string(),
+ parentRef: internal.ParentRef,
+ authorId: zod.z.string().refine(p => true).nullable(),
+ createdAt: zod.z.string().datetime(),
+ coreVersion: zod.z.string().nullable() // TODO: use this to check if patch is compatible with current core version?
});
- const FSPatchBase =
- baseSha:
- timestamp:
+ const FSPatchBase = zod.z.object({
+ baseSha: zod.z.string().refine(p => true),
+ timestamp: zod.z.string().datetime()
});

const textEncoder = new TextEncoder();
- const PatchId =
- const CommitSha =
-
- const AuthorId =
- const ModuleFilePath =
- const Metadata =
- mimeType:
- width:
- height:
- }),
- mimeType:
+ const PatchId = zod.z.string().refine(s => !!s); // TODO: validate
+ const CommitSha = zod.z.string().refine(s => !!s); // TODO: validate
+ zod.z.string().refine(s => !!s); // TODO: validate
+ const AuthorId = zod.z.string().refine(s => !!s); // TODO: validate
+ const ModuleFilePath = zod.z.string().refine(s => !!s); // TODO: validate
+ const Metadata = zod.z.union([zod.z.object({
+ mimeType: zod.z.string(),
+ width: zod.z.number(),
+ height: zod.z.number()
+ }), zod.z.object({
+ mimeType: zod.z.string()
})]);
- const MetadataRes =
+ const MetadataRes = zod.z.object({
filePath: ModuleFilePath,
metadata: Metadata,
- type:
+ type: zod.z.union([zod.z.literal("file"), zod.z.literal("image")]).nullable()
});
- const BasePatchResponse =
+ const BasePatchResponse = zod.z.object({
path: ModuleFilePath,
patchId: PatchId,
authorId: AuthorId.nullable(),
- createdAt:
-
- baseSha: BaseSha,
- commitSha: CommitSha,
- appliedAt: z.z.string().datetime()
- }).nullable()
+ createdAt: zod.z.string().datetime(),
+ baseSha: zod.z.string()
});
- const GetPatches =
- patches:
- patch: Patch.optional()
+ const GetPatches = zod.z.object({
+ patches: zod.z.array(zod.z.intersection(zod.z.object({
+ patch: internal.Patch.optional()
}), BasePatchResponse)),
- errors:
+ errors: zod.z.array(zod.z.object({
patchId: PatchId.optional(),
- message:
+ message: zod.z.string()
})).optional()
});
- const FilesResponse =
- files:
- filePath:
- location:
+ const FilesResponse = zod.z.object({
+ files: zod.z.array(zod.z.union([zod.z.object({
+ filePath: zod.z.string(),
+ location: zod.z.literal("patch"),
patchId: PatchId,
- value:
- }),
- filePath:
- location:
+ value: zod.z.string()
+ }), zod.z.object({
+ filePath: zod.z.string(),
+ location: zod.z.literal("repo"),
commitSha: CommitSha,
- value:
+ value: zod.z.string()
})])),
- errors:
- filePath:
- location:
+ errors: zod.z.array(zod.z.union([zod.z.object({
+ filePath: zod.z.string(),
+ location: zod.z.literal("patch"),
patchId: PatchId,
- message:
- }),
- filePath:
- location:
+ message: zod.z.string()
+ }), zod.z.object({
+ filePath: zod.z.string(),
+ location: zod.z.literal("repo"),
commitSha: CommitSha,
- message:
+ message: zod.z.string()
})])).optional()
});
- const SavePatchResponse =
+ const SavePatchResponse = zod.z.object({
patchId: PatchId
});
- const DeletePatchesResponse =
- deleted:
- errors:
- message:
+ const DeletePatchesResponse = zod.z.object({
+ deleted: zod.z.array(PatchId),
+ errors: zod.z.array(zod.z.object({
+ message: zod.z.string(),
patchId: PatchId
})).optional()
});
-
+ zod.z.object({
patchId: PatchId,
filePath: ModuleFilePath
});
- const CommitResponse =
- updatedFiles:
+ const CommitResponse = zod.z.object({
+ updatedFiles: zod.z.array(zod.z.string()),
commit: CommitSha,
- branch:
+ branch: zod.z.string()
+ });
+ const ProfilesResponse = zod.z.object({
+ profiles: zod.z.array(zod.z.object({
+ profileId: zod.z.string(),
+ fullName: zod.z.string(),
+ avatar: zod.z.object({
+ url: zod.z.string()
+ }).nullable()
+ }))
});
class ValOpsHttp extends ValOps {
constructor(hostUrl, project, commitSha,
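The schema block above replaces the bundled z__default copy of zod with the shared zod import, and FSPatch now records patchId, baseSha and parentRef so on-disk patch files can be validated before being chained. A small sketch of the same validate-before-use pattern with an assumed, trimmed-down schema (not the full FSPatch definition):

    import { z } from "zod";

    // Trimmed-down stand-in for the FSPatch schema above (fields omitted/simplified).
    const FSPatchSketch = z.object({
      path: z
        .string()
        .refine((p) => p.startsWith("/") && p.includes(".val."), "Path is not valid"),
      patchId: z.string(),
      parentRef: z.union([
        z.object({ type: z.literal("head") }),
        z.object({ type: z.literal("patch"), patchId: z.string() }),
      ]),
      createdAt: z.string().datetime(),
    });

    function parsePatchFile(jsonText: string) {
      const parsed = FSPatchSketch.safeParse(JSON.parse(jsonText));
      if (!parsed.success) {
        // surface the zod error instead of throwing, like parseJsonFile does
        return { error: { message: parsed.error.message } };
      }
      return { data: parsed.data };
    }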
@@ -3104,18 +3306,35 @@ class ValOpsHttp extends ValOps {
}
const currentBaseSha = await this.getBaseSha();
const currentSchemaSha = await this.getSchemaSha();
- const
+ const allPatchData = await this.fetchPatches({
omitPatch: true,
authors: undefined,
patchIds: undefined,
moduleFilePaths: undefined
});
+ // We think these errors will be picked up else where (?), so we only return an error here if there are no patches
+ if (allPatchData.patches.length === 0) {
+ let message;
+ if (allPatchData.error) {
+ message = allPatchData.error.message;
+ } else if (allPatchData.errors && allPatchData.errors.length > 0) {
+ const errors = allPatchData.errors;
+ message = errors.map(error => error.message).join("");
+ }
+ if (message) {
+ message = `Could not get patches: ${message}`;
+ console.error(message);
+ return {
+ type: "error",
+ error: {
+ message
+ }
+ };
+ }
+ }
const patches = [];
-
-
- return a.createdAt.localeCompare(b.createdAt, undefined);
- })) {
- patches.push(patchId);
+ for (const patchData of allPatchData.patches) {
+ patches.push(patchData.patchId);
}
const webSocketNonceRes = await this.getWebSocketNonce(params.profileId);
if (webSocketNonceRes.status === "error") {
@@ -3176,6 +3395,16 @@ class ValOpsHttp extends ValOps {
}
};
}
+ const contentType = res.headers.get("Content-Type") || "";
+ if (contentType.startsWith("application/json")) {
+ const json = await res.json();
+ return {
+ status: "error",
+ error: {
+ message: "Could not get nonce." + (json.message || "Unexpected error (no error message). Status: " + res.status)
+ }
+ };
+ }
return {
status: "error",
error: {
@@ -3201,8 +3430,8 @@ class ValOpsHttp extends ValOps {
for (let i = 0; i < patchIds.length; i += chunkSize) {
patchIdChunks.push(patchIds.slice(i, i + chunkSize));
}
-
-
+ const allPatches = [];
+ const allErrors = [];
if (patchIds === undefined || patchIds.length === 0) {
return this.fetchPatchesInternal({
patchIds: patchIds,
@@ -3220,15 +3449,9 @@ class ValOpsHttp extends ValOps {
if ("error" in res) {
return res;
}
- allPatches
- ...allPatches,
- ...res.patches
- };
+ allPatches.push(...res.patches);
if (res.errors) {
- allErrors
- ...allErrors,
- ...res.errors
- };
+ allErrors.push(...res.errors);
}
}
return {
@@ -3264,27 +3487,23 @@ class ValOpsHttp extends ValOps {
"Content-Type": "application/json"
}
}).then(async res => {
- const patches =
+ const patches = [];
if (res.ok) {
const json = await res.json();
const parsed = GetPatches.safeParse(json);
if (parsed.success) {
+ const errors = [];
const data = parsed.data;
- const errors = {};
for (const patchesRes of data.patches) {
- patches
- path: patchesRes.path,
+ patches.push({
authorId: patchesRes.authorId,
createdAt: patchesRes.createdAt,
- appliedAt:
-
-
-
-
-
- },
- patch: patchesRes.patch
- };
+ appliedAt: null,
+ patchId: patchesRes.patchId,
+ path: patchesRes.path,
+ baseSha: patchesRes.baseSha,
+ patch: filters.omitPatch ? undefined : patchesRes.patch
+ });
}
return {
patches,
@@ -3306,7 +3525,8 @@ class ValOpsHttp extends ValOps {
};
});
}
- async saveSourceFilePatch(path, patch, authorId) {
+ async saveSourceFilePatch(path, patch, parentRef, authorId) {
+ const baseSha = await this.getBaseSha();
return fetch(`${this.hostUrl}/v1/${this.project}/patches`, {
method: "POST",
headers: {
@@ -3317,78 +3537,53 @@ class ValOpsHttp extends ValOps {
path,
patch,
authorId,
+ parentPatchId: parentRef.type === "patch" ? parentRef.patchId : null,
+ baseSha,
commit: this.commitSha,
branch: this.branch,
coreVersion: core.Internal.VERSION.core
})
}).then(async res => {
+ var _res$headers$get;
if (res.ok) {
const parsed = SavePatchResponse.safeParse(await res.json());
if (parsed.success) {
- return {
+ return fp.result.ok({
patchId: parsed.data.patchId
- };
+ });
}
- return {
-
-
-
- };
+ return fp.result.err({
+ errorType: "other",
+ message: `Could not parse save patch response. Error: ${zodValidationError.fromError(parsed.error)}`
+ });
}
-
-
-
-
-
- }).catch(e => {
- return {
- error: {
- message: `Could save source file patch (connection error?): ${e instanceof Error ? e.message : e.toString()}`
- }
- };
- });
- }
- async saveBase64EncodedBinaryFileFromPatch(filePath, patchId, data, type, metadata) {
- return fetch(`${this.hostUrl}/v1/${this.project}/patches/${patchId}/files`, {
- method: "POST",
- headers: {
- ...this.authHeaders,
- "Content-Type": "application/json"
- },
- body: JSON.stringify({
- filePath: filePath,
- data,
- type,
- metadata
- })
- }).then(async res => {
- if (res.ok) {
- const parsed = SavePatchFileResponse.safeParse(await res.json());
- if (parsed.success) {
- return {
- patchId: parsed.data.patchId,
- filePath: parsed.data.filePath
- };
- }
- return {
- error: {
- message: `Could not parse save patch file response. Error: ${zodValidationError.fromError(parsed.error)}`
- }
- };
+ if (res.status === 409) {
+ return fp.result.err({
+ errorType: "patch-head-conflict",
+ message: "Conflict: " + (await res.text())
+ });
}
-
-
-
-
-
+ if ((_res$headers$get = res.headers.get("Content-Type")) !== null && _res$headers$get !== void 0 && _res$headers$get.includes("application/json")) {
+ const json = await res.json();
+ return fp.result.err({
+ errorType: "other",
+ message: json.message || "Unknown error"
+ });
+ }
+ return fp.result.err({
+ errorType: "other",
+ message: "Could not save patch. HTTP error: " + res.status + " " + res.statusText
+ });
}).catch(e => {
- return {
-
-
-
- };
+ return fp.result.err({
+ errorType: "other",
+ message: `Could save source file patch (connection error?): ${e instanceof Error ? e.message : e.toString()}`
+ });
});
}
+ async saveBase64EncodedBinaryFileFromPatch(filePath, parentRef, patchId, data, type, metadata) {
+ throw Error("TODO: implement");
+ }
async getHttpFiles(files) {
const params = new URLSearchParams();
const stringifiedFiles = JSON.stringify({
@@ -3491,7 +3686,7 @@ class ValOpsHttp extends ValOps {
  const params = new URLSearchParams();
  params.set("file_path", filePath);
  try {
- const metadataRes = await fetch(`${this.hostUrl}/v1/${this.project}/patches/${patchId}/
+ const metadataRes = await fetch(`${this.hostUrl}/v1/${this.project}/patches/${patchId}/files?${params}`, {
  headers: {
  ...this.authHeaders,
  "Content-Type": "application/json"

@@ -3593,7 +3788,7 @@ class ValOpsHttp extends ValOps {
  }
  async commit(prepared, message, committer, newBranch) {
  try {
- var _res$headers$
+ var _res$headers$get2;
  const existingBranch = this.branch;
  const res = await fetch(`${this.hostUrl}/v1/${this.project}/commit`, {
  method: "POST",

@@ -3629,7 +3824,7 @@ class ValOpsHttp extends ValOps {
  }
  };
  }
- if ((_res$headers$
+ if ((_res$headers$get2 = res.headers.get("Content-Type")) !== null && _res$headers$get2 !== void 0 && _res$headers$get2.includes("application/json")) {
  const json = await res.json();
  if (json.isNotFastForward) {
  return {
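The three small hunks above mostly rename the transpiled temporaries (such as `_res$headers$get2`) behind the same guard: only parse an error body as JSON when the response actually declares `application/json`. A sketch of that pattern in plain form; `readErrorMessage` and the fake response object are illustrative only:

```js
// Illustrative helper (not from the package): read a JSON error message only when the
// Content-Type header says the body is JSON, otherwise fall back to the HTTP status line.
async function readErrorMessage(res) {
  const contentType = res.headers.get("Content-Type");
  if (contentType !== null && contentType.includes("application/json")) {
    const json = await res.json();
    return json.message || "Unknown error";
  }
  return `HTTP error: ${res.status} ${res.statusText}`;
}

// Minimal stand-in for a fetch Response, just enough to exercise the helper:
const fakeRes = {
  status: 500,
  statusText: "Internal Server Error",
  headers: { get: (name) => (name === "Content-Type" ? "text/html" : null) },
  json: async () => ({}),
};
readErrorMessage(fakeRes).then((msg) => console.log(msg)); // "HTTP error: 500 Internal Server Error"
```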
@@ -3658,6 +3853,30 @@ class ValOpsHttp extends ValOps {
  };
  }
  }
+
+ // #region profiles
+ async getProfiles() {
+ var _res$headers$get3;
+ const res = await fetch(`${this.hostUrl}/v1/${this.project}/profiles`, {
+ headers: {
+ ...this.authHeaders,
+ "Content-Type": "application/json"
+ }
+ });
+ if (res.ok) {
+ const parsed = ProfilesResponse.safeParse(await res.json());
+ if (parsed.error) {
+ console.error("Could not parse profiles response", parsed.error);
+ throw Error(`Could not get profiles from remote server: wrong format. You might need to upgrade Val.`);
+ }
+ return parsed.data.profiles;
+ }
+ if ((_res$headers$get3 = res.headers.get("Content-Type")) !== null && _res$headers$get3 !== void 0 && _res$headers$get3.includes("application/json")) {
+ const json = await res.json();
+ throw Error(`Could not get profiles (status: ${res.status}): ${"message" in json ? json.message : "Unknown error"}`);
+ }
+ throw Error(`Could not get profiles. Got status: ${res.status}`);
+ }
  }

  /* eslint-disable @typescript-eslint/no-unused-vars */
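The new `getProfiles` method above calls a `/v1/<project>/profiles` endpoint and returns the `profiles` field of the parsed response. A sketch of hitting the same endpoint directly; `hostUrl`, `project`, and `authHeaders` are placeholders, and the shape of each profile entry (defined by `ProfilesResponse`) is not visible in this diff:

```js
// Illustrative direct call to the profiles endpoint used by getProfiles above.
async function fetchProfiles(hostUrl, project, authHeaders) {
  const res = await fetch(`${hostUrl}/v1/${project}/profiles`, {
    headers: { ...authHeaders, "Content-Type": "application/json" },
  });
  if (!res.ok) {
    throw Error(`Could not get profiles. Got status: ${res.status}`);
  }
  const body = await res.json();
  return body.profiles; // per-profile fields are whatever ProfilesResponse defines remotely
}
```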
@@ -4168,8 +4387,74 @@ const ValServer = (valModules, options, callbacks) => {
  }
  },
  //#region patches
- "/patches
+ "/patches": {
+ PUT: async req => {
+ const cookies = req.cookies;
+ const auth = getAuth(cookies);
+ if (auth.error) {
+ return {
+ status: 401,
+ json: {
+ message: auth.error
+ }
+ };
+ }
+ if (serverOps instanceof ValOpsHttp && !("id" in auth)) {
+ return {
+ status: 401,
+ json: {
+ message: "Unauthorized"
+ }
+ };
+ }
+ const patches = req.body.patches;
+ const parentRef = req.body.parentRef;
+ const authorId = "id" in auth ? auth.id : null;
+ const newPatchIds = [];
+ for (const patch of patches) {
+ const createPatchRes = await serverOps.createPatch(patch.path, patch.patch, parentRef, authorId);
+ if (fp.result.isErr(createPatchRes)) {
+ if (createPatchRes.error.errorType === "patch-head-conflict") {
+ return {
+ status: 409,
+ json: {
+ type: "patch-head-conflict",
+ message: "Patch id conflict"
+ }
+ };
+ } else {
+ return {
+ status: 400,
+ json: {
+ type: "patch-error",
+ message: "Could not create patch",
+ errors: {
+ [patch.path]: [{
+ error: {
+ message: createPatchRes.error.error.message
+ }
+ }]
+ }
+ }
+ };
+ }
+ } else {
+ newPatchIds.push(createPatchRes.value.patchId);
+ }
+ }
+ return {
+ status: 200,
+ json: {
+ newPatchIds,
+ parentRef: {
+ type: "patch",
+ patchId: newPatchIds[newPatchIds.length - 1]
+ }
+ }
+ };
+ },
  GET: async req => {
+ // TODO: Fix type error patchId is string somewhere and PatchId somewhere else
  const query = req.query;
  const cookies = req.cookies;
  const auth = getAuth(cookies);
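The new `PUT /patches` handler above accepts `{ patches, parentRef }`, creates each patch in order, and answers 200 with `{ newPatchIds, parentRef }`, 409 on a stale parent ref, or 400 on a patch error. A client-side sketch of that contract; `apiBase`, the example module path, and cookie handling are placeholders for however the Val API is mounted in an app:

```js
// Illustrative client for the PUT /patches contract shown above.
async function putPatches(apiBase, patches, parentRef) {
  const res = await fetch(`${apiBase}/patches`, {
    method: "PUT",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({ patches, parentRef }),
  });
  const json = await res.json();
  if (res.status === 409) {
    // Someone else advanced the patch head: refetch patches and retry with a fresh parentRef.
    return { retry: true, reason: json.message };
  }
  if (!res.ok) {
    return { retry: false, error: json };
  }
  // On success the response carries the created ids and the parentRef to use for the next write.
  return { newPatchIds: json.newPatchIds, parentRef: json.parentRef };
}

// Example shape only: `previousParentRef` would be a parentRef returned by an earlier call,
// and the patch array follows the { path, patch } entries read by the handler.
// putPatches("/api/val", [{ path: "/content/pages.val.ts", patch: [] }], previousParentRef);
```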
@@ -4189,38 +4474,52 @@ const ValServer = (valModules, options, callbacks) => {
  }
  };
  }
+ const omit_patch = query.omit_patch === true;
  const authors = query.author;
- const
+ const fetchedPatchesRes = await serverOps.fetchPatches({
  authors,
  patchIds: query.patch_id,
- omitPatch:
+ omitPatch: omit_patch,
  moduleFilePaths: query.module_file_path
  });
- if (
+ if (fetchedPatchesRes.error) {
  // Error is singular
- console.error("Val: Failed to get patches",
+ console.error("Val: Failed to get patches", fetchedPatchesRes.error);
  return {
  status: 500,
  json: {
- message:
-
+ message: fetchedPatchesRes.error.message,
+ error: fetchedPatchesRes.error
  }
  };
  }
- if (
+ if (fetchedPatchesRes.errors && Object.keys(fetchedPatchesRes.errors).length > 0) {
  // Errors is plural. Different property than above.
- console.error("Val: Failed to get patches",
+ console.error("Val: Failed to get patches", fetchedPatchesRes.errors);
  return {
  status: 500,
  json: {
  message: "Failed to get patches",
-
+ patchErrors: fetchedPatchesRes.errors
  }
  };
  }
+ const patches = [];
+ for (const [patchIdS, patchData] of Object.entries(fetchedPatchesRes.patches)) {
+ const patchId = patchIdS;
+ patches.push({
+ patchId,
+ ...patchData
+ });
+ }
+ // TODO: we should sort by parentRef instead:
+ patches.sort((a, b) => a.createdAt.localeCompare(b.createdAt));
  return {
  status: 200,
- json:
+ json: {
+ patches,
+ baseSha: await serverOps.getBaseSha()
+ }
  };
  },
  DELETE: async req => {
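With the hunk above, `GET /patches` now returns a flat `patches` array (sorted by `createdAt`) together with the current `baseSha`, instead of the previous keyed structure. A sketch of consuming that response; `apiBase` is again a placeholder:

```js
// Illustrative consumer of the reshaped GET /patches response.
async function listPatches(apiBase) {
  const res = await fetch(`${apiBase}/patches`);
  if (!res.ok) {
    throw Error(`Failed to get patches: ${res.status}`);
  }
  const { patches, baseSha } = await res.json();
  // Each entry carries its patchId next to the patch data (path, createdAt and the other fields).
  const lastPatchId = patches.length > 0 ? patches[patches.length - 1].patchId : null;
  return { baseSha, lastPatchId, count: patches.length };
}
```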
@@ -4251,7 +4550,10 @@ const ValServer = (valModules, options, callbacks) => {
  status: 500,
  json: {
  message: "Failed to delete patches",
-
+ errors: Object.entries(deleteRes.errors).map(([id, error]) => ({
+ patchId: id,
+ ...error
+ }))
  }
  };
  }

@@ -4323,13 +4625,12 @@ const ValServer = (valModules, options, callbacks) => {
  }
  },
  // #region sources
- "/sources": {
+ "/sources/~": {
  PUT: async req => {
- var _body$patchIds;
  const query = req.query;
  const cookies = req.cookies;
-
- const
+ // TODO: filter results by moduleFilePath
+ // const moduleFilePath = req.path || "";
  const auth = getAuth(cookies);
  if (auth.error) {
  return {
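The DELETE hunk above flattens the per-patch error record into an array of `{ patchId, ...error }` entries for the JSON response. The same transformation in isolation, with purely illustrative sample data:

```js
// Illustrative: turn a Record<patchId, error> into the array shape returned by the handler.
const deleteErrors = {
  "patch-1": { message: "Patch not found" },
  "patch-2": { message: "Already applied" },
};
const errors = Object.entries(deleteErrors).map(([id, error]) => ({
  patchId: id,
  ...error,
}));
console.log(errors);
// [ { patchId: "patch-1", message: "Patch not found" },
//   { patchId: "patch-2", message: "Already applied" } ]
```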
@@ -4358,154 +4659,108 @@ const ValServer = (valModules, options, callbacks) => {
  }
  };
  }
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- }
- };
- }
- let patchErrors = undefined;
- for (const [patchIdS, error] of Object.entries(patchOps.errors || {})) {
- const patchId = patchIdS;
- if (!patchErrors) {
- patchErrors = {};
- }
- patchErrors[patchId] = {
- message: error.message
- };
- }
- // TODO: errors
- patchAnalysis = serverOps.analyzePatches(patchOps.patches);
- if (body !== null && body !== void 0 && body.addPatches) {
- for (const addPatch of body.addPatches) {
- const newPatchModuleFilePath = addPatch.path;
- const newPatchOps = addPatch.patch;
- const authorId = "id" in auth ? auth.id : null;
- const createPatchRes = await serverOps.createPatch(newPatchModuleFilePath, {
- ...patchAnalysis,
- ...patchOps
- }, newPatchOps, authorId);
- if (createPatchRes.error) {
- return {
- status: 500,
- json: {
- message: "Failed to create patch: " + createPatchRes.error.message,
- details: createPatchRes.error
- }
- };
- }
- if (!newPatchIds) {
- newPatchIds = [createPatchRes.patchId];
- } else {
- newPatchIds.push(createPatchRes.patchId);
- }
- patchOps.patches[createPatchRes.patchId] = {
- path: newPatchModuleFilePath,
- patch: newPatchOps,
- authorId,
- createdAt: createPatchRes.createdAt,
- appliedAt: null
- };
- patchAnalysis.patchesByModule[newPatchModuleFilePath] = [...(patchAnalysis.patchesByModule[newPatchModuleFilePath] || []), {
- patchId: createPatchRes.patchId
- }];
- }
- }
- tree = {
- ...(await serverOps.getTree({
- ...patchAnalysis,
- ...patchOps
- }))
- };
- if (query.validate_all) {
- const allTree = await serverOps.getTree();
- tree = {
- sources: {
- ...allTree.sources,
- ...tree.sources
- },
- errors: {
- ...allTree.errors,
- ...tree.errors
- }
- };
+ const patchOps = await serverOps.fetchPatches({
+ patchIds: undefined,
+ omitPatch: false
+ });
+ const patchAnalysis = serverOps.analyzePatches(patchOps.patches);
+ let sourcesRes = await serverOps.getSources();
+ const onlyPatchedTreeModules = await serverOps.getSources({
+ ...patchAnalysis,
+ ...patchOps
+ });
+ sourcesRes = {
+ sources: {
+ ...sourcesRes.sources,
+ ...(onlyPatchedTreeModules.sources || {})
+ },
+ errors: {
+ ...sourcesRes.errors,
+ ...(onlyPatchedTreeModules.errors || {})
  }
- }
- tree = await serverOps.getTree();
- }
+ };
  let sourcesValidation = {
  errors: {},
  files: {}
  };
  if (query.validate_sources || query.validate_binary_files) {
  const schemas = await serverOps.getSchemas();
- sourcesValidation = await serverOps.validateSources(schemas,
+ sourcesValidation = await serverOps.validateSources(schemas, sourcesRes.sources);

- // TODO: send validation errors
+ // TODO: send binary files validation errors
  if (query.validate_binary_files) {
- await serverOps.validateFiles(schemas,
+ await serverOps.validateFiles(schemas, sourcesRes.sources, sourcesValidation.files);
  }
  }
  const schemaSha = await serverOps.getSchemaSha();
  const modules = {};
- for (const [moduleFilePathS, module] of Object.entries(
+ for (const [moduleFilePathS, module] of Object.entries(sourcesRes.sources)) {
  const moduleFilePath = moduleFilePathS;
- if (moduleFilePath.startsWith(
- var _sourcesValidation$er;
+ if (moduleFilePath.startsWith(moduleFilePath)) {
+ var _patchAnalysis$patche, _sourcesValidation$er;
+ const skippedPatches = [];
+ const patchErrors = {};
+ const appliedPatches = ((_patchAnalysis$patche = patchAnalysis.patchesByModule[moduleFilePath]) === null || _patchAnalysis$patche === void 0 ? void 0 : _patchAnalysis$patche.map(p => p.patchId)) || [];
+ for (const {
+ patchId,
+ skipped,
+ error
+ } of ((_sourcesRes$errors = sourcesRes.errors) === null || _sourcesRes$errors === void 0 ? void 0 : _sourcesRes$errors[moduleFilePath]) || []) {
+ var _sourcesRes$errors;
+ if (skipped) {
+ skippedPatches.push(patchId);
+ } else if (error) {
+ patchErrors[patchId] = {
+ message: error.message
+ };
+ } else {
+ // unsure what makes sense here
+ appliedPatches.push(patchId);
+ }
+ }
  modules[moduleFilePath] = {
  source: module,
- patches:
- applied:
+ patches: appliedPatches.length > 0 || skippedPatches.length > 0 || Object.keys(patchErrors).length > 0 ? {
+ applied: appliedPatches,
+ skipped: skippedPatches.length > 0 ? skippedPatches : undefined,
+ errors: Object.keys(patchErrors).length > 0 ? patchErrors : undefined
  } : undefined,
  validationErrors: (_sourcesValidation$er = sourcesValidation.errors[moduleFilePath]) === null || _sourcesValidation$er === void 0 ? void 0 : _sourcesValidation$er.validations
  };
  }
  }
- if (tree.errors && Object.keys(tree.errors).length > 0) {
- const res = {
- status: 400,
- json: {
- type: "patch-error",
- schemaSha,
- modules,
- errors: Object.fromEntries(Object.entries(tree.errors).map(([key, value]) => [key, value.map(error => ({
- patchId: error.patchId,
- skipped: error.skipped,
- error: {
- message: error.error.message
- }
- }))])),
- message: "One or more patches failed to be applied"
- }
- };
- return res;
- }
  const res = {
  status: 200,
  json: {
  schemaSha,
- modules
- newPatchIds
+ modules
  }
  };
  return res;
  }
  },
+ "/profiles": {
+ GET: async req => {
+ // const cookies = req.cookies;
+ // const auth = getAuth(cookies);
+ // if (auth.error) {
+ // return {
+ // status: 401,
+ // json: {
+ // message: auth.error,
+ // },
+ // };
+ // }
+
+ const profiles = await serverOps.getProfiles();
+ return {
+ status: 200,
+ json: {
+ profiles
+ }
+ };
+ }
+ },
  "/save": {
  POST: async req => {
  const cookies = req.cookies;
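The reworked `PUT /sources/~` handler above no longer takes patches in the request body: it fetches and applies all pending patches itself and returns, per module, the patched source, which patches were applied or skipped, and any validation errors (when `validate_sources` / `validate_binary_files` are set). A sketch of consuming that response; `apiBase` is a placeholder:

```js
// Illustrative consumer of the PUT /sources/~ response shape built above.
async function getValidatedSources(apiBase) {
  const res = await fetch(`${apiBase}/sources/~?validate_sources=true`, { method: "PUT" });
  if (!res.ok) {
    throw Error(`Failed to get sources: ${res.status}`);
  }
  const { schemaSha, modules } = await res.json();
  for (const [moduleFilePath, mod] of Object.entries(modules)) {
    if (mod.validationErrors) {
      console.warn(`${moduleFilePath} has validation errors`, mod.validationErrors);
    }
    if (mod.patches && mod.patches.skipped) {
      console.warn(`${moduleFilePath} skipped patches`, mod.patches.skipped);
    }
  }
  return schemaSha;
}
```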
@@ -4519,8 +4774,8 @@ const ValServer = (valModules, options, callbacks) => {
  }
  };
  }
- const PostSaveBody =
- patchIds:
+ const PostSaveBody = zod.z.object({
+ patchIds: zod.z.array(zod.z.string().refine(id => true // TODO:
  ))
  });
  const bodyRes = PostSaveBody.safeParse(body);
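The `/save` hunk above rebuilds the request-body schema on the namespaced `zod.z` import. The same pattern in isolation (the `refine` in the diff is a placeholder that always returns true, so a plain string array behaves identically here):

```js
// Illustrative body validation mirroring the PostSaveBody schema above.
const zod = require("zod");

const PostSaveBody = zod.z.object({
  patchIds: zod.z.array(zod.z.string()),
});

const bodyRes = PostSaveBody.safeParse({ patchIds: ["patch-1", "patch-2"] });
if (bodyRes.success) {
  console.log("patch ids to save:", bodyRes.data.patchIds);
} else {
  console.log("invalid body:", bodyRes.error.issues);
}
```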
@@ -4768,18 +5023,18 @@ function getStateFromCookie(stateCookie) {
  function createStateCookie(state) {
  return Buffer.from(JSON.stringify(state), "utf8").toString("base64");
  }
- const ValAppJwtPayload =
- sub:
- exp:
- project:
- org:
+ const ValAppJwtPayload = zod.z.object({
+ sub: zod.z.string(),
+ exp: zod.z.number(),
+ project: zod.z.string(),
+ org: zod.z.string()
  });
- const IntegratedServerJwtPayload =
- sub:
- exp:
- token:
- org:
- project:
+ const IntegratedServerJwtPayload = zod.z.object({
+ sub: zod.z.string(),
+ exp: zod.z.number(),
+ token: zod.z.string(),
+ org: zod.z.string(),
+ project: zod.z.string()
  });
  async function withAuth(secret, cookies, errorMessageType, handler) {
  const cookie = cookies[internal.VAL_SESSION_COOKIE];
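The JWT payload schemas above are unchanged in shape, just rebuilt on the `zod.z` namespace import. A sketch of checking a decoded payload against the `ValAppJwtPayload` fields; the payload values are illustrative:

```js
// Illustrative: validate a decoded JWT payload against the ValAppJwtPayload fields above.
const zod = require("zod");

const ValAppJwtPayload = zod.z.object({
  sub: zod.z.string(),
  exp: zod.z.number(),
  project: zod.z.string(),
  org: zod.z.string(),
});

const payload = {
  sub: "user-123",
  exp: Math.floor(Date.now() / 1000) + 60 * 60,
  project: "my-project",
  org: "my-org",
};
console.log(ValAppJwtPayload.safeParse(payload).success); // true for this illustrative payload
```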
@@ -5236,21 +5491,21 @@ function createValApiRouter(route, valServerPromise, convert) {
  let isOptional = false;
  let isArray = false;
  // extract inner types:
- if (innerType instanceof
+ if (innerType instanceof zod.z.ZodOptional) {
  isOptional = true;
  innerType = innerType.unwrap();
  }
- if (innerType instanceof
+ if (innerType instanceof zod.z.ZodArray) {
  isArray = true;
  innerType = innerType.element;
  }
  // convert boolean to union of literals true and false so we can parse it as a string
- if (innerType instanceof
- innerType =
+ if (innerType instanceof zod.z.ZodBoolean) {
+ innerType = zod.z.union([zod.z.literal("true"), zod.z.literal("false")]).transform(arg => arg === "true");
  }
  // re-build rules:
  let arrayCompatibleRule = innerType;
- arrayCompatibleRule =
+ arrayCompatibleRule = zod.z.array(innerType); // we always want to parse an array because we group the query params by into an array
  if (isOptional) {
  arrayCompatibleRule = arrayCompatibleRule.optional();
  }
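The hunk above restores the query-rule rewriting on the `zod.z` namespace: query values always arrive as strings, so a `ZodBoolean` rule is replaced by a union of the `"true"`/`"false"` literals and transformed back into a boolean before parsing. The coercion in isolation:

```js
// Illustrative boolean-from-string coercion, as used for query params above.
const zod = require("zod");

const booleanFromString = zod.z
  .union([zod.z.literal("true"), zod.z.literal("false")])
  .transform((arg) => arg === "true");

console.log(booleanFromString.parse("true"));  // true
console.log(booleanFromString.parse("false")); // false
console.log(booleanFromString.safeParse("yes").success); // false: only the two literals parse
```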
@@ -5259,7 +5514,7 @@ function createValApiRouter(route, valServerPromise, convert) {
  }
  queryRules[key] = arrayCompatibleRule;
  }
- const queryRes =
+ const queryRes = zod.z.object(queryRules).safeParse(actualQueryParams);
  if (!queryRes.success) {
  return zodErrorResult(queryRes.error, `invalid query params: (${JSON.stringify(actualQueryParams)})`);
  }
@@ -5333,7 +5588,7 @@ function getCookies(req, cookiesDef) {
  input[name.trim()] = value;
  }
  }
- return
+ return zod.z.object(cookiesDef).safeParse(input);
  }

  /**
@@ -5594,8 +5849,6 @@ async function createFixPatch(config, apply, sourcePath, validationError) {
  };
  }

- exports.Patch = Patch;
- exports.PatchJSON = PatchJSON;
  exports.Service = Service;
  exports.ValFSHost = ValFSHost;
  exports.ValModuleLoader = ValModuleLoader;