@valbuild/server 0.75.5 → 0.76.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/valbuild-server.cjs.dev.js +279 -305
- package/dist/valbuild-server.cjs.prod.js +279 -305
- package/dist/valbuild-server.esm.js +278 -304
- package/package.json +4 -4
@@ -7,7 +7,7 @@ var ts = require('typescript');
 var fp = require('@valbuild/core/fp');
 var core = require('@valbuild/core');
 var patch = require('@valbuild/core/patch');
-var
+var path = require('path');
 var fs = require('fs');
 var sucrase = require('sucrase');
 var ui = require('@valbuild/ui');
@@ -41,7 +41,7 @@ function _interopNamespace(e) {
 }

 var ts__default = /*#__PURE__*/_interopDefault(ts);
-var
+var path__namespace = /*#__PURE__*/_interopNamespace(path);
 var fs__default = /*#__PURE__*/_interopDefault(fs);
 var crypto__default = /*#__PURE__*/_interopDefault(crypto$1);
 var sizeOf__default = /*#__PURE__*/_interopDefault(sizeOf);
@@ -819,7 +819,7 @@ class TSOps {
 }

 function getSyntheticContainingPath(rootDir) {
-return
+return path__namespace["default"].join(rootDir, "<val>"); // TODO: this is the synthetic path used when evaluating / patching modules. I am not sure <val> is the best choice: val.ts / js better? But that is weird too. At least now it is clear(er) that it is indeed a synthetic file (i.e. not an actual file)
 }

 const ops = new TSOps(document => {
@@ -990,8 +990,8 @@ globalThis.valModule = {
 };

 const getCompilerOptions = (rootDir, parseConfigHost) => {
-const tsConfigPath =
-const jsConfigPath =
+const tsConfigPath = path__namespace["default"].resolve(rootDir, "tsconfig.json");
+const jsConfigPath = path__namespace["default"].resolve(rootDir, "jsconfig.json");
 let configFilePath;
 if (parseConfigHost.fileExists(jsConfigPath)) {
 configFilePath = jsConfigPath;
@@ -1022,7 +1022,7 @@ class ValSourceFileHandler {
 constructor(projectRoot, compilerOptions, host = {
 ...ts__default["default"].sys,
 writeFile: (fileName, data, encoding) => {
-fs__default["default"].mkdirSync(
+fs__default["default"].mkdirSync(path__namespace["default"].dirname(fileName), {
 recursive: true
 });
 fs__default["default"].writeFileSync(fileName, typeof data === "string" ? data : new Uint8Array(data), encoding);
@@ -1049,7 +1049,7 @@ class ValSourceFileHandler {
 this.host.writeFile(filePath, content, encoding);
 }
 resolveSourceModulePath(containingFilePath, requestedModuleName) {
-const resolutionRes = ts__default["default"].resolveModuleName(requestedModuleName,
+const resolutionRes = ts__default["default"].resolveModuleName(requestedModuleName, path__namespace["default"].isAbsolute(containingFilePath) ? containingFilePath : path__namespace["default"].resolve(this.projectRoot, containingFilePath), this.compilerOptions, this.host, undefined, undefined, ts__default["default"].ModuleKind.ESNext);
 const resolvedModule = resolutionRes.resolvedModule;
 if (!resolvedModule) {
 throw Error(`Could not resolve module "${requestedModuleName}", base: "${containingFilePath}": No resolved modules returned: ${JSON.stringify(resolutionRes)}`);
@@ -1072,7 +1072,7 @@ class ValModuleLoader {
 sourceFileHandler, host = {
 ...ts__default["default"].sys,
 writeFile: (fileName, data, encoding) => {
-fs__default["default"].mkdirSync(
+fs__default["default"].mkdirSync(path__namespace["default"].dirname(fileName), {
 recursive: true
 });
 fs__default["default"].writeFileSync(fileName, typeof data === "string" ? data : new Uint8Array(data), encoding);
@@ -1365,7 +1365,7 @@ export default new Proxy({}, {
 async function createService(projectRoot, opts, host = {
 ...ts__default["default"].sys,
 writeFile: (fileName, data, encoding) => {
-fs__default["default"].mkdirSync(
+fs__default["default"].mkdirSync(path__namespace["default"].dirname(fileName), {
 recursive: true
 });
 fs__default["default"].writeFileSync(fileName, typeof data === "string" ? data : new Uint8Array(data), encoding);
@@ -2245,96 +2245,8 @@ class ValOps {
 }

 // #region createPatch
-async createPatch(path, patch
-const
-const schemas = initTree.schemas;
-const moduleErrors = initTree.moduleErrors;
-let sources = initTree.sources;
-if (parentRef.type !== "head") {
-// There's room for some optimizations here: we could do this once, then re-use every time we create a patch, then again we only create one patch at a time
-const patchOps = await this.fetchPatches({
-excludePatchOps: false
-});
-const patchAnalysis = this.analyzePatches(patchOps.patches);
-const tree = await this.getSources({
-...patchAnalysis,
-...patchOps
-});
-sources = {
-...sources,
-...tree.sources
-};
-}
-const source = sources[path];
-const schema = schemas[path];
-const moduleError = moduleErrors.find(e => e.path === path);
-if (moduleError) {
-console.error(`Cannot patch. Module at path: '${path}' has fatal errors: "${moduleError.message}"`);
-return fp.result.err({
-errorType: "other",
-error: {
-message: `Cannot patch. Module at path: '${path}' has fatal errors: ` + moduleErrors.map(m => `"${m.message}"`).join(" and ")
-}
-});
-}
-if (source === undefined) {
-console.error(`Cannot patch. Module source at path: '${path}' does not exist`);
-return fp.result.err({
-errorType: "other",
-error: {
-message: `Cannot patch. Module source at path: '${path}' does not exist`
-}
-});
-}
-if (!schema) {
-console.error(`Cannot patch. Module schema at path: '${path}' does not exist`);
-return fp.result.err({
-errorType: "other",
-error: {
-message: `Cannot patch. Module schema at path: '${path}' does not exist`
-}
-});
-}
-const sourceFileOps = [];
-const files = {};
-for (const op of patch$1) {
-if (op.op !== "file") {
-sourceFileOps.push(op);
-} else {
-const {
-value,
-filePath
-} = op;
-if (files[filePath]) {
-console.error(`Cannot have multiple files with same path in same patch. Path: ${filePath}`);
-files[filePath] = {
-error: new patch.PatchError("Cannot have multiple files with same path in same patch")
-};
-} else if (typeof value !== "string") {
-console.error(`Value is not a string. Path: ${filePath}. Value: ${value}`);
-files[filePath] = {
-error: new patch.PatchError("Value is not a string")
-};
-} else {
-const sha256 = core.Internal.getSHA256Hash(textEncoder$2.encode(value));
-files[filePath] = {
-value,
-sha256,
-path: op.path,
-remote: op.remote
-};
-sourceFileOps.push({
-op: "file",
-path: op.path,
-filePath,
-nestedFilePath: op.nestedFilePath,
-value: sha256,
-remote: op.remote
-});
-}
-}
-}
-const saveRes = await this.saveSourceFilePatch(path, patch$1, patchId, parentRef, authorId);
+async createPatch(path, patch, patchId, parentRef, authorId) {
+const saveRes = await this.saveSourceFilePatch(path, patch, patchId, parentRef, authorId);
 if (fp.result.isErr(saveRes)) {
 console.error(`Could not save source patch at path: '${path}'. Error: ${saveRes.error.errorType === "other" ? saveRes.error.message : saveRes.error.errorType}`);
 if (saveRes.error.errorType === "patch-head-conflict") {
@@ -2347,95 +2259,8 @@ class ValOps {
 error: saveRes.error
 });
 }
-const saveFileRes = await Promise.all(Object.entries(files).map(async ([filePath, data]) => {
-if (data.error) {
-return {
-filePath,
-error: data.error
-};
-} else {
-var _lastRes;
-let type;
-const modulePath = core.Internal.patchPathToModulePath(data.path);
-try {
-const {
-schema: schemaAtPath
-} = core.Internal.resolvePath(modulePath, source, schema);
-type = schemaAtPath instanceof core.ImageSchema || schemaAtPath instanceof core.RichTextSchema // if it's a rich text schema, we assume it's an image - hope this assumption holds!
-? "image" : schemaAtPath instanceof core.FileSchema ? "file" : schemaAtPath.serialize().type;
-} catch (e) {
-if (e instanceof Error) {
-console.error(`Could not resolve file type at: ${modulePath}. Error: ${e.message}`);
-return {
-filePath,
-error: new patch.PatchError(`Could not resolve file type at: ${modulePath}. Error: ${e.message}`)
-};
-}
-console.error(`Could not resolve file type at: ${modulePath}. Unknown error.`);
-return {
-filePath,
-error: new patch.PatchError(`Could not resolve file type at: ${modulePath}. Unknown error.`)
-};
-}
-if (type !== "image" && type !== "file") {
-console.error("Unknown file type (resolved from schema): " + type);
-return {
-filePath,
-error: new patch.PatchError("Unknown file type (resolved from schema): " + type)
-};
-}
-const mimeType = getMimeTypeFromBase64(data.value);
-if (!mimeType) {
-console.error("Could not get mimeType from base 64 encoded value");
-return {
-filePath,
-error: new patch.PatchError("Could not get mimeType from base 64 encoded value. First chars were: " + data.value.slice(0, 20))
-};
-}
-const buffer = bufferFromDataUrl(data.value);
-if (!buffer) {
-console.error("Could not create buffer from base 64 encoded value");
-return {
-filePath,
-error: new patch.PatchError("Could not create buffer from base 64 encoded value")
-};
-}
-const metadataOps = createMetadataFromBuffer(type, mimeType, buffer);
-if (metadataOps.errors) {
-console.error(`Could not get metadata. Errors: ${metadataOps.errors.map(error => error.message).join(", ")}`);
-return {
-filePath,
-error: new patch.PatchError(`Could not get metadata. Errors: ${metadataOps.errors.map(error => error.message).join(", ")}`)
-};
-}
-const MaxRetries = 3;
-let lastRes;
-for (let i = 0; i < MaxRetries; i++) {
-lastRes = await this.saveBase64EncodedBinaryFileFromPatch(filePath, parentRef, patchId, data.value, type, metadataOps.metadata, data.remote);
-if (!lastRes.error) {
-return {
-filePath
-};
-}
-}
-return {
-filePath,
-error: new patch.PatchError(((_lastRes = lastRes) === null || _lastRes === void 0 || (_lastRes = _lastRes.error) === null || _lastRes === void 0 ? void 0 : _lastRes.message) || "Unexpectedly could not save patch file")
-};
-}
-}));
-const errors = saveFileRes.filter(f => !!f.error);
-if (errors.length > 0) {
-return fp.result.err({
-errorType: "other",
-error: {
-message: "Could not save patch: " + errors.map(e => e.error.message).join(", ")
-}
-});
-}
 return fp.result.ok({
 patchId,
-files: saveFileRes,
 createdAt: new Date().toISOString()
 });
 }
@@ -2511,17 +2336,6 @@ function createMetadataFromBuffer(type, mimeType, buffer) {
 metadata
 };
 }
-const base64DataAttr = "data:";
-function getMimeTypeFromBase64(content) {
-const dataIndex = content.indexOf(base64DataAttr);
-const base64Index = content.indexOf(";base64,");
-if (dataIndex > -1 || base64Index > -1) {
-const mimeType = content.slice(dataIndex + base64DataAttr.length, base64Index);
-const normalizedMimeType = mimeType === "image/jpg" ? "image/jpeg" : mimeType;
-return normalizedMimeType;
-}
-return null;
-}
 function bufferFromDataUrl(dataUrl) {
 let base64Data;
 const base64Index = dataUrl.indexOf(";base64,");
@@ -2685,7 +2499,7 @@ class ValOpsFS extends ValOps {
 const mtimeInDir = {};
 if (fs__default["default"].existsSync(dir)) {
 for (const file of fs__default["default"].readdirSync(dir)) {
-mtimeInDir[file] = fs__default["default"].statSync(
+mtimeInDir[file] = fs__default["default"].statSync(path__namespace["default"].join(dir, file)).mtime.getTime();
 }
 }
 return new Promise(resolve => {
@@ -2698,7 +2512,7 @@ class ValOpsFS extends ValOps {
 resolve("request-again");
 }
 for (const file of fs__default["default"].readdirSync(dir)) {
-const mtime = fs__default["default"].statSync(
+const mtime = fs__default["default"].statSync(path__namespace["default"].join(dir, file)).mtime.getTime();
 if (mtime !== mtimeInDir[file]) {
 resolve("request-again");
 }
@@ -2759,7 +2573,7 @@ class ValOpsFS extends ValOps {
 patchesDirHandle = handle;
 }),
 // we poll the files that Val depends on for changes
-disableFilePolling ? new Promise(() => {}) : didFilesChangeUsingPolling([
+disableFilePolling ? new Promise(() => {}) : didFilesChangeUsingPolling([path__namespace["default"].join(this.rootDir, "val.config.ts"), path__namespace["default"].join(this.rootDir, "val.modules.ts"), path__namespace["default"].join(this.rootDir, "val.config.js"), path__namespace["default"].join(this.rootDir, "val.modules.js"), ...moduleFilePaths.map(p => path__namespace["default"].join(this.rootDir, p))], statFilePollingInterval, handle => {
 valFilesIntervalHandle = handle;
 }), new Promise(resolve => {
 fsWatcher = fs__default["default"].watch(this.rootDir, {
@@ -2817,7 +2631,7 @@ class ValOpsFS extends ValOps {
 }
 const patches = {};
 const errors = [];
-const parsedUnsortedFsPatches = patchJsonFiles.map(file =>
+const parsedUnsortedFsPatches = patchJsonFiles.map(file => path__namespace["default"].basename(path__namespace["default"].dirname(file))).map(patchDir => [patchDir, this.parseJsonFile(this.getPatchFilePath(patchDir), FSPatch), this.host.fileExists(this.getPatchBaseFile(patchDir)) ? this.parseJsonFile(this.getPatchBaseFile(patchDir), FSPatchBase) : undefined]);
 parsedUnsortedFsPatches.forEach(([dir, parsedPatch, parsedBase]) => {
 if (parsedPatch.error) {
 errors.push({
@@ -3028,9 +2842,7 @@ class ValOpsFS extends ValOps {
 }
 const writeRes = this.host.tryWriteUf8File(this.getPatchFilePath(patchDir), JSON.stringify(data));
 if (writeRes.type === "error") {
-return
-errorType: "patch-head-conflict"
-}) : fp.result.err({
+return fp.result.err({
 errorType: "other",
 error: writeRes.error,
 message: "Failed to write patch file"
@@ -3055,7 +2867,7 @@ class ValOpsFS extends ValOps {
 }
 }
 async getSourceFile(path) {
-const filePath =
+const filePath = path__namespace["default"].join(this.rootDir, path);
 if (!this.host.fileExists(filePath)) {
 return {
 error: {
@@ -3068,7 +2880,7 @@ class ValOpsFS extends ValOps {
 };
 }
 async saveSourceFile(path, data) {
-const filePath =
+const filePath = path__namespace["default"].join(this.rootDir, ...path.split("/"));
 try {
 this.host.writeUf8File(filePath, data);
 return {
|
|
3089
2901
|
};
|
3090
2902
|
}
|
3091
2903
|
}
|
3092
|
-
async saveBase64EncodedBinaryFileFromPatch(filePath, parentRef, patchId, data, _type, metadata
|
2904
|
+
async saveBase64EncodedBinaryFileFromPatch(filePath, parentRef, patchId, data, _type, metadata) {
|
3093
2905
|
const patchDir = this.getParentPatchIdFromParentRef(parentRef);
|
3094
|
-
const patchFilePath = this.getBinaryFilePath(filePath, patchDir
|
3095
|
-
const metadataFilePath = this.getBinaryFileMetadataPath(filePath, patchDir
|
2906
|
+
const patchFilePath = this.getBinaryFilePath(filePath, patchDir);
|
2907
|
+
const metadataFilePath = this.getBinaryFileMetadataPath(filePath, patchDir);
|
3096
2908
|
try {
|
3097
2909
|
const buffer = bufferFromDataUrl(data);
|
3098
2910
|
if (!buffer) {
|
@@ -3123,7 +2935,7 @@ class ValOpsFS extends ValOps {
|
|
3123
2935
|
};
|
3124
2936
|
}
|
3125
2937
|
}
|
3126
|
-
async getBase64EncodedBinaryFileMetadataFromPatch(filePath, type, patchId
|
2938
|
+
async getBase64EncodedBinaryFileMetadataFromPatch(filePath, type, patchId) {
|
3127
2939
|
const patchDirRes = await this.getParentPatchIdFromPatchId(patchId);
|
3128
2940
|
if (fp.result.isErr(patchDirRes)) {
|
3129
2941
|
return {
|
@@ -3132,7 +2944,7 @@ class ValOpsFS extends ValOps {
|
|
3132
2944
|
}]
|
3133
2945
|
};
|
3134
2946
|
}
|
3135
|
-
const metadataFilePath = this.getBinaryFileMetadataPath(filePath, patchDirRes.value
|
2947
|
+
const metadataFilePath = this.getBinaryFileMetadataPath(filePath, patchDirRes.value);
|
3136
2948
|
if (!this.host.fileExists(metadataFilePath)) {
|
3137
2949
|
return {
|
3138
2950
|
errors: [{
|
@@ -3172,10 +2984,7 @@ class ValOpsFS extends ValOps {
|
|
3172
2984
|
if (!fp.result.isOk(patchDirRes)) {
|
3173
2985
|
return null;
|
3174
2986
|
}
|
3175
|
-
const absPath = this.getBinaryFilePath(filePath, patchDirRes.value
|
3176
|
-
// We save remote remote files using the filepath (so not the remote reference) and we also retrieve them using the filepath. Therefore remote is always false
|
3177
|
-
false // remote = false
|
3178
|
-
);
|
2987
|
+
const absPath = this.getBinaryFilePath(filePath, patchDirRes.value);
|
3179
2988
|
if (!this.host.fileExists(absPath)) {
|
3180
2989
|
return null;
|
3181
2990
|
}
|
@@ -3199,7 +3008,7 @@ class ValOpsFS extends ValOps {
|
|
3199
3008
|
}
|
3200
3009
|
async deleteAllPatches() {
|
3201
3010
|
const patchesCacheDir = this.getPatchesDir();
|
3202
|
-
const tmpDir =
|
3011
|
+
const tmpDir = path__namespace["default"].join(this.rootDir, ValOpsFS.VAL_DIR, "patches-deleted-" + crypto.randomUUID());
|
3203
3012
|
try {
|
3204
3013
|
this.host.moveDir(patchesCacheDir, tmpDir);
|
3205
3014
|
this.host.deleteDir(tmpDir);
|
@@ -3314,13 +3123,11 @@ class ValOpsFS extends ValOps {
|
|
3314
3123
|
};
|
3315
3124
|
continue;
|
3316
3125
|
}
|
3317
|
-
console.log("Uploading remote file", ref);
|
3318
3126
|
const res = await uploadRemoteFile(this.contentUrl, this.options.config.project, splitRemoteRefRes.bucket, splitRemoteRefRes.fileHash, getFileExt(splitRemoteRefRes.filePath), fileBuffer, auth);
|
3319
3127
|
if (!res.success) {
|
3320
3128
|
console.error("Failed to upload remote file", ref, res.error);
|
3321
3129
|
throw new Error(`Failed to upload remote file: ${ref}. ${res.error}`);
|
3322
3130
|
}
|
3323
|
-
console.log("Completed remote file", ref);
|
3324
3131
|
uploadedRemoteRefs.push(ref);
|
3325
3132
|
}
|
3326
3133
|
}
|
@@ -3337,7 +3144,7 @@ class ValOpsFS extends ValOps {
|
|
3337
3144
|
patchId
|
3338
3145
|
}] of localFileDescriptors) {
|
3339
3146
|
const filePath = ref;
|
3340
|
-
const absPath =
|
3147
|
+
const absPath = path__namespace["default"].join(this.rootDir, ...filePath.split("/"));
|
3341
3148
|
try {
|
3342
3149
|
const patchDir = patchIdToPatchDirMap[patchId];
|
3343
3150
|
if (!patchDir) {
|
@@ -3347,7 +3154,7 @@ class ValOpsFS extends ValOps {
|
|
3347
3154
|
};
|
3348
3155
|
continue;
|
3349
3156
|
}
|
3350
|
-
this.host.copyFile(this.getBinaryFilePath(filePath, patchDir
|
3157
|
+
this.host.copyFile(this.getBinaryFilePath(filePath, patchDir), absPath);
|
3351
3158
|
updatedFiles.push(absPath);
|
3352
3159
|
} catch (err) {
|
3353
3160
|
errors[absPath] = {
|
@@ -3357,7 +3164,7 @@ class ValOpsFS extends ValOps {
|
|
3357
3164
|
}
|
3358
3165
|
}
|
3359
3166
|
for (const [filePath, data] of Object.entries(preparedCommit.patchedSourceFiles)) {
|
3360
|
-
const absPath =
|
3167
|
+
const absPath = path__namespace["default"].join(this.rootDir, ...filePath.split("/"));
|
3361
3168
|
try {
|
3362
3169
|
this.host.writeUf8File(absPath, data);
|
3363
3170
|
updatedFiles.push(absPath);
|
@@ -3397,7 +3204,7 @@ class ValOpsFS extends ValOps {
|
|
3397
3204
|
};
|
3398
3205
|
}
|
3399
3206
|
async getBinaryFile(filePath) {
|
3400
|
-
const absPath =
|
3207
|
+
const absPath = path__namespace["default"].join(this.rootDir, ...filePath.split("/"));
|
3401
3208
|
if (!this.host.fileExists(absPath)) {
|
3402
3209
|
return null;
|
3403
3210
|
}
|
@@ -3418,7 +3225,7 @@ class ValOpsFS extends ValOps {
 if (!mimeType) {
 return {
 errors: [{
-message: `Could not guess mime type of file ext: ${
+message: `Could not guess mime type of file ext: ${path__namespace["default"].extname(filePath)}`,
 filePath
 }]
 };
@@ -3456,38 +3263,22 @@ class ValOpsFS extends ValOps {

 // #region fs file path helpers
 getPatchesDir() {
-return
+return path__namespace["default"].join(this.rootDir, ValOpsFS.VAL_DIR, "patches");
 }
 getFullPatchDir(patchDir) {
-return
+return path__namespace["default"].join(this.getPatchesDir(), patchDir);
 }
-getBinaryFilePath(filePath, patchDir
-
-const res = core.Internal.remote.splitRemoteRef(filePath);
-if (res.status === "error") {
-throw new Error("Failed to split remote ref: " + filePath);
-}
-const actualFilePath = res.filePath;
-return fsPath__namespace["default"].join(this.getFullPatchDir(patchDir), "files", actualFilePath, fsPath__namespace["default"].basename(actualFilePath));
-}
-return fsPath__namespace["default"].join(this.getFullPatchDir(patchDir), "files", filePath, fsPath__namespace["default"].basename(filePath));
+getBinaryFilePath(filePath, patchDir) {
+return path__namespace["default"].join(this.getFullPatchDir(patchDir), "files", filePath, path__namespace["default"].basename(filePath));
 }
-getBinaryFileMetadataPath(filePath, patchDir
-
-const res = core.Internal.remote.splitRemoteRef(filePath);
-if (res.status === "error") {
-throw new Error("Failed to split remote ref (in metadata path): " + filePath);
-}
-const actualFilePath = res.filePath;
-return fsPath__namespace["default"].join(this.getFullPatchDir(patchDir), "files", actualFilePath, fsPath__namespace["default"].basename(actualFilePath));
-}
-return fsPath__namespace["default"].join(this.getFullPatchDir(patchDir), "files", filePath, "metadata.json");
+getBinaryFileMetadataPath(filePath, patchDir) {
+return path__namespace["default"].join(this.getFullPatchDir(patchDir), "files", filePath, "metadata.json");
 }
 getPatchFilePath(patchDir) {
-return
+return path__namespace["default"].join(this.getFullPatchDir(patchDir), "patch.json");
 }
 getPatchBaseFile(patchDir) {
-return
+return path__namespace["default"].join(this.getFullPatchDir(patchDir), "base.json");
 }
 }
 class FSOpsHost {
@@ -3520,28 +3311,24 @@ class FSOpsHost {
 return fs__default["default"].readFileSync(path, "utf-8");
 }
 writeUf8File(path, data) {
-fs__default["default"].mkdirSync(
+fs__default["default"].mkdirSync(path__namespace["default"].dirname(path), {
 recursive: true
 });
 fs__default["default"].writeFileSync(path, data, "utf-8");
 }
 tryWriteUf8File(path, data) {
 try {
-const parentDir =
+const parentDir = path__namespace["default"].join(path__namespace["default"].dirname(path), "../");
 fs__default["default"].mkdirSync(parentDir, {
 recursive: true
 });
 // Make the parent dir separately. This is because we need mkdir to throw
 // if the directory already exists. If we use recursive: true, it doesn't
-fs__default["default"].mkdirSync(
+fs__default["default"].mkdirSync(path__namespace["default"].dirname(path), {
 recursive: false
 });
 } catch (e) {
-
-type: "error",
-errorType: "dir-already-exists",
-error: e
-};
+// ignore
 }
 try {
 fs__default["default"].writeFileSync(path, data, "utf-8");
|
|
3557
3344
|
};
|
3558
3345
|
}
|
3559
3346
|
writeBinaryFile(path, data) {
|
3560
|
-
fs__default["default"].mkdirSync(
|
3347
|
+
fs__default["default"].mkdirSync(path__namespace["default"].dirname(path), {
|
3561
3348
|
recursive: true
|
3562
3349
|
});
|
3563
3350
|
fs__default["default"].writeFileSync(path, new Uint8Array(data), "base64url");
|
3564
3351
|
}
|
3565
3352
|
copyFile(from, to) {
|
3566
|
-
fs__default["default"].mkdirSync(
|
3353
|
+
fs__default["default"].mkdirSync(path__namespace["default"].dirname(to), {
|
3567
3354
|
recursive: true
|
3568
3355
|
});
|
3569
3356
|
fs__default["default"].copyFileSync(from, to);
|
@@ -3707,9 +3494,93 @@ class ValOpsHttp extends ValOps {
 async onInit() {
 // TODO: unused for now. Implement or remove
 }
-async
+async getPresignedAuthNonce(profileId, corsOrigin) {
 try {
 var _res$headers$get;
+const res = await fetch(`${this.contentUrl}/v1/${this.project}/presigned-auth-nonce`, {
+method: "POST",
+headers: {
+...this.authHeaders,
+"Content-Type": "application/json"
+},
+body: JSON.stringify({
+profileId,
+corsOrigin
+})
+});
+if (res.ok) {
+const json = await res.json();
+const parsed = zod.z.object({
+nonce: zod.z.string(),
+expiresAt: zod.z.string()
+}).safeParse(json);
+if (parsed.success) {
+const {
+nonce
+} = parsed.data;
+return {
+status: "success",
+data: {
+nonce,
+baseUrl: `${this.contentUrl}/v1/${this.project}`
+}
+};
+} else {
+console.error("Could not parse presigned auth nonce response. Error: " + zodValidationError.fromError(parsed.error));
+return {
+status: "error",
+statusCode: 500,
+error: {
+message: "Could not get presigned auth nonce. The response that Val got from the server was not in the expected format. You might be running on an old version, or it might be a transient error or a configuration issue. Please try again later."
+}
+};
+}
+}
+if (res.status === 401) {
+return {
+statusCode: 401,
+status: "error",
+error: {
+message: "Could not get presigned auth nonce. Although your user is authorized, the application has authorization issues. Contact the developers on your team and ask them to verify the api keys."
+}
+};
+}
+if ((_res$headers$get = res.headers.get("Content-Type")) !== null && _res$headers$get !== void 0 && _res$headers$get.includes("application/json")) {
+const json = await res.json();
+if (json.message) {
+console.error("Presigned auth nonce error:", json.message);
+return {
+status: "error",
+statusCode: 500,
+error: {
+message: json.message
+}
+};
+}
+}
+const unknownErrorMessage = `Could not get presigned auth nonce. HTTP error: ${res.status} ${res.statusText}`;
+console.error(unknownErrorMessage);
+return {
+status: "error",
+statusCode: 500,
+error: {
+message: unknownErrorMessage
+}
+};
+} catch (e) {
+console.error("Could not get presigned auth nonce (connection error?):", e);
+return {
+status: "error",
+statusCode: 500,
+error: {
+message: `Could not get presigned auth nonce. Error: ${e instanceof Error ? e.message : JSON.stringify(e)}`
+}
+};
+}
+}
+async getCommitSummary(preparedCommit) {
+try {
+var _res$headers$get2;
 const res = await fetch(`${this.contentUrl}/v1/${this.project}/commit-summary`, {
 method: "POST",
 headers: {
@@ -3745,7 +3616,7 @@ class ValOpsHttp extends ValOps {
 };
 }
 const unknownErrorMessage = `Could not get summary. HTTP error: ${res.status} ${res.statusText}`;
-if ((_res$headers$
+if ((_res$headers$get2 = res.headers.get("Content-Type")) !== null && _res$headers$get2 !== void 0 && _res$headers$get2.includes("application/json")) {
 const json = await res.json();
 if (json.message) {
 console.error("Summary error:", json.message);
|
|
4073
3944
|
coreVersion: core.Internal.VERSION.core
|
4074
3945
|
})
|
4075
3946
|
}).then(async res => {
|
4076
|
-
var _res$headers$
|
3947
|
+
var _res$headers$get3;
|
4077
3948
|
if (res.ok) {
|
4078
3949
|
const parsed = SavePatchResponse.safeParse(await res.json());
|
4079
3950
|
if (parsed.success) {
|
@@ -4092,7 +3963,7 @@ class ValOpsHttp extends ValOps {
|
|
4092
3963
|
message: "Conflict: " + (await res.text())
|
4093
3964
|
});
|
4094
3965
|
}
|
4095
|
-
if ((_res$headers$
|
3966
|
+
if ((_res$headers$get3 = res.headers.get("Content-Type")) !== null && _res$headers$get3 !== void 0 && _res$headers$get3.includes("application/json")) {
|
4096
3967
|
const json = await res.json();
|
4097
3968
|
return fp.result.err({
|
4098
3969
|
errorType: "other",
|
@@ -4110,23 +3981,12 @@ class ValOpsHttp extends ValOps {
|
|
4110
3981
|
});
|
4111
3982
|
});
|
4112
3983
|
}
|
4113
|
-
|
4114
|
-
|
4115
|
-
|
4116
|
-
|
4117
|
-
|
4118
|
-
|
4119
|
-
if (splitRemoteRefDataRes.status === "error") {
|
4120
|
-
return {
|
4121
|
-
error: {
|
4122
|
-
message: `Could not split remote ref: ${splitRemoteRefDataRes.error}`
|
4123
|
-
}
|
4124
|
-
};
|
4125
|
-
}
|
4126
|
-
filePath = "/" + splitRemoteRefDataRes.filePath;
|
4127
|
-
} else {
|
4128
|
-
filePath = filePathOrRef;
|
4129
|
-
}
|
3984
|
+
|
3985
|
+
/**
|
3986
|
+
* @deprecated For HTTP ops use direct upload instead (i.e. client should upload the files directly) since hosting platforms (Vercel) might have low limits on the size of the request body.
|
3987
|
+
*/
|
3988
|
+
async saveBase64EncodedBinaryFileFromPatch(filePathOrRef, parentRef, patchId, data, type, metadata) {
|
3989
|
+
const filePath = filePathOrRef;
|
4130
3990
|
return fetch(`${this.contentUrl}/v1/${this.project}/patches/${patchId}/files`, {
|
4131
3991
|
method: "POST",
|
4132
3992
|
headers: {
|
@@ -4135,10 +3995,11 @@ class ValOpsHttp extends ValOps {
 },
 body: JSON.stringify({
 filePath,
+parentRef,
+// Not currently used
 data,
 type,
-metadata
-remote
+metadata
 })
 }).then(async res => {
 if (res.ok) {
@@ -4385,7 +4246,7 @@ class ValOpsHttp extends ValOps {
 });
 }
 async getCommitMessage(preparedCommit) {
-var _res$headers$
+var _res$headers$get4;
 const res = await fetch(`${this.contentUrl}/v1/${this.project}/commit-summary`, {
 method: "POST",
 headers: {
@@ -4403,7 +4264,7 @@ class ValOpsHttp extends ValOps {
 commitSummary: json.commitSummary
 };
 }
-if ((_res$headers$
+if ((_res$headers$get4 = res.headers.get("Content-Type")) !== null && _res$headers$get4 !== void 0 && _res$headers$get4.includes("application/json")) {
 const json = await res.json();
 return {
 error: {
@@ -4419,7 +4280,7 @@ class ValOpsHttp extends ValOps {
 }
 async commit(prepared, message, committer, filesDirectory, newBranch) {
 try {
-var _res$headers$
+var _res$headers$get5;
 const existingBranch = this.branch;
 const res = await fetch(`${this.contentUrl}/v1/${this.project}/commit`, {
 method: "POST",
@@ -4456,7 +4317,7 @@ class ValOpsHttp extends ValOps {
 }
 };
 }
-if ((_res$headers$
+if ((_res$headers$get5 = res.headers.get("Content-Type")) !== null && _res$headers$get5 !== void 0 && _res$headers$get5.includes("application/json")) {
 const json = await res.json();
 if (json.isNotFastForward) {
 return {
@@ -4488,7 +4349,7 @@ class ValOpsHttp extends ValOps {

 // #region profiles
 async getProfiles() {
-var _res$headers$
+var _res$headers$get6;
 const res = await fetch(`${this.contentUrl}/v1/${this.project}/profiles`, {
 headers: {
 ...this.authHeaders,
@@ -4503,7 +4364,7 @@ class ValOpsHttp extends ValOps {
 }
 return parsed.data.profiles;
 }
-if ((_res$headers$
+if ((_res$headers$get6 = res.headers.get("Content-Type")) !== null && _res$headers$get6 !== void 0 && _res$headers$get6.includes("application/json")) {
 const json = await res.json();
 throw Error(`Could not get profiles (status: ${res.status}): ${"message" in json ? json.message : "Unknown error"}`);
 }
@@ -4532,7 +4393,7 @@ async function getSettings(projectName, auth) {
 if (response.status === 404) {
 return {
 success: false,
-message: `Project '${projectName}' not found: that the name of the project is correct and that you have access to it.`
+message: `Project '${projectName}' not found: verify that the name of the project is correct and that you have access to it.`
 };
 }
 if (response.status !== 200) {
@@ -4562,7 +4423,7 @@ async function getSettings(projectName, auth) {
 }

 function getPersonalAccessTokenPath(root) {
-return
+return path__namespace["default"].join(path__namespace["default"].resolve(root), ".val", "pat.json");
 }
 function parsePersonalAccessTokenFile(content) {
 if (!content) {
@@ -4840,7 +4701,7 @@ const ValServer = (valModules, options, callbacks) => {
 };
 }
 const fs = await Promise.resolve().then(function () { return /*#__PURE__*/_interopNamespace(require('fs')); });
-const patPath = getPersonalAccessTokenPath(
+const patPath = getPersonalAccessTokenPath(path__namespace["default"].join(process.cwd()));
 let patFile;
 try {
 patFile = await fs.promises.readFile(patPath, "utf-8");
@@ -5279,12 +5140,12 @@ const ValServer = (valModules, options, callbacks) => {
 const remoteFileAuth = remoteFileAuthRes.json.remoteFileAuth;
 const settingsRes = await getSettings(options.project, remoteFileAuth);
 if (!settingsRes.success) {
-console.warn("Could not get
+console.warn("Could not get remote files settings: " + settingsRes.message);
 return {
 status: 400,
 json: {
 errorCode: "error-could-not-get-settings",
-message: `Could not get settings
+message: `Could not get remote files settings: ${settingsRes.message}`
 }
 };
 }
@@ -5366,6 +5227,105 @@ const ValServer = (valModules, options, callbacks) => {
 };
 }
 },
+"/upload/patches": {
+POST: async req => {
+if (serverOps instanceof ValOpsHttp) {
+return {
+status: 400,
+json: {
+message: "Do not use this endpoint in HTTP mode"
+}
+};
+}
+const pathParts = req.path.split("/");
+const patchId = pathParts[1];
+const isValidPatchId = patchId.length === 36;
+const isValidEndpoint = pathParts[0] === "" && isValidPatchId && pathParts[2] === "files";
+if (!isValidEndpoint) {
+return {
+status: 400,
+json: {
+message: "Invalid endpoint"
+}
+};
+}
+const {
+filePath,
+parentRef,
+type,
+data,
+metadata
+} = req.body;
+const saveRes = await serverOps.saveBase64EncodedBinaryFileFromPatch(filePath, parentRef, patchId, data, type, metadata);
+if (saveRes.error) {
+return {
+status: 400,
+json: {
+message: saveRes.error.message
+}
+};
+}
+return {
+status: 200,
+json: {
+filePath,
+patchId
+}
+};
+}
+},
+"/direct-file-upload-settings": {
+POST: async req => {
+const cookies = req.cookies;
+const auth = getAuth(cookies);
+if (auth.error) {
+return {
+status: 401,
+json: {
+message: auth.error
+}
+};
+}
+if (serverOps instanceof ValOpsFS) {
+// In FS mode we do not use the remote server at all and just return an url that points to this server
+// which has an endpoint that handles this
+// A bit hacky perhaps, but we want to have as similar semantics as possible in client code when it comes to FS / HTTP
+const host = `/api/val`;
+return {
+status: 200,
+json: {
+nonce: null,
+baseUrl: `${host}/upload` // NOTE: this is the /upload/patches endpoint - the client will add /patches/:patchId/files to this and post to it
+}
+};
+}
+const httpOps = serverOps;
+const profileId = "id" in auth && auth.id ? auth.id : undefined;
+if (!profileId) {
+return {
+status: 401,
+json: {
+message: "Unauthorized"
+}
+};
+}
+const corsOrigin = "*"; // TODO: add cors origin
+const presignedAuthNonce = await httpOps.getPresignedAuthNonce(profileId, corsOrigin);
+if (presignedAuthNonce.status === "error") {
+return {
+status: presignedAuthNonce.statusCode,
+json: presignedAuthNonce.error
+};
+}
+return {
+status: 200,
+json: {
+nonce: presignedAuthNonce.data.nonce,
+baseUrl: presignedAuthNonce.data.baseUrl
+}
+};
+}
+},
 //#region patches
 "/patches": {
 PUT: async req => {
@@ -5407,7 +5367,7 @@ const ValServer = (valModules, options, callbacks) => {
 status: 400,
 json: {
 type: "patch-error",
-message:
+message: createPatchRes.error.error.message,
 errors: {
 [patch.path]: [{
 error: {
@@ -5869,8 +5829,22 @@ const ValServer = (valModules, options, callbacks) => {
 return remoteFileAuthRes;
 }
 const remoteFileAuth = (_remoteFileAuthRes = remoteFileAuthRes) === null || _remoteFileAuthRes === void 0 || (_remoteFileAuthRes = _remoteFileAuthRes.json) === null || _remoteFileAuthRes === void 0 ? void 0 : _remoteFileAuthRes.remoteFileAuth;
+const saveRes = await serverOps.saveOrUploadFiles(preparedCommit, mode, remoteFileAuth);
+if (Object.keys(saveRes.errors).length > 0) {
+console.error("Val: Failed to save files", saveRes.errors);
+return {
+status: 400,
+json: {
+message: "Failed to save files",
+details: Object.entries(saveRes.errors).map(([key, error]) => {
+return {
+message: `Got error: ${error} in ${key}`
+};
+})
+}
+};
+}
 const deleteRes = await serverOps.deleteAllPatches();
-await serverOps.saveOrUploadFiles(preparedCommit, mode, remoteFileAuth);
 if (deleteRes.error) {
 console.error(`Val got an error while cleaning up patches after publish: ${deleteRes.error.message}`);
 }
@@ -6363,7 +6337,7 @@ async function initHandlerOptions(route, opts, config) {
 // TODO: remove
 async function safeReadGit(cwd) {
 async function findGitHead(currentDir, depth) {
-const gitHeadPath =
+const gitHeadPath = path__namespace.join(currentDir, ".git", "HEAD");
 if (depth > 1000) {
 console.error(`Reached max depth while scanning for .git folder. Current working dir: ${cwd}.`);
 return {
@@ -6387,7 +6361,7 @@ async function safeReadGit(cwd) {
 };
 }
 } catch (error) {
-const parentDir =
+const parentDir = path__namespace.dirname(currentDir);

 // We've reached the root directory
 if (parentDir === currentDir) {
@@ -6411,7 +6385,7 @@ async function safeReadGit(cwd) {
 }
 async function readCommit(gitDir, branchName) {
 try {
-return (await fs.promises.readFile(
+return (await fs.promises.readFile(path__namespace.join(gitDir, ".git", "refs", "heads", branchName), "utf-8")).trim();
 } catch (err) {
 return undefined;
 }
@@ -6687,10 +6661,10 @@ class ValFSHost {
 return this.currentDirectory;
 }
 getCanonicalFileName(fileName) {
-if (
-return
+if (path__namespace["default"].isAbsolute(fileName)) {
+return path__namespace["default"].normalize(fileName);
 }
-return
+return path__namespace["default"].resolve(this.getCurrentDirectory(), fileName);
 }
 fileExists(fileName) {
 return this.valFS.fileExists(fileName);
@@ -6857,11 +6831,11 @@ async function checkRemoteRef(remoteHost, ref, projectRoot, schema, metadata) {
 }
 function getCachedRemoteFileDir(projectRoot) {
 // store in projectRoot/.val/remote-file-cache
-const remoteFileCacheDir =
+const remoteFileCacheDir = path__namespace["default"].join(projectRoot, ".val", "remote-file-cache");
 return remoteFileCacheDir;
 }
 function getCachedRemoteFilePath(fileExt, currentFileHash, remoteFileCacheDir) {
-const remoteFilePath =
+const remoteFilePath = path__namespace["default"].join(remoteFileCacheDir, currentFileHash + "." + fileExt);
 return remoteFilePath;
 }
 async function getFileBufferFromRemote(ref, fileExt, currentFileHash, projectRoot) {
@@ -7144,8 +7118,8 @@ async function createFixPatch(config, apply, sourcePath, validationError, remote
 });
 continue;
 }
-const absoluteFilePath =
-await fs__default["default"].promises.mkdir(
+const absoluteFilePath = path__namespace["default"].join(config.projectRoot, splitRemoteRefDataRes.filePath);
+await fs__default["default"].promises.mkdir(path__namespace["default"].dirname(absoluteFilePath), {
 recursive: true
 });
 const res = await downloadFileFromRemote(url, absoluteFilePath);
@@ -7306,7 +7280,7 @@ async function getImageMetadata(projectRoot, validationError) {
 // TODO:
 throw Error("Cannot fix image without a file reference");
 }
-const filename =
+const filename = path__namespace["default"].join(projectRoot, fileRef);
 const buffer = fs__default["default"].readFileSync(filename);
 return extractImageMetadata(filename, buffer);
 }
@@ -7316,7 +7290,7 @@ async function getFileMetadata(projectRoot, validationError) {
 // TODO:
 throw Error("Cannot fix file without a file reference");
 }
-const filename =
+const filename = path__namespace["default"].join(projectRoot, fileRef);
 fs__default["default"].readFileSync(filename);
 return extractFileMetadata(fileRef);
 }
|