@valbuild/server 0.75.4 → 0.76.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/valbuild-server.cjs.dev.js +279 -305
- package/dist/valbuild-server.cjs.prod.js +279 -305
- package/dist/valbuild-server.esm.js +278 -304
- package/package.json +4 -4
@@ -1,10 +1,10 @@
 import { newQuickJSWASMModule } from 'quickjs-emscripten';
 import ts from 'typescript';
 import { result, pipe } from '@valbuild/core/fp';
-import { FILE_REF_PROP, FILE_REF_SUBTYPE_TAG, VAL_EXTENSION, derefPatch, Internal, Schema, ImageSchema,
+import { FILE_REF_PROP, FILE_REF_SUBTYPE_TAG, VAL_EXTENSION, derefPatch, Internal, Schema, ImageSchema, DEFAULT_CONTENT_HOST } from '@valbuild/core';
 import { deepEqual, isNotRoot, PatchError, parseAndValidateArrayIndex, applyPatch, JSONOps, deepClone, sourceToPatchPath } from '@valbuild/core/patch';
-import * as
-import
+import * as path from 'path';
+import path__default from 'path';
 import fs, { promises } from 'fs';
 import { transform } from 'sucrase';
 import { VAL_CSS_PATH, VAL_APP_ID, VAL_OVERLAY_ID } from '@valbuild/ui';
@@ -788,7 +788,7 @@ class TSOps {
 }

 function getSyntheticContainingPath(rootDir) {
-return
+return path__default.join(rootDir, "<val>"); // TODO: this is the synthetic path used when evaluating / patching modules. I am not sure <val> is the best choice: val.ts / js better? But that is weird too. At least now it is clear(er) that it is indeed a synthetic file (i.e. not an actual file)
 }

 const ops = new TSOps(document => {
@@ -959,8 +959,8 @@ globalThis.valModule = {
 };

 const getCompilerOptions = (rootDir, parseConfigHost) => {
-const tsConfigPath =
-const jsConfigPath =
+const tsConfigPath = path__default.resolve(rootDir, "tsconfig.json");
+const jsConfigPath = path__default.resolve(rootDir, "jsconfig.json");
 let configFilePath;
 if (parseConfigHost.fileExists(jsConfigPath)) {
 configFilePath = jsConfigPath;
@@ -991,7 +991,7 @@ class ValSourceFileHandler {
 constructor(projectRoot, compilerOptions, host = {
 ...ts.sys,
 writeFile: (fileName, data, encoding) => {
-fs.mkdirSync(
+fs.mkdirSync(path__default.dirname(fileName), {
 recursive: true
 });
 fs.writeFileSync(fileName, typeof data === "string" ? data : new Uint8Array(data), encoding);
@@ -1018,7 +1018,7 @@ class ValSourceFileHandler {
 this.host.writeFile(filePath, content, encoding);
 }
 resolveSourceModulePath(containingFilePath, requestedModuleName) {
-const resolutionRes = ts.resolveModuleName(requestedModuleName,
+const resolutionRes = ts.resolveModuleName(requestedModuleName, path__default.isAbsolute(containingFilePath) ? containingFilePath : path__default.resolve(this.projectRoot, containingFilePath), this.compilerOptions, this.host, undefined, undefined, ts.ModuleKind.ESNext);
 const resolvedModule = resolutionRes.resolvedModule;
 if (!resolvedModule) {
 throw Error(`Could not resolve module "${requestedModuleName}", base: "${containingFilePath}": No resolved modules returned: ${JSON.stringify(resolutionRes)}`);
@@ -1041,7 +1041,7 @@ class ValModuleLoader {
 sourceFileHandler, host = {
 ...ts.sys,
 writeFile: (fileName, data, encoding) => {
-fs.mkdirSync(
+fs.mkdirSync(path__default.dirname(fileName), {
 recursive: true
 });
 fs.writeFileSync(fileName, typeof data === "string" ? data : new Uint8Array(data), encoding);
@@ -1334,7 +1334,7 @@ export default new Proxy({}, {
 async function createService(projectRoot, opts, host = {
 ...ts.sys,
 writeFile: (fileName, data, encoding) => {
-fs.mkdirSync(
+fs.mkdirSync(path__default.dirname(fileName), {
 recursive: true
 });
 fs.writeFileSync(fileName, typeof data === "string" ? data : new Uint8Array(data), encoding);
@@ -2215,94 +2215,6 @@ class ValOps {

 // #region createPatch
 async createPatch(path, patch, patchId, parentRef, authorId) {
-const initTree = await this.initSources();
-const schemas = initTree.schemas;
-const moduleErrors = initTree.moduleErrors;
-let sources = initTree.sources;
-if (parentRef.type !== "head") {
-// There's room for some optimizations here: we could do this once, then re-use every time we create a patch, then again we only create one patch at a time
-const patchOps = await this.fetchPatches({
-excludePatchOps: false
-});
-const patchAnalysis = this.analyzePatches(patchOps.patches);
-const tree = await this.getSources({
-...patchAnalysis,
-...patchOps
-});
-sources = {
-...sources,
-...tree.sources
-};
-}
-const source = sources[path];
-const schema = schemas[path];
-const moduleError = moduleErrors.find(e => e.path === path);
-if (moduleError) {
-console.error(`Cannot patch. Module at path: '${path}' has fatal errors: "${moduleError.message}"`);
-return result.err({
-errorType: "other",
-error: {
-message: `Cannot patch. Module at path: '${path}' has fatal errors: ` + moduleErrors.map(m => `"${m.message}"`).join(" and ")
-}
-});
-}
-if (source === undefined) {
-console.error(`Cannot patch. Module source at path: '${path}' does not exist`);
-return result.err({
-errorType: "other",
-error: {
-message: `Cannot patch. Module source at path: '${path}' does not exist`
-}
-});
-}
-if (!schema) {
-console.error(`Cannot patch. Module schema at path: '${path}' does not exist`);
-return result.err({
-errorType: "other",
-error: {
-message: `Cannot patch. Module schema at path: '${path}' does not exist`
-}
-});
-}
-const sourceFileOps = [];
-const files = {};
-for (const op of patch) {
-if (op.op !== "file") {
-sourceFileOps.push(op);
-} else {
-const {
-value,
-filePath
-} = op;
-if (files[filePath]) {
-console.error(`Cannot have multiple files with same path in same patch. Path: ${filePath}`);
-files[filePath] = {
-error: new PatchError("Cannot have multiple files with same path in same patch")
-};
-} else if (typeof value !== "string") {
-console.error(`Value is not a string. Path: ${filePath}. Value: ${value}`);
-files[filePath] = {
-error: new PatchError("Value is not a string")
-};
-} else {
-const sha256 = Internal.getSHA256Hash(textEncoder$2.encode(value));
-files[filePath] = {
-value,
-sha256,
-path: op.path,
-remote: op.remote
-};
-sourceFileOps.push({
-op: "file",
-path: op.path,
-filePath,
-nestedFilePath: op.nestedFilePath,
-value: sha256,
-remote: op.remote
-});
-}
-}
-}
 const saveRes = await this.saveSourceFilePatch(path, patch, patchId, parentRef, authorId);
 if (result.isErr(saveRes)) {
 console.error(`Could not save source patch at path: '${path}'. Error: ${saveRes.error.errorType === "other" ? saveRes.error.message : saveRes.error.errorType}`);
@@ -2316,95 +2228,8 @@ class ValOps {
 error: saveRes.error
 });
 }
-const saveFileRes = await Promise.all(Object.entries(files).map(async ([filePath, data]) => {
-if (data.error) {
-return {
-filePath,
-error: data.error
-};
-} else {
-var _lastRes;
-let type;
-const modulePath = Internal.patchPathToModulePath(data.path);
-try {
-const {
-schema: schemaAtPath
-} = Internal.resolvePath(modulePath, source, schema);
-type = schemaAtPath instanceof ImageSchema || schemaAtPath instanceof RichTextSchema // if it's a rich text schema, we assume it's an image - hope this assumption holds!
-? "image" : schemaAtPath instanceof FileSchema ? "file" : schemaAtPath.serialize().type;
-} catch (e) {
-if (e instanceof Error) {
-console.error(`Could not resolve file type at: ${modulePath}. Error: ${e.message}`);
-return {
-filePath,
-error: new PatchError(`Could not resolve file type at: ${modulePath}. Error: ${e.message}`)
-};
-}
-console.error(`Could not resolve file type at: ${modulePath}. Unknown error.`);
-return {
-filePath,
-error: new PatchError(`Could not resolve file type at: ${modulePath}. Unknown error.`)
-};
-}
-if (type !== "image" && type !== "file") {
-console.error("Unknown file type (resolved from schema): " + type);
-return {
-filePath,
-error: new PatchError("Unknown file type (resolved from schema): " + type)
-};
-}
-const mimeType = getMimeTypeFromBase64(data.value);
-if (!mimeType) {
-console.error("Could not get mimeType from base 64 encoded value");
-return {
-filePath,
-error: new PatchError("Could not get mimeType from base 64 encoded value. First chars were: " + data.value.slice(0, 20))
-};
-}
-const buffer = bufferFromDataUrl(data.value);
-if (!buffer) {
-console.error("Could not create buffer from base 64 encoded value");
-return {
-filePath,
-error: new PatchError("Could not create buffer from base 64 encoded value")
-};
-}
-const metadataOps = createMetadataFromBuffer(type, mimeType, buffer);
-if (metadataOps.errors) {
-console.error(`Could not get metadata. Errors: ${metadataOps.errors.map(error => error.message).join(", ")}`);
-return {
-filePath,
-error: new PatchError(`Could not get metadata. Errors: ${metadataOps.errors.map(error => error.message).join(", ")}`)
-};
-}
-const MaxRetries = 3;
-let lastRes;
-for (let i = 0; i < MaxRetries; i++) {
-lastRes = await this.saveBase64EncodedBinaryFileFromPatch(filePath, parentRef, patchId, data.value, type, metadataOps.metadata, data.remote);
-if (!lastRes.error) {
-return {
-filePath
-};
-}
-}
-return {
-filePath,
-error: new PatchError(((_lastRes = lastRes) === null || _lastRes === void 0 || (_lastRes = _lastRes.error) === null || _lastRes === void 0 ? void 0 : _lastRes.message) || "Unexpectedly could not save patch file")
-};
-}
-}));
-const errors = saveFileRes.filter(f => !!f.error);
-if (errors.length > 0) {
-return result.err({
-errorType: "other",
-error: {
-message: "Could not save patch: " + errors.map(e => e.error.message).join(", ")
-}
-});
-}
 return result.ok({
 patchId,
-files: saveFileRes,
 createdAt: new Date().toISOString()
 });
 }
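Editorial note: with the two createPatch hunks above, the method no longer resolves schemas or writes binary files itself; file handling moves to the new upload endpoints further down in this diff, and createPatch only persists the source-file patch. A rough sketch of the shape it now resolves to, inferred from this diff (the type names below are hypothetical, not taken from the package's published typings):

// Sketch only: names are hypothetical, values follow the hunks above.
type CreatePatchOk = {
  patchId: string;
  createdAt: string; // new Date().toISOString()
};
type CreatePatchErr = {
  errorType: string; // e.g. "other"; saveSourceFilePatch errors are forwarded as-is
  error: { message?: string };
};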
@@ -2480,17 +2305,6 @@ function createMetadataFromBuffer(type, mimeType, buffer) {
 metadata
 };
 }
-const base64DataAttr = "data:";
-function getMimeTypeFromBase64(content) {
-const dataIndex = content.indexOf(base64DataAttr);
-const base64Index = content.indexOf(";base64,");
-if (dataIndex > -1 || base64Index > -1) {
-const mimeType = content.slice(dataIndex + base64DataAttr.length, base64Index);
-const normalizedMimeType = mimeType === "image/jpg" ? "image/jpeg" : mimeType;
-return normalizedMimeType;
-}
-return null;
-}
 function bufferFromDataUrl(dataUrl) {
 let base64Data;
 const base64Index = dataUrl.indexOf(";base64,");
@@ -2654,7 +2468,7 @@ class ValOpsFS extends ValOps {
 const mtimeInDir = {};
 if (fs.existsSync(dir)) {
 for (const file of fs.readdirSync(dir)) {
-mtimeInDir[file] = fs.statSync(
+mtimeInDir[file] = fs.statSync(path__default.join(dir, file)).mtime.getTime();
 }
 }
 return new Promise(resolve => {
@@ -2667,7 +2481,7 @@ class ValOpsFS extends ValOps {
 resolve("request-again");
 }
 for (const file of fs.readdirSync(dir)) {
-const mtime = fs.statSync(
+const mtime = fs.statSync(path__default.join(dir, file)).mtime.getTime();
 if (mtime !== mtimeInDir[file]) {
 resolve("request-again");
 }
@@ -2728,7 +2542,7 @@ class ValOpsFS extends ValOps {
 patchesDirHandle = handle;
 }),
 // we poll the files that Val depends on for changes
-disableFilePolling ? new Promise(() => {}) : didFilesChangeUsingPolling([
+disableFilePolling ? new Promise(() => {}) : didFilesChangeUsingPolling([path__default.join(this.rootDir, "val.config.ts"), path__default.join(this.rootDir, "val.modules.ts"), path__default.join(this.rootDir, "val.config.js"), path__default.join(this.rootDir, "val.modules.js"), ...moduleFilePaths.map(p => path__default.join(this.rootDir, p))], statFilePollingInterval, handle => {
 valFilesIntervalHandle = handle;
 }), new Promise(resolve => {
 fsWatcher = fs.watch(this.rootDir, {
@@ -2786,7 +2600,7 @@ class ValOpsFS extends ValOps {
 }
 const patches = {};
 const errors = [];
-const parsedUnsortedFsPatches = patchJsonFiles.map(file =>
+const parsedUnsortedFsPatches = patchJsonFiles.map(file => path__default.basename(path__default.dirname(file))).map(patchDir => [patchDir, this.parseJsonFile(this.getPatchFilePath(patchDir), FSPatch), this.host.fileExists(this.getPatchBaseFile(patchDir)) ? this.parseJsonFile(this.getPatchBaseFile(patchDir), FSPatchBase) : undefined]);
 parsedUnsortedFsPatches.forEach(([dir, parsedPatch, parsedBase]) => {
 if (parsedPatch.error) {
 errors.push({
@@ -2997,9 +2811,7 @@ class ValOpsFS extends ValOps {
 }
 const writeRes = this.host.tryWriteUf8File(this.getPatchFilePath(patchDir), JSON.stringify(data));
 if (writeRes.type === "error") {
-return
-errorType: "patch-head-conflict"
-}) : result.err({
+return result.err({
 errorType: "other",
 error: writeRes.error,
 message: "Failed to write patch file"
@@ -3024,7 +2836,7 @@ class ValOpsFS extends ValOps {
 }
 }
 async getSourceFile(path) {
-const filePath =
+const filePath = path__default.join(this.rootDir, path);
 if (!this.host.fileExists(filePath)) {
 return {
 error: {
@@ -3037,7 +2849,7 @@ class ValOpsFS extends ValOps {
 };
 }
 async saveSourceFile(path, data) {
-const filePath =
+const filePath = path__default.join(this.rootDir, ...path.split("/"));
 try {
 this.host.writeUf8File(filePath, data);
 return {
@@ -3058,10 +2870,10 @@ class ValOpsFS extends ValOps {
 };
 }
 }
-async saveBase64EncodedBinaryFileFromPatch(filePath, parentRef, patchId, data, _type, metadata
+async saveBase64EncodedBinaryFileFromPatch(filePath, parentRef, patchId, data, _type, metadata) {
 const patchDir = this.getParentPatchIdFromParentRef(parentRef);
-const patchFilePath = this.getBinaryFilePath(filePath, patchDir
-const metadataFilePath = this.getBinaryFileMetadataPath(filePath, patchDir
+const patchFilePath = this.getBinaryFilePath(filePath, patchDir);
+const metadataFilePath = this.getBinaryFileMetadataPath(filePath, patchDir);
 try {
 const buffer = bufferFromDataUrl(data);
 if (!buffer) {
@@ -3092,7 +2904,7 @@ class ValOpsFS extends ValOps {
 };
 }
 }
-async getBase64EncodedBinaryFileMetadataFromPatch(filePath, type, patchId
+async getBase64EncodedBinaryFileMetadataFromPatch(filePath, type, patchId) {
 const patchDirRes = await this.getParentPatchIdFromPatchId(patchId);
 if (result.isErr(patchDirRes)) {
 return {
@@ -3101,7 +2913,7 @@ class ValOpsFS extends ValOps {
 }]
 };
 }
-const metadataFilePath = this.getBinaryFileMetadataPath(filePath, patchDirRes.value
+const metadataFilePath = this.getBinaryFileMetadataPath(filePath, patchDirRes.value);
 if (!this.host.fileExists(metadataFilePath)) {
 return {
 errors: [{
@@ -3141,10 +2953,7 @@ class ValOpsFS extends ValOps {
 if (!result.isOk(patchDirRes)) {
 return null;
 }
-const absPath = this.getBinaryFilePath(filePath, patchDirRes.value
-// We save remote remote files using the filepath (so not the remote reference) and we also retrieve them using the filepath. Therefore remote is always false
-false // remote = false
-);
+const absPath = this.getBinaryFilePath(filePath, patchDirRes.value);
 if (!this.host.fileExists(absPath)) {
 return null;
 }
@@ -3168,7 +2977,7 @@ class ValOpsFS extends ValOps {
 }
 async deleteAllPatches() {
 const patchesCacheDir = this.getPatchesDir();
-const tmpDir =
+const tmpDir = path__default.join(this.rootDir, ValOpsFS.VAL_DIR, "patches-deleted-" + crypto.randomUUID());
 try {
 this.host.moveDir(patchesCacheDir, tmpDir);
 this.host.deleteDir(tmpDir);
@@ -3283,13 +3092,11 @@ class ValOpsFS extends ValOps {
 };
 continue;
 }
-console.log("Uploading remote file", ref);
 const res = await uploadRemoteFile(this.contentUrl, this.options.config.project, splitRemoteRefRes.bucket, splitRemoteRefRes.fileHash, getFileExt(splitRemoteRefRes.filePath), fileBuffer, auth);
 if (!res.success) {
 console.error("Failed to upload remote file", ref, res.error);
 throw new Error(`Failed to upload remote file: ${ref}. ${res.error}`);
 }
-console.log("Completed remote file", ref);
 uploadedRemoteRefs.push(ref);
 }
 }
@@ -3306,7 +3113,7 @@ class ValOpsFS extends ValOps {
 patchId
 }] of localFileDescriptors) {
 const filePath = ref;
-const absPath =
+const absPath = path__default.join(this.rootDir, ...filePath.split("/"));
 try {
 const patchDir = patchIdToPatchDirMap[patchId];
 if (!patchDir) {
@@ -3316,7 +3123,7 @@ class ValOpsFS extends ValOps {
 };
 continue;
 }
-this.host.copyFile(this.getBinaryFilePath(filePath, patchDir
+this.host.copyFile(this.getBinaryFilePath(filePath, patchDir), absPath);
 updatedFiles.push(absPath);
 } catch (err) {
 errors[absPath] = {
@@ -3326,7 +3133,7 @@ class ValOpsFS extends ValOps {
 }
 }
 for (const [filePath, data] of Object.entries(preparedCommit.patchedSourceFiles)) {
-const absPath =
+const absPath = path__default.join(this.rootDir, ...filePath.split("/"));
 try {
 this.host.writeUf8File(absPath, data);
 updatedFiles.push(absPath);
@@ -3366,7 +3173,7 @@ class ValOpsFS extends ValOps {
 };
 }
 async getBinaryFile(filePath) {
-const absPath =
+const absPath = path__default.join(this.rootDir, ...filePath.split("/"));
 if (!this.host.fileExists(absPath)) {
 return null;
 }
@@ -3387,7 +3194,7 @@ class ValOpsFS extends ValOps {
 if (!mimeType) {
 return {
 errors: [{
-message: `Could not guess mime type of file ext: ${
+message: `Could not guess mime type of file ext: ${path__default.extname(filePath)}`,
 filePath
 }]
 };
@@ -3425,38 +3232,22 @@ class ValOpsFS extends ValOps {

 // #region fs file path helpers
 getPatchesDir() {
-return
+return path__default.join(this.rootDir, ValOpsFS.VAL_DIR, "patches");
 }
 getFullPatchDir(patchDir) {
-return
+return path__default.join(this.getPatchesDir(), patchDir);
 }
-getBinaryFilePath(filePath, patchDir
-
-const res = Internal.remote.splitRemoteRef(filePath);
-if (res.status === "error") {
-throw new Error("Failed to split remote ref: " + filePath);
-}
-const actualFilePath = res.filePath;
-return fsPath__default.join(this.getFullPatchDir(patchDir), "files", actualFilePath, fsPath__default.basename(actualFilePath));
-}
-return fsPath__default.join(this.getFullPatchDir(patchDir), "files", filePath, fsPath__default.basename(filePath));
+getBinaryFilePath(filePath, patchDir) {
+return path__default.join(this.getFullPatchDir(patchDir), "files", filePath, path__default.basename(filePath));
 }
-getBinaryFileMetadataPath(filePath, patchDir
-
-const res = Internal.remote.splitRemoteRef(filePath);
-if (res.status === "error") {
-throw new Error("Failed to split remote ref (in metadata path): " + filePath);
-}
-const actualFilePath = res.filePath;
-return fsPath__default.join(this.getFullPatchDir(patchDir), "files", actualFilePath, fsPath__default.basename(actualFilePath));
-}
-return fsPath__default.join(this.getFullPatchDir(patchDir), "files", filePath, "metadata.json");
+getBinaryFileMetadataPath(filePath, patchDir) {
+return path__default.join(this.getFullPatchDir(patchDir), "files", filePath, "metadata.json");
 }
 getPatchFilePath(patchDir) {
-return
+return path__default.join(this.getFullPatchDir(patchDir), "patch.json");
 }
 getPatchBaseFile(patchDir) {
-return
+return path__default.join(this.getFullPatchDir(patchDir), "base.json");
 }
 }
 class FSOpsHost {
@@ -3489,28 +3280,24 @@ class FSOpsHost {
 return fs.readFileSync(path, "utf-8");
 }
 writeUf8File(path, data) {
-fs.mkdirSync(
+fs.mkdirSync(path__default.dirname(path), {
 recursive: true
 });
 fs.writeFileSync(path, data, "utf-8");
 }
 tryWriteUf8File(path, data) {
 try {
-const parentDir =
+const parentDir = path__default.join(path__default.dirname(path), "../");
 fs.mkdirSync(parentDir, {
 recursive: true
 });
 // Make the parent dir separately. This is because we need mkdir to throw
 // if the directory already exists. If we use recursive: true, it doesn't
-fs.mkdirSync(
+fs.mkdirSync(path__default.dirname(path), {
 recursive: false
 });
 } catch (e) {
-
-type: "error",
-errorType: "dir-already-exists",
-error: e
-};
+// ignore
 }
 try {
 fs.writeFileSync(path, data, "utf-8");
@@ -3526,13 +3313,13 @@ class FSOpsHost {
 };
 }
 writeBinaryFile(path, data) {
-fs.mkdirSync(
+fs.mkdirSync(path__default.dirname(path), {
 recursive: true
 });
 fs.writeFileSync(path, new Uint8Array(data), "base64url");
 }
 copyFile(from, to) {
-fs.mkdirSync(
+fs.mkdirSync(path__default.dirname(to), {
 recursive: true
 });
 fs.copyFileSync(from, to);
@@ -3676,9 +3463,93 @@ class ValOpsHttp extends ValOps {
 async onInit() {
 // TODO: unused for now. Implement or remove
 }
-async
+async getPresignedAuthNonce(profileId, corsOrigin) {
 try {
 var _res$headers$get;
+const res = await fetch(`${this.contentUrl}/v1/${this.project}/presigned-auth-nonce`, {
+method: "POST",
+headers: {
+...this.authHeaders,
+"Content-Type": "application/json"
+},
+body: JSON.stringify({
+profileId,
+corsOrigin
+})
+});
+if (res.ok) {
+const json = await res.json();
+const parsed = z.object({
+nonce: z.string(),
+expiresAt: z.string()
+}).safeParse(json);
+if (parsed.success) {
+const {
+nonce
+} = parsed.data;
+return {
+status: "success",
+data: {
+nonce,
+baseUrl: `${this.contentUrl}/v1/${this.project}`
+}
+};
+} else {
+console.error("Could not parse presigned auth nonce response. Error: " + fromError(parsed.error));
+return {
+status: "error",
+statusCode: 500,
+error: {
+message: "Could not get presigned auth nonce. The response that Val got from the server was not in the expected format. You might be running on an old version, or it might be a transient error or a configuration issue. Please try again later."
+}
+};
+}
+}
+if (res.status === 401) {
+return {
+statusCode: 401,
+status: "error",
+error: {
+message: "Could not get presigned auth nonce. Although your user is authorized, the application has authorization issues. Contact the developers on your team and ask them to verify the api keys."
+}
+};
+}
+if ((_res$headers$get = res.headers.get("Content-Type")) !== null && _res$headers$get !== void 0 && _res$headers$get.includes("application/json")) {
+const json = await res.json();
+if (json.message) {
+console.error("Presigned auth nonce error:", json.message);
+return {
+status: "error",
+statusCode: 500,
+error: {
+message: json.message
+}
+};
+}
+}
+const unknownErrorMessage = `Could not get presigned auth nonce. HTTP error: ${res.status} ${res.statusText}`;
+console.error(unknownErrorMessage);
+return {
+status: "error",
+statusCode: 500,
+error: {
+message: unknownErrorMessage
+}
+};
+} catch (e) {
+console.error("Could not get presigned auth nonce (connection error?):", e);
+return {
+status: "error",
+statusCode: 500,
+error: {
+message: `Could not get presigned auth nonce. Error: ${e instanceof Error ? e.message : JSON.stringify(e)}`
+}
+};
+}
+}
+async getCommitSummary(preparedCommit) {
+try {
+var _res$headers$get2;
 const res = await fetch(`${this.contentUrl}/v1/${this.project}/commit-summary`, {
 method: "POST",
 headers: {
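Editorial note: the hunk above adds ValOpsHttp.getPresignedAuthNonce, which backs the new /direct-file-upload-settings endpoint later in this diff. A minimal TypeScript sketch of the same exchange, assuming only what the hunk shows (contentUrl, project, and authHeaders come from the surrounding class):

// Minimal sketch of the request made by getPresignedAuthNonce above; not the package's public API.
async function fetchPresignedAuthNonce(
  contentUrl: string,
  project: string,
  authHeaders: Record<string, string>,
  profileId: string,
  corsOrigin: string
): Promise<{ nonce: string; baseUrl: string }> {
  const res = await fetch(`${contentUrl}/v1/${project}/presigned-auth-nonce`, {
    method: "POST",
    headers: { ...authHeaders, "Content-Type": "application/json" },
    body: JSON.stringify({ profileId, corsOrigin }),
  });
  if (!res.ok) {
    throw new Error(`Could not get presigned auth nonce: ${res.status} ${res.statusText}`);
  }
  // The server responds with { nonce, expiresAt }; baseUrl is derived locally from contentUrl and project.
  const { nonce } = (await res.json()) as { nonce: string; expiresAt: string };
  return { nonce, baseUrl: `${contentUrl}/v1/${project}` };
}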
@@ -3714,7 +3585,7 @@ class ValOpsHttp extends ValOps {
 };
 }
 const unknownErrorMessage = `Could not get summary. HTTP error: ${res.status} ${res.statusText}`;
-if ((_res$headers$
+if ((_res$headers$get2 = res.headers.get("Content-Type")) !== null && _res$headers$get2 !== void 0 && _res$headers$get2.includes("application/json")) {
 const json = await res.json();
 if (json.message) {
 console.error("Summary error:", json.message);
@@ -4042,7 +3913,7 @@ class ValOpsHttp extends ValOps {
 coreVersion: Internal.VERSION.core
 })
 }).then(async res => {
-var _res$headers$
+var _res$headers$get3;
 if (res.ok) {
 const parsed = SavePatchResponse.safeParse(await res.json());
 if (parsed.success) {
@@ -4061,7 +3932,7 @@ class ValOpsHttp extends ValOps {
 message: "Conflict: " + (await res.text())
 });
 }
-if ((_res$headers$
+if ((_res$headers$get3 = res.headers.get("Content-Type")) !== null && _res$headers$get3 !== void 0 && _res$headers$get3.includes("application/json")) {
 const json = await res.json();
 return result.err({
 errorType: "other",
@@ -4079,23 +3950,12 @@ class ValOpsHttp extends ValOps {
 });
 });
 }
-
-
-
-
-
-
-if (splitRemoteRefDataRes.status === "error") {
-return {
-error: {
-message: `Could not split remote ref: ${splitRemoteRefDataRes.error}`
-}
-};
-}
-filePath = "/" + splitRemoteRefDataRes.filePath;
-} else {
-filePath = filePathOrRef;
-}
+
+/**
+* @deprecated For HTTP ops use direct upload instead (i.e. client should upload the files directly) since hosting platforms (Vercel) might have low limits on the size of the request body.
+*/
+async saveBase64EncodedBinaryFileFromPatch(filePathOrRef, parentRef, patchId, data, type, metadata) {
+const filePath = filePathOrRef;
 return fetch(`${this.contentUrl}/v1/${this.project}/patches/${patchId}/files`, {
 method: "POST",
 headers: {
@@ -4104,10 +3964,11 @@ class ValOpsHttp extends ValOps {
 },
 body: JSON.stringify({
 filePath,
+parentRef,
+// Not currently used
 data,
 type,
-metadata
-remote
+metadata
 })
 }).then(async res => {
 if (res.ok) {
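Editorial note: the two hunks above deprecate the HTTP implementation of saveBase64EncodedBinaryFileFromPatch in favour of direct uploads and change the JSON body it posts: the remote flag is dropped and parentRef is now included (marked "Not currently used" in the source). A sketch of the body shape, with field names taken from the hunk; the exact metadata shape is not visible in this diff:

// Hypothetical interface describing the body posted to /v1/{project}/patches/{patchId}/files above.
interface SaveBase64EncodedBinaryFileBody {
  filePath: string;
  parentRef: unknown; // newly added; commented "Not currently used" in the source
  data: string; // base64 data url
  type: string; // "image" or "file" in the callers shown elsewhere in this diff
  metadata?: Record<string, unknown>; // shape not shown in this diff
}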
@@ -4354,7 +4215,7 @@ class ValOpsHttp extends ValOps {
 });
 }
 async getCommitMessage(preparedCommit) {
-var _res$headers$
+var _res$headers$get4;
 const res = await fetch(`${this.contentUrl}/v1/${this.project}/commit-summary`, {
 method: "POST",
 headers: {
@@ -4372,7 +4233,7 @@ class ValOpsHttp extends ValOps {
 commitSummary: json.commitSummary
 };
 }
-if ((_res$headers$
+if ((_res$headers$get4 = res.headers.get("Content-Type")) !== null && _res$headers$get4 !== void 0 && _res$headers$get4.includes("application/json")) {
 const json = await res.json();
 return {
 error: {
@@ -4388,7 +4249,7 @@ class ValOpsHttp extends ValOps {
 }
 async commit(prepared, message, committer, filesDirectory, newBranch) {
 try {
-var _res$headers$
+var _res$headers$get5;
 const existingBranch = this.branch;
 const res = await fetch(`${this.contentUrl}/v1/${this.project}/commit`, {
 method: "POST",
@@ -4425,7 +4286,7 @@ class ValOpsHttp extends ValOps {
 }
 };
 }
-if ((_res$headers$
+if ((_res$headers$get5 = res.headers.get("Content-Type")) !== null && _res$headers$get5 !== void 0 && _res$headers$get5.includes("application/json")) {
 const json = await res.json();
 if (json.isNotFastForward) {
 return {
@@ -4457,7 +4318,7 @@ class ValOpsHttp extends ValOps {

 // #region profiles
 async getProfiles() {
-var _res$headers$
+var _res$headers$get6;
 const res = await fetch(`${this.contentUrl}/v1/${this.project}/profiles`, {
 headers: {
 ...this.authHeaders,
@@ -4472,7 +4333,7 @@ class ValOpsHttp extends ValOps {
 }
 return parsed.data.profiles;
 }
-if ((_res$headers$
+if ((_res$headers$get6 = res.headers.get("Content-Type")) !== null && _res$headers$get6 !== void 0 && _res$headers$get6.includes("application/json")) {
 const json = await res.json();
 throw Error(`Could not get profiles (status: ${res.status}): ${"message" in json ? json.message : "Unknown error"}`);
 }
@@ -4501,7 +4362,7 @@ async function getSettings(projectName, auth) {
 if (response.status === 404) {
 return {
 success: false,
-message: `Project '${projectName}' not found: that the name of the project is correct and that you have access to it.`
+message: `Project '${projectName}' not found: verify that the name of the project is correct and that you have access to it.`
 };
 }
 if (response.status !== 200) {
@@ -4531,7 +4392,7 @@ async function getSettings(projectName, auth) {
 }

 function getPersonalAccessTokenPath(root) {
-return
+return path__default.join(path__default.resolve(root), ".val", "pat.json");
 }
 function parsePersonalAccessTokenFile(content) {
 if (!content) {
@@ -4809,7 +4670,7 @@ const ValServer = (valModules, options, callbacks) => {
 };
 }
 const fs = await import('fs');
-const patPath = getPersonalAccessTokenPath(
+const patPath = getPersonalAccessTokenPath(path__default.join(process.cwd()));
 let patFile;
 try {
 patFile = await fs.promises.readFile(patPath, "utf-8");
@@ -5248,12 +5109,12 @@ const ValServer = (valModules, options, callbacks) => {
 const remoteFileAuth = remoteFileAuthRes.json.remoteFileAuth;
 const settingsRes = await getSettings(options.project, remoteFileAuth);
 if (!settingsRes.success) {
-console.warn("Could not get
+console.warn("Could not get remote files settings: " + settingsRes.message);
 return {
 status: 400,
 json: {
 errorCode: "error-could-not-get-settings",
-message: `Could not get settings
+message: `Could not get remote files settings: ${settingsRes.message}`
 }
 };
 }
@@ -5335,6 +5196,105 @@ const ValServer = (valModules, options, callbacks) => {
 };
 }
 },
+"/upload/patches": {
+POST: async req => {
+if (serverOps instanceof ValOpsHttp) {
+return {
+status: 400,
+json: {
+message: "Do not use this endpoint in HTTP mode"
+}
+};
+}
+const pathParts = req.path.split("/");
+const patchId = pathParts[1];
+const isValidPatchId = patchId.length === 36;
+const isValidEndpoint = pathParts[0] === "" && isValidPatchId && pathParts[2] === "files";
+if (!isValidEndpoint) {
+return {
+status: 400,
+json: {
+message: "Invalid endpoint"
+}
+};
+}
+const {
+filePath,
+parentRef,
+type,
+data,
+metadata
+} = req.body;
+const saveRes = await serverOps.saveBase64EncodedBinaryFileFromPatch(filePath, parentRef, patchId, data, type, metadata);
+if (saveRes.error) {
+return {
+status: 400,
+json: {
+message: saveRes.error.message
+}
+};
+}
+return {
+status: 200,
+json: {
+filePath,
+patchId
+}
+};
+}
+},
+"/direct-file-upload-settings": {
+POST: async req => {
+const cookies = req.cookies;
+const auth = getAuth(cookies);
+if (auth.error) {
+return {
+status: 401,
+json: {
+message: auth.error
+}
+};
+}
+if (serverOps instanceof ValOpsFS) {
+// In FS mode we do not use the remote server at all and just return an url that points to this server
+// which has an endpoint that handles this
+// A bit hacky perhaps, but we want to have as similar semantics as possible in client code when it comes to FS / HTTP
+const host = `/api/val`;
+return {
+status: 200,
+json: {
+nonce: null,
+baseUrl: `${host}/upload` // NOTE: this is the /upload/patches endpoint - the client will add /patches/:patchId/files to this and post to it
+}
+};
+}
+const httpOps = serverOps;
+const profileId = "id" in auth && auth.id ? auth.id : undefined;
+if (!profileId) {
+return {
+status: 401,
+json: {
+message: "Unauthorized"
+}
+};
+}
+const corsOrigin = "*"; // TODO: add cors origin
+const presignedAuthNonce = await httpOps.getPresignedAuthNonce(profileId, corsOrigin);
+if (presignedAuthNonce.status === "error") {
+return {
+status: presignedAuthNonce.statusCode,
+json: presignedAuthNonce.error
+};
+}
+return {
+status: 200,
+json: {
+nonce: presignedAuthNonce.data.nonce,
+baseUrl: presignedAuthNonce.data.baseUrl
+}
+};
+}
+},
 //#region patches
 "/patches": {
 PUT: async req => {
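Editorial note: the hunk above adds two ValServer routes: /upload/patches (FS mode only) accepts the file payload directly, and /direct-file-upload-settings returns a { nonce, baseUrl } pair (nonce is null in FS mode, where baseUrl points back at this server). A hedged sketch of how a client could combine them, assuming the server is mounted under /api/val as the FS branch above suggests; how the nonce is attached to the upload request in HTTP mode is not shown in this diff:

// Sketch only; not the package's actual client code.
async function uploadPatchFile(patchId: string, body: unknown): Promise<unknown> {
  // 1) Ask the Val server where to upload.
  const settingsRes = await fetch("/api/val/direct-file-upload-settings", { method: "POST" });
  const settings = (await settingsRes.json()) as { nonce: string | null; baseUrl: string };
  // 2) POST the file payload to `${baseUrl}/patches/${patchId}/files`
  //    (per the NOTE in the FS branch above).
  const uploadRes = await fetch(`${settings.baseUrl}/patches/${patchId}/files`, {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify(body),
  });
  if (!uploadRes.ok) {
    throw new Error(`Upload failed: ${uploadRes.status} ${uploadRes.statusText}`);
  }
  return uploadRes.json();
}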
@@ -5376,7 +5336,7 @@ const ValServer = (valModules, options, callbacks) => {
 status: 400,
 json: {
 type: "patch-error",
-message:
+message: createPatchRes.error.error.message,
 errors: {
 [patch.path]: [{
 error: {
@@ -5838,8 +5798,22 @@ const ValServer = (valModules, options, callbacks) => {
 return remoteFileAuthRes;
 }
 const remoteFileAuth = (_remoteFileAuthRes = remoteFileAuthRes) === null || _remoteFileAuthRes === void 0 || (_remoteFileAuthRes = _remoteFileAuthRes.json) === null || _remoteFileAuthRes === void 0 ? void 0 : _remoteFileAuthRes.remoteFileAuth;
+const saveRes = await serverOps.saveOrUploadFiles(preparedCommit, mode, remoteFileAuth);
+if (Object.keys(saveRes.errors).length > 0) {
+console.error("Val: Failed to save files", saveRes.errors);
+return {
+status: 400,
+json: {
+message: "Failed to save files",
+details: Object.entries(saveRes.errors).map(([key, error]) => {
+return {
+message: `Got error: ${error} in ${key}`
+};
+})
+}
+};
+}
 const deleteRes = await serverOps.deleteAllPatches();
-await serverOps.saveOrUploadFiles(preparedCommit, mode, remoteFileAuth);
 if (deleteRes.error) {
 console.error(`Val got an error while cleaning up patches after publish: ${deleteRes.error.message}`);
 }
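Editorial note: the hunk above reorders the publish flow so that saveOrUploadFiles runs and is checked for errors before the patches are deleted, instead of after. Reconstructed from the hunk, the new error response looks roughly like this (the type name is hypothetical):

// Shape of the new 400 response returned when saveOrUploadFiles reports errors during publish.
type SaveFilesErrorResponse = {
  status: 400;
  json: {
    message: "Failed to save files";
    details: { message: string }[]; // one entry per failed file: `Got error: ${error} in ${key}`
  };
};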
@@ -6332,7 +6306,7 @@ async function initHandlerOptions(route, opts, config) {
 // TODO: remove
 async function safeReadGit(cwd) {
 async function findGitHead(currentDir, depth) {
-const gitHeadPath =
+const gitHeadPath = path.join(currentDir, ".git", "HEAD");
 if (depth > 1000) {
 console.error(`Reached max depth while scanning for .git folder. Current working dir: ${cwd}.`);
 return {
@@ -6356,7 +6330,7 @@ async function safeReadGit(cwd) {
 };
 }
 } catch (error) {
-const parentDir =
+const parentDir = path.dirname(currentDir);

 // We've reached the root directory
 if (parentDir === currentDir) {
@@ -6380,7 +6354,7 @@ async function safeReadGit(cwd) {
 }
 async function readCommit(gitDir, branchName) {
 try {
-return (await promises.readFile(
+return (await promises.readFile(path.join(gitDir, ".git", "refs", "heads", branchName), "utf-8")).trim();
 } catch (err) {
 return undefined;
 }
@@ -6656,10 +6630,10 @@ class ValFSHost {
 return this.currentDirectory;
 }
 getCanonicalFileName(fileName) {
-if (
-return
+if (path__default.isAbsolute(fileName)) {
+return path__default.normalize(fileName);
 }
-return
+return path__default.resolve(this.getCurrentDirectory(), fileName);
 }
 fileExists(fileName) {
 return this.valFS.fileExists(fileName);
@@ -6826,11 +6800,11 @@ async function checkRemoteRef(remoteHost, ref, projectRoot, schema, metadata) {
 }
 function getCachedRemoteFileDir(projectRoot) {
 // store in projectRoot/.val/remote-file-cache
-const remoteFileCacheDir =
+const remoteFileCacheDir = path__default.join(projectRoot, ".val", "remote-file-cache");
 return remoteFileCacheDir;
 }
 function getCachedRemoteFilePath(fileExt, currentFileHash, remoteFileCacheDir) {
-const remoteFilePath =
+const remoteFilePath = path__default.join(remoteFileCacheDir, currentFileHash + "." + fileExt);
 return remoteFilePath;
 }
 async function getFileBufferFromRemote(ref, fileExt, currentFileHash, projectRoot) {
@@ -7113,8 +7087,8 @@ async function createFixPatch(config, apply, sourcePath, validationError, remote
 });
 continue;
 }
-const absoluteFilePath =
-await fs.promises.mkdir(
+const absoluteFilePath = path__default.join(config.projectRoot, splitRemoteRefDataRes.filePath);
+await fs.promises.mkdir(path__default.dirname(absoluteFilePath), {
 recursive: true
 });
 const res = await downloadFileFromRemote(url, absoluteFilePath);
@@ -7275,7 +7249,7 @@ async function getImageMetadata(projectRoot, validationError) {
 // TODO:
 throw Error("Cannot fix image without a file reference");
 }
-const filename =
+const filename = path__default.join(projectRoot, fileRef);
 const buffer = fs.readFileSync(filename);
 return extractImageMetadata(filename, buffer);
 }
@@ -7285,7 +7259,7 @@ async function getFileMetadata(projectRoot, validationError) {
 // TODO:
 throw Error("Cannot fix file without a file reference");
 }
-const filename =
+const filename = path__default.join(projectRoot, fileRef);
 fs.readFileSync(filename);
 return extractFileMetadata(fileRef);
 }