@backstage/plugin-techdocs-node 1.2.0 → 1.2.1-next.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +12 -0
- package/dist/index.cjs.js +630 -245
- package/dist/index.cjs.js.map +1 -1
- package/package.json +5 -5
package/dist/index.cjs.js
CHANGED
@@ -74,7 +74,9 @@ const lowerCaseEntityTripletInStoragePath = (originalPath) => {
     parts.shift();
   }
   if (parts.length <= 3) {
-    throw new Error(
+    throw new Error(
+      `Encountered file unmanaged by TechDocs ${originalPath}. Skipping.`
+    );
   }
   return lowerCaseEntityTriplet(parts.join(path__default["default"].posix.sep));
 };
@@ -205,7 +207,10 @@ const getMkdocsYml = async (inputDir) => {
       mkdocsYmlPath = path__default["default"].join(inputDir, "mkdocs.yml");
       mkdocsYmlFileString = await fs__default["default"].readFile(mkdocsYmlPath, "utf8");
     } catch (error) {
-      throw new errors.ForwardedError(
+      throw new errors.ForwardedError(
+        "Could not read MkDocs YAML config file mkdocs.yml or mkdocs.yaml for validation",
+        error
+      );
     }
   }
   return {
@@ -222,8 +227,10 @@ const validateMkdocsYaml = async (inputDir, mkdocsYmlFileString) => {
   }
   const parsedMkdocsYml = mkdocsYml;
   if (parsedMkdocsYml.docs_dir && !backendCommon.isChildPath(inputDir, path.resolve(inputDir, parsedMkdocsYml.docs_dir))) {
-    throw new Error(
-
+    throw new Error(
+      `docs_dir configuration value in mkdocs can't be an absolute directory or start with ../ for security reasons.
+        Use relative paths instead which are resolved relative to your mkdocs.yml file location.`
+    );
   }
   return parsedMkdocsYml.docs_dir;
 };
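Note on the docs_dir guard above: a value is accepted only if it resolves to a location inside the documentation workspace, mirroring the backendCommon.isChildPath check. A minimal sketch of the same check using only Node's built-in path module (the helper name is illustrative, not part of this package):

    const path = require("path");

    // Accepts "docs" or "sub/dir"; rejects "/etc" and "../outside" (sketch).
    function isInsideRoot(root, candidate) {
      const resolved = path.resolve(root, candidate);
      const relative = path.relative(root, resolved);
      return !relative.startsWith("..") && !path.isAbsolute(relative);
    }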
@@ -253,10 +260,12 @@ const patchIndexPreBuild = async ({
       logger.warn(`${path__default["default"].relative(inputDir, filePath)} not found.`);
     }
   }
-  logger.warn(
-
-
-
+  logger.warn(
+    `Could not find any techdocs' index file. Please make sure at least one of ${[
+      indexMdPath,
+      ...fallbacks
+    ].join(" ")} exists.`
+  );
 };
 const createOrUpdateMetadata = async (techdocsMetadataPath, logger) => {
   const techdocsMetadataDir = techdocsMetadataPath.split(path__default["default"].sep).slice(0, -1).join(path__default["default"].sep);
@@ -276,7 +285,9 @@ const createOrUpdateMetadata = async (techdocsMetadataPath, logger) => {
   }
   json.build_timestamp = Date.now();
   try {
-    json.files = (await getFileTreeRecursively(techdocsMetadataDir)).map(
+    json.files = (await getFileTreeRecursively(techdocsMetadataDir)).map(
+      (file) => file.replace(`${techdocsMetadataDir}${path__default["default"].sep}`, "")
+    );
   } catch (err) {
     errors.assertError(err);
     json.files = [];
@@ -298,7 +309,9 @@ const patchMkdocsFile = async (mkdocsYmlPath, logger, updateAction) => {
     mkdocsYmlFileString = await fs__default["default"].readFile(mkdocsYmlPath, "utf8");
   } catch (error) {
     errors.assertError(error);
-    logger.warn(
+    logger.warn(
+      `Could not read MkDocs YAML config file ${mkdocsYmlPath} before running the generator: ${error.message}`
+    );
     return;
   }
   let mkdocsYml;
@@ -309,28 +322,44 @@ const patchMkdocsFile = async (mkdocsYmlPath, logger, updateAction) => {
     }
   } catch (error) {
     errors.assertError(error);
-    logger.warn(
+    logger.warn(
+      `Error in parsing YAML at ${mkdocsYmlPath} before running the generator. ${error.message}`
+    );
     return;
   }
   didEdit = updateAction(mkdocsYml);
   try {
     if (didEdit) {
-      await fs__default["default"].writeFile(
+      await fs__default["default"].writeFile(
+        mkdocsYmlPath,
+        yaml__default["default"].dump(mkdocsYml, { schema: MKDOCS_SCHEMA }),
+        "utf8"
+      );
     }
   } catch (error) {
     errors.assertError(error);
-    logger.warn(
+    logger.warn(
+      `Could not write to ${mkdocsYmlPath} after updating it before running the generator. ${error.message}`
+    );
     return;
   }
 };
 const patchMkdocsYmlPreBuild = async (mkdocsYmlPath, logger, parsedLocationAnnotation, scmIntegrations) => {
   await patchMkdocsFile(mkdocsYmlPath, logger, (mkdocsYml) => {
     if (!("repo_url" in mkdocsYml) && !("edit_uri" in mkdocsYml)) {
-      const result = getRepoUrlFromLocationAnnotation(
+      const result = getRepoUrlFromLocationAnnotation(
+        parsedLocationAnnotation,
+        scmIntegrations,
+        mkdocsYml.docs_dir
+      );
       if (result.repo_url || result.edit_uri) {
         mkdocsYml.repo_url = result.repo_url;
         mkdocsYml.edit_uri = result.edit_uri;
-        logger.info(
+        logger.info(
+          `Set ${JSON.stringify(
+            result
+          )}. You can disable this feature by manually setting 'repo_url' or 'edit_uri' according to the MkDocs documentation at https://www.mkdocs.org/user-guide/configuration/#repo_url`
+        );
         return true;
       }
     }
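The patch above only runs when neither repo_url nor edit_uri is already present, so values committed by the documentation owner always win. The update action reduced to its skeleton (a sketch; the result shape { repo_url, edit_uri } matches what getRepoUrlFromLocationAnnotation returns above):

    // Returns true when mkdocs.yml was edited and should be re-serialized.
    function addRepoUrl(mkdocsYml, result) {
      if ("repo_url" in mkdocsYml || "edit_uri" in mkdocsYml) {
        return false; // already configured by the user; leave the file alone
      }
      if (result.repo_url || result.edit_uri) {
        mkdocsYml.repo_url = result.repo_url;
        mkdocsYml.edit_uri = result.edit_uri;
        return true;
      }
      return false;
    }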
@@ -381,7 +410,12 @@ const _TechdocsGenerator = class {
     const { path: mkdocsYmlPath, content } = await getMkdocsYml(inputDir);
     const docsDir = await validateMkdocsYaml(inputDir, content);
     if (parsedLocationAnnotation) {
-      await patchMkdocsYmlPreBuild(
+      await patchMkdocsYmlPreBuild(
+        mkdocsYmlPath,
+        childLogger,
+        parsedLocationAnnotation,
+        this.scmIntegrations
+      );
     }
     if (this.options.legacyCopyReadmeMdToIndexMd) {
       await patchIndexPreBuild({ inputDir, logger: childLogger, docsDir });
@@ -404,7 +438,9 @@ const _TechdocsGenerator = class {
            },
            logStream
          });
-          childLogger.info(
+          childLogger.info(
+            `Successfully generated docs from ${inputDir} into ${outputDir} using local mkdocs`
+          );
          break;
        case "docker":
          await this.containerRunner.runContainer({
@@ -416,18 +452,33 @@ const _TechdocsGenerator = class {
            envVars: { HOME: "/tmp" },
            pullImage: this.options.pullImage
          });
-          childLogger.info(
+          childLogger.info(
+            `Successfully generated docs from ${inputDir} into ${outputDir} using techdocs-container`
+          );
          break;
        default:
-          throw new Error(
+          throw new Error(
+            `Invalid config value "${this.options.runIn}" provided in 'techdocs.generators.techdocs'.`
+          );
      }
    } catch (error) {
-      this.logger.debug(
-
-
-
+      this.logger.debug(
+        `Failed to generate docs from ${inputDir} into ${outputDir}`
+      );
+      throw new errors.ForwardedError(
+        `Failed to generate docs from ${inputDir} into ${outputDir}`,
+        error
+      );
+    }
+    await createOrUpdateMetadata(
+      path__default["default"].join(outputDir, "techdocs_metadata.json"),
+      childLogger
+    );
    if (etag) {
-      await storeEtagMetadata(
+      await storeEtagMetadata(
+        path__default["default"].join(outputDir, "techdocs_metadata.json"),
+        etag
+      );
    }
  }
 };
@@ -435,16 +486,24 @@ let TechdocsGenerator = _TechdocsGenerator;
 TechdocsGenerator.defaultDockerImage = "spotify/techdocs:v1.0.3";
 function readGeneratorConfig(config, logger) {
   var _a;
-  const legacyGeneratorType = config.getOptionalString(
+  const legacyGeneratorType = config.getOptionalString(
+    "techdocs.generators.techdocs"
+  );
   if (legacyGeneratorType) {
-    logger.warn(
+    logger.warn(
+      `The 'techdocs.generators.techdocs' configuration key is deprecated and will be removed in the future. Please use 'techdocs.generator' instead. See here https://backstage.io/docs/features/techdocs/configuration`
+    );
   }
   return {
     runIn: (_a = legacyGeneratorType != null ? legacyGeneratorType : config.getOptionalString("techdocs.generator.runIn")) != null ? _a : "docker",
     dockerImage: config.getOptionalString("techdocs.generator.dockerImage"),
     pullImage: config.getOptionalBoolean("techdocs.generator.pullImage"),
-    omitTechdocsCoreMkdocsPlugin: config.getOptionalBoolean(
-
+    omitTechdocsCoreMkdocsPlugin: config.getOptionalBoolean(
+      "techdocs.generator.mkdocs.omitTechdocsCorePlugin"
+    ),
+    legacyCopyReadmeMdToIndexMd: config.getOptionalBoolean(
+      "techdocs.generator.mkdocs.legacyCopyReadmeMdToIndexMd"
+    )
   };
 }
 
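readGeneratorConfig keeps the deprecated techdocs.generators.techdocs key working while preferring it over techdocs.generator.runIn, with "docker" as the final default. The same resolution order written out plainly (a sketch; the argument is any object with getOptionalString(key), e.g. a Backstage Config instance):

    // Equivalent resolution order for runIn.
    function resolveRunIn(config) {
      return (
        config.getOptionalString("techdocs.generators.techdocs") ?? // deprecated key, still honored
        config.getOptionalString("techdocs.generator.runIn") ??     // current key
        "docker"                                                    // default
      );
    }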
@@ -475,7 +534,9 @@ const parseReferenceAnnotation = (annotationName, entity) => {
   var _a;
   const annotation = (_a = entity.metadata.annotations) == null ? void 0 : _a[annotationName];
   if (!annotation) {
-    throw new errors.InputError(
+    throw new errors.InputError(
+      `No location annotation provided in entity: ${entity.metadata.name}`
+    );
   }
   const { type, target } = catalogModel.parseLocationRef(annotation);
   return {
@@ -497,7 +558,10 @@ const transformDirLocation = (entity, dirAnnotation, scmIntegrations) => {
      };
    }
    case "file": {
-      const target = backendCommon.resolveSafeChildPath(
+      const target = backendCommon.resolveSafeChildPath(
+        path__default["default"].dirname(location.target),
+        dirAnnotation.target
+      );
      return {
        type: "dir",
        target
@@ -508,7 +572,10 @@ const transformDirLocation = (entity, dirAnnotation, scmIntegrations) => {
   }
 };
 const getLocationForEntity = (entity, scmIntegration) => {
-  const annotation = parseReferenceAnnotation(
+  const annotation = parseReferenceAnnotation(
+    "backstage.io/techdocs-ref",
+    entity
+  );
   switch (annotation.type) {
     case "url":
       return annotation;
@@ -520,7 +587,10 @@ const getLocationForEntity = (entity, scmIntegration) => {
 };
 const getDocFilesFromRepository = async (reader, entity, opts) => {
   var _a, _b;
-  const { target } = parseReferenceAnnotation(
+  const { target } = parseReferenceAnnotation(
+    "backstage.io/techdocs-ref",
+    entity
+  );
   (_a = opts == null ? void 0 : opts.logger) == null ? void 0 : _a.debug(`Reading files from ${target}`);
   const readTreeResponse = await reader.readTree(target, { etag: opts == null ? void 0 : opts.etag });
   const preparedDir = await readTreeResponse.dir();
@@ -541,8 +611,15 @@ class DirectoryPreparer {
   }
   async prepare(entity, options) {
     var _a, _b;
-    const annotation = parseReferenceAnnotation(
-
+    const annotation = parseReferenceAnnotation(
+      "backstage.io/techdocs-ref",
+      entity
+    );
+    const { type, target } = transformDirLocation(
+      entity,
+      annotation,
+      this.scmIntegrations
+    );
     switch (type) {
       case "url": {
         (_a = options == null ? void 0 : options.logger) == null ? void 0 : _a.debug(`Reading files from ${target}`);
@@ -587,7 +664,9 @@ class UrlPreparer {
      if (error.name === "NotModifiedError") {
        this.logger.debug(`Cache is valid for etag ${options == null ? void 0 : options.etag}`);
      } else {
-        this.logger.debug(
+        this.logger.debug(
+          `Unable to fetch files for building docs ${error.message}`
+        );
      }
      throw error;
    }
@@ -613,7 +692,10 @@ class Preparers {
     this.preparerMap.set(protocol, preparer);
   }
   get(entity) {
-    const { type } = parseReferenceAnnotation(
+    const { type } = parseReferenceAnnotation(
+      "backstage.io/techdocs-ref",
+      entity
+    );
     const preparer = this.preparerMap.get(type);
     if (!preparer) {
       throw new Error(`No preparer registered for type: "${type}"`);
@@ -627,7 +709,10 @@ const streamToBuffer$1 = (stream) => {
    try {
      const chunks = [];
      stream.on("data", (chunk) => chunks.push(chunk));
-      stream.on(
+      stream.on(
+        "error",
+        (e) => reject(new errors.ForwardedError("Unable to read stream", e))
+      );
      stream.on("end", () => resolve(Buffer.concat(chunks)));
    } catch (e) {
      throw new errors.ForwardedError("Unable to parse the response data", e);
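streamToBuffer$1 is the usual listener-to-Promise bridge: buffer every data chunk, settle on end or error. The pattern in isolation, for any Node Readable (sketch):

    // Collect a Readable stream into a single Buffer.
    function streamToBuffer(stream) {
      return new Promise((resolve, reject) => {
        const chunks = [];
        stream.on("data", (chunk) => chunks.push(chunk));
        stream.on("error", (e) => reject(e));
        stream.on("end", () => resolve(Buffer.concat(chunks)));
      });
    }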
@@ -648,22 +733,34 @@ class AwsS3Publish {
    try {
      bucketName = config.getString("techdocs.publisher.awsS3.bucketName");
    } catch (error) {
-      throw new Error(
+      throw new Error(
+        "Since techdocs.publisher.type is set to 'awsS3' in your app config, techdocs.publisher.awsS3.bucketName is required."
+      );
    }
-    const bucketRootPath = normalizeExternalStorageRootPath(
+    const bucketRootPath = normalizeExternalStorageRootPath(
+      config.getOptionalString("techdocs.publisher.awsS3.bucketRootPath") || ""
+    );
    const sse = config.getOptionalString("techdocs.publisher.awsS3.sse");
-    const credentialsConfig = config.getOptionalConfig(
+    const credentialsConfig = config.getOptionalConfig(
+      "techdocs.publisher.awsS3.credentials"
+    );
    const credentials = AwsS3Publish.buildCredentials(credentialsConfig);
    const region = config.getOptionalString("techdocs.publisher.awsS3.region");
-    const endpoint = config.getOptionalString(
-
+    const endpoint = config.getOptionalString(
+      "techdocs.publisher.awsS3.endpoint"
+    );
+    const s3ForcePathStyle = config.getOptionalBoolean(
+      "techdocs.publisher.awsS3.s3ForcePathStyle"
+    );
    const storageClient = new aws__default["default"].S3({
      credentials,
      ...region && { region },
      ...endpoint && { endpoint },
      ...s3ForcePathStyle && { s3ForcePathStyle }
    });
-    const legacyPathCasing = config.getOptionalBoolean(
+    const legacyPathCasing = config.getOptionalBoolean(
+      "techdocs.legacyUseCaseSensitiveTripletPaths"
+    ) || false;
    return new AwsS3Publish({
      storageClient,
      bucketName,
@@ -701,10 +798,14 @@ class AwsS3Publish {
  async getReadiness() {
    try {
      await this.storageClient.headBucket({ Bucket: this.bucketName }).promise();
-      this.logger.info(
+      this.logger.info(
+        `Successfully connected to the AWS S3 bucket ${this.bucketName}.`
+      );
      return { isAvailable: true };
    } catch (error) {
-      this.logger.error(
+      this.logger.error(
+        `Could not retrieve metadata about the AWS S3 bucket ${this.bucketName}. Make sure the bucket exists. Also make sure that authentication is setup either by explicitly defining credentials and region in techdocs.publisher.awsS3 in app config or by using environment variables. Refer to https://backstage.io/docs/features/techdocs/using-cloud-storage`
+      );
      this.logger.error(`from AWS client library`, error);
      return {
        isAvailable: false
@@ -721,45 +822,76 @@ class AwsS3Publish {
    const sse = this.sse;
    let existingFiles = [];
    try {
-      const remoteFolder = getCloudPathForLocalPath(
+      const remoteFolder = getCloudPathForLocalPath(
+        entity,
+        void 0,
+        useLegacyPathCasing,
+        bucketRootPath
+      );
      existingFiles = await this.getAllObjectsFromBucket({
        prefix: remoteFolder
      });
    } catch (e) {
      errors.assertError(e);
-      this.logger.error(
+      this.logger.error(
+        `Unable to list files for Entity ${entity.metadata.name}: ${e.message}`
+      );
    }
    let absoluteFilesToUpload;
    try {
      absoluteFilesToUpload = await getFileTreeRecursively(directory);
-      await bulkStorageOperation(
-
-
-
-
-
-
-
-
-
-
-
-
+      await bulkStorageOperation(
+        async (absoluteFilePath) => {
+          const relativeFilePath = path__default["default"].relative(directory, absoluteFilePath);
+          const fileStream = fs__default["default"].createReadStream(absoluteFilePath);
+          const params = {
+            Bucket: this.bucketName,
+            Key: getCloudPathForLocalPath(
+              entity,
+              relativeFilePath,
+              useLegacyPathCasing,
+              bucketRootPath
+            ),
+            Body: fileStream,
+            ...sse && { ServerSideEncryption: sse }
+          };
+          objects.push(params.Key);
+          return this.storageClient.upload(params).promise();
+        },
+        absoluteFilesToUpload,
+        { concurrencyLimit: 10 }
+      );
+      this.logger.info(
+        `Successfully uploaded all the generated files for Entity ${entity.metadata.name}. Total number of files: ${absoluteFilesToUpload.length}`
+      );
    } catch (e) {
      const errorMessage = `Unable to upload file(s) to AWS S3. ${e}`;
      this.logger.error(errorMessage);
      throw new Error(errorMessage);
    }
    try {
-      const relativeFilesToUpload = absoluteFilesToUpload.map(
+      const relativeFilesToUpload = absoluteFilesToUpload.map(
+        (absoluteFilePath) => getCloudPathForLocalPath(
+          entity,
+          path__default["default"].relative(directory, absoluteFilePath),
+          useLegacyPathCasing,
+          bucketRootPath
+        )
+      );
      const staleFiles = getStaleFiles(relativeFilesToUpload, existingFiles);
-      await bulkStorageOperation(
-
-
-
-
-
-
+      await bulkStorageOperation(
+        async (relativeFilePath) => {
+          return await this.storageClient.deleteObject({
+            Bucket: this.bucketName,
+            Key: relativeFilePath
+          }).promise();
+        },
+        staleFiles,
+        { concurrencyLimit: 10 }
+      );
+      this.logger.info(
+        `Successfully deleted stale files for Entity ${entity.metadata.name}. Total number of files: ${staleFiles.length}`
+      );
    } catch (error) {
      const errorMessage = `Unable to delete file(s) from AWS S3. ${error}`;
      this.logger.error(errorMessage);
@@ -779,9 +911,13 @@ class AwsS3Publish {
      try {
        const techdocsMetadataJson = await streamToBuffer$1(stream);
        if (!techdocsMetadataJson) {
-          throw new Error(
+          throw new Error(
+            `Unable to parse the techdocs metadata file ${entityRootDir}/techdocs_metadata.json.`
+          );
        }
-        const techdocsMetadata = JSON5__default["default"].parse(
+        const techdocsMetadata = JSON5__default["default"].parse(
+          techdocsMetadataJson.toString("utf-8")
+        );
        resolve(techdocsMetadata);
      } catch (err) {
        errors.assertError(err);
@@ -803,13 +939,17 @@ class AwsS3Publish {
      const responseHeaders = getHeadersForFileExtension(fileExtension);
      const stream = this.storageClient.getObject({ Bucket: this.bucketName, Key: filePath }).createReadStream();
      try {
-        for (const [headerKey, headerValue] of Object.entries(
+        for (const [headerKey, headerValue] of Object.entries(
+          responseHeaders
+        )) {
          res.setHeader(headerKey, headerValue);
        }
        res.send(await streamToBuffer$1(stream));
      } catch (err) {
        errors.assertError(err);
-        this.logger.warn(
+        this.logger.warn(
+          `TechDocs S3 router failed to serve static files from bucket ${this.bucketName} at key ${filePath}: ${err.message}`
+        );
        res.status(404).send("File Not Found");
      }
    };
@@ -834,36 +974,40 @@ class AwsS3Publish {
  }) {
    const allObjects = await this.getAllObjectsFromBucket();
    const limiter = createLimiter__default["default"](concurrency);
-    await Promise.all(
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+    await Promise.all(
+      allObjects.map(
+        (f) => limiter(async (file) => {
+          let newPath;
+          try {
+            newPath = lowerCaseEntityTripletInStoragePath(file);
+          } catch (e) {
+            errors.assertError(e);
+            this.logger.warn(e.message);
+            return;
+          }
+          if (file === newPath) {
+            return;
+          }
+          try {
+            this.logger.verbose(`Migrating ${file}`);
+            await this.storageClient.copyObject({
+              Bucket: this.bucketName,
+              CopySource: [this.bucketName, file].join("/"),
+              Key: newPath
+            }).promise();
+            if (removeOriginal) {
+              await this.storageClient.deleteObject({
+                Bucket: this.bucketName,
+                Key: file
+              }).promise();
+            }
+          } catch (e) {
+            errors.assertError(e);
+            this.logger.warn(`Unable to migrate ${file}: ${e.message}`);
+          }
+        }, f)
+      )
+    );
  }
  async getAllObjectsFromBucket({ prefix } = { prefix: "" }) {
    const objects = [];
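The migration above lower-cases only the leading namespace/kind/name triplet of each object key; the rest of the key keeps its case. For example (illustrative key):

    // "default/API/my-service/assets/Logo.png"
    //   → "default/api/my-service/assets/Logo.png"

Keys that are already lower-cased are skipped (file === newPath), and keys with fewer than four segments make lowerCaseEntityTripletInStoragePath throw, which the limiter callback logs as a warning and skips.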
@@ -875,7 +1019,9 @@ class AwsS3Publish {
        ContinuationToken: nextContinuation,
        ...prefix ? { Prefix: prefix } : {}
      }).promise();
-      objects.push(
+      objects.push(
+        ...(allObjects.Contents || []).map((f) => f.Key || "").filter((f) => !!f)
+      );
      nextContinuation = allObjects.NextContinuationToken;
    } while (nextContinuation);
    return objects;
@@ -893,25 +1039,40 @@ class AzureBlobStoragePublish {
  static fromConfig(config, logger) {
    let containerName = "";
    try {
-      containerName = config.getString(
+      containerName = config.getString(
+        "techdocs.publisher.azureBlobStorage.containerName"
+      );
    } catch (error) {
-      throw new Error(
+      throw new Error(
+        "Since techdocs.publisher.type is set to 'azureBlobStorage' in your app config, techdocs.publisher.azureBlobStorage.containerName is required."
+      );
    }
    let accountName = "";
    try {
-      accountName = config.getString(
+      accountName = config.getString(
+        "techdocs.publisher.azureBlobStorage.credentials.accountName"
+      );
    } catch (error) {
-      throw new Error(
+      throw new Error(
+        "Since techdocs.publisher.type is set to 'azureBlobStorage' in your app config, techdocs.publisher.azureBlobStorage.credentials.accountName is required."
+      );
    }
-    const accountKey = config.getOptionalString(
+    const accountKey = config.getOptionalString(
+      "techdocs.publisher.azureBlobStorage.credentials.accountKey"
+    );
    let credential;
    if (accountKey) {
      credential = new storageBlob.StorageSharedKeyCredential(accountName, accountKey);
    } else {
      credential = new identity.DefaultAzureCredential();
    }
-    const storageClient = new storageBlob.BlobServiceClient(
-
+    const storageClient = new storageBlob.BlobServiceClient(
+      `https://${accountName}.blob.core.windows.net`,
+      credential
+    );
+    const legacyPathCasing = config.getOptionalBoolean(
+      "techdocs.legacyUseCaseSensitiveTripletPaths"
+    ) || false;
    return new AzureBlobStoragePublish({
      storageClient,
      containerName,
@@ -928,13 +1089,17 @@ class AzureBlobStoragePublish {
        };
      }
      if (response._response.status >= 400) {
-        this.logger.error(
+        this.logger.error(
+          `Failed to retrieve metadata from ${response._response.request.url} with status code ${response._response.status}.`
+        );
      }
    } catch (e) {
      errors.assertError(e);
      this.logger.error(`from Azure Blob Storage client library: ${e.message}`);
    }
-    this.logger.error(
+    this.logger.error(
+      `Could not retrieve metadata about the Azure Blob Storage container ${this.containerName}. Make sure that the Azure project and container exist and the access key is setup correctly techdocs.publisher.azureBlobStorage.credentials defined in app config has correct permissions. Refer to https://backstage.io/docs/features/techdocs/using-cloud-storage`
+    );
    return { isAvailable: false };
  }
  async publish({
@@ -943,7 +1108,11 @@ class AzureBlobStoragePublish {
  }) {
    const objects = [];
    const useLegacyPathCasing = this.legacyPathCasing;
-    const remoteFolder = getCloudPathForLocalPath(
+    const remoteFolder = getCloudPathForLocalPath(
+      entity,
+      void 0,
+      useLegacyPathCasing
+    );
    let existingFiles = [];
    try {
      existingFiles = await this.getAllBlobsFromContainer({
@@ -952,7 +1121,9 @@ class AzureBlobStoragePublish {
      });
    } catch (e) {
      errors.assertError(e);
-      this.logger.error(
+      this.logger.error(
+        `Unable to list files for Entity ${entity.metadata.name}: ${e.message}`
+      );
    }
    let absoluteFilesToUpload;
    let container;
@@ -960,32 +1131,62 @@ class AzureBlobStoragePublish {
      absoluteFilesToUpload = await getFileTreeRecursively(directory);
      container = this.storageClient.getContainerClient(this.containerName);
      const failedOperations = [];
-      await bulkStorageOperation(
-
-
-
-
-
-
-
-
-
+      await bulkStorageOperation(
+        async (absoluteFilePath) => {
+          const relativeFilePath = path__default["default"].normalize(
+            path__default["default"].relative(directory, absoluteFilePath)
+          );
+          const remotePath = getCloudPathForLocalPath(
+            entity,
+            relativeFilePath,
+            useLegacyPathCasing
+          );
+          objects.push(remotePath);
+          const response = await container.getBlockBlobClient(remotePath).uploadFile(absoluteFilePath);
+          if (response._response.status >= 400) {
+            failedOperations.push(
+              new Error(
+                `Upload failed for ${absoluteFilePath} with status code ${response._response.status}`
+              )
+            );
+          }
+          return response;
+        },
+        absoluteFilesToUpload,
+        { concurrencyLimit: BATCH_CONCURRENCY }
+      );
      if (failedOperations.length > 0) {
-        throw new Error(
+        throw new Error(
+          failedOperations.map((r) => r.message).filter(Boolean).join(" ")
+        );
      }
-      this.logger.info(
+      this.logger.info(
+        `Successfully uploaded all the generated files for Entity ${entity.metadata.name}. Total number of files: ${absoluteFilesToUpload.length}`
+      );
    } catch (e) {
      const errorMessage = `Unable to upload file(s) to Azure. ${e}`;
      this.logger.error(errorMessage);
      throw new Error(errorMessage);
    }
    try {
-      const relativeFilesToUpload = absoluteFilesToUpload.map(
+      const relativeFilesToUpload = absoluteFilesToUpload.map(
+        (absoluteFilePath) => getCloudPathForLocalPath(
+          entity,
+          path__default["default"].relative(directory, absoluteFilePath),
+          useLegacyPathCasing
+        )
+      );
      const staleFiles = getStaleFiles(relativeFilesToUpload, existingFiles);
-      await bulkStorageOperation(
-
-
-
+      await bulkStorageOperation(
+        async (relativeFilePath) => {
+          return await container.deleteBlob(relativeFilePath);
+        },
+        staleFiles,
+        { concurrencyLimit: BATCH_CONCURRENCY }
+      );
+      this.logger.info(
+        `Successfully deleted stale files for Entity ${entity.metadata.name}. Total number of files: ${staleFiles.length}`
+      );
    } catch (error) {
      const errorMessage = `Unable to delete file(s) from Azure. ${error}`;
      this.logger.error(errorMessage);
@@ -1013,11 +1214,18 @@ class AzureBlobStoragePublish {
    const entityTriplet = `${entityName.namespace}/${entityName.kind}/${entityName.name}`;
    const entityRootDir = this.legacyPathCasing ? entityTriplet : lowerCaseEntityTriplet(entityTriplet);
    try {
-      const techdocsMetadataJson = await this.download(
+      const techdocsMetadataJson = await this.download(
+        this.containerName,
+        `${entityRootDir}/techdocs_metadata.json`
+      );
      if (!techdocsMetadataJson) {
-        throw new Error(
+        throw new Error(
+          `Unable to parse the techdocs metadata file ${entityRootDir}/techdocs_metadata.json.`
+        );
      }
-      const techdocsMetadata = JSON5__default["default"].parse(
+      const techdocsMetadata = JSON5__default["default"].parse(
+        techdocsMetadataJson.toString("utf-8")
+      );
      return techdocsMetadata;
    } catch (e) {
      throw new errors.ForwardedError("TechDocs metadata fetch failed", e);
@@ -1030,12 +1238,16 @@ class AzureBlobStoragePublish {
      const fileExtension = path__default["default"].extname(filePath);
      const responseHeaders = getHeadersForFileExtension(fileExtension);
      this.download(this.containerName, filePath).then((fileContent) => {
-        for (const [headerKey, headerValue] of Object.entries(
+        for (const [headerKey, headerValue] of Object.entries(
+          responseHeaders
+        )) {
          res.setHeader(headerKey, headerValue);
        }
        res.send(fileContent);
      }).catch((e) => {
-        this.logger.warn(
+        this.logger.warn(
+          `TechDocs Azure router failed to serve content from container ${this.containerName} at path ${filePath}: ${e.message}`
+        );
        res.status(404).send("File Not Found");
      });
    };
@@ -1082,7 +1294,13 @@ class AzureBlobStoragePublish {
    const limiter = createLimiter__default["default"](concurrency);
    const container = this.storageClient.getContainerClient(this.containerName);
    for await (const blob of container.listBlobsFlat()) {
-      promises.push(
+      promises.push(
+        limiter(
+          this.renameBlobToLowerCase.bind(this),
+          blob.name,
+          removeOriginal
+        )
+      );
    }
    await Promise.all(promises);
  }
@@ -1136,7 +1354,9 @@ class MigrateWriteStream extends stream.Writable {
    }
    const migrate = this.removeOriginal ? file.move.bind(file) : file.copy.bind(file);
    this.logger.verbose(`Migrating ${file.name}`);
-    migrate(newFile).catch(
+    migrate(newFile).catch(
+      (e) => this.logger.warn(`Unable to migrate ${file.name}: ${e.message}`)
+    ).finally(() => {
      this.inFlight--;
      if (shouldCallNext) {
        next();
@@ -1158,16 +1378,24 @@ class GoogleGCSPublish {
    try {
      bucketName = config.getString("techdocs.publisher.googleGcs.bucketName");
    } catch (error) {
-      throw new Error(
-
-
+      throw new Error(
+        "Since techdocs.publisher.type is set to 'googleGcs' in your app config, techdocs.publisher.googleGcs.bucketName is required."
+      );
+    }
+    const bucketRootPath = normalizeExternalStorageRootPath(
+      config.getOptionalString("techdocs.publisher.googleGcs.bucketRootPath") || ""
+    );
+    const credentials = config.getOptionalString(
+      "techdocs.publisher.googleGcs.credentials"
+    );
    let credentialsJson = {};
    if (credentials) {
      try {
        credentialsJson = JSON.parse(credentials);
      } catch (err) {
-        throw new Error(
+        throw new Error(
+          "Error in parsing techdocs.publisher.googleGcs.credentials config to JSON."
+        );
      }
    }
    const storageClient = new storage.Storage({
@@ -1176,7 +1404,9 @@ class GoogleGCSPublish {
        credentials: credentialsJson
      }
    });
-    const legacyPathCasing = config.getOptionalBoolean(
+    const legacyPathCasing = config.getOptionalBoolean(
+      "techdocs.legacyUseCaseSensitiveTripletPaths"
+    ) || false;
    return new GoogleGCSPublish({
      storageClient,
      bucketName,
@@ -1188,13 +1418,17 @@ class GoogleGCSPublish {
  async getReadiness() {
    try {
      await this.storageClient.bucket(this.bucketName).getMetadata();
-      this.logger.info(
+      this.logger.info(
+        `Successfully connected to the GCS bucket ${this.bucketName}.`
+      );
      return {
        isAvailable: true
      };
    } catch (err) {
      errors.assertError(err);
-      this.logger.error(
+      this.logger.error(
+        `Could not retrieve metadata about the GCS bucket ${this.bucketName}. Make sure the bucket exists. Also make sure that authentication is setup either by explicitly defining techdocs.publisher.googleGcs.credentials in app config or by using environment variables. Refer to https://backstage.io/docs/features/techdocs/using-cloud-storage`
+      );
      this.logger.error(`from GCS client library: ${err.message}`);
      return { isAvailable: false };
    }
@@ -1209,34 +1443,65 @@ class GoogleGCSPublish {
    const bucketRootPath = this.bucketRootPath;
    let existingFiles = [];
    try {
-      const remoteFolder = getCloudPathForLocalPath(
+      const remoteFolder = getCloudPathForLocalPath(
+        entity,
+        void 0,
+        useLegacyPathCasing,
+        bucketRootPath
+      );
      existingFiles = await this.getFilesForFolder(remoteFolder);
    } catch (e) {
      errors.assertError(e);
-      this.logger.error(
+      this.logger.error(
+        `Unable to list files for Entity ${entity.metadata.name}: ${e.message}`
+      );
    }
    let absoluteFilesToUpload;
    try {
      absoluteFilesToUpload = await getFileTreeRecursively(directory);
-      await bulkStorageOperation(
-
-
-
-
-
-
+      await bulkStorageOperation(
+        async (absoluteFilePath) => {
+          const relativeFilePath = path__default["default"].relative(directory, absoluteFilePath);
+          const destination = getCloudPathForLocalPath(
+            entity,
+            relativeFilePath,
+            useLegacyPathCasing,
+            bucketRootPath
+          );
+          objects.push(destination);
+          return await bucket.upload(absoluteFilePath, { destination });
+        },
+        absoluteFilesToUpload,
+        { concurrencyLimit: 10 }
+      );
+      this.logger.info(
+        `Successfully uploaded all the generated files for Entity ${entity.metadata.name}. Total number of files: ${absoluteFilesToUpload.length}`
+      );
    } catch (e) {
      const errorMessage = `Unable to upload file(s) to Google Cloud Storage. ${e}`;
      this.logger.error(errorMessage);
      throw new Error(errorMessage);
    }
    try {
-      const relativeFilesToUpload = absoluteFilesToUpload.map(
+      const relativeFilesToUpload = absoluteFilesToUpload.map(
+        (absoluteFilePath) => getCloudPathForLocalPath(
+          entity,
+          path__default["default"].relative(directory, absoluteFilePath),
+          useLegacyPathCasing,
+          bucketRootPath
+        )
+      );
      const staleFiles = getStaleFiles(relativeFilesToUpload, existingFiles);
-      await bulkStorageOperation(
-
-
-
+      await bulkStorageOperation(
+        async (relativeFilePath) => {
+          return await bucket.file(relativeFilePath).delete();
+        },
+        staleFiles,
+        { concurrencyLimit: 10 }
+      );
+      this.logger.info(
+        `Successfully deleted stale files for Entity ${entity.metadata.name}. Total number of files: ${staleFiles.length}`
+      );
    } catch (error) {
      const errorMessage = `Unable to delete file(s) from Google Cloud Storage. ${error}`;
      this.logger.error(errorMessage);
@@ -1271,7 +1536,9 @@ class GoogleGCSPublish {
      this.storageClient.bucket(this.bucketName).file(filePath).createReadStream().on("pipe", () => {
        res.writeHead(200, responseHeaders);
      }).on("error", (err) => {
-        this.logger.warn(
+        this.logger.warn(
+          `TechDocs Google GCS router failed to serve content from bucket ${this.bucketName} at path ${filePath}: ${err.message}`
+        );
        if (!res.headersSent) {
          res.status(404).send("File Not Found");
        } else {
@@ -1295,7 +1562,11 @@ class GoogleGCSPublish {
  migrateDocsCase({ removeOriginal = false, concurrency = 25 }) {
    return new Promise((resolve, reject) => {
      const allFileMetadata = this.storageClient.bucket(this.bucketName).getFilesStream();
-      const migrateFiles = new MigrateWriteStream(
+      const migrateFiles = new MigrateWriteStream(
+        this.logger,
+        removeOriginal,
+        concurrency
+      );
      migrateFiles.on("finish", resolve).on("error", reject);
      allFileMetadata.pipe(migrateFiles).on("error", (error) => {
        migrateFiles.destroy();
@@ -1328,11 +1599,18 @@ class LocalPublish {
    this.staticDocsDir = options.staticDocsDir;
  }
  static fromConfig(config, logger, discovery) {
-    const legacyPathCasing = config.getOptionalBoolean(
-
+    const legacyPathCasing = config.getOptionalBoolean(
+      "techdocs.legacyUseCaseSensitiveTripletPaths"
+    ) || false;
+    let staticDocsDir = config.getOptionalString(
+      "techdocs.publisher.local.publishDirectory"
+    );
    if (!staticDocsDir) {
      try {
-        staticDocsDir = backendCommon.resolvePackagePath(
+        staticDocsDir = backendCommon.resolvePackagePath(
+          "@backstage/plugin-techdocs-backend",
+          "static/docs"
+        );
      } catch (err) {
        staticDocsDir = os__default["default"].tmpdir();
      }
@@ -1357,9 +1635,18 @@ class LocalPublish {
    const entityNamespace = (_a = entity.metadata.namespace) != null ? _a : "default";
    let publishDir;
    try {
-      publishDir = this.staticEntityPathJoin(
+      publishDir = this.staticEntityPathJoin(
+        entityNamespace,
+        entity.kind,
+        entity.metadata.name
+      );
    } catch (error) {
-      throw new errors.ForwardedError(
+      throw new errors.ForwardedError(
+        `Unable to publish TechDocs site for entity: ${catalogModel.stringifyEntityRef(
+          entity
+        )}`,
+        error
+      );
    }
    if (!fs__default["default"].existsSync(publishDir)) {
      this.logger.info(`Could not find ${publishDir}, creating the directory.`);
@@ -1369,29 +1656,48 @@ class LocalPublish {
      await fs__default["default"].copy(directory, publishDir);
      this.logger.info(`Published site stored at ${publishDir}`);
    } catch (error) {
-      this.logger.debug(
+      this.logger.debug(
+        `Failed to copy docs from ${directory} to ${publishDir}`
+      );
      throw error;
    }
    const techdocsApiUrl = await this.discovery.getBaseUrl("techdocs");
-    const publishedFilePaths = (await getFileTreeRecursively(publishDir)).map(
-
-
+    const publishedFilePaths = (await getFileTreeRecursively(publishDir)).map(
+      (abs) => {
+        return abs.split(`${this.staticDocsDir}/`)[1];
+      }
+    );
    return {
-      remoteUrl: `${techdocsApiUrl}/static/docs/${encodeURIComponent(
+      remoteUrl: `${techdocsApiUrl}/static/docs/${encodeURIComponent(
+        entity.metadata.name
+      )}`,
      objects: publishedFilePaths
    };
  }
  async fetchTechDocsMetadata(entityName) {
    let metadataPath;
    try {
-      metadataPath = this.staticEntityPathJoin(
+      metadataPath = this.staticEntityPathJoin(
+        entityName.namespace,
+        entityName.kind,
+        entityName.name,
+        "techdocs_metadata.json"
+      );
    } catch (err) {
-      throw new errors.ForwardedError(
+      throw new errors.ForwardedError(
+        `Unexpected entity when fetching metadata: ${catalogModel.stringifyEntityRef(
+          entityName
+        )}`,
+        err
+      );
    }
    try {
      return await fs__default["default"].readJson(metadataPath);
    } catch (err) {
-      throw new errors.ForwardedError(
+      throw new errors.ForwardedError(
+        `Unable to read techdocs_metadata.json at ${metadataPath}. Error: ${err}`,
+        err
+      );
    }
  }
  docsRouter() {
@@ -1416,27 +1722,38 @@ class LocalPublish {
      }
      return res.redirect(301, req.baseUrl + newPath);
    });
-    router.use(
-
-
-
-
-
+    router.use(
+      express__default["default"].static(this.staticDocsDir, {
+        setHeaders: (res, filePath) => {
+          const fileExtension = path__default["default"].extname(filePath);
+          const headers = getHeadersForFileExtension(fileExtension);
+          for (const [header, value] of Object.entries(headers)) {
+            res.setHeader(header, value);
+          }
        }
-      }
-
+      })
+    );
    return router;
  }
  async hasDocsBeenGenerated(entity) {
    var _a;
    const namespace = (_a = entity.metadata.namespace) != null ? _a : "default";
    try {
-      const indexHtmlPath = this.staticEntityPathJoin(
+      const indexHtmlPath = this.staticEntityPathJoin(
+        namespace,
+        entity.kind,
+        entity.metadata.name,
+        "index.html"
+      );
      await fs__default["default"].access(indexHtmlPath, fs__default["default"].constants.F_OK);
      return true;
    } catch (err) {
      if (err.name === "NotAllowedError") {
-        this.logger.error(
+        this.logger.error(
+          `Unexpected entity when checking if generated: ${catalogModel.stringifyEntityRef(
+            entity
+          )}`
+        );
      }
      return false;
    }
@@ -1447,29 +1764,41 @@ class LocalPublish {
  }) {
    const files = await getFileTreeRecursively(this.staticDocsDir);
    const limit = createLimiter__default["default"](concurrency);
-    await Promise.all(
-
-
-
-
-
-
-
-
-
-          if (err) {
-            this.logger.warn(`Unable to migrate ${relativeFile}: ${err.message}`);
+    await Promise.all(
+      files.map(
+        (f) => limit(async (file) => {
+          const relativeFile = file.replace(
+            `${this.staticDocsDir}${path__default["default"].sep}`,
+            ""
+          );
+          const newFile = lowerCaseEntityTripletInStoragePath(relativeFile);
+          if (relativeFile === newFile) {
+            return;
          }
-          resolve
-
-
-
+          await new Promise((resolve) => {
+            const migrate = removeOriginal ? fs__default["default"].move : fs__default["default"].copyFile;
+            this.logger.verbose(`Migrating ${relativeFile}`);
+            migrate(file, newFile, (err) => {
+              if (err) {
+                this.logger.warn(
+                  `Unable to migrate ${relativeFile}: ${err.message}`
+                );
+              }
+              resolve();
+            });
+          });
+        }, f)
+      )
+    );
  }
  staticEntityPathJoin(...allParts) {
    let staticEntityPath = this.staticDocsDir;
    allParts.map((part) => part.split(path__default["default"].sep)).flat().forEach((part, index) => {
      if (index < 3) {
-        staticEntityPath = backendCommon.resolveSafeChildPath(
+        staticEntityPath = backendCommon.resolveSafeChildPath(
+          staticEntityPath,
+          this.legacyPathCasing ? part : part.toLowerCase()
+        );
        return;
      }
      staticEntityPath = backendCommon.resolveSafeChildPath(staticEntityPath, part);
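staticEntityPathJoin applies the same casing rule on disk: the first three segments (namespace, kind, name) are lower-cased unless legacy path casing is enabled, and every segment goes through resolveSafeChildPath so a crafted entity name cannot escape staticDocsDir. The casing step alone (a sketch, without the safe-path resolution):

    const path = require("path");

    // Lower-case the first three segments of a storage path; later
    // segments keep their case.
    function lowerCaseTripletSegments(storagePath) {
      return storagePath
        .split(path.sep)
        .map((part, index) => (index < 3 ? part.toLowerCase() : part))
        .join(path.sep);
    }

    // lowerCaseTripletSegments("default/API/My-Service/index.html")
    //   → "default/api/my-service/index.html"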
@@ -1505,11 +1834,17 @@ class OpenStackSwiftPublish {
  static fromConfig(config, logger) {
    let containerName = "";
    try {
-      containerName = config.getString(
+      containerName = config.getString(
+        "techdocs.publisher.openStackSwift.containerName"
+      );
    } catch (error) {
-      throw new Error(
+      throw new Error(
+        "Since techdocs.publisher.type is set to 'openStackSwift' in your app config, techdocs.publisher.openStackSwift.containerName is required."
+      );
    }
-    const openStackSwiftConfig = config.getConfig(
+    const openStackSwiftConfig = config.getConfig(
+      "techdocs.publisher.openStackSwift"
+    );
    const storageClient = new openstackSwiftSdk.SwiftClient({
      authEndpoint: openStackSwiftConfig.getString("authUrl"),
      swiftEndpoint: openStackSwiftConfig.getString("swiftUrl"),
@@ -1520,14 +1855,20 @@ class OpenStackSwiftPublish {
  }
  async getReadiness() {
    try {
-      const container = await this.storageClient.getContainerMetadata(
+      const container = await this.storageClient.getContainerMetadata(
+        this.containerName
+      );
      if (!(container instanceof types.NotFound)) {
-        this.logger.info(
+        this.logger.info(
+          `Successfully connected to the OpenStack Swift container ${this.containerName}.`
+        );
        return {
          isAvailable: true
        };
      }
-      this.logger.error(
+      this.logger.error(
+        `Could not retrieve metadata about the OpenStack Swift container ${this.containerName}. Make sure the container exists. Also make sure that authentication is setup either by explicitly defining credentials and region in techdocs.publisher.openStackSwift in app config or by using environment variables. Refer to https://backstage.io/docs/features/techdocs/using-cloud-storage`
+      );
      return {
        isAvailable: false
      };
@@ -1557,12 +1898,18 @@ class OpenStackSwiftPublish {
        const uploadFile = limiter(async () => {
          const fileBuffer = await fs__default["default"].readFile(filePath);
          const stream = bufferToStream(fileBuffer);
-          return this.storageClient.upload(
+          return this.storageClient.upload(
+            this.containerName,
+            destination,
+            stream
+          );
        });
        uploadPromises.push(uploadFile);
      }
      await Promise.all(uploadPromises);
-      this.logger.info(
+      this.logger.info(
+        `Successfully uploaded all the generated files for Entity ${entity.metadata.name}. Total number of files: ${allFilesToUpload.length}`
+      );
      return { objects };
    } catch (e) {
      const errorMessage = `Unable to upload file(s) to OpenStack Swift. ${e}`;
@@ -1573,15 +1920,22 @@ class OpenStackSwiftPublish {
  async fetchTechDocsMetadata(entityName) {
    return await new Promise(async (resolve, reject) => {
      const entityRootDir = `${entityName.namespace}/${entityName.kind}/${entityName.name}`;
-      const downloadResponse = await this.storageClient.download(
+      const downloadResponse = await this.storageClient.download(
+        this.containerName,
+        `${entityRootDir}/techdocs_metadata.json`
+      );
      if (!(downloadResponse instanceof types.NotFound)) {
        const stream = downloadResponse.data;
        try {
          const techdocsMetadataJson = await streamToBuffer(stream);
          if (!techdocsMetadataJson) {
-            throw new Error(
+            throw new Error(
+              `Unable to parse the techdocs metadata file ${entityRootDir}/techdocs_metadata.json.`
+            );
          }
-          const techdocsMetadata = JSON5__default["default"].parse(
+          const techdocsMetadata = JSON5__default["default"].parse(
+            techdocsMetadataJson.toString("utf-8")
+          );
          resolve(techdocsMetadata);
        } catch (err) {
          errors.assertError(err);
@@ -1600,21 +1954,30 @@ class OpenStackSwiftPublish {
      const filePath = decodeURI(req.path.replace(/^\//, ""));
      const fileExtension = path__default["default"].extname(filePath);
      const responseHeaders = getHeadersForFileExtension(fileExtension);
-      const downloadResponse = await this.storageClient.download(
+      const downloadResponse = await this.storageClient.download(
+        this.containerName,
+        filePath
+      );
      if (!(downloadResponse instanceof types.NotFound)) {
        const stream = downloadResponse.data;
        try {
-          for (const [headerKey, headerValue] of Object.entries(
+          for (const [headerKey, headerValue] of Object.entries(
+            responseHeaders
+          )) {
            res.setHeader(headerKey, headerValue);
          }
          res.send(await streamToBuffer(stream));
        } catch (err) {
          errors.assertError(err);
-          this.logger.warn(
+          this.logger.warn(
+            `TechDocs OpenStack swift router failed to serve content from container ${this.containerName} at path ${filePath}: ${err.message}`
+          );
          res.status(404).send("File Not Found");
        }
      } else {
-        this.logger.warn(
+        this.logger.warn(
+          `TechDocs OpenStack swift router failed to serve content from container ${this.containerName} at path ${filePath}: Not found`
+        );
        res.status(404).send("File Not Found");
      }
    };
@@ -1622,7 +1985,10 @@ class OpenStackSwiftPublish {
  async hasDocsBeenGenerated(entity) {
    const entityRootDir = `${entity.metadata.namespace}/${entity.kind}/${entity.metadata.name}`;
    try {
-      const fileResponse = await this.storageClient.getMetadata(
+      const fileResponse = await this.storageClient.getMetadata(
+        this.containerName,
+        `${entityRootDir}/index.html`
+      );
      if (!(fileResponse instanceof types.NotFound)) {
        return true;
      }
@@ -1639,34 +2005,47 @@ class OpenStackSwiftPublish {
  }) {
    const allObjects = await this.getAllObjectsFromContainer();
    const limiter = createLimiter__default["default"](concurrency);
-    await Promise.all(
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+    await Promise.all(
+      allObjects.map(
+        (f) => limiter(async (file) => {
+          let newPath;
+          try {
+            newPath = lowerCaseEntityTripletInStoragePath(file);
+          } catch (e) {
+            errors.assertError(e);
+            this.logger.warn(e.message);
+            return;
+          }
+          if (file === newPath) {
+            return;
+          }
+          try {
+            this.logger.verbose(`Migrating ${file} to ${newPath}`);
+            await this.storageClient.copy(
+              this.containerName,
+              file,
+              this.containerName,
+              newPath
+            );
+            if (removeOriginal) {
+              await this.storageClient.delete(this.containerName, file);
+            }
+          } catch (e) {
+            errors.assertError(e);
+            this.logger.warn(`Unable to migrate ${file}: ${e.message}`);
+          }
+        }, f)
+      )
+    );
  }
  async getAllObjectsFromContainer({ prefix } = { prefix: "" }) {
    let objects = [];
    const OSS_MAX_LIMIT = Math.pow(2, 31) - 1;
-    const allObjects = await this.storageClient.list(
+    const allObjects = await this.storageClient.list(
+      this.containerName,
+      prefix,
+      OSS_MAX_LIMIT
+    );
    objects = allObjects.map((object) => object.name);
    return objects;
  }
@@ -1675,7 +2054,9 @@ class OpenStackSwiftPublish {
 class Publisher {
  static async fromConfig(config, { logger, discovery }) {
    var _a;
-    const publisherType = (_a = config.getOptionalString(
+    const publisherType = (_a = config.getOptionalString(
+      "techdocs.publisher.type"
+    )) != null ? _a : "local";
    switch (publisherType) {
      case "googleGcs":
        logger.info("Creating Google Storage Bucket publisher for TechDocs");
@@ -1684,10 +2065,14 @@ class Publisher {
        logger.info("Creating AWS S3 Bucket publisher for TechDocs");
        return AwsS3Publish.fromConfig(config, logger);
      case "azureBlobStorage":
-        logger.info(
+        logger.info(
+          "Creating Azure Blob Storage Container publisher for TechDocs"
+        );
        return AzureBlobStoragePublish.fromConfig(config, logger);
      case "openStackSwift":
-        logger.info(
+        logger.info(
+          "Creating OpenStack Swift Container publisher for TechDocs"
+        );
        return OpenStackSwiftPublish.fromConfig(config, logger);
      case "local":
        logger.info("Creating Local publisher for TechDocs");