@backstage/plugin-techdocs-node 1.12.0-next.2 → 1.12.1

This diff compares the contents of publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
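The substance of the change below is mechanical: the bundled CommonJS output renames its module-interop helper from `_interopDefaultLegacy` to `_interopDefaultCompat`, switches every call site from bracket access (`path__default["default"]`) to dot access (`path__default.default`), and drops the `Object.defineProperty(exports, '__esModule', ...)` marker at the top of the file. A minimal before/after sketch, assembled from the hunks below; the helper and assignment lines are copied from the bundle, while the `join` call lines are illustrative only and not part of the package's public API:

// Before (1.12.0-next.2)
function _interopDefaultLegacy (e) { return e && typeof e === 'object' && 'default' in e ? e : { 'default': e }; }
var path = require('path');
var path__default = /*#__PURE__*/_interopDefaultLegacy(path);
path__default["default"].join("a", "b"); // bracket access at every call site

// After (1.12.1)
function _interopDefaultCompat (e) { return e && typeof e === 'object' && 'default' in e ? e : { default: e }; }
var path__default = /*#__PURE__*/_interopDefaultCompat(path);
path__default.default.join("a", "b"); // dot access; the helper's behavior is unchanged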
package/dist/index.cjs.js CHANGED
@@ -1,7 +1,5 @@
  'use strict';

- Object.defineProperty(exports, '__esModule', { value: true });
-
  var path = require('path');
  var integration = require('@backstage/integration');
  var backendCommon = require('@backstage/backend-common');
@@ -31,25 +29,25 @@ var openstackSwiftSdk = require('@trendyol-js/openstack-swift-sdk');
  var types = require('@trendyol-js/openstack-swift-sdk/lib/types');
  var backendPluginApi = require('@backstage/backend-plugin-api');

- function _interopDefaultLegacy (e) { return e && typeof e === 'object' && 'default' in e ? e : { 'default': e }; }
+ function _interopDefaultCompat (e) { return e && typeof e === 'object' && 'default' in e ? e : { default: e }; }

- var path__default = /*#__PURE__*/_interopDefaultLegacy(path);
- var fs__default = /*#__PURE__*/_interopDefaultLegacy(fs);
- var gitUrlParse__default = /*#__PURE__*/_interopDefaultLegacy(gitUrlParse);
- var yaml__default = /*#__PURE__*/_interopDefaultLegacy(yaml);
- var mime__default = /*#__PURE__*/_interopDefaultLegacy(mime);
- var createLimiter__default = /*#__PURE__*/_interopDefaultLegacy(createLimiter);
- var recursiveReadDir__default = /*#__PURE__*/_interopDefaultLegacy(recursiveReadDir);
- var JSON5__default = /*#__PURE__*/_interopDefaultLegacy(JSON5);
- var express__default = /*#__PURE__*/_interopDefaultLegacy(express);
- var os__default = /*#__PURE__*/_interopDefaultLegacy(os);
+ var path__default = /*#__PURE__*/_interopDefaultCompat(path);
+ var fs__default = /*#__PURE__*/_interopDefaultCompat(fs);
+ var gitUrlParse__default = /*#__PURE__*/_interopDefaultCompat(gitUrlParse);
+ var yaml__default = /*#__PURE__*/_interopDefaultCompat(yaml);
+ var mime__default = /*#__PURE__*/_interopDefaultCompat(mime);
+ var createLimiter__default = /*#__PURE__*/_interopDefaultCompat(createLimiter);
+ var recursiveReadDir__default = /*#__PURE__*/_interopDefaultCompat(recursiveReadDir);
+ var JSON5__default = /*#__PURE__*/_interopDefaultCompat(JSON5);
+ var express__default = /*#__PURE__*/_interopDefaultCompat(express);
+ var os__default = /*#__PURE__*/_interopDefaultCompat(os);

  const getContentTypeForExtension = (ext) => {
  const defaultContentType = "text/plain; charset=utf-8";
  if (ext.match(/htm|xml|svg/i)) {
  return defaultContentType;
  }
- return mime__default["default"].contentType(ext) || defaultContentType;
+ return mime__default.default.contentType(ext) || defaultContentType;
  };
  const getHeadersForFileExtension = (fileExtension) => {
  return {
@@ -57,24 +55,24 @@ const getHeadersForFileExtension = (fileExtension) => {
  };
  };
  const getFileTreeRecursively = async (rootDirPath) => {
- const fileList = await recursiveReadDir__default["default"](rootDirPath).catch((error) => {
+ const fileList = await recursiveReadDir__default.default(rootDirPath).catch((error) => {
  throw new Error(`Failed to read template directory: ${error.message}`);
  });
  return fileList;
  };
  const lowerCaseEntityTriplet = (posixPath) => {
- const [namespace, kind, name, ...rest] = posixPath.split(path__default["default"].posix.sep);
+ const [namespace, kind, name, ...rest] = posixPath.split(path__default.default.posix.sep);
  const lowerNamespace = namespace.toLowerCase();
  const lowerKind = kind.toLowerCase();
  const lowerName = name.toLowerCase();
- return [lowerNamespace, lowerKind, lowerName, ...rest].join(path__default["default"].posix.sep);
+ return [lowerNamespace, lowerKind, lowerName, ...rest].join(path__default.default.posix.sep);
  };
  const lowerCaseEntityTripletInStoragePath = (originalPath) => {
  let posixPath = originalPath;
- if (originalPath.includes(path__default["default"].win32.sep)) {
- posixPath = originalPath.split(path__default["default"].win32.sep).join(path__default["default"].posix.sep);
+ if (originalPath.includes(path__default.default.win32.sep)) {
+ posixPath = originalPath.split(path__default.default.win32.sep).join(path__default.default.posix.sep);
  }
- const parts = posixPath.split(path__default["default"].posix.sep);
+ const parts = posixPath.split(path__default.default.posix.sep);
  if (parts[0] === "") {
  parts.shift();
  }
@@ -83,14 +81,14 @@ const lowerCaseEntityTripletInStoragePath = (originalPath) => {
  `Encountered file unmanaged by TechDocs ${originalPath}. Skipping.`
  );
  }
- return lowerCaseEntityTriplet(parts.join(path__default["default"].posix.sep));
+ return lowerCaseEntityTriplet(parts.join(path__default.default.posix.sep));
  };
  const normalizeExternalStorageRootPath = (posixPath) => {
  let normalizedPath = posixPath;
- if (posixPath.startsWith(path__default["default"].posix.sep)) {
+ if (posixPath.startsWith(path__default.default.posix.sep)) {
  normalizedPath = posixPath.slice(1);
  }
- if (normalizedPath.endsWith(path__default["default"].posix.sep)) {
+ if (normalizedPath.endsWith(path__default.default.posix.sep)) {
  normalizedPath = normalizedPath.slice(0, normalizedPath.length - 1);
  }
  return normalizedPath;
@@ -111,19 +109,19 @@ const getStaleFiles = (newFiles, oldFiles) => {
  };
  const getCloudPathForLocalPath = (entity, localPath = "", useLegacyPathCasing = false, externalStorageRootPath = "") => {
  var _a, _b;
- const relativeFilePathPosix = localPath.split(path__default["default"].sep).join(path__default["default"].posix.sep);
+ const relativeFilePathPosix = localPath.split(path__default.default.sep).join(path__default.default.posix.sep);
  const entityRootDir = `${(_b = (_a = entity.metadata) == null ? void 0 : _a.namespace) != null ? _b : catalogModel.DEFAULT_NAMESPACE}/${entity.kind}/${entity.metadata.name}`;
  const relativeFilePathTriplet = `${entityRootDir}/${relativeFilePathPosix}`;
  const destination = useLegacyPathCasing ? relativeFilePathTriplet : lowerCaseEntityTriplet(relativeFilePathTriplet);
  const destinationWithRoot = [
  // The extra filter prevents unintended double slashes and prefixes.
- ...externalStorageRootPath.split(path__default["default"].posix.sep).filter((s) => s !== ""),
+ ...externalStorageRootPath.split(path__default.default.posix.sep).filter((s) => s !== ""),
  destination
  ].join("/");
  return destinationWithRoot;
  };
  const bulkStorageOperation = async (operation, args, { concurrencyLimit } = { concurrencyLimit: 25 }) => {
- const limiter = createLimiter__default["default"](concurrencyLimit);
+ const limiter = createLimiter__default.default(concurrencyLimit);
  await Promise.all(args.map((arg) => limiter(operation, arg)));
  };

@@ -163,7 +161,7 @@ const getRepoUrlFromLocationAnnotation = (parsedLocationAnnotation, scmIntegrati
  if (locationType === "url") {
  const integration = scmIntegrations.byUrl(target);
  if (integration && ["github", "gitlab", "bitbucketServer"].includes(integration.type)) {
- const { filepathtype } = gitUrlParse__default["default"](target);
+ const { filepathtype } = gitUrlParse__default.default(target);
  if (filepathtype === "") {
  return { repo_url: target };
  }
@@ -212,16 +210,16 @@ const MKDOCS_SCHEMA = yaml.DEFAULT_SCHEMA.extend([
  const generateMkdocsYml = async (inputDir, siteOptions) => {
  var _a;
  try {
- const mkdocsYmlPath = path__default["default"].join(inputDir, "mkdocs.yml");
+ const mkdocsYmlPath = path__default.default.join(inputDir, "mkdocs.yml");
  const defaultSiteName = (_a = siteOptions == null ? void 0 : siteOptions.name) != null ? _a : "Documentation Site";
  const defaultMkdocsContent = {
  site_name: defaultSiteName,
  docs_dir: "docs",
  plugins: ["techdocs-core"]
  };
- await fs__default["default"].writeFile(
+ await fs__default.default.writeFile(
  mkdocsYmlPath,
- yaml__default["default"].dump(defaultMkdocsContent, { schema: MKDOCS_SCHEMA })
+ yaml__default.default.dump(defaultMkdocsContent, { schema: MKDOCS_SCHEMA })
  );
  } catch (error) {
  throw new errors.ForwardedError("Could not generate mkdocs.yml file", error);
@@ -232,29 +230,29 @@ const getMkdocsYml = async (inputDir, options) => {
  let mkdocsYmlFileString;
  try {
  if (options == null ? void 0 : options.mkdocsConfigFileName) {
- mkdocsYmlPath = path__default["default"].join(inputDir, options.mkdocsConfigFileName);
- if (!await fs__default["default"].pathExists(mkdocsYmlPath)) {
+ mkdocsYmlPath = path__default.default.join(inputDir, options.mkdocsConfigFileName);
+ if (!await fs__default.default.pathExists(mkdocsYmlPath)) {
  throw new Error(`The specified file ${mkdocsYmlPath} does not exist`);
  }
- mkdocsYmlFileString = await fs__default["default"].readFile(mkdocsYmlPath, "utf8");
+ mkdocsYmlFileString = await fs__default.default.readFile(mkdocsYmlPath, "utf8");
  return {
  path: mkdocsYmlPath,
  content: mkdocsYmlFileString,
  configIsTemporary: false
  };
  }
- mkdocsYmlPath = path__default["default"].join(inputDir, "mkdocs.yaml");
- if (await fs__default["default"].pathExists(mkdocsYmlPath)) {
- mkdocsYmlFileString = await fs__default["default"].readFile(mkdocsYmlPath, "utf8");
+ mkdocsYmlPath = path__default.default.join(inputDir, "mkdocs.yaml");
+ if (await fs__default.default.pathExists(mkdocsYmlPath)) {
+ mkdocsYmlFileString = await fs__default.default.readFile(mkdocsYmlPath, "utf8");
  return {
  path: mkdocsYmlPath,
  content: mkdocsYmlFileString,
  configIsTemporary: false
  };
  }
- mkdocsYmlPath = path__default["default"].join(inputDir, "mkdocs.yml");
- if (await fs__default["default"].pathExists(mkdocsYmlPath)) {
- mkdocsYmlFileString = await fs__default["default"].readFile(mkdocsYmlPath, "utf8");
+ mkdocsYmlPath = path__default.default.join(inputDir, "mkdocs.yml");
+ if (await fs__default.default.pathExists(mkdocsYmlPath)) {
+ mkdocsYmlFileString = await fs__default.default.readFile(mkdocsYmlPath, "utf8");
  return {
  path: mkdocsYmlPath,
  content: mkdocsYmlFileString,
@@ -262,7 +260,7 @@ const getMkdocsYml = async (inputDir, options) => {
  };
  }
  await generateMkdocsYml(inputDir, options);
- mkdocsYmlFileString = await fs__default["default"].readFile(mkdocsYmlPath, "utf8");
+ mkdocsYmlFileString = await fs__default.default.readFile(mkdocsYmlPath, "utf8");
  } catch (error) {
  throw new errors.ForwardedError(
  "Could not read MkDocs YAML config file mkdocs.yml or mkdocs.yaml or default for validation",
@@ -276,7 +274,7 @@ const getMkdocsYml = async (inputDir, options) => {
  };
  };
  const validateMkdocsYaml = async (inputDir, mkdocsYmlFileString) => {
- const mkdocsYml = yaml__default["default"].load(mkdocsYmlFileString, {
+ const mkdocsYml = yaml__default.default.load(mkdocsYmlFileString, {
  schema: MKDOCS_SCHEMA
  });
  if (mkdocsYml === null || typeof mkdocsYml !== "object") {
@@ -296,25 +294,25 @@ const patchIndexPreBuild = async ({
  logger,
  docsDir = "docs"
  }) => {
- const docsPath = path__default["default"].join(inputDir, docsDir);
- const indexMdPath = path__default["default"].join(docsPath, "index.md");
- if (await fs__default["default"].pathExists(indexMdPath)) {
+ const docsPath = path__default.default.join(inputDir, docsDir);
+ const indexMdPath = path__default.default.join(docsPath, "index.md");
+ if (await fs__default.default.pathExists(indexMdPath)) {
  return;
  }
- logger.warn(`${path__default["default"].join(docsDir, "index.md")} not found.`);
+ logger.warn(`${path__default.default.join(docsDir, "index.md")} not found.`);
  const fallbacks = [
- path__default["default"].join(docsPath, "README.md"),
- path__default["default"].join(docsPath, "readme.md"),
- path__default["default"].join(inputDir, "README.md"),
- path__default["default"].join(inputDir, "readme.md")
+ path__default.default.join(docsPath, "README.md"),
+ path__default.default.join(docsPath, "readme.md"),
+ path__default.default.join(inputDir, "README.md"),
+ path__default.default.join(inputDir, "readme.md")
  ];
- await fs__default["default"].ensureDir(docsPath);
+ await fs__default.default.ensureDir(docsPath);
  for (const filePath of fallbacks) {
  try {
- await fs__default["default"].copyFile(filePath, indexMdPath);
+ await fs__default.default.copyFile(filePath, indexMdPath);
  return;
  } catch (error) {
- logger.warn(`${path__default["default"].relative(inputDir, filePath)} not found.`);
+ logger.warn(`${path__default.default.relative(inputDir, filePath)} not found.`);
  }
  }
  logger.warn(
@@ -325,15 +323,15 @@ const patchIndexPreBuild = async ({
  );
  };
  const createOrUpdateMetadata = async (techdocsMetadataPath, logger) => {
- const techdocsMetadataDir = techdocsMetadataPath.split(path__default["default"].sep).slice(0, -1).join(path__default["default"].sep);
+ const techdocsMetadataDir = techdocsMetadataPath.split(path__default.default.sep).slice(0, -1).join(path__default.default.sep);
  try {
- await fs__default["default"].access(techdocsMetadataPath, fs__default["default"].constants.F_OK);
+ await fs__default.default.access(techdocsMetadataPath, fs__default.default.constants.F_OK);
  } catch (err) {
- await fs__default["default"].writeJson(techdocsMetadataPath, JSON.parse("{}"));
+ await fs__default.default.writeJson(techdocsMetadataPath, JSON.parse("{}"));
  }
  let json;
  try {
- json = await fs__default["default"].readJson(techdocsMetadataPath);
+ json = await fs__default.default.readJson(techdocsMetadataPath);
  } catch (err) {
  errors.assertError(err);
  const message = `Invalid JSON at ${techdocsMetadataPath} with error ${err.message}`;
@@ -343,27 +341,27 @@ const createOrUpdateMetadata = async (techdocsMetadataPath, logger) => {
  json.build_timestamp = Date.now();
  try {
  json.files = (await getFileTreeRecursively(techdocsMetadataDir)).map(
- (file) => file.replace(`${techdocsMetadataDir}${path__default["default"].sep}`, "")
+ (file) => file.replace(`${techdocsMetadataDir}${path__default.default.sep}`, "")
  );
  } catch (err) {
  errors.assertError(err);
  json.files = [];
  logger.warn(`Unable to add files list to metadata: ${err.message}`);
  }
- await fs__default["default"].writeJson(techdocsMetadataPath, json);
+ await fs__default.default.writeJson(techdocsMetadataPath, json);
  return;
  };
  const storeEtagMetadata = async (techdocsMetadataPath, etag) => {
- const json = await fs__default["default"].readJson(techdocsMetadataPath);
+ const json = await fs__default.default.readJson(techdocsMetadataPath);
  json.etag = etag;
- await fs__default["default"].writeJson(techdocsMetadataPath, json);
+ await fs__default.default.writeJson(techdocsMetadataPath, json);
  };

  const patchMkdocsFile = async (mkdocsYmlPath, logger, updateAction) => {
  let didEdit = false;
  let mkdocsYmlFileString;
  try {
- mkdocsYmlFileString = await fs__default["default"].readFile(mkdocsYmlPath, "utf8");
+ mkdocsYmlFileString = await fs__default.default.readFile(mkdocsYmlPath, "utf8");
  } catch (error) {
  errors.assertError(error);
  logger.warn(
@@ -373,7 +371,7 @@ const patchMkdocsFile = async (mkdocsYmlPath, logger, updateAction) => {
  }
  let mkdocsYml;
  try {
- mkdocsYml = yaml__default["default"].load(mkdocsYmlFileString, { schema: MKDOCS_SCHEMA });
+ mkdocsYml = yaml__default.default.load(mkdocsYmlFileString, { schema: MKDOCS_SCHEMA });
  if (typeof mkdocsYml === "string" || typeof mkdocsYml === "undefined") {
  throw new Error("Bad YAML format.");
  }
@@ -387,9 +385,9 @@ const patchMkdocsFile = async (mkdocsYmlPath, logger, updateAction) => {
  didEdit = updateAction(mkdocsYml);
  try {
  if (didEdit) {
- await fs__default["default"].writeFile(
+ await fs__default.default.writeFile(
  mkdocsYmlPath,
- yaml__default["default"].dump(mkdocsYml, { schema: MKDOCS_SCHEMA }),
+ yaml__default.default.dump(mkdocsYml, { schema: MKDOCS_SCHEMA }),
  "utf8"
  );
  }
@@ -562,12 +560,12 @@ const _TechdocsGenerator = class _TechdocsGenerator {
  );
  }
  await createOrUpdateMetadata(
- path__default["default"].join(outputDir, "techdocs_metadata.json"),
+ path__default.default.join(outputDir, "techdocs_metadata.json"),
  childLogger
  );
  if (etag) {
  await storeEtagMetadata(
- path__default["default"].join(outputDir, "techdocs_metadata.json"),
+ path__default.default.join(outputDir, "techdocs_metadata.json"),
  etag
  );
  }
@@ -680,7 +678,7 @@ const transformDirLocation = (entity, dirAnnotation, scmIntegrations) => {
  }
  case "file": {
  const target = backendCommon.resolveSafeChildPath(
- path__default["default"].dirname(location.target),
+ path__default.default.dirname(location.target),
  dirAnnotation.target
  );
  return {
@@ -1071,8 +1069,8 @@ class AwsS3Publish {
  absoluteFilesToUpload = await getFileTreeRecursively(directory);
  await bulkStorageOperation(
  async (absoluteFilePath) => {
- const relativeFilePath = path__default["default"].relative(directory, absoluteFilePath);
- const fileStream = fs__default["default"].createReadStream(absoluteFilePath);
+ const relativeFilePath = path__default.default.relative(directory, absoluteFilePath);
+ const fileStream = fs__default.default.createReadStream(absoluteFilePath);
  const params = {
  Bucket: this.bucketName,
  Key: getCloudPathForLocalPath(
@@ -1106,7 +1104,7 @@ class AwsS3Publish {
  const relativeFilesToUpload = absoluteFilesToUpload.map(
  (absoluteFilePath) => getCloudPathForLocalPath(
  entity,
- path__default["default"].relative(directory, absoluteFilePath),
+ path__default.default.relative(directory, absoluteFilePath),
  useLegacyPathCasing,
  bucketRootPath
  )
@@ -1138,7 +1136,7 @@ class AwsS3Publish {
  return await new Promise(async (resolve, reject) => {
  const entityTriplet = `${entityName.namespace}/${entityName.kind}/${entityName.name}`;
  const entityDir = this.legacyPathCasing ? entityTriplet : lowerCaseEntityTriplet(entityTriplet);
- const entityRootDir = path__default["default"].posix.join(this.bucketRootPath, entityDir);
+ const entityRootDir = path__default.default.posix.join(this.bucketRootPath, entityDir);
  try {
  const resp = await this.storageClient.send(
  new clientS3.GetObjectCommand({
@@ -1154,7 +1152,7 @@ class AwsS3Publish {
  `Unable to parse the techdocs metadata file ${entityRootDir}/techdocs_metadata.json.`
  );
  }
- const techdocsMetadata = JSON5__default["default"].parse(
+ const techdocsMetadata = JSON5__default.default.parse(
  techdocsMetadataJson.toString("utf-8")
  );
  resolve(techdocsMetadata);
@@ -1175,8 +1173,8 @@ class AwsS3Publish {
  return async (req, res) => {
  const decodedUri = decodeURI(req.path.replace(/^\//, ""));
  const filePathNoRoot = this.legacyPathCasing ? decodedUri : lowerCaseEntityTripletInStoragePath(decodedUri);
- const filePath = path__default["default"].posix.join(this.bucketRootPath, filePathNoRoot);
- const fileExtension = path__default["default"].extname(filePath);
+ const filePath = path__default.default.posix.join(this.bucketRootPath, filePathNoRoot);
+ const fileExtension = path__default.default.extname(filePath);
  const responseHeaders = getHeadersForFileExtension(fileExtension);
  try {
  const resp = await this.storageClient.send(
@@ -1205,7 +1203,7 @@ class AwsS3Publish {
  try {
  const entityTriplet = `${entity.metadata.namespace}/${entity.kind}/${entity.metadata.name}`;
  const entityDir = this.legacyPathCasing ? entityTriplet : lowerCaseEntityTriplet(entityTriplet);
- const entityRootDir = path__default["default"].posix.join(this.bucketRootPath, entityDir);
+ const entityRootDir = path__default.default.posix.join(this.bucketRootPath, entityDir);
  await this.storageClient.send(
  new clientS3.HeadObjectCommand({
  Bucket: this.bucketName,
@@ -1222,7 +1220,7 @@ class AwsS3Publish {
  concurrency = 25
  }) {
  const allObjects = await this.getAllObjectsFromBucket();
- const limiter = createLimiter__default["default"](concurrency);
+ const limiter = createLimiter__default.default(concurrency);
  await Promise.all(
  allObjects.map(
  (f) => limiter(async (file) => {
@@ -1415,8 +1413,8 @@ class AzureBlobStoragePublish {
  const failedOperations = [];
  await bulkStorageOperation(
  async (absoluteFilePath) => {
- const relativeFilePath = path__default["default"].normalize(
- path__default["default"].relative(directory, absoluteFilePath)
+ const relativeFilePath = path__default.default.normalize(
+ path__default.default.relative(directory, absoluteFilePath)
  );
  const remotePath = getCloudPathForLocalPath(
  entity,
@@ -1454,7 +1452,7 @@ class AzureBlobStoragePublish {
  const relativeFilesToUpload = absoluteFilesToUpload.map(
  (absoluteFilePath) => getCloudPathForLocalPath(
  entity,
- path__default["default"].relative(directory, absoluteFilePath),
+ path__default.default.relative(directory, absoluteFilePath),
  useLegacyPathCasing
  )
  );
@@ -1505,7 +1503,7 @@ class AzureBlobStoragePublish {
  `Unable to parse the techdocs metadata file ${entityRootDir}/techdocs_metadata.json.`
  );
  }
- const techdocsMetadata = JSON5__default["default"].parse(
+ const techdocsMetadata = JSON5__default.default.parse(
  techdocsMetadataJson.toString("utf-8")
  );
  return techdocsMetadata;
@@ -1520,7 +1518,7 @@ class AzureBlobStoragePublish {
  return (req, res) => {
  const decodedUri = decodeURI(req.path.replace(/^\//, ""));
  const filePath = this.legacyPathCasing ? decodedUri : lowerCaseEntityTripletInStoragePath(decodedUri);
- const fileExtension = path__default["default"].extname(filePath);
+ const fileExtension = path__default.default.extname(filePath);
  const responseHeaders = getHeadersForFileExtension(fileExtension);
  this.download(this.containerName, filePath).then((fileContent) => {
  for (const [headerKey, headerValue] of Object.entries(
@@ -1580,7 +1578,7 @@ class AzureBlobStoragePublish {
  concurrency = 25
  }) {
  const promises = [];
- const limiter = createLimiter__default["default"](concurrency);
+ const limiter = createLimiter__default.default(concurrency);
  const container = this.storageClient.getContainerClient(this.containerName);
  for await (const blob of container.listBlobsFlat()) {
  promises.push(
@@ -1786,7 +1784,7 @@ class GoogleGCSPublish {
  absoluteFilesToUpload = await getFileTreeRecursively(directory);
  await bulkStorageOperation(
  async (absoluteFilePath) => {
- const relativeFilePath = path__default["default"].relative(directory, absoluteFilePath);
+ const relativeFilePath = path__default.default.relative(directory, absoluteFilePath);
  const destination = getCloudPathForLocalPath(
  entity,
  relativeFilePath,
@@ -1811,7 +1809,7 @@ class GoogleGCSPublish {
  const relativeFilesToUpload = absoluteFilesToUpload.map(
  (absoluteFilePath) => getCloudPathForLocalPath(
  entity,
- path__default["default"].relative(directory, absoluteFilePath),
+ path__default.default.relative(directory, absoluteFilePath),
  useLegacyPathCasing,
  bucketRootPath
  )
@@ -1837,7 +1835,7 @@ class GoogleGCSPublish {
  return new Promise((resolve, reject) => {
  const entityTriplet = `${entityName.namespace}/${entityName.kind}/${entityName.name}`;
  const entityDir = this.legacyPathCasing ? entityTriplet : lowerCaseEntityTriplet(entityTriplet);
- const entityRootDir = path__default["default"].posix.join(this.bucketRootPath, entityDir);
+ const entityRootDir = path__default.default.posix.join(this.bucketRootPath, entityDir);
  const fileStreamChunks = [];
  this.storageClient.bucket(this.bucketName).file(`${entityRootDir}/techdocs_metadata.json`).createReadStream().on("error", (err) => {
  this.logger.error(err.message);
@@ -1846,7 +1844,7 @@ class GoogleGCSPublish {
  fileStreamChunks.push(chunk);
  }).on("end", () => {
  const techdocsMetadataJson = Buffer.concat(fileStreamChunks).toString("utf-8");
- resolve(JSON5__default["default"].parse(techdocsMetadataJson));
+ resolve(JSON5__default.default.parse(techdocsMetadataJson));
  });
  });
  }
@@ -1857,8 +1855,8 @@ class GoogleGCSPublish {
  return (req, res) => {
  const decodedUri = decodeURI(req.path.replace(/^\//, ""));
  const filePathNoRoot = this.legacyPathCasing ? decodedUri : lowerCaseEntityTripletInStoragePath(decodedUri);
- const filePath = path__default["default"].posix.join(this.bucketRootPath, filePathNoRoot);
- const fileExtension = path__default["default"].extname(filePath);
+ const filePath = path__default.default.posix.join(this.bucketRootPath, filePathNoRoot);
+ const fileExtension = path__default.default.extname(filePath);
  const responseHeaders = getHeadersForFileExtension(fileExtension);
  this.storageClient.bucket(this.bucketName).file(filePath).createReadStream().on("pipe", () => {
  res.writeHead(200, responseHeaders);
@@ -1882,7 +1880,7 @@ class GoogleGCSPublish {
  return new Promise((resolve) => {
  const entityTriplet = `${entity.metadata.namespace}/${entity.kind}/${entity.metadata.name}`;
  const entityDir = this.legacyPathCasing ? entityTriplet : lowerCaseEntityTriplet(entityTriplet);
- const entityRootDir = path__default["default"].posix.join(this.bucketRootPath, entityDir);
+ const entityRootDir = path__default.default.posix.join(this.bucketRootPath, entityDir);
  this.storageClient.bucket(this.bucketName).file(`${entityRootDir}/index.html`).exists().then((response) => {
  resolve(response[0]);
  }).catch(() => {
@@ -1953,7 +1951,7 @@ class LocalPublish {
  "static/docs"
  );
  } catch (err) {
- staticDocsDir = os__default["default"].tmpdir();
+ staticDocsDir = os__default.default.tmpdir();
  }
  }
  return new LocalPublish({
@@ -1989,12 +1987,12 @@ class LocalPublish {
  error
  );
  }
- if (!fs__default["default"].existsSync(publishDir)) {
+ if (!fs__default.default.existsSync(publishDir)) {
  this.logger.info(`Could not find ${publishDir}, creating the directory.`);
- fs__default["default"].mkdirSync(publishDir, { recursive: true });
+ fs__default.default.mkdirSync(publishDir, { recursive: true });
  }
  try {
- await fs__default["default"].copy(directory, publishDir);
+ await fs__default.default.copy(directory, publishDir);
  this.logger.info(`Published site stored at ${publishDir}`);
  } catch (error) {
  this.logger.debug(
@@ -2033,7 +2031,7 @@ class LocalPublish {
  );
  }
  try {
- return await fs__default["default"].readJson(metadataPath);
+ return await fs__default.default.readJson(metadataPath);
  } catch (err) {
  throw new errors.ForwardedError(
  `Unable to read techdocs_metadata.json at ${metadataPath}. Error: ${err}`,
@@ -2042,7 +2040,7 @@ class LocalPublish {
  }
  }
  docsRouter() {
- const router = express__default["default"].Router();
+ const router = express__default.default.Router();
  router.use((req, res, next) => {
  if (this.legacyPathCasing) {
  return next();
@@ -2064,10 +2062,10 @@ class LocalPublish {
  return res.redirect(301, req.baseUrl + newPath);
  });
  router.use(
- express__default["default"].static(this.staticDocsDir, {
+ express__default.default.static(this.staticDocsDir, {
  // Handle content-type header the same as all other publishers.
  setHeaders: (res, filePath) => {
- const fileExtension = path__default["default"].extname(filePath);
+ const fileExtension = path__default.default.extname(filePath);
  const headers = getHeadersForFileExtension(fileExtension);
  for (const [header, value] of Object.entries(headers)) {
  res.setHeader(header, value);
@@ -2087,7 +2085,7 @@ class LocalPublish {
  entity.metadata.name,
  "index.html"
  );
- await fs__default["default"].access(indexHtmlPath, fs__default["default"].constants.F_OK);
+ await fs__default.default.access(indexHtmlPath, fs__default.default.constants.F_OK);
  return true;
  } catch (err) {
  if (err.name === "NotAllowedError") {
@@ -2109,12 +2107,12 @@ class LocalPublish {
  concurrency = 25
  }) {
  const files = await getFileTreeRecursively(this.staticDocsDir);
- const limit = createLimiter__default["default"](concurrency);
+ const limit = createLimiter__default.default(concurrency);
  await Promise.all(
  files.map(
  (f) => limit(async (file) => {
  const relativeFile = file.replace(
- `${this.staticDocsDir}${path__default["default"].sep}`,
+ `${this.staticDocsDir}${path__default.default.sep}`,
  ""
  );
  const newFile = lowerCaseEntityTripletInStoragePath(relativeFile);
@@ -2122,7 +2120,7 @@ class LocalPublish {
  return;
  }
  await new Promise((resolve) => {
- const migrate = removeOriginal ? fs__default["default"].move : fs__default["default"].copyFile;
+ const migrate = removeOriginal ? fs__default.default.move : fs__default.default.copyFile;
  this.logger.verbose(`Migrating ${relativeFile}`);
  migrate(file, newFile, (err) => {
  if (err) {
@@ -2142,7 +2140,7 @@ class LocalPublish {
  */
  staticEntityPathJoin(...allParts) {
  let staticEntityPath = this.staticDocsDir;
- allParts.map((part) => part.split(path__default["default"].sep)).flat().forEach((part, index) => {
+ allParts.map((part) => part.split(path__default.default.sep)).flat().forEach((part, index) => {
  if (index < 3) {
  staticEntityPath = backendCommon.resolveSafeChildPath(
  staticEntityPath,
@@ -2253,16 +2251,16 @@ class OpenStackSwiftPublish {
  try {
  const objects = [];
  const allFilesToUpload = await getFileTreeRecursively(directory);
- const limiter = createLimiter__default["default"](10);
+ const limiter = createLimiter__default.default(10);
  const uploadPromises = [];
  for (const filePath of allFilesToUpload) {
- const relativeFilePath = path__default["default"].relative(directory, filePath);
- const relativeFilePathPosix = relativeFilePath.split(path__default["default"].sep).join(path__default["default"].posix.sep);
+ const relativeFilePath = path__default.default.relative(directory, filePath);
+ const relativeFilePathPosix = relativeFilePath.split(path__default.default.sep).join(path__default.default.posix.sep);
  const entityRootDir = `${entity.metadata.namespace}/${entity.kind}/${entity.metadata.name}`;
  const destination = `${entityRootDir}/${relativeFilePathPosix}`;
  objects.push(destination);
  const uploadFile = limiter(async () => {
- const fileBuffer = await fs__default["default"].readFile(filePath);
+ const fileBuffer = await fs__default.default.readFile(filePath);
  const stream = bufferToStream(fileBuffer);
  return this.storageClient.upload(
  this.containerName,
@@ -2299,7 +2297,7 @@ class OpenStackSwiftPublish {
  `Unable to parse the techdocs metadata file ${entityRootDir}/techdocs_metadata.json.`
  );
  }
- const techdocsMetadata = JSON5__default["default"].parse(
+ const techdocsMetadata = JSON5__default.default.parse(
  techdocsMetadataJson.toString("utf-8")
  );
  resolve(techdocsMetadata);
@@ -2321,7 +2319,7 @@ class OpenStackSwiftPublish {
  docsRouter() {
  return async (req, res) => {
  const filePath = decodeURI(req.path.replace(/^\//, ""));
- const fileExtension = path__default["default"].extname(filePath);
+ const fileExtension = path__default.default.extname(filePath);
  const responseHeaders = getHeadersForFileExtension(fileExtension);
  const downloadResponse = await this.storageClient.download(
  this.containerName,
@@ -2377,7 +2375,7 @@ class OpenStackSwiftPublish {
  concurrency = 25
  }) {
  const allObjects = await this.getAllObjectsFromContainer();
- const limiter = createLimiter__default["default"](concurrency);
+ const limiter = createLimiter__default.default(concurrency);
  await Promise.all(
  allObjects.map(
  (f) => limiter(async (file) => {