@backstage/plugin-techdocs-node 0.0.0-nightly-20220305022735
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +1076 -0
- package/README.md +49 -0
- package/dist/index.cjs.js +1674 -0
- package/dist/index.cjs.js.map +1 -0
- package/package.json +81 -0
package/dist/index.cjs.js
@@ -0,0 +1,1674 @@
'use strict';

Object.defineProperty(exports, '__esModule', { value: true });

var path = require('path');
var integration = require('@backstage/integration');
var backendCommon = require('@backstage/backend-common');
var errors = require('@backstage/errors');
var child_process = require('child_process');
var fs = require('fs-extra');
var gitUrlParse = require('git-url-parse');
var yaml = require('js-yaml');
var stream = require('stream');
var catalogModel = require('@backstage/catalog-model');
var mime = require('mime-types');
var createLimiter = require('p-limit');
var recursiveReadDir = require('recursive-readdir');
var aws = require('aws-sdk');
var JSON5 = require('json5');
var identity = require('@azure/identity');
var storageBlob = require('@azure/storage-blob');
var storage = require('@google-cloud/storage');
var express = require('express');
var os = require('os');
var openstackSwiftSdk = require('@trendyol-js/openstack-swift-sdk');
var types = require('@trendyol-js/openstack-swift-sdk/lib/types');

function _interopDefaultLegacy (e) { return e && typeof e === 'object' && 'default' in e ? e : { 'default': e }; }

var path__default = /*#__PURE__*/_interopDefaultLegacy(path);
var fs__default = /*#__PURE__*/_interopDefaultLegacy(fs);
var gitUrlParse__default = /*#__PURE__*/_interopDefaultLegacy(gitUrlParse);
var yaml__default = /*#__PURE__*/_interopDefaultLegacy(yaml);
var mime__default = /*#__PURE__*/_interopDefaultLegacy(mime);
var createLimiter__default = /*#__PURE__*/_interopDefaultLegacy(createLimiter);
var recursiveReadDir__default = /*#__PURE__*/_interopDefaultLegacy(recursiveReadDir);
var aws__default = /*#__PURE__*/_interopDefaultLegacy(aws);
var JSON5__default = /*#__PURE__*/_interopDefaultLegacy(JSON5);
var express__default = /*#__PURE__*/_interopDefaultLegacy(express);
var os__default = /*#__PURE__*/_interopDefaultLegacy(os);
const getContentTypeForExtension = (ext) => {
  const defaultContentType = "text/plain; charset=utf-8";
  if (ext.match(/htm|xml|svg/i)) {
    return defaultContentType;
  }
  return mime__default["default"].contentType(ext) || defaultContentType;
};
const getHeadersForFileExtension = (fileExtension) => {
  return {
    "Content-Type": getContentTypeForExtension(fileExtension)
  };
};
const getFileTreeRecursively = async (rootDirPath) => {
  const fileList = await recursiveReadDir__default["default"](rootDirPath).catch((error) => {
    throw new Error(`Failed to read template directory: ${error.message}`);
  });
  return fileList;
};
const lowerCaseEntityTriplet = (posixPath) => {
  const [namespace, kind, name, ...rest] = posixPath.split(path__default["default"].posix.sep);
  const lowerNamespace = namespace.toLowerCase();
  const lowerKind = kind.toLowerCase();
  const lowerName = name.toLowerCase();
  return [lowerNamespace, lowerKind, lowerName, ...rest].join(path__default["default"].posix.sep);
};
const lowerCaseEntityTripletInStoragePath = (originalPath) => {
  let posixPath = originalPath;
  if (originalPath.includes(path__default["default"].win32.sep)) {
    posixPath = originalPath.split(path__default["default"].win32.sep).join(path__default["default"].posix.sep);
  }
  const parts = posixPath.split(path__default["default"].posix.sep);
  if (parts[0] === "") {
    parts.shift();
  }
  if (parts.length <= 3) {
    throw new Error(`Encountered file unmanaged by TechDocs ${originalPath}. Skipping.`);
  }
  return lowerCaseEntityTriplet(parts.join(path__default["default"].posix.sep));
};
const normalizeExternalStorageRootPath = (posixPath) => {
  let normalizedPath = posixPath;
  if (posixPath.startsWith(path__default["default"].posix.sep)) {
    normalizedPath = posixPath.slice(1);
  }
  if (normalizedPath.endsWith(path__default["default"].posix.sep)) {
    normalizedPath = normalizedPath.slice(0, normalizedPath.length - 1);
  }
  return normalizedPath;
};
const getStaleFiles = (newFiles, oldFiles) => {
  const staleFiles = new Set(oldFiles);
  newFiles.forEach((newFile) => {
    staleFiles.delete(newFile);
  });
  return Array.from(staleFiles);
};
const getCloudPathForLocalPath = (entity, localPath = "", useLegacyPathCasing = false, externalStorageRootPath = "") => {
  var _a, _b;
  const relativeFilePathPosix = localPath.split(path__default["default"].sep).join(path__default["default"].posix.sep);
  const entityRootDir = `${(_b = (_a = entity.metadata) == null ? void 0 : _a.namespace) != null ? _b : catalogModel.DEFAULT_NAMESPACE}/${entity.kind}/${entity.metadata.name}`;
  const relativeFilePathTriplet = `${entityRootDir}/${relativeFilePathPosix}`;
  const destination = useLegacyPathCasing ? relativeFilePathTriplet : lowerCaseEntityTriplet(relativeFilePathTriplet);
  const destinationWithRoot = [
    ...externalStorageRootPath.split(path__default["default"].posix.sep).filter((s) => s !== ""),
    destination
  ].join("/");
  return destinationWithRoot;
};
const bulkStorageOperation = async (operation, args, { concurrencyLimit } = { concurrencyLimit: 25 }) => {
  const limiter = createLimiter__default["default"](concurrencyLimit);
  await Promise.all(args.map((arg) => limiter(operation, arg)));
};
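
For orientation (an editorial sketch, not part of the published diff): the helpers above normalize where a given entity's generated docs land in object storage. Assuming an entity named `My-Service` of kind `Component` in the `default` namespace, the expected behavior is roughly:

```js
// Hypothetical usage of the path helpers above; values are illustrative only.
const entity = {
  kind: 'Component',
  metadata: { namespace: 'default', name: 'My-Service' },
};

// Entity triplets are lower-cased unless legacy path casing is enabled:
getCloudPathForLocalPath(entity, 'index.html');
// => 'default/component/my-service/index.html'

getCloudPathForLocalPath(entity, 'index.html', true);
// => 'default/Component/My-Service/index.html'

// An optional external storage root is prepended, with empty segments dropped:
getCloudPathForLocalPath(entity, 'index.html', false, 'techdocs/root');
// => 'techdocs/root/default/component/my-service/index.html'
```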
function getGeneratorKey(entity) {
  if (!entity) {
    throw new Error("No entity provided");
  }
  return "techdocs";
}
const runCommand = async ({
  command,
  args,
  options,
  logStream = new stream.PassThrough()
}) => {
  await new Promise((resolve, reject) => {
    const process = child_process.spawn(command, args, options);
    process.stdout.on("data", (stream) => {
      logStream.write(stream);
    });
    process.stderr.on("data", (stream) => {
      logStream.write(stream);
    });
    process.on("error", (error) => {
      return reject(error);
    });
    process.on("close", (code) => {
      if (code !== 0) {
        return reject(`Command ${command} failed, exit code: ${code}`);
      }
      return resolve();
    });
  });
};
const getRepoUrlFromLocationAnnotation = (parsedLocationAnnotation, scmIntegrations, docsFolder = "docs") => {
  const { type: locationType, target } = parsedLocationAnnotation;
  if (locationType === "url") {
    const integration = scmIntegrations.byUrl(target);
    if (integration && ["github", "gitlab"].includes(integration.type)) {
      const { filepathtype } = gitUrlParse__default["default"](target);
      if (filepathtype === "") {
        return { repo_url: target };
      }
      const sourceFolder = integration.resolveUrl({
        url: `./${docsFolder}`,
        base: target
      });
      return { edit_uri: integration.resolveEditUrl(sourceFolder) };
    }
  }
  return {};
};
class UnknownTag {
  constructor(data, type) {
    this.data = data;
    this.type = type;
  }
}
const MKDOCS_SCHEMA = yaml.DEFAULT_SCHEMA.extend([
  new yaml.Type("", {
    kind: "scalar",
    multi: true,
    representName: (o) => o.type,
    represent: (o) => {
      var _a;
      return (_a = o.data) != null ? _a : "";
    },
    instanceOf: UnknownTag,
    construct: (data, type) => new UnknownTag(data, type)
  })
]);
const getMkdocsYml = async (inputDir) => {
  let mkdocsYmlPath;
  let mkdocsYmlFileString;
  try {
    mkdocsYmlPath = path__default["default"].join(inputDir, "mkdocs.yaml");
    mkdocsYmlFileString = await fs__default["default"].readFile(mkdocsYmlPath, "utf8");
  } catch {
    try {
      mkdocsYmlPath = path__default["default"].join(inputDir, "mkdocs.yml");
      mkdocsYmlFileString = await fs__default["default"].readFile(mkdocsYmlPath, "utf8");
    } catch (error) {
      throw new errors.ForwardedError("Could not read MkDocs YAML config file mkdocs.yml or mkdocs.yaml for validation", error);
    }
  }
  return {
    path: mkdocsYmlPath,
    content: mkdocsYmlFileString
  };
};
const validateMkdocsYaml = async (inputDir, mkdocsYmlFileString) => {
  const mkdocsYml = yaml__default["default"].load(mkdocsYmlFileString, {
    schema: MKDOCS_SCHEMA
  });
  if (mkdocsYml === null || typeof mkdocsYml !== "object") {
    return void 0;
  }
  const parsedMkdocsYml = mkdocsYml;
  if (parsedMkdocsYml.docs_dir && !backendCommon.isChildPath(inputDir, path.resolve(inputDir, parsedMkdocsYml.docs_dir))) {
    throw new Error(`docs_dir configuration value in mkdocs can't be an absolute directory or start with ../ for security reasons.
      Use relative paths instead which are resolved relative to your mkdocs.yml file location.`);
  }
  return parsedMkdocsYml.docs_dir;
};
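
A quick sketch (editorial, not from the diff) of what the permissive MKDOCS_SCHEMA buys: mkdocs.yml files that use plugin-specific YAML tags parse instead of throwing, and validateMkdocsYaml only rejects docs_dir values that escape the input directory. The `!ENV` tag below is illustrative of the kind of input the schema tolerates:

```js
// Hypothetical inputs; '/tmp/docs-input' is a placeholder path.
const yamlWithCustomTag = `
site_name: Example Docs
docs_dir: docs
google_analytics: !ENV GA_KEY
`;

// Parses without "unknown tag" errors; unknown tags become UnknownTag instances.
await validateMkdocsYaml('/tmp/docs-input', yamlWithCustomTag); // => 'docs'

// A docs_dir outside the input directory is rejected for security reasons:
await validateMkdocsYaml('/tmp/docs-input', 'docs_dir: ../secrets');
// => throws: docs_dir ... can't be an absolute directory or start with ../
```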
const patchIndexPreBuild = async ({
  inputDir,
  logger,
  docsDir = "docs"
}) => {
  const docsPath = path__default["default"].join(inputDir, docsDir);
  const indexMdPath = path__default["default"].join(docsPath, "index.md");
  if (await fs__default["default"].pathExists(indexMdPath)) {
    return;
  }
  logger.warn(`${path__default["default"].join(docsDir, "index.md")} not found.`);
  const fallbacks = [
    path__default["default"].join(docsPath, "README.md"),
    path__default["default"].join(docsPath, "readme.md"),
    path__default["default"].join(inputDir, "README.md"),
    path__default["default"].join(inputDir, "readme.md")
  ];
  await fs__default["default"].ensureDir(docsPath);
  for (const filePath of fallbacks) {
    try {
      await fs__default["default"].copyFile(filePath, indexMdPath);
      return;
    } catch (error) {
      logger.warn(`${path__default["default"].relative(inputDir, filePath)} not found.`);
    }
  }
  logger.warn(`Could not find any techdocs' index file. Please make sure at least one of ${[
    indexMdPath,
    ...fallbacks
  ].join(" ")} exists.`);
};
const createOrUpdateMetadata = async (techdocsMetadataPath, logger) => {
  const techdocsMetadataDir = techdocsMetadataPath.split(path__default["default"].sep).slice(0, -1).join(path__default["default"].sep);
  try {
    await fs__default["default"].access(techdocsMetadataPath, fs__default["default"].constants.F_OK);
  } catch (err) {
    await fs__default["default"].writeJson(techdocsMetadataPath, JSON.parse("{}"));
  }
  let json;
  try {
    json = await fs__default["default"].readJson(techdocsMetadataPath);
  } catch (err) {
    errors.assertError(err);
    const message = `Invalid JSON at ${techdocsMetadataPath} with error ${err.message}`;
    logger.error(message);
    throw new Error(message);
  }
  json.build_timestamp = Date.now();
  try {
    json.files = (await getFileTreeRecursively(techdocsMetadataDir)).map((file) => file.replace(`${techdocsMetadataDir}${path__default["default"].sep}`, ""));
  } catch (err) {
    errors.assertError(err);
    json.files = [];
    logger.warn(`Unable to add files list to metadata: ${err.message}`);
  }
  await fs__default["default"].writeJson(techdocsMetadataPath, json);
  return;
};
const storeEtagMetadata = async (techdocsMetadataPath, etag) => {
  const json = await fs__default["default"].readJson(techdocsMetadataPath);
  json.etag = etag;
  await fs__default["default"].writeJson(techdocsMetadataPath, json);
};

const patchMkdocsFile = async (mkdocsYmlPath, logger, updateAction) => {
  let didEdit = false;
  let mkdocsYmlFileString;
  try {
    mkdocsYmlFileString = await fs__default["default"].readFile(mkdocsYmlPath, "utf8");
  } catch (error) {
    errors.assertError(error);
    logger.warn(`Could not read MkDocs YAML config file ${mkdocsYmlPath} before running the generator: ${error.message}`);
    return;
  }
  let mkdocsYml;
  try {
    mkdocsYml = yaml__default["default"].load(mkdocsYmlFileString, { schema: MKDOCS_SCHEMA });
    if (typeof mkdocsYml === "string" || typeof mkdocsYml === "undefined") {
      throw new Error("Bad YAML format.");
    }
  } catch (error) {
    errors.assertError(error);
    logger.warn(`Error in parsing YAML at ${mkdocsYmlPath} before running the generator. ${error.message}`);
    return;
  }
  didEdit = updateAction(mkdocsYml);
  try {
    if (didEdit) {
      await fs__default["default"].writeFile(mkdocsYmlPath, yaml__default["default"].dump(mkdocsYml, { schema: MKDOCS_SCHEMA }), "utf8");
    }
  } catch (error) {
    errors.assertError(error);
    logger.warn(`Could not write to ${mkdocsYmlPath} after updating it before running the generator. ${error.message}`);
    return;
  }
};
const patchMkdocsYmlPreBuild = async (mkdocsYmlPath, logger, parsedLocationAnnotation, scmIntegrations) => {
  await patchMkdocsFile(mkdocsYmlPath, logger, (mkdocsYml) => {
    if (!("repo_url" in mkdocsYml) && !("edit_uri" in mkdocsYml)) {
      const result = getRepoUrlFromLocationAnnotation(parsedLocationAnnotation, scmIntegrations, mkdocsYml.docs_dir);
      if (result.repo_url || result.edit_uri) {
        mkdocsYml.repo_url = result.repo_url;
        mkdocsYml.edit_uri = result.edit_uri;
        logger.info(`Set ${JSON.stringify(result)}. You can disable this feature by manually setting 'repo_url' or 'edit_uri' according to the MkDocs documentation at https://www.mkdocs.org/user-guide/configuration/#repo_url`);
        return true;
      }
    }
    return false;
  });
};
const pathMkdocsYmlWithTechdocsPlugin = async (mkdocsYmlPath, logger) => {
  await patchMkdocsFile(mkdocsYmlPath, logger, (mkdocsYml) => {
    if (!("plugins" in mkdocsYml)) {
      mkdocsYml.plugins = ["techdocs-core"];
      return true;
    }
    if (mkdocsYml.plugins && !mkdocsYml.plugins.includes("techdocs-core")) {
      mkdocsYml.plugins.push("techdocs-core");
      return true;
    }
    return false;
  });
};
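
A before/after sketch (editorial; the repository URL and resulting edit_uri are invented placeholders) of what these pre-build patch helpers do to a minimal mkdocs.yml when the entity's techdocs-ref points at a GitHub URL:

```js
// Input mkdocs.yml (illustrative):
//   site_name: Example Docs
//
// After patchMkdocsYmlPreBuild + pathMkdocsYmlWithTechdocsPlugin, roughly:
//   site_name: Example Docs
//   edit_uri: https://github.com/acme/example/edit/main/docs
//   plugins:
//     - techdocs-core
//
// repo_url/edit_uri are only injected when neither key is already present, and
// the techdocs-core plugin is appended only if it is not already listed.
const annotation = { type: 'url', target: 'https://github.com/acme/example/tree/main' };
await patchMkdocsYmlPreBuild('/tmp/input/mkdocs.yml', logger, annotation, scmIntegrations);
await pathMkdocsYmlWithTechdocsPlugin('/tmp/input/mkdocs.yml', logger);
```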
const _TechdocsGenerator = class {
  static fromConfig(config, options) {
    const { containerRunner, logger } = options;
    const scmIntegrations = integration.ScmIntegrations.fromConfig(config);
    return new _TechdocsGenerator({
      logger,
      containerRunner,
      config,
      scmIntegrations
    });
  }
  constructor(options) {
    this.logger = options.logger;
    this.options = readGeneratorConfig(options.config, options.logger);
    this.containerRunner = options.containerRunner;
    this.scmIntegrations = options.scmIntegrations;
  }
  async run(options) {
    var _a;
    const {
      inputDir,
      outputDir,
      parsedLocationAnnotation,
      etag,
      logger: childLogger,
      logStream
    } = options;
    const { path: mkdocsYmlPath, content } = await getMkdocsYml(inputDir);
    const docsDir = await validateMkdocsYaml(inputDir, content);
    if (parsedLocationAnnotation) {
      await patchMkdocsYmlPreBuild(mkdocsYmlPath, childLogger, parsedLocationAnnotation, this.scmIntegrations);
      await patchIndexPreBuild({ inputDir, logger: childLogger, docsDir });
    }
    if (!this.options.omitTechdocsCoreMkdocsPlugin) {
      await pathMkdocsYmlWithTechdocsPlugin(mkdocsYmlPath, childLogger);
    }
    const mountDirs = {
      [inputDir]: "/input",
      [outputDir]: "/output"
    };
    try {
      switch (this.options.runIn) {
        case "local":
          await runCommand({
            command: "mkdocs",
            args: ["build", "-d", outputDir, "-v"],
            options: {
              cwd: inputDir
            },
            logStream
          });
          childLogger.info(`Successfully generated docs from ${inputDir} into ${outputDir} using local mkdocs`);
          break;
        case "docker":
          await this.containerRunner.runContainer({
            imageName: (_a = this.options.dockerImage) != null ? _a : _TechdocsGenerator.defaultDockerImage,
            args: ["build", "-d", "/output"],
            logStream,
            mountDirs,
            workingDir: "/input",
            envVars: { HOME: "/tmp" },
            pullImage: this.options.pullImage
          });
          childLogger.info(`Successfully generated docs from ${inputDir} into ${outputDir} using techdocs-container`);
          break;
        default:
          throw new Error(`Invalid config value "${this.options.runIn}" provided in 'techdocs.generators.techdocs'.`);
      }
    } catch (error) {
      this.logger.debug(`Failed to generate docs from ${inputDir} into ${outputDir}`);
      throw new errors.ForwardedError(`Failed to generate docs from ${inputDir} into ${outputDir}`, error);
    }
    await createOrUpdateMetadata(path__default["default"].join(outputDir, "techdocs_metadata.json"), childLogger);
    if (etag) {
      await storeEtagMetadata(path__default["default"].join(outputDir, "techdocs_metadata.json"), etag);
    }
  }
};
let TechdocsGenerator = _TechdocsGenerator;
TechdocsGenerator.defaultDockerImage = "spotify/techdocs:v0.3.7";
function readGeneratorConfig(config, logger) {
  var _a;
  const legacyGeneratorType = config.getOptionalString("techdocs.generators.techdocs");
  if (legacyGeneratorType) {
    logger.warn(`The 'techdocs.generators.techdocs' configuration key is deprecated and will be removed in the future. Please use 'techdocs.generator' instead. See here https://backstage.io/docs/features/techdocs/configuration`);
  }
  return {
    runIn: (_a = legacyGeneratorType != null ? legacyGeneratorType : config.getOptionalString("techdocs.generator.runIn")) != null ? _a : "docker",
    dockerImage: config.getOptionalString("techdocs.generator.dockerImage"),
    pullImage: config.getOptionalBoolean("techdocs.generator.pullImage"),
    omitTechdocsCoreMkdocsPlugin: config.getOptionalBoolean("techdocs.generator.mkdocs.omitTechdocsCorePlugin")
  };
}
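
A minimal wiring sketch (editorial; the `config`, `logger`, and `containerRunner` instances and all paths are assumed) showing how the generator is constructed from config and run over a prepared source directory:

```js
// readGeneratorConfig reads these keys (all under techdocs.generator unless noted):
//   runIn ('docker' | 'local', default 'docker'), dockerImage, pullImage,
//   mkdocs.omitTechdocsCorePlugin, plus the deprecated techdocs.generators.techdocs.
const generator = TechdocsGenerator.fromConfig(config, { logger, containerRunner });

await generator.run({
  inputDir: '/tmp/prepared-docs',   // where mkdocs.yml lives
  outputDir: '/tmp/generated-site', // receives the built site + techdocs_metadata.json
  parsedLocationAnnotation: { type: 'url', target: 'https://github.com/acme/example/tree/main' },
  etag: 'abc123',
  logger,
});
```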
class Generators {
  constructor() {
    this.generatorMap = /* @__PURE__ */ new Map();
  }
  static async fromConfig(config, options) {
    const generators = new Generators();
    const techdocsGenerator = TechdocsGenerator.fromConfig(config, options);
    generators.register("techdocs", techdocsGenerator);
    return generators;
  }
  register(generatorKey, generator) {
    this.generatorMap.set(generatorKey, generator);
  }
  get(entity) {
    const generatorKey = getGeneratorKey(entity);
    const generator = this.generatorMap.get(generatorKey);
    if (!generator) {
      throw new Error(`No generator registered for entity: "${generatorKey}"`);
    }
    return generator;
  }
}

const parseReferenceAnnotation = (annotationName, entity) => {
  var _a;
  const annotation = (_a = entity.metadata.annotations) == null ? void 0 : _a[annotationName];
  if (!annotation) {
    throw new errors.InputError(`No location annotation provided in entity: ${entity.metadata.name}`);
  }
  const { type, target } = catalogModel.parseLocationRef(annotation);
  return {
    type,
    target
  };
};
const transformDirLocation = (entity, dirAnnotation, scmIntegrations) => {
  const location = catalogModel.getEntitySourceLocation(entity);
  switch (location.type) {
    case "url": {
      const target = scmIntegrations.resolveUrl({
        url: dirAnnotation.target,
        base: location.target
      });
      return {
        type: "url",
        target
      };
    }
    case "file": {
      const target = backendCommon.resolveSafeChildPath(path__default["default"].dirname(location.target), dirAnnotation.target);
      return {
        type: "dir",
        target
      };
    }
    default:
      throw new errors.InputError(`Unable to resolve location type ${location.type}`);
  }
};
const getLocationForEntity = (entity, scmIntegration) => {
  const annotation = parseReferenceAnnotation("backstage.io/techdocs-ref", entity);
  switch (annotation.type) {
    case "url":
      return annotation;
    case "dir":
      return transformDirLocation(entity, annotation, scmIntegration);
    default:
      throw new Error(`Invalid reference annotation ${annotation.type}`);
  }
};
const getDocFilesFromRepository = async (reader, entity, opts) => {
  var _a, _b;
  const { target } = parseReferenceAnnotation("backstage.io/techdocs-ref", entity);
  (_a = opts == null ? void 0 : opts.logger) == null ? void 0 : _a.debug(`Reading files from ${target}`);
  const readTreeResponse = await reader.readTree(target, { etag: opts == null ? void 0 : opts.etag });
  const preparedDir = await readTreeResponse.dir();
  (_b = opts == null ? void 0 : opts.logger) == null ? void 0 : _b.debug(`Tree downloaded and stored at ${preparedDir}`);
  return {
    preparedDir,
    etag: readTreeResponse.etag
  };
};
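
For reference (an editorial sketch; the entity and URL are placeholders): the `backstage.io/techdocs-ref` annotation is an ordinary location ref, so parsing behaves like this:

```js
// Only the fields the helpers above actually read are included here.
const entity = {
  kind: 'Component',
  metadata: {
    name: 'example',
    annotations: {
      'backstage.io/techdocs-ref': 'url:https://github.com/acme/example/tree/main',
    },
  },
};

parseReferenceAnnotation('backstage.io/techdocs-ref', entity);
// => { type: 'url', target: 'https://github.com/acme/example/tree/main' }

// 'dir:.' refs are resolved against the entity's own source location:
// URL source locations stay type 'url'; file locations become absolute 'dir' targets,
// constrained to child paths via resolveSafeChildPath.
```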
class DirectoryPreparer {
  constructor(config, _logger, reader) {
    this.reader = reader;
    this.scmIntegrations = integration.ScmIntegrations.fromConfig(config);
  }
  static fromConfig(config, { logger, reader }) {
    return new DirectoryPreparer(config, logger, reader);
  }
  async prepare(entity, options) {
    var _a, _b;
    const annotation = parseReferenceAnnotation("backstage.io/techdocs-ref", entity);
    const { type, target } = transformDirLocation(entity, annotation, this.scmIntegrations);
    switch (type) {
      case "url": {
        (_a = options == null ? void 0 : options.logger) == null ? void 0 : _a.debug(`Reading files from ${target}`);
        const response = await this.reader.readTree(target, {
          etag: options == null ? void 0 : options.etag
        });
        const preparedDir = await response.dir();
        (_b = options == null ? void 0 : options.logger) == null ? void 0 : _b.debug(`Tree downloaded and stored at ${preparedDir}`);
        return {
          preparedDir,
          etag: response.etag
        };
      }
      case "dir": {
        return {
          preparedDir: target,
          etag: ""
        };
      }
      default:
        throw new errors.InputError(`Unable to resolve location type ${type}`);
    }
  }
}

class UrlPreparer {
  constructor(reader, logger) {
    this.logger = logger;
    this.reader = reader;
  }
  static fromConfig({ reader, logger }) {
    return new UrlPreparer(reader, logger);
  }
  async prepare(entity, options) {
    try {
      return await getDocFilesFromRepository(this.reader, entity, {
        etag: options == null ? void 0 : options.etag,
        logger: this.logger
      });
    } catch (error) {
      errors.assertError(error);
      if (error.name === "NotModifiedError") {
        this.logger.debug(`Cache is valid for etag ${options == null ? void 0 : options.etag}`);
      } else {
        this.logger.debug(`Unable to fetch files for building docs ${error.message}`);
      }
      throw error;
    }
  }
}

class Preparers {
  constructor() {
    this.preparerMap = /* @__PURE__ */ new Map();
  }
  static async fromConfig(backstageConfig, { logger, reader }) {
    const preparers = new Preparers();
    const urlPreparer = new UrlPreparer(reader, logger);
    preparers.register("url", urlPreparer);
    const directoryPreparer = new DirectoryPreparer(backstageConfig, logger, reader);
    preparers.register("dir", directoryPreparer);
    return preparers;
  }
  register(protocol, preparer) {
    this.preparerMap.set(protocol, preparer);
  }
  get(entity) {
    const { type } = parseReferenceAnnotation("backstage.io/techdocs-ref", entity);
    const preparer = this.preparerMap.get(type);
    if (!preparer) {
      throw new Error(`No preparer registered for type: "${type}"`);
    }
    return preparer;
  }
}
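
Putting the registries together (editorial sketch; `config`, `logger`, `reader`, `containerRunner`, and `entity` are assumed to exist), the prepare-then-generate flow looks roughly like:

```js
const preparers = await Preparers.fromConfig(config, { logger, reader });
const generators = await Generators.fromConfig(config, { logger, containerRunner });

// Pick the strategy matching the entity's techdocs-ref annotation type,
// fetch the doc sources, then build them with mkdocs.
const preparer = preparers.get(entity);
const { preparedDir, etag } = await preparer.prepare(entity);

const generator = generators.get(entity);
await generator.run({ inputDir: preparedDir, outputDir: '/tmp/generated-site', etag, logger });
```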
const streamToBuffer$1 = (stream) => {
  return new Promise((resolve, reject) => {
    try {
      const chunks = [];
      stream.on("data", (chunk) => chunks.push(chunk));
      stream.on("error", reject);
      stream.on("end", () => resolve(Buffer.concat(chunks)));
    } catch (e) {
      throw new errors.ForwardedError("Unable to parse the response data", e);
    }
  });
};
class AwsS3Publish {
  constructor(options) {
    this.storageClient = options.storageClient;
    this.bucketName = options.bucketName;
    this.legacyPathCasing = options.legacyPathCasing;
    this.logger = options.logger;
    this.bucketRootPath = options.bucketRootPath;
    this.sse = options.sse;
  }
  static fromConfig(config, logger) {
    let bucketName = "";
    try {
      bucketName = config.getString("techdocs.publisher.awsS3.bucketName");
    } catch (error) {
      throw new Error("Since techdocs.publisher.type is set to 'awsS3' in your app config, techdocs.publisher.awsS3.bucketName is required.");
    }
    const bucketRootPath = normalizeExternalStorageRootPath(config.getOptionalString("techdocs.publisher.awsS3.bucketRootPath") || "");
    const sse = config.getOptionalString("techdocs.publisher.awsS3.sse");
    const credentialsConfig = config.getOptionalConfig("techdocs.publisher.awsS3.credentials");
    const credentials = AwsS3Publish.buildCredentials(credentialsConfig);
    const region = config.getOptionalString("techdocs.publisher.awsS3.region");
    const endpoint = config.getOptionalString("techdocs.publisher.awsS3.endpoint");
    const s3ForcePathStyle = config.getOptionalBoolean("techdocs.publisher.awsS3.s3ForcePathStyle");
    const storageClient = new aws__default["default"].S3({
      credentials,
      ...region && { region },
      ...endpoint && { endpoint },
      ...s3ForcePathStyle && { s3ForcePathStyle }
    });
    const legacyPathCasing = config.getOptionalBoolean("techdocs.legacyUseCaseSensitiveTripletPaths") || false;
    return new AwsS3Publish({
      storageClient,
      bucketName,
      bucketRootPath,
      legacyPathCasing,
      logger,
      sse
    });
  }
  static buildCredentials(config) {
    if (!config) {
      return void 0;
    }
    const accessKeyId = config.getOptionalString("accessKeyId");
    const secretAccessKey = config.getOptionalString("secretAccessKey");
    let explicitCredentials;
    if (accessKeyId && secretAccessKey) {
      explicitCredentials = new aws.Credentials({
        accessKeyId,
        secretAccessKey
      });
    }
    const roleArn = config.getOptionalString("roleArn");
    if (roleArn) {
      return new aws__default["default"].ChainableTemporaryCredentials({
        masterCredentials: explicitCredentials,
        params: {
          RoleSessionName: "backstage-aws-techdocs-s3-publisher",
          RoleArn: roleArn
        }
      });
    }
    return explicitCredentials;
  }
  async getReadiness() {
    try {
      await this.storageClient.headBucket({ Bucket: this.bucketName }).promise();
      this.logger.info(`Successfully connected to the AWS S3 bucket ${this.bucketName}.`);
      return { isAvailable: true };
    } catch (error) {
      this.logger.error(`Could not retrieve metadata about the AWS S3 bucket ${this.bucketName}. Make sure the bucket exists. Also make sure that authentication is setup either by explicitly defining credentials and region in techdocs.publisher.awsS3 in app config or by using environment variables. Refer to https://backstage.io/docs/features/techdocs/using-cloud-storage`);
      this.logger.error(`from AWS client library`, error);
      return {
        isAvailable: false
      };
    }
  }
  async publish({
    entity,
    directory
  }) {
    const objects = [];
    const useLegacyPathCasing = this.legacyPathCasing;
    const bucketRootPath = this.bucketRootPath;
    const sse = this.sse;
    let existingFiles = [];
    try {
      const remoteFolder = getCloudPathForLocalPath(entity, void 0, useLegacyPathCasing, bucketRootPath);
      existingFiles = await this.getAllObjectsFromBucket({
        prefix: remoteFolder
      });
    } catch (e) {
      errors.assertError(e);
      this.logger.error(`Unable to list files for Entity ${entity.metadata.name}: ${e.message}`);
    }
    let absoluteFilesToUpload;
    try {
      absoluteFilesToUpload = await getFileTreeRecursively(directory);
      await bulkStorageOperation(async (absoluteFilePath) => {
        const relativeFilePath = path__default["default"].relative(directory, absoluteFilePath);
        const fileStream = fs__default["default"].createReadStream(absoluteFilePath);
        const params = {
          Bucket: this.bucketName,
          Key: getCloudPathForLocalPath(entity, relativeFilePath, useLegacyPathCasing, bucketRootPath),
          Body: fileStream,
          ...sse && { ServerSideEncryption: sse }
        };
        objects.push(params.Key);
        return this.storageClient.upload(params).promise();
      }, absoluteFilesToUpload, { concurrencyLimit: 10 });
      this.logger.info(`Successfully uploaded all the generated files for Entity ${entity.metadata.name}. Total number of files: ${absoluteFilesToUpload.length}`);
    } catch (e) {
      const errorMessage = `Unable to upload file(s) to AWS S3. ${e}`;
      this.logger.error(errorMessage);
      throw new Error(errorMessage);
    }
    try {
      const relativeFilesToUpload = absoluteFilesToUpload.map((absoluteFilePath) => getCloudPathForLocalPath(entity, path__default["default"].relative(directory, absoluteFilePath), useLegacyPathCasing, bucketRootPath));
      const staleFiles = getStaleFiles(relativeFilesToUpload, existingFiles);
      await bulkStorageOperation(async (relativeFilePath) => {
        return await this.storageClient.deleteObject({
          Bucket: this.bucketName,
          Key: relativeFilePath
        }).promise();
      }, staleFiles, { concurrencyLimit: 10 });
      this.logger.info(`Successfully deleted stale files for Entity ${entity.metadata.name}. Total number of files: ${staleFiles.length}`);
    } catch (error) {
      const errorMessage = `Unable to delete file(s) from AWS S3. ${error}`;
      this.logger.error(errorMessage);
    }
    return { objects };
  }
  async fetchTechDocsMetadata(entityName) {
    try {
      return await new Promise(async (resolve, reject) => {
        const entityTriplet = `${entityName.namespace}/${entityName.kind}/${entityName.name}`;
        const entityDir = this.legacyPathCasing ? entityTriplet : lowerCaseEntityTriplet(entityTriplet);
        const entityRootDir = path__default["default"].posix.join(this.bucketRootPath, entityDir);
        const stream = this.storageClient.getObject({
          Bucket: this.bucketName,
          Key: `${entityRootDir}/techdocs_metadata.json`
        }).createReadStream();
        try {
          const techdocsMetadataJson = await streamToBuffer$1(stream);
          if (!techdocsMetadataJson) {
            throw new Error(`Unable to parse the techdocs metadata file ${entityRootDir}/techdocs_metadata.json.`);
          }
          const techdocsMetadata = JSON5__default["default"].parse(techdocsMetadataJson.toString("utf-8"));
          resolve(techdocsMetadata);
        } catch (err) {
          errors.assertError(err);
          this.logger.error(err.message);
          reject(new Error(err.message));
        }
      });
    } catch (e) {
      throw new errors.ForwardedError("TechDocs metadata fetch failed", e);
    }
  }
  docsRouter() {
    return async (req, res) => {
      const decodedUri = decodeURI(req.path.replace(/^\//, ""));
      const decodedUriNoRoot = path__default["default"].relative(this.bucketRootPath, decodedUri);
      const filePathNoRoot = this.legacyPathCasing ? decodedUriNoRoot : lowerCaseEntityTripletInStoragePath(decodedUriNoRoot);
      const filePath = path__default["default"].posix.join(this.bucketRootPath, filePathNoRoot);
      const fileExtension = path__default["default"].extname(filePath);
      const responseHeaders = getHeadersForFileExtension(fileExtension);
      const stream = this.storageClient.getObject({ Bucket: this.bucketName, Key: filePath }).createReadStream();
      try {
        for (const [headerKey, headerValue] of Object.entries(responseHeaders)) {
          res.setHeader(headerKey, headerValue);
        }
        res.send(await streamToBuffer$1(stream));
      } catch (err) {
        errors.assertError(err);
        this.logger.warn(`TechDocs S3 router failed to serve static files from bucket ${this.bucketName} at key ${filePath}: ${err.message}`);
        res.status(404).send("File Not Found");
      }
    };
  }
  async hasDocsBeenGenerated(entity) {
    try {
      const entityTriplet = `${entity.metadata.namespace}/${entity.kind}/${entity.metadata.name}`;
      const entityDir = this.legacyPathCasing ? entityTriplet : lowerCaseEntityTriplet(entityTriplet);
      const entityRootDir = path__default["default"].posix.join(this.bucketRootPath, entityDir);
      await this.storageClient.headObject({
        Bucket: this.bucketName,
        Key: `${entityRootDir}/index.html`
      }).promise();
      return Promise.resolve(true);
    } catch (e) {
      return Promise.resolve(false);
    }
  }
  async migrateDocsCase({
    removeOriginal = false,
    concurrency = 25
  }) {
    const allObjects = await this.getAllObjectsFromBucket();
    const limiter = createLimiter__default["default"](concurrency);
    await Promise.all(allObjects.map((f) => limiter(async (file) => {
      let newPath;
      try {
        newPath = lowerCaseEntityTripletInStoragePath(file);
      } catch (e) {
        errors.assertError(e);
        this.logger.warn(e.message);
        return;
      }
      if (file === newPath) {
        return;
      }
      try {
        this.logger.verbose(`Migrating ${file}`);
        await this.storageClient.copyObject({
          Bucket: this.bucketName,
          CopySource: [this.bucketName, file].join("/"),
          Key: newPath
        }).promise();
        if (removeOriginal) {
          await this.storageClient.deleteObject({
            Bucket: this.bucketName,
            Key: file
          }).promise();
        }
      } catch (e) {
        errors.assertError(e);
        this.logger.warn(`Unable to migrate ${file}: ${e.message}`);
      }
    }, f)));
  }
  async getAllObjectsFromBucket({ prefix } = { prefix: "" }) {
    const objects = [];
    let nextContinuation;
    let allObjects;
    do {
      allObjects = await this.storageClient.listObjectsV2({
        Bucket: this.bucketName,
        ContinuationToken: nextContinuation,
        ...prefix ? { Prefix: prefix } : {}
      }).promise();
      objects.push(...(allObjects.Contents || []).map((f) => f.Key || "").filter((f) => !!f));
      nextContinuation = allObjects.NextContinuationToken;
    } while (nextContinuation);
    return objects;
  }
}
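
The config surface read by AwsS3Publish.fromConfig, collected in one place (editorial sketch; all values are placeholders):

```js
// app-config.yaml equivalent of the keys read above, shown as comments:
//   techdocs:
//     legacyUseCaseSensitiveTripletPaths: false
//     publisher:
//       type: awsS3
//       awsS3:
//         bucketName: techdocs-bucket        # required
//         bucketRootPath: ''                 # optional prefix inside the bucket
//         region: us-east-1                  # optional
//         endpoint: https://s3.example.com   # optional, for non-AWS endpoints
//         s3ForcePathStyle: false            # optional
//         sse: aws:kms                       # optional server-side encryption
//         credentials:                       # optional; else env/instance creds
//           accessKeyId: ${AWS_ACCESS_KEY_ID}
//           secretAccessKey: ${AWS_SECRET_ACCESS_KEY}
//           roleArn: arn:aws:iam::123456789012:role/techdocs  # assumed via STS
const publisher = AwsS3Publish.fromConfig(config, logger);
await publisher.getReadiness(); // => { isAvailable: true } when the bucket is reachable
await publisher.publish({ entity, directory: '/tmp/generated-site' });
```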
const BATCH_CONCURRENCY = 3;
class AzureBlobStoragePublish {
  constructor(options) {
    this.storageClient = options.storageClient;
    this.containerName = options.containerName;
    this.legacyPathCasing = options.legacyPathCasing;
    this.logger = options.logger;
  }
  static fromConfig(config, logger) {
    let containerName = "";
    try {
      containerName = config.getString("techdocs.publisher.azureBlobStorage.containerName");
    } catch (error) {
      throw new Error("Since techdocs.publisher.type is set to 'azureBlobStorage' in your app config, techdocs.publisher.azureBlobStorage.containerName is required.");
    }
    let accountName = "";
    try {
      accountName = config.getString("techdocs.publisher.azureBlobStorage.credentials.accountName");
    } catch (error) {
      throw new Error("Since techdocs.publisher.type is set to 'azureBlobStorage' in your app config, techdocs.publisher.azureBlobStorage.credentials.accountName is required.");
    }
    const accountKey = config.getOptionalString("techdocs.publisher.azureBlobStorage.credentials.accountKey");
    let credential;
    if (accountKey) {
      credential = new storageBlob.StorageSharedKeyCredential(accountName, accountKey);
    } else {
      credential = new identity.DefaultAzureCredential();
    }
    const storageClient = new storageBlob.BlobServiceClient(`https://${accountName}.blob.core.windows.net`, credential);
    const legacyPathCasing = config.getOptionalBoolean("techdocs.legacyUseCaseSensitiveTripletPaths") || false;
    return new AzureBlobStoragePublish({
      storageClient,
      containerName,
      legacyPathCasing,
      logger
    });
  }
  async getReadiness() {
    try {
      const response = await this.storageClient.getContainerClient(this.containerName).getProperties();
      if (response._response.status === 200) {
        return {
          isAvailable: true
        };
      }
      if (response._response.status >= 400) {
        this.logger.error(`Failed to retrieve metadata from ${response._response.request.url} with status code ${response._response.status}.`);
      }
    } catch (e) {
      errors.assertError(e);
      this.logger.error(`from Azure Blob Storage client library: ${e.message}`);
    }
    this.logger.error(`Could not retrieve metadata about the Azure Blob Storage container ${this.containerName}. Make sure that the Azure project and container exist and the access key is setup correctly techdocs.publisher.azureBlobStorage.credentials defined in app config has correct permissions. Refer to https://backstage.io/docs/features/techdocs/using-cloud-storage`);
    return { isAvailable: false };
  }
  async publish({
    entity,
    directory
  }) {
    const objects = [];
    const useLegacyPathCasing = this.legacyPathCasing;
    const remoteFolder = getCloudPathForLocalPath(entity, void 0, useLegacyPathCasing);
    let existingFiles = [];
    try {
      existingFiles = await this.getAllBlobsFromContainer({
        prefix: remoteFolder,
        maxPageSize: BATCH_CONCURRENCY
      });
    } catch (e) {
      errors.assertError(e);
      this.logger.error(`Unable to list files for Entity ${entity.metadata.name}: ${e.message}`);
    }
    let absoluteFilesToUpload;
    let container;
    try {
      absoluteFilesToUpload = await getFileTreeRecursively(directory);
      container = this.storageClient.getContainerClient(this.containerName);
      const failedOperations = [];
      await bulkStorageOperation(async (absoluteFilePath) => {
        const relativeFilePath = path__default["default"].normalize(path__default["default"].relative(directory, absoluteFilePath));
        const remotePath = getCloudPathForLocalPath(entity, relativeFilePath, useLegacyPathCasing);
        objects.push(remotePath);
        const response = await container.getBlockBlobClient(remotePath).uploadFile(absoluteFilePath);
        if (response._response.status >= 400) {
          failedOperations.push(new Error(`Upload failed for ${absoluteFilePath} with status code ${response._response.status}`));
        }
        return response;
      }, absoluteFilesToUpload, { concurrencyLimit: BATCH_CONCURRENCY });
      if (failedOperations.length > 0) {
        throw new Error(failedOperations.map((r) => r.message).filter(Boolean).join(" "));
      }
      this.logger.info(`Successfully uploaded all the generated files for Entity ${entity.metadata.name}. Total number of files: ${absoluteFilesToUpload.length}`);
    } catch (e) {
      const errorMessage = `Unable to upload file(s) to Azure. ${e}`;
      this.logger.error(errorMessage);
      throw new Error(errorMessage);
    }
    try {
      const relativeFilesToUpload = absoluteFilesToUpload.map((absoluteFilePath) => getCloudPathForLocalPath(entity, path__default["default"].relative(directory, absoluteFilePath), useLegacyPathCasing));
      const staleFiles = getStaleFiles(relativeFilesToUpload, existingFiles);
      await bulkStorageOperation(async (relativeFilePath) => {
        return await container.deleteBlob(relativeFilePath);
      }, staleFiles, { concurrencyLimit: BATCH_CONCURRENCY });
      this.logger.info(`Successfully deleted stale files for Entity ${entity.metadata.name}. Total number of files: ${staleFiles.length}`);
    } catch (error) {
      const errorMessage = `Unable to delete file(s) from Azure. ${error}`;
      this.logger.error(errorMessage);
    }
    return { objects };
  }
  download(containerName, blobPath) {
    return new Promise((resolve, reject) => {
      const fileStreamChunks = [];
      this.storageClient.getContainerClient(containerName).getBlockBlobClient(blobPath).download().then((res) => {
        const body = res.readableStreamBody;
        if (!body) {
          reject(new Error(`Unable to parse the response data`));
          return;
        }
        body.on("error", reject).on("data", (chunk) => {
          fileStreamChunks.push(chunk);
        }).on("end", () => {
          resolve(Buffer.concat(fileStreamChunks));
        });
      }).catch(reject);
    });
  }
  async fetchTechDocsMetadata(entityName) {
    const entityTriplet = `${entityName.namespace}/${entityName.kind}/${entityName.name}`;
    const entityRootDir = this.legacyPathCasing ? entityTriplet : lowerCaseEntityTriplet(entityTriplet);
    try {
      const techdocsMetadataJson = await this.download(this.containerName, `${entityRootDir}/techdocs_metadata.json`);
      if (!techdocsMetadataJson) {
        throw new Error(`Unable to parse the techdocs metadata file ${entityRootDir}/techdocs_metadata.json.`);
      }
      const techdocsMetadata = JSON5__default["default"].parse(techdocsMetadataJson.toString("utf-8"));
      return techdocsMetadata;
    } catch (e) {
      throw new errors.ForwardedError("TechDocs metadata fetch failed", e);
    }
  }
  docsRouter() {
    return (req, res) => {
      const decodedUri = decodeURI(req.path.replace(/^\//, ""));
      const filePath = this.legacyPathCasing ? decodedUri : lowerCaseEntityTripletInStoragePath(decodedUri);
      const fileExtension = path__default["default"].extname(filePath);
      const responseHeaders = getHeadersForFileExtension(fileExtension);
      this.download(this.containerName, filePath).then((fileContent) => {
        for (const [headerKey, headerValue] of Object.entries(responseHeaders)) {
          res.setHeader(headerKey, headerValue);
        }
        res.send(fileContent);
      }).catch((e) => {
        this.logger.warn(`TechDocs Azure router failed to serve content from container ${this.containerName} at path ${filePath}: ${e.message}`);
        res.status(404).send("File Not Found");
      });
    };
  }
  hasDocsBeenGenerated(entity) {
    const entityTriplet = `${entity.metadata.namespace}/${entity.kind}/${entity.metadata.name}`;
    const entityRootDir = this.legacyPathCasing ? entityTriplet : lowerCaseEntityTriplet(entityTriplet);
    return this.storageClient.getContainerClient(this.containerName).getBlockBlobClient(`${entityRootDir}/index.html`).exists();
  }
  async renameBlob(originalName, newName, removeOriginal = false) {
    const container = this.storageClient.getContainerClient(this.containerName);
    const blob = container.getBlobClient(newName);
    const { url } = container.getBlobClient(originalName);
    const response = await blob.beginCopyFromURL(url);
    await response.pollUntilDone();
    if (removeOriginal) {
      await container.deleteBlob(originalName);
    }
  }
  async renameBlobToLowerCase(originalPath, removeOriginal) {
    let newPath;
    try {
      newPath = lowerCaseEntityTripletInStoragePath(originalPath);
    } catch (e) {
      errors.assertError(e);
      this.logger.warn(e.message);
      return;
    }
    if (originalPath === newPath)
      return;
    try {
      this.logger.verbose(`Migrating ${originalPath}`);
      await this.renameBlob(originalPath, newPath, removeOriginal);
    } catch (e) {
      errors.assertError(e);
      this.logger.warn(`Unable to migrate ${originalPath}: ${e.message}`);
    }
  }
  async migrateDocsCase({
    removeOriginal = false,
    concurrency = 25
  }) {
    const promises = [];
    const limiter = createLimiter__default["default"](concurrency);
    const container = this.storageClient.getContainerClient(this.containerName);
    for await (const blob of container.listBlobsFlat()) {
      promises.push(limiter(this.renameBlobToLowerCase.bind(this), blob.name, removeOriginal));
    }
    await Promise.all(promises);
  }
  async getAllBlobsFromContainer({
    prefix,
    maxPageSize
  }) {
    var _a, _b;
    const blobs = [];
    const container = this.storageClient.getContainerClient(this.containerName);
    let iterator = container.listBlobsFlat({ prefix }).byPage({ maxPageSize });
    let response = (await iterator.next()).value;
    do {
      for (const blob of (_b = (_a = response == null ? void 0 : response.segment) == null ? void 0 : _a.blobItems) != null ? _b : []) {
        blobs.push(blob.name);
      }
      iterator = container.listBlobsFlat({ prefix }).byPage({ continuationToken: response.continuationToken, maxPageSize });
      response = (await iterator.next()).value;
    } while (response && response.continuationToken);
    return blobs;
  }
}
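
Both the S3 and Azure publishers expose the same one-time migration for the older, case-sensitive path layout (editorial sketch):

```js
// Copies every object whose namespace/kind/name triplet still contains
// upper-case characters to its lower-cased path; removeOriginal cleans up
// the old copies afterwards.
const publisher = AzureBlobStoragePublish.fromConfig(config, logger);
await publisher.migrateDocsCase({ removeOriginal: true, concurrency: 25 });
```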
class MigrateWriteStream extends stream.Writable {
  constructor(logger, removeOriginal, concurrency) {
    super({ objectMode: true });
    this.inFlight = 0;
    this.logger = logger;
    this.removeOriginal = removeOriginal;
    this.maxConcurrency = concurrency;
  }
  _write(file, _encoding, next) {
    let shouldCallNext = true;
    let newFile;
    try {
      newFile = lowerCaseEntityTripletInStoragePath(file.name);
    } catch (e) {
      errors.assertError(e);
      this.logger.warn(e.message);
      next();
      return;
    }
    if (newFile === file.name) {
      next();
      return;
    }
    this.inFlight++;
    if (this.inFlight < this.maxConcurrency) {
      next();
      shouldCallNext = false;
    }
    const migrate = this.removeOriginal ? file.move.bind(file) : file.copy.bind(file);
    this.logger.verbose(`Migrating ${file.name}`);
    migrate(newFile).catch((e) => this.logger.warn(`Unable to migrate ${file.name}: ${e.message}`)).finally(() => {
      this.inFlight--;
      if (shouldCallNext) {
        next();
      }
    });
  }
}
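
This Writable consumes objects exposing `name`, `copy`, and `move` (the shape of @google-cloud/storage File objects). A plausible wiring (editorial sketch; the `bucket` instance is assumed):

```js
// Object-mode pipe: each file is copied (or moved, when removeOriginal is true)
// to its lower-cased triplet path, with at most `concurrency` migrations
// in flight at once thanks to the inFlight/next() back-pressure above.
bucket.getFilesStream().pipe(new MigrateWriteStream(logger, false /* removeOriginal */, 25 /* concurrency */));
```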
|
|
1128
|
+
class GoogleGCSPublish {
|
|
1129
|
+
constructor(options) {
|
|
1130
|
+
this.storageClient = options.storageClient;
|
|
1131
|
+
this.bucketName = options.bucketName;
|
|
1132
|
+
this.legacyPathCasing = options.legacyPathCasing;
|
|
1133
|
+
this.logger = options.logger;
|
|
1134
|
+
this.bucketRootPath = options.bucketRootPath;
|
|
1135
|
+
}
|
|
1136
|
+
static fromConfig(config, logger) {
|
|
1137
|
+
let bucketName = "";
|
|
1138
|
+
try {
|
|
1139
|
+
bucketName = config.getString("techdocs.publisher.googleGcs.bucketName");
|
|
1140
|
+
} catch (error) {
|
|
1141
|
+
throw new Error("Since techdocs.publisher.type is set to 'googleGcs' in your app config, techdocs.publisher.googleGcs.bucketName is required.");
|
|
1142
|
+
}
|
|
1143
|
+
const bucketRootPath = normalizeExternalStorageRootPath(config.getOptionalString("techdocs.publisher.googleGcs.bucketRootPath") || "");
|
|
1144
|
+
const credentials = config.getOptionalString("techdocs.publisher.googleGcs.credentials");
|
|
1145
|
+
let credentialsJson = {};
|
|
1146
|
+
if (credentials) {
|
|
1147
|
+
try {
|
|
1148
|
+
credentialsJson = JSON.parse(credentials);
|
|
1149
|
+
} catch (err) {
|
|
1150
|
+
throw new Error("Error in parsing techdocs.publisher.googleGcs.credentials config to JSON.");
|
|
1151
|
+
}
|
|
1152
|
+
}
|
|
1153
|
+
const storageClient = new storage.Storage({
|
|
1154
|
+
...credentials && {
|
|
1155
|
+
projectId: credentialsJson.project_id,
|
|
1156
|
+
credentials: credentialsJson
|
|
1157
|
+
}
|
|
1158
|
+
});
|
|
1159
|
+
const legacyPathCasing = config.getOptionalBoolean("techdocs.legacyUseCaseSensitiveTripletPaths") || false;
|
|
1160
|
+
return new GoogleGCSPublish({
|
|
1161
|
+
storageClient,
|
|
1162
|
+
bucketName,
|
|
1163
|
+
legacyPathCasing,
|
|
1164
|
+
logger,
|
|
1165
|
+
bucketRootPath
|
|
1166
|
+
});
|
|
1167
|
+
}
|
|
1168
|
+
  async getReadiness() {
    try {
      await this.storageClient.bucket(this.bucketName).getMetadata();
      this.logger.info(`Successfully connected to the GCS bucket ${this.bucketName}.`);
      return {
        isAvailable: true
      };
    } catch (err) {
      errors.assertError(err);
      this.logger.error(`Could not retrieve metadata about the GCS bucket ${this.bucketName}. Make sure the bucket exists. Also make sure that authentication is setup either by explicitly defining techdocs.publisher.googleGcs.credentials in app config or by using environment variables. Refer to https://backstage.io/docs/features/techdocs/using-cloud-storage`);
      this.logger.error(`from GCS client library: ${err.message}`);
      return { isAvailable: false };
    }
  }
  async publish({
    entity,
    directory
  }) {
    const objects = [];
    const useLegacyPathCasing = this.legacyPathCasing;
    const bucket = this.storageClient.bucket(this.bucketName);
    const bucketRootPath = this.bucketRootPath;
    let existingFiles = [];
    try {
      const remoteFolder = getCloudPathForLocalPath(entity, void 0, useLegacyPathCasing, bucketRootPath);
      existingFiles = await this.getFilesForFolder(remoteFolder);
    } catch (e) {
      errors.assertError(e);
      this.logger.error(`Unable to list files for Entity ${entity.metadata.name}: ${e.message}`);
    }
    let absoluteFilesToUpload;
    try {
      absoluteFilesToUpload = await getFileTreeRecursively(directory);
      await bulkStorageOperation(async (absoluteFilePath) => {
        const relativeFilePath = path__default["default"].relative(directory, absoluteFilePath);
        const destination = getCloudPathForLocalPath(entity, relativeFilePath, useLegacyPathCasing, bucketRootPath);
        objects.push(destination);
        return await bucket.upload(absoluteFilePath, { destination });
      }, absoluteFilesToUpload, { concurrencyLimit: 10 });
      this.logger.info(`Successfully uploaded all the generated files for Entity ${entity.metadata.name}. Total number of files: ${absoluteFilesToUpload.length}`);
    } catch (e) {
      const errorMessage = `Unable to upload file(s) to Google Cloud Storage. ${e}`;
      this.logger.error(errorMessage);
      throw new Error(errorMessage);
    }
    try {
      const relativeFilesToUpload = absoluteFilesToUpload.map((absoluteFilePath) => getCloudPathForLocalPath(entity, path__default["default"].relative(directory, absoluteFilePath), useLegacyPathCasing, bucketRootPath));
      const staleFiles = getStaleFiles(relativeFilesToUpload, existingFiles);
      await bulkStorageOperation(async (relativeFilePath) => {
        return await bucket.file(relativeFilePath).delete();
      }, staleFiles, { concurrencyLimit: 10 });
      this.logger.info(`Successfully deleted stale files for Entity ${entity.metadata.name}. Total number of files: ${staleFiles.length}`);
    } catch (error) {
      const errorMessage = `Unable to delete file(s) from Google Cloud Storage. ${error}`;
      this.logger.error(errorMessage);
    }
    return { objects };
  }
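  // publish() above proceeds in three stages: list the entity's existing
  // objects, upload the freshly generated files via bulkStorageOperation (at
  // most 10 concurrent uploads), then delete stale objects that were not
  // re-uploaded. An upload failure aborts with an error, while a failure
  // during stale-file cleanup is only logged.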
  fetchTechDocsMetadata(entityName) {
    return new Promise((resolve, reject) => {
      const entityTriplet = `${entityName.namespace}/${entityName.kind}/${entityName.name}`;
      const entityDir = this.legacyPathCasing ? entityTriplet : lowerCaseEntityTriplet(entityTriplet);
      const entityRootDir = path__default["default"].posix.join(this.bucketRootPath, entityDir);
      const fileStreamChunks = [];
      this.storageClient.bucket(this.bucketName).file(`${entityRootDir}/techdocs_metadata.json`).createReadStream().on("error", (err) => {
        this.logger.error(err.message);
        reject(err);
      }).on("data", (chunk) => {
        fileStreamChunks.push(chunk);
      }).on("end", () => {
        const techdocsMetadataJson = Buffer.concat(fileStreamChunks).toString("utf-8");
        resolve(JSON5__default["default"].parse(techdocsMetadataJson));
      });
    });
  }
  docsRouter() {
    return (req, res) => {
      const decodedUri = decodeURI(req.path.replace(/^\//, ""));
      const decodedUriNoRoot = path__default["default"].relative(this.bucketRootPath, decodedUri);
      const filePathNoRoot = this.legacyPathCasing ? decodedUriNoRoot : lowerCaseEntityTripletInStoragePath(decodedUriNoRoot);
      const filePath = path__default["default"].posix.join(this.bucketRootPath, filePathNoRoot);
      const fileExtension = path__default["default"].extname(filePath);
      const responseHeaders = getHeadersForFileExtension(fileExtension);
      this.storageClient.bucket(this.bucketName).file(filePath).createReadStream().on("pipe", () => {
        res.writeHead(200, responseHeaders);
      }).on("error", (err) => {
        this.logger.warn(`TechDocs Google GCS router failed to serve content from bucket ${this.bucketName} at path ${filePath}: ${err.message}`);
        if (!res.headersSent) {
          res.status(404).send("File Not Found");
        } else {
          res.destroy();
        }
      }).pipe(res);
    };
  }
  async hasDocsBeenGenerated(entity) {
    return new Promise((resolve) => {
      const entityTriplet = `${entity.metadata.namespace}/${entity.kind}/${entity.metadata.name}`;
      const entityDir = this.legacyPathCasing ? entityTriplet : lowerCaseEntityTriplet(entityTriplet);
      const entityRootDir = path__default["default"].posix.join(this.bucketRootPath, entityDir);
      this.storageClient.bucket(this.bucketName).file(`${entityRootDir}/index.html`).exists().then((response) => {
        resolve(response[0]);
      }).catch(() => {
        resolve(false);
      });
    });
  }
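  // migrateDocsCase below streams every object's metadata in the bucket
  // through MigrateWriteStream (defined earlier in this file), which
  // lower-cases the namespace/kind/name triplet prefix and copies each object
  // to the new path, or moves it when removeOriginal is set.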
  migrateDocsCase({ removeOriginal = false, concurrency = 25 }) {
    return new Promise((resolve, reject) => {
      const allFileMetadata = this.storageClient.bucket(this.bucketName).getFilesStream();
      const migrateFiles = new MigrateWriteStream(this.logger, removeOriginal, concurrency);
      migrateFiles.on("finish", resolve).on("error", reject);
      allFileMetadata.pipe(migrateFiles).on("error", (error) => {
        migrateFiles.destroy();
        reject(error);
      });
    });
  }
  getFilesForFolder(folder) {
    const fileMetadataStream = this.storageClient.bucket(this.bucketName).getFilesStream({ prefix: folder });
    return new Promise((resolve, reject) => {
      const files = [];
      fileMetadataStream.on("error", (error) => {
        reject(error);
      });
      fileMetadataStream.on("data", (file) => {
        files.push(file.name);
      });
      fileMetadataStream.on("end", () => {
        resolve(files);
      });
    });
  }
}

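// Resolve the static docs directory shipped with the installed
// @backstage/plugin-techdocs-backend package; if the package cannot be
// resolved, fall back to the OS temp directory so LocalPublish still has a
// writable location.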
let staticDocsDir = "";
|
|
1304
|
+
try {
|
|
1305
|
+
staticDocsDir = backendCommon.resolvePackagePath("@backstage/plugin-techdocs-backend", "static/docs");
|
|
1306
|
+
} catch (err) {
|
|
1307
|
+
staticDocsDir = os__default["default"].tmpdir();
|
|
1308
|
+
}
|
|
1309
|
+
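// LocalPublish stores generated documentation on the local filesystem under
// staticDocsDir and serves it with an Express router. It is also the fallback
// chosen by Publisher.fromConfig when no publisher type is configured.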
class LocalPublish {
  constructor(options) {
    this.logger = options.logger;
    this.discovery = options.discovery;
    this.legacyPathCasing = options.legacyPathCasing;
  }
  static fromConfig(config, logger, discovery) {
    const legacyPathCasing = config.getOptionalBoolean("techdocs.legacyUseCaseSensitiveTripletPaths") || false;
    return new LocalPublish({
      logger,
      discovery,
      legacyPathCasing
    });
  }
  async getReadiness() {
    return {
      isAvailable: true
    };
  }
  async publish({
    entity,
    directory
  }) {
    var _a;
    const entityNamespace = (_a = entity.metadata.namespace) != null ? _a : "default";
    const publishDir = this.staticEntityPathJoin(entityNamespace, entity.kind, entity.metadata.name);
    if (!fs__default["default"].existsSync(publishDir)) {
      this.logger.info(`Could not find ${publishDir}, creating the directory.`);
      fs__default["default"].mkdirSync(publishDir, { recursive: true });
    }
    try {
      await fs__default["default"].copy(directory, publishDir);
      this.logger.info(`Published site stored at ${publishDir}`);
    } catch (error) {
      this.logger.debug(`Failed to copy docs from ${directory} to ${publishDir}`);
      throw error;
    }
    const techdocsApiUrl = await this.discovery.getBaseUrl("techdocs");
    const publishedFilePaths = (await getFileTreeRecursively(publishDir)).map((abs) => {
      return abs.split(`${staticDocsDir}/`)[1];
    });
    return {
      remoteUrl: `${techdocsApiUrl}/static/docs/${encodeURIComponent(entity.metadata.name)}`,
      objects: publishedFilePaths
    };
  }
  async fetchTechDocsMetadata(entityName) {
    const metadataPath = this.staticEntityPathJoin(entityName.namespace, entityName.kind, entityName.name, "techdocs_metadata.json");
    try {
      return await fs__default["default"].readJson(metadataPath);
    } catch (err) {
      errors.assertError(err);
      this.logger.error(`Unable to read techdocs_metadata.json at ${metadataPath}. Error: ${err}`);
      throw new Error(err.message);
    }
  }
  docsRouter() {
    const router = express__default["default"].Router();
    router.use((req, res, next) => {
      if (this.legacyPathCasing) {
        return next();
      }
      const [_, namespace, kind, name, ...rest] = req.path.split("/");
      if (!namespace || !kind || !name) {
        return next();
      }
      const newPath = [
        _,
        namespace.toLowerCase(),
        kind.toLowerCase(),
        name.toLowerCase(),
        ...rest
      ].join("/");
      if (newPath === req.path) {
        return next();
      }
      return res.redirect(req.baseUrl + newPath, 301);
    });
    router.use(express__default["default"].static(staticDocsDir, {
      setHeaders: (res, filePath) => {
        const fileExtension = path__default["default"].extname(filePath);
        const headers = getHeadersForFileExtension(fileExtension);
        for (const [header, value] of Object.entries(headers)) {
          res.setHeader(header, value);
        }
      }
    }));
    return router;
  }
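  // The middleware above 301-redirects case-sensitive triplet paths to their
  // lower-cased form before handing off to express.static. A mounting sketch
  // (hypothetical wiring; the techdocs backend normally mounts this router,
  // and `localPublish` is an assumed instance name):
  //
  //   const app = express__default["default"]();
  //   app.use("/static/docs", localPublish.docsRouter());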
  async hasDocsBeenGenerated(entity) {
    var _a;
    const namespace = (_a = entity.metadata.namespace) != null ? _a : "default";
    const indexHtmlPath = this.staticEntityPathJoin(namespace, entity.kind, entity.metadata.name, "index.html");
    try {
      await fs__default["default"].access(indexHtmlPath, fs__default["default"].constants.F_OK);
      return true;
    } catch (err) {
      return false;
    }
  }
  async migrateDocsCase({
    removeOriginal = false,
    concurrency = 25
  }) {
    const files = await getFileTreeRecursively(staticDocsDir);
    const limit = createLimiter__default["default"](concurrency);
    await Promise.all(files.map((f) => limit(async (file) => {
      const relativeFile = file.replace(`${staticDocsDir}${path__default["default"].sep}`, "");
      const newFile = lowerCaseEntityTripletInStoragePath(relativeFile);
      if (relativeFile === newFile) {
        return;
      }
      await new Promise((resolve) => {
        const migrate = removeOriginal ? fs__default["default"].move : fs__default["default"].copyFile;
        this.logger.verbose(`Migrating ${relativeFile}`);
        migrate(file, newFile, (err) => {
          if (err) {
            this.logger.warn(`Unable to migrate ${relativeFile}: ${err.message}`);
          }
          resolve();
        });
      });
    }, f)));
  }
  staticEntityPathJoin(...allParts) {
    if (this.legacyPathCasing) {
      const [namespace2, kind2, name2, ...parts2] = allParts;
      return path__default["default"].join(staticDocsDir, namespace2, kind2, name2, ...parts2);
    }
    const [namespace, kind, name, ...parts] = allParts;
    return path__default["default"].join(staticDocsDir, namespace.toLowerCase(), kind.toLowerCase(), name.toLowerCase(), ...parts);
  }
}

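// Stream helpers used by the OpenStack Swift publisher below: streamToBuffer
// drains a readable stream into a single Buffer, and bufferToStream wraps a
// Buffer in a Readable suitable for upload APIs.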
const streamToBuffer = (stream) => {
  return new Promise((resolve, reject) => {
    try {
      const chunks = [];
      stream.on("data", (chunk) => chunks.push(chunk));
      stream.on("error", reject);
      stream.on("end", () => resolve(Buffer.concat(chunks)));
    } catch (e) {
      throw new errors.ForwardedError("Unable to parse the response data", e);
    }
  });
};
const bufferToStream = (buffer) => {
  const stream$1 = new stream.Readable();
  stream$1.push(buffer);
  stream$1.push(null);
  return stream$1;
};
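// Round-trip sketch for the helpers above (illustrative only):
//
//   const readable = bufferToStream(Buffer.from("hello"));
//   const buffer = await streamToBuffer(readable);
//   buffer.toString("utf-8"); // => "hello"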
class OpenStackSwiftPublish {
  constructor(options) {
    this.storageClient = options.storageClient;
    this.containerName = options.containerName;
    this.logger = options.logger;
  }
  static fromConfig(config, logger) {
    let containerName = "";
    try {
      containerName = config.getString("techdocs.publisher.openStackSwift.containerName");
    } catch (error) {
      throw new Error("Since techdocs.publisher.type is set to 'openStackSwift' in your app config, techdocs.publisher.openStackSwift.containerName is required.");
    }
    const openStackSwiftConfig = config.getConfig("techdocs.publisher.openStackSwift");
    const storageClient = new openstackSwiftSdk.SwiftClient({
      authEndpoint: openStackSwiftConfig.getString("authUrl"),
      swiftEndpoint: openStackSwiftConfig.getString("swiftUrl"),
      credentialId: openStackSwiftConfig.getString("credentials.id"),
      secret: openStackSwiftConfig.getString("credentials.secret")
    });
    return new OpenStackSwiftPublish({ storageClient, containerName, logger });
  }
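  // fromConfig above requires these app-config keys under
  // techdocs.publisher.openStackSwift: containerName, authUrl, swiftUrl,
  // credentials.id, and credentials.secret. They are passed directly to the
  // @trendyol-js/openstack-swift-sdk SwiftClient.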
  async getReadiness() {
    try {
      const container = await this.storageClient.getContainerMetadata(this.containerName);
      if (!(container instanceof types.NotFound)) {
        this.logger.info(`Successfully connected to the OpenStack Swift container ${this.containerName}.`);
        return {
          isAvailable: true
        };
      }
      this.logger.error(`Could not retrieve metadata about the OpenStack Swift container ${this.containerName}. Make sure the container exists. Also make sure that authentication is setup either by explicitly defining credentials and region in techdocs.publisher.openStackSwift in app config or by using environment variables. Refer to https://backstage.io/docs/features/techdocs/using-cloud-storage`);
      return {
        isAvailable: false
      };
    } catch (err) {
      errors.assertError(err);
      this.logger.error(`from OpenStack client library: ${err.message}`);
      return {
        isAvailable: false
      };
    }
  }
  async publish({
    entity,
    directory
  }) {
    try {
      const objects = [];
      const allFilesToUpload = await getFileTreeRecursively(directory);
      const limiter = createLimiter__default["default"](10);
      const uploadPromises = [];
      for (const filePath of allFilesToUpload) {
        const relativeFilePath = path__default["default"].relative(directory, filePath);
        const relativeFilePathPosix = relativeFilePath.split(path__default["default"].sep).join(path__default["default"].posix.sep);
        const entityRootDir = `${entity.metadata.namespace}/${entity.kind}/${entity.metadata.name}`;
        const destination = `${entityRootDir}/${relativeFilePathPosix}`;
        objects.push(destination);
        const uploadFile = limiter(async () => {
          const fileBuffer = await fs__default["default"].readFile(filePath);
          const stream = bufferToStream(fileBuffer);
          return this.storageClient.upload(this.containerName, destination, stream);
        });
        uploadPromises.push(uploadFile);
      }
      await Promise.all(uploadPromises);
      this.logger.info(`Successfully uploaded all the generated files for Entity ${entity.metadata.name}. Total number of files: ${allFilesToUpload.length}`);
      return { objects };
    } catch (e) {
      const errorMessage = `Unable to upload file(s) to OpenStack Swift. ${e}`;
      this.logger.error(errorMessage);
      throw new Error(errorMessage);
    }
  }
  async fetchTechDocsMetadata(entityName) {
    return await new Promise(async (resolve, reject) => {
      const entityRootDir = `${entityName.namespace}/${entityName.kind}/${entityName.name}`;
      const downloadResponse = await this.storageClient.download(this.containerName, `${entityRootDir}/techdocs_metadata.json`);
      if (!(downloadResponse instanceof types.NotFound)) {
        const stream = downloadResponse.data;
        try {
          const techdocsMetadataJson = await streamToBuffer(stream);
          if (!techdocsMetadataJson) {
            throw new Error(`Unable to parse the techdocs metadata file ${entityRootDir}/techdocs_metadata.json.`);
          }
          const techdocsMetadata = JSON5__default["default"].parse(techdocsMetadataJson.toString("utf-8"));
          resolve(techdocsMetadata);
        } catch (err) {
          errors.assertError(err);
          this.logger.error(err.message);
          reject(new Error(err.message));
        }
      } else {
        reject({
          message: `TechDocs metadata fetch failed, The file /rootDir/${entityRootDir}/techdocs_metadata.json does not exist !`
        });
      }
    });
  }
  docsRouter() {
    return async (req, res) => {
      const filePath = decodeURI(req.path.replace(/^\//, ""));
      const fileExtension = path__default["default"].extname(filePath);
      const responseHeaders = getHeadersForFileExtension(fileExtension);
      const downloadResponse = await this.storageClient.download(this.containerName, filePath);
      if (!(downloadResponse instanceof types.NotFound)) {
        const stream = downloadResponse.data;
        try {
          for (const [headerKey, headerValue] of Object.entries(responseHeaders)) {
            res.setHeader(headerKey, headerValue);
          }
          res.send(await streamToBuffer(stream));
        } catch (err) {
          errors.assertError(err);
          this.logger.warn(`TechDocs OpenStack swift router failed to serve content from container ${this.containerName} at path ${filePath}: ${err.message}`);
          res.status(404).send("File Not Found");
        }
      } else {
        this.logger.warn(`TechDocs OpenStack swift router failed to serve content from container ${this.containerName} at path ${filePath}: Not found`);
        res.status(404).send("File Not Found");
      }
    };
  }
  async hasDocsBeenGenerated(entity) {
    const entityRootDir = `${entity.metadata.namespace}/${entity.kind}/${entity.metadata.name}`;
    try {
      const fileResponse = await this.storageClient.getMetadata(this.containerName, `${entityRootDir}/index.html`);
      if (!(fileResponse instanceof types.NotFound)) {
        return true;
      }
      return false;
    } catch (err) {
      errors.assertError(err);
      this.logger.warn(err.message);
      return false;
    }
  }
  async migrateDocsCase({
    removeOriginal = false,
    concurrency = 25
  }) {
    const allObjects = await this.getAllObjectsFromContainer();
    const limiter = createLimiter__default["default"](concurrency);
    await Promise.all(allObjects.map((f) => limiter(async (file) => {
      let newPath;
      try {
        newPath = lowerCaseEntityTripletInStoragePath(file);
      } catch (e) {
        errors.assertError(e);
        this.logger.warn(e.message);
        return;
      }
      if (file === newPath) {
        return;
      }
      try {
        this.logger.verbose(`Migrating ${file} to ${newPath}`);
        await this.storageClient.copy(this.containerName, file, this.containerName, newPath);
        if (removeOriginal) {
          await this.storageClient.delete(this.containerName, file);
        }
      } catch (e) {
        errors.assertError(e);
        this.logger.warn(`Unable to migrate ${file}: ${e.message}`);
      }
    }, f)));
  }
async getAllObjectsFromContainer({ prefix } = { prefix: "" }) {
|
|
1629
|
+
let objects = [];
|
|
1630
|
+
const OSS_MAX_LIMIT = Math.pow(2, 31) - 1;
|
|
1631
|
+
const allObjects = await this.storageClient.list(this.containerName, prefix, OSS_MAX_LIMIT);
|
|
1632
|
+
objects = allObjects.map((object) => object.name);
|
|
1633
|
+
return objects;
|
|
1634
|
+
}
|
|
1635
|
+
}
|
|
1636
|
+
|
|
1637
|
+
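// Publisher is the factory that selects a concrete publisher implementation
// based on techdocs.publisher.type, falling back to LocalPublish for unknown
// types. Usage sketch (`config`, `logger`, and `discovery` are assumed to come
// from the Backstage backend environment; illustrative only):
//
//   const publisher = await Publisher.fromConfig(config, { logger, discovery });
//   const { isAvailable } = await publisher.getReadiness();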
class Publisher {
  static async fromConfig(config, { logger, discovery }) {
    var _a;
    const publisherType = (_a = config.getOptionalString("techdocs.publisher.type")) != null ? _a : "local";
    switch (publisherType) {
      case "googleGcs":
        logger.info("Creating Google Storage Bucket publisher for TechDocs");
        return GoogleGCSPublish.fromConfig(config, logger);
      case "awsS3":
        logger.info("Creating AWS S3 Bucket publisher for TechDocs");
        return AwsS3Publish.fromConfig(config, logger);
      case "azureBlobStorage":
        logger.info("Creating Azure Blob Storage Container publisher for TechDocs");
        return AzureBlobStoragePublish.fromConfig(config, logger);
      case "openStackSwift":
        logger.info("Creating OpenStack Swift Container publisher for TechDocs");
        return OpenStackSwiftPublish.fromConfig(config, logger);
      case "local":
        logger.info("Creating Local publisher for TechDocs");
        return LocalPublish.fromConfig(config, logger, discovery);
      default:
        logger.info("Creating Local publisher for TechDocs");
        return LocalPublish.fromConfig(config, logger, discovery);
    }
  }
}

exports.DirectoryPreparer = DirectoryPreparer;
exports.Generators = Generators;
exports.Preparers = Preparers;
exports.Publisher = Publisher;
exports.TechdocsGenerator = TechdocsGenerator;
exports.UrlPreparer = UrlPreparer;
exports.getDocFilesFromRepository = getDocFilesFromRepository;
exports.getLocationForEntity = getLocationForEntity;
exports.parseReferenceAnnotation = parseReferenceAnnotation;
exports.transformDirLocation = transformDirLocation;
//# sourceMappingURL=index.cjs.js.map