@backstage/plugin-techdocs-node 1.12.12-next.1 → 1.12.12-next.2
This diff shows the published contents of two package versions as they appear in their respective public registries. It is provided for informational purposes only.
- package/CHANGELOG.md +17 -0
- package/dist/extensions.cjs.js +22 -0
- package/dist/extensions.cjs.js.map +1 -0
- package/dist/helpers.cjs.js +80 -0
- package/dist/helpers.cjs.js.map +1 -0
- package/dist/index.cjs.js +29 -2602
- package/dist/index.cjs.js.map +1 -1
- package/dist/index.d.ts +11 -1
- package/dist/stages/generate/DockerContainerRunner.cjs.js +99 -0
- package/dist/stages/generate/DockerContainerRunner.cjs.js.map +1 -0
- package/dist/stages/generate/generators.cjs.js +42 -0
- package/dist/stages/generate/generators.cjs.js.map +1 -0
- package/dist/stages/generate/helpers.cjs.js +265 -0
- package/dist/stages/generate/helpers.cjs.js.map +1 -0
- package/dist/stages/generate/index.cjs.js +15 -0
- package/dist/stages/generate/index.cjs.js.map +1 -0
- package/dist/stages/generate/mkdocsPatchers.cjs.js +96 -0
- package/dist/stages/generate/mkdocsPatchers.cjs.js.map +1 -0
- package/dist/stages/generate/techdocs.cjs.js +169 -0
- package/dist/stages/generate/techdocs.cjs.js.map +1 -0
- package/dist/stages/prepare/dir.cjs.js +63 -0
- package/dist/stages/prepare/dir.cjs.js.map +1 -0
- package/dist/stages/prepare/preparers.cjs.js +54 -0
- package/dist/stages/prepare/preparers.cjs.js.map +1 -0
- package/dist/stages/prepare/url.cjs.js +46 -0
- package/dist/stages/prepare/url.cjs.js.map +1 -0
- package/dist/stages/publish/awsS3.cjs.js +436 -0
- package/dist/stages/publish/awsS3.cjs.js.map +1 -0
- package/dist/stages/publish/azureBlobStorage.cjs.js +337 -0
- package/dist/stages/publish/azureBlobStorage.cjs.js.map +1 -0
- package/dist/stages/publish/googleStorage.cjs.js +288 -0
- package/dist/stages/publish/googleStorage.cjs.js.map +1 -0
- package/dist/stages/publish/helpers.cjs.js +138 -0
- package/dist/stages/publish/helpers.cjs.js.map +1 -0
- package/dist/stages/publish/local.cjs.js +248 -0
- package/dist/stages/publish/local.cjs.js.map +1 -0
- package/dist/stages/publish/migrations/GoogleMigration.cjs.js +52 -0
- package/dist/stages/publish/migrations/GoogleMigration.cjs.js.map +1 -0
- package/dist/stages/publish/openStackSwift.cjs.js +286 -0
- package/dist/stages/publish/openStackSwift.cjs.js.map +1 -0
- package/dist/stages/publish/publish.cjs.js +100 -0
- package/dist/stages/publish/publish.cjs.js.map +1 -0
- package/package.json +7 -7
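
The headline change is structural: the previously monolithic package/dist/index.cjs.js shrinks by 2,602 lines while per-stage modules appear under dist/stages/ (generate, prepare, publish) alongside new extensions.cjs.js and helpers.cjs.js files. As a minimal sketch only, not the published file: after a bundle split of this shape, the new 29-line index.cjs.js is typically a thin CommonJS barrel over the split modules. The require paths below mirror the file list above, but the exported names (TechdocsGenerator, Generators, Preparers, Publisher) are assumptions about the package's public surface, not a copy of the actual new index:

// Hypothetical sketch of a post-split CJS barrel; not the published index.cjs.js.
'use strict';

const generate = require('./stages/generate/index.cjs.js');
const preparers = require('./stages/prepare/preparers.cjs.js');
const publish = require('./stages/publish/publish.cjs.js');

// Re-export so existing require('@backstage/plugin-techdocs-node') consumers
// resolve the same names they did against the old monolithic bundle.
exports.TechdocsGenerator = generate.TechdocsGenerator;
exports.Generators = generate.Generators;
exports.Preparers = preparers.Preparers;
exports.Publisher = publish.Publisher;

A split like this keeps the public entry point stable while letting the per-stage implementation files and their source maps be loaded and maintained independently.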
package/dist/index.cjs.js
CHANGED
|
@@ -1,2605 +1,32 @@
|
|
|
1
1
|
'use strict';
|
|
2
2
|
|
|
3
|
-
var
|
|
4
|
-
var
|
|
5
|
-
var
|
|
6
|
-
var
|
|
7
|
-
var
|
|
8
|
-
var
|
|
9
|
-
var
|
|
10
|
-
var
|
|
11
|
-
var
|
|
12
|
-
var
|
|
13
|
-
|
|
14
|
-
|
|
15
|
-
|
|
16
|
-
|
|
17
|
-
|
|
18
|
-
|
|
19
|
-
|
|
20
|
-
|
|
21
|
-
|
|
22
|
-
|
|
23
|
-
|
|
24
|
-
|
|
25
|
-
|
|
26
|
-
|
|
27
|
-
|
|
28
|
-
|
|
29
|
-
|
|
30
|
-
|
|
31
|
-
|
|
32
|
-
var types = require('@trendyol-js/openstack-swift-sdk/lib/types');
|
|
33
|
-
|
|
34
|
-
function _interopDefaultCompat (e) { return e && typeof e === 'object' && 'default' in e ? e : { default: e }; }
|
|
35
|
-
|
|
36
|
-
var path__default = /*#__PURE__*/_interopDefaultCompat(path);
|
|
37
|
-
var fs__default = /*#__PURE__*/_interopDefaultCompat(fs);
|
|
38
|
-
var gitUrlParse__default = /*#__PURE__*/_interopDefaultCompat(gitUrlParse);
|
|
39
|
-
var yaml__default = /*#__PURE__*/_interopDefaultCompat(yaml);
|
|
40
|
-
var mime__default = /*#__PURE__*/_interopDefaultCompat(mime);
|
|
41
|
-
var createLimiter__default = /*#__PURE__*/_interopDefaultCompat(createLimiter);
|
|
42
|
-
var recursiveReadDir__default = /*#__PURE__*/_interopDefaultCompat(recursiveReadDir);
|
|
43
|
-
var Docker__default = /*#__PURE__*/_interopDefaultCompat(Docker);
|
|
44
|
-
var JSON5__default = /*#__PURE__*/_interopDefaultCompat(JSON5);
|
|
45
|
-
var express__default = /*#__PURE__*/_interopDefaultCompat(express);
|
|
46
|
-
var os__default = /*#__PURE__*/_interopDefaultCompat(os);
|
|
47
|
-
|
|
48
|
-
const getContentTypeForExtension = (ext) => {
|
|
49
|
-
const defaultContentType = "text/plain; charset=utf-8";
|
|
50
|
-
const excludedTypes = [
|
|
51
|
-
"text/html",
|
|
52
|
-
"text/xml",
|
|
53
|
-
"image/svg+xml",
|
|
54
|
-
"text/xsl",
|
|
55
|
-
"application/vnd.wap.xhtml+xml",
|
|
56
|
-
"multipart/x-mixed-replace",
|
|
57
|
-
"text/rdf",
|
|
58
|
-
"application/mathml+xml",
|
|
59
|
-
"application/octet-stream",
|
|
60
|
-
"application/rdf+xml",
|
|
61
|
-
"application/xhtml+xml",
|
|
62
|
-
"application/xml",
|
|
63
|
-
"text/cache-manifest",
|
|
64
|
-
"text/vtt"
|
|
65
|
-
];
|
|
66
|
-
if (ext.match(
|
|
67
|
-
/htm|xml|svg|appcache|manifest|mathml|owl|rdf|rng|vtt|xht|xsd|xsl/i
|
|
68
|
-
)) {
|
|
69
|
-
return defaultContentType;
|
|
70
|
-
}
|
|
71
|
-
const contentType = mime__default.default.lookup(ext);
|
|
72
|
-
if (contentType && excludedTypes.includes(contentType)) {
|
|
73
|
-
return defaultContentType;
|
|
74
|
-
}
|
|
75
|
-
return mime__default.default.contentType(ext) || defaultContentType;
|
|
76
|
-
};
|
|
77
|
-
const getHeadersForFileExtension = (fileExtension) => {
|
|
78
|
-
return {
|
|
79
|
-
"Content-Type": getContentTypeForExtension(fileExtension)
|
|
80
|
-
};
|
|
81
|
-
};
|
|
82
|
-
const getFileTreeRecursively = async (rootDirPath) => {
|
|
83
|
-
const fileList = await recursiveReadDir__default.default(rootDirPath).catch((error) => {
|
|
84
|
-
throw new Error(`Failed to read template directory: ${error.message}`);
|
|
85
|
-
});
|
|
86
|
-
return fileList;
|
|
87
|
-
};
|
|
88
|
-
const lowerCaseEntityTriplet = (posixPath) => {
|
|
89
|
-
const [namespace, kind, name, ...rest] = posixPath.split(path__default.default.posix.sep);
|
|
90
|
-
const lowerNamespace = namespace.toLowerCase();
|
|
91
|
-
const lowerKind = kind.toLowerCase();
|
|
92
|
-
const lowerName = name.toLowerCase();
|
|
93
|
-
return [lowerNamespace, lowerKind, lowerName, ...rest].join(path__default.default.posix.sep);
|
|
94
|
-
};
|
|
95
|
-
const lowerCaseEntityTripletInStoragePath = (originalPath) => {
|
|
96
|
-
let posixPath = originalPath;
|
|
97
|
-
if (originalPath.includes(path__default.default.win32.sep)) {
|
|
98
|
-
posixPath = originalPath.split(path__default.default.win32.sep).join(path__default.default.posix.sep);
|
|
99
|
-
}
|
|
100
|
-
const parts = posixPath.split(path__default.default.posix.sep);
|
|
101
|
-
if (parts[0] === "") {
|
|
102
|
-
parts.shift();
|
|
103
|
-
}
|
|
104
|
-
if (parts.length <= 3) {
|
|
105
|
-
throw new Error(
|
|
106
|
-
`Encountered file unmanaged by TechDocs ${originalPath}. Skipping.`
|
|
107
|
-
);
|
|
108
|
-
}
|
|
109
|
-
return lowerCaseEntityTriplet(parts.join(path__default.default.posix.sep));
|
|
110
|
-
};
|
|
111
|
-
const normalizeExternalStorageRootPath = (posixPath) => {
|
|
112
|
-
let normalizedPath = posixPath;
|
|
113
|
-
if (posixPath.startsWith(path__default.default.posix.sep)) {
|
|
114
|
-
normalizedPath = posixPath.slice(1);
|
|
115
|
-
}
|
|
116
|
-
if (normalizedPath.endsWith(path__default.default.posix.sep)) {
|
|
117
|
-
normalizedPath = normalizedPath.slice(0, normalizedPath.length - 1);
|
|
118
|
-
}
|
|
119
|
-
return normalizedPath;
|
|
120
|
-
};
|
|
121
|
-
const getStaleFiles = (newFiles, oldFiles) => {
|
|
122
|
-
const staleFiles = new Set(oldFiles);
|
|
123
|
-
const removedParentDirs = /* @__PURE__ */ new Set();
|
|
124
|
-
newFiles.forEach((newFile) => {
|
|
125
|
-
staleFiles.delete(newFile);
|
|
126
|
-
let parentDir = newFile.substring(0, newFile.lastIndexOf("/"));
|
|
127
|
-
while (!removedParentDirs.has(parentDir) && parentDir.length >= newFile.indexOf("/")) {
|
|
128
|
-
staleFiles.delete(parentDir);
|
|
129
|
-
removedParentDirs.add(parentDir);
|
|
130
|
-
parentDir = parentDir.substring(0, parentDir.lastIndexOf("/"));
|
|
131
|
-
}
|
|
132
|
-
});
|
|
133
|
-
return Array.from(staleFiles);
|
|
134
|
-
};
|
|
135
|
-
const getCloudPathForLocalPath = (entity, localPath = "", useLegacyPathCasing = false, externalStorageRootPath = "") => {
|
|
136
|
-
const relativeFilePathPosix = localPath.split(path__default.default.sep).join(path__default.default.posix.sep);
|
|
137
|
-
const entityRootDir = `${entity.metadata?.namespace ?? catalogModel.DEFAULT_NAMESPACE}/${entity.kind}/${entity.metadata.name}`;
|
|
138
|
-
const relativeFilePathTriplet = `${entityRootDir}/${relativeFilePathPosix}`;
|
|
139
|
-
const destination = useLegacyPathCasing ? relativeFilePathTriplet : lowerCaseEntityTriplet(relativeFilePathTriplet);
|
|
140
|
-
const destinationWithRoot = [
|
|
141
|
-
// The extra filter prevents unintended double slashes and prefixes.
|
|
142
|
-
...externalStorageRootPath.split(path__default.default.posix.sep).filter((s) => s !== ""),
|
|
143
|
-
destination
|
|
144
|
-
].join("/");
|
|
145
|
-
return destinationWithRoot;
|
|
146
|
-
};
|
|
147
|
-
const bulkStorageOperation = async (operation, args, { concurrencyLimit } = { concurrencyLimit: 25 }) => {
|
|
148
|
-
const limiter = createLimiter__default.default(concurrencyLimit);
|
|
149
|
-
await Promise.all(args.map((arg) => limiter(operation, arg)));
|
|
150
|
-
};
|
|
151
|
-
const isValidContentPath = (bucketRoot, contentPath) => {
|
|
152
|
-
const relativePath = path__default.default.posix.relative(bucketRoot, contentPath);
|
|
153
|
-
if (relativePath === "") {
|
|
154
|
-
return true;
|
|
155
|
-
}
|
|
156
|
-
const outsideBase = relativePath.startsWith("..");
|
|
157
|
-
const differentDrive = path__default.default.posix.isAbsolute(relativePath);
|
|
158
|
-
return !outsideBase && !differentDrive;
|
|
159
|
-
};
|
|
160
|
-
|
|
161
|
-
function getGeneratorKey(entity) {
|
|
162
|
-
if (!entity) {
|
|
163
|
-
throw new Error("No entity provided");
|
|
164
|
-
}
|
|
165
|
-
return "techdocs";
|
|
166
|
-
}
|
|
167
|
-
const runCommand = async ({
|
|
168
|
-
command,
|
|
169
|
-
args,
|
|
170
|
-
options,
|
|
171
|
-
logStream = new stream.PassThrough()
|
|
172
|
-
}) => {
|
|
173
|
-
await new Promise((resolve, reject) => {
|
|
174
|
-
const process = child_process.spawn(command, args, options);
|
|
175
|
-
process.stdout.on("data", (stream) => {
|
|
176
|
-
logStream.write(stream);
|
|
177
|
-
});
|
|
178
|
-
process.stderr.on("data", (stream) => {
|
|
179
|
-
logStream.write(stream);
|
|
180
|
-
});
|
|
181
|
-
process.on("error", (error) => {
|
|
182
|
-
return reject(error);
|
|
183
|
-
});
|
|
184
|
-
process.on("close", (code) => {
|
|
185
|
-
if (code !== 0) {
|
|
186
|
-
return reject(`Command ${command} failed, exit code: ${code}`);
|
|
187
|
-
}
|
|
188
|
-
return resolve();
|
|
189
|
-
});
|
|
190
|
-
});
|
|
191
|
-
};
|
|
192
|
-
const getRepoUrlFromLocationAnnotation = (parsedLocationAnnotation, scmIntegrations, docsFolder = "docs") => {
|
|
193
|
-
const { type: locationType, target } = parsedLocationAnnotation;
|
|
194
|
-
if (locationType === "url") {
|
|
195
|
-
const integration = scmIntegrations.byUrl(target);
|
|
196
|
-
if (integration && ["github", "gitlab", "bitbucketServer", "harness"].includes(
|
|
197
|
-
integration.type
|
|
198
|
-
)) {
|
|
199
|
-
const { filepathtype } = gitUrlParse__default.default(target);
|
|
200
|
-
if (filepathtype === "") {
|
|
201
|
-
return { repo_url: target };
|
|
202
|
-
}
|
|
203
|
-
const sourceFolder = integration.resolveUrl({
|
|
204
|
-
url: `./${docsFolder}`,
|
|
205
|
-
base: target.endsWith("/") ? target : `${target}/`
|
|
206
|
-
});
|
|
207
|
-
return {
|
|
208
|
-
repo_url: target,
|
|
209
|
-
edit_uri: integration.resolveEditUrl(sourceFolder)
|
|
210
|
-
};
|
|
211
|
-
}
|
|
212
|
-
}
|
|
213
|
-
return {};
|
|
214
|
-
};
|
|
215
|
-
class UnknownTag {
|
|
216
|
-
constructor(data, type) {
|
|
217
|
-
this.data = data;
|
|
218
|
-
this.type = type;
|
|
219
|
-
}
|
|
220
|
-
}
|
|
221
|
-
const MKDOCS_SCHEMA = yaml.DEFAULT_SCHEMA.extend([
|
|
222
|
-
new yaml.Type("", {
|
|
223
|
-
kind: "scalar",
|
|
224
|
-
multi: true,
|
|
225
|
-
representName: (o) => o.type,
|
|
226
|
-
represent: (o) => o.data ?? "",
|
|
227
|
-
instanceOf: UnknownTag,
|
|
228
|
-
construct: (data, type) => new UnknownTag(data, type)
|
|
229
|
-
}),
|
|
230
|
-
new yaml.Type("tag:", {
|
|
231
|
-
kind: "mapping",
|
|
232
|
-
multi: true,
|
|
233
|
-
representName: (o) => o.type,
|
|
234
|
-
represent: (o) => o.data ?? "",
|
|
235
|
-
instanceOf: UnknownTag,
|
|
236
|
-
construct: (data, type) => new UnknownTag(data, type)
|
|
237
|
-
}),
|
|
238
|
-
new yaml.Type("", {
|
|
239
|
-
kind: "sequence",
|
|
240
|
-
multi: true,
|
|
241
|
-
representName: (o) => o.type,
|
|
242
|
-
represent: (o) => o.data ?? "",
|
|
243
|
-
instanceOf: UnknownTag,
|
|
244
|
-
construct: (data, type) => new UnknownTag(data, type)
|
|
245
|
-
})
|
|
246
|
-
]);
|
|
247
|
-
const generateMkdocsYml = async (inputDir, siteOptions) => {
|
|
248
|
-
try {
|
|
249
|
-
const mkdocsYmlPath = path__default.default.join(inputDir, "mkdocs.yml");
|
|
250
|
-
const defaultSiteName = siteOptions?.name ?? "Documentation Site";
|
|
251
|
-
const defaultMkdocsContent = {
|
|
252
|
-
site_name: defaultSiteName,
|
|
253
|
-
docs_dir: "docs",
|
|
254
|
-
plugins: ["techdocs-core"]
|
|
255
|
-
};
|
|
256
|
-
await fs__default.default.writeFile(
|
|
257
|
-
mkdocsYmlPath,
|
|
258
|
-
yaml__default.default.dump(defaultMkdocsContent, { schema: MKDOCS_SCHEMA })
|
|
259
|
-
);
|
|
260
|
-
} catch (error) {
|
|
261
|
-
throw new errors.ForwardedError("Could not generate mkdocs.yml file", error);
|
|
262
|
-
}
|
|
263
|
-
};
|
|
264
|
-
const getMkdocsYml = async (inputDir, options) => {
|
|
265
|
-
let mkdocsYmlPath;
|
|
266
|
-
let mkdocsYmlFileString;
|
|
267
|
-
try {
|
|
268
|
-
if (options?.mkdocsConfigFileName) {
|
|
269
|
-
mkdocsYmlPath = path__default.default.join(inputDir, options.mkdocsConfigFileName);
|
|
270
|
-
if (!await fs__default.default.pathExists(mkdocsYmlPath)) {
|
|
271
|
-
throw new Error(`The specified file ${mkdocsYmlPath} does not exist`);
|
|
272
|
-
}
|
|
273
|
-
mkdocsYmlFileString = await fs__default.default.readFile(mkdocsYmlPath, "utf8");
|
|
274
|
-
return {
|
|
275
|
-
path: mkdocsYmlPath,
|
|
276
|
-
content: mkdocsYmlFileString,
|
|
277
|
-
configIsTemporary: false
|
|
278
|
-
};
|
|
279
|
-
}
|
|
280
|
-
mkdocsYmlPath = path__default.default.join(inputDir, "mkdocs.yaml");
|
|
281
|
-
if (await fs__default.default.pathExists(mkdocsYmlPath)) {
|
|
282
|
-
mkdocsYmlFileString = await fs__default.default.readFile(mkdocsYmlPath, "utf8");
|
|
283
|
-
return {
|
|
284
|
-
path: mkdocsYmlPath,
|
|
285
|
-
content: mkdocsYmlFileString,
|
|
286
|
-
configIsTemporary: false
|
|
287
|
-
};
|
|
288
|
-
}
|
|
289
|
-
mkdocsYmlPath = path__default.default.join(inputDir, "mkdocs.yml");
|
|
290
|
-
if (await fs__default.default.pathExists(mkdocsYmlPath)) {
|
|
291
|
-
mkdocsYmlFileString = await fs__default.default.readFile(mkdocsYmlPath, "utf8");
|
|
292
|
-
return {
|
|
293
|
-
path: mkdocsYmlPath,
|
|
294
|
-
content: mkdocsYmlFileString,
|
|
295
|
-
configIsTemporary: false
|
|
296
|
-
};
|
|
297
|
-
}
|
|
298
|
-
await generateMkdocsYml(inputDir, options);
|
|
299
|
-
mkdocsYmlFileString = await fs__default.default.readFile(mkdocsYmlPath, "utf8");
|
|
300
|
-
} catch (error) {
|
|
301
|
-
throw new errors.ForwardedError(
|
|
302
|
-
"Could not read MkDocs YAML config file mkdocs.yml or mkdocs.yaml or default for validation",
|
|
303
|
-
error
|
|
304
|
-
);
|
|
305
|
-
}
|
|
306
|
-
return {
|
|
307
|
-
path: mkdocsYmlPath,
|
|
308
|
-
content: mkdocsYmlFileString,
|
|
309
|
-
configIsTemporary: true
|
|
310
|
-
};
|
|
311
|
-
};
|
|
312
|
-
const validateMkdocsYaml = async (inputDir, mkdocsYmlFileString) => {
|
|
313
|
-
const mkdocsYml = yaml__default.default.load(mkdocsYmlFileString, {
|
|
314
|
-
schema: MKDOCS_SCHEMA
|
|
315
|
-
});
|
|
316
|
-
if (mkdocsYml === null || typeof mkdocsYml !== "object") {
|
|
317
|
-
return void 0;
|
|
318
|
-
}
|
|
319
|
-
const parsedMkdocsYml = mkdocsYml;
|
|
320
|
-
if (parsedMkdocsYml.docs_dir && !backendPluginApi.isChildPath(inputDir, path.resolve(inputDir, parsedMkdocsYml.docs_dir))) {
|
|
321
|
-
throw new Error(
|
|
322
|
-
`docs_dir configuration value in mkdocs can't be an absolute directory or start with ../ for security reasons.
|
|
323
|
-
Use relative paths instead which are resolved relative to your mkdocs.yml file location.`
|
|
324
|
-
);
|
|
325
|
-
}
|
|
326
|
-
return parsedMkdocsYml.docs_dir;
|
|
327
|
-
};
|
|
328
|
-
const patchIndexPreBuild = async ({
|
|
329
|
-
inputDir,
|
|
330
|
-
logger,
|
|
331
|
-
docsDir = "docs"
|
|
332
|
-
}) => {
|
|
333
|
-
const docsPath = path__default.default.join(inputDir, docsDir);
|
|
334
|
-
const indexMdPath = path__default.default.join(docsPath, "index.md");
|
|
335
|
-
if (await fs__default.default.pathExists(indexMdPath)) {
|
|
336
|
-
return;
|
|
337
|
-
}
|
|
338
|
-
logger.warn(`${path__default.default.join(docsDir, "index.md")} not found.`);
|
|
339
|
-
const fallbacks = [
|
|
340
|
-
path__default.default.join(docsPath, "README.md"),
|
|
341
|
-
path__default.default.join(docsPath, "readme.md"),
|
|
342
|
-
path__default.default.join(inputDir, "README.md"),
|
|
343
|
-
path__default.default.join(inputDir, "readme.md")
|
|
344
|
-
];
|
|
345
|
-
await fs__default.default.ensureDir(docsPath);
|
|
346
|
-
for (const filePath of fallbacks) {
|
|
347
|
-
try {
|
|
348
|
-
await fs__default.default.copyFile(filePath, indexMdPath);
|
|
349
|
-
return;
|
|
350
|
-
} catch (error) {
|
|
351
|
-
logger.warn(`${path__default.default.relative(inputDir, filePath)} not found.`);
|
|
352
|
-
}
|
|
353
|
-
}
|
|
354
|
-
logger.warn(
|
|
355
|
-
`Could not find any techdocs' index file. Please make sure at least one of ${[
|
|
356
|
-
indexMdPath,
|
|
357
|
-
...fallbacks
|
|
358
|
-
].join(" ")} exists.`
|
|
359
|
-
);
|
|
360
|
-
};
|
|
361
|
-
const createOrUpdateMetadata = async (techdocsMetadataPath, logger) => {
|
|
362
|
-
const techdocsMetadataDir = techdocsMetadataPath.split(path__default.default.sep).slice(0, -1).join(path__default.default.sep);
|
|
363
|
-
try {
|
|
364
|
-
await fs__default.default.access(techdocsMetadataPath, fs__default.default.constants.F_OK);
|
|
365
|
-
} catch (err) {
|
|
366
|
-
await fs__default.default.writeJson(techdocsMetadataPath, JSON.parse("{}"));
|
|
367
|
-
}
|
|
368
|
-
let json;
|
|
369
|
-
try {
|
|
370
|
-
json = await fs__default.default.readJson(techdocsMetadataPath);
|
|
371
|
-
} catch (err) {
|
|
372
|
-
errors.assertError(err);
|
|
373
|
-
const message = `Invalid JSON at ${techdocsMetadataPath} with error ${err.message}`;
|
|
374
|
-
logger.error(message);
|
|
375
|
-
throw new Error(message);
|
|
376
|
-
}
|
|
377
|
-
json.build_timestamp = Date.now();
|
|
378
|
-
try {
|
|
379
|
-
json.files = (await getFileTreeRecursively(techdocsMetadataDir)).map(
|
|
380
|
-
(file) => file.replace(`${techdocsMetadataDir}${path__default.default.sep}`, "")
|
|
381
|
-
);
|
|
382
|
-
} catch (err) {
|
|
383
|
-
errors.assertError(err);
|
|
384
|
-
json.files = [];
|
|
385
|
-
logger.warn(`Unable to add files list to metadata: ${err.message}`);
|
|
386
|
-
}
|
|
387
|
-
await fs__default.default.writeJson(techdocsMetadataPath, json);
|
|
388
|
-
return;
|
|
389
|
-
};
|
|
390
|
-
const storeEtagMetadata = async (techdocsMetadataPath, etag) => {
|
|
391
|
-
const json = await fs__default.default.readJson(techdocsMetadataPath);
|
|
392
|
-
json.etag = etag;
|
|
393
|
-
await fs__default.default.writeJson(techdocsMetadataPath, json);
|
|
394
|
-
};
|
|
395
|
-
|
|
396
|
-
const patchMkdocsFile = async (mkdocsYmlPath, logger, updateAction) => {
|
|
397
|
-
let didEdit = false;
|
|
398
|
-
let mkdocsYmlFileString;
|
|
399
|
-
try {
|
|
400
|
-
mkdocsYmlFileString = await fs__default.default.readFile(mkdocsYmlPath, "utf8");
|
|
401
|
-
} catch (error) {
|
|
402
|
-
errors.assertError(error);
|
|
403
|
-
logger.warn(
|
|
404
|
-
`Could not read MkDocs YAML config file ${mkdocsYmlPath} before running the generator: ${error.message}`
|
|
405
|
-
);
|
|
406
|
-
return;
|
|
407
|
-
}
|
|
408
|
-
let mkdocsYml;
|
|
409
|
-
try {
|
|
410
|
-
mkdocsYml = yaml__default.default.load(mkdocsYmlFileString, { schema: MKDOCS_SCHEMA });
|
|
411
|
-
if (typeof mkdocsYml === "string" || typeof mkdocsYml === "undefined") {
|
|
412
|
-
throw new Error("Bad YAML format.");
|
|
413
|
-
}
|
|
414
|
-
} catch (error) {
|
|
415
|
-
errors.assertError(error);
|
|
416
|
-
logger.warn(
|
|
417
|
-
`Error in parsing YAML at ${mkdocsYmlPath} before running the generator. ${error.message}`
|
|
418
|
-
);
|
|
419
|
-
return;
|
|
420
|
-
}
|
|
421
|
-
didEdit = updateAction(mkdocsYml);
|
|
422
|
-
try {
|
|
423
|
-
if (didEdit) {
|
|
424
|
-
await fs__default.default.writeFile(
|
|
425
|
-
mkdocsYmlPath,
|
|
426
|
-
yaml__default.default.dump(mkdocsYml, { schema: MKDOCS_SCHEMA }),
|
|
427
|
-
"utf8"
|
|
428
|
-
);
|
|
429
|
-
}
|
|
430
|
-
} catch (error) {
|
|
431
|
-
errors.assertError(error);
|
|
432
|
-
logger.warn(
|
|
433
|
-
`Could not write to ${mkdocsYmlPath} after updating it before running the generator. ${error.message}`
|
|
434
|
-
);
|
|
435
|
-
return;
|
|
436
|
-
}
|
|
437
|
-
};
|
|
438
|
-
const patchMkdocsYmlPreBuild = async (mkdocsYmlPath, logger, parsedLocationAnnotation, scmIntegrations) => {
|
|
439
|
-
await patchMkdocsFile(mkdocsYmlPath, logger, (mkdocsYml) => {
|
|
440
|
-
if (!("repo_url" in mkdocsYml) || !("edit_uri" in mkdocsYml)) {
|
|
441
|
-
const result = getRepoUrlFromLocationAnnotation(
|
|
442
|
-
parsedLocationAnnotation,
|
|
443
|
-
scmIntegrations,
|
|
444
|
-
mkdocsYml.docs_dir
|
|
445
|
-
);
|
|
446
|
-
if (result.repo_url || result.edit_uri) {
|
|
447
|
-
mkdocsYml.repo_url = mkdocsYml.repo_url || result.repo_url;
|
|
448
|
-
mkdocsYml.edit_uri = mkdocsYml.edit_uri || result.edit_uri;
|
|
449
|
-
logger.info(
|
|
450
|
-
`Set ${JSON.stringify(
|
|
451
|
-
result
|
|
452
|
-
)}. You can disable this feature by manually setting 'repo_url' or 'edit_uri' according to the MkDocs documentation at https://www.mkdocs.org/user-guide/configuration/#repo_url`
|
|
453
|
-
);
|
|
454
|
-
return true;
|
|
455
|
-
}
|
|
456
|
-
}
|
|
457
|
-
return false;
|
|
458
|
-
});
|
|
459
|
-
};
|
|
460
|
-
const patchMkdocsYmlWithPlugins = async (mkdocsYmlPath, logger, defaultPlugins = ["techdocs-core"]) => {
|
|
461
|
-
await patchMkdocsFile(mkdocsYmlPath, logger, (mkdocsYml) => {
|
|
462
|
-
if (!("plugins" in mkdocsYml)) {
|
|
463
|
-
mkdocsYml.plugins = defaultPlugins;
|
|
464
|
-
return true;
|
|
465
|
-
}
|
|
466
|
-
let changesMade = false;
|
|
467
|
-
defaultPlugins.forEach((dp) => {
|
|
468
|
-
if (!(mkdocsYml.plugins.includes(dp) || mkdocsYml.plugins.some((p) => p.hasOwnProperty(dp)))) {
|
|
469
|
-
mkdocsYml.plugins = [.../* @__PURE__ */ new Set([...mkdocsYml.plugins, dp])];
|
|
470
|
-
changesMade = true;
|
|
471
|
-
}
|
|
472
|
-
});
|
|
473
|
-
return changesMade;
|
|
474
|
-
});
|
|
475
|
-
};
|
|
476
|
-
|
|
477
|
-
const pipeline = util.promisify(stream.pipeline);
|
|
478
|
-
class DockerContainerRunner {
|
|
479
|
-
dockerClient;
|
|
480
|
-
constructor() {
|
|
481
|
-
this.dockerClient = new Docker__default.default();
|
|
482
|
-
}
|
|
483
|
-
async runContainer(options) {
|
|
484
|
-
const {
|
|
485
|
-
imageName,
|
|
486
|
-
command,
|
|
487
|
-
args,
|
|
488
|
-
logStream = new stream.PassThrough(),
|
|
489
|
-
mountDirs = {},
|
|
490
|
-
workingDir,
|
|
491
|
-
envVars = {},
|
|
492
|
-
pullImage = true,
|
|
493
|
-
defaultUser = false
|
|
494
|
-
} = options;
|
|
495
|
-
try {
|
|
496
|
-
await this.dockerClient.ping();
|
|
497
|
-
} catch (e) {
|
|
498
|
-
throw new errors.ForwardedError(
|
|
499
|
-
"This operation requires Docker. Docker does not appear to be available. Docker.ping() failed with",
|
|
500
|
-
e
|
|
501
|
-
);
|
|
502
|
-
}
|
|
503
|
-
if (pullImage) {
|
|
504
|
-
await new Promise((resolve, reject) => {
|
|
505
|
-
this.dockerClient.pull(imageName, {}, (err, stream) => {
|
|
506
|
-
if (err) {
|
|
507
|
-
reject(err);
|
|
508
|
-
} else if (!stream) {
|
|
509
|
-
reject(
|
|
510
|
-
new Error(
|
|
511
|
-
"Unexpeected error: no stream returned from Docker while pulling image"
|
|
512
|
-
)
|
|
513
|
-
);
|
|
514
|
-
} else {
|
|
515
|
-
pipeline(stream, logStream, { end: false }).then(resolve).catch(reject);
|
|
516
|
-
}
|
|
517
|
-
});
|
|
518
|
-
});
|
|
519
|
-
}
|
|
520
|
-
const userOptions = {};
|
|
521
|
-
if (!defaultUser && process.getuid && process.getgid) {
|
|
522
|
-
userOptions.User = `${process.getuid()}:${process.getgid()}`;
|
|
523
|
-
}
|
|
524
|
-
const Volumes = {};
|
|
525
|
-
for (const containerDir of Object.values(mountDirs)) {
|
|
526
|
-
Volumes[containerDir] = {};
|
|
527
|
-
}
|
|
528
|
-
const Binds = [];
|
|
529
|
-
for (const [hostDir, containerDir] of Object.entries(mountDirs)) {
|
|
530
|
-
const realHostDir = await fs__default.default.realpath(hostDir);
|
|
531
|
-
Binds.push(`${realHostDir}:${containerDir}`);
|
|
532
|
-
}
|
|
533
|
-
const Env = new Array();
|
|
534
|
-
for (const [key, value] of Object.entries(envVars)) {
|
|
535
|
-
Env.push(`${key}=${value}`);
|
|
536
|
-
}
|
|
537
|
-
const [{ Error: error, StatusCode: statusCode }] = await this.dockerClient.run(imageName, args, logStream, {
|
|
538
|
-
Volumes,
|
|
539
|
-
HostConfig: {
|
|
540
|
-
AutoRemove: true,
|
|
541
|
-
Binds
|
|
542
|
-
},
|
|
543
|
-
...workingDir ? { WorkingDir: workingDir } : {},
|
|
544
|
-
Entrypoint: command,
|
|
545
|
-
Env,
|
|
546
|
-
...userOptions
|
|
547
|
-
});
|
|
548
|
-
if (error) {
|
|
549
|
-
throw new Error(
|
|
550
|
-
`Docker failed to run with the following error message: ${error}`
|
|
551
|
-
);
|
|
552
|
-
}
|
|
553
|
-
if (statusCode !== 0) {
|
|
554
|
-
throw new Error(
|
|
555
|
-
`Docker container returned a non-zero exit code (${statusCode})`
|
|
556
|
-
);
|
|
557
|
-
}
|
|
558
|
-
}
|
|
559
|
-
}
|
|
560
|
-
|
|
561
|
-
class TechdocsGenerator {
|
|
562
|
-
/**
|
|
563
|
-
* The default docker image (and version) used to generate content. Public
|
|
564
|
-
* and static so that techdocs-node consumers can use the same version.
|
|
565
|
-
*/
|
|
566
|
-
static defaultDockerImage = "spotify/techdocs:v1.2.4";
|
|
567
|
-
logger;
|
|
568
|
-
containerRunner;
|
|
569
|
-
options;
|
|
570
|
-
scmIntegrations;
|
|
571
|
-
/**
|
|
572
|
-
* Returns a instance of TechDocs generator
|
|
573
|
-
* @param config - A Backstage configuration
|
|
574
|
-
* @param options - Options to configure the generator
|
|
575
|
-
*/
|
|
576
|
-
static fromConfig(config, options) {
|
|
577
|
-
const { containerRunner, logger } = options;
|
|
578
|
-
const scmIntegrations = integration.ScmIntegrations.fromConfig(config);
|
|
579
|
-
return new TechdocsGenerator({
|
|
580
|
-
logger,
|
|
581
|
-
containerRunner,
|
|
582
|
-
config,
|
|
583
|
-
scmIntegrations
|
|
584
|
-
});
|
|
585
|
-
}
|
|
586
|
-
constructor(options) {
|
|
587
|
-
this.logger = options.logger;
|
|
588
|
-
this.options = readGeneratorConfig(options.config, options.logger);
|
|
589
|
-
this.containerRunner = options.containerRunner;
|
|
590
|
-
this.scmIntegrations = options.scmIntegrations;
|
|
591
|
-
}
|
|
592
|
-
/** {@inheritDoc GeneratorBase.run} */
|
|
593
|
-
async run(options) {
|
|
594
|
-
const {
|
|
595
|
-
inputDir,
|
|
596
|
-
outputDir,
|
|
597
|
-
parsedLocationAnnotation,
|
|
598
|
-
etag,
|
|
599
|
-
logger: childLogger,
|
|
600
|
-
logStream,
|
|
601
|
-
siteOptions,
|
|
602
|
-
runAsDefaultUser
|
|
603
|
-
} = options;
|
|
604
|
-
const { path: mkdocsYmlPath, content } = await getMkdocsYml(
|
|
605
|
-
inputDir,
|
|
606
|
-
siteOptions
|
|
607
|
-
);
|
|
608
|
-
const docsDir = await validateMkdocsYaml(inputDir, content);
|
|
609
|
-
if (parsedLocationAnnotation) {
|
|
610
|
-
await patchMkdocsYmlPreBuild(
|
|
611
|
-
mkdocsYmlPath,
|
|
612
|
-
childLogger,
|
|
613
|
-
parsedLocationAnnotation,
|
|
614
|
-
this.scmIntegrations
|
|
615
|
-
);
|
|
616
|
-
}
|
|
617
|
-
if (this.options.legacyCopyReadmeMdToIndexMd) {
|
|
618
|
-
await patchIndexPreBuild({ inputDir, logger: childLogger, docsDir });
|
|
619
|
-
}
|
|
620
|
-
const defaultPlugins = this.options.defaultPlugins ?? [];
|
|
621
|
-
if (!this.options.omitTechdocsCoreMkdocsPlugin && !defaultPlugins.includes("techdocs-core")) {
|
|
622
|
-
defaultPlugins.push("techdocs-core");
|
|
623
|
-
}
|
|
624
|
-
await patchMkdocsYmlWithPlugins(mkdocsYmlPath, childLogger, defaultPlugins);
|
|
625
|
-
const mountDirs = {
|
|
626
|
-
[inputDir]: "/input",
|
|
627
|
-
[outputDir]: "/output"
|
|
628
|
-
};
|
|
629
|
-
try {
|
|
630
|
-
switch (this.options.runIn) {
|
|
631
|
-
case "local":
|
|
632
|
-
await runCommand({
|
|
633
|
-
command: "mkdocs",
|
|
634
|
-
args: ["build", "-d", outputDir, "-v"],
|
|
635
|
-
options: {
|
|
636
|
-
cwd: inputDir
|
|
637
|
-
},
|
|
638
|
-
logStream
|
|
639
|
-
});
|
|
640
|
-
childLogger.info(
|
|
641
|
-
`Successfully generated docs from ${inputDir} into ${outputDir} using local mkdocs`
|
|
642
|
-
);
|
|
643
|
-
break;
|
|
644
|
-
case "docker": {
|
|
645
|
-
const containerRunner = this.containerRunner || new DockerContainerRunner();
|
|
646
|
-
await containerRunner.runContainer({
|
|
647
|
-
imageName: this.options.dockerImage ?? TechdocsGenerator.defaultDockerImage,
|
|
648
|
-
args: ["build", "-d", "/output"],
|
|
649
|
-
logStream,
|
|
650
|
-
mountDirs,
|
|
651
|
-
workingDir: "/input",
|
|
652
|
-
// Set the home directory inside the container as something that applications can
|
|
653
|
-
// write to, otherwise they will just fail trying to write to /
|
|
654
|
-
envVars: { HOME: "/tmp" },
|
|
655
|
-
pullImage: this.options.pullImage,
|
|
656
|
-
defaultUser: runAsDefaultUser
|
|
657
|
-
});
|
|
658
|
-
childLogger.info(
|
|
659
|
-
`Successfully generated docs from ${inputDir} into ${outputDir} using techdocs-container`
|
|
660
|
-
);
|
|
661
|
-
break;
|
|
662
|
-
}
|
|
663
|
-
default:
|
|
664
|
-
throw new Error(
|
|
665
|
-
`Invalid config value "${this.options.runIn}" provided in 'techdocs.generators.techdocs'.`
|
|
666
|
-
);
|
|
667
|
-
}
|
|
668
|
-
} catch (error) {
|
|
669
|
-
this.logger.debug(
|
|
670
|
-
`Failed to generate docs from ${inputDir} into ${outputDir}`
|
|
671
|
-
);
|
|
672
|
-
throw new errors.ForwardedError(
|
|
673
|
-
`Failed to generate docs from ${inputDir} into ${outputDir}`,
|
|
674
|
-
error
|
|
675
|
-
);
|
|
676
|
-
}
|
|
677
|
-
await createOrUpdateMetadata(
|
|
678
|
-
path__default.default.join(outputDir, "techdocs_metadata.json"),
|
|
679
|
-
childLogger
|
|
680
|
-
);
|
|
681
|
-
if (etag) {
|
|
682
|
-
await storeEtagMetadata(
|
|
683
|
-
path__default.default.join(outputDir, "techdocs_metadata.json"),
|
|
684
|
-
etag
|
|
685
|
-
);
|
|
686
|
-
}
|
|
687
|
-
}
|
|
688
|
-
}
|
|
689
|
-
function readGeneratorConfig(config, logger) {
|
|
690
|
-
const legacyGeneratorType = config.getOptionalString(
|
|
691
|
-
"techdocs.generators.techdocs"
|
|
692
|
-
);
|
|
693
|
-
if (legacyGeneratorType) {
|
|
694
|
-
logger.warn(
|
|
695
|
-
`The 'techdocs.generators.techdocs' configuration key is deprecated and will be removed in the future. Please use 'techdocs.generator' instead. See here https://backstage.io/docs/features/techdocs/configuration`
|
|
696
|
-
);
|
|
697
|
-
}
|
|
698
|
-
return {
|
|
699
|
-
runIn: legacyGeneratorType ?? config.getOptionalString("techdocs.generator.runIn") ?? "docker",
|
|
700
|
-
dockerImage: config.getOptionalString("techdocs.generator.dockerImage"),
|
|
701
|
-
pullImage: config.getOptionalBoolean("techdocs.generator.pullImage"),
|
|
702
|
-
omitTechdocsCoreMkdocsPlugin: config.getOptionalBoolean(
|
|
703
|
-
"techdocs.generator.mkdocs.omitTechdocsCorePlugin"
|
|
704
|
-
),
|
|
705
|
-
legacyCopyReadmeMdToIndexMd: config.getOptionalBoolean(
|
|
706
|
-
"techdocs.generator.mkdocs.legacyCopyReadmeMdToIndexMd"
|
|
707
|
-
),
|
|
708
|
-
defaultPlugins: config.getOptionalStringArray(
|
|
709
|
-
"techdocs.generator.mkdocs.defaultPlugins"
|
|
710
|
-
)
|
|
711
|
-
};
|
|
712
|
-
}
|
|
713
|
-
|
|
714
|
-
class Generators {
|
|
715
|
-
generatorMap = /* @__PURE__ */ new Map();
|
|
716
|
-
/**
|
|
717
|
-
* Returns a generators instance containing a generator for TechDocs
|
|
718
|
-
* @param config - A Backstage configuration
|
|
719
|
-
* @param options - Options to configure the TechDocs generator
|
|
720
|
-
*/
|
|
721
|
-
static async fromConfig(config, options) {
|
|
722
|
-
const generators = new Generators();
|
|
723
|
-
const techdocsGenerator = options.customGenerator ?? TechdocsGenerator.fromConfig(config, options);
|
|
724
|
-
generators.register("techdocs", techdocsGenerator);
|
|
725
|
-
return generators;
|
|
726
|
-
}
|
|
727
|
-
/**
|
|
728
|
-
* Register a generator in the generators collection
|
|
729
|
-
* @param generatorKey - Unique identifier for the generator
|
|
730
|
-
* @param generator - The generator instance to register
|
|
731
|
-
*/
|
|
732
|
-
register(generatorKey, generator) {
|
|
733
|
-
this.generatorMap.set(generatorKey, generator);
|
|
734
|
-
}
|
|
735
|
-
/**
|
|
736
|
-
* Returns the generator for a given TechDocs entity
|
|
737
|
-
* @param entity - A TechDocs entity instance
|
|
738
|
-
*/
|
|
739
|
-
get(entity) {
|
|
740
|
-
const generatorKey = getGeneratorKey(entity);
|
|
741
|
-
const generator = this.generatorMap.get(generatorKey);
|
|
742
|
-
if (!generator) {
|
|
743
|
-
throw new Error(`No generator registered for entity: "${generatorKey}"`);
|
|
744
|
-
}
|
|
745
|
-
return generator;
|
|
746
|
-
}
|
|
747
|
-
}
|
|
748
|
-
|
|
749
|
-
const getMkDocsYml = getMkdocsYml;
|
|
750
|
-
|
|
751
|
-
const parseReferenceAnnotation = (annotationName, entity) => {
|
|
752
|
-
const annotation = entity.metadata.annotations?.[annotationName];
|
|
753
|
-
if (!annotation) {
|
|
754
|
-
throw new errors.InputError(
|
|
755
|
-
`No location annotation provided in entity: ${entity.metadata.name}`
|
|
756
|
-
);
|
|
757
|
-
}
|
|
758
|
-
const { type, target } = catalogModel.parseLocationRef(annotation);
|
|
759
|
-
return {
|
|
760
|
-
type,
|
|
761
|
-
target
|
|
762
|
-
};
|
|
763
|
-
};
|
|
764
|
-
const transformDirLocation = (entity, dirAnnotation, scmIntegrations) => {
|
|
765
|
-
const location = catalogModel.getEntitySourceLocation(entity);
|
|
766
|
-
switch (location.type) {
|
|
767
|
-
case "url": {
|
|
768
|
-
const target = scmIntegrations.resolveUrl({
|
|
769
|
-
url: dirAnnotation.target,
|
|
770
|
-
base: location.target
|
|
771
|
-
});
|
|
772
|
-
return {
|
|
773
|
-
type: "url",
|
|
774
|
-
target
|
|
775
|
-
};
|
|
776
|
-
}
|
|
777
|
-
case "file": {
|
|
778
|
-
const target = backendPluginApi.resolveSafeChildPath(
|
|
779
|
-
path__default.default.dirname(location.target),
|
|
780
|
-
dirAnnotation.target
|
|
781
|
-
);
|
|
782
|
-
return {
|
|
783
|
-
type: "dir",
|
|
784
|
-
target
|
|
785
|
-
};
|
|
786
|
-
}
|
|
787
|
-
default:
|
|
788
|
-
throw new errors.InputError(`Unable to resolve location type ${location.type}`);
|
|
789
|
-
}
|
|
790
|
-
};
|
|
791
|
-
const getLocationForEntity = (entity, scmIntegration) => {
|
|
792
|
-
const annotation = parseReferenceAnnotation(pluginTechdocsCommon.TECHDOCS_ANNOTATION, entity);
|
|
793
|
-
switch (annotation.type) {
|
|
794
|
-
case "url":
|
|
795
|
-
return annotation;
|
|
796
|
-
case "dir":
|
|
797
|
-
return transformDirLocation(entity, annotation, scmIntegration);
|
|
798
|
-
default:
|
|
799
|
-
throw new Error(`Invalid reference annotation ${annotation.type}`);
|
|
800
|
-
}
|
|
801
|
-
};
|
|
802
|
-
const getDocFilesFromRepository = async (reader, entity, opts) => {
|
|
803
|
-
const { target } = parseReferenceAnnotation(pluginTechdocsCommon.TECHDOCS_ANNOTATION, entity);
|
|
804
|
-
opts?.logger?.debug(`Reading files from ${target}`);
|
|
805
|
-
const readTreeResponse = await reader.readTree(target, { etag: opts?.etag });
|
|
806
|
-
const preparedDir = await readTreeResponse.dir();
|
|
807
|
-
opts?.logger?.debug(`Tree downloaded and stored at ${preparedDir}`);
|
|
808
|
-
return {
|
|
809
|
-
preparedDir,
|
|
810
|
-
etag: readTreeResponse.etag
|
|
811
|
-
};
|
|
812
|
-
};
|
|
813
|
-
|
|
814
|
-
class DirectoryPreparer {
|
|
815
|
-
scmIntegrations;
|
|
816
|
-
reader;
|
|
817
|
-
/**
|
|
818
|
-
* Returns a directory preparer instance
|
|
819
|
-
* @param config - A backstage config
|
|
820
|
-
* @param options - A directory preparer options containing a logger and reader
|
|
821
|
-
*/
|
|
822
|
-
static fromConfig(config, options) {
|
|
823
|
-
return new DirectoryPreparer(config, options.logger, options.reader);
|
|
824
|
-
}
|
|
825
|
-
constructor(config, _logger, reader) {
|
|
826
|
-
this.reader = reader;
|
|
827
|
-
this.scmIntegrations = integration.ScmIntegrations.fromConfig(config);
|
|
828
|
-
}
|
|
829
|
-
/** {@inheritDoc PreparerBase.shouldCleanPreparedDirectory} */
|
|
830
|
-
shouldCleanPreparedDirectory() {
|
|
831
|
-
return false;
|
|
832
|
-
}
|
|
833
|
-
/** {@inheritDoc PreparerBase.prepare} */
|
|
834
|
-
async prepare(entity, options) {
|
|
835
|
-
const annotation = parseReferenceAnnotation(pluginTechdocsCommon.TECHDOCS_ANNOTATION, entity);
|
|
836
|
-
const { type, target } = transformDirLocation(
|
|
837
|
-
entity,
|
|
838
|
-
annotation,
|
|
839
|
-
this.scmIntegrations
|
|
840
|
-
);
|
|
841
|
-
switch (type) {
|
|
842
|
-
case "url": {
|
|
843
|
-
options?.logger?.debug(`Reading files from ${target}`);
|
|
844
|
-
const response = await this.reader.readTree(target, {
|
|
845
|
-
etag: options?.etag
|
|
846
|
-
});
|
|
847
|
-
const preparedDir = await response.dir();
|
|
848
|
-
options?.logger?.debug(`Tree downloaded and stored at ${preparedDir}`);
|
|
849
|
-
return {
|
|
850
|
-
preparedDir,
|
|
851
|
-
etag: response.etag
|
|
852
|
-
};
|
|
853
|
-
}
|
|
854
|
-
case "dir": {
|
|
855
|
-
return {
|
|
856
|
-
// the transformation already validated that the target is in a safe location
|
|
857
|
-
preparedDir: target,
|
|
858
|
-
// Instead of supporting caching on local sources, use techdocs-cli for local development and debugging.
|
|
859
|
-
etag: ""
|
|
860
|
-
};
|
|
861
|
-
}
|
|
862
|
-
default:
|
|
863
|
-
throw new errors.InputError(`Unable to resolve location type ${type}`);
|
|
864
|
-
}
|
|
865
|
-
}
|
|
866
|
-
}
|
|
867
|
-
|
|
868
|
-
class UrlPreparer {
|
|
869
|
-
logger;
|
|
870
|
-
reader;
|
|
871
|
-
/**
|
|
872
|
-
* Returns a directory preparer instance
|
|
873
|
-
* @param config - A URL preparer config containing the a logger and reader
|
|
874
|
-
*/
|
|
875
|
-
static fromConfig(options) {
|
|
876
|
-
return new UrlPreparer(options.reader, options.logger);
|
|
877
|
-
}
|
|
878
|
-
constructor(reader, logger) {
|
|
879
|
-
this.logger = logger;
|
|
880
|
-
this.reader = reader;
|
|
881
|
-
}
|
|
882
|
-
/** {@inheritDoc PreparerBase.shouldCleanPreparedDirectory} */
|
|
883
|
-
shouldCleanPreparedDirectory() {
|
|
884
|
-
return true;
|
|
885
|
-
}
|
|
886
|
-
/** {@inheritDoc PreparerBase.prepare} */
|
|
887
|
-
async prepare(entity, options) {
|
|
888
|
-
try {
|
|
889
|
-
return await getDocFilesFromRepository(this.reader, entity, {
|
|
890
|
-
etag: options?.etag,
|
|
891
|
-
logger: this.logger
|
|
892
|
-
});
|
|
893
|
-
} catch (error) {
|
|
894
|
-
errors.assertError(error);
|
|
895
|
-
if (error.name === "NotModifiedError") {
|
|
896
|
-
this.logger.debug(`Cache is valid for etag ${options?.etag}`);
|
|
897
|
-
} else {
|
|
898
|
-
this.logger.debug(
|
|
899
|
-
`Unable to fetch files for building docs ${error.message}`
|
|
900
|
-
);
|
|
901
|
-
}
|
|
902
|
-
throw error;
|
|
903
|
-
}
|
|
904
|
-
}
|
|
905
|
-
}
|
|
906
|
-
|
|
907
|
-
class Preparers {
|
|
908
|
-
preparerMap = /* @__PURE__ */ new Map();
|
|
909
|
-
/**
|
|
910
|
-
* Returns a generators instance containing a generator for TechDocs
|
|
911
|
-
* @public
|
|
912
|
-
* @param backstageConfig - A Backstage configuration
|
|
913
|
-
* @param preparerConfig - Options to configure preparers
|
|
914
|
-
*/
|
|
915
|
-
static async fromConfig(backstageConfig, options) {
|
|
916
|
-
const preparers = new Preparers();
|
|
917
|
-
const urlPreparer = UrlPreparer.fromConfig({
|
|
918
|
-
reader: options.reader,
|
|
919
|
-
logger: options.logger
|
|
920
|
-
});
|
|
921
|
-
preparers.register("url", urlPreparer);
|
|
922
|
-
const directoryPreparer = DirectoryPreparer.fromConfig(backstageConfig, {
|
|
923
|
-
reader: options.reader,
|
|
924
|
-
logger: options.logger
|
|
925
|
-
});
|
|
926
|
-
preparers.register("dir", directoryPreparer);
|
|
927
|
-
return preparers;
|
|
928
|
-
}
|
|
929
|
-
/**
|
|
930
|
-
* Register a preparer in the preparers collection
|
|
931
|
-
* @param protocol - url or dir to associate with preparer
|
|
932
|
-
* @param preparer - The preparer instance to set
|
|
933
|
-
*/
|
|
934
|
-
register(protocol, preparer) {
|
|
935
|
-
this.preparerMap.set(protocol, preparer);
|
|
936
|
-
}
|
|
937
|
-
/**
|
|
938
|
-
* Returns the preparer for a given TechDocs entity
|
|
939
|
-
* @param entity - A TechDocs entity instance
|
|
940
|
-
* @returns
|
|
941
|
-
*/
|
|
942
|
-
get(entity) {
|
|
943
|
-
const { type } = parseReferenceAnnotation(pluginTechdocsCommon.TECHDOCS_ANNOTATION, entity);
|
|
944
|
-
const preparer = this.preparerMap.get(type);
|
|
945
|
-
if (!preparer) {
|
|
946
|
-
throw new Error(`No preparer registered for type: "${type}"`);
|
|
947
|
-
}
|
|
948
|
-
return preparer;
|
|
949
|
-
}
|
|
950
|
-
}
|
|
951
|
-
|
|
952
|
-
const streamToBuffer$1 = (stream) => {
|
|
953
|
-
return new Promise((resolve, reject) => {
|
|
954
|
-
try {
|
|
955
|
-
const chunks = [];
|
|
956
|
-
stream.on("data", (chunk) => chunks.push(chunk));
|
|
957
|
-
stream.on(
|
|
958
|
-
"error",
|
|
959
|
-
(e) => reject(new errors.ForwardedError("Unable to read stream", e))
|
|
960
|
-
);
|
|
961
|
-
stream.on("end", () => resolve(Buffer.concat(chunks)));
|
|
962
|
-
} catch (e) {
|
|
963
|
-
throw new errors.ForwardedError("Unable to parse the response data", e);
|
|
964
|
-
}
|
|
965
|
-
});
|
|
966
|
-
};
|
|
967
|
-
class AwsS3Publish {
|
|
968
|
-
storageClient;
|
|
969
|
-
bucketName;
|
|
970
|
-
legacyPathCasing;
|
|
971
|
-
logger;
|
|
972
|
-
bucketRootPath;
|
|
973
|
-
sse;
|
|
974
|
-
constructor(options) {
|
|
975
|
-
this.storageClient = options.storageClient;
|
|
976
|
-
this.bucketName = options.bucketName;
|
|
977
|
-
this.legacyPathCasing = options.legacyPathCasing;
|
|
978
|
-
this.logger = options.logger;
|
|
979
|
-
this.bucketRootPath = options.bucketRootPath;
|
|
980
|
-
this.sse = options.sse;
|
|
981
|
-
}
|
|
982
|
-
static async fromConfig(config, logger) {
|
|
983
|
-
let bucketName = "";
|
|
984
|
-
try {
|
|
985
|
-
bucketName = config.getString("techdocs.publisher.awsS3.bucketName");
|
|
986
|
-
} catch (error) {
|
|
987
|
-
throw new Error(
|
|
988
|
-
"Since techdocs.publisher.type is set to 'awsS3' in your app config, techdocs.publisher.awsS3.bucketName is required."
|
|
989
|
-
);
|
|
990
|
-
}
|
|
991
|
-
const bucketRootPath = normalizeExternalStorageRootPath(
|
|
992
|
-
config.getOptionalString("techdocs.publisher.awsS3.bucketRootPath") || ""
|
|
993
|
-
);
|
|
994
|
-
const sse = config.getOptionalString("techdocs.publisher.awsS3.sse");
|
|
995
|
-
const region = config.getOptionalString("techdocs.publisher.awsS3.region");
|
|
996
|
-
const accountId = config.getOptionalString(
|
|
997
|
-
"techdocs.publisher.awsS3.accountId"
|
|
998
|
-
);
|
|
999
|
-
const credentialsConfig = config.getOptionalConfig(
|
|
1000
|
-
"techdocs.publisher.awsS3.credentials"
|
|
1001
|
-
);
|
|
1002
|
-
const credsManager = integrationAwsNode.DefaultAwsCredentialsManager.fromConfig(config);
|
|
1003
|
-
const sdkCredentialProvider = await AwsS3Publish.buildCredentials(
|
|
1004
|
-
credsManager,
|
|
1005
|
-
accountId,
|
|
1006
|
-
credentialsConfig,
|
|
1007
|
-
region
|
|
1008
|
-
);
|
|
1009
|
-
const endpoint = config.getOptionalString(
|
|
1010
|
-
"techdocs.publisher.awsS3.endpoint"
|
|
1011
|
-
);
|
|
1012
|
-
const httpsProxy = config.getOptionalString(
|
|
1013
|
-
"techdocs.publisher.awsS3.httpsProxy"
|
|
1014
|
-
);
|
|
1015
|
-
const forcePathStyle = config.getOptionalBoolean(
|
|
1016
|
-
"techdocs.publisher.awsS3.s3ForcePathStyle"
|
|
1017
|
-
);
|
|
1018
|
-
const storageClient = new clientS3.S3Client({
|
|
1019
|
-
customUserAgent: "backstage-aws-techdocs-s3-publisher",
|
|
1020
|
-
credentialDefaultProvider: () => sdkCredentialProvider,
|
|
1021
|
-
...region && { region },
|
|
1022
|
-
...endpoint && { endpoint },
|
|
1023
|
-
...forcePathStyle && { forcePathStyle },
|
|
1024
|
-
...httpsProxy && {
|
|
1025
|
-
requestHandler: new nodeHttpHandler.NodeHttpHandler({
|
|
1026
|
-
httpsAgent: new hpagent.HttpsProxyAgent({ proxy: httpsProxy })
|
|
1027
|
-
})
|
|
1028
|
-
}
|
|
1029
|
-
});
|
|
1030
|
-
const legacyPathCasing = config.getOptionalBoolean(
|
|
1031
|
-
"techdocs.legacyUseCaseSensitiveTripletPaths"
|
|
1032
|
-
) || false;
|
|
1033
|
-
return new AwsS3Publish({
|
|
1034
|
-
storageClient,
|
|
1035
|
-
bucketName,
|
|
1036
|
-
bucketRootPath,
|
|
1037
|
-
legacyPathCasing,
|
|
1038
|
-
logger,
|
|
1039
|
-
sse
|
|
1040
|
-
});
|
|
1041
|
-
}
|
|
1042
|
-
static buildStaticCredentials(accessKeyId, secretAccessKey) {
|
|
1043
|
-
return async () => {
|
|
1044
|
-
return Promise.resolve({
|
|
1045
|
-
accessKeyId,
|
|
1046
|
-
secretAccessKey
|
|
1047
|
-
});
|
|
1048
|
-
};
|
|
1049
|
-
}
|
|
1050
|
-
static async buildCredentials(credsManager, accountId, config, region) {
|
|
1051
|
-
if (accountId) {
|
|
1052
|
-
return (await credsManager.getCredentialProvider({ accountId })).sdkCredentialProvider;
|
|
1053
|
-
}
|
|
1054
|
-
if (!config) {
|
|
1055
|
-
return (await credsManager.getCredentialProvider()).sdkCredentialProvider;
|
|
1056
|
-
}
|
|
1057
|
-
const accessKeyId = config.getOptionalString("accessKeyId");
|
|
1058
|
-
const secretAccessKey = config.getOptionalString("secretAccessKey");
|
|
1059
|
-
const explicitCredentials = accessKeyId && secretAccessKey ? AwsS3Publish.buildStaticCredentials(accessKeyId, secretAccessKey) : (await credsManager.getCredentialProvider()).sdkCredentialProvider;
|
|
1060
|
-
const roleArn = config.getOptionalString("roleArn");
|
|
1061
|
-
if (roleArn) {
|
|
1062
|
-
return credentialProviders.fromTemporaryCredentials({
|
|
1063
|
-
masterCredentials: explicitCredentials,
|
|
1064
|
-
params: {
|
|
1065
|
-
RoleSessionName: "backstage-aws-techdocs-s3-publisher",
|
|
1066
|
-
RoleArn: roleArn
|
|
1067
|
-
},
|
|
1068
|
-
clientConfig: { region }
|
|
1069
|
-
});
|
|
1070
|
-
}
|
|
1071
|
-
return explicitCredentials;
|
|
1072
|
-
}
|
|
1073
|
-
/**
|
|
1074
|
-
* Check if the defined bucket exists. Being able to connect means the configuration is good
|
|
1075
|
-
* and the storage client will work.
|
|
1076
|
-
*/
|
|
1077
|
-
async getReadiness() {
|
|
1078
|
-
try {
|
|
1079
|
-
await this.storageClient.send(
|
|
1080
|
-
new clientS3.HeadBucketCommand({ Bucket: this.bucketName })
|
|
1081
|
-
);
|
|
1082
|
-
this.logger.info(
|
|
1083
|
-
`Successfully connected to the AWS S3 bucket ${this.bucketName}.`
|
|
1084
|
-
);
|
|
1085
|
-
return { isAvailable: true };
|
|
1086
|
-
} catch (error) {
|
|
1087
|
-
this.logger.error(
|
|
1088
|
-
`Could not retrieve metadata about the AWS S3 bucket ${this.bucketName}. Make sure the bucket exists. Also make sure that authentication is setup either by explicitly defining credentials and region in techdocs.publisher.awsS3 in app config or by using environment variables. Refer to https://backstage.io/docs/features/techdocs/using-cloud-storage`
|
|
1089
|
-
);
|
|
1090
|
-
this.logger.error(`from AWS client library`, error);
|
|
1091
|
-
return {
|
|
1092
|
-
isAvailable: false
|
|
1093
|
-
};
|
|
1094
|
-
}
|
|
1095
|
-
}
|
|
1096
|
-
/**
|
|
1097
|
-
* Upload all the files from the generated `directory` to the S3 bucket.
|
|
1098
|
-
* Directory structure used in the bucket is - entityNamespace/entityKind/entityName/index.html
|
|
1099
|
-
*/
|
|
1100
|
-
async publish({
|
|
1101
|
-
entity,
|
|
1102
|
-
directory
|
|
1103
|
-
}) {
|
|
1104
|
-
const objects = [];
|
|
1105
|
-
const useLegacyPathCasing = this.legacyPathCasing;
|
|
1106
|
-
const bucketRootPath = this.bucketRootPath;
|
|
1107
|
-
const sse = this.sse;
|
|
1108
|
-
let existingFiles = [];
|
|
1109
|
-
try {
|
|
1110
|
-
const remoteFolder = getCloudPathForLocalPath(
|
|
1111
|
-
entity,
|
|
1112
|
-
void 0,
|
|
1113
|
-
useLegacyPathCasing,
|
|
1114
|
-
bucketRootPath
|
|
1115
|
-
);
|
|
1116
|
-
existingFiles = await this.getAllObjectsFromBucket({
|
|
1117
|
-
prefix: remoteFolder
|
|
1118
|
-
});
|
|
1119
|
-
} catch (e) {
|
|
1120
|
-
errors.assertError(e);
|
|
1121
|
-
this.logger.error(
|
|
1122
|
-
`Unable to list files for Entity ${entity.metadata.name}: ${e.message}`
|
|
1123
|
-
);
|
|
1124
|
-
}
|
|
1125
|
-
let absoluteFilesToUpload;
|
|
1126
|
-
try {
|
|
1127
|
-
absoluteFilesToUpload = await getFileTreeRecursively(directory);
|
|
1128
|
-
await bulkStorageOperation(
|
|
1129
|
-
async (absoluteFilePath) => {
|
|
1130
|
-
const relativeFilePath = path__default.default.relative(directory, absoluteFilePath);
|
|
1131
|
-
const fileStream = fs__default.default.createReadStream(absoluteFilePath);
|
|
1132
|
-
const params = {
|
|
1133
|
-
Bucket: this.bucketName,
|
|
1134
|
-
Key: getCloudPathForLocalPath(
|
|
1135
|
-
entity,
|
|
1136
|
-
relativeFilePath,
|
|
1137
|
-
useLegacyPathCasing,
|
|
1138
|
-
bucketRootPath
|
|
1139
|
-
),
|
|
1140
|
-
Body: fileStream,
|
|
1141
|
-
...sse && { ServerSideEncryption: sse }
|
|
1142
|
-
};
|
|
1143
|
-
objects.push(params.Key);
|
|
1144
|
-
const upload = new libStorage.Upload({
|
|
1145
|
-
client: this.storageClient,
|
|
1146
|
-
params
|
|
1147
|
-
});
|
|
1148
|
-
return upload.done();
|
|
1149
|
-
},
|
|
1150
|
-
absoluteFilesToUpload,
|
|
1151
|
-
{ concurrencyLimit: 10 }
|
|
1152
|
-
);
|
|
1153
|
-
this.logger.info(
|
|
1154
|
-
`Successfully uploaded all the generated files for Entity ${entity.metadata.name}. Total number of files: ${absoluteFilesToUpload.length}`
|
|
1155
|
-
);
|
|
1156
|
-
} catch (e) {
|
|
1157
|
-
const errorMessage = `Unable to upload file(s) to AWS S3. ${e}`;
|
|
1158
|
-
this.logger.error(errorMessage);
|
|
1159
|
-
throw new Error(errorMessage);
|
|
1160
|
-
}
|
|
1161
|
-
try {
|
|
1162
|
-
const relativeFilesToUpload = absoluteFilesToUpload.map(
|
|
1163
|
-
(absoluteFilePath) => getCloudPathForLocalPath(
|
|
1164
|
-
entity,
|
|
1165
|
-
path__default.default.relative(directory, absoluteFilePath),
|
|
1166
|
-
useLegacyPathCasing,
|
|
1167
|
-
bucketRootPath
|
|
1168
|
-
)
|
|
1169
|
-
);
|
|
1170
|
-
const staleFiles = getStaleFiles(relativeFilesToUpload, existingFiles);
|
|
1171
|
-
await bulkStorageOperation(
|
|
1172
|
-
async (relativeFilePath) => {
|
|
1173
|
-
return await this.storageClient.send(
|
|
1174
|
-
new clientS3.DeleteObjectCommand({
|
|
1175
|
-
Bucket: this.bucketName,
|
|
1176
|
-
Key: relativeFilePath
|
|
1177
|
-
})
|
|
1178
|
-
);
|
|
1179
|
-
},
|
|
1180
|
-
staleFiles,
|
|
1181
|
-
{ concurrencyLimit: 10 }
|
|
1182
|
-
);
|
|
1183
|
-
this.logger.info(
|
|
1184
|
-
`Successfully deleted stale files for Entity ${entity.metadata.name}. Total number of files: ${staleFiles.length}`
|
|
1185
|
-
);
|
|
1186
|
-
} catch (error) {
|
|
1187
|
-
const errorMessage = `Unable to delete file(s) from AWS S3. ${error}`;
|
|
1188
|
-
this.logger.error(errorMessage);
|
|
1189
|
-
}
|
|
1190
|
-
return { objects };
|
|
1191
|
-
}
|
|
1192
|
-
async fetchTechDocsMetadata(entityName) {
|
|
1193
|
-
try {
|
|
1194
|
-
return await new Promise(async (resolve, reject) => {
|
|
1195
|
-
const entityTriplet = `${entityName.namespace}/${entityName.kind}/${entityName.name}`;
|
|
1196
|
-
const entityDir = this.legacyPathCasing ? entityTriplet : lowerCaseEntityTriplet(entityTriplet);
|
|
1197
|
-
const entityRootDir = path__default.default.posix.join(this.bucketRootPath, entityDir);
|
|
1198
|
-
if (!isValidContentPath(this.bucketRootPath, entityRootDir)) {
|
|
1199
|
-
this.logger.error(
|
|
1200
|
-
`Invalid content path found while fetching TechDocs metadata: ${entityRootDir}`
|
|
1201
|
-
);
|
|
1202
|
-
throw new Error(`Metadata Not Found`);
|
|
1203
|
-
}
|
|
1204
|
-
try {
|
|
1205
|
-
const resp = await this.storageClient.send(
|
|
1206
|
-
new clientS3.GetObjectCommand({
|
|
1207
|
-
Bucket: this.bucketName,
|
|
1208
|
-
Key: `${entityRootDir}/techdocs_metadata.json`
|
|
1209
|
-
})
|
|
1210
|
-
);
|
|
1211
|
-
const techdocsMetadataJson = await streamToBuffer$1(
|
|
1212
|
-
resp.Body
|
|
1213
|
-
);
|
|
1214
|
-
if (!techdocsMetadataJson) {
|
|
1215
|
-
throw new Error(
|
|
1216
|
-
`Unable to parse the techdocs metadata file ${entityRootDir}/techdocs_metadata.json.`
|
|
1217
|
-
);
|
|
1218
|
-
}
|
|
1219
|
-
const techdocsMetadata = JSON5__default.default.parse(
|
|
1220
|
-
techdocsMetadataJson.toString("utf-8")
|
|
1221
|
-
);
|
|
1222
|
-
resolve(techdocsMetadata);
|
|
1223
|
-
} catch (err) {
|
|
1224
|
-
errors.assertError(err);
|
|
1225
|
-
this.logger.error(err.message);
|
|
1226
|
-
reject(new Error(err.message));
|
|
1227
|
-
}
|
|
1228
|
-
});
|
|
1229
|
-
} catch (e) {
|
|
1230
|
-
throw new errors.ForwardedError("TechDocs metadata fetch failed", e);
|
|
1231
|
-
}
|
|
1232
|
-
}
|
|
1233
|
-
/**
|
|
1234
|
-
* Express route middleware to serve static files on a route in techdocs-backend.
|
|
1235
|
-
*/
|
|
1236
|
-
docsRouter() {
|
|
1237
|
-
return async (req, res) => {
|
|
1238
|
-
const decodedUri = decodeURI(req.path.replace(/^\//, ""));
|
|
1239
|
-
const filePathNoRoot = this.legacyPathCasing ? decodedUri : lowerCaseEntityTripletInStoragePath(decodedUri);
|
|
1240
|
-
const filePath = path__default.default.posix.join(this.bucketRootPath, filePathNoRoot);
|
|
1241
|
-
if (!isValidContentPath(this.bucketRootPath, filePath)) {
|
|
1242
|
-
this.logger.error(
|
|
1243
|
-
`Attempted to fetch TechDocs content for a file outside of the bucket root: ${filePathNoRoot}`
|
|
1244
|
-
);
|
|
1245
|
-
res.status(404).send("File Not Found");
|
|
1246
|
-
return;
|
|
1247
|
-
}
|
|
1248
|
-
const fileExtension = path__default.default.extname(filePath);
|
|
1249
|
-
const responseHeaders = getHeadersForFileExtension(fileExtension);
|
|
1250
|
-
try {
|
|
1251
|
-
const resp = await this.storageClient.send(
|
|
1252
|
-
new clientS3.GetObjectCommand({ Bucket: this.bucketName, Key: filePath })
|
|
1253
|
-
);
|
|
1254
|
-
for (const [headerKey, headerValue] of Object.entries(
|
|
1255
|
-
responseHeaders
|
|
1256
|
-
)) {
|
|
1257
|
-
res.setHeader(headerKey, headerValue);
|
|
1258
|
-
}
|
|
1259
|
-
res.send(await streamToBuffer$1(resp.Body));
|
|
1260
|
-
} catch (err) {
|
|
1261
|
-
errors.assertError(err);
|
|
1262
|
-
this.logger.warn(
|
|
1263
|
-
`TechDocs S3 router failed to serve static files from bucket ${this.bucketName} at key ${filePath}: ${err.message}`
|
|
1264
|
-
);
|
|
1265
|
-
res.status(404).send("File Not Found");
|
|
1266
|
-
}
|
|
1267
|
-
};
|
|
1268
|
-
}
|
|
1269
|
-
-  /**
-   * A helper function which checks if index.html of an Entity's docs site is available. This
-   * can be used to verify if there are any pre-generated docs available to serve.
-   */
-  async hasDocsBeenGenerated(entity) {
-    try {
-      const entityTriplet = `${entity.metadata.namespace}/${entity.kind}/${entity.metadata.name}`;
-      const entityDir = this.legacyPathCasing ? entityTriplet : lowerCaseEntityTriplet(entityTriplet);
-      const entityRootDir = path__default.default.posix.join(this.bucketRootPath, entityDir);
-      if (!isValidContentPath(this.bucketRootPath, entityRootDir)) {
-        this.logger.error(
-          `Invalid content path found while checking if docs have been generated: ${entityRootDir}`
-        );
-        return Promise.resolve(false);
-      }
-      await this.storageClient.send(
-        new clientS3.HeadObjectCommand({
-          Bucket: this.bucketName,
-          Key: `${entityRootDir}/index.html`
-        })
-      );
-      return Promise.resolve(true);
-    } catch (e) {
-      return Promise.resolve(false);
-    }
-  }
-  async migrateDocsCase({
-    removeOriginal = false,
-    concurrency = 25
-  }) {
-    const allObjects = await this.getAllObjectsFromBucket();
-    const limiter = createLimiter__default.default(concurrency);
-    await Promise.all(
-      allObjects.map(
-        (f) => limiter(async (file) => {
-          let newPath;
-          try {
-            newPath = lowerCaseEntityTripletInStoragePath(file);
-          } catch (e) {
-            errors.assertError(e);
-            this.logger.warn(e.message);
-            return;
-          }
-          if (file === newPath) {
-            return;
-          }
-          try {
-            this.logger.debug(`Migrating ${file}`);
-            await this.storageClient.send(
-              new clientS3.CopyObjectCommand({
-                Bucket: this.bucketName,
-                CopySource: [this.bucketName, file].join("/"),
-                Key: newPath
-              })
-            );
-            if (removeOriginal) {
-              await this.storageClient.send(
-                new clientS3.DeleteObjectCommand({
-                  Bucket: this.bucketName,
-                  Key: file
-                })
-              );
-            }
-          } catch (e) {
-            errors.assertError(e);
-            this.logger.warn(`Unable to migrate ${file}: ${e.message}`);
-          }
-        }, f)
-      )
-    );
-  }
-  /**
-   * Returns a list of all object keys from the configured bucket.
-   */
-  async getAllObjectsFromBucket({ prefix } = { prefix: "" }) {
-    const objects = [];
-    let nextContinuation;
-    let allObjects;
-    do {
-      allObjects = await this.storageClient.send(
-        new clientS3.ListObjectsV2Command({
-          Bucket: this.bucketName,
-          ContinuationToken: nextContinuation,
-          ...prefix ? { Prefix: prefix } : {}
-        })
-      );
-      objects.push(
-        ...(allObjects.Contents || []).map((f) => f.Key || "").filter((f) => !!f)
-      );
-      nextContinuation = allObjects.NextContinuationToken;
-    } while (nextContinuation);
-    return objects;
-  }
-}
-
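
getAllObjectsFromBucket (removed above) drains ListObjectsV2 one page at a time, following NextContinuationToken until it comes back undefined. The same loop in isolation, again assuming AWS SDK v3 and a placeholder bucket name:

    // Sketch: list every key in a bucket, following continuation tokens.
    const { S3Client, ListObjectsV2Command } = require('@aws-sdk/client-s3');

    async function listAllKeys(bucket, prefix = '') {
      const client = new S3Client({});
      const keys = [];
      let token;
      do {
        const page = await client.send(
          new ListObjectsV2Command({
            Bucket: bucket,
            ContinuationToken: token,
            ...(prefix ? { Prefix: prefix } : {}),
          })
        );
        keys.push(...(page.Contents ?? []).map((o) => o.Key).filter(Boolean));
        token = page.NextContinuationToken; // undefined on the last page
      } while (token);
      return keys;
    }
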
-const BATCH_CONCURRENCY = 3;
-class AzureBlobStoragePublish {
-  storageClient;
-  containerName;
-  legacyPathCasing;
-  logger;
-  constructor(options) {
-    this.storageClient = options.storageClient;
-    this.containerName = options.containerName;
-    this.legacyPathCasing = options.legacyPathCasing;
-    this.logger = options.logger;
-  }
-  static fromConfig(config, logger) {
-    let storageClient;
-    let containerName = "";
-    try {
-      containerName = config.getString(
-        "techdocs.publisher.azureBlobStorage.containerName"
-      );
-    } catch (error) {
-      throw new Error(
-        "Since techdocs.publisher.type is set to 'azureBlobStorage' in your app config, techdocs.publisher.azureBlobStorage.containerName is required."
-      );
-    }
-    const legacyPathCasing = config.getOptionalBoolean(
-      "techdocs.legacyUseCaseSensitiveTripletPaths"
-    ) || false;
-    const connectionStringKey = "techdocs.publisher.azureBlobStorage.connectionString";
-    const connectionString = config.getOptionalString(connectionStringKey);
-    if (connectionString) {
-      logger.info(
-        `Using '${connectionStringKey}' configuration to create storage client`
-      );
-      storageClient = storageBlob.BlobServiceClient.fromConnectionString(connectionString);
-    } else {
-      let accountName = "";
-      try {
-        accountName = config.getString(
-          "techdocs.publisher.azureBlobStorage.credentials.accountName"
-        );
-      } catch (error) {
-        throw new Error(
-          "Since techdocs.publisher.type is set to 'azureBlobStorage' in your app config, techdocs.publisher.azureBlobStorage.credentials.accountName is required."
-        );
-      }
-      const accountKey = config.getOptionalString(
-        "techdocs.publisher.azureBlobStorage.credentials.accountKey"
-      );
-      let credential;
-      if (accountKey) {
-        credential = new storageBlob.StorageSharedKeyCredential(accountName, accountKey);
-      } else {
-        credential = new identity.DefaultAzureCredential();
-      }
-      storageClient = new storageBlob.BlobServiceClient(
-        `https://${accountName}.blob.core.windows.net`,
-        credential
-      );
-    }
-    return new AzureBlobStoragePublish({
-      storageClient,
-      containerName,
-      legacyPathCasing,
-      logger
-    });
-  }
-  async getReadiness() {
-    try {
-      const response = await this.storageClient.getContainerClient(this.containerName).getProperties();
-      if (response._response.status === 200) {
-        return {
-          isAvailable: true
-        };
-      }
-      if (response._response.status >= 400) {
-        this.logger.error(
-          `Failed to retrieve metadata from ${response._response.request.url} with status code ${response._response.status}.`
-        );
-      }
-    } catch (e) {
-      errors.assertError(e);
-      this.logger.error(`from Azure Blob Storage client library: ${e.message}`);
-    }
-    this.logger.error(
-      `Could not retrieve metadata about the Azure Blob Storage container ${this.containerName}. Make sure that the Azure project and container exist and the access key is setup correctly techdocs.publisher.azureBlobStorage.credentials defined in app config has correct permissions. Refer to https://backstage.io/docs/features/techdocs/using-cloud-storage`
-    );
-    return { isAvailable: false };
-  }
-  /**
-   * Upload all the files from the generated `directory` to the Azure Blob Storage container.
-   * Directory structure used in the container is - entityNamespace/entityKind/entityName/index.html
-   */
-  async publish({
-    entity,
-    directory
-  }) {
-    const objects = [];
-    const useLegacyPathCasing = this.legacyPathCasing;
-    const remoteFolder = getCloudPathForLocalPath(
-      entity,
-      void 0,
-      useLegacyPathCasing
-    );
-    let existingFiles = [];
-    try {
-      existingFiles = await this.getAllBlobsFromContainer({
-        prefix: remoteFolder,
-        maxPageSize: BATCH_CONCURRENCY
-      });
-    } catch (e) {
-      errors.assertError(e);
-      this.logger.error(
-        `Unable to list files for Entity ${entity.metadata.name}: ${e.message}`
-      );
-    }
-    let absoluteFilesToUpload;
-    let container;
-    try {
-      absoluteFilesToUpload = await getFileTreeRecursively(directory);
-      container = this.storageClient.getContainerClient(this.containerName);
-      const failedOperations = [];
-      await bulkStorageOperation(
-        async (absoluteFilePath) => {
-          const relativeFilePath = path__default.default.normalize(
-            path__default.default.relative(directory, absoluteFilePath)
-          );
-          const remotePath = getCloudPathForLocalPath(
-            entity,
-            relativeFilePath,
-            useLegacyPathCasing
-          );
-          objects.push(remotePath);
-          const response = await container.getBlockBlobClient(remotePath).uploadFile(absoluteFilePath);
-          if (response._response.status >= 400) {
-            failedOperations.push(
-              new Error(
-                `Upload failed for ${absoluteFilePath} with status code ${response._response.status}`
-              )
-            );
-          }
-          return response;
-        },
-        absoluteFilesToUpload,
-        { concurrencyLimit: BATCH_CONCURRENCY }
-      );
-      if (failedOperations.length > 0) {
-        throw new Error(
-          failedOperations.map((r) => r.message).filter(Boolean).join(" ")
-        );
-      }
-      this.logger.info(
-        `Successfully uploaded all the generated files for Entity ${entity.metadata.name}. Total number of files: ${absoluteFilesToUpload.length}`
-      );
-    } catch (e) {
-      const errorMessage = `Unable to upload file(s) to Azure. ${e}`;
-      this.logger.error(errorMessage);
-      throw new Error(errorMessage);
-    }
-    try {
-      const relativeFilesToUpload = absoluteFilesToUpload.map(
-        (absoluteFilePath) => getCloudPathForLocalPath(
-          entity,
-          path__default.default.relative(directory, absoluteFilePath),
-          useLegacyPathCasing
-        )
-      );
-      const staleFiles = getStaleFiles(relativeFilesToUpload, existingFiles);
-      await bulkStorageOperation(
-        async (relativeFilePath) => {
-          return await container.deleteBlob(relativeFilePath);
-        },
-        staleFiles,
-        { concurrencyLimit: BATCH_CONCURRENCY }
-      );
-      this.logger.info(
-        `Successfully deleted stale files for Entity ${entity.metadata.name}. Total number of files: ${staleFiles.length}`
-      );
-    } catch (error) {
-      const errorMessage = `Unable to delete file(s) from Azure. ${error}`;
-      this.logger.error(errorMessage);
-    }
-    return { objects };
-  }
-  download(containerName, blobPath) {
-    return new Promise((resolve, reject) => {
-      const fileStreamChunks = [];
-      this.storageClient.getContainerClient(containerName).getBlockBlobClient(blobPath).download().then((res) => {
-        const body = res.readableStreamBody;
-        if (!body) {
-          reject(new Error(`Unable to parse the response data`));
-          return;
-        }
-        body.on("error", reject).on("data", (chunk) => {
-          fileStreamChunks.push(chunk);
-        }).on("end", () => {
-          resolve(Buffer.concat(fileStreamChunks));
-        });
-      }).catch(reject);
-    });
-  }
-  async fetchTechDocsMetadata(entityName) {
-    const entityTriplet = `${entityName.namespace}/${entityName.kind}/${entityName.name}`;
-    const entityRootDir = this.legacyPathCasing ? entityTriplet : lowerCaseEntityTriplet(entityTriplet);
-    try {
-      const techdocsMetadataJson = await this.download(
-        this.containerName,
-        `${entityRootDir}/techdocs_metadata.json`
-      );
-      if (!techdocsMetadataJson) {
-        throw new Error(
-          `Unable to parse the techdocs metadata file ${entityRootDir}/techdocs_metadata.json.`
-        );
-      }
-      const techdocsMetadata = JSON5__default.default.parse(
-        techdocsMetadataJson.toString("utf-8")
-      );
-      return techdocsMetadata;
-    } catch (e) {
-      throw new errors.ForwardedError("TechDocs metadata fetch failed", e);
-    }
-  }
-  /**
-   * Express route middleware to serve static files on a route in techdocs-backend.
-   */
-  docsRouter() {
-    return (req, res) => {
-      const decodedUri = decodeURI(req.path.replace(/^\//, ""));
-      const filePath = this.legacyPathCasing ? decodedUri : lowerCaseEntityTripletInStoragePath(decodedUri);
-      const fileExtension = path__default.default.extname(filePath);
-      const responseHeaders = getHeadersForFileExtension(fileExtension);
-      this.download(this.containerName, filePath).then((fileContent) => {
-        for (const [headerKey, headerValue] of Object.entries(
-          responseHeaders
-        )) {
-          res.setHeader(headerKey, headerValue);
-        }
-        res.send(fileContent);
-      }).catch((e) => {
-        this.logger.warn(
-          `TechDocs Azure router failed to serve content from container ${this.containerName} at path ${filePath}: ${e.message}`
-        );
-        res.status(404).send("File Not Found");
-      });
-    };
-  }
-  /**
-   * A helper function which checks if index.html of an Entity's docs site is available. This
-   * can be used to verify if there are any pre-generated docs available to serve.
-   */
-  hasDocsBeenGenerated(entity) {
-    const entityTriplet = `${entity.metadata.namespace}/${entity.kind}/${entity.metadata.name}`;
-    const entityRootDir = this.legacyPathCasing ? entityTriplet : lowerCaseEntityTriplet(entityTriplet);
-    return this.storageClient.getContainerClient(this.containerName).getBlockBlobClient(`${entityRootDir}/index.html`).exists();
-  }
-  async renameBlob(originalName, newName, removeOriginal = false) {
-    const container = this.storageClient.getContainerClient(this.containerName);
-    const blob = container.getBlobClient(newName);
-    const { url } = container.getBlobClient(originalName);
-    const response = await blob.beginCopyFromURL(url);
-    await response.pollUntilDone();
-    if (removeOriginal) {
-      await container.deleteBlob(originalName);
-    }
-  }
-  async renameBlobToLowerCase(originalPath, removeOriginal) {
-    let newPath;
-    try {
-      newPath = lowerCaseEntityTripletInStoragePath(originalPath);
-    } catch (e) {
-      errors.assertError(e);
-      this.logger.warn(e.message);
-      return;
-    }
-    if (originalPath === newPath) return;
-    try {
-      this.logger.debug(`Migrating ${originalPath}`);
-      await this.renameBlob(originalPath, newPath, removeOriginal);
-    } catch (e) {
-      errors.assertError(e);
-      this.logger.warn(`Unable to migrate ${originalPath}: ${e.message}`);
-    }
-  }
-  async migrateDocsCase({
-    removeOriginal = false,
-    concurrency = 25
-  }) {
-    const promises = [];
-    const limiter = createLimiter__default.default(concurrency);
-    const container = this.storageClient.getContainerClient(this.containerName);
-    for await (const blob of container.listBlobsFlat()) {
-      promises.push(
-        limiter(
-          this.renameBlobToLowerCase.bind(this),
-          blob.name,
-          removeOriginal
-        )
-      );
-    }
-    await Promise.all(promises);
-  }
-  async getAllBlobsFromContainer({
-    prefix,
-    maxPageSize
-  }) {
-    const blobs = [];
-    const container = this.storageClient.getContainerClient(this.containerName);
-    let iterator = container.listBlobsFlat({ prefix }).byPage({ maxPageSize });
-    let response = (await iterator.next()).value;
-    do {
-      for (const blob of response?.segment?.blobItems ?? []) {
-        blobs.push(blob.name);
-      }
-      iterator = container.listBlobsFlat({ prefix }).byPage({ continuationToken: response.continuationToken, maxPageSize });
-      response = (await iterator.next()).value;
-    } while (response && response.continuationToken);
-    return blobs;
-  }
-}
-
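
getAllBlobsFromContainer (removed above) re-creates the byPage iterator on every page and feeds the previous continuationToken back in. The paged iterator can also be consumed directly with for-await, which carries the continuation token internally; a sketch assuming @azure/storage-blob, with placeholder names:

    // Sketch: collect all blob names under a prefix, one page at a time.
    const { BlobServiceClient } = require('@azure/storage-blob');

    async function listBlobNames(connectionString, containerName, prefix) {
      const service = BlobServiceClient.fromConnectionString(connectionString);
      const container = service.getContainerClient(containerName);
      const names = [];
      for await (const page of container
        .listBlobsFlat({ prefix })
        .byPage({ maxPageSize: 100 })) {
        for (const blob of page.segment.blobItems) {
          names.push(blob.name);
        }
      }
      return names;
    }
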
-class MigrateWriteStream extends stream.Writable {
-  logger;
-  removeOriginal;
-  maxConcurrency;
-  inFlight = 0;
-  constructor(logger, removeOriginal, concurrency) {
-    super({ objectMode: true });
-    this.logger = logger;
-    this.removeOriginal = removeOriginal;
-    this.maxConcurrency = concurrency;
-  }
-  _write(file, _encoding, next) {
-    let shouldCallNext = true;
-    let newFile;
-    try {
-      newFile = lowerCaseEntityTripletInStoragePath(file.name);
-    } catch (e) {
-      errors.assertError(e);
-      this.logger.warn(e.message);
-      next();
-      return;
-    }
-    if (newFile === file.name) {
-      next();
-      return;
-    }
-    this.inFlight++;
-    if (this.inFlight < this.maxConcurrency) {
-      next();
-      shouldCallNext = false;
-    }
-    const migrate = this.removeOriginal ? file.move.bind(file) : file.copy.bind(file);
-    this.logger.debug(`Migrating ${file.name}`);
-    migrate(newFile).catch(
-      (e) => this.logger.warn(`Unable to migrate ${file.name}: ${e.message}`)
-    ).finally(() => {
-      this.inFlight--;
-      if (shouldCallNext) {
-        next();
-      }
-    });
-  }
-}
-
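
MigrateWriteStream throttles by deciding when to call next(): below maxConcurrency it acknowledges immediately so more files flow in; at the limit it withholds next() until the in-flight migration settles. The same backpressure trick reduced to its core, with a caller-supplied async `work` function (hypothetical):

    // Sketch: an object-mode Writable that keeps at most `limit` jobs in flight.
    const { Writable } = require('stream');

    class LimitedWritable extends Writable {
      constructor(work, limit) {
        super({ objectMode: true });
        this.work = work; // hypothetical async (item) => Promise
        this.limit = limit;
        this.inFlight = 0;
      }
      _write(item, _enc, next) {
        this.inFlight++;
        // Below the limit: acknowledge now so the stream keeps feeding us.
        // At the limit: hold next() until this job settles (backpressure).
        const deferNext = this.inFlight >= this.limit;
        if (!deferNext) next();
        this.work(item)
          .catch(() => { /* log errors in real code */ })
          .finally(() => {
            this.inFlight--;
            if (deferNext) next();
          });
      }
    }
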
-class GoogleGCSPublish {
-  storageClient;
-  bucketName;
-  legacyPathCasing;
-  logger;
-  bucketRootPath;
-  constructor(options) {
-    this.storageClient = options.storageClient;
-    this.bucketName = options.bucketName;
-    this.legacyPathCasing = options.legacyPathCasing;
-    this.logger = options.logger;
-    this.bucketRootPath = options.bucketRootPath;
-  }
-  static fromConfig(config, logger) {
-    let bucketName = "";
-    try {
-      bucketName = config.getString("techdocs.publisher.googleGcs.bucketName");
-    } catch (error) {
-      throw new Error(
-        "Since techdocs.publisher.type is set to 'googleGcs' in your app config, techdocs.publisher.googleGcs.bucketName is required."
-      );
-    }
-    const bucketRootPath = normalizeExternalStorageRootPath(
-      config.getOptionalString("techdocs.publisher.googleGcs.bucketRootPath") || ""
-    );
-    const credentials = config.getOptionalString(
-      "techdocs.publisher.googleGcs.credentials"
-    );
-    const projectId = config.getOptionalString(
-      "techdocs.publisher.googleGcs.projectId"
-    );
-    let credentialsJson = {};
-    if (credentials) {
-      try {
-        credentialsJson = JSON.parse(credentials);
-      } catch (err) {
-        throw new Error(
-          "Error in parsing techdocs.publisher.googleGcs.credentials config to JSON."
-        );
-      }
-    }
-    const clientOpts = {};
-    if (projectId) {
-      clientOpts.projectId = projectId;
-    }
-    const storageClient = new storage.Storage({
-      ...credentials && {
-        projectId: credentialsJson.project_id,
-        credentials: credentialsJson
-      },
-      ...clientOpts
-    });
-    const legacyPathCasing = config.getOptionalBoolean(
-      "techdocs.legacyUseCaseSensitiveTripletPaths"
-    ) || false;
-    return new GoogleGCSPublish({
-      storageClient,
-      bucketName,
-      legacyPathCasing,
-      logger,
-      bucketRootPath
-    });
-  }
-  /**
-   * Check if the defined bucket exists. Being able to connect means the configuration is good
-   * and the storage client will work.
-   */
-  async getReadiness() {
-    try {
-      await this.storageClient.bucket(this.bucketName).getMetadata();
-      this.logger.info(
-        `Successfully connected to the GCS bucket ${this.bucketName}.`
-      );
-      return {
-        isAvailable: true
-      };
-    } catch (err) {
-      errors.assertError(err);
-      this.logger.error(
-        `Could not retrieve metadata about the GCS bucket ${this.bucketName}. Make sure the bucket exists. Also make sure that authentication is setup either by explicitly defining techdocs.publisher.googleGcs.credentials in app config or by using environment variables. Refer to https://backstage.io/docs/features/techdocs/using-cloud-storage`
-      );
-      this.logger.error(`from GCS client library: ${err.message}`);
-      return { isAvailable: false };
-    }
-  }
-  /**
-   * Upload all the files from the generated `directory` to the GCS bucket.
-   * Directory structure used in the bucket is - entityNamespace/entityKind/entityName/index.html
-   */
-  async publish({
-    entity,
-    directory
-  }) {
-    const objects = [];
-    const useLegacyPathCasing = this.legacyPathCasing;
-    const bucket = this.storageClient.bucket(this.bucketName);
-    const bucketRootPath = this.bucketRootPath;
-    let existingFiles = [];
-    try {
-      const remoteFolder = getCloudPathForLocalPath(
-        entity,
-        void 0,
-        useLegacyPathCasing,
-        bucketRootPath
-      );
-      existingFiles = await this.getFilesForFolder(remoteFolder);
-    } catch (e) {
-      errors.assertError(e);
-      this.logger.error(
-        `Unable to list files for Entity ${entity.metadata.name}: ${e.message}`
-      );
-    }
-    let absoluteFilesToUpload;
-    try {
-      absoluteFilesToUpload = await getFileTreeRecursively(directory);
-      await bulkStorageOperation(
-        async (absoluteFilePath) => {
-          const relativeFilePath = path__default.default.relative(directory, absoluteFilePath);
-          const destination = getCloudPathForLocalPath(
-            entity,
-            relativeFilePath,
-            useLegacyPathCasing,
-            bucketRootPath
-          );
-          objects.push(destination);
-          return await bucket.upload(absoluteFilePath, { destination });
-        },
-        absoluteFilesToUpload,
-        { concurrencyLimit: 10 }
-      );
-      this.logger.info(
-        `Successfully uploaded all the generated files for Entity ${entity.metadata.name}. Total number of files: ${absoluteFilesToUpload.length}`
-      );
-    } catch (e) {
-      const errorMessage = `Unable to upload file(s) to Google Cloud Storage. ${e}`;
-      this.logger.error(errorMessage);
-      throw new Error(errorMessage);
-    }
-    try {
-      const relativeFilesToUpload = absoluteFilesToUpload.map(
-        (absoluteFilePath) => getCloudPathForLocalPath(
-          entity,
-          path__default.default.relative(directory, absoluteFilePath),
-          useLegacyPathCasing,
-          bucketRootPath
-        )
-      );
-      const staleFiles = getStaleFiles(relativeFilesToUpload, existingFiles);
-      await bulkStorageOperation(
-        async (relativeFilePath) => {
-          return await bucket.file(relativeFilePath).delete();
-        },
-        staleFiles,
-        { concurrencyLimit: 10 }
-      );
-      this.logger.info(
-        `Successfully deleted stale files for Entity ${entity.metadata.name}. Total number of files: ${staleFiles.length}`
-      );
-    } catch (error) {
-      const errorMessage = `Unable to delete file(s) from Google Cloud Storage. ${error}`;
-      this.logger.error(errorMessage);
-    }
-    return { objects };
-  }
-  fetchTechDocsMetadata(entityName) {
-    return new Promise((resolve, reject) => {
-      const entityTriplet = `${entityName.namespace}/${entityName.kind}/${entityName.name}`;
-      const entityDir = this.legacyPathCasing ? entityTriplet : lowerCaseEntityTriplet(entityTriplet);
-      const entityRootDir = path__default.default.posix.join(this.bucketRootPath, entityDir);
-      if (!isValidContentPath(this.bucketRootPath, entityRootDir)) {
-        this.logger.error(
-          `Invalid content path found while fetching TechDocs metadata: ${entityRootDir}`
-        );
-        reject(new Error(`Metadata Not Found`));
-      }
-      const fileStreamChunks = [];
-      this.storageClient.bucket(this.bucketName).file(`${entityRootDir}/techdocs_metadata.json`).createReadStream().on("error", (err) => {
-        this.logger.error(err.message);
-        reject(err);
-      }).on("data", (chunk) => {
-        fileStreamChunks.push(chunk);
-      }).on("end", () => {
-        const techdocsMetadataJson = Buffer.concat(fileStreamChunks).toString("utf-8");
-        resolve(JSON5__default.default.parse(techdocsMetadataJson));
-      });
-    });
-  }
-  /**
-   * Express route middleware to serve static files on a route in techdocs-backend.
-   */
-  docsRouter() {
-    return (req, res) => {
-      const decodedUri = decodeURI(req.path.replace(/^\//, ""));
-      const filePathNoRoot = this.legacyPathCasing ? decodedUri : lowerCaseEntityTripletInStoragePath(decodedUri);
-      const filePath = path__default.default.posix.join(this.bucketRootPath, filePathNoRoot);
-      if (!isValidContentPath(this.bucketRootPath, filePath)) {
-        this.logger.error(
-          `Attempted to fetch TechDocs content for a file outside of the bucket root: ${filePathNoRoot}`
-        );
-        res.status(404).send("File Not Found");
-        return;
-      }
-      const fileExtension = path__default.default.extname(filePath);
-      const responseHeaders = getHeadersForFileExtension(fileExtension);
-      this.storageClient.bucket(this.bucketName).file(filePath).createReadStream().on("pipe", () => {
-        res.writeHead(200, responseHeaders);
-      }).on("error", (err) => {
-        this.logger.warn(
-          `TechDocs Google GCS router failed to serve content from bucket ${this.bucketName} at path ${filePath}: ${err.message}`
-        );
-        if (!res.headersSent) {
-          res.status(404).send("File Not Found");
-        } else {
-          res.destroy();
-        }
-      }).pipe(res);
-    };
-  }
-  /**
-   * A helper function which checks if index.html of an Entity's docs site is available. This
-   * can be used to verify if there are any pre-generated docs available to serve.
-   */
-  async hasDocsBeenGenerated(entity) {
-    return new Promise((resolve) => {
-      const entityTriplet = `${entity.metadata.namespace}/${entity.kind}/${entity.metadata.name}`;
-      const entityDir = this.legacyPathCasing ? entityTriplet : lowerCaseEntityTriplet(entityTriplet);
-      const entityRootDir = path__default.default.posix.join(this.bucketRootPath, entityDir);
-      if (!isValidContentPath(this.bucketRootPath, entityRootDir)) {
-        this.logger.error(
-          `Invalid content path found while checking if docs have been generated: ${entityRootDir}`
-        );
-        resolve(false);
-      }
-      this.storageClient.bucket(this.bucketName).file(`${entityRootDir}/index.html`).exists().then((response) => {
-        resolve(response[0]);
-      }).catch(() => {
-        resolve(false);
-      });
-    });
-  }
-  migrateDocsCase({ removeOriginal = false, concurrency = 25 }) {
-    return new Promise((resolve, reject) => {
-      const allFileMetadata = this.storageClient.bucket(this.bucketName).getFilesStream();
-      const migrateFiles = new MigrateWriteStream(
-        this.logger,
-        removeOriginal,
-        concurrency
-      );
-      migrateFiles.on("finish", resolve).on("error", reject);
-      allFileMetadata.pipe(migrateFiles).on("error", (error) => {
-        migrateFiles.destroy();
-        reject(error);
-      });
-    });
-  }
-  getFilesForFolder(folder) {
-    const fileMetadataStream = this.storageClient.bucket(this.bucketName).getFilesStream({ prefix: folder });
-    return new Promise((resolve, reject) => {
-      const files = [];
-      fileMetadataStream.on("error", (error) => {
-        reject(error);
-      });
-      fileMetadataStream.on("data", (file) => {
-        files.push(file.name);
-      });
-      fileMetadataStream.on("end", () => {
-        resolve(files);
-      });
-    });
-  }
-}
-
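
The GCS docsRouter above pipes createReadStream straight into the Express response, writing the status line on the "pipe" event; if the stream errors after headers have gone out, all it can do is destroy the response. That late-error pattern in isolation, assuming @google-cloud/storage and a placeholder bucket:

    // Sketch: stream a GCS object into an Express response.
    const { Storage } = require('@google-cloud/storage');

    function serveGcsFile(res, objectPath) {
      const storage = new Storage();
      storage
        .bucket('my-techdocs-bucket') // placeholder
        .file(objectPath)
        .createReadStream()
        .on('error', () => {
          if (!res.headersSent) {
            res.status(404).send('File Not Found'); // nothing sent yet: clean 404
          } else {
            res.destroy(); // mid-stream failure: abort the connection
          }
        })
        .pipe(res);
    }
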
-class LocalPublish {
-  legacyPathCasing;
-  logger;
-  discovery;
-  staticDocsDir;
-  constructor(options) {
-    this.logger = options.logger;
-    this.discovery = options.discovery;
-    this.legacyPathCasing = options.legacyPathCasing;
-    this.staticDocsDir = options.staticDocsDir;
-  }
-  static fromConfig(config, logger, discovery) {
-    const legacyPathCasing = config.getOptionalBoolean(
-      "techdocs.legacyUseCaseSensitiveTripletPaths"
-    ) || false;
-    let staticDocsDir = config.getOptionalString(
-      "techdocs.publisher.local.publishDirectory"
-    );
-    if (!staticDocsDir) {
-      try {
-        staticDocsDir = backendPluginApi.resolvePackagePath(
-          "@backstage/plugin-techdocs-backend",
-          "static/docs"
-        );
-      } catch (err) {
-        staticDocsDir = os__default.default.tmpdir();
-      }
-    }
-    return new LocalPublish({
-      logger,
-      discovery,
-      legacyPathCasing,
-      staticDocsDir
-    });
-  }
-  async getReadiness() {
-    return {
-      isAvailable: true
-    };
-  }
-  async publish({
-    entity,
-    directory
-  }) {
-    const entityNamespace = entity.metadata.namespace ?? "default";
-    let publishDir;
-    try {
-      publishDir = this.staticEntityPathJoin(
-        entityNamespace,
-        entity.kind,
-        entity.metadata.name
-      );
-    } catch (error) {
-      throw new errors.ForwardedError(
-        `Unable to publish TechDocs site for entity: ${catalogModel.stringifyEntityRef(
-          entity
-        )}`,
-        error
-      );
-    }
-    if (!fs__default.default.existsSync(publishDir)) {
-      this.logger.info(`Could not find ${publishDir}, creating the directory.`);
-      fs__default.default.mkdirSync(publishDir, { recursive: true });
-    }
-    try {
-      await fs__default.default.copy(directory, publishDir);
-      this.logger.info(`Published site stored at ${publishDir}`);
-    } catch (error) {
-      this.logger.debug(
-        `Failed to copy docs from ${directory} to ${publishDir}`
-      );
-      throw error;
-    }
-    const techdocsApiUrl = await this.discovery.getBaseUrl("techdocs");
-    const publishedFilePaths = (await getFileTreeRecursively(publishDir)).map(
-      (abs) => {
-        return abs.split(`${this.staticDocsDir}/`)[1];
-      }
-    );
-    return {
-      remoteUrl: `${techdocsApiUrl}/static/docs/${encodeURIComponent(
-        entity.metadata.name
-      )}`,
-      objects: publishedFilePaths
-    };
-  }
-  async fetchTechDocsMetadata(entityName) {
-    let metadataPath;
-    try {
-      metadataPath = this.staticEntityPathJoin(
-        entityName.namespace,
-        entityName.kind,
-        entityName.name,
-        "techdocs_metadata.json"
-      );
-    } catch (err) {
-      throw new errors.ForwardedError(
-        `Unexpected entity when fetching metadata: ${catalogModel.stringifyEntityRef(
-          entityName
-        )}`,
-        err
-      );
-    }
-    try {
-      return await fs__default.default.readJson(metadataPath);
-    } catch (err) {
-      throw new errors.ForwardedError(
-        `Unable to read techdocs_metadata.json at ${metadataPath}. Error: ${err}`,
-        err
-      );
-    }
-  }
-  docsRouter() {
-    const router = express__default.default.Router();
-    router.use((req, res, next) => {
-      if (this.legacyPathCasing) {
-        return next();
-      }
-      const [_, namespace, kind, name, ...rest] = req.path.split("/");
-      if (!namespace || !kind || !name) {
-        return next();
-      }
-      const newPath = [
-        _,
-        namespace.toLowerCase(),
-        kind.toLowerCase(),
-        name.toLowerCase(),
-        ...rest
-      ].join("/");
-      if (newPath === req.path) {
-        return next();
-      }
-      return res.redirect(301, req.baseUrl + newPath);
-    });
-    router.use(
-      express__default.default.static(this.staticDocsDir, {
-        // Handle content-type header the same as all other publishers.
-        setHeaders: (res, filePath) => {
-          const fileExtension = path__default.default.extname(filePath);
-          const headers = getHeadersForFileExtension(fileExtension);
-          for (const [header, value] of Object.entries(headers)) {
-            res.setHeader(header, value);
-          }
-        }
-      })
-    );
-    return router;
-  }
-  async hasDocsBeenGenerated(entity) {
-    const namespace = entity.metadata.namespace ?? "default";
-    try {
-      const indexHtmlPath = this.staticEntityPathJoin(
-        namespace,
-        entity.kind,
-        entity.metadata.name,
-        "index.html"
-      );
-      await fs__default.default.access(indexHtmlPath, fs__default.default.constants.F_OK);
-      return true;
-    } catch (err) {
-      if (err.name === "NotAllowedError") {
-        this.logger.error(
-          `Unexpected entity when checking if generated: ${catalogModel.stringifyEntityRef(
-            entity
-          )}`
-        );
-      }
-      return false;
-    }
-  }
-  /**
-   * This code will never run in practice. It is merely here to illustrate how
-   * to implement this method for other storage providers.
-   */
-  async migrateDocsCase({
-    removeOriginal = false,
-    concurrency = 25
-  }) {
-    const files = await getFileTreeRecursively(this.staticDocsDir);
-    const limit = createLimiter__default.default(concurrency);
-    await Promise.all(
-      files.map(
-        (f) => limit(async (file) => {
-          const relativeFile = file.replace(
-            `${this.staticDocsDir}${path__default.default.sep}`,
-            ""
-          );
-          const newFile = lowerCaseEntityTripletInStoragePath(relativeFile);
-          if (relativeFile === newFile) {
-            return;
-          }
-          await new Promise((resolve) => {
-            const migrate = removeOriginal ? fs__default.default.move : fs__default.default.copyFile;
-            this.logger.debug(`Migrating ${relativeFile}`);
-            migrate(file, newFile, (err) => {
-              if (err) {
-                this.logger.warn(
-                  `Unable to migrate ${relativeFile}: ${err.message}`
-                );
-              }
-              resolve();
-            });
-          });
-        }, f)
-      )
-    );
-  }
-  /**
-   * Utility wrapper around path.join(), used to control legacy case logic.
-   */
-  staticEntityPathJoin(...allParts) {
-    let staticEntityPath = this.staticDocsDir;
-    allParts.map((part) => part.split(path__default.default.sep)).flat().forEach((part, index) => {
-      if (index < 3) {
-        staticEntityPath = backendPluginApi.resolveSafeChildPath(
-          staticEntityPath,
-          this.legacyPathCasing ? part : part.toLowerCase()
-        );
-        return;
-      }
-      staticEntityPath = backendPluginApi.resolveSafeChildPath(staticEntityPath, part);
-    });
-    return staticEntityPath;
-  }
-}
-
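
staticEntityPathJoin lower-cases only the first three segments (the namespace/kind/name triplet) unless legacy casing is enabled, and routes every segment through resolveSafeChildPath so the result stays inside staticDocsDir. The casing rule alone, as an illustrative pure function (not the package's own helper, which also performs the safe-path resolution):

    // Sketch: lower-case the entity triplet at the head of a storage path,
    // leaving deeper path segments untouched. Illustrative only.
    function lowerCaseTriplet(storagePath) {
      return storagePath
        .split('/')
        .map((part, i) => (i < 3 ? part.toLowerCase() : part))
        .join('/');
    }

    // lowerCaseTriplet('Default/API/My-Service/sub/Page.html')
    //   -> 'default/api/my-service/sub/Page.html'
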
-const streamToBuffer = (stream) => {
-  return new Promise((resolve, reject) => {
-    try {
-      const chunks = [];
-      stream.on("data", (chunk) => chunks.push(chunk));
-      stream.on("error", reject);
-      stream.on("end", () => resolve(Buffer.concat(chunks)));
-    } catch (e) {
-      throw new errors.ForwardedError("Unable to parse the response data", e);
-    }
-  });
-};
-const bufferToStream = (buffer) => {
-  const stream$1 = new stream.Readable();
-  stream$1.push(buffer);
-  stream$1.push(null);
-  return stream$1;
-};
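
These two removed helpers convert between Buffers and Readable streams for the Swift client below. A self-contained round trip of the same idea using only node:stream; wrapping the buffer in an array makes it arrive as a single chunk:

    // Sketch: Buffer -> Readable -> Buffer round trip.
    const { Readable } = require('stream');

    async function roundTrip(buffer) {
      const readable = Readable.from([buffer]); // one chunk: the whole buffer
      const chunks = [];
      for await (const chunk of readable) {
        chunks.push(chunk);
      }
      return Buffer.concat(chunks);
    }

    // roundTrip(Buffer.from('hello')).then((b) => console.log(b.toString())); // 'hello'
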
-class OpenStackSwiftPublish {
-  storageClient;
-  containerName;
-  logger;
-  constructor(options) {
-    this.storageClient = options.storageClient;
-    this.containerName = options.containerName;
-    this.logger = options.logger;
-  }
-  static fromConfig(config, logger) {
-    let containerName = "";
-    try {
-      containerName = config.getString(
-        "techdocs.publisher.openStackSwift.containerName"
-      );
-    } catch (error) {
-      throw new Error(
-        "Since techdocs.publisher.type is set to 'openStackSwift' in your app config, techdocs.publisher.openStackSwift.containerName is required."
-      );
-    }
-    const openStackSwiftConfig = config.getConfig(
-      "techdocs.publisher.openStackSwift"
-    );
-    const storageClient = new openstackSwiftSdk.SwiftClient({
-      authEndpoint: openStackSwiftConfig.getString("authUrl"),
-      swiftEndpoint: openStackSwiftConfig.getString("swiftUrl"),
-      credentialId: openStackSwiftConfig.getString("credentials.id"),
-      secret: openStackSwiftConfig.getString("credentials.secret")
-    });
-    return new OpenStackSwiftPublish({ storageClient, containerName, logger });
-  }
-  /*
-   * Check if the defined container exists. Being able to connect means the configuration is good
-   * and the storage client will work.
-   */
-  async getReadiness() {
-    try {
-      const container = await this.storageClient.getContainerMetadata(
-        this.containerName
-      );
-      if (!(container instanceof types.NotFound)) {
-        this.logger.info(
-          `Successfully connected to the OpenStack Swift container ${this.containerName}.`
-        );
-        return {
-          isAvailable: true
-        };
-      }
-      this.logger.error(
-        `Could not retrieve metadata about the OpenStack Swift container ${this.containerName}. Make sure the container exists. Also make sure that authentication is setup either by explicitly defining credentials and region in techdocs.publisher.openStackSwift in app config or by using environment variables. Refer to https://backstage.io/docs/features/techdocs/using-cloud-storage`
-      );
-      return {
-        isAvailable: false
-      };
-    } catch (err) {
-      errors.assertError(err);
-      this.logger.error(`from OpenStack client library: ${err.message}`);
-      return {
-        isAvailable: false
-      };
-    }
-  }
-  /**
-   * Upload all the files from the generated `directory` to the OpenStack Swift container.
-   * Directory structure used in the bucket is - entityNamespace/entityKind/entityName/index.html
-   */
-  async publish({
-    entity,
-    directory
-  }) {
-    try {
-      const objects = [];
-      const allFilesToUpload = await getFileTreeRecursively(directory);
-      const limiter = createLimiter__default.default(10);
-      const uploadPromises = [];
-      for (const filePath of allFilesToUpload) {
-        const relativeFilePath = path__default.default.relative(directory, filePath);
-        const relativeFilePathPosix = relativeFilePath.split(path__default.default.sep).join(path__default.default.posix.sep);
-        const entityRootDir = `${entity.metadata.namespace}/${entity.kind}/${entity.metadata.name}`;
-        const destination = `${entityRootDir}/${relativeFilePathPosix}`;
-        objects.push(destination);
-        const uploadFile = limiter(async () => {
-          const fileBuffer = await fs__default.default.readFile(filePath);
-          const stream = bufferToStream(fileBuffer);
-          return this.storageClient.upload(
-            this.containerName,
-            destination,
-            stream
-          );
-        });
-        uploadPromises.push(uploadFile);
-      }
-      await Promise.all(uploadPromises);
-      this.logger.info(
-        `Successfully uploaded all the generated files for Entity ${entity.metadata.name}. Total number of files: ${allFilesToUpload.length}`
-      );
-      return { objects };
-    } catch (e) {
-      const errorMessage = `Unable to upload file(s) to OpenStack Swift. ${e}`;
-      this.logger.error(errorMessage);
-      throw new Error(errorMessage);
-    }
-  }
-  async fetchTechDocsMetadata(entityName) {
-    return await new Promise(async (resolve, reject) => {
-      const entityRootDir = `${entityName.namespace}/${entityName.kind}/${entityName.name}`;
-      const downloadResponse = await this.storageClient.download(
-        this.containerName,
-        `${entityRootDir}/techdocs_metadata.json`
-      );
-      if (!(downloadResponse instanceof types.NotFound)) {
-        const stream = downloadResponse.data;
-        try {
-          const techdocsMetadataJson = await streamToBuffer(stream);
-          if (!techdocsMetadataJson) {
-            throw new Error(
-              `Unable to parse the techdocs metadata file ${entityRootDir}/techdocs_metadata.json.`
-            );
-          }
-          const techdocsMetadata = JSON5__default.default.parse(
-            techdocsMetadataJson.toString("utf-8")
-          );
-          resolve(techdocsMetadata);
-        } catch (err) {
-          errors.assertError(err);
-          this.logger.error(err.message);
-          reject(new Error(err.message));
-        }
-      } else {
-        reject({
-          message: `TechDocs metadata fetch failed, The file /rootDir/${entityRootDir}/techdocs_metadata.json does not exist !`
-        });
-      }
-    });
-  }
-  /**
-   * Express route middleware to serve static files on a route in techdocs-backend.
-   */
-  docsRouter() {
-    return async (req, res) => {
-      const filePath = decodeURI(req.path.replace(/^\//, ""));
-      const fileExtension = path__default.default.extname(filePath);
-      const responseHeaders = getHeadersForFileExtension(fileExtension);
-      const downloadResponse = await this.storageClient.download(
-        this.containerName,
-        filePath
-      );
-      if (!(downloadResponse instanceof types.NotFound)) {
-        const stream = downloadResponse.data;
-        try {
-          for (const [headerKey, headerValue] of Object.entries(
-            responseHeaders
-          )) {
-            res.setHeader(headerKey, headerValue);
-          }
-          res.send(await streamToBuffer(stream));
-        } catch (err) {
-          errors.assertError(err);
-          this.logger.warn(
-            `TechDocs OpenStack swift router failed to serve content from container ${this.containerName} at path ${filePath}: ${err.message}`
-          );
-          res.status(404).send("File Not Found");
-        }
-      } else {
-        this.logger.warn(
-          `TechDocs OpenStack swift router failed to serve content from container ${this.containerName} at path ${filePath}: Not found`
-        );
-        res.status(404).send("File Not Found");
-      }
-    };
-  }
-  /**
-   * A helper function which checks if index.html of an Entity's docs site is available. This
-   * can be used to verify if there are any pre-generated docs available to serve.
-   */
-  async hasDocsBeenGenerated(entity) {
-    const entityRootDir = `${entity.metadata.namespace}/${entity.kind}/${entity.metadata.name}`;
-    try {
-      const fileResponse = await this.storageClient.getMetadata(
-        this.containerName,
-        `${entityRootDir}/index.html`
-      );
-      if (!(fileResponse instanceof types.NotFound)) {
-        return true;
-      }
-      return false;
-    } catch (err) {
-      errors.assertError(err);
-      this.logger.warn(err.message);
-      return false;
-    }
-  }
-  async migrateDocsCase({
-    removeOriginal = false,
-    concurrency = 25
-  }) {
-    const allObjects = await this.getAllObjectsFromContainer();
-    const limiter = createLimiter__default.default(concurrency);
-    await Promise.all(
-      allObjects.map(
-        (f) => limiter(async (file) => {
-          let newPath;
-          try {
-            newPath = lowerCaseEntityTripletInStoragePath(file);
-          } catch (e) {
-            errors.assertError(e);
-            this.logger.warn(e.message);
-            return;
-          }
-          if (file === newPath) {
-            return;
-          }
-          try {
-            this.logger.debug(`Migrating ${file} to ${newPath}`);
-            await this.storageClient.copy(
-              this.containerName,
-              file,
-              this.containerName,
-              newPath
-            );
-            if (removeOriginal) {
-              await this.storageClient.delete(this.containerName, file);
-            }
-          } catch (e) {
-            errors.assertError(e);
-            this.logger.warn(`Unable to migrate ${file}: ${e.message}`);
-          }
-        }, f)
-      )
-    );
-  }
-  /**
-   * Returns a list of all object keys from the configured container.
-   */
-  async getAllObjectsFromContainer({ prefix } = { prefix: "" }) {
-    let objects = [];
-    const OSS_MAX_LIMIT = Math.pow(2, 31) - 1;
-    const allObjects = await this.storageClient.list(
-      this.containerName,
-      prefix,
-      OSS_MAX_LIMIT
-    );
-    objects = allObjects.map((object) => object.name);
-    return objects;
-  }
-}
-
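
The Swift publish loop above wraps each upload in createLimiter (p-limit) so no more than ten transfers run at once while Promise.all still awaits them all. The throttle pattern on its own, assuming a CommonJS-compatible p-limit release and a hypothetical uploadOne function:

    // Sketch: cap concurrent uploads with p-limit.
    const createLimiter = require('p-limit');

    async function uploadAll(files, uploadOne) {
      const limit = createLimiter(10); // at most 10 uploads in flight
      await Promise.all(files.map((file) => limit(() => uploadOne(file))));
    }
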
-class Publisher {
-  publishers = /* @__PURE__ */ new Map();
-  register(type, publisher) {
-    this.publishers.set(type, publisher);
-  }
-  get(config) {
-    const publisherType = config.getOptionalString(
-      "techdocs.publisher.type"
-    ) ?? "local";
-    if (!publisherType) {
-      throw new Error("TechDocs publisher type not specified for the entity");
-    }
-    const publisher = this.publishers.get(publisherType);
-    if (!publisher) {
-      throw new Error(
-        `TechDocs publisher '${publisherType}' is not registered`
-      );
-    }
-    return publisher;
-  }
-  /**
-   * Returns a instance of TechDocs publisher
-   * @param config - A Backstage configuration
-   * @param options - Options for configuring the publisher factory
-   */
-  static async fromConfig(config, options) {
-    const { logger, discovery, customPublisher } = options;
-    const publishers = new Publisher();
-    if (customPublisher) {
-      publishers.register("techdocs", customPublisher);
-      return customPublisher;
-    }
-    const publisherType = config.getOptionalString(
-      "techdocs.publisher.type"
-    ) ?? "local";
-    switch (publisherType) {
-      case "googleGcs":
-        logger.info("Creating Google Storage Bucket publisher for TechDocs");
-        publishers.register(
-          publisherType,
-          GoogleGCSPublish.fromConfig(config, logger)
-        );
-        break;
-      case "awsS3":
-        logger.info("Creating AWS S3 Bucket publisher for TechDocs");
-        publishers.register(
-          publisherType,
-          await AwsS3Publish.fromConfig(config, logger)
-        );
-        break;
-      case "azureBlobStorage":
-        logger.info(
-          "Creating Azure Blob Storage Container publisher for TechDocs"
-        );
-        publishers.register(
-          publisherType,
-          AzureBlobStoragePublish.fromConfig(config, logger)
-        );
-        break;
-      case "openStackSwift":
-        logger.info(
-          "Creating OpenStack Swift Container publisher for TechDocs"
-        );
-        publishers.register(
-          publisherType,
-          OpenStackSwiftPublish.fromConfig(config, logger)
-        );
-        break;
-      case "local":
-        logger.info("Creating Local publisher for TechDocs");
-        publishers.register(
-          publisherType,
-          LocalPublish.fromConfig(config, logger, discovery)
-        );
-        break;
-      default:
-        logger.info("Creating Local publisher for TechDocs");
-        publishers.register(
-          publisherType,
-          LocalPublish.fromConfig(config, logger, discovery)
-        );
-    }
-    return publishers.get(config);
-  }
-}
-
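
Publisher.fromConfig reads techdocs.publisher.type, registers the matching implementation (falling back to local), and returns it. A hedged usage sketch; config, logger and discovery are assumed to come from the backend plugin environment:

    // Sketch: pick a publisher from Backstage config and health-check it.
    const { Publisher } = require('@backstage/plugin-techdocs-node');

    async function initPublisher(config, logger, discovery) {
      const publisher = await Publisher.fromConfig(config, { logger, discovery });
      const { isAvailable } = await publisher.getReadiness();
      if (!isAvailable) {
        logger.error('TechDocs publisher is not ready; check your configuration.');
      }
      return publisher;
    }
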
-const techdocsBuildsExtensionPoint = backendPluginApi.createExtensionPoint({
-  id: "techdocs.builds"
-});
-const techdocsGeneratorExtensionPoint = backendPluginApi.createExtensionPoint({
-  id: "techdocs.generator"
-});
-const techdocsPreparerExtensionPoint = backendPluginApi.createExtensionPoint({
-  id: "techdocs.preparer"
-});
-const techdocsPublisherExtensionPoint = backendPluginApi.createExtensionPoint({
-  id: "techdocs.publisher"
-});
-
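
These extension points let backend modules contribute custom builds, generator, preparer, or publisher implementations to the techdocs plugin. A sketch of a module wired to techdocsPublisherExtensionPoint, assuming the new backend system's createBackendModule; the setPublisher method name and the placeholder publisher are illustrative, not confirmed by this diff:

    // Sketch: a backend module contributing a custom TechDocs publisher.
    const { createBackendModule } = require('@backstage/backend-plugin-api');
    const { techdocsPublisherExtensionPoint } = require('@backstage/plugin-techdocs-node');

    const myCustomPublisher = { /* a PublisherBase implementation (placeholder) */ };

    module.exports = createBackendModule({
      pluginId: 'techdocs',
      moduleId: 'custom-publisher',
      register(env) {
        env.registerInit({
          deps: { publisherExtension: techdocsPublisherExtensionPoint },
          async init({ publisherExtension }) {
            publisherExtension.setPublisher(myCustomPublisher); // method name assumed
          },
        });
      },
    });
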
-exports.DirectoryPreparer = DirectoryPreparer;
-exports.Generators = Generators;
-exports.Preparers = Preparers;
-exports.Publisher = Publisher;
-exports.TechdocsGenerator = TechdocsGenerator;
-exports.UrlPreparer = UrlPreparer;
-exports.getDocFilesFromRepository = getDocFilesFromRepository;
-exports.getLocationForEntity = getLocationForEntity;
-exports.getMkDocsYml = getMkDocsYml;
-exports.getMkdocsYml = getMkdocsYml;
-exports.parseReferenceAnnotation = parseReferenceAnnotation;
-exports.techdocsBuildsExtensionPoint = techdocsBuildsExtensionPoint;
-exports.techdocsGeneratorExtensionPoint = techdocsGeneratorExtensionPoint;
-exports.techdocsPreparerExtensionPoint = techdocsPreparerExtensionPoint;
-exports.techdocsPublisherExtensionPoint = techdocsPublisherExtensionPoint;
-exports.transformDirLocation = transformDirLocation;
+var index = require('./stages/generate/index.cjs.js');
+var dir = require('./stages/prepare/dir.cjs.js');
+var url = require('./stages/prepare/url.cjs.js');
+var preparers = require('./stages/prepare/preparers.cjs.js');
+var publish = require('./stages/publish/publish.cjs.js');
+var helpers$1 = require('./helpers.cjs.js');
+var extensions = require('./extensions.cjs.js');
+var techdocs = require('./stages/generate/techdocs.cjs.js');
+var generators = require('./stages/generate/generators.cjs.js');
+var helpers = require('./stages/generate/helpers.cjs.js');
+
+
+
+exports.getMkDocsYml = index.getMkDocsYml;
+exports.DirectoryPreparer = dir.DirectoryPreparer;
+exports.UrlPreparer = url.UrlPreparer;
+exports.Preparers = preparers.Preparers;
+exports.Publisher = publish.Publisher;
+exports.getDocFilesFromRepository = helpers$1.getDocFilesFromRepository;
+exports.getLocationForEntity = helpers$1.getLocationForEntity;
+exports.parseReferenceAnnotation = helpers$1.parseReferenceAnnotation;
+exports.transformDirLocation = helpers$1.transformDirLocation;
+exports.techdocsBuildsExtensionPoint = extensions.techdocsBuildsExtensionPoint;
+exports.techdocsGeneratorExtensionPoint = extensions.techdocsGeneratorExtensionPoint;
+exports.techdocsPreparerExtensionPoint = extensions.techdocsPreparerExtensionPoint;
+exports.techdocsPublisherExtensionPoint = extensions.techdocsPublisherExtensionPoint;
+exports.TechdocsGenerator = techdocs.TechdocsGenerator;
+exports.Generators = generators.Generators;
+exports.getMkdocsYml = helpers.getMkdocsYml;
 //# sourceMappingURL=index.cjs.js.map
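
The net effect of the added lines: the roughly 2,600-line bundle is split into per-stage chunk files, and this barrel now just requires them and re-exports the same public names. Consumers should therefore be unaffected; for example:

    // Both before and after this release, the same import keeps working; the
    // barrel now forwards to ./stages/publish/publish.cjs.js internally.
    const { Publisher } = require('@backstage/plugin-techdocs-node');
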