@backstage/plugin-techdocs-node 1.12.12-next.1 → 1.12.12
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +34 -0
- package/dist/extensions.cjs.js +22 -0
- package/dist/extensions.cjs.js.map +1 -0
- package/dist/helpers.cjs.js +80 -0
- package/dist/helpers.cjs.js.map +1 -0
- package/dist/index.cjs.js +29 -2602
- package/dist/index.cjs.js.map +1 -1
- package/dist/index.d.ts +11 -1
- package/dist/stages/generate/DockerContainerRunner.cjs.js +99 -0
- package/dist/stages/generate/DockerContainerRunner.cjs.js.map +1 -0
- package/dist/stages/generate/generators.cjs.js +42 -0
- package/dist/stages/generate/generators.cjs.js.map +1 -0
- package/dist/stages/generate/helpers.cjs.js +265 -0
- package/dist/stages/generate/helpers.cjs.js.map +1 -0
- package/dist/stages/generate/index.cjs.js +15 -0
- package/dist/stages/generate/index.cjs.js.map +1 -0
- package/dist/stages/generate/mkdocsPatchers.cjs.js +96 -0
- package/dist/stages/generate/mkdocsPatchers.cjs.js.map +1 -0
- package/dist/stages/generate/techdocs.cjs.js +169 -0
- package/dist/stages/generate/techdocs.cjs.js.map +1 -0
- package/dist/stages/prepare/dir.cjs.js +63 -0
- package/dist/stages/prepare/dir.cjs.js.map +1 -0
- package/dist/stages/prepare/preparers.cjs.js +54 -0
- package/dist/stages/prepare/preparers.cjs.js.map +1 -0
- package/dist/stages/prepare/url.cjs.js +46 -0
- package/dist/stages/prepare/url.cjs.js.map +1 -0
- package/dist/stages/publish/awsS3.cjs.js +436 -0
- package/dist/stages/publish/awsS3.cjs.js.map +1 -0
- package/dist/stages/publish/azureBlobStorage.cjs.js +337 -0
- package/dist/stages/publish/azureBlobStorage.cjs.js.map +1 -0
- package/dist/stages/publish/googleStorage.cjs.js +288 -0
- package/dist/stages/publish/googleStorage.cjs.js.map +1 -0
- package/dist/stages/publish/helpers.cjs.js +138 -0
- package/dist/stages/publish/helpers.cjs.js.map +1 -0
- package/dist/stages/publish/local.cjs.js +248 -0
- package/dist/stages/publish/local.cjs.js.map +1 -0
- package/dist/stages/publish/migrations/GoogleMigration.cjs.js +52 -0
- package/dist/stages/publish/migrations/GoogleMigration.cjs.js.map +1 -0
- package/dist/stages/publish/openStackSwift.cjs.js +286 -0
- package/dist/stages/publish/openStackSwift.cjs.js.map +1 -0
- package/dist/stages/publish/publish.cjs.js +100 -0
- package/dist/stages/publish/publish.cjs.js.map +1 -0
- package/package.json +13 -13

+++ package/dist/stages/generate/DockerContainerRunner.cjs.js
@@ -0,0 +1,99 @@
+'use strict';
+
+var Docker = require('dockerode');
+var fs = require('fs-extra');
+var errors = require('@backstage/errors');
+var stream = require('stream');
+var util = require('util');
+
+function _interopDefaultCompat (e) { return e && typeof e === 'object' && 'default' in e ? e : { default: e }; }
+
+var Docker__default = /*#__PURE__*/_interopDefaultCompat(Docker);
+var fs__default = /*#__PURE__*/_interopDefaultCompat(fs);
+
+const pipeline = util.promisify(stream.pipeline);
+class DockerContainerRunner {
+  dockerClient;
+  constructor() {
+    this.dockerClient = new Docker__default.default();
+  }
+  async runContainer(options) {
+    const {
+      imageName,
+      command,
+      args,
+      logStream = new stream.PassThrough(),
+      mountDirs = {},
+      workingDir,
+      envVars = {},
+      pullImage = true,
+      defaultUser = false
+    } = options;
+    try {
+      await this.dockerClient.ping();
+    } catch (e) {
+      throw new errors.ForwardedError(
+        "This operation requires Docker. Docker does not appear to be available. Docker.ping() failed with",
+        e
+      );
+    }
+    if (pullImage) {
+      await new Promise((resolve, reject) => {
+        this.dockerClient.pull(imageName, {}, (err, stream) => {
+          if (err) {
+            reject(err);
+          } else if (!stream) {
+            reject(
+              new Error(
+                "Unexpeected error: no stream returned from Docker while pulling image"
+              )
+            );
+          } else {
+            pipeline(stream, logStream, { end: false }).then(resolve).catch(reject);
+          }
+        });
+      });
+    }
+    const userOptions = {};
+    if (!defaultUser && process.getuid && process.getgid) {
+      userOptions.User = `${process.getuid()}:${process.getgid()}`;
+    }
+    const Volumes = {};
+    for (const containerDir of Object.values(mountDirs)) {
+      Volumes[containerDir] = {};
+    }
+    const Binds = [];
+    for (const [hostDir, containerDir] of Object.entries(mountDirs)) {
+      const realHostDir = await fs__default.default.realpath(hostDir);
+      Binds.push(`${realHostDir}:${containerDir}`);
+    }
+    const Env = new Array();
+    for (const [key, value] of Object.entries(envVars)) {
+      Env.push(`${key}=${value}`);
+    }
+    const [{ Error: error, StatusCode: statusCode }] = await this.dockerClient.run(imageName, args, logStream, {
+      Volumes,
+      HostConfig: {
+        AutoRemove: true,
+        Binds
+      },
+      ...workingDir ? { WorkingDir: workingDir } : {},
+      Entrypoint: command,
+      Env,
+      ...userOptions
+    });
+    if (error) {
+      throw new Error(
+        `Docker failed to run with the following error message: ${error}`
+      );
+    }
+    if (statusCode !== 0) {
+      throw new Error(
+        `Docker container returned a non-zero exit code (${statusCode})`
+      );
+    }
+  }
+}
+
+exports.DockerContainerRunner = DockerContainerRunner;
+//# sourceMappingURL=DockerContainerRunner.cjs.js.map

+++ package/dist/stages/generate/DockerContainerRunner.cjs.js.map
@@ -0,0 +1 @@
{"version":3,"file":"DockerContainerRunner.cjs.js","sources":["../../../src/stages/generate/DockerContainerRunner.ts"],"sourcesContent":["/*\n * Copyright 2020 The Backstage Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport Docker from 'dockerode';\nimport fs from 'fs-extra';\nimport { ForwardedError } from '@backstage/errors';\nimport { PassThrough } from 'stream';\nimport { pipeline as pipelineStream } from 'stream';\nimport { promisify } from 'util';\nimport { TechDocsContainerRunner } from './types';\nimport { Writable } from 'stream';\n\nconst pipeline = promisify(pipelineStream);\n\nexport type UserOptions = {\n User?: string;\n};\n\n/**\n * @internal\n */\nexport class DockerContainerRunner implements TechDocsContainerRunner {\n private readonly dockerClient: Docker;\n\n constructor() {\n this.dockerClient = new Docker();\n }\n\n async runContainer(options: {\n imageName: string;\n command?: string | string[];\n args: string[];\n logStream?: Writable;\n mountDirs?: Record<string, string>;\n workingDir?: string;\n envVars?: Record<string, string>;\n pullImage?: boolean;\n defaultUser?: boolean;\n }) {\n const {\n imageName,\n command,\n args,\n logStream = new PassThrough(),\n mountDirs = {},\n workingDir,\n envVars = {},\n pullImage = true,\n defaultUser = false,\n } = options;\n\n // Show a better error message when Docker is unavailable.\n try {\n await this.dockerClient.ping();\n } catch (e) {\n throw new ForwardedError(\n 'This operation requires Docker. Docker does not appear to be available. Docker.ping() failed with',\n e,\n );\n }\n\n if (pullImage) {\n await new Promise<void>((resolve, reject) => {\n this.dockerClient.pull(imageName, {}, (err, stream) => {\n if (err) {\n reject(err);\n } else if (!stream) {\n reject(\n new Error(\n 'Unexpeected error: no stream returned from Docker while pulling image',\n ),\n );\n } else {\n pipeline(stream, logStream, { end: false })\n .then(resolve)\n .catch(reject);\n }\n });\n });\n }\n\n const userOptions: UserOptions = {};\n if (!defaultUser && process.getuid && process.getgid) {\n // Files that are created inside the Docker container will be owned by\n // root on the host system on non Mac systems, because of reasons. 
Mainly the fact that\n // volume sharing is done using NFS on Mac and actual mounts in Linux world.\n // So we set the user in the container as the same user and group id as the host.\n // On Windows we don't have process.getuid nor process.getgid\n userOptions.User = `${process.getuid()}:${process.getgid()}`;\n }\n\n // Initialize volumes to mount based on mountDirs map\n const Volumes: { [T: string]: object } = {};\n for (const containerDir of Object.values(mountDirs)) {\n Volumes[containerDir] = {};\n }\n\n // Create bind volumes\n const Binds: string[] = [];\n for (const [hostDir, containerDir] of Object.entries(mountDirs)) {\n // Need to use realpath here as Docker mounting does not like\n // symlinks for binding volumes\n const realHostDir = await fs.realpath(hostDir);\n Binds.push(`${realHostDir}:${containerDir}`);\n }\n\n // Create docker environment variables array\n const Env = new Array<string>();\n for (const [key, value] of Object.entries(envVars)) {\n Env.push(`${key}=${value}`);\n }\n\n const [{ Error: error, StatusCode: statusCode }] =\n await this.dockerClient.run(imageName, args, logStream, {\n Volumes,\n HostConfig: {\n AutoRemove: true,\n Binds,\n },\n ...(workingDir ? { WorkingDir: workingDir } : {}),\n Entrypoint: command,\n Env,\n ...userOptions,\n } as Docker.ContainerCreateOptions);\n\n if (error) {\n throw new Error(\n `Docker failed to run with the following error message: ${error}`,\n );\n }\n\n if (statusCode !== 0) {\n throw new Error(\n `Docker container returned a non-zero exit code (${statusCode})`,\n );\n }\n }\n}\n"],"names":["promisify","pipelineStream","Docker","PassThrough","ForwardedError","fs"],"mappings":";;;;;;;;;;;;;AAyBA,MAAM,QAAA,GAAWA,eAAUC,eAAc,CAAA,CAAA;AASlC,MAAM,qBAAyD,CAAA;AAAA,EACnD,YAAA,CAAA;AAAA,EAEjB,WAAc,GAAA;AACZ,IAAK,IAAA,CAAA,YAAA,GAAe,IAAIC,uBAAO,EAAA,CAAA;AAAA,GACjC;AAAA,EAEA,MAAM,aAAa,OAUhB,EAAA;AACD,IAAM,MAAA;AAAA,MACJ,SAAA;AAAA,MACA,OAAA;AAAA,MACA,IAAA;AAAA,MACA,SAAA,GAAY,IAAIC,kBAAY,EAAA;AAAA,MAC5B,YAAY,EAAC;AAAA,MACb,UAAA;AAAA,MACA,UAAU,EAAC;AAAA,MACX,SAAY,GAAA,IAAA;AAAA,MACZ,WAAc,GAAA,KAAA;AAAA,KACZ,GAAA,OAAA,CAAA;AAGJ,IAAI,IAAA;AACF,MAAM,MAAA,IAAA,CAAK,aAAa,IAAK,EAAA,CAAA;AAAA,aACtB,CAAG,EAAA;AACV,MAAA,MAAM,IAAIC,qBAAA;AAAA,QACR,mGAAA;AAAA,QACA,CAAA;AAAA,OACF,CAAA;AAAA,KACF;AAEA,IAAA,IAAI,SAAW,EAAA;AACb,MAAA,MAAM,IAAI,OAAA,CAAc,CAAC,OAAA,EAAS,MAAW,KAAA;AAC3C,QAAA,IAAA,CAAK,aAAa,IAAK,CAAA,SAAA,EAAW,EAAI,EAAA,CAAC,KAAK,MAAW,KAAA;AACrD,UAAA,IAAI,GAAK,EAAA;AACP,YAAA,MAAA,CAAO,GAAG,CAAA,CAAA;AAAA,WACZ,MAAA,IAAW,CAAC,MAAQ,EAAA;AAClB,YAAA,MAAA;AAAA,cACE,IAAI,KAAA;AAAA,gBACF,uEAAA;AAAA,eACF;AAAA,aACF,CAAA;AAAA,WACK,MAAA;AACL,YAAS,QAAA,CAAA,MAAA,EAAQ,SAAW,EAAA,EAAE,GAAK,EAAA,KAAA,EAAO,CAAA,CACvC,IAAK,CAAA,OAAO,CACZ,CAAA,KAAA,CAAM,MAAM,CAAA,CAAA;AAAA,WACjB;AAAA,SACD,CAAA,CAAA;AAAA,OACF,CAAA,CAAA;AAAA,KACH;AAEA,IAAA,MAAM,cAA2B,EAAC,CAAA;AAClC,IAAA,IAAI,CAAC,WAAA,IAAe,OAAQ,CAAA,MAAA,IAAU,QAAQ,MAAQ,EAAA;AAMpD,MAAY,WAAA,CAAA,IAAA,GAAO,GAAG,OAAQ,CAAA,MAAA,EAAQ,CAAI,CAAA,EAAA,OAAA,CAAQ,QAAQ,CAAA,CAAA,CAAA;AAAA,KAC5D;AAGA,IAAA,MAAM,UAAmC,EAAC,CAAA;AAC1C,IAAA,KAAA,MAAW,YAAgB,IAAA,MAAA,CAAO,MAAO,CAAA,SAAS,CAAG,EAAA;AACnD,MAAQ,OAAA,CAAA,YAAY,IAAI,EAAC,CAAA;AAAA,KAC3B;AAGA,IAAA,MAAM,QAAkB,EAAC,CAAA;AACzB,IAAA,KAAA,MAAW,CAAC,OAAS,EAAA,YAAY,KAAK,MAAO,CAAA,OAAA,CAAQ,SAAS,CAAG,EAAA;AAG/D,MAAA,MAAM,WAAc,GAAA,MAAMC,mBAAG,CAAA,QAAA,CAAS,OAAO,CAAA,CAAA;AAC7C,MAAA,KAAA,CAAM,IAAK,CAAA,CAAA,EAAG,WAAW,CAAA,CAAA,EAAI,YAAY,CAAE,CAAA,CAAA,CAAA;AAAA,KAC7C;AAGA,IAAM,MAAA,GAAA,GAAM,IAAI,KAAc,EAAA,CAAA;AAC9B,IAAA,KAAA,MAAW,CAAC,GAAK,EAAA,KAAK,KAAK,MAAO,CAAA,OAAA,
CAAQ,OAAO,CAAG,EAAA;AAClD,MAAA,GAAA,CAAI,IAAK,CAAA,CAAA,EAAG,GAAG,CAAA,CAAA,EAAI,KAAK,CAAE,CAAA,CAAA,CAAA;AAAA,KAC5B;AAEA,IAAA,MAAM,CAAC,EAAE,KAAO,EAAA,KAAA,EAAO,YAAY,UAAW,EAAC,CAC7C,GAAA,MAAM,IAAK,CAAA,YAAA,CAAa,GAAI,CAAA,SAAA,EAAW,MAAM,SAAW,EAAA;AAAA,MACtD,OAAA;AAAA,MACA,UAAY,EAAA;AAAA,QACV,UAAY,EAAA,IAAA;AAAA,QACZ,KAAA;AAAA,OACF;AAAA,MACA,GAAI,UAAa,GAAA,EAAE,UAAY,EAAA,UAAA,KAAe,EAAC;AAAA,MAC/C,UAAY,EAAA,OAAA;AAAA,MACZ,GAAA;AAAA,MACA,GAAG,WAAA;AAAA,KAC6B,CAAA,CAAA;AAEpC,IAAA,IAAI,KAAO,EAAA;AACT,MAAA,MAAM,IAAI,KAAA;AAAA,QACR,0DAA0D,KAAK,CAAA,CAAA;AAAA,OACjE,CAAA;AAAA,KACF;AAEA,IAAA,IAAI,eAAe,CAAG,EAAA;AACpB,MAAA,MAAM,IAAI,KAAA;AAAA,QACR,mDAAmD,UAAU,CAAA,CAAA,CAAA;AAAA,OAC/D,CAAA;AAAA,KACF;AAAA,GACF;AACF;;;;"}
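
The source map above embeds the original TypeScript, which marks DockerContainerRunner as @internal and types the runContainer options (imageName, command, args, logStream, mountDirs, workingDir, envVars, pullImage, defaultUser). A minimal usage sketch follows; every literal value is an illustrative placeholder and the import path is assumed, not taken from this diff.

```typescript
// Illustrative sketch only; image name, paths and env vars are placeholders.
import { PassThrough } from 'stream';
// Assumed import; the class is @internal and may not be part of the public API surface.
import { DockerContainerRunner } from '@backstage/plugin-techdocs-node';

async function buildDocsInContainer() {
  const runner = new DockerContainerRunner();
  await runner.runContainer({
    imageName: 'spotify/techdocs:v1.2.6',   // pulled first, since pullImage defaults to true
    args: ['build', '-d', '/output'],       // forwarded to dockerode's run()
    logStream: new PassThrough(),           // receives pull and build output
    mountDirs: { '/host/docs-src': '/input', '/host/site': '/output' }, // realpath'd into Binds
    workingDir: '/input',
    envVars: { GIT_TERMINAL_PROMPT: '0' },  // serialized as KEY=value into Env
  });                                       // throws if Docker is unreachable or the exit code is non-zero
}
```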

+++ package/dist/stages/generate/generators.cjs.js
@@ -0,0 +1,42 @@
+'use strict';
+
+var helpers = require('./helpers.cjs.js');
+var techdocs = require('./techdocs.cjs.js');
+
+class Generators {
+  generatorMap = /* @__PURE__ */ new Map();
+  /**
+   * Returns a generators instance containing a generator for TechDocs
+   * @param config - A Backstage configuration
+   * @param options - Options to configure the TechDocs generator
+   */
+  static async fromConfig(config, options) {
+    const generators = new Generators();
+    const techdocsGenerator = options.customGenerator ?? techdocs.TechdocsGenerator.fromConfig(config, options);
+    generators.register("techdocs", techdocsGenerator);
+    return generators;
+  }
+  /**
+   * Register a generator in the generators collection
+   * @param generatorKey - Unique identifier for the generator
+   * @param generator - The generator instance to register
+   */
+  register(generatorKey, generator) {
+    this.generatorMap.set(generatorKey, generator);
+  }
+  /**
+   * Returns the generator for a given TechDocs entity
+   * @param entity - A TechDocs entity instance
+   */
+  get(entity) {
+    const generatorKey = helpers.getGeneratorKey(entity);
+    const generator = this.generatorMap.get(generatorKey);
+    if (!generator) {
+      throw new Error(`No generator registered for entity: "${generatorKey}"`);
+    }
+    return generator;
+  }
+}
+
+exports.Generators = Generators;
+//# sourceMappingURL=generators.cjs.js.map

+++ package/dist/stages/generate/generators.cjs.js.map
@@ -0,0 +1 @@
{"version":3,"file":"generators.cjs.js","sources":["../../../src/stages/generate/generators.ts"],"sourcesContent":["/*\n * Copyright 2020 The Backstage Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Entity } from '@backstage/catalog-model';\nimport { Config } from '@backstage/config';\nimport { getGeneratorKey } from './helpers';\nimport { TechdocsGenerator } from './techdocs';\nimport {\n GeneratorBase,\n GeneratorBuilder,\n SupportedGeneratorKey,\n} from './types';\nimport { LoggerService } from '@backstage/backend-plugin-api';\nimport { TechDocsContainerRunner } from './types';\n\n/**\n * Collection of docs generators\n * @public\n */\nexport class Generators implements GeneratorBuilder {\n private generatorMap = new Map<SupportedGeneratorKey, GeneratorBase>();\n\n /**\n * Returns a generators instance containing a generator for TechDocs\n * @param config - A Backstage configuration\n * @param options - Options to configure the TechDocs generator\n */\n static async fromConfig(\n config: Config,\n options: {\n logger: LoggerService;\n containerRunner?: TechDocsContainerRunner;\n customGenerator?: TechdocsGenerator;\n },\n ): Promise<GeneratorBuilder> {\n const generators = new Generators();\n\n const techdocsGenerator =\n options.customGenerator ?? TechdocsGenerator.fromConfig(config, options);\n generators.register('techdocs', techdocsGenerator);\n\n return generators;\n }\n\n /**\n * Register a generator in the generators collection\n * @param generatorKey - Unique identifier for the generator\n * @param generator - The generator instance to register\n */\n register(generatorKey: SupportedGeneratorKey, generator: GeneratorBase) {\n this.generatorMap.set(generatorKey, generator);\n }\n\n /**\n * Returns the generator for a given TechDocs entity\n * @param entity - A TechDocs entity instance\n */\n get(entity: Entity): GeneratorBase {\n const generatorKey = getGeneratorKey(entity);\n const generator = this.generatorMap.get(generatorKey);\n\n if (!generator) {\n throw new Error(`No generator registered for entity: \"${generatorKey}\"`);\n }\n\n return generator;\n }\n}\n"],"names":["TechdocsGenerator","getGeneratorKey"],"mappings":";;;;;AAgCO,MAAM,UAAuC,CAAA;AAAA,EAC1C,YAAA,uBAAmB,GAA0C,EAAA,CAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOrE,aAAa,UACX,CAAA,MAAA,EACA,OAK2B,EAAA;AAC3B,IAAM,MAAA,UAAA,GAAa,IAAI,UAAW,EAAA,CAAA;AAElC,IAAA,MAAM,oBACJ,OAAQ,CAAA,eAAA,IAAmBA,0BAAkB,CAAA,UAAA,CAAW,QAAQ,OAAO,CAAA,CAAA;AACzE,IAAW,UAAA,CAAA,QAAA,CAAS,YAAY,iBAAiB,CAAA,CAAA;AAEjD,IAAO,OAAA,UAAA,CAAA;AAAA,GACT;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOA,QAAA,CAAS,cAAqC,SAA0B,EAAA;AACtE,IAAK,IAAA,CAAA,YAAA,CAAa,GAAI,CAAA,YAAA,EAAc,SAAS,CAAA,CAAA;AAAA,GAC/C;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,IAAI,MAA+B,EAAA;AACjC,IAAM,MAAA,YAAA,GAAeC,wBAAgB,MAAM,CAAA,CAAA;AAC3C,IAAA,MAAM,SAAY,GAAA,IAAA,CAAK,YAAa,CAAA,GAAA,CAAI,YAAY,CAAA,CAAA;AAEpD,IAAA,IAAI,CAAC,SAAW,EAAA;AACd,MAAA,MAAM,IAAI,KAAA,CAAM,CAAwC,qCAAA,EAAA,YAAY,CAAG,CAAA,CAAA,CAAA,CAAA;AAAA,KACzE;AAEA,IAAO,OAAA,SAAA,CAAA;AAAA,GACT;AACF;;;;"}

+++ package/dist/stages/generate/helpers.cjs.js
@@ -0,0 +1,265 @@
+'use strict';
+
+var backendPluginApi = require('@backstage/backend-plugin-api');
+var errors = require('@backstage/errors');
+var child_process = require('child_process');
+var fs = require('fs-extra');
+var gitUrlParse = require('git-url-parse');
+var yaml = require('js-yaml');
+var path = require('path');
+var stream = require('stream');
+var helpers = require('../publish/helpers.cjs.js');
+
+function _interopDefaultCompat (e) { return e && typeof e === 'object' && 'default' in e ? e : { default: e }; }
+
+var fs__default = /*#__PURE__*/_interopDefaultCompat(fs);
+var gitUrlParse__default = /*#__PURE__*/_interopDefaultCompat(gitUrlParse);
+var yaml__default = /*#__PURE__*/_interopDefaultCompat(yaml);
+var path__default = /*#__PURE__*/_interopDefaultCompat(path);
+
+function getGeneratorKey(entity) {
+  if (!entity) {
+    throw new Error("No entity provided");
+  }
+  return "techdocs";
+}
+const runCommand = async ({
+  command,
+  args,
+  options,
+  logStream = new stream.PassThrough()
+}) => {
+  await new Promise((resolve, reject) => {
+    const process = child_process.spawn(command, args, options);
+    process.stdout.on("data", (stream) => {
+      logStream.write(stream);
+    });
+    process.stderr.on("data", (stream) => {
+      logStream.write(stream);
+    });
+    process.on("error", (error) => {
+      return reject(error);
+    });
+    process.on("close", (code) => {
+      if (code !== 0) {
+        return reject(`Command ${command} failed, exit code: ${code}`);
+      }
+      return resolve();
+    });
+  });
+};
+const getRepoUrlFromLocationAnnotation = (parsedLocationAnnotation, scmIntegrations, docsFolder = "docs") => {
+  const { type: locationType, target } = parsedLocationAnnotation;
+  if (locationType === "url") {
+    const integration = scmIntegrations.byUrl(target);
+    if (integration && ["github", "gitlab", "bitbucketServer", "harness"].includes(
+      integration.type
+    )) {
+      const { filepathtype } = gitUrlParse__default.default(target);
+      if (filepathtype === "") {
+        return { repo_url: target };
+      }
+      const sourceFolder = integration.resolveUrl({
+        url: `./${docsFolder}`,
+        base: target.endsWith("/") ? target : `${target}/`
+      });
+      return {
+        repo_url: target,
+        edit_uri: integration.resolveEditUrl(sourceFolder)
+      };
+    }
+  }
+  return {};
+};
+class UnknownTag {
+  constructor(data, type) {
+    this.data = data;
+    this.type = type;
+  }
+}
+const MKDOCS_SCHEMA = yaml.DEFAULT_SCHEMA.extend([
+  new yaml.Type("", {
+    kind: "scalar",
+    multi: true,
+    representName: (o) => o.type,
+    represent: (o) => o.data ?? "",
+    instanceOf: UnknownTag,
+    construct: (data, type) => new UnknownTag(data, type)
+  }),
+  new yaml.Type("tag:", {
+    kind: "mapping",
+    multi: true,
+    representName: (o) => o.type,
+    represent: (o) => o.data ?? "",
+    instanceOf: UnknownTag,
+    construct: (data, type) => new UnknownTag(data, type)
+  }),
+  new yaml.Type("", {
+    kind: "sequence",
+    multi: true,
+    representName: (o) => o.type,
+    represent: (o) => o.data ?? "",
+    instanceOf: UnknownTag,
+    construct: (data, type) => new UnknownTag(data, type)
+  })
+]);
+const generateMkdocsYml = async (inputDir, siteOptions) => {
+  try {
+    const mkdocsYmlPath = path__default.default.join(inputDir, "mkdocs.yml");
+    const defaultSiteName = siteOptions?.name ?? "Documentation Site";
+    const defaultMkdocsContent = {
+      site_name: defaultSiteName,
+      docs_dir: "docs",
+      plugins: ["techdocs-core"]
+    };
+    await fs__default.default.writeFile(
+      mkdocsYmlPath,
+      yaml__default.default.dump(defaultMkdocsContent, { schema: MKDOCS_SCHEMA })
+    );
+  } catch (error) {
+    throw new errors.ForwardedError("Could not generate mkdocs.yml file", error);
+  }
+};
+const getMkdocsYml = async (inputDir, options) => {
+  let mkdocsYmlPath;
+  let mkdocsYmlFileString;
+  try {
+    if (options?.mkdocsConfigFileName) {
+      mkdocsYmlPath = path__default.default.join(inputDir, options.mkdocsConfigFileName);
+      if (!await fs__default.default.pathExists(mkdocsYmlPath)) {
+        throw new Error(`The specified file ${mkdocsYmlPath} does not exist`);
+      }
+      mkdocsYmlFileString = await fs__default.default.readFile(mkdocsYmlPath, "utf8");
+      return {
+        path: mkdocsYmlPath,
+        content: mkdocsYmlFileString,
+        configIsTemporary: false
+      };
+    }
+    mkdocsYmlPath = path__default.default.join(inputDir, "mkdocs.yaml");
+    if (await fs__default.default.pathExists(mkdocsYmlPath)) {
+      mkdocsYmlFileString = await fs__default.default.readFile(mkdocsYmlPath, "utf8");
+      return {
+        path: mkdocsYmlPath,
+        content: mkdocsYmlFileString,
+        configIsTemporary: false
+      };
+    }
+    mkdocsYmlPath = path__default.default.join(inputDir, "mkdocs.yml");
+    if (await fs__default.default.pathExists(mkdocsYmlPath)) {
+      mkdocsYmlFileString = await fs__default.default.readFile(mkdocsYmlPath, "utf8");
+      return {
+        path: mkdocsYmlPath,
+        content: mkdocsYmlFileString,
+        configIsTemporary: false
+      };
+    }
+    await generateMkdocsYml(inputDir, options);
+    mkdocsYmlFileString = await fs__default.default.readFile(mkdocsYmlPath, "utf8");
+  } catch (error) {
+    throw new errors.ForwardedError(
+      "Could not read MkDocs YAML config file mkdocs.yml or mkdocs.yaml or default for validation",
+      error
+    );
+  }
+  return {
+    path: mkdocsYmlPath,
+    content: mkdocsYmlFileString,
+    configIsTemporary: true
+  };
+};
+const validateMkdocsYaml = async (inputDir, mkdocsYmlFileString) => {
+  const mkdocsYml = yaml__default.default.load(mkdocsYmlFileString, {
+    schema: MKDOCS_SCHEMA
+  });
+  if (mkdocsYml === null || typeof mkdocsYml !== "object") {
+    return void 0;
+  }
+  const parsedMkdocsYml = mkdocsYml;
+  if (parsedMkdocsYml.docs_dir && !backendPluginApi.isChildPath(inputDir, path.resolve(inputDir, parsedMkdocsYml.docs_dir))) {
+    throw new Error(
+      `docs_dir configuration value in mkdocs can't be an absolute directory or start with ../ for security reasons.
+       Use relative paths instead which are resolved relative to your mkdocs.yml file location.`
+    );
+  }
+  return parsedMkdocsYml.docs_dir;
+};
+const patchIndexPreBuild = async ({
+  inputDir,
+  logger,
+  docsDir = "docs"
+}) => {
+  const docsPath = path__default.default.join(inputDir, docsDir);
+  const indexMdPath = path__default.default.join(docsPath, "index.md");
+  if (await fs__default.default.pathExists(indexMdPath)) {
+    return;
+  }
+  logger.warn(`${path__default.default.join(docsDir, "index.md")} not found.`);
+  const fallbacks = [
+    path__default.default.join(docsPath, "README.md"),
+    path__default.default.join(docsPath, "readme.md"),
+    path__default.default.join(inputDir, "README.md"),
+    path__default.default.join(inputDir, "readme.md")
+  ];
+  await fs__default.default.ensureDir(docsPath);
+  for (const filePath of fallbacks) {
+    try {
+      await fs__default.default.copyFile(filePath, indexMdPath);
+      return;
+    } catch (error) {
+      logger.warn(`${path__default.default.relative(inputDir, filePath)} not found.`);
+    }
+  }
+  logger.warn(
+    `Could not find any techdocs' index file. Please make sure at least one of ${[
+      indexMdPath,
+      ...fallbacks
+    ].join(" ")} exists.`
+  );
+};
+const createOrUpdateMetadata = async (techdocsMetadataPath, logger) => {
+  const techdocsMetadataDir = techdocsMetadataPath.split(path__default.default.sep).slice(0, -1).join(path__default.default.sep);
+  try {
+    await fs__default.default.access(techdocsMetadataPath, fs__default.default.constants.F_OK);
+  } catch (err) {
+    await fs__default.default.writeJson(techdocsMetadataPath, JSON.parse("{}"));
+  }
+  let json;
+  try {
+    json = await fs__default.default.readJson(techdocsMetadataPath);
+  } catch (err) {
+    errors.assertError(err);
+    const message = `Invalid JSON at ${techdocsMetadataPath} with error ${err.message}`;
+    logger.error(message);
+    throw new Error(message);
+  }
+  json.build_timestamp = Date.now();
+  try {
+    json.files = (await helpers.getFileTreeRecursively(techdocsMetadataDir)).map(
+      (file) => file.replace(`${techdocsMetadataDir}${path__default.default.sep}`, "")
+    );
+  } catch (err) {
+    errors.assertError(err);
+    json.files = [];
+    logger.warn(`Unable to add files list to metadata: ${err.message}`);
+  }
+  await fs__default.default.writeJson(techdocsMetadataPath, json);
+  return;
+};
+const storeEtagMetadata = async (techdocsMetadataPath, etag) => {
+  const json = await fs__default.default.readJson(techdocsMetadataPath);
+  json.etag = etag;
+  await fs__default.default.writeJson(techdocsMetadataPath, json);
+};
+
+exports.MKDOCS_SCHEMA = MKDOCS_SCHEMA;
+exports.createOrUpdateMetadata = createOrUpdateMetadata;
+exports.generateMkdocsYml = generateMkdocsYml;
+exports.getGeneratorKey = getGeneratorKey;
+exports.getMkdocsYml = getMkdocsYml;
+exports.getRepoUrlFromLocationAnnotation = getRepoUrlFromLocationAnnotation;
+exports.patchIndexPreBuild = patchIndexPreBuild;
+exports.runCommand = runCommand;
+exports.storeEtagMetadata = storeEtagMetadata;
+exports.validateMkdocsYaml = validateMkdocsYaml;
+//# sourceMappingURL=helpers.cjs.js.map

+++ package/dist/stages/generate/helpers.cjs.js.map
@@ -0,0 +1 @@
{"version":3,"file":"helpers.cjs.js","sources":["../../../src/stages/generate/helpers.ts"],"sourcesContent":["/*\n * Copyright 2020 The Backstage Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { isChildPath, LoggerService } from '@backstage/backend-plugin-api';\nimport { Entity } from '@backstage/catalog-model';\nimport { assertError, ForwardedError } from '@backstage/errors';\nimport { ScmIntegrationRegistry } from '@backstage/integration';\nimport { SpawnOptionsWithoutStdio, spawn } from 'child_process';\nimport fs from 'fs-extra';\nimport gitUrlParse from 'git-url-parse';\nimport yaml, { DEFAULT_SCHEMA, Type } from 'js-yaml';\nimport path, { resolve as resolvePath } from 'path';\nimport { PassThrough, Writable } from 'stream';\nimport { ParsedLocationAnnotation } from '../../helpers';\nimport { DefaultMkdocsContent, SupportedGeneratorKey } from './types';\nimport { getFileTreeRecursively } from '../publish/helpers';\n\n// TODO: Implement proper support for more generators.\nexport function getGeneratorKey(entity: Entity): SupportedGeneratorKey {\n if (!entity) {\n throw new Error('No entity provided');\n }\n\n return 'techdocs';\n}\n\nexport type RunCommandOptions = {\n /** command to run */\n command: string;\n /** arguments to pass the command */\n args: string[];\n /** options to pass to spawn */\n options: SpawnOptionsWithoutStdio;\n /** stream to capture stdout and stderr output */\n logStream?: Writable;\n};\n\n/**\n * Run a command in a sub-process, normally a shell command.\n */\nexport const runCommand = async ({\n command,\n args,\n options,\n logStream = new PassThrough(),\n}: RunCommandOptions) => {\n await new Promise<void>((resolve, reject) => {\n const process = spawn(command, args, options);\n\n process.stdout.on('data', stream => {\n logStream.write(stream);\n });\n\n process.stderr.on('data', stream => {\n logStream.write(stream);\n });\n\n process.on('error', error => {\n return reject(error);\n });\n\n process.on('close', code => {\n if (code !== 0) {\n return reject(`Command ${command} failed, exit code: ${code}`);\n }\n return resolve();\n });\n });\n};\n\n/**\n * Return the source url for MkDocs based on the backstage.io/techdocs-ref annotation.\n * Depending on the type of target, it can either return a repo_url, an edit_uri, both, or none.\n *\n * @param parsedLocationAnnotation - Object with location url and type\n * @param scmIntegrations - the scmIntegration to do url transformations\n * @param docsFolder - the configured docs folder in the mkdocs.yml (defaults to 'docs')\n * @returns the settings for the mkdocs.yml\n */\nexport const getRepoUrlFromLocationAnnotation = (\n parsedLocationAnnotation: ParsedLocationAnnotation,\n scmIntegrations: ScmIntegrationRegistry,\n docsFolder: string = 'docs',\n): { repo_url?: string; edit_uri?: string } => {\n const { type: locationType, target } = parsedLocationAnnotation;\n\n if (locationType === 'url') {\n const integration = scmIntegrations.byUrl(target);\n\n // We only support it 
for github, gitlab, bitbucketServer and harness for now as the edit_uri\n // is not properly supported for others yet.\n if (\n integration &&\n ['github', 'gitlab', 'bitbucketServer', 'harness'].includes(\n integration.type,\n )\n ) {\n // handle the case where a user manually writes url:https://github.com/backstage/backstage i.e. without /blob/...\n const { filepathtype } = gitUrlParse(target);\n if (filepathtype === '') {\n return { repo_url: target };\n }\n\n const sourceFolder = integration.resolveUrl({\n url: `./${docsFolder}`,\n base: target.endsWith('/') ? target : `${target}/`,\n });\n return {\n repo_url: target,\n edit_uri: integration.resolveEditUrl(sourceFolder),\n };\n }\n }\n\n return {};\n};\n\nclass UnknownTag {\n constructor(public readonly data: any, public readonly type?: string) {}\n}\n\nexport const MKDOCS_SCHEMA = DEFAULT_SCHEMA.extend([\n new Type('', {\n kind: 'scalar',\n multi: true,\n representName: o => (o as UnknownTag).type,\n represent: o => (o as UnknownTag).data ?? '',\n instanceOf: UnknownTag,\n construct: (data: string, type?: string) => new UnknownTag(data, type),\n }),\n new Type('tag:', {\n kind: 'mapping',\n multi: true,\n representName: o => (o as UnknownTag).type,\n represent: o => (o as UnknownTag).data ?? '',\n instanceOf: UnknownTag,\n construct: (data: string, type?: string) => new UnknownTag(data, type),\n }),\n new Type('', {\n kind: 'sequence',\n multi: true,\n representName: o => (o as UnknownTag).type,\n represent: o => (o as UnknownTag).data ?? '',\n instanceOf: UnknownTag,\n construct: (data: string, type?: string) => new UnknownTag(data, type),\n }),\n]);\n\n/**\n * Generates a mkdocs.yml configuration file\n *\n * @param inputDir - base dir to where the mkdocs.yml file will be created\n * @param siteOptions - options for the site: `name` property will be used in mkdocs.yml for the\n * required `site_name` property, default value is \"Documentation Site\"\n */\nexport const generateMkdocsYml = async (\n inputDir: string,\n siteOptions?: { name?: string },\n) => {\n try {\n // TODO(awanlin): Use a provided default mkdocs.yml\n // from config or some specified location. If this is\n // not provided then fall back to generating bare\n // minimum mkdocs.yml file\n\n const mkdocsYmlPath = path.join(inputDir, 'mkdocs.yml');\n const defaultSiteName = siteOptions?.name ?? 
'Documentation Site';\n const defaultMkdocsContent: DefaultMkdocsContent = {\n site_name: defaultSiteName,\n docs_dir: 'docs',\n plugins: ['techdocs-core'],\n };\n\n await fs.writeFile(\n mkdocsYmlPath,\n yaml.dump(defaultMkdocsContent, { schema: MKDOCS_SCHEMA }),\n );\n } catch (error) {\n throw new ForwardedError('Could not generate mkdocs.yml file', error);\n }\n};\n\n/**\n * Finds and loads the contents of an mkdocs.yml, mkdocs.yaml file, a file\n * with a specified name or an ad-hoc created file with minimal config.\n * @public\n *\n * @param inputDir - base dir to be searched for either an mkdocs.yml or mkdocs.yaml file.\n * @param options - name: default mkdocs site_name to be used with a ad hoc file default value is \"Documentation Site\"\n * mkdocsConfigFileName (optional): a non-default file name to be used as the config\n */\nexport const getMkdocsYml = async (\n inputDir: string,\n options?: { name?: string; mkdocsConfigFileName?: string },\n): Promise<{ path: string; content: string; configIsTemporary: boolean }> => {\n let mkdocsYmlPath: string;\n let mkdocsYmlFileString: string;\n try {\n if (options?.mkdocsConfigFileName) {\n mkdocsYmlPath = path.join(inputDir, options.mkdocsConfigFileName);\n if (!(await fs.pathExists(mkdocsYmlPath))) {\n throw new Error(`The specified file ${mkdocsYmlPath} does not exist`);\n }\n\n mkdocsYmlFileString = await fs.readFile(mkdocsYmlPath, 'utf8');\n return {\n path: mkdocsYmlPath,\n content: mkdocsYmlFileString,\n configIsTemporary: false,\n };\n }\n\n mkdocsYmlPath = path.join(inputDir, 'mkdocs.yaml');\n if (await fs.pathExists(mkdocsYmlPath)) {\n mkdocsYmlFileString = await fs.readFile(mkdocsYmlPath, 'utf8');\n return {\n path: mkdocsYmlPath,\n content: mkdocsYmlFileString,\n configIsTemporary: false,\n };\n }\n\n mkdocsYmlPath = path.join(inputDir, 'mkdocs.yml');\n if (await fs.pathExists(mkdocsYmlPath)) {\n mkdocsYmlFileString = await fs.readFile(mkdocsYmlPath, 'utf8');\n return {\n path: mkdocsYmlPath,\n content: mkdocsYmlFileString,\n configIsTemporary: false,\n };\n }\n\n // No mkdocs file, generate it\n await generateMkdocsYml(inputDir, options);\n mkdocsYmlFileString = await fs.readFile(mkdocsYmlPath, 'utf8');\n } catch (error) {\n throw new ForwardedError(\n 'Could not read MkDocs YAML config file mkdocs.yml or mkdocs.yaml or default for validation',\n error,\n );\n }\n\n return {\n path: mkdocsYmlPath,\n content: mkdocsYmlFileString,\n configIsTemporary: true,\n };\n};\n\n/**\n * Validating mkdocs config file for incorrect/insecure values\n * Throws on invalid configs\n *\n * @param inputDir - base dir to be used as a docs_dir path validity check\n * @param mkdocsYmlFileString - The string contents of the loaded\n * mkdocs.yml or equivalent of a docs site\n * @returns the parsed docs_dir or undefined\n */\nexport const validateMkdocsYaml = async (\n inputDir: string,\n mkdocsYmlFileString: string,\n): Promise<string | undefined> => {\n const mkdocsYml = yaml.load(mkdocsYmlFileString, {\n schema: MKDOCS_SCHEMA,\n });\n\n if (mkdocsYml === null || typeof mkdocsYml !== 'object') {\n return undefined;\n }\n\n const parsedMkdocsYml: Record<string, any> = mkdocsYml;\n if (\n parsedMkdocsYml.docs_dir &&\n !isChildPath(inputDir, resolvePath(inputDir, parsedMkdocsYml.docs_dir))\n ) {\n throw new Error(\n `docs_dir configuration value in mkdocs can't be an absolute directory or start with ../ for security reasons.\n Use relative paths instead which are resolved relative to your mkdocs.yml file location.`,\n );\n }\n return 
parsedMkdocsYml.docs_dir;\n};\n\n/**\n * Update docs/index.md file before TechDocs generator uses it to generate docs site,\n * falling back to docs/README.md or README.md in case a default docs/index.md\n * is not provided.\n */\nexport const patchIndexPreBuild = async ({\n inputDir,\n logger,\n docsDir = 'docs',\n}: {\n inputDir: string;\n logger: LoggerService;\n docsDir?: string;\n}) => {\n const docsPath = path.join(inputDir, docsDir);\n const indexMdPath = path.join(docsPath, 'index.md');\n\n if (await fs.pathExists(indexMdPath)) {\n return;\n }\n logger.warn(`${path.join(docsDir, 'index.md')} not found.`);\n const fallbacks = [\n path.join(docsPath, 'README.md'),\n path.join(docsPath, 'readme.md'),\n path.join(inputDir, 'README.md'),\n path.join(inputDir, 'readme.md'),\n ];\n\n await fs.ensureDir(docsPath);\n for (const filePath of fallbacks) {\n try {\n await fs.copyFile(filePath, indexMdPath);\n return;\n } catch (error) {\n logger.warn(`${path.relative(inputDir, filePath)} not found.`);\n }\n }\n\n logger.warn(\n `Could not find any techdocs' index file. Please make sure at least one of ${[\n indexMdPath,\n ...fallbacks,\n ].join(' ')} exists.`,\n );\n};\n\n/**\n * Create or update the techdocs_metadata.json. Values initialized/updated are:\n * - The build_timestamp (now)\n * - The list of files generated\n *\n * @param techdocsMetadataPath - File path to techdocs_metadata.json\n */\nexport const createOrUpdateMetadata = async (\n techdocsMetadataPath: string,\n logger: LoggerService,\n): Promise<void> => {\n const techdocsMetadataDir = techdocsMetadataPath\n .split(path.sep)\n .slice(0, -1)\n .join(path.sep);\n // check if file exists, create if it does not.\n try {\n await fs.access(techdocsMetadataPath, fs.constants.F_OK);\n } catch (err) {\n // Bootstrap file with empty JSON\n await fs.writeJson(techdocsMetadataPath, JSON.parse('{}'));\n }\n // check if valid Json\n let json;\n try {\n json = await fs.readJson(techdocsMetadataPath);\n } catch (err) {\n assertError(err);\n const message = `Invalid JSON at ${techdocsMetadataPath} with error ${err.message}`;\n logger.error(message);\n throw new Error(message);\n }\n\n json.build_timestamp = Date.now();\n\n // Get and write generated files to the metadata JSON. Each file string is in\n // a form appropriate for invalidating the associated object from cache.\n try {\n json.files = (await getFileTreeRecursively(techdocsMetadataDir)).map(file =>\n file.replace(`${techdocsMetadataDir}${path.sep}`, ''),\n );\n } catch (err) {\n assertError(err);\n json.files = [];\n logger.warn(`Unable to add files list to metadata: ${err.message}`);\n }\n\n await fs.writeJson(techdocsMetadataPath, json);\n return;\n};\n\n/**\n * Update the techdocs_metadata.json to add etag of the prepared tree (e.g. 
commit SHA or actual Etag of the resource).\n * This is helpful to check if a TechDocs site in storage has gone outdated, without maintaining an in-memory build info\n * per Backstage instance.\n *\n * @param techdocsMetadataPath - File path to techdocs_metadata.json\n * @param etag - The ETag to use\n */\nexport const storeEtagMetadata = async (\n techdocsMetadataPath: string,\n etag: string,\n): Promise<void> => {\n const json = await fs.readJson(techdocsMetadataPath);\n json.etag = etag;\n await fs.writeJson(techdocsMetadataPath, json);\n};\n"],"names":["PassThrough","spawn","gitUrlParse","DEFAULT_SCHEMA","Type","path","fs","yaml","ForwardedError","isChildPath","resolvePath","assertError","getFileTreeRecursively"],"mappings":";;;;;;;;;;;;;;;;;;;AA+BO,SAAS,gBAAgB,MAAuC,EAAA;AACrE,EAAA,IAAI,CAAC,MAAQ,EAAA;AACX,IAAM,MAAA,IAAI,MAAM,oBAAoB,CAAA,CAAA;AAAA,GACtC;AAEA,EAAO,OAAA,UAAA,CAAA;AACT,CAAA;AAgBO,MAAM,aAAa,OAAO;AAAA,EAC/B,OAAA;AAAA,EACA,IAAA;AAAA,EACA,OAAA;AAAA,EACA,SAAA,GAAY,IAAIA,kBAAY,EAAA;AAC9B,CAAyB,KAAA;AACvB,EAAA,MAAM,IAAI,OAAA,CAAc,CAAC,OAAA,EAAS,MAAW,KAAA;AAC3C,IAAA,MAAM,OAAU,GAAAC,mBAAA,CAAM,OAAS,EAAA,IAAA,EAAM,OAAO,CAAA,CAAA;AAE5C,IAAQ,OAAA,CAAA,MAAA,CAAO,EAAG,CAAA,MAAA,EAAQ,CAAU,MAAA,KAAA;AAClC,MAAA,SAAA,CAAU,MAAM,MAAM,CAAA,CAAA;AAAA,KACvB,CAAA,CAAA;AAED,IAAQ,OAAA,CAAA,MAAA,CAAO,EAAG,CAAA,MAAA,EAAQ,CAAU,MAAA,KAAA;AAClC,MAAA,SAAA,CAAU,MAAM,MAAM,CAAA,CAAA;AAAA,KACvB,CAAA,CAAA;AAED,IAAQ,OAAA,CAAA,EAAA,CAAG,SAAS,CAAS,KAAA,KAAA;AAC3B,MAAA,OAAO,OAAO,KAAK,CAAA,CAAA;AAAA,KACpB,CAAA,CAAA;AAED,IAAQ,OAAA,CAAA,EAAA,CAAG,SAAS,CAAQ,IAAA,KAAA;AAC1B,MAAA,IAAI,SAAS,CAAG,EAAA;AACd,QAAA,OAAO,MAAO,CAAA,CAAA,QAAA,EAAW,OAAO,CAAA,oBAAA,EAAuB,IAAI,CAAE,CAAA,CAAA,CAAA;AAAA,OAC/D;AACA,MAAA,OAAO,OAAQ,EAAA,CAAA;AAAA,KAChB,CAAA,CAAA;AAAA,GACF,CAAA,CAAA;AACH,EAAA;AAWO,MAAM,gCAAmC,GAAA,CAC9C,wBACA,EAAA,eAAA,EACA,aAAqB,MACwB,KAAA;AAC7C,EAAA,MAAM,EAAE,IAAA,EAAM,YAAc,EAAA,MAAA,EAAW,GAAA,wBAAA,CAAA;AAEvC,EAAA,IAAI,iBAAiB,KAAO,EAAA;AAC1B,IAAM,MAAA,WAAA,GAAc,eAAgB,CAAA,KAAA,CAAM,MAAM,CAAA,CAAA;AAIhD,IAAA,IACE,eACA,CAAC,QAAA,EAAU,QAAU,EAAA,iBAAA,EAAmB,SAAS,CAAE,CAAA,QAAA;AAAA,MACjD,WAAY,CAAA,IAAA;AAAA,KAEd,EAAA;AAEA,MAAA,MAAM,EAAE,YAAA,EAAiB,GAAAC,4BAAA,CAAY,MAAM,CAAA,CAAA;AAC3C,MAAA,IAAI,iBAAiB,EAAI,EAAA;AACvB,QAAO,OAAA,EAAE,UAAU,MAAO,EAAA,CAAA;AAAA,OAC5B;AAEA,MAAM,MAAA,YAAA,GAAe,YAAY,UAAW,CAAA;AAAA,QAC1C,GAAA,EAAK,KAAK,UAAU,CAAA,CAAA;AAAA,QACpB,MAAM,MAAO,CAAA,QAAA,CAAS,GAAG,CAAI,GAAA,MAAA,GAAS,GAAG,MAAM,CAAA,CAAA,CAAA;AAAA,OAChD,CAAA,CAAA;AACD,MAAO,OAAA;AAAA,QACL,QAAU,EAAA,MAAA;AAAA,QACV,QAAA,EAAU,WAAY,CAAA,cAAA,CAAe,YAAY,CAAA;AAAA,OACnD,CAAA;AAAA,KACF;AAAA,GACF;AAEA,EAAA,OAAO,EAAC,CAAA;AACV,EAAA;AAEA,MAAM,UAAW,CAAA;AAAA,EACf,WAAA,CAA4B,MAA2B,IAAe,EAAA;AAA1C,IAAA,IAAA,CAAA,IAAA,GAAA,IAAA,CAAA;AAA2B,IAAA,IAAA,CAAA,IAAA,GAAA,IAAA,CAAA;AAAA,GAAgB;AACzE,CAAA;AAEa,MAAA,aAAA,GAAgBC,oBAAe,MAAO,CAAA;AAAA,EACjD,IAAIC,UAAK,EAAI,EAAA;AAAA,IACX,IAAM,EAAA,QAAA;AAAA,IACN,KAAO,EAAA,IAAA;AAAA,IACP,aAAA,EAAe,OAAM,CAAiB,CAAA,IAAA;AAAA,IACtC,SAAA,EAAW,CAAM,CAAA,KAAA,CAAA,CAAiB,IAAQ,IAAA,EAAA;AAAA,IAC1C,UAAY,EAAA,UAAA;AAAA,IACZ,WAAW,CAAC,IAAA,EAAc,SAAkB,IAAI,UAAA,CAAW,MAAM,IAAI,CAAA;AAAA,GACtE,CAAA;AAAA,EACD,IAAIA,UAAK,MAAQ,EAAA;AAAA,IACf,IAAM,EAAA,SAAA;AAAA,IACN,KAAO,EAAA,IAAA;AAAA,IACP,aAAA,EAAe,OAAM,CAAiB,CAAA,IAAA;AAAA,IACtC,SAAA,EAAW,CAAM,CAAA,KAAA,CAAA,CAAiB,IAAQ,IAAA,EAAA;AAAA,IAC1C,UAAY,EAAA,UAAA;AAAA,IACZ,WAAW,CAAC,IAAA,EAAc,SAAkB,IAAI,UAAA,CAAW,MAAM,IAAI,CAAA;AAAA,GACtE,CAAA;AAAA,EACD,IAAIA,UAAK,EAAI,EAAA;AAAA,IACX,IAAM,EAAA,UAAA;AAAA,IACN,KAAO,EAAA,IAAA;AAAA,IACP,aAAA,EAAe,OAAM,CAAiB,CAAA,IAAA;AAAA,IACtC,SAAA,EAAW,CAAM,C
AAA,KAAA,CAAA,CAAiB,IAAQ,IAAA,EAAA;AAAA,IAC1C,UAAY,EAAA,UAAA;AAAA,IACZ,WAAW,CAAC,IAAA,EAAc,SAAkB,IAAI,UAAA,CAAW,MAAM,IAAI,CAAA;AAAA,GACtE,CAAA;AACH,CAAC,EAAA;AASY,MAAA,iBAAA,GAAoB,OAC/B,QAAA,EACA,WACG,KAAA;AACH,EAAI,IAAA;AAMF,IAAA,MAAM,aAAgB,GAAAC,qBAAA,CAAK,IAAK,CAAA,QAAA,EAAU,YAAY,CAAA,CAAA;AACtD,IAAM,MAAA,eAAA,GAAkB,aAAa,IAAQ,IAAA,oBAAA,CAAA;AAC7C,IAAA,MAAM,oBAA6C,GAAA;AAAA,MACjD,SAAW,EAAA,eAAA;AAAA,MACX,QAAU,EAAA,MAAA;AAAA,MACV,OAAA,EAAS,CAAC,eAAe,CAAA;AAAA,KAC3B,CAAA;AAEA,IAAA,MAAMC,mBAAG,CAAA,SAAA;AAAA,MACP,aAAA;AAAA,MACAC,sBAAK,IAAK,CAAA,oBAAA,EAAsB,EAAE,MAAA,EAAQ,eAAe,CAAA;AAAA,KAC3D,CAAA;AAAA,WACO,KAAO,EAAA;AACd,IAAM,MAAA,IAAIC,qBAAe,CAAA,oCAAA,EAAsC,KAAK,CAAA,CAAA;AAAA,GACtE;AACF,EAAA;AAWa,MAAA,YAAA,GAAe,OAC1B,QAAA,EACA,OAC2E,KAAA;AAC3E,EAAI,IAAA,aAAA,CAAA;AACJ,EAAI,IAAA,mBAAA,CAAA;AACJ,EAAI,IAAA;AACF,IAAA,IAAI,SAAS,oBAAsB,EAAA;AACjC,MAAA,aAAA,GAAgBH,qBAAK,CAAA,IAAA,CAAK,QAAU,EAAA,OAAA,CAAQ,oBAAoB,CAAA,CAAA;AAChE,MAAA,IAAI,CAAE,MAAMC,mBAAG,CAAA,UAAA,CAAW,aAAa,CAAI,EAAA;AACzC,QAAA,MAAM,IAAI,KAAA,CAAM,CAAsB,mBAAA,EAAA,aAAa,CAAiB,eAAA,CAAA,CAAA,CAAA;AAAA,OACtE;AAEA,MAAA,mBAAA,GAAsB,MAAMA,mBAAA,CAAG,QAAS,CAAA,aAAA,EAAe,MAAM,CAAA,CAAA;AAC7D,MAAO,OAAA;AAAA,QACL,IAAM,EAAA,aAAA;AAAA,QACN,OAAS,EAAA,mBAAA;AAAA,QACT,iBAAmB,EAAA,KAAA;AAAA,OACrB,CAAA;AAAA,KACF;AAEA,IAAgB,aAAA,GAAAD,qBAAA,CAAK,IAAK,CAAA,QAAA,EAAU,aAAa,CAAA,CAAA;AACjD,IAAA,IAAI,MAAMC,mBAAA,CAAG,UAAW,CAAA,aAAa,CAAG,EAAA;AACtC,MAAA,mBAAA,GAAsB,MAAMA,mBAAA,CAAG,QAAS,CAAA,aAAA,EAAe,MAAM,CAAA,CAAA;AAC7D,MAAO,OAAA;AAAA,QACL,IAAM,EAAA,aAAA;AAAA,QACN,OAAS,EAAA,mBAAA;AAAA,QACT,iBAAmB,EAAA,KAAA;AAAA,OACrB,CAAA;AAAA,KACF;AAEA,IAAgB,aAAA,GAAAD,qBAAA,CAAK,IAAK,CAAA,QAAA,EAAU,YAAY,CAAA,CAAA;AAChD,IAAA,IAAI,MAAMC,mBAAA,CAAG,UAAW,CAAA,aAAa,CAAG,EAAA;AACtC,MAAA,mBAAA,GAAsB,MAAMA,mBAAA,CAAG,QAAS,CAAA,aAAA,EAAe,MAAM,CAAA,CAAA;AAC7D,MAAO,OAAA;AAAA,QACL,IAAM,EAAA,aAAA;AAAA,QACN,OAAS,EAAA,mBAAA;AAAA,QACT,iBAAmB,EAAA,KAAA;AAAA,OACrB,CAAA;AAAA,KACF;AAGA,IAAM,MAAA,iBAAA,CAAkB,UAAU,OAAO,CAAA,CAAA;AACzC,IAAA,mBAAA,GAAsB,MAAMA,mBAAA,CAAG,QAAS,CAAA,aAAA,EAAe,MAAM,CAAA,CAAA;AAAA,WACtD,KAAO,EAAA;AACd,IAAA,MAAM,IAAIE,qBAAA;AAAA,MACR,4FAAA;AAAA,MACA,KAAA;AAAA,KACF,CAAA;AAAA,GACF;AAEA,EAAO,OAAA;AAAA,IACL,IAAM,EAAA,aAAA;AAAA,IACN,OAAS,EAAA,mBAAA;AAAA,IACT,iBAAmB,EAAA,IAAA;AAAA,GACrB,CAAA;AACF,EAAA;AAWa,MAAA,kBAAA,GAAqB,OAChC,QAAA,EACA,mBACgC,KAAA;AAChC,EAAM,MAAA,SAAA,GAAYD,qBAAK,CAAA,IAAA,CAAK,mBAAqB,EAAA;AAAA,IAC/C,MAAQ,EAAA,aAAA;AAAA,GACT,CAAA,CAAA;AAED,EAAA,IAAI,SAAc,KAAA,IAAA,IAAQ,OAAO,SAAA,KAAc,QAAU,EAAA;AACvD,IAAO,OAAA,KAAA,CAAA,CAAA;AAAA,GACT;AAEA,EAAA,MAAM,eAAuC,GAAA,SAAA,CAAA;AAC7C,EACE,IAAA,eAAA,CAAgB,QAChB,IAAA,CAACE,4BAAY,CAAA,QAAA,EAAUC,aAAY,QAAU,EAAA,eAAA,CAAgB,QAAQ,CAAC,CACtE,EAAA;AACA,IAAA,MAAM,IAAI,KAAA;AAAA,MACR,CAAA;AAAA,+FAAA,CAAA;AAAA,KAEF,CAAA;AAAA,GACF;AACA,EAAA,OAAO,eAAgB,CAAA,QAAA,CAAA;AACzB,EAAA;AAOO,MAAM,qBAAqB,OAAO;AAAA,EACvC,QAAA;AAAA,EACA,MAAA;AAAA,EACA,OAAU,GAAA,MAAA;AACZ,CAIM,KAAA;AACJ,EAAA,MAAM,QAAW,GAAAL,qBAAA,CAAK,IAAK,CAAA,QAAA,EAAU,OAAO,CAAA,CAAA;AAC5C,EAAA,MAAM,WAAc,GAAAA,qBAAA,CAAK,IAAK,CAAA,QAAA,EAAU,UAAU,CAAA,CAAA;AAElD,EAAA,IAAI,MAAMC,mBAAA,CAAG,UAAW,CAAA,WAAW,CAAG,EAAA;AACpC,IAAA,OAAA;AAAA,GACF;AACA,EAAA,MAAA,CAAO,KAAK,CAAG,EAAAD,qBAAA,CAAK,KAAK,OAAS,EAAA,UAAU,CAAC,CAAa,WAAA,CAAA,CAAA,CAAA;AAC1D,EAAA,MAAM,SAAY,GAAA;AAAA,IAChBA,qBAAA,CAAK,IAAK,CAAA,QAAA,EAAU,WAAW,CAAA;AAAA,IAC/BA,qBAAA,CAAK,IAAK,CAAA,QAAA,EAAU,WAAW,CAAA;AAAA,IAC/BA,qBAAA,CAAK,IAAK,CAAA,QAAA,EAAU,WAAW,CAAA;AAAA,IAC/BA,qBAAA,CAAK,IAAK,CAAA,QAAA,EAAU,WAAW,CAAA;AAAA,GACjC,CAAA;AAEA,EAAM,MAAAC,mBAAA,CAAG,UAAU,QAAQ,CAAA,CAAA;AAC3B,EAAA,KAAA,MAAW,YAAY,SAAW,EAAA;
AAChC,IAAI,IAAA;AACF,MAAM,MAAAA,mBAAA,CAAG,QAAS,CAAA,QAAA,EAAU,WAAW,CAAA,CAAA;AACvC,MAAA,OAAA;AAAA,aACO,KAAO,EAAA;AACd,MAAA,MAAA,CAAO,KAAK,CAAG,EAAAD,qBAAA,CAAK,SAAS,QAAU,EAAA,QAAQ,CAAC,CAAa,WAAA,CAAA,CAAA,CAAA;AAAA,KAC/D;AAAA,GACF;AAEA,EAAO,MAAA,CAAA,IAAA;AAAA,IACL,CAA6E,0EAAA,EAAA;AAAA,MAC3E,WAAA;AAAA,MACA,GAAG,SAAA;AAAA,KACL,CAAE,IAAK,CAAA,GAAG,CAAC,CAAA,QAAA,CAAA;AAAA,GACb,CAAA;AACF,EAAA;AASa,MAAA,sBAAA,GAAyB,OACpC,oBAAA,EACA,MACkB,KAAA;AAClB,EAAA,MAAM,mBAAsB,GAAA,oBAAA,CACzB,KAAM,CAAAA,qBAAA,CAAK,GAAG,CAAA,CACd,KAAM,CAAA,CAAA,EAAG,CAAE,CAAA,CAAA,CACX,IAAK,CAAAA,qBAAA,CAAK,GAAG,CAAA,CAAA;AAEhB,EAAI,IAAA;AACF,IAAA,MAAMC,mBAAG,CAAA,MAAA,CAAO,oBAAsB,EAAAA,mBAAA,CAAG,UAAU,IAAI,CAAA,CAAA;AAAA,WAChD,GAAK,EAAA;AAEZ,IAAA,MAAMA,oBAAG,SAAU,CAAA,oBAAA,EAAsB,IAAK,CAAA,KAAA,CAAM,IAAI,CAAC,CAAA,CAAA;AAAA,GAC3D;AAEA,EAAI,IAAA,IAAA,CAAA;AACJ,EAAI,IAAA;AACF,IAAO,IAAA,GAAA,MAAMA,mBAAG,CAAA,QAAA,CAAS,oBAAoB,CAAA,CAAA;AAAA,WACtC,GAAK,EAAA;AACZ,IAAAK,kBAAA,CAAY,GAAG,CAAA,CAAA;AACf,IAAA,MAAM,OAAU,GAAA,CAAA,gBAAA,EAAmB,oBAAoB,CAAA,YAAA,EAAe,IAAI,OAAO,CAAA,CAAA,CAAA;AACjF,IAAA,MAAA,CAAO,MAAM,OAAO,CAAA,CAAA;AACpB,IAAM,MAAA,IAAI,MAAM,OAAO,CAAA,CAAA;AAAA,GACzB;AAEA,EAAK,IAAA,CAAA,eAAA,GAAkB,KAAK,GAAI,EAAA,CAAA;AAIhC,EAAI,IAAA;AACF,IAAA,IAAA,CAAK,KAAS,GAAA,CAAA,MAAMC,8BAAuB,CAAA,mBAAmB,CAAG,EAAA,GAAA;AAAA,MAAI,CAAA,IAAA,KACnE,KAAK,OAAQ,CAAA,CAAA,EAAG,mBAAmB,CAAG,EAAAP,qBAAA,CAAK,GAAG,CAAA,CAAA,EAAI,EAAE,CAAA;AAAA,KACtD,CAAA;AAAA,WACO,GAAK,EAAA;AACZ,IAAAM,kBAAA,CAAY,GAAG,CAAA,CAAA;AACf,IAAA,IAAA,CAAK,QAAQ,EAAC,CAAA;AACd,IAAA,MAAA,CAAO,IAAK,CAAA,CAAA,sCAAA,EAAyC,GAAI,CAAA,OAAO,CAAE,CAAA,CAAA,CAAA;AAAA,GACpE;AAEA,EAAM,MAAAL,mBAAA,CAAG,SAAU,CAAA,oBAAA,EAAsB,IAAI,CAAA,CAAA;AAC7C,EAAA,OAAA;AACF,EAAA;AAUa,MAAA,iBAAA,GAAoB,OAC/B,oBAAA,EACA,IACkB,KAAA;AAClB,EAAA,MAAM,IAAO,GAAA,MAAMA,mBAAG,CAAA,QAAA,CAAS,oBAAoB,CAAA,CAAA;AACnD,EAAA,IAAA,CAAK,IAAO,GAAA,IAAA,CAAA;AACZ,EAAM,MAAAA,mBAAA,CAAG,SAAU,CAAA,oBAAA,EAAsB,IAAI,CAAA,CAAA;AAC/C;;;;;;;;;;;;;"}
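
The helpers module restored above resolves the MkDocs config in a fixed order (an explicit mkdocsConfigFileName, then mkdocs.yaml, then mkdocs.yml, then a generated fallback), and validateMkdocsYaml rejects a docs_dir that escapes the input directory. A small sketch of that flow; the directory and site name are placeholders and the package-root imports are assumed:

```typescript
import { getMkdocsYml, validateMkdocsYaml } from '@backstage/plugin-techdocs-node';

// '/tmp/techdocs-build' and the site name are placeholders, not values from this diff.
const mkdocsYml = await getMkdocsYml('/tmp/techdocs-build', { name: 'My Component Docs' });
// configIsTemporary is true when the fallback mkdocs.yml had to be generated on the fly.
if (mkdocsYml.configIsTemporary) {
  // e.g. remove the generated file again after the build
}
// Throws if docs_dir points outside the input directory; otherwise returns docs_dir (or undefined).
const docsDir = await validateMkdocsYaml('/tmp/techdocs-build', mkdocsYml.content);
```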

+++ package/dist/stages/generate/index.cjs.js
@@ -0,0 +1,15 @@
+'use strict';
+
+require('path');
+require('@backstage/integration');
+var helpers = require('./helpers.cjs.js');
+require('fs-extra');
+require('js-yaml');
+require('@backstage/errors');
+require('./DockerContainerRunner.cjs.js');
+
+const getMkDocsYml = helpers.getMkdocsYml;
+
+exports.getMkdocsYml = helpers.getMkdocsYml;
+exports.getMkDocsYml = getMkDocsYml;
+//# sourceMappingURL=index.cjs.js.map

+++ package/dist/stages/generate/index.cjs.js.map
@@ -0,0 +1 @@
{"version":3,"file":"index.cjs.js","sources":["../../../src/stages/generate/index.ts"],"sourcesContent":["/*\n * Copyright 2020 The Backstage Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nexport { TechdocsGenerator } from './techdocs';\nexport { Generators } from './generators';\nexport { getMkdocsYml } from './helpers';\nexport type {\n GeneratorBase,\n GeneratorOptions,\n GeneratorBuilder,\n GeneratorRunOptions,\n SupportedGeneratorKey,\n TechDocsContainerRunner,\n} from './types';\nimport { getMkdocsYml } from './helpers';\n/**\n * @public\n * @deprecated\n * Deprecated in favor of getMkdocsYml (lowercase 'd')\n */\nexport const getMkDocsYml = getMkdocsYml;\n"],"names":["getMkdocsYml"],"mappings":";;;;;;;;;;AAgCO,MAAM,YAAeA,GAAAA;;;;;"}

+++ package/dist/stages/generate/mkdocsPatchers.cjs.js
@@ -0,0 +1,96 @@
+'use strict';
+
+var fs = require('fs-extra');
+var yaml = require('js-yaml');
+var helpers = require('./helpers.cjs.js');
+var errors = require('@backstage/errors');
+
+function _interopDefaultCompat (e) { return e && typeof e === 'object' && 'default' in e ? e : { default: e }; }
+
+var fs__default = /*#__PURE__*/_interopDefaultCompat(fs);
+var yaml__default = /*#__PURE__*/_interopDefaultCompat(yaml);
+
+const patchMkdocsFile = async (mkdocsYmlPath, logger, updateAction) => {
+  let didEdit = false;
+  let mkdocsYmlFileString;
+  try {
+    mkdocsYmlFileString = await fs__default.default.readFile(mkdocsYmlPath, "utf8");
+  } catch (error) {
+    errors.assertError(error);
+    logger.warn(
+      `Could not read MkDocs YAML config file ${mkdocsYmlPath} before running the generator: ${error.message}`
+    );
+    return;
+  }
+  let mkdocsYml;
+  try {
+    mkdocsYml = yaml__default.default.load(mkdocsYmlFileString, { schema: helpers.MKDOCS_SCHEMA });
+    if (typeof mkdocsYml === "string" || typeof mkdocsYml === "undefined") {
+      throw new Error("Bad YAML format.");
+    }
+  } catch (error) {
+    errors.assertError(error);
+    logger.warn(
+      `Error in parsing YAML at ${mkdocsYmlPath} before running the generator. ${error.message}`
+    );
+    return;
+  }
+  didEdit = updateAction(mkdocsYml);
+  try {
+    if (didEdit) {
+      await fs__default.default.writeFile(
+        mkdocsYmlPath,
+        yaml__default.default.dump(mkdocsYml, { schema: helpers.MKDOCS_SCHEMA }),
+        "utf8"
+      );
+    }
+  } catch (error) {
+    errors.assertError(error);
+    logger.warn(
+      `Could not write to ${mkdocsYmlPath} after updating it before running the generator. ${error.message}`
+    );
+    return;
+  }
+};
+const patchMkdocsYmlPreBuild = async (mkdocsYmlPath, logger, parsedLocationAnnotation, scmIntegrations) => {
+  await patchMkdocsFile(mkdocsYmlPath, logger, (mkdocsYml) => {
+    if (!("repo_url" in mkdocsYml) || !("edit_uri" in mkdocsYml)) {
+      const result = helpers.getRepoUrlFromLocationAnnotation(
+        parsedLocationAnnotation,
+        scmIntegrations,
+        mkdocsYml.docs_dir
+      );
+      if (result.repo_url || result.edit_uri) {
+        mkdocsYml.repo_url = mkdocsYml.repo_url || result.repo_url;
+        mkdocsYml.edit_uri = mkdocsYml.edit_uri || result.edit_uri;
+        logger.info(
+          `Set ${JSON.stringify(
+            result
+          )}. You can disable this feature by manually setting 'repo_url' or 'edit_uri' according to the MkDocs documentation at https://www.mkdocs.org/user-guide/configuration/#repo_url`
+        );
+        return true;
+      }
+    }
+    return false;
+  });
+};
+const patchMkdocsYmlWithPlugins = async (mkdocsYmlPath, logger, defaultPlugins = ["techdocs-core"]) => {
+  await patchMkdocsFile(mkdocsYmlPath, logger, (mkdocsYml) => {
+    if (!("plugins" in mkdocsYml)) {
+      mkdocsYml.plugins = defaultPlugins;
+      return true;
+    }
+    let changesMade = false;
+    defaultPlugins.forEach((dp) => {
+      if (!(mkdocsYml.plugins.includes(dp) || mkdocsYml.plugins.some((p) => p.hasOwnProperty(dp)))) {
+        mkdocsYml.plugins = [.../* @__PURE__ */ new Set([...mkdocsYml.plugins, dp])];
+        changesMade = true;
+      }
+    });
+    return changesMade;
+  });
+};
+
+exports.patchMkdocsYmlPreBuild = patchMkdocsYmlPreBuild;
+exports.patchMkdocsYmlWithPlugins = patchMkdocsYmlWithPlugins;
+//# sourceMappingURL=mkdocsPatchers.cjs.js.map
|
|
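To make the behaviour of the added `patchMkdocsYmlWithPlugins` hunk above concrete, the sketch below re-implements its plugin-merge rule in isolation. It is illustrative only and is not code from the package; the `mergeDefaultPlugins` name and the sample `plugins` values are invented for this example. The rule it mirrors: a default plugin counts as already present if it appears either as a plain string entry or as the key of an object entry carrying extra configuration, and is otherwise appended.

// Illustrative TypeScript re-implementation of the merge rule shown in the hunk above.
// Nothing here is imported from @backstage/plugin-techdocs-node.
type MkDocsPlugins = Array<string | Record<string, unknown>>;

function mergeDefaultPlugins(
  plugins: MkDocsPlugins | undefined,
  defaults: string[] = ['techdocs-core'],
): MkDocsPlugins {
  // Missing plugins key: fall back to the defaults wholesale.
  if (plugins === undefined) return [...defaults];
  let merged = [...plugins];
  for (const dp of defaults) {
    // Present either as a string entry or as the key of an object entry with config.
    const present =
      merged.includes(dp) ||
      merged.some(p => typeof p === 'object' && Object.prototype.hasOwnProperty.call(p, dp));
    if (!present) merged = [...new Set([...merged, dp])];
  }
  return merged;
}

// An object-form entry keeps its configuration; 'techdocs-core' is appended once.
console.log(mergeDefaultPlugins([{ search: { lang: 'en' } }]));
// -> [ { search: { lang: 'en' } }, 'techdocs-core' ]
console.log(mergeDefaultPlugins(['techdocs-core']));
// -> [ 'techdocs-core' ]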
@@ -0,0 +1 @@
+{"version":3,"file":"mkdocsPatchers.cjs.js","sources":["../../../src/stages/generate/mkdocsPatchers.ts"],"sourcesContent":[…],"names":["fs","assertError","yaml","MKDOCS_SCHEMA","getRepoUrlFromLocationAnnotation"],"mappings":"…"}
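For orientation, a hedged usage sketch of the two patchers exported by the new module follows. It assumes the functions are reachable through the package's public entry point, which this diff alone does not confirm, and the logger, config, and location-annotation values are placeholders rather than real inputs.

// Hedged sketch only: exercising the new patchers before an MkDocs build.
// The import path and the annotation shape are assumptions, not confirmed by this diff.
import { ScmIntegrations } from '@backstage/integration';
import { ConfigReader } from '@backstage/config';
import {
  patchMkdocsYmlPreBuild,
  patchMkdocsYmlWithPlugins,
} from '@backstage/plugin-techdocs-node';

export async function prepareMkdocsYml(mkdocsYmlPath: string, logger: any) {
  const scmIntegrations = ScmIntegrations.fromConfig(new ConfigReader({}));

  // Fill in repo_url/edit_uri from the entity's location annotation when they are missing.
  await patchMkdocsYmlPreBuild(
    mkdocsYmlPath,
    logger,
    // Placeholder annotation; in practice this comes from parsing the entity's
    // backstage.io/techdocs-ref annotation.
    { type: 'url', target: 'https://github.com/org/repo/tree/main/docs' } as any,
    scmIntegrations,
  );

  // Ensure the techdocs-core plugin is listed in mkdocs.yml before generating docs.
  await patchMkdocsYmlWithPlugins(mkdocsYmlPath, logger, ['techdocs-core']);
}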