@backstage/plugin-techdocs-node 1.14.0 → 1.14.1-next.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +15 -0
- package/dist/helpers.cjs.js +1 -1
- package/dist/helpers.cjs.js.map +1 -1
- package/dist/index.d.ts +1 -1
- package/dist/stages/generate/DockerContainerRunner.cjs.js +4 -4
- package/dist/stages/generate/DockerContainerRunner.cjs.js.map +1 -1
- package/dist/stages/generate/helpers.cjs.js +5 -5
- package/dist/stages/generate/helpers.cjs.js.map +1 -1
- package/dist/stages/generate/index.cjs.js +1 -1
- package/dist/stages/generate/techdocs.cjs.js +1 -1
- package/dist/stages/generate/techdocs.cjs.js.map +1 -1
- package/dist/stages/publish/awsS3.cjs.js +1 -1
- package/dist/stages/publish/awsS3.cjs.js.map +1 -1
- package/dist/stages/publish/azureBlobStorage.cjs.js +1 -1
- package/dist/stages/publish/azureBlobStorage.cjs.js.map +1 -1
- package/dist/stages/publish/googleStorage.cjs.js +1 -1
- package/dist/stages/publish/googleStorage.cjs.js.map +1 -1
- package/dist/stages/publish/helpers.cjs.js +1 -1
- package/dist/stages/publish/helpers.cjs.js.map +1 -1
- package/dist/stages/publish/local.cjs.js +2 -2
- package/dist/stages/publish/local.cjs.js.map +1 -1
- package/dist/stages/publish/migrations/GoogleMigration.cjs.js +2 -2
- package/dist/stages/publish/migrations/GoogleMigration.cjs.js.map +1 -1
- package/dist/stages/publish/openStackSwift.cjs.js +6 -6
- package/dist/stages/publish/openStackSwift.cjs.js.map +1 -1
- package/package.json +11 -11
package/CHANGELOG.md
CHANGED
@@ -1,5 +1,20 @@
 # @backstage/plugin-techdocs-node
 
+## 1.14.1-next.0
+
+### Patch Changes
+
+- 7455dae: Use node prefix on native imports
+- Updated dependencies
+  - @backstage/integration-aws-node@0.1.20-next.0
+  - @backstage/backend-plugin-api@1.7.0-next.0
+  - @backstage/plugin-search-common@1.2.22-next.0
+  - @backstage/integration@1.19.3-next.0
+  - @backstage/catalog-model@1.7.6
+  - @backstage/config@1.3.6
+  - @backstage/errors@1.2.7
+  - @backstage/plugin-techdocs-common@0.1.1
+
 ## 1.14.0
 
 ### Minor Changes
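The "Use node prefix on native imports" patch (7455dae) swaps bare Node.js built-in specifiers for their node:-prefixed equivalents throughout the compiled output, as the file diffs below show. A minimal TypeScript sketch of what that change looks like in source form (illustrative only, not copied from the package):

    // Before: the built-in module is referenced by its bare name.
    import path from 'path';
    import { Writable } from 'stream';

    // After: the node: prefix marks the import explicitly as a Node.js built-in.
    import path from 'node:path';
    import { Writable } from 'node:stream';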
package/dist/helpers.cjs.js
CHANGED
@@ -4,7 +4,7 @@ var backendPluginApi = require('@backstage/backend-plugin-api');
 var catalogModel = require('@backstage/catalog-model');
 var errors = require('@backstage/errors');
 var pluginTechdocsCommon = require('@backstage/plugin-techdocs-common');
-var path = require('path');
+var path = require('node:path');
 
 function _interopDefaultCompat (e) { return e && typeof e === 'object' && 'default' in e ? e : { default: e }; }
 
package/dist/helpers.cjs.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"helpers.cjs.js","sources":["../src/helpers.ts"],"sourcesContent":["/*\n * Copyright 2020 The Backstage Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport {\n LoggerService,\n UrlReaderService,\n resolveSafeChildPath,\n} from '@backstage/backend-plugin-api';\nimport {\n Entity,\n getEntitySourceLocation,\n parseLocationRef,\n} from '@backstage/catalog-model';\nimport { InputError } from '@backstage/errors';\nimport { ScmIntegrationRegistry } from '@backstage/integration';\nimport { TECHDOCS_ANNOTATION } from '@backstage/plugin-techdocs-common';\nimport path from 'path';\nimport { PreparerResponse, RemoteProtocol } from './stages/prepare/types';\n\n/**\n * Parsed location annotation\n * @public\n */\nexport type ParsedLocationAnnotation = {\n type: RemoteProtocol;\n target: string;\n};\n\n/**\n * Returns a parsed locations annotation\n * @public\n * @param annotationName - The name of the annotation in the entity metadata\n * @param entity - A TechDocs entity instance\n */\nexport const parseReferenceAnnotation = (\n annotationName: string,\n entity: Entity,\n): ParsedLocationAnnotation => {\n const annotation = entity.metadata.annotations?.[annotationName];\n if (!annotation) {\n throw new InputError(\n `No ${annotationName} annotation provided in entity: ${entity.metadata.name}`,\n );\n }\n\n const { type, target } = parseLocationRef(annotation);\n return {\n type: type as RemoteProtocol,\n target,\n };\n};\n\n/**\n * TechDocs references of type `dir` are relative the source location of the entity.\n * This function transforms relative references to absolute ones, based on the\n * location the entity was ingested from. If the entity was registered by a `url`\n * location, it returns a `url` location with a resolved target that points to the\n * targeted subfolder. 
If the entity was registered by a `file` location, it returns\n * an absolute `dir` location.\n * @public\n * @param entity - the entity with annotations\n * @param dirAnnotation - the parsed techdocs-ref annotation of type 'dir'\n * @param scmIntegrations - access to the scmIntegration to do url transformations\n * @throws if the entity doesn't specify a `dir` location or is ingested from an unsupported location.\n * @returns the transformed location with an absolute target.\n */\nexport const transformDirLocation = (\n entity: Entity,\n dirAnnotation: ParsedLocationAnnotation,\n scmIntegrations: ScmIntegrationRegistry,\n): { type: 'dir' | 'url'; target: string } => {\n const location = getEntitySourceLocation(entity);\n\n switch (location.type) {\n case 'url': {\n const target = scmIntegrations.resolveUrl({\n url: dirAnnotation.target,\n base: location.target,\n });\n\n return {\n type: 'url',\n target,\n };\n }\n\n case 'file': {\n // only permit targets in the same folder as the target of the `file` location!\n const target = resolveSafeChildPath(\n path.dirname(location.target),\n dirAnnotation.target,\n );\n\n return {\n type: 'dir',\n target,\n };\n }\n\n default:\n throw new InputError(`Unable to resolve location type ${location.type}`);\n }\n};\n\n/**\n * Returns an entity reference based on the TechDocs annotation type\n * @public\n * @param entity - A TechDocs instance\n * @param scmIntegration - An implementation for SCM integration API\n */\nexport const getLocationForEntity = (\n entity: Entity,\n scmIntegration: ScmIntegrationRegistry,\n): ParsedLocationAnnotation => {\n const annotation = parseReferenceAnnotation(TECHDOCS_ANNOTATION, entity);\n\n switch (annotation.type) {\n case 'url':\n return annotation;\n case 'dir':\n return transformDirLocation(entity, annotation, scmIntegration);\n default:\n throw new Error(`Invalid reference annotation ${annotation.type}`);\n }\n};\n\n/**\n * Returns a preparer response {@link PreparerResponse}\n * @public\n * @param reader - Read a tree of files from a repository\n * @param entity - A TechDocs entity instance\n * @param opts - Options for configuring the reader, e.g. 
logger, etag, etc.\n */\nexport const getDocFilesFromRepository = async (\n reader: UrlReaderService,\n entity: Entity,\n opts?: { etag?: string; logger?: LoggerService },\n): Promise<PreparerResponse> => {\n const { target } = parseReferenceAnnotation(TECHDOCS_ANNOTATION, entity);\n\n opts?.logger?.debug(`Reading files from ${target}`);\n // readTree will throw NotModifiedError if etag has not changed.\n const readTreeResponse = await reader.readTree(target, { etag: opts?.etag });\n const preparedDir = await readTreeResponse.dir();\n\n opts?.logger?.debug(`Tree downloaded and stored at ${preparedDir}`);\n\n return {\n preparedDir,\n etag: readTreeResponse.etag,\n };\n};\n"],"names":["InputError","parseLocationRef","getEntitySourceLocation","resolveSafeChildPath","path","TECHDOCS_ANNOTATION"],"mappings":";;;;;;;;;;;;AA+CO,MAAM,wBAAA,GAA2B,CACtC,cAAA,EACA,MAAA,KAC6B;AAC7B,EAAA,MAAM,UAAA,GAAa,MAAA,CAAO,QAAA,CAAS,WAAA,GAAc,cAAc,CAAA;AAC/D,EAAA,IAAI,CAAC,UAAA,EAAY;AACf,IAAA,MAAM,IAAIA,iBAAA;AAAA,MACR,CAAA,GAAA,EAAM,cAAc,CAAA,gCAAA,EAAmC,MAAA,CAAO,SAAS,IAAI,CAAA;AAAA,KAC7E;AAAA,EACF;AAEA,EAAA,MAAM,EAAE,IAAA,EAAM,MAAA,EAAO,GAAIC,8BAAiB,UAAU,CAAA;AACpD,EAAA,OAAO;AAAA,IACL,IAAA;AAAA,IACA;AAAA,GACF;AACF;AAgBO,MAAM,oBAAA,GAAuB,CAClC,MAAA,EACA,aAAA,EACA,eAAA,KAC4C;AAC5C,EAAA,MAAM,QAAA,GAAWC,qCAAwB,MAAM,CAAA;AAE/C,EAAA,QAAQ,SAAS,IAAA;AAAM,IACrB,KAAK,KAAA,EAAO;AACV,MAAA,MAAM,MAAA,GAAS,gBAAgB,UAAA,CAAW;AAAA,QACxC,KAAK,aAAA,CAAc,MAAA;AAAA,QACnB,MAAM,QAAA,CAAS;AAAA,OAChB,CAAA;AAED,MAAA,OAAO;AAAA,QACL,IAAA,EAAM,KAAA;AAAA,QACN;AAAA,OACF;AAAA,IACF;AAAA,IAEA,KAAK,MAAA,EAAQ;AAEX,MAAA,MAAM,MAAA,GAASC,qCAAA;AAAA,QACbC,qBAAA,CAAK,OAAA,CAAQ,QAAA,CAAS,MAAM,CAAA;AAAA,QAC5B,aAAA,CAAc;AAAA,OAChB;AAEA,MAAA,OAAO;AAAA,QACL,IAAA,EAAM,KAAA;AAAA,QACN;AAAA,OACF;AAAA,IACF;AAAA,IAEA;AACE,MAAA,MAAM,IAAIJ,iBAAA,CAAW,CAAA,gCAAA,EAAmC,QAAA,CAAS,IAAI,CAAA,CAAE,CAAA;AAAA;AAE7E;AAQO,MAAM,oBAAA,GAAuB,CAClC,MAAA,EACA,cAAA,KAC6B;AAC7B,EAAA,MAAM,UAAA,GAAa,wBAAA,CAAyBK,wCAAA,EAAqB,MAAM,CAAA;AAEvE,EAAA,QAAQ,WAAW,IAAA;AAAM,IACvB,KAAK,KAAA;AACH,MAAA,OAAO,UAAA;AAAA,IACT,KAAK,KAAA;AACH,MAAA,OAAO,oBAAA,CAAqB,MAAA,EAAQ,UAAA,EAAY,cAAc,CAAA;AAAA,IAChE;AACE,MAAA,MAAM,IAAI,KAAA,CAAM,CAAA,6BAAA,EAAgC,UAAA,CAAW,IAAI,CAAA,CAAE,CAAA;AAAA;AAEvE;AASO,MAAM,yBAAA,GAA4B,OACvC,MAAA,EACA,MAAA,EACA,IAAA,KAC8B;AAC9B,EAAA,MAAM,EAAE,MAAA,EAAO,GAAI,wBAAA,CAAyBA,0CAAqB,MAAM,CAAA;AAEvE,EAAA,IAAA,EAAM,MAAA,EAAQ,KAAA,CAAM,CAAA,mBAAA,EAAsB,MAAM,CAAA,CAAE,CAAA;AAElD,EAAA,MAAM,gBAAA,GAAmB,MAAM,MAAA,CAAO,QAAA,CAAS,QAAQ,EAAE,IAAA,EAAM,IAAA,EAAM,IAAA,EAAM,CAAA;AAC3E,EAAA,MAAM,WAAA,GAAc,MAAM,gBAAA,CAAiB,GAAA,EAAI;AAE/C,EAAA,IAAA,EAAM,MAAA,EAAQ,KAAA,CAAM,CAAA,8BAAA,EAAiC,WAAW,CAAA,CAAE,CAAA;AAElE,EAAA,OAAO;AAAA,IACL,WAAA;AAAA,IACA,MAAM,gBAAA,CAAiB;AAAA,GACzB;AACF;;;;;;;"}
+
{"version":3,"file":"helpers.cjs.js","sources":["../src/helpers.ts"],"sourcesContent":["/*\n * Copyright 2020 The Backstage Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport {\n LoggerService,\n UrlReaderService,\n resolveSafeChildPath,\n} from '@backstage/backend-plugin-api';\nimport {\n Entity,\n getEntitySourceLocation,\n parseLocationRef,\n} from '@backstage/catalog-model';\nimport { InputError } from '@backstage/errors';\nimport { ScmIntegrationRegistry } from '@backstage/integration';\nimport { TECHDOCS_ANNOTATION } from '@backstage/plugin-techdocs-common';\nimport path from 'node:path';\nimport { PreparerResponse, RemoteProtocol } from './stages/prepare/types';\n\n/**\n * Parsed location annotation\n * @public\n */\nexport type ParsedLocationAnnotation = {\n type: RemoteProtocol;\n target: string;\n};\n\n/**\n * Returns a parsed locations annotation\n * @public\n * @param annotationName - The name of the annotation in the entity metadata\n * @param entity - A TechDocs entity instance\n */\nexport const parseReferenceAnnotation = (\n annotationName: string,\n entity: Entity,\n): ParsedLocationAnnotation => {\n const annotation = entity.metadata.annotations?.[annotationName];\n if (!annotation) {\n throw new InputError(\n `No ${annotationName} annotation provided in entity: ${entity.metadata.name}`,\n );\n }\n\n const { type, target } = parseLocationRef(annotation);\n return {\n type: type as RemoteProtocol,\n target,\n };\n};\n\n/**\n * TechDocs references of type `dir` are relative the source location of the entity.\n * This function transforms relative references to absolute ones, based on the\n * location the entity was ingested from. If the entity was registered by a `url`\n * location, it returns a `url` location with a resolved target that points to the\n * targeted subfolder. 
If the entity was registered by a `file` location, it returns\n * an absolute `dir` location.\n * @public\n * @param entity - the entity with annotations\n * @param dirAnnotation - the parsed techdocs-ref annotation of type 'dir'\n * @param scmIntegrations - access to the scmIntegration to do url transformations\n * @throws if the entity doesn't specify a `dir` location or is ingested from an unsupported location.\n * @returns the transformed location with an absolute target.\n */\nexport const transformDirLocation = (\n entity: Entity,\n dirAnnotation: ParsedLocationAnnotation,\n scmIntegrations: ScmIntegrationRegistry,\n): { type: 'dir' | 'url'; target: string } => {\n const location = getEntitySourceLocation(entity);\n\n switch (location.type) {\n case 'url': {\n const target = scmIntegrations.resolveUrl({\n url: dirAnnotation.target,\n base: location.target,\n });\n\n return {\n type: 'url',\n target,\n };\n }\n\n case 'file': {\n // only permit targets in the same folder as the target of the `file` location!\n const target = resolveSafeChildPath(\n path.dirname(location.target),\n dirAnnotation.target,\n );\n\n return {\n type: 'dir',\n target,\n };\n }\n\n default:\n throw new InputError(`Unable to resolve location type ${location.type}`);\n }\n};\n\n/**\n * Returns an entity reference based on the TechDocs annotation type\n * @public\n * @param entity - A TechDocs instance\n * @param scmIntegration - An implementation for SCM integration API\n */\nexport const getLocationForEntity = (\n entity: Entity,\n scmIntegration: ScmIntegrationRegistry,\n): ParsedLocationAnnotation => {\n const annotation = parseReferenceAnnotation(TECHDOCS_ANNOTATION, entity);\n\n switch (annotation.type) {\n case 'url':\n return annotation;\n case 'dir':\n return transformDirLocation(entity, annotation, scmIntegration);\n default:\n throw new Error(`Invalid reference annotation ${annotation.type}`);\n }\n};\n\n/**\n * Returns a preparer response {@link PreparerResponse}\n * @public\n * @param reader - Read a tree of files from a repository\n * @param entity - A TechDocs entity instance\n * @param opts - Options for configuring the reader, e.g. 
logger, etag, etc.\n */\nexport const getDocFilesFromRepository = async (\n reader: UrlReaderService,\n entity: Entity,\n opts?: { etag?: string; logger?: LoggerService },\n): Promise<PreparerResponse> => {\n const { target } = parseReferenceAnnotation(TECHDOCS_ANNOTATION, entity);\n\n opts?.logger?.debug(`Reading files from ${target}`);\n // readTree will throw NotModifiedError if etag has not changed.\n const readTreeResponse = await reader.readTree(target, { etag: opts?.etag });\n const preparedDir = await readTreeResponse.dir();\n\n opts?.logger?.debug(`Tree downloaded and stored at ${preparedDir}`);\n\n return {\n preparedDir,\n etag: readTreeResponse.etag,\n };\n};\n"],"names":["InputError","parseLocationRef","getEntitySourceLocation","resolveSafeChildPath","path","TECHDOCS_ANNOTATION"],"mappings":";;;;;;;;;;;;AA+CO,MAAM,wBAAA,GAA2B,CACtC,cAAA,EACA,MAAA,KAC6B;AAC7B,EAAA,MAAM,UAAA,GAAa,MAAA,CAAO,QAAA,CAAS,WAAA,GAAc,cAAc,CAAA;AAC/D,EAAA,IAAI,CAAC,UAAA,EAAY;AACf,IAAA,MAAM,IAAIA,iBAAA;AAAA,MACR,CAAA,GAAA,EAAM,cAAc,CAAA,gCAAA,EAAmC,MAAA,CAAO,SAAS,IAAI,CAAA;AAAA,KAC7E;AAAA,EACF;AAEA,EAAA,MAAM,EAAE,IAAA,EAAM,MAAA,EAAO,GAAIC,8BAAiB,UAAU,CAAA;AACpD,EAAA,OAAO;AAAA,IACL,IAAA;AAAA,IACA;AAAA,GACF;AACF;AAgBO,MAAM,oBAAA,GAAuB,CAClC,MAAA,EACA,aAAA,EACA,eAAA,KAC4C;AAC5C,EAAA,MAAM,QAAA,GAAWC,qCAAwB,MAAM,CAAA;AAE/C,EAAA,QAAQ,SAAS,IAAA;AAAM,IACrB,KAAK,KAAA,EAAO;AACV,MAAA,MAAM,MAAA,GAAS,gBAAgB,UAAA,CAAW;AAAA,QACxC,KAAK,aAAA,CAAc,MAAA;AAAA,QACnB,MAAM,QAAA,CAAS;AAAA,OAChB,CAAA;AAED,MAAA,OAAO;AAAA,QACL,IAAA,EAAM,KAAA;AAAA,QACN;AAAA,OACF;AAAA,IACF;AAAA,IAEA,KAAK,MAAA,EAAQ;AAEX,MAAA,MAAM,MAAA,GAASC,qCAAA;AAAA,QACbC,qBAAA,CAAK,OAAA,CAAQ,QAAA,CAAS,MAAM,CAAA;AAAA,QAC5B,aAAA,CAAc;AAAA,OAChB;AAEA,MAAA,OAAO;AAAA,QACL,IAAA,EAAM,KAAA;AAAA,QACN;AAAA,OACF;AAAA,IACF;AAAA,IAEA;AACE,MAAA,MAAM,IAAIJ,iBAAA,CAAW,CAAA,gCAAA,EAAmC,QAAA,CAAS,IAAI,CAAA,CAAE,CAAA;AAAA;AAE7E;AAQO,MAAM,oBAAA,GAAuB,CAClC,MAAA,EACA,cAAA,KAC6B;AAC7B,EAAA,MAAM,UAAA,GAAa,wBAAA,CAAyBK,wCAAA,EAAqB,MAAM,CAAA;AAEvE,EAAA,QAAQ,WAAW,IAAA;AAAM,IACvB,KAAK,KAAA;AACH,MAAA,OAAO,UAAA;AAAA,IACT,KAAK,KAAA;AACH,MAAA,OAAO,oBAAA,CAAqB,MAAA,EAAQ,UAAA,EAAY,cAAc,CAAA;AAAA,IAChE;AACE,MAAA,MAAM,IAAI,KAAA,CAAM,CAAA,6BAAA,EAAgC,UAAA,CAAW,IAAI,CAAA,CAAE,CAAA;AAAA;AAEvE;AASO,MAAM,yBAAA,GAA4B,OACvC,MAAA,EACA,MAAA,EACA,IAAA,KAC8B;AAC9B,EAAA,MAAM,EAAE,MAAA,EAAO,GAAI,wBAAA,CAAyBA,0CAAqB,MAAM,CAAA;AAEvE,EAAA,IAAA,EAAM,MAAA,EAAQ,KAAA,CAAM,CAAA,mBAAA,EAAsB,MAAM,CAAA,CAAE,CAAA;AAElD,EAAA,MAAM,gBAAA,GAAmB,MAAM,MAAA,CAAO,QAAA,CAAS,QAAQ,EAAE,IAAA,EAAM,IAAA,EAAM,IAAA,EAAM,CAAA;AAC3E,EAAA,MAAM,WAAA,GAAc,MAAM,gBAAA,CAAiB,GAAA,EAAI;AAE/C,EAAA,IAAA,EAAM,MAAA,EAAQ,KAAA,CAAM,CAAA,8BAAA,EAAiC,WAAW,CAAA,CAAE,CAAA;AAElE,EAAA,OAAO;AAAA,IACL,WAAA;AAAA,IACA,MAAM,gBAAA,CAAiB;AAAA,GACzB;AACF;;;;;;;"}
package/dist/index.d.ts
CHANGED
@@ -6,7 +6,7 @@ import express from 'express';
 import { StorageOptions } from '@google-cloud/storage';
 import { ScmIntegrationRegistry } from '@backstage/integration';
 import { IndexableDocument } from '@backstage/plugin-search-common';
-import { Writable } from 'stream';
+import { Writable } from 'node:stream';
 import * as winston from 'winston';
 import { Logger } from 'winston';
 
package/dist/stages/generate/DockerContainerRunner.cjs.js
CHANGED
@@ -3,15 +3,15 @@
 var Docker = require('dockerode');
 var fs = require('fs-extra');
 var errors = require('@backstage/errors');
-var
-var
+var node_stream = require('node:stream');
+var node_util = require('node:util');
 
 function _interopDefaultCompat (e) { return e && typeof e === 'object' && 'default' in e ? e : { default: e }; }
 
 var Docker__default = /*#__PURE__*/_interopDefaultCompat(Docker);
 var fs__default = /*#__PURE__*/_interopDefaultCompat(fs);
 
-const pipeline =
+const pipeline = node_util.promisify(node_stream.pipeline);
 class DockerContainerRunner {
 dockerClient;
 constructor() {
@@ -22,7 +22,7 @@ class DockerContainerRunner {
 imageName,
 command,
 args,
-logStream = new
+logStream = new node_stream.PassThrough(),
 mountDirs = {},
 workingDir,
 envVars = {},
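The runner change above keeps the same promisified-pipeline pattern, now built from the node:-prefixed modules. A minimal sketch of that pattern (the forwardLogs helper is hypothetical, not part of the package):

    import { PassThrough, Readable, pipeline as pipelineStream } from 'node:stream';
    import { promisify } from 'node:util';

    const pipeline = promisify(pipelineStream);

    // Await a readable source (e.g. a docker pull progress stream) being
    // drained into a writable log sink, similar to how DockerContainerRunner
    // forwards docker pull output into its log stream.
    async function forwardLogs(source: Readable, logStream = new PassThrough()) {
      await pipeline(source, logStream);
    }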
package/dist/stages/generate/DockerContainerRunner.cjs.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"DockerContainerRunner.cjs.js","sources":["../../../src/stages/generate/DockerContainerRunner.ts"],"sourcesContent":["/*\n * Copyright 2020 The Backstage Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport Docker from 'dockerode';\nimport fs from 'fs-extra';\nimport { ForwardedError } from '@backstage/errors';\nimport { PassThrough } from 'stream';\nimport { pipeline as pipelineStream } from 'stream';\nimport { promisify } from 'util';\nimport { TechDocsContainerRunner } from './types';\nimport { Writable } from 'stream';\n\nconst pipeline = promisify(pipelineStream);\n\nexport type UserOptions = {\n User?: string;\n};\n\n/**\n * @internal\n */\nexport class DockerContainerRunner implements TechDocsContainerRunner {\n private readonly dockerClient: Docker;\n\n constructor() {\n this.dockerClient = new Docker();\n }\n\n async runContainer(options: {\n imageName: string;\n command?: string | string[];\n args: string[];\n logStream?: Writable;\n mountDirs?: Record<string, string>;\n workingDir?: string;\n envVars?: Record<string, string>;\n pullImage?: boolean;\n defaultUser?: boolean;\n }) {\n const {\n imageName,\n command,\n args,\n logStream = new PassThrough(),\n mountDirs = {},\n workingDir,\n envVars = {},\n pullImage = true,\n defaultUser = false,\n } = options;\n\n // Show a better error message when Docker is unavailable.\n try {\n await this.dockerClient.ping();\n } catch (e) {\n throw new ForwardedError(\n 'This operation requires Docker. Docker does not appear to be available. Docker.ping() failed with',\n e,\n );\n }\n\n if (pullImage) {\n await new Promise<void>((resolve, reject) => {\n this.dockerClient.pull(imageName, {}, (err, stream) => {\n if (err) {\n reject(err);\n } else if (!stream) {\n reject(\n new Error(\n 'Unexpected error: no stream returned from Docker while pulling image',\n ),\n );\n } else {\n pipeline(stream, logStream, { end: false })\n .then(resolve)\n .catch(reject);\n }\n });\n });\n }\n\n const userOptions: UserOptions = {};\n if (!defaultUser && process.getuid && process.getgid) {\n // Files that are created inside the Docker container will be owned by\n // root on the host system on non Mac systems, because of reasons. 
Mainly the fact that\n // volume sharing is done using NFS on Mac and actual mounts in Linux world.\n // So we set the user in the container as the same user and group id as the host.\n // On Windows we don't have process.getuid nor process.getgid\n userOptions.User = `${process.getuid()}:${process.getgid()}`;\n }\n\n // Initialize volumes to mount based on mountDirs map\n const Volumes: { [T: string]: object } = {};\n for (const containerDir of Object.values(mountDirs)) {\n Volumes[containerDir] = {};\n }\n\n // Create bind volumes\n const Binds: string[] = [];\n for (const [hostDir, containerDir] of Object.entries(mountDirs)) {\n // Need to use realpath here as Docker mounting does not like\n // symlinks for binding volumes\n const realHostDir = await fs.realpath(hostDir);\n Binds.push(`${realHostDir}:${containerDir}`);\n }\n\n // Create docker environment variables array\n const Env = new Array<string>();\n for (const [key, value] of Object.entries(envVars)) {\n Env.push(`${key}=${value}`);\n }\n\n const [{ Error: error, StatusCode: statusCode }] =\n await this.dockerClient.run(imageName, args, logStream, {\n Volumes,\n HostConfig: {\n AutoRemove: true,\n Binds,\n },\n ...(workingDir ? { WorkingDir: workingDir } : {}),\n Entrypoint: command,\n Env,\n ...userOptions,\n } as Docker.ContainerCreateOptions);\n\n if (error) {\n throw new Error(\n `Docker failed to run with the following error message: ${error}`,\n );\n }\n\n if (statusCode !== 0) {\n throw new Error(\n `Docker container returned a non-zero exit code (${statusCode})`,\n );\n }\n }\n}\n"],"names":["promisify","pipelineStream","Docker","PassThrough","ForwardedError","fs"],"mappings":";;;;;;;;;;;;;AAyBA,MAAM,QAAA,GAAWA,
+
{"version":3,"file":"DockerContainerRunner.cjs.js","sources":["../../../src/stages/generate/DockerContainerRunner.ts"],"sourcesContent":["/*\n * Copyright 2020 The Backstage Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport Docker from 'dockerode';\nimport fs from 'fs-extra';\nimport { ForwardedError } from '@backstage/errors';\nimport { PassThrough } from 'node:stream';\nimport { pipeline as pipelineStream } from 'node:stream';\nimport { promisify } from 'node:util';\nimport { TechDocsContainerRunner } from './types';\nimport { Writable } from 'node:stream';\n\nconst pipeline = promisify(pipelineStream);\n\nexport type UserOptions = {\n User?: string;\n};\n\n/**\n * @internal\n */\nexport class DockerContainerRunner implements TechDocsContainerRunner {\n private readonly dockerClient: Docker;\n\n constructor() {\n this.dockerClient = new Docker();\n }\n\n async runContainer(options: {\n imageName: string;\n command?: string | string[];\n args: string[];\n logStream?: Writable;\n mountDirs?: Record<string, string>;\n workingDir?: string;\n envVars?: Record<string, string>;\n pullImage?: boolean;\n defaultUser?: boolean;\n }) {\n const {\n imageName,\n command,\n args,\n logStream = new PassThrough(),\n mountDirs = {},\n workingDir,\n envVars = {},\n pullImage = true,\n defaultUser = false,\n } = options;\n\n // Show a better error message when Docker is unavailable.\n try {\n await this.dockerClient.ping();\n } catch (e) {\n throw new ForwardedError(\n 'This operation requires Docker. Docker does not appear to be available. Docker.ping() failed with',\n e,\n );\n }\n\n if (pullImage) {\n await new Promise<void>((resolve, reject) => {\n this.dockerClient.pull(imageName, {}, (err, stream) => {\n if (err) {\n reject(err);\n } else if (!stream) {\n reject(\n new Error(\n 'Unexpected error: no stream returned from Docker while pulling image',\n ),\n );\n } else {\n pipeline(stream, logStream, { end: false })\n .then(resolve)\n .catch(reject);\n }\n });\n });\n }\n\n const userOptions: UserOptions = {};\n if (!defaultUser && process.getuid && process.getgid) {\n // Files that are created inside the Docker container will be owned by\n // root on the host system on non Mac systems, because of reasons. 
Mainly the fact that\n // volume sharing is done using NFS on Mac and actual mounts in Linux world.\n // So we set the user in the container as the same user and group id as the host.\n // On Windows we don't have process.getuid nor process.getgid\n userOptions.User = `${process.getuid()}:${process.getgid()}`;\n }\n\n // Initialize volumes to mount based on mountDirs map\n const Volumes: { [T: string]: object } = {};\n for (const containerDir of Object.values(mountDirs)) {\n Volumes[containerDir] = {};\n }\n\n // Create bind volumes\n const Binds: string[] = [];\n for (const [hostDir, containerDir] of Object.entries(mountDirs)) {\n // Need to use realpath here as Docker mounting does not like\n // symlinks for binding volumes\n const realHostDir = await fs.realpath(hostDir);\n Binds.push(`${realHostDir}:${containerDir}`);\n }\n\n // Create docker environment variables array\n const Env = new Array<string>();\n for (const [key, value] of Object.entries(envVars)) {\n Env.push(`${key}=${value}`);\n }\n\n const [{ Error: error, StatusCode: statusCode }] =\n await this.dockerClient.run(imageName, args, logStream, {\n Volumes,\n HostConfig: {\n AutoRemove: true,\n Binds,\n },\n ...(workingDir ? { WorkingDir: workingDir } : {}),\n Entrypoint: command,\n Env,\n ...userOptions,\n } as Docker.ContainerCreateOptions);\n\n if (error) {\n throw new Error(\n `Docker failed to run with the following error message: ${error}`,\n );\n }\n\n if (statusCode !== 0) {\n throw new Error(\n `Docker container returned a non-zero exit code (${statusCode})`,\n );\n }\n }\n}\n"],"names":["promisify","pipelineStream","Docker","PassThrough","ForwardedError","fs"],"mappings":";;;;;;;;;;;;;AAyBA,MAAM,QAAA,GAAWA,oBAAUC,oBAAc,CAAA;AASlC,MAAM,qBAAA,CAAyD;AAAA,EACnD,YAAA;AAAA,EAEjB,WAAA,GAAc;AACZ,IAAA,IAAA,CAAK,YAAA,GAAe,IAAIC,uBAAA,EAAO;AAAA,EACjC;AAAA,EAEA,MAAM,aAAa,OAAA,EAUhB;AACD,IAAA,MAAM;AAAA,MACJ,SAAA;AAAA,MACA,OAAA;AAAA,MACA,IAAA;AAAA,MACA,SAAA,GAAY,IAAIC,uBAAA,EAAY;AAAA,MAC5B,YAAY,EAAC;AAAA,MACb,UAAA;AAAA,MACA,UAAU,EAAC;AAAA,MACX,SAAA,GAAY,IAAA;AAAA,MACZ,WAAA,GAAc;AAAA,KAChB,GAAI,OAAA;AAGJ,IAAA,IAAI;AACF,MAAA,MAAM,IAAA,CAAK,aAAa,IAAA,EAAK;AAAA,IAC/B,SAAS,CAAA,EAAG;AACV,MAAA,MAAM,IAAIC,qBAAA;AAAA,QACR,mGAAA;AAAA,QACA;AAAA,OACF;AAAA,IACF;AAEA,IAAA,IAAI,SAAA,EAAW;AACb,MAAA,MAAM,IAAI,OAAA,CAAc,CAAC,OAAA,EAAS,MAAA,KAAW;AAC3C,QAAA,IAAA,CAAK,aAAa,IAAA,CAAK,SAAA,EAAW,EAAC,EAAG,CAAC,KAAK,MAAA,KAAW;AACrD,UAAA,IAAI,GAAA,EAAK;AACP,YAAA,MAAA,CAAO,GAAG,CAAA;AAAA,UACZ,CAAA,MAAA,IAAW,CAAC,MAAA,EAAQ;AAClB,YAAA,MAAA;AAAA,cACE,IAAI,KAAA;AAAA,gBACF;AAAA;AACF,aACF;AAAA,UACF,CAAA,MAAO;AACL,YAAA,QAAA,CAAS,MAAA,EAAQ,SAAA,EAAW,EAAE,GAAA,EAAK,KAAA,EAAO,CAAA,CACvC,IAAA,CAAK,OAAO,CAAA,CACZ,KAAA,CAAM,MAAM,CAAA;AAAA,UACjB;AAAA,QACF,CAAC,CAAA;AAAA,MACH,CAAC,CAAA;AAAA,IACH;AAEA,IAAA,MAAM,cAA2B,EAAC;AAClC,IAAA,IAAI,CAAC,WAAA,IAAe,OAAA,CAAQ,MAAA,IAAU,QAAQ,MAAA,EAAQ;AAMpD,MAAA,WAAA,CAAY,IAAA,GAAO,GAAG,OAAA,CAAQ,MAAA,EAAQ,CAAA,CAAA,EAAI,OAAA,CAAQ,QAAQ,CAAA,CAAA;AAAA,IAC5D;AAGA,IAAA,MAAM,UAAmC,EAAC;AAC1C,IAAA,KAAA,MAAW,YAAA,IAAgB,MAAA,CAAO,MAAA,CAAO,SAAS,CAAA,EAAG;AACnD,MAAA,OAAA,CAAQ,YAAY,IAAI,EAAC;AAAA,IAC3B;AAGA,IAAA,MAAM,QAAkB,EAAC;AACzB,IAAA,KAAA,MAAW,CAAC,OAAA,EAAS,YAAY,KAAK,MAAA,CAAO,OAAA,CAAQ,SAAS,CAAA,EAAG;AAG/D,MAAA,MAAM,WAAA,GAAc,MAAMC,mBAAA,CAAG,QAAA,CAAS,OAAO,CAAA;AAC7C,MAAA,KAAA,CAAM,IAAA,CAAK,CAAA,EAAG,WAAW,CAAA,CAAA,EAAI,YAAY,CAAA,CAAE,CAAA;AAAA,IAC7C;AAGA,IAAA,MAAM,GAAA,GAAM,IAAI,KAAA,EAAc;AAC9B,IAAA,KAAA,MAAW,CAAC,GAAA,EAAK,KAAK,KAAK,MAAA,CAAO,OAAA,CAAQ,OAAO,CAAA,EAAG;AAClD,MAAA,GAAA,CAAI,IAAA,CAAK,CAAA,EAAG,GAAG,CAAA,CAAA,EAAI,KAAK,CA
AA,CAAE,CAAA;AAAA,IAC5B;AAEA,IAAA,MAAM,CAAC,EAAE,KAAA,EAAO,KAAA,EAAO,YAAY,UAAA,EAAY,CAAA,GAC7C,MAAM,IAAA,CAAK,YAAA,CAAa,GAAA,CAAI,SAAA,EAAW,MAAM,SAAA,EAAW;AAAA,MACtD,OAAA;AAAA,MACA,UAAA,EAAY;AAAA,QACV,UAAA,EAAY,IAAA;AAAA,QACZ;AAAA,OACF;AAAA,MACA,GAAI,UAAA,GAAa,EAAE,UAAA,EAAY,UAAA,KAAe,EAAC;AAAA,MAC/C,UAAA,EAAY,OAAA;AAAA,MACZ,GAAA;AAAA,MACA,GAAG;AAAA,KAC6B,CAAA;AAEpC,IAAA,IAAI,KAAA,EAAO;AACT,MAAA,MAAM,IAAI,KAAA;AAAA,QACR,0DAA0D,KAAK,CAAA;AAAA,OACjE;AAAA,IACF;AAEA,IAAA,IAAI,eAAe,CAAA,EAAG;AACpB,MAAA,MAAM,IAAI,KAAA;AAAA,QACR,mDAAmD,UAAU,CAAA,CAAA;AAAA,OAC/D;AAAA,IACF;AAAA,EACF;AACF;;;;"}
package/dist/stages/generate/helpers.cjs.js
CHANGED
@@ -2,12 +2,12 @@
 
 var backendPluginApi = require('@backstage/backend-plugin-api');
 var errors = require('@backstage/errors');
-var
+var node_child_process = require('node:child_process');
 var fs = require('fs-extra');
 var gitUrlParse = require('git-url-parse');
 var yaml = require('js-yaml');
-var path = require('path');
-var
+var path = require('node:path');
+var node_stream = require('node:stream');
 var helpers = require('../publish/helpers.cjs.js');
 
 function _interopDefaultCompat (e) { return e && typeof e === 'object' && 'default' in e ? e : { default: e }; }
@@ -27,10 +27,10 @@ const runCommand = async ({
 command,
 args,
 options,
-logStream = new
+logStream = new node_stream.PassThrough()
 }) => {
 await new Promise((resolve, reject) => {
-const process =
+const process = node_child_process.spawn(command, args, options);
 process.stdout.on("data", (stream) => {
 logStream.write(stream);
 });
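The diff above only swaps module specifiers; the runCommand helper itself keeps spawning the generator command and mirroring its output into a log stream. A condensed TypeScript sketch of that pattern, adapted and simplified from the source text embedded in the sourcemap below (not a drop-in replacement):

    import { SpawnOptionsWithoutStdio, spawn } from 'node:child_process';
    import { PassThrough, Writable } from 'node:stream';

    // Spawn a command and forward stdout/stderr into a writable log stream,
    // resolving on exit code 0 and rejecting otherwise.
    const runCommand = async ({
      command,
      args,
      options,
      logStream = new PassThrough(),
    }: {
      command: string;
      args: string[];
      options?: SpawnOptionsWithoutStdio;
      logStream?: Writable;
    }) =>
      new Promise<void>((resolve, reject) => {
        const child = spawn(command, args, options);
        child.stdout.on('data', chunk => logStream.write(chunk));
        child.stderr.on('data', chunk => logStream.write(chunk));
        child.on('error', reject);
        child.on('close', code =>
          code === 0
            ? resolve()
            : reject(new Error(`${command} failed, exit code: ${code}`)),
        );
      });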
package/dist/stages/generate/helpers.cjs.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"helpers.cjs.js","sources":["../../../src/stages/generate/helpers.ts"],"sourcesContent":["/*\n * Copyright 2020 The Backstage Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { isChildPath, LoggerService } from '@backstage/backend-plugin-api';\nimport { Entity } from '@backstage/catalog-model';\nimport { assertError, ForwardedError } from '@backstage/errors';\nimport { ScmIntegrationRegistry } from '@backstage/integration';\nimport { SpawnOptionsWithoutStdio, spawn } from 'child_process';\nimport fs from 'fs-extra';\nimport gitUrlParse from 'git-url-parse';\nimport yaml, { DEFAULT_SCHEMA, Type } from 'js-yaml';\nimport path, { resolve as resolvePath } from 'path';\nimport { PassThrough, Writable } from 'stream';\nimport { ParsedLocationAnnotation } from '../../helpers';\nimport { DefaultMkdocsContent, SupportedGeneratorKey } from './types';\nimport { getFileTreeRecursively } from '../publish/helpers';\n\n// TODO: Implement proper support for more generators.\nexport function getGeneratorKey(entity: Entity): SupportedGeneratorKey {\n if (!entity) {\n throw new Error('No entity provided');\n }\n\n return 'techdocs';\n}\n\nexport type RunCommandOptions = {\n /** command to run */\n command: string;\n /** arguments to pass the command */\n args: string[];\n /** options to pass to spawn */\n options: SpawnOptionsWithoutStdio;\n /** stream to capture stdout and stderr output */\n logStream?: Writable;\n};\n\n/**\n * Run a command in a sub-process, normally a shell command.\n */\nexport const runCommand = async ({\n command,\n args,\n options,\n logStream = new PassThrough(),\n}: RunCommandOptions) => {\n await new Promise<void>((resolve, reject) => {\n const process = spawn(command, args, options);\n\n process.stdout.on('data', stream => {\n logStream.write(stream);\n });\n\n process.stderr.on('data', stream => {\n logStream.write(stream);\n });\n\n process.on('error', error => {\n return reject(error);\n });\n\n process.on('close', code => {\n if (code !== 0) {\n return reject(`Command ${command} failed, exit code: ${code}`);\n }\n return resolve();\n });\n });\n};\n\n/**\n * Return the source url for MkDocs based on the backstage.io/techdocs-ref annotation.\n * Depending on the type of target, it can either return a repo_url, an edit_uri, both, or none.\n *\n * @param parsedLocationAnnotation - Object with location url and type\n * @param scmIntegrations - the scmIntegration to do url transformations\n * @param docsFolder - the configured docs folder in the mkdocs.yml (defaults to 'docs')\n * @returns the settings for the mkdocs.yml\n */\nexport const getRepoUrlFromLocationAnnotation = (\n parsedLocationAnnotation: ParsedLocationAnnotation,\n scmIntegrations: ScmIntegrationRegistry,\n docsFolder: string = 'docs',\n): { repo_url?: string; edit_uri?: string } => {\n const { type: locationType, target } = parsedLocationAnnotation;\n\n if (locationType === 'url') {\n const integration = scmIntegrations.byUrl(target);\n\n // We only support it 
for github, gitlab, bitbucketServer and harness for now as the edit_uri\n // is not properly supported for others yet.\n if (\n integration &&\n ['github', 'gitlab', 'bitbucketServer', 'harness'].includes(\n integration.type,\n )\n ) {\n // handle the case where a user manually writes url:https://github.com/backstage/backstage i.e. without /blob/...\n const { filepathtype } = gitUrlParse(target);\n if (filepathtype === '') {\n return { repo_url: target };\n }\n\n const sourceFolder = integration.resolveUrl({\n url: `./${docsFolder}`,\n base: target.endsWith('/') ? target : `${target}/`,\n });\n return {\n repo_url: target,\n edit_uri: integration.resolveEditUrl(sourceFolder),\n };\n }\n }\n\n return {};\n};\n\nclass UnknownTag {\n public readonly data: any;\n public readonly type?: string;\n\n constructor(data: any, type?: string) {\n this.data = data;\n this.type = type;\n }\n}\n\nexport const MKDOCS_SCHEMA = DEFAULT_SCHEMA.extend([\n new Type('', {\n kind: 'scalar',\n multi: true,\n representName: o => (o as UnknownTag).type,\n represent: o => (o as UnknownTag).data ?? '',\n instanceOf: UnknownTag,\n construct: (data: string, type?: string) => new UnknownTag(data, type),\n }),\n new Type('tag:', {\n kind: 'mapping',\n multi: true,\n representName: o => (o as UnknownTag).type,\n represent: o => (o as UnknownTag).data ?? '',\n instanceOf: UnknownTag,\n construct: (data: string, type?: string) => new UnknownTag(data, type),\n }),\n new Type('', {\n kind: 'sequence',\n multi: true,\n representName: o => (o as UnknownTag).type,\n represent: o => (o as UnknownTag).data ?? '',\n instanceOf: UnknownTag,\n construct: (data: string, type?: string) => new UnknownTag(data, type),\n }),\n]);\n\n/**\n * Generates a mkdocs.yml configuration file\n *\n * @param inputDir - base dir to where the mkdocs.yml file will be created\n * @param siteOptions - options for the site: `name` property will be used in mkdocs.yml for the\n * required `site_name` property, default value is \"Documentation Site\"\n */\nexport const generateMkdocsYml = async (\n inputDir: string,\n siteOptions?: { name?: string },\n) => {\n try {\n // TODO(awanlin): Use a provided default mkdocs.yml\n // from config or some specified location. If this is\n // not provided then fall back to generating bare\n // minimum mkdocs.yml file\n\n const mkdocsYmlPath = path.join(inputDir, 'mkdocs.yml');\n const defaultSiteName = siteOptions?.name ?? 
'Documentation Site';\n const defaultMkdocsContent: DefaultMkdocsContent = {\n site_name: defaultSiteName,\n docs_dir: 'docs',\n plugins: ['techdocs-core'],\n };\n\n await fs.writeFile(\n mkdocsYmlPath,\n yaml.dump(defaultMkdocsContent, { schema: MKDOCS_SCHEMA }),\n );\n } catch (error) {\n throw new ForwardedError('Could not generate mkdocs.yml file', error);\n }\n};\n\n/**\n * Finds and loads the contents of an mkdocs.yml, mkdocs.yaml file, a file\n * with a specified name or an ad-hoc created file with minimal config.\n * @public\n *\n * @param inputDir - base dir to be searched for either an mkdocs.yml or mkdocs.yaml file.\n * @param options - name: default mkdocs site_name to be used with a ad hoc file default value is \"Documentation Site\"\n * mkdocsConfigFileName (optional): a non-default file name to be used as the config\n */\nexport const getMkdocsYml = async (\n inputDir: string,\n options?: { name?: string; mkdocsConfigFileName?: string },\n): Promise<{ path: string; content: string; configIsTemporary: boolean }> => {\n let mkdocsYmlPath: string;\n let mkdocsYmlFileString: string;\n try {\n if (options?.mkdocsConfigFileName) {\n mkdocsYmlPath = path.join(inputDir, options.mkdocsConfigFileName);\n if (!(await fs.pathExists(mkdocsYmlPath))) {\n throw new Error(`The specified file ${mkdocsYmlPath} does not exist`);\n }\n\n mkdocsYmlFileString = await fs.readFile(mkdocsYmlPath, 'utf8');\n return {\n path: mkdocsYmlPath,\n content: mkdocsYmlFileString,\n configIsTemporary: false,\n };\n }\n\n mkdocsYmlPath = path.join(inputDir, 'mkdocs.yaml');\n if (await fs.pathExists(mkdocsYmlPath)) {\n mkdocsYmlFileString = await fs.readFile(mkdocsYmlPath, 'utf8');\n return {\n path: mkdocsYmlPath,\n content: mkdocsYmlFileString,\n configIsTemporary: false,\n };\n }\n\n mkdocsYmlPath = path.join(inputDir, 'mkdocs.yml');\n if (await fs.pathExists(mkdocsYmlPath)) {\n mkdocsYmlFileString = await fs.readFile(mkdocsYmlPath, 'utf8');\n return {\n path: mkdocsYmlPath,\n content: mkdocsYmlFileString,\n configIsTemporary: false,\n };\n }\n\n // No mkdocs file, generate it\n await generateMkdocsYml(inputDir, options);\n mkdocsYmlFileString = await fs.readFile(mkdocsYmlPath, 'utf8');\n } catch (error) {\n throw new ForwardedError(\n 'Could not read MkDocs YAML config file mkdocs.yml or mkdocs.yaml or default for validation',\n error,\n );\n }\n\n return {\n path: mkdocsYmlPath,\n content: mkdocsYmlFileString,\n configIsTemporary: true,\n };\n};\n\n/**\n * Validating mkdocs config file for incorrect/insecure values\n * Throws on invalid configs\n *\n * @param inputDir - base dir to be used as a docs_dir path validity check\n * @param mkdocsYmlFileString - The string contents of the loaded\n * mkdocs.yml or equivalent of a docs site\n * @returns the parsed docs_dir or undefined\n */\nexport const validateMkdocsYaml = async (\n inputDir: string,\n mkdocsYmlFileString: string,\n): Promise<string | undefined> => {\n const mkdocsYml = yaml.load(mkdocsYmlFileString, {\n schema: MKDOCS_SCHEMA,\n });\n\n if (mkdocsYml === null || typeof mkdocsYml !== 'object') {\n return undefined;\n }\n\n const parsedMkdocsYml: Record<string, any> = mkdocsYml;\n if (\n parsedMkdocsYml.docs_dir &&\n !isChildPath(inputDir, resolvePath(inputDir, parsedMkdocsYml.docs_dir))\n ) {\n throw new Error(\n `docs_dir configuration value in mkdocs can't be an absolute directory or start with ../ for security reasons.\n Use relative paths instead which are resolved relative to your mkdocs.yml file location.`,\n );\n }\n return 
parsedMkdocsYml.docs_dir;\n};\n\n/**\n * Update docs/index.md file before TechDocs generator uses it to generate docs site,\n * falling back to docs/README.md or README.md in case a default docs/index.md\n * is not provided.\n */\nexport const patchIndexPreBuild = async ({\n inputDir,\n logger,\n docsDir = 'docs',\n}: {\n inputDir: string;\n logger: LoggerService;\n docsDir?: string;\n}) => {\n const docsPath = path.join(inputDir, docsDir);\n const indexMdPath = path.join(docsPath, 'index.md');\n\n if (await fs.pathExists(indexMdPath)) {\n return;\n }\n logger.warn(`${path.join(docsDir, 'index.md')} not found.`);\n const fallbacks = [\n path.join(docsPath, 'README.md'),\n path.join(docsPath, 'readme.md'),\n path.join(inputDir, 'README.md'),\n path.join(inputDir, 'readme.md'),\n ];\n\n await fs.ensureDir(docsPath);\n for (const filePath of fallbacks) {\n try {\n await fs.copyFile(filePath, indexMdPath);\n return;\n } catch (error) {\n logger.warn(`${path.relative(inputDir, filePath)} not found.`);\n }\n }\n\n logger.warn(\n `Could not find any techdocs' index file. Please make sure at least one of ${[\n indexMdPath,\n ...fallbacks,\n ].join(' ')} exists.`,\n );\n};\n\n/**\n * Create or update the techdocs_metadata.json. Values initialized/updated are:\n * - The build_timestamp (now)\n * - The list of files generated\n *\n * @param techdocsMetadataPath - File path to techdocs_metadata.json\n */\nexport const createOrUpdateMetadata = async (\n techdocsMetadataPath: string,\n logger: LoggerService,\n): Promise<void> => {\n const techdocsMetadataDir = techdocsMetadataPath\n .split(path.sep)\n .slice(0, -1)\n .join(path.sep);\n // check if file exists, create if it does not.\n try {\n await fs.access(techdocsMetadataPath, fs.constants.F_OK);\n } catch (err) {\n // Bootstrap file with empty JSON\n await fs.writeJson(techdocsMetadataPath, JSON.parse('{}'));\n }\n // check if valid Json\n let json;\n try {\n json = await fs.readJson(techdocsMetadataPath);\n } catch (err) {\n assertError(err);\n const message = `Invalid JSON at ${techdocsMetadataPath} with error ${err.message}`;\n logger.error(message);\n throw new Error(message);\n }\n\n json.build_timestamp = Date.now();\n\n // Get and write generated files to the metadata JSON. Each file string is in\n // a form appropriate for invalidating the associated object from cache.\n try {\n json.files = (await getFileTreeRecursively(techdocsMetadataDir)).map(file =>\n file.replace(`${techdocsMetadataDir}${path.sep}`, ''),\n );\n } catch (err) {\n assertError(err);\n json.files = [];\n logger.warn(`Unable to add files list to metadata: ${err.message}`);\n }\n\n await fs.writeJson(techdocsMetadataPath, json);\n return;\n};\n\n/**\n * Update the techdocs_metadata.json to add etag of the prepared tree (e.g. 
commit SHA or actual Etag of the resource).\n * This is helpful to check if a TechDocs site in storage has gone outdated, without maintaining an in-memory build info\n * per Backstage instance.\n *\n * @param techdocsMetadataPath - File path to techdocs_metadata.json\n * @param etag - The ETag to use\n */\nexport const storeEtagMetadata = async (\n techdocsMetadataPath: string,\n etag: string,\n): Promise<void> => {\n const json = await fs.readJson(techdocsMetadataPath);\n json.etag = etag;\n await fs.writeJson(techdocsMetadataPath, json);\n};\n"],"names":["PassThrough","spawn","gitUrlParse","DEFAULT_SCHEMA","Type","path","fs","yaml","ForwardedError","isChildPath","resolvePath","assertError","getFileTreeRecursively"],"mappings":";;;;;;;;;;;;;;;;;;;AA+BO,SAAS,gBAAgB,MAAA,EAAuC;AACrE,EAAA,IAAI,CAAC,MAAA,EAAQ;AACX,IAAA,MAAM,IAAI,MAAM,oBAAoB,CAAA;AAAA,EACtC;AAEA,EAAA,OAAO,UAAA;AACT;AAgBO,MAAM,aAAa,OAAO;AAAA,EAC/B,OAAA;AAAA,EACA,IAAA;AAAA,EACA,OAAA;AAAA,EACA,SAAA,GAAY,IAAIA,kBAAA;AAClB,CAAA,KAAyB;AACvB,EAAA,MAAM,IAAI,OAAA,CAAc,CAAC,OAAA,EAAS,MAAA,KAAW;AAC3C,IAAA,MAAM,OAAA,GAAUC,mBAAA,CAAM,OAAA,EAAS,IAAA,EAAM,OAAO,CAAA;AAE5C,IAAA,OAAA,CAAQ,MAAA,CAAO,EAAA,CAAG,MAAA,EAAQ,CAAA,MAAA,KAAU;AAClC,MAAA,SAAA,CAAU,MAAM,MAAM,CAAA;AAAA,IACxB,CAAC,CAAA;AAED,IAAA,OAAA,CAAQ,MAAA,CAAO,EAAA,CAAG,MAAA,EAAQ,CAAA,MAAA,KAAU;AAClC,MAAA,SAAA,CAAU,MAAM,MAAM,CAAA;AAAA,IACxB,CAAC,CAAA;AAED,IAAA,OAAA,CAAQ,EAAA,CAAG,SAAS,CAAA,KAAA,KAAS;AAC3B,MAAA,OAAO,OAAO,KAAK,CAAA;AAAA,IACrB,CAAC,CAAA;AAED,IAAA,OAAA,CAAQ,EAAA,CAAG,SAAS,CAAA,IAAA,KAAQ;AAC1B,MAAA,IAAI,SAAS,CAAA,EAAG;AACd,QAAA,OAAO,MAAA,CAAO,CAAA,QAAA,EAAW,OAAO,CAAA,oBAAA,EAAuB,IAAI,CAAA,CAAE,CAAA;AAAA,MAC/D;AACA,MAAA,OAAO,OAAA,EAAQ;AAAA,IACjB,CAAC,CAAA;AAAA,EACH,CAAC,CAAA;AACH;AAWO,MAAM,gCAAA,GAAmC,CAC9C,wBAAA,EACA,eAAA,EACA,aAAqB,MAAA,KACwB;AAC7C,EAAA,MAAM,EAAE,IAAA,EAAM,YAAA,EAAc,MAAA,EAAO,GAAI,wBAAA;AAEvC,EAAA,IAAI,iBAAiB,KAAA,EAAO;AAC1B,IAAA,MAAM,WAAA,GAAc,eAAA,CAAgB,KAAA,CAAM,MAAM,CAAA;AAIhD,IAAA,IACE,eACA,CAAC,QAAA,EAAU,QAAA,EAAU,iBAAA,EAAmB,SAAS,CAAA,CAAE,QAAA;AAAA,MACjD,WAAA,CAAY;AAAA,KACd,EACA;AAEA,MAAA,MAAM,EAAE,YAAA,EAAa,GAAIC,4BAAA,CAAY,MAAM,CAAA;AAC3C,MAAA,IAAI,iBAAiB,EAAA,EAAI;AACvB,QAAA,OAAO,EAAE,UAAU,MAAA,EAAO;AAAA,MAC5B;AAEA,MAAA,MAAM,YAAA,GAAe,YAAY,UAAA,CAAW;AAAA,QAC1C,GAAA,EAAK,KAAK,UAAU,CAAA,CAAA;AAAA,QACpB,MAAM,MAAA,CAAO,QAAA,CAAS,GAAG,CAAA,GAAI,MAAA,GAAS,GAAG,MAAM,CAAA,CAAA;AAAA,OAChD,CAAA;AACD,MAAA,OAAO;AAAA,QACL,QAAA,EAAU,MAAA;AAAA,QACV,QAAA,EAAU,WAAA,CAAY,cAAA,CAAe,YAAY;AAAA,OACnD;AAAA,IACF;AAAA,EACF;AAEA,EAAA,OAAO,EAAC;AACV;AAEA,MAAM,UAAA,CAAW;AAAA,EACC,IAAA;AAAA,EACA,IAAA;AAAA,EAEhB,WAAA,CAAY,MAAW,IAAA,EAAe;AACpC,IAAA,IAAA,CAAK,IAAA,GAAO,IAAA;AACZ,IAAA,IAAA,CAAK,IAAA,GAAO,IAAA;AAAA,EACd;AACF;AAEO,MAAM,aAAA,GAAgBC,oBAAe,MAAA,CAAO;AAAA,EACjD,IAAIC,UAAK,EAAA,EAAI;AAAA,IACX,IAAA,EAAM,QAAA;AAAA,IACN,KAAA,EAAO,IAAA;AAAA,IACP,aAAA,EAAe,OAAM,CAAA,CAAiB,IAAA;AAAA,IACtC,SAAA,EAAW,CAAA,CAAA,KAAM,CAAA,CAAiB,IAAA,IAAQ,EAAA;AAAA,IAC1C,UAAA,EAAY,UAAA;AAAA,IACZ,WAAW,CAAC,IAAA,EAAc,SAAkB,IAAI,UAAA,CAAW,MAAM,IAAI;AAAA,GACtE,CAAA;AAAA,EACD,IAAIA,UAAK,MAAA,EAAQ;AAAA,IACf,IAAA,EAAM,SAAA;AAAA,IACN,KAAA,EAAO,IAAA;AAAA,IACP,aAAA,EAAe,OAAM,CAAA,CAAiB,IAAA;AAAA,IACtC,SAAA,EAAW,CAAA,CAAA,KAAM,CAAA,CAAiB,IAAA,IAAQ,EAAA;AAAA,IAC1C,UAAA,EAAY,UAAA;AAAA,IACZ,WAAW,CAAC,IAAA,EAAc,SAAkB,IAAI,UAAA,CAAW,MAAM,IAAI;AAAA,GACtE,CAAA;AAAA,EACD,IAAIA,UAAK,EAAA,EAAI;AAAA,IACX,IAAA,EAAM,UAAA;AAAA,IACN,KAAA,EAAO,IAAA;AAAA,IACP,aAAA,EAAe,OAAM,CAAA,CAAiB,IAAA;AAAA,IACtC,SAAA,EAAW,CAAA,CAAA,KAAM,CAAA,CAAiB,IAAA,IAAQ,EAAA;AAAA,IAC1C,UAAA,EAAY,UAAA;AAAA,IACZ,WAAW,CAAC,IAAA,EAAc,SAAkB,IAAI,UAAA,CAA
W,MAAM,IAAI;AAAA,GACtE;AACH,CAAC;AASM,MAAM,iBAAA,GAAoB,OAC/B,QAAA,EACA,WAAA,KACG;AACH,EAAA,IAAI;AAMF,IAAA,MAAM,aAAA,GAAgBC,qBAAA,CAAK,IAAA,CAAK,QAAA,EAAU,YAAY,CAAA;AACtD,IAAA,MAAM,eAAA,GAAkB,aAAa,IAAA,IAAQ,oBAAA;AAC7C,IAAA,MAAM,oBAAA,GAA6C;AAAA,MACjD,SAAA,EAAW,eAAA;AAAA,MACX,QAAA,EAAU,MAAA;AAAA,MACV,OAAA,EAAS,CAAC,eAAe;AAAA,KAC3B;AAEA,IAAA,MAAMC,mBAAA,CAAG,SAAA;AAAA,MACP,aAAA;AAAA,MACAC,sBAAK,IAAA,CAAK,oBAAA,EAAsB,EAAE,MAAA,EAAQ,eAAe;AAAA,KAC3D;AAAA,EACF,SAAS,KAAA,EAAO;AACd,IAAA,MAAM,IAAIC,qBAAA,CAAe,oCAAA,EAAsC,KAAK,CAAA;AAAA,EACtE;AACF;AAWO,MAAM,YAAA,GAAe,OAC1B,QAAA,EACA,OAAA,KAC2E;AAC3E,EAAA,IAAI,aAAA;AACJ,EAAA,IAAI,mBAAA;AACJ,EAAA,IAAI;AACF,IAAA,IAAI,SAAS,oBAAA,EAAsB;AACjC,MAAA,aAAA,GAAgBH,qBAAA,CAAK,IAAA,CAAK,QAAA,EAAU,OAAA,CAAQ,oBAAoB,CAAA;AAChE,MAAA,IAAI,CAAE,MAAMC,mBAAA,CAAG,UAAA,CAAW,aAAa,CAAA,EAAI;AACzC,QAAA,MAAM,IAAI,KAAA,CAAM,CAAA,mBAAA,EAAsB,aAAa,CAAA,eAAA,CAAiB,CAAA;AAAA,MACtE;AAEA,MAAA,mBAAA,GAAsB,MAAMA,mBAAA,CAAG,QAAA,CAAS,aAAA,EAAe,MAAM,CAAA;AAC7D,MAAA,OAAO;AAAA,QACL,IAAA,EAAM,aAAA;AAAA,QACN,OAAA,EAAS,mBAAA;AAAA,QACT,iBAAA,EAAmB;AAAA,OACrB;AAAA,IACF;AAEA,IAAA,aAAA,GAAgBD,qBAAA,CAAK,IAAA,CAAK,QAAA,EAAU,aAAa,CAAA;AACjD,IAAA,IAAI,MAAMC,mBAAA,CAAG,UAAA,CAAW,aAAa,CAAA,EAAG;AACtC,MAAA,mBAAA,GAAsB,MAAMA,mBAAA,CAAG,QAAA,CAAS,aAAA,EAAe,MAAM,CAAA;AAC7D,MAAA,OAAO;AAAA,QACL,IAAA,EAAM,aAAA;AAAA,QACN,OAAA,EAAS,mBAAA;AAAA,QACT,iBAAA,EAAmB;AAAA,OACrB;AAAA,IACF;AAEA,IAAA,aAAA,GAAgBD,qBAAA,CAAK,IAAA,CAAK,QAAA,EAAU,YAAY,CAAA;AAChD,IAAA,IAAI,MAAMC,mBAAA,CAAG,UAAA,CAAW,aAAa,CAAA,EAAG;AACtC,MAAA,mBAAA,GAAsB,MAAMA,mBAAA,CAAG,QAAA,CAAS,aAAA,EAAe,MAAM,CAAA;AAC7D,MAAA,OAAO;AAAA,QACL,IAAA,EAAM,aAAA;AAAA,QACN,OAAA,EAAS,mBAAA;AAAA,QACT,iBAAA,EAAmB;AAAA,OACrB;AAAA,IACF;AAGA,IAAA,MAAM,iBAAA,CAAkB,UAAU,OAAO,CAAA;AACzC,IAAA,mBAAA,GAAsB,MAAMA,mBAAA,CAAG,QAAA,CAAS,aAAA,EAAe,MAAM,CAAA;AAAA,EAC/D,SAAS,KAAA,EAAO;AACd,IAAA,MAAM,IAAIE,qBAAA;AAAA,MACR,4FAAA;AAAA,MACA;AAAA,KACF;AAAA,EACF;AAEA,EAAA,OAAO;AAAA,IACL,IAAA,EAAM,aAAA;AAAA,IACN,OAAA,EAAS,mBAAA;AAAA,IACT,iBAAA,EAAmB;AAAA,GACrB;AACF;AAWO,MAAM,kBAAA,GAAqB,OAChC,QAAA,EACA,mBAAA,KACgC;AAChC,EAAA,MAAM,SAAA,GAAYD,qBAAA,CAAK,IAAA,CAAK,mBAAA,EAAqB;AAAA,IAC/C,MAAA,EAAQ;AAAA,GACT,CAAA;AAED,EAAA,IAAI,SAAA,KAAc,IAAA,IAAQ,OAAO,SAAA,KAAc,QAAA,EAAU;AACvD,IAAA,OAAO,MAAA;AAAA,EACT;AAEA,EAAA,MAAM,eAAA,GAAuC,SAAA;AAC7C,EAAA,IACE,eAAA,CAAgB,QAAA,IAChB,CAACE,4BAAA,CAAY,QAAA,EAAUC,aAAY,QAAA,EAAU,eAAA,CAAgB,QAAQ,CAAC,CAAA,EACtE;AACA,IAAA,MAAM,IAAI,KAAA;AAAA,MACR,CAAA;AAAA,+FAAA;AAAA,KAEF;AAAA,EACF;AACA,EAAA,OAAO,eAAA,CAAgB,QAAA;AACzB;AAOO,MAAM,qBAAqB,OAAO;AAAA,EACvC,QAAA;AAAA,EACA,MAAA;AAAA,EACA,OAAA,GAAU;AACZ,CAAA,KAIM;AACJ,EAAA,MAAM,QAAA,GAAWL,qBAAA,CAAK,IAAA,CAAK,QAAA,EAAU,OAAO,CAAA;AAC5C,EAAA,MAAM,WAAA,GAAcA,qBAAA,CAAK,IAAA,CAAK,QAAA,EAAU,UAAU,CAAA;AAElD,EAAA,IAAI,MAAMC,mBAAA,CAAG,UAAA,CAAW,WAAW,CAAA,EAAG;AACpC,IAAA;AAAA,EACF;AACA,EAAA,MAAA,CAAO,KAAK,CAAA,EAAGD,qBAAA,CAAK,KAAK,OAAA,EAAS,UAAU,CAAC,CAAA,WAAA,CAAa,CAAA;AAC1D,EAAA,MAAM,SAAA,GAAY;AAAA,IAChBA,qBAAA,CAAK,IAAA,CAAK,QAAA,EAAU,WAAW,CAAA;AAAA,IAC/BA,qBAAA,CAAK,IAAA,CAAK,QAAA,EAAU,WAAW,CAAA;AAAA,IAC/BA,qBAAA,CAAK,IAAA,CAAK,QAAA,EAAU,WAAW,CAAA;AAAA,IAC/BA,qBAAA,CAAK,IAAA,CAAK,QAAA,EAAU,WAAW;AAAA,GACjC;AAEA,EAAA,MAAMC,mBAAA,CAAG,UAAU,QAAQ,CAAA;AAC3B,EAAA,KAAA,MAAW,YAAY,SAAA,EAAW;AAChC,IAAA,IAAI;AACF,MAAA,MAAMA,mBAAA,CAAG,QAAA,CAAS,QAAA,EAAU,WAAW,CAAA;AACvC,MAAA;AAAA,IACF,SAAS,KAAA,EAAO;AACd,MAAA,MAAA,CAAO,KAAK,CAAA,EAAGD,qBAAA,CAAK,SAAS,QAAA,EAAU,QAAQ,CAAC,CAAA,WAAA,CAAa,CAAA;AAAA,IAC/D;AAAA,EACF;AAEA,EAAA,MAAA,CAAO,IAAA;AAAA,IACL,CAAA,0EAAA,EAA6E;AAAA,MAC3E,WAAA;AAAA,MACA,GAAG;AAAA,KACL,CAAE,IAAA,CAAK,GAAG,CAAC,CAAA,QAAA;
AAAA,GACb;AACF;AASO,MAAM,sBAAA,GAAyB,OACpC,oBAAA,EACA,MAAA,KACkB;AAClB,EAAA,MAAM,mBAAA,GAAsB,oBAAA,CACzB,KAAA,CAAMA,qBAAA,CAAK,GAAG,CAAA,CACd,KAAA,CAAM,CAAA,EAAG,EAAE,CAAA,CACX,IAAA,CAAKA,qBAAA,CAAK,GAAG,CAAA;AAEhB,EAAA,IAAI;AACF,IAAA,MAAMC,mBAAA,CAAG,MAAA,CAAO,oBAAA,EAAsBA,mBAAA,CAAG,UAAU,IAAI,CAAA;AAAA,EACzD,SAAS,GAAA,EAAK;AAEZ,IAAA,MAAMA,oBAAG,SAAA,CAAU,oBAAA,EAAsB,IAAA,CAAK,KAAA,CAAM,IAAI,CAAC,CAAA;AAAA,EAC3D;AAEA,EAAA,IAAI,IAAA;AACJ,EAAA,IAAI;AACF,IAAA,IAAA,GAAO,MAAMA,mBAAA,CAAG,QAAA,CAAS,oBAAoB,CAAA;AAAA,EAC/C,SAAS,GAAA,EAAK;AACZ,IAAAK,kBAAA,CAAY,GAAG,CAAA;AACf,IAAA,MAAM,OAAA,GAAU,CAAA,gBAAA,EAAmB,oBAAoB,CAAA,YAAA,EAAe,IAAI,OAAO,CAAA,CAAA;AACjF,IAAA,MAAA,CAAO,MAAM,OAAO,CAAA;AACpB,IAAA,MAAM,IAAI,MAAM,OAAO,CAAA;AAAA,EACzB;AAEA,EAAA,IAAA,CAAK,eAAA,GAAkB,KAAK,GAAA,EAAI;AAIhC,EAAA,IAAI;AACF,IAAA,IAAA,CAAK,KAAA,GAAA,CAAS,MAAMC,8BAAA,CAAuB,mBAAmB,CAAA,EAAG,GAAA;AAAA,MAAI,CAAA,IAAA,KACnE,KAAK,OAAA,CAAQ,CAAA,EAAG,mBAAmB,CAAA,EAAGP,qBAAA,CAAK,GAAG,CAAA,CAAA,EAAI,EAAE;AAAA,KACtD;AAAA,EACF,SAAS,GAAA,EAAK;AACZ,IAAAM,kBAAA,CAAY,GAAG,CAAA;AACf,IAAA,IAAA,CAAK,QAAQ,EAAC;AACd,IAAA,MAAA,CAAO,IAAA,CAAK,CAAA,sCAAA,EAAyC,GAAA,CAAI,OAAO,CAAA,CAAE,CAAA;AAAA,EACpE;AAEA,EAAA,MAAML,mBAAA,CAAG,SAAA,CAAU,oBAAA,EAAsB,IAAI,CAAA;AAC7C,EAAA;AACF;AAUO,MAAM,iBAAA,GAAoB,OAC/B,oBAAA,EACA,IAAA,KACkB;AAClB,EAAA,MAAM,IAAA,GAAO,MAAMA,mBAAA,CAAG,QAAA,CAAS,oBAAoB,CAAA;AACnD,EAAA,IAAA,CAAK,IAAA,GAAO,IAAA;AACZ,EAAA,MAAMA,mBAAA,CAAG,SAAA,CAAU,oBAAA,EAAsB,IAAI,CAAA;AAC/C;;;;;;;;;;;;;"}
+
{"version":3,"file":"helpers.cjs.js","sources":["../../../src/stages/generate/helpers.ts"],"sourcesContent":["/*\n * Copyright 2020 The Backstage Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { isChildPath, LoggerService } from '@backstage/backend-plugin-api';\nimport { Entity } from '@backstage/catalog-model';\nimport { assertError, ForwardedError } from '@backstage/errors';\nimport { ScmIntegrationRegistry } from '@backstage/integration';\nimport { SpawnOptionsWithoutStdio, spawn } from 'node:child_process';\nimport fs from 'fs-extra';\nimport gitUrlParse from 'git-url-parse';\nimport yaml, { DEFAULT_SCHEMA, Type } from 'js-yaml';\nimport path, { resolve as resolvePath } from 'node:path';\nimport { PassThrough, Writable } from 'node:stream';\nimport { ParsedLocationAnnotation } from '../../helpers';\nimport { DefaultMkdocsContent, SupportedGeneratorKey } from './types';\nimport { getFileTreeRecursively } from '../publish/helpers';\n\n// TODO: Implement proper support for more generators.\nexport function getGeneratorKey(entity: Entity): SupportedGeneratorKey {\n if (!entity) {\n throw new Error('No entity provided');\n }\n\n return 'techdocs';\n}\n\nexport type RunCommandOptions = {\n /** command to run */\n command: string;\n /** arguments to pass the command */\n args: string[];\n /** options to pass to spawn */\n options: SpawnOptionsWithoutStdio;\n /** stream to capture stdout and stderr output */\n logStream?: Writable;\n};\n\n/**\n * Run a command in a sub-process, normally a shell command.\n */\nexport const runCommand = async ({\n command,\n args,\n options,\n logStream = new PassThrough(),\n}: RunCommandOptions) => {\n await new Promise<void>((resolve, reject) => {\n const process = spawn(command, args, options);\n\n process.stdout.on('data', stream => {\n logStream.write(stream);\n });\n\n process.stderr.on('data', stream => {\n logStream.write(stream);\n });\n\n process.on('error', error => {\n return reject(error);\n });\n\n process.on('close', code => {\n if (code !== 0) {\n return reject(`Command ${command} failed, exit code: ${code}`);\n }\n return resolve();\n });\n });\n};\n\n/**\n * Return the source url for MkDocs based on the backstage.io/techdocs-ref annotation.\n * Depending on the type of target, it can either return a repo_url, an edit_uri, both, or none.\n *\n * @param parsedLocationAnnotation - Object with location url and type\n * @param scmIntegrations - the scmIntegration to do url transformations\n * @param docsFolder - the configured docs folder in the mkdocs.yml (defaults to 'docs')\n * @returns the settings for the mkdocs.yml\n */\nexport const getRepoUrlFromLocationAnnotation = (\n parsedLocationAnnotation: ParsedLocationAnnotation,\n scmIntegrations: ScmIntegrationRegistry,\n docsFolder: string = 'docs',\n): { repo_url?: string; edit_uri?: string } => {\n const { type: locationType, target } = parsedLocationAnnotation;\n\n if (locationType === 'url') {\n const integration = scmIntegrations.byUrl(target);\n\n // We 
only support it for github, gitlab, bitbucketServer and harness for now as the edit_uri\n // is not properly supported for others yet.\n if (\n integration &&\n ['github', 'gitlab', 'bitbucketServer', 'harness'].includes(\n integration.type,\n )\n ) {\n // handle the case where a user manually writes url:https://github.com/backstage/backstage i.e. without /blob/...\n const { filepathtype } = gitUrlParse(target);\n if (filepathtype === '') {\n return { repo_url: target };\n }\n\n const sourceFolder = integration.resolveUrl({\n url: `./${docsFolder}`,\n base: target.endsWith('/') ? target : `${target}/`,\n });\n return {\n repo_url: target,\n edit_uri: integration.resolveEditUrl(sourceFolder),\n };\n }\n }\n\n return {};\n};\n\nclass UnknownTag {\n public readonly data: any;\n public readonly type?: string;\n\n constructor(data: any, type?: string) {\n this.data = data;\n this.type = type;\n }\n}\n\nexport const MKDOCS_SCHEMA = DEFAULT_SCHEMA.extend([\n new Type('', {\n kind: 'scalar',\n multi: true,\n representName: o => (o as UnknownTag).type,\n represent: o => (o as UnknownTag).data ?? '',\n instanceOf: UnknownTag,\n construct: (data: string, type?: string) => new UnknownTag(data, type),\n }),\n new Type('tag:', {\n kind: 'mapping',\n multi: true,\n representName: o => (o as UnknownTag).type,\n represent: o => (o as UnknownTag).data ?? '',\n instanceOf: UnknownTag,\n construct: (data: string, type?: string) => new UnknownTag(data, type),\n }),\n new Type('', {\n kind: 'sequence',\n multi: true,\n representName: o => (o as UnknownTag).type,\n represent: o => (o as UnknownTag).data ?? '',\n instanceOf: UnknownTag,\n construct: (data: string, type?: string) => new UnknownTag(data, type),\n }),\n]);\n\n/**\n * Generates a mkdocs.yml configuration file\n *\n * @param inputDir - base dir to where the mkdocs.yml file will be created\n * @param siteOptions - options for the site: `name` property will be used in mkdocs.yml for the\n * required `site_name` property, default value is \"Documentation Site\"\n */\nexport const generateMkdocsYml = async (\n inputDir: string,\n siteOptions?: { name?: string },\n) => {\n try {\n // TODO(awanlin): Use a provided default mkdocs.yml\n // from config or some specified location. If this is\n // not provided then fall back to generating bare\n // minimum mkdocs.yml file\n\n const mkdocsYmlPath = path.join(inputDir, 'mkdocs.yml');\n const defaultSiteName = siteOptions?.name ?? 
'Documentation Site';\n const defaultMkdocsContent: DefaultMkdocsContent = {\n site_name: defaultSiteName,\n docs_dir: 'docs',\n plugins: ['techdocs-core'],\n };\n\n await fs.writeFile(\n mkdocsYmlPath,\n yaml.dump(defaultMkdocsContent, { schema: MKDOCS_SCHEMA }),\n );\n } catch (error) {\n throw new ForwardedError('Could not generate mkdocs.yml file', error);\n }\n};\n\n/**\n * Finds and loads the contents of an mkdocs.yml, mkdocs.yaml file, a file\n * with a specified name or an ad-hoc created file with minimal config.\n * @public\n *\n * @param inputDir - base dir to be searched for either an mkdocs.yml or mkdocs.yaml file.\n * @param options - name: default mkdocs site_name to be used with a ad hoc file default value is \"Documentation Site\"\n * mkdocsConfigFileName (optional): a non-default file name to be used as the config\n */\nexport const getMkdocsYml = async (\n inputDir: string,\n options?: { name?: string; mkdocsConfigFileName?: string },\n): Promise<{ path: string; content: string; configIsTemporary: boolean }> => {\n let mkdocsYmlPath: string;\n let mkdocsYmlFileString: string;\n try {\n if (options?.mkdocsConfigFileName) {\n mkdocsYmlPath = path.join(inputDir, options.mkdocsConfigFileName);\n if (!(await fs.pathExists(mkdocsYmlPath))) {\n throw new Error(`The specified file ${mkdocsYmlPath} does not exist`);\n }\n\n mkdocsYmlFileString = await fs.readFile(mkdocsYmlPath, 'utf8');\n return {\n path: mkdocsYmlPath,\n content: mkdocsYmlFileString,\n configIsTemporary: false,\n };\n }\n\n mkdocsYmlPath = path.join(inputDir, 'mkdocs.yaml');\n if (await fs.pathExists(mkdocsYmlPath)) {\n mkdocsYmlFileString = await fs.readFile(mkdocsYmlPath, 'utf8');\n return {\n path: mkdocsYmlPath,\n content: mkdocsYmlFileString,\n configIsTemporary: false,\n };\n }\n\n mkdocsYmlPath = path.join(inputDir, 'mkdocs.yml');\n if (await fs.pathExists(mkdocsYmlPath)) {\n mkdocsYmlFileString = await fs.readFile(mkdocsYmlPath, 'utf8');\n return {\n path: mkdocsYmlPath,\n content: mkdocsYmlFileString,\n configIsTemporary: false,\n };\n }\n\n // No mkdocs file, generate it\n await generateMkdocsYml(inputDir, options);\n mkdocsYmlFileString = await fs.readFile(mkdocsYmlPath, 'utf8');\n } catch (error) {\n throw new ForwardedError(\n 'Could not read MkDocs YAML config file mkdocs.yml or mkdocs.yaml or default for validation',\n error,\n );\n }\n\n return {\n path: mkdocsYmlPath,\n content: mkdocsYmlFileString,\n configIsTemporary: true,\n };\n};\n\n/**\n * Validating mkdocs config file for incorrect/insecure values\n * Throws on invalid configs\n *\n * @param inputDir - base dir to be used as a docs_dir path validity check\n * @param mkdocsYmlFileString - The string contents of the loaded\n * mkdocs.yml or equivalent of a docs site\n * @returns the parsed docs_dir or undefined\n */\nexport const validateMkdocsYaml = async (\n inputDir: string,\n mkdocsYmlFileString: string,\n): Promise<string | undefined> => {\n const mkdocsYml = yaml.load(mkdocsYmlFileString, {\n schema: MKDOCS_SCHEMA,\n });\n\n if (mkdocsYml === null || typeof mkdocsYml !== 'object') {\n return undefined;\n }\n\n const parsedMkdocsYml: Record<string, any> = mkdocsYml;\n if (\n parsedMkdocsYml.docs_dir &&\n !isChildPath(inputDir, resolvePath(inputDir, parsedMkdocsYml.docs_dir))\n ) {\n throw new Error(\n `docs_dir configuration value in mkdocs can't be an absolute directory or start with ../ for security reasons.\n Use relative paths instead which are resolved relative to your mkdocs.yml file location.`,\n );\n }\n return 
parsedMkdocsYml.docs_dir;\n};\n\n/**\n * Update docs/index.md file before TechDocs generator uses it to generate docs site,\n * falling back to docs/README.md or README.md in case a default docs/index.md\n * is not provided.\n */\nexport const patchIndexPreBuild = async ({\n inputDir,\n logger,\n docsDir = 'docs',\n}: {\n inputDir: string;\n logger: LoggerService;\n docsDir?: string;\n}) => {\n const docsPath = path.join(inputDir, docsDir);\n const indexMdPath = path.join(docsPath, 'index.md');\n\n if (await fs.pathExists(indexMdPath)) {\n return;\n }\n logger.warn(`${path.join(docsDir, 'index.md')} not found.`);\n const fallbacks = [\n path.join(docsPath, 'README.md'),\n path.join(docsPath, 'readme.md'),\n path.join(inputDir, 'README.md'),\n path.join(inputDir, 'readme.md'),\n ];\n\n await fs.ensureDir(docsPath);\n for (const filePath of fallbacks) {\n try {\n await fs.copyFile(filePath, indexMdPath);\n return;\n } catch (error) {\n logger.warn(`${path.relative(inputDir, filePath)} not found.`);\n }\n }\n\n logger.warn(\n `Could not find any techdocs' index file. Please make sure at least one of ${[\n indexMdPath,\n ...fallbacks,\n ].join(' ')} exists.`,\n );\n};\n\n/**\n * Create or update the techdocs_metadata.json. Values initialized/updated are:\n * - The build_timestamp (now)\n * - The list of files generated\n *\n * @param techdocsMetadataPath - File path to techdocs_metadata.json\n */\nexport const createOrUpdateMetadata = async (\n techdocsMetadataPath: string,\n logger: LoggerService,\n): Promise<void> => {\n const techdocsMetadataDir = techdocsMetadataPath\n .split(path.sep)\n .slice(0, -1)\n .join(path.sep);\n // check if file exists, create if it does not.\n try {\n await fs.access(techdocsMetadataPath, fs.constants.F_OK);\n } catch (err) {\n // Bootstrap file with empty JSON\n await fs.writeJson(techdocsMetadataPath, JSON.parse('{}'));\n }\n // check if valid Json\n let json;\n try {\n json = await fs.readJson(techdocsMetadataPath);\n } catch (err) {\n assertError(err);\n const message = `Invalid JSON at ${techdocsMetadataPath} with error ${err.message}`;\n logger.error(message);\n throw new Error(message);\n }\n\n json.build_timestamp = Date.now();\n\n // Get and write generated files to the metadata JSON. Each file string is in\n // a form appropriate for invalidating the associated object from cache.\n try {\n json.files = (await getFileTreeRecursively(techdocsMetadataDir)).map(file =>\n file.replace(`${techdocsMetadataDir}${path.sep}`, ''),\n );\n } catch (err) {\n assertError(err);\n json.files = [];\n logger.warn(`Unable to add files list to metadata: ${err.message}`);\n }\n\n await fs.writeJson(techdocsMetadataPath, json);\n return;\n};\n\n/**\n * Update the techdocs_metadata.json to add etag of the prepared tree (e.g. 
commit SHA or actual Etag of the resource).\n * This is helpful to check if a TechDocs site in storage has gone outdated, without maintaining an in-memory build info\n * per Backstage instance.\n *\n * @param techdocsMetadataPath - File path to techdocs_metadata.json\n * @param etag - The ETag to use\n */\nexport const storeEtagMetadata = async (\n techdocsMetadataPath: string,\n etag: string,\n): Promise<void> => {\n const json = await fs.readJson(techdocsMetadataPath);\n json.etag = etag;\n await fs.writeJson(techdocsMetadataPath, json);\n};\n"],"names":["PassThrough","spawn","gitUrlParse","DEFAULT_SCHEMA","Type","path","fs","yaml","ForwardedError","isChildPath","resolvePath","assertError","getFileTreeRecursively"],"mappings":";;;;;;;;;;;;;;;;;;;AA+BO,SAAS,gBAAgB,MAAA,EAAuC;AACrE,EAAA,IAAI,CAAC,MAAA,EAAQ;AACX,IAAA,MAAM,IAAI,MAAM,oBAAoB,CAAA;AAAA,EACtC;AAEA,EAAA,OAAO,UAAA;AACT;AAgBO,MAAM,aAAa,OAAO;AAAA,EAC/B,OAAA;AAAA,EACA,IAAA;AAAA,EACA,OAAA;AAAA,EACA,SAAA,GAAY,IAAIA,uBAAA;AAClB,CAAA,KAAyB;AACvB,EAAA,MAAM,IAAI,OAAA,CAAc,CAAC,OAAA,EAAS,MAAA,KAAW;AAC3C,IAAA,MAAM,OAAA,GAAUC,wBAAA,CAAM,OAAA,EAAS,IAAA,EAAM,OAAO,CAAA;AAE5C,IAAA,OAAA,CAAQ,MAAA,CAAO,EAAA,CAAG,MAAA,EAAQ,CAAA,MAAA,KAAU;AAClC,MAAA,SAAA,CAAU,MAAM,MAAM,CAAA;AAAA,IACxB,CAAC,CAAA;AAED,IAAA,OAAA,CAAQ,MAAA,CAAO,EAAA,CAAG,MAAA,EAAQ,CAAA,MAAA,KAAU;AAClC,MAAA,SAAA,CAAU,MAAM,MAAM,CAAA;AAAA,IACxB,CAAC,CAAA;AAED,IAAA,OAAA,CAAQ,EAAA,CAAG,SAAS,CAAA,KAAA,KAAS;AAC3B,MAAA,OAAO,OAAO,KAAK,CAAA;AAAA,IACrB,CAAC,CAAA;AAED,IAAA,OAAA,CAAQ,EAAA,CAAG,SAAS,CAAA,IAAA,KAAQ;AAC1B,MAAA,IAAI,SAAS,CAAA,EAAG;AACd,QAAA,OAAO,MAAA,CAAO,CAAA,QAAA,EAAW,OAAO,CAAA,oBAAA,EAAuB,IAAI,CAAA,CAAE,CAAA;AAAA,MAC/D;AACA,MAAA,OAAO,OAAA,EAAQ;AAAA,IACjB,CAAC,CAAA;AAAA,EACH,CAAC,CAAA;AACH;AAWO,MAAM,gCAAA,GAAmC,CAC9C,wBAAA,EACA,eAAA,EACA,aAAqB,MAAA,KACwB;AAC7C,EAAA,MAAM,EAAE,IAAA,EAAM,YAAA,EAAc,MAAA,EAAO,GAAI,wBAAA;AAEvC,EAAA,IAAI,iBAAiB,KAAA,EAAO;AAC1B,IAAA,MAAM,WAAA,GAAc,eAAA,CAAgB,KAAA,CAAM,MAAM,CAAA;AAIhD,IAAA,IACE,eACA,CAAC,QAAA,EAAU,QAAA,EAAU,iBAAA,EAAmB,SAAS,CAAA,CAAE,QAAA;AAAA,MACjD,WAAA,CAAY;AAAA,KACd,EACA;AAEA,MAAA,MAAM,EAAE,YAAA,EAAa,GAAIC,4BAAA,CAAY,MAAM,CAAA;AAC3C,MAAA,IAAI,iBAAiB,EAAA,EAAI;AACvB,QAAA,OAAO,EAAE,UAAU,MAAA,EAAO;AAAA,MAC5B;AAEA,MAAA,MAAM,YAAA,GAAe,YAAY,UAAA,CAAW;AAAA,QAC1C,GAAA,EAAK,KAAK,UAAU,CAAA,CAAA;AAAA,QACpB,MAAM,MAAA,CAAO,QAAA,CAAS,GAAG,CAAA,GAAI,MAAA,GAAS,GAAG,MAAM,CAAA,CAAA;AAAA,OAChD,CAAA;AACD,MAAA,OAAO;AAAA,QACL,QAAA,EAAU,MAAA;AAAA,QACV,QAAA,EAAU,WAAA,CAAY,cAAA,CAAe,YAAY;AAAA,OACnD;AAAA,IACF;AAAA,EACF;AAEA,EAAA,OAAO,EAAC;AACV;AAEA,MAAM,UAAA,CAAW;AAAA,EACC,IAAA;AAAA,EACA,IAAA;AAAA,EAEhB,WAAA,CAAY,MAAW,IAAA,EAAe;AACpC,IAAA,IAAA,CAAK,IAAA,GAAO,IAAA;AACZ,IAAA,IAAA,CAAK,IAAA,GAAO,IAAA;AAAA,EACd;AACF;AAEO,MAAM,aAAA,GAAgBC,oBAAe,MAAA,CAAO;AAAA,EACjD,IAAIC,UAAK,EAAA,EAAI;AAAA,IACX,IAAA,EAAM,QAAA;AAAA,IACN,KAAA,EAAO,IAAA;AAAA,IACP,aAAA,EAAe,OAAM,CAAA,CAAiB,IAAA;AAAA,IACtC,SAAA,EAAW,CAAA,CAAA,KAAM,CAAA,CAAiB,IAAA,IAAQ,EAAA;AAAA,IAC1C,UAAA,EAAY,UAAA;AAAA,IACZ,WAAW,CAAC,IAAA,EAAc,SAAkB,IAAI,UAAA,CAAW,MAAM,IAAI;AAAA,GACtE,CAAA;AAAA,EACD,IAAIA,UAAK,MAAA,EAAQ;AAAA,IACf,IAAA,EAAM,SAAA;AAAA,IACN,KAAA,EAAO,IAAA;AAAA,IACP,aAAA,EAAe,OAAM,CAAA,CAAiB,IAAA;AAAA,IACtC,SAAA,EAAW,CAAA,CAAA,KAAM,CAAA,CAAiB,IAAA,IAAQ,EAAA;AAAA,IAC1C,UAAA,EAAY,UAAA;AAAA,IACZ,WAAW,CAAC,IAAA,EAAc,SAAkB,IAAI,UAAA,CAAW,MAAM,IAAI;AAAA,GACtE,CAAA;AAAA,EACD,IAAIA,UAAK,EAAA,EAAI;AAAA,IACX,IAAA,EAAM,UAAA;AAAA,IACN,KAAA,EAAO,IAAA;AAAA,IACP,aAAA,EAAe,OAAM,CAAA,CAAiB,IAAA;AAAA,IACtC,SAAA,EAAW,CAAA,CAAA,KAAM,CAAA,CAAiB,IAAA,IAAQ,EAAA;AAAA,IAC1C,UAAA,EAAY,UAAA;AAAA,IACZ,WAAW,CAAC,IAAA,EAAc,SAAkB,IAAI,UAAA,CAA
W,MAAM,IAAI;AAAA,GACtE;AACH,CAAC;AASM,MAAM,iBAAA,GAAoB,OAC/B,QAAA,EACA,WAAA,KACG;AACH,EAAA,IAAI;AAMF,IAAA,MAAM,aAAA,GAAgBC,qBAAA,CAAK,IAAA,CAAK,QAAA,EAAU,YAAY,CAAA;AACtD,IAAA,MAAM,eAAA,GAAkB,aAAa,IAAA,IAAQ,oBAAA;AAC7C,IAAA,MAAM,oBAAA,GAA6C;AAAA,MACjD,SAAA,EAAW,eAAA;AAAA,MACX,QAAA,EAAU,MAAA;AAAA,MACV,OAAA,EAAS,CAAC,eAAe;AAAA,KAC3B;AAEA,IAAA,MAAMC,mBAAA,CAAG,SAAA;AAAA,MACP,aAAA;AAAA,MACAC,sBAAK,IAAA,CAAK,oBAAA,EAAsB,EAAE,MAAA,EAAQ,eAAe;AAAA,KAC3D;AAAA,EACF,SAAS,KAAA,EAAO;AACd,IAAA,MAAM,IAAIC,qBAAA,CAAe,oCAAA,EAAsC,KAAK,CAAA;AAAA,EACtE;AACF;AAWO,MAAM,YAAA,GAAe,OAC1B,QAAA,EACA,OAAA,KAC2E;AAC3E,EAAA,IAAI,aAAA;AACJ,EAAA,IAAI,mBAAA;AACJ,EAAA,IAAI;AACF,IAAA,IAAI,SAAS,oBAAA,EAAsB;AACjC,MAAA,aAAA,GAAgBH,qBAAA,CAAK,IAAA,CAAK,QAAA,EAAU,OAAA,CAAQ,oBAAoB,CAAA;AAChE,MAAA,IAAI,CAAE,MAAMC,mBAAA,CAAG,UAAA,CAAW,aAAa,CAAA,EAAI;AACzC,QAAA,MAAM,IAAI,KAAA,CAAM,CAAA,mBAAA,EAAsB,aAAa,CAAA,eAAA,CAAiB,CAAA;AAAA,MACtE;AAEA,MAAA,mBAAA,GAAsB,MAAMA,mBAAA,CAAG,QAAA,CAAS,aAAA,EAAe,MAAM,CAAA;AAC7D,MAAA,OAAO;AAAA,QACL,IAAA,EAAM,aAAA;AAAA,QACN,OAAA,EAAS,mBAAA;AAAA,QACT,iBAAA,EAAmB;AAAA,OACrB;AAAA,IACF;AAEA,IAAA,aAAA,GAAgBD,qBAAA,CAAK,IAAA,CAAK,QAAA,EAAU,aAAa,CAAA;AACjD,IAAA,IAAI,MAAMC,mBAAA,CAAG,UAAA,CAAW,aAAa,CAAA,EAAG;AACtC,MAAA,mBAAA,GAAsB,MAAMA,mBAAA,CAAG,QAAA,CAAS,aAAA,EAAe,MAAM,CAAA;AAC7D,MAAA,OAAO;AAAA,QACL,IAAA,EAAM,aAAA;AAAA,QACN,OAAA,EAAS,mBAAA;AAAA,QACT,iBAAA,EAAmB;AAAA,OACrB;AAAA,IACF;AAEA,IAAA,aAAA,GAAgBD,qBAAA,CAAK,IAAA,CAAK,QAAA,EAAU,YAAY,CAAA;AAChD,IAAA,IAAI,MAAMC,mBAAA,CAAG,UAAA,CAAW,aAAa,CAAA,EAAG;AACtC,MAAA,mBAAA,GAAsB,MAAMA,mBAAA,CAAG,QAAA,CAAS,aAAA,EAAe,MAAM,CAAA;AAC7D,MAAA,OAAO;AAAA,QACL,IAAA,EAAM,aAAA;AAAA,QACN,OAAA,EAAS,mBAAA;AAAA,QACT,iBAAA,EAAmB;AAAA,OACrB;AAAA,IACF;AAGA,IAAA,MAAM,iBAAA,CAAkB,UAAU,OAAO,CAAA;AACzC,IAAA,mBAAA,GAAsB,MAAMA,mBAAA,CAAG,QAAA,CAAS,aAAA,EAAe,MAAM,CAAA;AAAA,EAC/D,SAAS,KAAA,EAAO;AACd,IAAA,MAAM,IAAIE,qBAAA;AAAA,MACR,4FAAA;AAAA,MACA;AAAA,KACF;AAAA,EACF;AAEA,EAAA,OAAO;AAAA,IACL,IAAA,EAAM,aAAA;AAAA,IACN,OAAA,EAAS,mBAAA;AAAA,IACT,iBAAA,EAAmB;AAAA,GACrB;AACF;AAWO,MAAM,kBAAA,GAAqB,OAChC,QAAA,EACA,mBAAA,KACgC;AAChC,EAAA,MAAM,SAAA,GAAYD,qBAAA,CAAK,IAAA,CAAK,mBAAA,EAAqB;AAAA,IAC/C,MAAA,EAAQ;AAAA,GACT,CAAA;AAED,EAAA,IAAI,SAAA,KAAc,IAAA,IAAQ,OAAO,SAAA,KAAc,QAAA,EAAU;AACvD,IAAA,OAAO,MAAA;AAAA,EACT;AAEA,EAAA,MAAM,eAAA,GAAuC,SAAA;AAC7C,EAAA,IACE,eAAA,CAAgB,QAAA,IAChB,CAACE,4BAAA,CAAY,QAAA,EAAUC,aAAY,QAAA,EAAU,eAAA,CAAgB,QAAQ,CAAC,CAAA,EACtE;AACA,IAAA,MAAM,IAAI,KAAA;AAAA,MACR,CAAA;AAAA,+FAAA;AAAA,KAEF;AAAA,EACF;AACA,EAAA,OAAO,eAAA,CAAgB,QAAA;AACzB;AAOO,MAAM,qBAAqB,OAAO;AAAA,EACvC,QAAA;AAAA,EACA,MAAA;AAAA,EACA,OAAA,GAAU;AACZ,CAAA,KAIM;AACJ,EAAA,MAAM,QAAA,GAAWL,qBAAA,CAAK,IAAA,CAAK,QAAA,EAAU,OAAO,CAAA;AAC5C,EAAA,MAAM,WAAA,GAAcA,qBAAA,CAAK,IAAA,CAAK,QAAA,EAAU,UAAU,CAAA;AAElD,EAAA,IAAI,MAAMC,mBAAA,CAAG,UAAA,CAAW,WAAW,CAAA,EAAG;AACpC,IAAA;AAAA,EACF;AACA,EAAA,MAAA,CAAO,KAAK,CAAA,EAAGD,qBAAA,CAAK,KAAK,OAAA,EAAS,UAAU,CAAC,CAAA,WAAA,CAAa,CAAA;AAC1D,EAAA,MAAM,SAAA,GAAY;AAAA,IAChBA,qBAAA,CAAK,IAAA,CAAK,QAAA,EAAU,WAAW,CAAA;AAAA,IAC/BA,qBAAA,CAAK,IAAA,CAAK,QAAA,EAAU,WAAW,CAAA;AAAA,IAC/BA,qBAAA,CAAK,IAAA,CAAK,QAAA,EAAU,WAAW,CAAA;AAAA,IAC/BA,qBAAA,CAAK,IAAA,CAAK,QAAA,EAAU,WAAW;AAAA,GACjC;AAEA,EAAA,MAAMC,mBAAA,CAAG,UAAU,QAAQ,CAAA;AAC3B,EAAA,KAAA,MAAW,YAAY,SAAA,EAAW;AAChC,IAAA,IAAI;AACF,MAAA,MAAMA,mBAAA,CAAG,QAAA,CAAS,QAAA,EAAU,WAAW,CAAA;AACvC,MAAA;AAAA,IACF,SAAS,KAAA,EAAO;AACd,MAAA,MAAA,CAAO,KAAK,CAAA,EAAGD,qBAAA,CAAK,SAAS,QAAA,EAAU,QAAQ,CAAC,CAAA,WAAA,CAAa,CAAA;AAAA,IAC/D;AAAA,EACF;AAEA,EAAA,MAAA,CAAO,IAAA;AAAA,IACL,CAAA,0EAAA,EAA6E;AAAA,MAC3E,WAAA;AAAA,MACA,GAAG;AAAA,KACL,CAAE,IAAA,CAAK,GAAG,CAAC,CAAA,QAAA;
AAAA,GACb;AACF;AASO,MAAM,sBAAA,GAAyB,OACpC,oBAAA,EACA,MAAA,KACkB;AAClB,EAAA,MAAM,mBAAA,GAAsB,oBAAA,CACzB,KAAA,CAAMA,qBAAA,CAAK,GAAG,CAAA,CACd,KAAA,CAAM,CAAA,EAAG,EAAE,CAAA,CACX,IAAA,CAAKA,qBAAA,CAAK,GAAG,CAAA;AAEhB,EAAA,IAAI;AACF,IAAA,MAAMC,mBAAA,CAAG,MAAA,CAAO,oBAAA,EAAsBA,mBAAA,CAAG,UAAU,IAAI,CAAA;AAAA,EACzD,SAAS,GAAA,EAAK;AAEZ,IAAA,MAAMA,oBAAG,SAAA,CAAU,oBAAA,EAAsB,IAAA,CAAK,KAAA,CAAM,IAAI,CAAC,CAAA;AAAA,EAC3D;AAEA,EAAA,IAAI,IAAA;AACJ,EAAA,IAAI;AACF,IAAA,IAAA,GAAO,MAAMA,mBAAA,CAAG,QAAA,CAAS,oBAAoB,CAAA;AAAA,EAC/C,SAAS,GAAA,EAAK;AACZ,IAAAK,kBAAA,CAAY,GAAG,CAAA;AACf,IAAA,MAAM,OAAA,GAAU,CAAA,gBAAA,EAAmB,oBAAoB,CAAA,YAAA,EAAe,IAAI,OAAO,CAAA,CAAA;AACjF,IAAA,MAAA,CAAO,MAAM,OAAO,CAAA;AACpB,IAAA,MAAM,IAAI,MAAM,OAAO,CAAA;AAAA,EACzB;AAEA,EAAA,IAAA,CAAK,eAAA,GAAkB,KAAK,GAAA,EAAI;AAIhC,EAAA,IAAI;AACF,IAAA,IAAA,CAAK,KAAA,GAAA,CAAS,MAAMC,8BAAA,CAAuB,mBAAmB,CAAA,EAAG,GAAA;AAAA,MAAI,CAAA,IAAA,KACnE,KAAK,OAAA,CAAQ,CAAA,EAAG,mBAAmB,CAAA,EAAGP,qBAAA,CAAK,GAAG,CAAA,CAAA,EAAI,EAAE;AAAA,KACtD;AAAA,EACF,SAAS,GAAA,EAAK;AACZ,IAAAM,kBAAA,CAAY,GAAG,CAAA;AACf,IAAA,IAAA,CAAK,QAAQ,EAAC;AACd,IAAA,MAAA,CAAO,IAAA,CAAK,CAAA,sCAAA,EAAyC,GAAA,CAAI,OAAO,CAAA,CAAE,CAAA;AAAA,EACpE;AAEA,EAAA,MAAML,mBAAA,CAAG,SAAA,CAAU,oBAAA,EAAsB,IAAI,CAAA;AAC7C,EAAA;AACF;AAUO,MAAM,iBAAA,GAAoB,OAC/B,oBAAA,EACA,IAAA,KACkB;AAClB,EAAA,MAAM,IAAA,GAAO,MAAMA,mBAAA,CAAG,QAAA,CAAS,oBAAoB,CAAA;AACnD,EAAA,IAAA,CAAK,IAAA,GAAO,IAAA;AACZ,EAAA,MAAMA,mBAAA,CAAG,SAAA,CAAU,oBAAA,EAAsB,IAAI,CAAA;AAC/C;;;;;;;;;;;;;"}
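
Aside from the switch to the node:path specifier, the sourcesContent recorded in the map above also shows the MKDOCS_SCHEMA helper, which extends js-yaml's DEFAULT_SCHEMA with catch-all multi types so that MkDocs config files containing unknown tags (for example !!python/name:... values used by MkDocs plugins) can be loaded instead of throwing. Below is a minimal standalone sketch of that technique; it is an illustration rather than code from the package, and only the scalar variant is shown.

```ts
// Standalone illustration (not code from the package) of the catch-all
// js-yaml schema technique used by MKDOCS_SCHEMA above.
import yaml, { DEFAULT_SCHEMA, Type } from 'js-yaml';

// Opaque holder for values whose tag js-yaml does not recognise.
class UnknownTag {
  constructor(public readonly data: unknown, public readonly type?: string) {}
}

const PERMISSIVE_SCHEMA = DEFAULT_SCHEMA.extend([
  // `multi: true` with an empty name makes this type match any unknown scalar tag.
  new Type('', {
    kind: 'scalar',
    multi: true,
    representName: o => (o as UnknownTag).type,
    represent: o => (o as UnknownTag).data ?? '',
    instanceOf: UnknownTag,
    construct: (data, type) => new UnknownTag(data, type),
  }),
]);

// A typical MkDocs config fragment with a Python-object tag; DEFAULT_SCHEMA
// alone would throw a YAMLException on it.
const mkdocsYml = [
  'site_name: Example Docs',
  'markdown_extensions:',
  '  - pymdownx.emoji:',
  '      emoji_index: !!python/name:materialx.emoji.twemoji',
].join('\n');

console.log(yaml.load(mkdocsYml, { schema: PERMISSIVE_SCHEMA }));
```
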
package/dist/stages/generate/techdocs.cjs.js.map
CHANGED
@@ -1 +1 @@
1    | -
{"version":3,"file":"techdocs.cjs.js","sources":["../../../src/stages/generate/techdocs.ts"],"sourcesContent":["/*\n * Copyright 2020 The Backstage Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Config } from '@backstage/config';\nimport path from 'path';\nimport {\n ScmIntegrationRegistry,\n ScmIntegrations,\n} from '@backstage/integration';\nimport {\n createOrUpdateMetadata,\n getMkdocsYml,\n patchIndexPreBuild,\n runCommand,\n storeEtagMetadata,\n validateMkdocsYaml,\n} from './helpers';\n\nimport {\n patchMkdocsYmlPreBuild,\n patchMkdocsYmlWithPlugins,\n} from './mkdocsPatchers';\nimport {\n GeneratorBase,\n GeneratorConfig,\n GeneratorOptions,\n GeneratorRunInType,\n GeneratorRunOptions,\n} from './types';\nimport { ForwardedError } from '@backstage/errors';\nimport { DockerContainerRunner } from './DockerContainerRunner';\nimport { LoggerService } from '@backstage/backend-plugin-api';\nimport { TechDocsContainerRunner } from './types';\n\n/**\n * Generates documentation files\n * @public\n */\nexport class TechdocsGenerator implements GeneratorBase {\n /**\n * The default docker image (and version) used to generate content. Public\n * and static so that techdocs-node consumers can use the same version.\n *\n * See {@link https://hub.docker.com/r/spotify/techdocs/tags} for list of available versions.\n */\n public static readonly defaultDockerImage = 'spotify/techdocs:v1.2.8';\n private readonly logger: LoggerService;\n private readonly containerRunner?: TechDocsContainerRunner;\n private readonly options: GeneratorConfig;\n private readonly scmIntegrations: ScmIntegrationRegistry;\n\n /**\n * Returns a instance of TechDocs generator\n * @param config - A Backstage configuration\n * @param options - Options to configure the generator\n */\n static fromConfig(config: Config, options: GeneratorOptions) {\n const { containerRunner, logger } = options;\n const scmIntegrations = ScmIntegrations.fromConfig(config);\n return new TechdocsGenerator({\n logger,\n containerRunner,\n config,\n scmIntegrations,\n });\n }\n\n constructor(options: {\n logger: LoggerService;\n containerRunner?: TechDocsContainerRunner;\n config: Config;\n scmIntegrations: ScmIntegrationRegistry;\n }) {\n this.logger = options.logger;\n this.options = readGeneratorConfig(options.config, options.logger);\n this.containerRunner = options.containerRunner;\n this.scmIntegrations = options.scmIntegrations;\n }\n\n /** {@inheritDoc GeneratorBase.run} */\n public async run(options: GeneratorRunOptions): Promise<void> {\n const {\n inputDir,\n outputDir,\n parsedLocationAnnotation,\n etag,\n logger: childLogger,\n logStream,\n siteOptions,\n runAsDefaultUser,\n } = options;\n\n // Do some updates to mkdocs.yml before generating docs e.g. 
adding repo_url\n const { path: mkdocsYmlPath, content } = await getMkdocsYml(\n inputDir,\n siteOptions,\n );\n\n // validate the docs_dir first\n const docsDir = await validateMkdocsYaml(inputDir, content);\n\n if (parsedLocationAnnotation) {\n await patchMkdocsYmlPreBuild(\n mkdocsYmlPath,\n childLogger,\n parsedLocationAnnotation,\n this.scmIntegrations,\n );\n }\n\n if (this.options.legacyCopyReadmeMdToIndexMd) {\n await patchIndexPreBuild({ inputDir, logger: childLogger, docsDir });\n }\n\n // patch the list of mkdocs plugins\n const defaultPlugins = this.options.defaultPlugins ?? [];\n\n if (\n !this.options.omitTechdocsCoreMkdocsPlugin &&\n !defaultPlugins.includes('techdocs-core')\n ) {\n defaultPlugins.push('techdocs-core');\n }\n\n await patchMkdocsYmlWithPlugins(mkdocsYmlPath, childLogger, defaultPlugins);\n\n // Directories to bind on container\n const mountDirs = {\n [inputDir]: '/input',\n [outputDir]: '/output',\n };\n\n try {\n switch (this.options.runIn) {\n case 'local':\n await runCommand({\n command: 'mkdocs',\n args: ['build', '-d', outputDir, '-v'],\n options: {\n cwd: inputDir,\n },\n logStream,\n });\n childLogger.info(\n `Successfully generated docs from ${inputDir} into ${outputDir} using local mkdocs`,\n );\n break;\n case 'docker': {\n const containerRunner =\n this.containerRunner || new DockerContainerRunner();\n await containerRunner.runContainer({\n imageName:\n this.options.dockerImage ?? TechdocsGenerator.defaultDockerImage,\n args: ['build', '-d', '/output'],\n logStream,\n mountDirs,\n workingDir: '/input',\n // Set the home directory inside the container as something that applications can\n // write to, otherwise they will just fail trying to write to /\n envVars: { HOME: '/tmp' },\n pullImage: this.options.pullImage,\n defaultUser: runAsDefaultUser,\n });\n childLogger.info(\n `Successfully generated docs from ${inputDir} into ${outputDir} using techdocs-container`,\n );\n break;\n }\n default:\n throw new Error(\n `Invalid config value \"${this.options.runIn}\" provided in 'techdocs.generators.techdocs'.`,\n );\n }\n } catch (error) {\n this.logger.debug(\n `Failed to generate docs from ${inputDir} into ${outputDir}`,\n );\n throw new ForwardedError(\n `Failed to generate docs from ${inputDir} into ${outputDir}`,\n error,\n );\n }\n\n /**\n * Post Generate steps\n */\n\n // Add build timestamp and files to techdocs_metadata.json\n // Creates techdocs_metadata.json if file does not exist.\n await createOrUpdateMetadata(\n path.join(outputDir, 'techdocs_metadata.json'),\n childLogger,\n );\n\n // Add etag of the prepared tree to techdocs_metadata.json\n // Assumes that the file already exists.\n if (etag) {\n await storeEtagMetadata(\n path.join(outputDir, 'techdocs_metadata.json'),\n etag,\n );\n }\n }\n}\n\nexport function readGeneratorConfig(\n config: Config,\n logger: LoggerService,\n): GeneratorConfig {\n const legacyGeneratorType = config.getOptionalString(\n 'techdocs.generators.techdocs',\n ) as GeneratorRunInType;\n\n if (legacyGeneratorType) {\n logger.warn(\n `The 'techdocs.generators.techdocs' configuration key is deprecated and will be removed in the future. Please use 'techdocs.generator' instead. 
` +\n `See here https://backstage.io/docs/features/techdocs/configuration`,\n );\n }\n\n return {\n runIn:\n legacyGeneratorType ??\n config.getOptionalString('techdocs.generator.runIn') ??\n 'docker',\n dockerImage: config.getOptionalString('techdocs.generator.dockerImage'),\n pullImage: config.getOptionalBoolean('techdocs.generator.pullImage'),\n omitTechdocsCoreMkdocsPlugin: config.getOptionalBoolean(\n 'techdocs.generator.mkdocs.omitTechdocsCorePlugin',\n ),\n legacyCopyReadmeMdToIndexMd: config.getOptionalBoolean(\n 'techdocs.generator.mkdocs.legacyCopyReadmeMdToIndexMd',\n ),\n defaultPlugins: config.getOptionalStringArray(\n 'techdocs.generator.mkdocs.defaultPlugins',\n ),\n };\n}\n"],"names":["ScmIntegrations","getMkdocsYml","validateMkdocsYaml","patchMkdocsYmlPreBuild","patchIndexPreBuild","patchMkdocsYmlWithPlugins","runCommand","DockerContainerRunner","ForwardedError","createOrUpdateMetadata","path","storeEtagMetadata"],"mappings":";;;;;;;;;;;;;AAmDO,MAAM,iBAAA,CAA2C;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOtD,OAAuB,kBAAA,GAAqB,yBAAA;AAAA,EAC3B,MAAA;AAAA,EACA,eAAA;AAAA,EACA,OAAA;AAAA,EACA,eAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOjB,OAAO,UAAA,CAAW,MAAA,EAAgB,OAAA,EAA2B;AAC3D,IAAA,MAAM,EAAE,eAAA,EAAiB,MAAA,EAAO,GAAI,OAAA;AACpC,IAAA,MAAM,eAAA,GAAkBA,2BAAA,CAAgB,UAAA,CAAW,MAAM,CAAA;AACzD,IAAA,OAAO,IAAI,iBAAA,CAAkB;AAAA,MAC3B,MAAA;AAAA,MACA,eAAA;AAAA,MACA,MAAA;AAAA,MACA;AAAA,KACD,CAAA;AAAA,EACH;AAAA,EAEA,YAAY,OAAA,EAKT;AACD,IAAA,IAAA,CAAK,SAAS,OAAA,CAAQ,MAAA;AACtB,IAAA,IAAA,CAAK,OAAA,GAAU,mBAAA,CAAoB,OAAA,CAAQ,MAAA,EAAQ,QAAQ,MAAM,CAAA;AACjE,IAAA,IAAA,CAAK,kBAAkB,OAAA,CAAQ,eAAA;AAC/B,IAAA,IAAA,CAAK,kBAAkB,OAAA,CAAQ,eAAA;AAAA,EACjC;AAAA;AAAA,EAGA,MAAa,IAAI,OAAA,EAA6C;AAC5D,IAAA,MAAM;AAAA,MACJ,QAAA;AAAA,MACA,SAAA;AAAA,MACA,wBAAA;AAAA,MACA,IAAA;AAAA,MACA,MAAA,EAAQ,WAAA;AAAA,MACR,SAAA;AAAA,MACA,WAAA;AAAA,MACA;AAAA,KACF,GAAI,OAAA;AAGJ,IAAA,MAAM,EAAE,IAAA,EAAM,aAAA,EAAe,OAAA,KAAY,MAAMC,oBAAA;AAAA,MAC7C,QAAA;AAAA,MACA;AAAA,KACF;AAGA,IAAA,MAAM,OAAA,GAAU,MAAMC,0BAAA,CAAmB,QAAA,EAAU,OAAO,CAAA;AAE1D,IAAA,IAAI,wBAAA,EAA0B;AAC5B,MAAA,MAAMC,qCAAA;AAAA,QACJ,aAAA;AAAA,QACA,WAAA;AAAA,QACA,wBAAA;AAAA,QACA,IAAA,CAAK;AAAA,OACP;AAAA,IACF;AAEA,IAAA,IAAI,IAAA,CAAK,QAAQ,2BAAA,EAA6B;AAC5C,MAAA,MAAMC,2BAAmB,EAAE,QAAA,EAAU,MAAA,EAAQ,WAAA,EAAa,SAAS,CAAA;AAAA,IACrE;AAGA,IAAA,MAAM,cAAA,GAAiB,IAAA,CAAK,OAAA,CAAQ,cAAA,IAAkB,EAAC;AAEvD,IAAA,IACE,CAAC,KAAK,OAAA,CAAQ,4BAAA,IACd,CAAC,cAAA,CAAe,QAAA,CAAS,eAAe,CAAA,EACxC;AACA,MAAA,cAAA,CAAe,KAAK,eAAe,CAAA;AAAA,IACrC;AAEA,IAAA,MAAMC,wCAAA,CAA0B,aAAA,EAAe,WAAA,EAAa,cAAc,CAAA;AAG1E,IAAA,MAAM,SAAA,GAAY;AAAA,MAChB,CAAC,QAAQ,GAAG,QAAA;AAAA,MACZ,CAAC,SAAS,GAAG;AAAA,KACf;AAEA,IAAA,IAAI;AACF,MAAA,QAAQ,IAAA,CAAK,QAAQ,KAAA;AAAO,QAC1B,KAAK,OAAA;AACH,UAAA,MAAMC,kBAAA,CAAW;AAAA,YACf,OAAA,EAAS,QAAA;AAAA,YACT,IAAA,EAAM,CAAC,OAAA,EAAS,IAAA,EAAM,WAAW,IAAI,CAAA;AAAA,YACrC,OAAA,EAAS;AAAA,cACP,GAAA,EAAK;AAAA,aACP;AAAA,YACA;AAAA,WACD,CAAA;AACD,UAAA,WAAA,CAAY,IAAA;AAAA,YACV,CAAA,iCAAA,EAAoC,QAAQ,CAAA,MAAA,EAAS,SAAS,CAAA,mBAAA;AAAA,WAChE;AACA,UAAA;AAAA,QACF,KAAK,QAAA,EAAU;AACb,UAAA,MAAM,eAAA,GACJ,IAAA,CAAK,eAAA,IAAmB,IAAIC,2CAAA,EAAsB;AACpD,UAAA,MAAM,gBAAgB,YAAA,CAAa;AAAA,YACjC,SAAA,EACE,IAAA,CAAK,OAAA,CAAQ,WAAA,IAAe,iBAAA,CAAkB,kBAAA;AAAA,YAChD,IAAA,EAAM,CAAC,OAAA,EAAS,IAAA,EAAM,SAAS,CAAA;AAAA,YAC/B,SAAA;AAAA,YACA,SAAA;AAAA,YACA,UAAA,EAAY,QAAA;AAAA;AAAA;AAAA,YAGZ,OAAA,EAAS,EAAE,IAAA,EAAM,MAAA,EAAO;AAAA,YACxB,SAAA,EAAW,KAAK,OAAA,CAAQ,SAAA;AAAA,YACxB,WAAA,EAAa;AAAA,WACd,CAAA;AACD,UAAA,WAAA,CAAY,IAAA;AAAA,YACV,CAAA,iCAAA,EAAoC,QAAQ,CAAA,MAAA,EAAS,SAAS,CAAA,yBAAA;AAAA,WAChE;AACA,UAAA;AAAA,QACF;AAAA,QACA;A
ACE,UAAA,MAAM,IAAI,KAAA;AAAA,YACR,CAAA,sBAAA,EAAyB,IAAA,CAAK,OAAA,CAAQ,KAAK,CAAA,6CAAA;AAAA,WAC7C;AAAA;AACJ,IACF,SAAS,KAAA,EAAO;AACd,MAAA,IAAA,CAAK,MAAA,CAAO,KAAA;AAAA,QACV,CAAA,6BAAA,EAAgC,QAAQ,CAAA,MAAA,EAAS,SAAS,CAAA;AAAA,OAC5D;AACA,MAAA,MAAM,IAAIC,qBAAA;AAAA,QACR,CAAA,6BAAA,EAAgC,QAAQ,CAAA,MAAA,EAAS,SAAS,CAAA,CAAA;AAAA,QAC1D;AAAA,OACF;AAAA,IACF;AAQA,IAAA,MAAMC,8BAAA;AAAA,MACJC,qBAAA,CAAK,IAAA,CAAK,SAAA,EAAW,wBAAwB,CAAA;AAAA,MAC7C;AAAA,KACF;AAIA,IAAA,IAAI,IAAA,EAAM;AACR,MAAA,MAAMC,yBAAA;AAAA,QACJD,qBAAA,CAAK,IAAA,CAAK,SAAA,EAAW,wBAAwB,CAAA;AAAA,QAC7C;AAAA,OACF;AAAA,IACF;AAAA,EACF;AACF;AAEO,SAAS,mBAAA,CACd,QACA,MAAA,EACiB;AACjB,EAAA,MAAM,sBAAsB,MAAA,CAAO,iBAAA;AAAA,IACjC;AAAA,GACF;AAEA,EAAA,IAAI,mBAAA,EAAqB;AACvB,IAAA,MAAA,CAAO,IAAA;AAAA,MACL,CAAA,iNAAA;AAAA,KAEF;AAAA,EACF;AAEA,EAAA,OAAO;AAAA,IACL,KAAA,EACE,mBAAA,IACA,MAAA,CAAO,iBAAA,CAAkB,0BAA0B,CAAA,IACnD,QAAA;AAAA,IACF,WAAA,EAAa,MAAA,CAAO,iBAAA,CAAkB,gCAAgC,CAAA;AAAA,IACtE,SAAA,EAAW,MAAA,CAAO,kBAAA,CAAmB,8BAA8B,CAAA;AAAA,IACnE,8BAA8B,MAAA,CAAO,kBAAA;AAAA,MACnC;AAAA,KACF;AAAA,IACA,6BAA6B,MAAA,CAAO,kBAAA;AAAA,MAClC;AAAA,KACF;AAAA,IACA,gBAAgB,MAAA,CAAO,sBAAA;AAAA,MACrB;AAAA;AACF,GACF;AACF;;;;;"}
    1 | +
{"version":3,"file":"techdocs.cjs.js","sources":["../../../src/stages/generate/techdocs.ts"],"sourcesContent":["/*\n * Copyright 2020 The Backstage Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Config } from '@backstage/config';\nimport path from 'node:path';\nimport {\n ScmIntegrationRegistry,\n ScmIntegrations,\n} from '@backstage/integration';\nimport {\n createOrUpdateMetadata,\n getMkdocsYml,\n patchIndexPreBuild,\n runCommand,\n storeEtagMetadata,\n validateMkdocsYaml,\n} from './helpers';\n\nimport {\n patchMkdocsYmlPreBuild,\n patchMkdocsYmlWithPlugins,\n} from './mkdocsPatchers';\nimport {\n GeneratorBase,\n GeneratorConfig,\n GeneratorOptions,\n GeneratorRunInType,\n GeneratorRunOptions,\n} from './types';\nimport { ForwardedError } from '@backstage/errors';\nimport { DockerContainerRunner } from './DockerContainerRunner';\nimport { LoggerService } from '@backstage/backend-plugin-api';\nimport { TechDocsContainerRunner } from './types';\n\n/**\n * Generates documentation files\n * @public\n */\nexport class TechdocsGenerator implements GeneratorBase {\n /**\n * The default docker image (and version) used to generate content. Public\n * and static so that techdocs-node consumers can use the same version.\n *\n * See {@link https://hub.docker.com/r/spotify/techdocs/tags} for list of available versions.\n */\n public static readonly defaultDockerImage = 'spotify/techdocs:v1.2.8';\n private readonly logger: LoggerService;\n private readonly containerRunner?: TechDocsContainerRunner;\n private readonly options: GeneratorConfig;\n private readonly scmIntegrations: ScmIntegrationRegistry;\n\n /**\n * Returns a instance of TechDocs generator\n * @param config - A Backstage configuration\n * @param options - Options to configure the generator\n */\n static fromConfig(config: Config, options: GeneratorOptions) {\n const { containerRunner, logger } = options;\n const scmIntegrations = ScmIntegrations.fromConfig(config);\n return new TechdocsGenerator({\n logger,\n containerRunner,\n config,\n scmIntegrations,\n });\n }\n\n constructor(options: {\n logger: LoggerService;\n containerRunner?: TechDocsContainerRunner;\n config: Config;\n scmIntegrations: ScmIntegrationRegistry;\n }) {\n this.logger = options.logger;\n this.options = readGeneratorConfig(options.config, options.logger);\n this.containerRunner = options.containerRunner;\n this.scmIntegrations = options.scmIntegrations;\n }\n\n /** {@inheritDoc GeneratorBase.run} */\n public async run(options: GeneratorRunOptions): Promise<void> {\n const {\n inputDir,\n outputDir,\n parsedLocationAnnotation,\n etag,\n logger: childLogger,\n logStream,\n siteOptions,\n runAsDefaultUser,\n } = options;\n\n // Do some updates to mkdocs.yml before generating docs e.g. 
adding repo_url\n const { path: mkdocsYmlPath, content } = await getMkdocsYml(\n inputDir,\n siteOptions,\n );\n\n // validate the docs_dir first\n const docsDir = await validateMkdocsYaml(inputDir, content);\n\n if (parsedLocationAnnotation) {\n await patchMkdocsYmlPreBuild(\n mkdocsYmlPath,\n childLogger,\n parsedLocationAnnotation,\n this.scmIntegrations,\n );\n }\n\n if (this.options.legacyCopyReadmeMdToIndexMd) {\n await patchIndexPreBuild({ inputDir, logger: childLogger, docsDir });\n }\n\n // patch the list of mkdocs plugins\n const defaultPlugins = this.options.defaultPlugins ?? [];\n\n if (\n !this.options.omitTechdocsCoreMkdocsPlugin &&\n !defaultPlugins.includes('techdocs-core')\n ) {\n defaultPlugins.push('techdocs-core');\n }\n\n await patchMkdocsYmlWithPlugins(mkdocsYmlPath, childLogger, defaultPlugins);\n\n // Directories to bind on container\n const mountDirs = {\n [inputDir]: '/input',\n [outputDir]: '/output',\n };\n\n try {\n switch (this.options.runIn) {\n case 'local':\n await runCommand({\n command: 'mkdocs',\n args: ['build', '-d', outputDir, '-v'],\n options: {\n cwd: inputDir,\n },\n logStream,\n });\n childLogger.info(\n `Successfully generated docs from ${inputDir} into ${outputDir} using local mkdocs`,\n );\n break;\n case 'docker': {\n const containerRunner =\n this.containerRunner || new DockerContainerRunner();\n await containerRunner.runContainer({\n imageName:\n this.options.dockerImage ?? TechdocsGenerator.defaultDockerImage,\n args: ['build', '-d', '/output'],\n logStream,\n mountDirs,\n workingDir: '/input',\n // Set the home directory inside the container as something that applications can\n // write to, otherwise they will just fail trying to write to /\n envVars: { HOME: '/tmp' },\n pullImage: this.options.pullImage,\n defaultUser: runAsDefaultUser,\n });\n childLogger.info(\n `Successfully generated docs from ${inputDir} into ${outputDir} using techdocs-container`,\n );\n break;\n }\n default:\n throw new Error(\n `Invalid config value \"${this.options.runIn}\" provided in 'techdocs.generators.techdocs'.`,\n );\n }\n } catch (error) {\n this.logger.debug(\n `Failed to generate docs from ${inputDir} into ${outputDir}`,\n );\n throw new ForwardedError(\n `Failed to generate docs from ${inputDir} into ${outputDir}`,\n error,\n );\n }\n\n /**\n * Post Generate steps\n */\n\n // Add build timestamp and files to techdocs_metadata.json\n // Creates techdocs_metadata.json if file does not exist.\n await createOrUpdateMetadata(\n path.join(outputDir, 'techdocs_metadata.json'),\n childLogger,\n );\n\n // Add etag of the prepared tree to techdocs_metadata.json\n // Assumes that the file already exists.\n if (etag) {\n await storeEtagMetadata(\n path.join(outputDir, 'techdocs_metadata.json'),\n etag,\n );\n }\n }\n}\n\nexport function readGeneratorConfig(\n config: Config,\n logger: LoggerService,\n): GeneratorConfig {\n const legacyGeneratorType = config.getOptionalString(\n 'techdocs.generators.techdocs',\n ) as GeneratorRunInType;\n\n if (legacyGeneratorType) {\n logger.warn(\n `The 'techdocs.generators.techdocs' configuration key is deprecated and will be removed in the future. Please use 'techdocs.generator' instead. 
` +\n `See here https://backstage.io/docs/features/techdocs/configuration`,\n );\n }\n\n return {\n runIn:\n legacyGeneratorType ??\n config.getOptionalString('techdocs.generator.runIn') ??\n 'docker',\n dockerImage: config.getOptionalString('techdocs.generator.dockerImage'),\n pullImage: config.getOptionalBoolean('techdocs.generator.pullImage'),\n omitTechdocsCoreMkdocsPlugin: config.getOptionalBoolean(\n 'techdocs.generator.mkdocs.omitTechdocsCorePlugin',\n ),\n legacyCopyReadmeMdToIndexMd: config.getOptionalBoolean(\n 'techdocs.generator.mkdocs.legacyCopyReadmeMdToIndexMd',\n ),\n defaultPlugins: config.getOptionalStringArray(\n 'techdocs.generator.mkdocs.defaultPlugins',\n ),\n };\n}\n"],"names":["ScmIntegrations","getMkdocsYml","validateMkdocsYaml","patchMkdocsYmlPreBuild","patchIndexPreBuild","patchMkdocsYmlWithPlugins","runCommand","DockerContainerRunner","ForwardedError","createOrUpdateMetadata","path","storeEtagMetadata"],"mappings":";;;;;;;;;;;;;AAmDO,MAAM,iBAAA,CAA2C;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOtD,OAAuB,kBAAA,GAAqB,yBAAA;AAAA,EAC3B,MAAA;AAAA,EACA,eAAA;AAAA,EACA,OAAA;AAAA,EACA,eAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAOjB,OAAO,UAAA,CAAW,MAAA,EAAgB,OAAA,EAA2B;AAC3D,IAAA,MAAM,EAAE,eAAA,EAAiB,MAAA,EAAO,GAAI,OAAA;AACpC,IAAA,MAAM,eAAA,GAAkBA,2BAAA,CAAgB,UAAA,CAAW,MAAM,CAAA;AACzD,IAAA,OAAO,IAAI,iBAAA,CAAkB;AAAA,MAC3B,MAAA;AAAA,MACA,eAAA;AAAA,MACA,MAAA;AAAA,MACA;AAAA,KACD,CAAA;AAAA,EACH;AAAA,EAEA,YAAY,OAAA,EAKT;AACD,IAAA,IAAA,CAAK,SAAS,OAAA,CAAQ,MAAA;AACtB,IAAA,IAAA,CAAK,OAAA,GAAU,mBAAA,CAAoB,OAAA,CAAQ,MAAA,EAAQ,QAAQ,MAAM,CAAA;AACjE,IAAA,IAAA,CAAK,kBAAkB,OAAA,CAAQ,eAAA;AAC/B,IAAA,IAAA,CAAK,kBAAkB,OAAA,CAAQ,eAAA;AAAA,EACjC;AAAA;AAAA,EAGA,MAAa,IAAI,OAAA,EAA6C;AAC5D,IAAA,MAAM;AAAA,MACJ,QAAA;AAAA,MACA,SAAA;AAAA,MACA,wBAAA;AAAA,MACA,IAAA;AAAA,MACA,MAAA,EAAQ,WAAA;AAAA,MACR,SAAA;AAAA,MACA,WAAA;AAAA,MACA;AAAA,KACF,GAAI,OAAA;AAGJ,IAAA,MAAM,EAAE,IAAA,EAAM,aAAA,EAAe,OAAA,KAAY,MAAMC,oBAAA;AAAA,MAC7C,QAAA;AAAA,MACA;AAAA,KACF;AAGA,IAAA,MAAM,OAAA,GAAU,MAAMC,0BAAA,CAAmB,QAAA,EAAU,OAAO,CAAA;AAE1D,IAAA,IAAI,wBAAA,EAA0B;AAC5B,MAAA,MAAMC,qCAAA;AAAA,QACJ,aAAA;AAAA,QACA,WAAA;AAAA,QACA,wBAAA;AAAA,QACA,IAAA,CAAK;AAAA,OACP;AAAA,IACF;AAEA,IAAA,IAAI,IAAA,CAAK,QAAQ,2BAAA,EAA6B;AAC5C,MAAA,MAAMC,2BAAmB,EAAE,QAAA,EAAU,MAAA,EAAQ,WAAA,EAAa,SAAS,CAAA;AAAA,IACrE;AAGA,IAAA,MAAM,cAAA,GAAiB,IAAA,CAAK,OAAA,CAAQ,cAAA,IAAkB,EAAC;AAEvD,IAAA,IACE,CAAC,KAAK,OAAA,CAAQ,4BAAA,IACd,CAAC,cAAA,CAAe,QAAA,CAAS,eAAe,CAAA,EACxC;AACA,MAAA,cAAA,CAAe,KAAK,eAAe,CAAA;AAAA,IACrC;AAEA,IAAA,MAAMC,wCAAA,CAA0B,aAAA,EAAe,WAAA,EAAa,cAAc,CAAA;AAG1E,IAAA,MAAM,SAAA,GAAY;AAAA,MAChB,CAAC,QAAQ,GAAG,QAAA;AAAA,MACZ,CAAC,SAAS,GAAG;AAAA,KACf;AAEA,IAAA,IAAI;AACF,MAAA,QAAQ,IAAA,CAAK,QAAQ,KAAA;AAAO,QAC1B,KAAK,OAAA;AACH,UAAA,MAAMC,kBAAA,CAAW;AAAA,YACf,OAAA,EAAS,QAAA;AAAA,YACT,IAAA,EAAM,CAAC,OAAA,EAAS,IAAA,EAAM,WAAW,IAAI,CAAA;AAAA,YACrC,OAAA,EAAS;AAAA,cACP,GAAA,EAAK;AAAA,aACP;AAAA,YACA;AAAA,WACD,CAAA;AACD,UAAA,WAAA,CAAY,IAAA;AAAA,YACV,CAAA,iCAAA,EAAoC,QAAQ,CAAA,MAAA,EAAS,SAAS,CAAA,mBAAA;AAAA,WAChE;AACA,UAAA;AAAA,QACF,KAAK,QAAA,EAAU;AACb,UAAA,MAAM,eAAA,GACJ,IAAA,CAAK,eAAA,IAAmB,IAAIC,2CAAA,EAAsB;AACpD,UAAA,MAAM,gBAAgB,YAAA,CAAa;AAAA,YACjC,SAAA,EACE,IAAA,CAAK,OAAA,CAAQ,WAAA,IAAe,iBAAA,CAAkB,kBAAA;AAAA,YAChD,IAAA,EAAM,CAAC,OAAA,EAAS,IAAA,EAAM,SAAS,CAAA;AAAA,YAC/B,SAAA;AAAA,YACA,SAAA;AAAA,YACA,UAAA,EAAY,QAAA;AAAA;AAAA;AAAA,YAGZ,OAAA,EAAS,EAAE,IAAA,EAAM,MAAA,EAAO;AAAA,YACxB,SAAA,EAAW,KAAK,OAAA,CAAQ,SAAA;AAAA,YACxB,WAAA,EAAa;AAAA,WACd,CAAA;AACD,UAAA,WAAA,CAAY,IAAA;AAAA,YACV,CAAA,iCAAA,EAAoC,QAAQ,CAAA,MAAA,EAAS,SAAS,CAAA,yBAAA;AAAA,WAChE;AACA,UAAA;AAAA,QACF;AAAA,QACA;A
ACE,UAAA,MAAM,IAAI,KAAA;AAAA,YACR,CAAA,sBAAA,EAAyB,IAAA,CAAK,OAAA,CAAQ,KAAK,CAAA,6CAAA;AAAA,WAC7C;AAAA;AACJ,IACF,SAAS,KAAA,EAAO;AACd,MAAA,IAAA,CAAK,MAAA,CAAO,KAAA;AAAA,QACV,CAAA,6BAAA,EAAgC,QAAQ,CAAA,MAAA,EAAS,SAAS,CAAA;AAAA,OAC5D;AACA,MAAA,MAAM,IAAIC,qBAAA;AAAA,QACR,CAAA,6BAAA,EAAgC,QAAQ,CAAA,MAAA,EAAS,SAAS,CAAA,CAAA;AAAA,QAC1D;AAAA,OACF;AAAA,IACF;AAQA,IAAA,MAAMC,8BAAA;AAAA,MACJC,qBAAA,CAAK,IAAA,CAAK,SAAA,EAAW,wBAAwB,CAAA;AAAA,MAC7C;AAAA,KACF;AAIA,IAAA,IAAI,IAAA,EAAM;AACR,MAAA,MAAMC,yBAAA;AAAA,QACJD,qBAAA,CAAK,IAAA,CAAK,SAAA,EAAW,wBAAwB,CAAA;AAAA,QAC7C;AAAA,OACF;AAAA,IACF;AAAA,EACF;AACF;AAEO,SAAS,mBAAA,CACd,QACA,MAAA,EACiB;AACjB,EAAA,MAAM,sBAAsB,MAAA,CAAO,iBAAA;AAAA,IACjC;AAAA,GACF;AAEA,EAAA,IAAI,mBAAA,EAAqB;AACvB,IAAA,MAAA,CAAO,IAAA;AAAA,MACL,CAAA,iNAAA;AAAA,KAEF;AAAA,EACF;AAEA,EAAA,OAAO;AAAA,IACL,KAAA,EACE,mBAAA,IACA,MAAA,CAAO,iBAAA,CAAkB,0BAA0B,CAAA,IACnD,QAAA;AAAA,IACF,WAAA,EAAa,MAAA,CAAO,iBAAA,CAAkB,gCAAgC,CAAA;AAAA,IACtE,SAAA,EAAW,MAAA,CAAO,kBAAA,CAAmB,8BAA8B,CAAA;AAAA,IACnE,8BAA8B,MAAA,CAAO,kBAAA;AAAA,MACnC;AAAA,KACF;AAAA,IACA,6BAA6B,MAAA,CAAO,kBAAA;AAAA,MAClC;AAAA,KACF;AAAA,IACA,gBAAgB,MAAA,CAAO,sBAAA;AAAA,MACrB;AAAA;AACF,GACF;AACF;;;;;"}
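
The sourcesContent in this map also records the generator's local run branch, in which docs are built by shelling out to MkDocs through the runCommand helper. Below is a minimal standalone sketch of that invocation pattern; the directory paths and the buildDocsLocally name are illustrative assumptions, and mkdocs (with the techdocs-core plugin) is assumed to be installed on the host.

```ts
// Minimal sketch of the local-build invocation recorded above; not code from
// the package. Equivalent of running `mkdocs build -d <outputDir> -v` from
// the prepared input directory.
import { spawn } from 'node:child_process';

async function buildDocsLocally(inputDir: string, outputDir: string): Promise<void> {
  await new Promise<void>((resolve, reject) => {
    const child = spawn('mkdocs', ['build', '-d', outputDir, '-v'], { cwd: inputDir });
    child.stdout.on('data', chunk => process.stdout.write(chunk));
    child.stderr.on('data', chunk => process.stderr.write(chunk));
    child.on('error', reject);
    child.on('close', code =>
      code === 0 ? resolve() : reject(new Error(`mkdocs exited with code ${code}`)),
    );
  });
}

// Example usage (hypothetical paths):
// buildDocsLocally('/tmp/techdocs-input', '/tmp/techdocs-output').catch(console.error);
```
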
@@ -10,7 +10,7 @@ var hpagent = require('hpagent');
10  10 |  var fs = require('fs-extra');
11  11 |  var JSON5 = require('json5');
12  12 |  var createLimiter = require('p-limit');
13     | -var path = require('path');
    13 | +var path = require('node:path');
14  14 |  var helpers = require('./helpers.cjs.js');
15  15 |  var integration = require('@backstage/integration');
16  16 |
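
The hunk above captures the substantive change repeated across the dist modules in this version: Node.js built-ins are now required through the node: scheme. A minimal sketch of the pattern, using an illustrative module rather than code from this package:

```ts
// Illustrative module, not from the package. In the TypeScript sources the
// specifier changes from 'path' to 'node:path'; the compiled CommonJS output
// then emits require('node:path'), which is what the hunk above shows.
import path from 'node:path';               // was: import path from 'path';
import { spawn } from 'node:child_process'; // was: from 'child_process';

// Behaviour is identical; the node: scheme only makes explicit that the
// specifier refers to a Node.js built-in module.
console.log(path.join('docs', 'index.md'));
spawn('node', ['--version'], { stdio: 'inherit' });
```
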