@backstage/plugin-techdocs-backend 0.9.2 → 0.10.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +125 -0
- package/README.md +6 -6
- package/config.d.ts +16 -22
- package/dist/index.cjs.js +16 -4
- package/dist/index.cjs.js.map +1 -1
- package/dist/index.d.ts +19 -17
- package/package.json +11 -10
package/CHANGELOG.md
CHANGED
@@ -1,5 +1,130 @@
 # @backstage/plugin-techdocs-backend

+## 0.10.3
+
+### Patch Changes
+
+- Updated dependencies
+  - @backstage/integration@0.6.5
+  - @backstage/catalog-client@0.4.0
+  - @backstage/catalog-model@0.9.3
+  - @backstage/backend-common@0.9.4
+  - @backstage/config@0.1.10
+
+## 0.10.2
+
+### Patch Changes
+
+- 1d346ba903: Modify the TechDocsCollator to be aware of the new TechDocs URL pattern, and modify the TechDocs in-context search to use correct casing when creating the initial filter.
+- Updated dependencies
+  - @backstage/backend-common@0.9.3
+  - @backstage/integration@0.6.4
+  - @backstage/techdocs-common@0.10.1
+
+## 0.10.1
+
+### Patch Changes
+
+- 30ed662a3: Adds in-context search to the TechDocs Reader component, using the existing search-backend to query for indexed search results scoped to a specific entity's TechDocs. Needs the TechDocsCollator enabled on the backend to work.
+
+  Adds extra information to indexed TechDocs documents for search.
+
+- a42a142c2: Errors encountered while attempting to load TechDocs search indices at
+  collation-time are now logged at `DEBUG` instead of `WARN` level.
+- Updated dependencies
+  - @backstage/techdocs-common@0.10.0
+  - @backstage/integration@0.6.3
+  - @backstage/search-common@0.2.0
+  - @backstage/catalog-model@0.9.1
+  - @backstage/backend-common@0.9.1
+
+## 0.10.0
+
+### Minor Changes
+
+- 58452cdb7: The OpenStack Swift client has been replaced with Trendyol's OpenStack Swift SDK.
+
+  ## Migration from the old OpenStack Swift configuration
+
+  Let's assume we have the old OpenStack Swift configuration here:
+
+  ```yaml
+  techdocs:
+    publisher:
+      type: 'openStackSwift'
+      openStackSwift:
+        containerName: 'name-of-techdocs-storage-bucket'
+        credentials:
+          username: ${OPENSTACK_SWIFT_STORAGE_USERNAME}
+          password: ${OPENSTACK_SWIFT_STORAGE_PASSWORD}
+        authUrl: ${OPENSTACK_SWIFT_STORAGE_AUTH_URL}
+        keystoneAuthVersion: ${OPENSTACK_SWIFT_STORAGE_AUTH_VERSION}
+        domainId: ${OPENSTACK_SWIFT_STORAGE_DOMAIN_ID}
+        domainName: ${OPENSTACK_SWIFT_STORAGE_DOMAIN_NAME}
+        region: ${OPENSTACK_SWIFT_STORAGE_REGION}
+  ```
+
+  ##### Step 1: Change the credential keys
+
+  Since the new SDK uses _Application Credentials_ to authenticate with OpenStack, we
+  need to change the key `credentials.username` to `credentials.id` and
+  `credentials.password` to `credentials.secret`, and use an Application Credential ID
+  and secret here. For more detail about credentials, see
+  [here](https://docs.openstack.org/api-ref/identity/v3/?expanded=password-authentication-with-unscoped-authorization-detail,authenticating-with-an-application-credential-detail#authenticating-with-an-application-credential).
+
+  ##### Step 2: Remove the unused keys
+
+  Since the new SDK doesn't use the old authentication flow, the keys
+  `openStackSwift.keystoneAuthVersion`, `openStackSwift.domainId`,
+  `openStackSwift.domainName` and `openStackSwift.region` are no longer needed, so you can remove them.
+
+  ##### Step 3: Add the Swift URL
+
+  The new SDK needs the OpenStack Swift connection URL to connect to Swift,
+  so you need to add a new key called `openStackSwift.swiftUrl` and set the
+  OpenStack Swift URL there. An example URL looks like this:
+  `https://example.com:6780/swift/v1`
+
+  ##### That's it!
+
+  Your new configuration should look like this:
+
+  ```yaml
+  techdocs:
+    publisher:
+      type: 'openStackSwift'
+      openStackSwift:
+        containerName: 'name-of-techdocs-storage-bucket'
+        credentials:
+          id: ${OPENSTACK_SWIFT_STORAGE_APPLICATION_CREDENTIALS_ID}
+          secret: ${OPENSTACK_SWIFT_STORAGE_APPLICATION_CREDENTIALS_SECRET}
+        authUrl: ${OPENSTACK_SWIFT_STORAGE_AUTH_URL}
+        swiftUrl: ${OPENSTACK_SWIFT_STORAGE_SWIFT_URL}
+  ```
+
+- c772d9a84: TechDocs sites can now be accessed using paths containing entity triplets of
+  any case (e.g. `/docs/namespace/KIND/name` or `/docs/namespace/kind/name`).
+
+  If you do not use an external storage provider for serving TechDocs, this is a
+  transparent change and no action is required from you.
+
+  If you _do_ use an external storage provider for serving TechDocs (one of\* GCS,
+  AWS S3, or Azure Blob Storage), you must run a migration command against your
+  storage provider before updating.
+
+  [A migration guide is available here](https://backstage.io/docs/features/techdocs/how-to-guides#how-to-migrate-from-techdocs-alpha-to-beta).
+
+  - (\*) We're seeking help from the community to bring OpenStack Swift support
+    [to feature parity](https://github.com/backstage/backstage/issues/6763) with the above.
+
+### Patch Changes
+
+- Updated dependencies
+  - @backstage/backend-common@0.9.0
+  - @backstage/integration@0.6.2
+  - @backstage/config@0.1.8
+  - @backstage/techdocs-common@0.9.0
+
 ## 0.9.2

 ### Patch Changes
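Note on the c772d9a84 entry: the new TechDocs URL pattern lower-cases the entity triplet when building paths such as the search index URL. The sketch below is an illustration only (this helper is not part of the package API; the real logic lives in `DefaultTechDocsCollator`, shown further down in this diff), with `legacyPathCasing` standing in for the `techdocs.legacyUseCaseSensitiveTripletPaths` escape hatch:

```ts
// Illustration of the new lower-cased triplet paths; not part of the package API.
function docsSearchIndexUrl(
  techDocsBaseUrl: string,
  entity: { namespace: string; kind: string; name: string },
  legacyPathCasing = false,
): string {
  // Unless the legacy flag is set, every part of the triplet is lower-cased.
  const normalize = (value: string) =>
    legacyPathCasing ? value : value.toLocaleLowerCase('en-US');
  const { namespace, kind, name } = entity;
  return `${techDocsBaseUrl}/static/docs/${normalize(namespace)}/${normalize(kind)}/${normalize(name)}/search/search_index.json`;
}

// Hypothetical base URL and entity name, for illustration:
// docsSearchIndexUrl('https://backstage.example.com/api/techdocs',
//   { namespace: 'default', kind: 'Component', name: 'artist-lookup' })
// => '.../static/docs/default/component/artist-lookup/search/search_index.json'
```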
package/README.md
CHANGED
@@ -4,22 +4,22 @@ This is the backend part of the techdocs plugin.

 ## Getting Started

-This backend plugin can be started in a standalone mode from
-
+This backend plugin can be started in a standalone mode directly from this package
+using `yarn start`. However, it will have limited functionality, and that process is
 most convenient when developing the techdocs backend plugin itself.

-To evaluate TechDocs and have a greater amount of functionality available, instead do
+To evaluate TechDocs and have a greater amount of functionality available, instead do:

 ```bash
-#
+# From your Backstage root directory
 cd packages/backend
 yarn start
 ```

 ## What techdocs-backend does

-This
-To configure various storage providers and building options, see http://backstage.io/docs/features/techdocs/configuration
+This provides serving and building of documentation for any entity.
+To configure various storage providers and building options, see http://backstage.io/docs/features/techdocs/configuration.

 The techdocs-backend re-exports the [techdocs-common](https://github.com/backstage/backstage/tree/master/packages/techdocs-common) package which has the features to prepare, generate and publish docs.
 The Publishers are also used to fetch the static documentation files and render them in TechDocs.
package/config.d.ts
CHANGED
@@ -137,15 +137,15 @@ export interface Config {
        */
       credentials: {
         /**
-         * (Required)
+         * (Required) Application Credential ID
          * @visibility secret
          */
-        username: string;
+        id: string;
         /**
-         * (Required)
+         * (Required) Application Credential Secret
          * @visibility secret
          */
-        password: string;
+        secret: string; // required
       };
       /**
        * (Required) Cloud Storage Container Name
@@ -158,26 +158,10 @@ export interface Config {
        */
       authUrl: string;
       /**
-       * (Optional) Keystone Auth Version
-       * If not set, 'v2.0' will be used.
+       * (Required) Swift URL
        * @visibility backend
        */
-      keystoneAuthVersion: string;
-      /**
-       * (Required) Domain Id
-       * @visibility backend
-       */
-      domainId: string;
-      /**
-       * (Required) Domain Name
-       * @visibility backend
-       */
-      domainName: string;
-      /**
-       * (Required) Region
-       * @visibility backend
-       */
-      region: string;
+      swiftUrl: string;
     };
   }
 | {
@@ -246,5 +230,15 @@ export interface Config {
      * @deprecated
      */
     storageUrl?: string;
+
+    /**
+     * (Optional and not recommended) Prior to version [0.x.y] of TechDocs, docs
+     * sites could only be accessed over paths with case-sensitive entity triplets
+     * e.g. (namespace/Kind/name). If you are upgrading from an older version of
+     * TechDocs and are unable to perform the necessary migration of files in your
+     * external storage, you can set this value to `true` to temporarily revert to
+     * the old, case-sensitive entity triplet behavior.
+     */
+    legacyUseCaseSensitiveTripletPaths?: boolean;
   };
 }
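For reference, a minimal sketch (not taken from this package) of how the reshaped keys read through the `@backstage/config` API; the concrete values below are placeholders:

```ts
import { ConfigReader } from '@backstage/config';

// Placeholder config shaped after the updated config.d.ts above.
const config = new ConfigReader({
  techdocs: {
    legacyUseCaseSensitiveTripletPaths: false,
    publisher: {
      type: 'openStackSwift',
      openStackSwift: {
        containerName: 'name-of-techdocs-storage-bucket',
        credentials: {
          id: 'application-credential-id',
          secret: 'application-credential-secret',
        },
        authUrl: 'https://keystone.example.com/v3',
        swiftUrl: 'https://example.com:6780/swift/v1',
      },
    },
  },
});

// The new credentials.id/secret and swiftUrl keys replace credentials.username/password
// and the removed keystoneAuthVersion/domainId/domainName/region options.
const swiftUrl = config.getString('techdocs.publisher.openStackSwift.swiftUrl');
const legacyPathCasing =
  config.getOptionalBoolean('techdocs.legacyUseCaseSensitiveTripletPaths') ?? false;
```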
package/dist/index.cjs.js
CHANGED
@@ -371,7 +371,8 @@ class DefaultTechDocsCollator {
     locationTemplate,
     logger,
     catalogClient: catalogClient$1,
-    parallelismLimit = 10
+    parallelismLimit = 10,
+    legacyPathCasing = false
   }) {
     this.type = "techdocs";
     this.discovery = discovery;
@@ -379,6 +380,11 @@ class DefaultTechDocsCollator {
     this.logger = logger;
     this.catalogClient = catalogClient$1 || new catalogClient.CatalogClient({discoveryApi: discovery});
     this.parallelismLimit = parallelismLimit;
+    this.legacyPathCasing = legacyPathCasing;
+  }
+  static fromConfig(config, options) {
+    const legacyPathCasing = config.getOptionalBoolean("techdocs.legacyUseCaseSensitiveTripletPaths") || false;
+    return new DefaultTechDocsCollator({...options, legacyPathCasing});
   }
   async execute() {
     const limit = pLimit__default['default'](this.parallelismLimit);
@@ -399,11 +405,11 @@ class DefaultTechDocsCollator {
       var _a, _b;
       return (_b = (_a = it.metadata) == null ? void 0 : _a.annotations) == null ? void 0 : _b["backstage.io/techdocs-ref"];
     }).map((entity) => limit(async () => {
-      const entityInfo = {
+      const entityInfo = DefaultTechDocsCollator.handleEntityInfoCasing(this.legacyPathCasing, {
         kind: entity.kind,
         namespace: entity.metadata.namespace || "default",
         name: entity.metadata.name
-      };
+      });
       try {
         const searchIndexResponse = await fetch__default['default'](DefaultTechDocsCollator.constructDocsIndexUrl(techDocsBaseUrl, entityInfo));
         const searchIndex = await searchIndexResponse.json();
@@ -416,6 +422,7 @@ class DefaultTechDocsCollator {
           ...entityInfo,
           path: doc.location
         }),
+        path: doc.location,
         ...entityInfo,
         componentType: ((_b = (_a = entity.spec) == null ? void 0 : _a.type) == null ? void 0 : _b.toString()) || "other",
         lifecycle: ((_c = entity.spec) == null ? void 0 : _c.lifecycle) || "",
@@ -423,7 +430,7 @@ class DefaultTechDocsCollator {
         };
       });
     } catch (e) {
-      this.logger.warn(`Failed to retrieve tech docs search index for entity ${entityInfo.namespace}/${entityInfo.kind}/${entityInfo.name}`, e);
+      this.logger.debug(`Failed to retrieve tech docs search index for entity ${entityInfo.namespace}/${entityInfo.kind}/${entityInfo.name}`, e);
       return [];
     }
   }));
@@ -439,6 +446,11 @@ class DefaultTechDocsCollator {
   static constructDocsIndexUrl(techDocsBaseUrl, entityInfo) {
     return `${techDocsBaseUrl}/static/docs/${entityInfo.namespace}/${entityInfo.kind}/${entityInfo.name}/search/search_index.json`;
   }
+  static handleEntityInfoCasing(legacyPaths, entityInfo) {
+    return legacyPaths ? entityInfo : Object.entries(entityInfo).reduce((acc, [key, value]) => {
+      return {...acc, [key]: value.toLocaleLowerCase("en-US")};
+    }, {});
+  }
 }

 exports.DefaultTechDocsCollator = DefaultTechDocsCollator;
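The compiled output above adds a static `fromConfig` factory so the collator can pick up `techdocs.legacyUseCaseSensitiveTripletPaths` from static configuration. A minimal sketch of using it, assuming the usual backend plugin environment supplies `config`, `discovery`, and `logger` (the option names follow the constructor shown in the diff):

```ts
import { PluginEndpointDiscovery } from '@backstage/backend-common';
import { Config } from '@backstage/config';
import { Logger } from 'winston';
import { DefaultTechDocsCollator } from '@backstage/plugin-techdocs-backend';

// Sketch only: prefer the new fromConfig factory over `new DefaultTechDocsCollator(...)`
// so the collator honors the legacy path-casing flag from app-config.
export function createTechDocsCollator(options: {
  config: Config;
  discovery: PluginEndpointDiscovery;
  logger: Logger;
}) {
  return DefaultTechDocsCollator.fromConfig(options.config, {
    discovery: options.discovery,
    logger: options.logger,
  });
}
```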
package/dist/index.cjs.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"index.cjs.js","sources":["../src/DocsBuilder/BuildMetadataStorage.ts","../src/DocsBuilder/builder.ts","../src/service/DocsSynchronizer.ts","../src/service/router.ts","../src/search/DefaultTechDocsCollator.ts"],"sourcesContent":["/*\n * Copyright 2020 The Backstage Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n// Entity uid: unix timestamp\nconst lastUpdatedRecord = {} as Record<string, number>;\n\n/**\n * Store timestamps of the most recent TechDocs update of each Entity. This is\n * used to avoid checking for an update on each and every request to TechDocs.\n */\nexport class BuildMetadataStorage {\n private entityUid: string;\n private lastUpdatedRecord: Record<string, number>;\n\n constructor(entityUid: string) {\n this.entityUid = entityUid;\n this.lastUpdatedRecord = lastUpdatedRecord;\n }\n\n setLastUpdated(): void {\n this.lastUpdatedRecord[this.entityUid] = Date.now();\n }\n\n getLastUpdated(): number | undefined {\n return this.lastUpdatedRecord[this.entityUid];\n }\n}\n\n/**\n * Return false if a check for update has happened in last 60 seconds.\n */\nexport const shouldCheckForUpdate = (entityUid: string) => {\n const lastUpdated = new BuildMetadataStorage(entityUid).getLastUpdated();\n if (lastUpdated) {\n // The difference is in milliseconds\n if (Date.now() - lastUpdated < 60 * 1000) {\n return false;\n }\n }\n return true;\n};\n","/*\n * Copyright 2020 The Backstage Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nimport {\n Entity,\n ENTITY_DEFAULT_NAMESPACE,\n stringifyEntityRef,\n} from '@backstage/catalog-model';\nimport { Config } from '@backstage/config';\nimport { NotModifiedError } from '@backstage/errors';\nimport { ScmIntegrationRegistry } from '@backstage/integration';\nimport {\n GeneratorBase,\n GeneratorBuilder,\n getLocationForEntity,\n PreparerBase,\n PreparerBuilder,\n PublisherBase,\n UrlPreparer,\n} from '@backstage/techdocs-common';\nimport fs from 'fs-extra';\nimport os from 'os';\nimport path from 'path';\nimport { Writable } from 'stream';\nimport { Logger } from 'winston';\nimport { BuildMetadataStorage } from './BuildMetadataStorage';\n\ntype DocsBuilderArguments = {\n preparers: PreparerBuilder;\n generators: GeneratorBuilder;\n publisher: PublisherBase;\n entity: Entity;\n logger: Logger;\n config: Config;\n scmIntegrations: ScmIntegrationRegistry;\n logStream?: Writable;\n};\n\nexport class DocsBuilder {\n private preparer: PreparerBase;\n private generator: 
GeneratorBase;\n private publisher: PublisherBase;\n private entity: Entity;\n private logger: Logger;\n private config: Config;\n private scmIntegrations: ScmIntegrationRegistry;\n private logStream: Writable | undefined;\n\n constructor({\n preparers,\n generators,\n publisher,\n entity,\n logger,\n config,\n scmIntegrations,\n logStream,\n }: DocsBuilderArguments) {\n this.preparer = preparers.get(entity);\n this.generator = generators.get(entity);\n this.publisher = publisher;\n this.entity = entity;\n this.logger = logger;\n this.config = config;\n this.scmIntegrations = scmIntegrations;\n this.logStream = logStream;\n }\n\n /**\n * Build the docs and return whether they have been newly generated or have been cached\n * @returns true, if the docs have been built. false, if the cached docs are still up-to-date.\n */\n public async build(): Promise<boolean> {\n if (!this.entity.metadata.uid) {\n throw new Error(\n 'Trying to build documentation for entity not in software catalog',\n );\n }\n\n /**\n * Prepare (and cache check)\n */\n\n this.logger.info(\n `Step 1 of 3: Preparing docs for entity ${stringifyEntityRef(\n this.entity,\n )}`,\n );\n\n // If available, use the etag stored in techdocs_metadata.json to\n // check if docs are outdated and need to be regenerated.\n let storedEtag: string | undefined;\n if (await this.publisher.hasDocsBeenGenerated(this.entity)) {\n try {\n storedEtag = (\n await this.publisher.fetchTechDocsMetadata({\n namespace:\n this.entity.metadata.namespace ?? ENTITY_DEFAULT_NAMESPACE,\n kind: this.entity.kind,\n name: this.entity.metadata.name,\n })\n ).etag;\n } catch (err) {\n // Proceed with a fresh build\n this.logger.warn(\n `Unable to read techdocs_metadata.json, proceeding with fresh build, error ${err}.`,\n );\n }\n }\n\n let preparedDir: string;\n let newEtag: string;\n try {\n const preparerResponse = await this.preparer.prepare(this.entity, {\n etag: storedEtag,\n logger: this.logger,\n });\n\n preparedDir = preparerResponse.preparedDir;\n newEtag = preparerResponse.etag;\n } catch (err) {\n if (err instanceof NotModifiedError) {\n // No need to prepare anymore since cache is valid.\n // Set last check happened to now\n new BuildMetadataStorage(this.entity.metadata.uid).setLastUpdated();\n this.logger.debug(\n `Docs for ${stringifyEntityRef(\n this.entity,\n )} are unmodified. 
Using cache, skipping generate and prepare`,\n );\n return false;\n }\n throw new Error(err.message);\n }\n\n this.logger.info(\n `Prepare step completed for entity ${stringifyEntityRef(\n this.entity,\n )}, stored at ${preparedDir}`,\n );\n\n /**\n * Generate\n */\n\n this.logger.info(\n `Step 2 of 3: Generating docs for entity ${stringifyEntityRef(\n this.entity,\n )}`,\n );\n\n const workingDir = this.config.getOptionalString(\n 'backend.workingDirectory',\n );\n const tmpdirPath = workingDir || os.tmpdir();\n // Fixes a problem with macOS returning a path that is a symlink\n const tmpdirResolvedPath = fs.realpathSync(tmpdirPath);\n const outputDir = await fs.mkdtemp(\n path.join(tmpdirResolvedPath, 'techdocs-tmp-'),\n );\n\n const parsedLocationAnnotation = getLocationForEntity(\n this.entity,\n this.scmIntegrations,\n );\n await this.generator.run({\n inputDir: preparedDir,\n outputDir,\n parsedLocationAnnotation,\n etag: newEtag,\n logger: this.logger,\n logStream: this.logStream,\n });\n\n // Remove Prepared directory since it is no longer needed.\n // Caveat: Can not remove prepared directory in case of git preparer since the\n // local git repository is used to get etag on subsequent requests.\n if (this.preparer instanceof UrlPreparer) {\n this.logger.debug(\n `Removing prepared directory ${preparedDir} since the site has been generated`,\n );\n try {\n // Not a blocker hence no need to await this.\n fs.remove(preparedDir);\n } catch (error) {\n this.logger.debug(`Error removing prepared directory ${error.message}`);\n }\n }\n\n /**\n * Publish\n */\n\n this.logger.info(\n `Step 3 of 3: Publishing docs for entity ${stringifyEntityRef(\n this.entity,\n )}`,\n );\n\n await this.publisher.publish({\n entity: this.entity,\n directory: outputDir,\n });\n\n try {\n // Not a blocker hence no need to await this.\n fs.remove(outputDir);\n this.logger.debug(\n `Removing generated directory ${outputDir} since the site has been published`,\n );\n } catch (error) {\n this.logger.debug(`Error removing generated directory ${error.message}`);\n }\n\n // Update the last check time for the entity\n new BuildMetadataStorage(this.entity.metadata.uid).setLastUpdated();\n\n return true;\n }\n}\n","/*\n * Copyright 2021 The Backstage Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Entity } from '@backstage/catalog-model';\nimport { Config } from '@backstage/config';\nimport { NotFoundError } from '@backstage/errors';\nimport { ScmIntegrationRegistry } from '@backstage/integration';\nimport {\n GeneratorBuilder,\n PreparerBuilder,\n PublisherBase,\n} from '@backstage/techdocs-common';\nimport { PassThrough } from 'stream';\nimport * as winston from 'winston';\nimport { DocsBuilder, shouldCheckForUpdate } from '../DocsBuilder';\n\nexport type DocsSynchronizerSyncOpts = {\n log: (message: string) => void;\n error: (e: Error) => void;\n finish: (result: { updated: boolean }) => void;\n};\n\nexport class DocsSynchronizer {\n private readonly publisher: PublisherBase;\n 
private readonly logger: winston.Logger;\n private readonly config: Config;\n private readonly scmIntegrations: ScmIntegrationRegistry;\n\n constructor({\n publisher,\n logger,\n config,\n scmIntegrations,\n }: {\n publisher: PublisherBase;\n logger: winston.Logger;\n config: Config;\n scmIntegrations: ScmIntegrationRegistry;\n }) {\n this.config = config;\n this.logger = logger;\n this.publisher = publisher;\n this.scmIntegrations = scmIntegrations;\n }\n\n async doSync({\n responseHandler: { log, error, finish },\n entity,\n preparers,\n generators,\n }: {\n responseHandler: DocsSynchronizerSyncOpts;\n entity: Entity;\n preparers: PreparerBuilder;\n generators: GeneratorBuilder;\n }) {\n // create a new logger to log data to the caller\n const taskLogger = winston.createLogger({\n level: process.env.LOG_LEVEL || 'info',\n format: winston.format.combine(\n winston.format.colorize(),\n winston.format.timestamp(),\n winston.format.simple(),\n ),\n defaultMeta: {},\n });\n\n // create an in-memory stream to forward logs to the event-stream\n const logStream = new PassThrough();\n logStream.on('data', async data => {\n log(data.toString().trim());\n });\n\n taskLogger.add(new winston.transports.Stream({ stream: logStream }));\n\n // check if the last update check was too recent\n if (!shouldCheckForUpdate(entity.metadata.uid!)) {\n finish({ updated: false });\n return;\n }\n\n let foundDocs = false;\n\n try {\n const docsBuilder = new DocsBuilder({\n preparers,\n generators,\n publisher: this.publisher,\n logger: taskLogger,\n entity,\n config: this.config,\n scmIntegrations: this.scmIntegrations,\n logStream,\n });\n\n const updated = await docsBuilder.build();\n\n if (!updated) {\n finish({ updated: false });\n return;\n }\n } catch (e) {\n const msg = `Failed to build the docs page: ${e.message}`;\n taskLogger.error(msg);\n this.logger.error(msg, e);\n error(e);\n return;\n }\n\n // With a maximum of ~5 seconds wait, check if the files got published and if docs will be fetched\n // on the user's page. If not, respond with a message asking them to check back later.\n // The delay here is to make sure GCS/AWS/etc. registers newly uploaded files which is usually <1 second\n for (let attempt = 0; attempt < 5; attempt++) {\n if (await this.publisher.hasDocsBeenGenerated(entity)) {\n foundDocs = true;\n break;\n }\n await new Promise(r => setTimeout(r, 1000));\n }\n if (!foundDocs) {\n this.logger.error(\n 'Published files are taking longer to show up in storage. Something went wrong.',\n );\n error(\n new NotFoundError(\n 'Sorry! It took too long for the generated docs to show up in storage. 
Check back later.',\n ),\n );\n return;\n }\n\n finish({ updated: true });\n }\n}\n","/*\n * Copyright 2020 The Backstage Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nimport { PluginEndpointDiscovery } from '@backstage/backend-common';\nimport { CatalogClient } from '@backstage/catalog-client';\nimport { Entity, stringifyEntityRef } from '@backstage/catalog-model';\nimport { Config } from '@backstage/config';\nimport { NotFoundError, NotModifiedError } from '@backstage/errors';\nimport {\n GeneratorBuilder,\n getLocationForEntity,\n PreparerBuilder,\n PublisherBase,\n} from '@backstage/techdocs-common';\nimport fetch from 'cross-fetch';\nimport express, { Response } from 'express';\nimport Router from 'express-promise-router';\nimport { Knex } from 'knex';\nimport { Logger } from 'winston';\nimport { ScmIntegrations } from '@backstage/integration';\nimport { DocsSynchronizer, DocsSynchronizerSyncOpts } from './DocsSynchronizer';\n\n/**\n * All of the required dependencies for running TechDocs in the \"out-of-the-box\"\n * deployment configuration (prepare/generate/publish all in the Backend).\n */\ntype OutOfTheBoxDeploymentOptions = {\n preparers: PreparerBuilder;\n generators: GeneratorBuilder;\n publisher: PublisherBase;\n logger: Logger;\n discovery: PluginEndpointDiscovery;\n database?: Knex; // TODO: Make database required when we're implementing database stuff.\n config: Config;\n};\n\n/**\n * Required dependencies for running TechDocs in the \"recommended\" deployment\n * configuration (prepare/generate handled externally in CI/CD).\n */\ntype RecommendedDeploymentOptions = {\n publisher: PublisherBase;\n logger: Logger;\n discovery: PluginEndpointDiscovery;\n config: Config;\n};\n\n/**\n * One of the two deployment configurations must be provided.\n */\ntype RouterOptions =\n | RecommendedDeploymentOptions\n | OutOfTheBoxDeploymentOptions;\n\n/**\n * Typeguard to help createRouter() understand when we are in a \"recommended\"\n * deployment vs. 
when we are in an out-of-the-box deployment configuration.\n */\nfunction isOutOfTheBoxOption(\n opt: RouterOptions,\n): opt is OutOfTheBoxDeploymentOptions {\n return (opt as OutOfTheBoxDeploymentOptions).preparers !== undefined;\n}\n\nexport async function createRouter(\n options: RouterOptions,\n): Promise<express.Router> {\n const router = Router();\n const { publisher, config, logger, discovery } = options;\n const catalogClient = new CatalogClient({ discoveryApi: discovery });\n const scmIntegrations = ScmIntegrations.fromConfig(config);\n const docsSynchronizer = new DocsSynchronizer({\n publisher,\n logger,\n config,\n scmIntegrations,\n });\n\n router.get('/metadata/techdocs/:namespace/:kind/:name', async (req, res) => {\n const { kind, namespace, name } = req.params;\n const entityName = { kind, namespace, name };\n\n try {\n const techdocsMetadata = await publisher.fetchTechDocsMetadata(\n entityName,\n );\n\n res.json(techdocsMetadata);\n } catch (err) {\n logger.info(\n `Unable to get metadata for '${stringifyEntityRef(\n entityName,\n )}' with error ${err}`,\n );\n throw new NotFoundError(\n `Unable to get metadata for '${stringifyEntityRef(entityName)}'`,\n err,\n );\n }\n });\n\n router.get('/metadata/entity/:namespace/:kind/:name', async (req, res) => {\n const catalogUrl = await discovery.getBaseUrl('catalog');\n\n const { kind, namespace, name } = req.params;\n const entityName = { kind, namespace, name };\n\n try {\n const token = getBearerToken(req.headers.authorization);\n // TODO: Consider using the catalog client here\n const entity = (await (\n await fetch(\n `${catalogUrl}/entities/by-name/${kind}/${namespace}/${name}`,\n {\n headers: token ? { Authorization: `Bearer ${token}` } : {},\n },\n )\n ).json()) as Entity;\n\n const locationMetadata = getLocationForEntity(entity, scmIntegrations);\n res.json({ ...entity, locationMetadata });\n } catch (err) {\n logger.info(\n `Unable to get metadata for '${stringifyEntityRef(\n entityName,\n )}' with error ${err}`,\n );\n throw new NotFoundError(\n `Unable to get metadata for '${stringifyEntityRef(entityName)}'`,\n err,\n );\n }\n });\n\n // Check if docs are the latest version and trigger rebuilds if not\n // Responds with an event-stream that closes after the build finished\n // Responds with an immediate success if rebuild not needed\n // If a build is required, responds with a success when finished\n router.get('/sync/:namespace/:kind/:name', async (req, res) => {\n const { kind, namespace, name } = req.params;\n const token = getBearerToken(req.headers.authorization);\n\n const entity = await catalogClient.getEntityByName(\n { kind, namespace, name },\n { token },\n );\n\n if (!entity?.metadata?.uid) {\n throw new NotFoundError('Entity metadata UID missing');\n }\n\n let responseHandler: DocsSynchronizerSyncOpts;\n if (req.header('accept') !== 'text/event-stream') {\n console.warn(\n \"The call to /sync/:namespace/:kind/:name wasn't done by an EventSource. This behavior is deprecated and will be removed soon. Make sure to update the @backstage/plugin-techdocs package in the frontend to the latest version.\",\n );\n responseHandler = createHttpResponse(res);\n } else {\n responseHandler = createEventStream(res);\n }\n\n // techdocs-backend will only try to build documentation for an entity if techdocs.builder is set to 'local'\n // If set to 'external', it will assume that an external process (e.g. 
CI/CD pipeline\n // of the repository) is responsible for building and publishing documentation to the storage provider\n if (config.getString('techdocs.builder') !== 'local') {\n responseHandler.finish({ updated: false });\n return;\n }\n\n // Set the synchronization and build process if \"out-of-the-box\" configuration is provided.\n if (isOutOfTheBoxOption(options)) {\n const { preparers, generators } = options;\n\n await docsSynchronizer.doSync({\n responseHandler,\n entity,\n preparers,\n generators,\n });\n return;\n }\n\n responseHandler.error(\n new Error(\n \"Invalid configuration. 'techdocs.builder' was set to 'local' but no 'preparer' was provided to the router initialization.\",\n ),\n );\n });\n\n // Route middleware which serves files from the storage set in the publisher.\n router.use('/static/docs', publisher.docsRouter());\n\n return router;\n}\n\nfunction getBearerToken(header?: string): string | undefined {\n return header?.match(/(?:Bearer)\\s+(\\S+)/i)?.[1];\n}\n\n/**\n * Create an event-stream response that emits the events 'log', 'error', and 'finish'.\n *\n * @param res the response to write the event-stream to\n * @returns A tuple of <log, error, finish> callbacks to emit messages. A call to 'error' or 'finish'\n * will close the event-stream.\n */\nexport function createEventStream(\n res: Response<any, any>,\n): DocsSynchronizerSyncOpts {\n // Mandatory headers and http status to keep connection open\n res.writeHead(200, {\n Connection: 'keep-alive',\n 'Cache-Control': 'no-cache',\n 'Content-Type': 'text/event-stream',\n });\n\n // client closes connection\n res.socket?.on('close', () => {\n res.end();\n });\n\n // write the event to the stream\n const send = (type: 'error' | 'finish' | 'log', data: any) => {\n res.write(`event: ${type}\\ndata: ${JSON.stringify(data)}\\n\\n`);\n\n // res.flush() is only available with the compression middleware\n if (res.flush) {\n res.flush();\n }\n };\n\n return {\n log: data => {\n send('log', data);\n },\n\n error: e => {\n send('error', e.message);\n res.end();\n },\n\n finish: result => {\n send('finish', result);\n res.end();\n },\n };\n}\n\n/**\n * Create a HTTP response. This is used for the legacy non-event-stream implementation of the sync endpoint.\n *\n * @param res the response to write the event-stream to\n * @returns A tuple of <log, error, finish> callbacks to emit messages. 
A call to 'error' or 'finish'\n * will close the event-stream.\n */\nexport function createHttpResponse(\n res: Response<any, any>,\n): DocsSynchronizerSyncOpts {\n return {\n log: () => {},\n error: e => {\n throw e;\n },\n finish: ({ updated }) => {\n if (!updated) {\n throw new NotModifiedError();\n }\n\n res\n .status(201)\n .json({ message: 'Docs updated or did not need updating' });\n },\n };\n}\n","/*\n * Copyright 2021 The Backstage Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { PluginEndpointDiscovery } from '@backstage/backend-common';\nimport { Entity, RELATION_OWNED_BY } from '@backstage/catalog-model';\nimport { IndexableDocument, DocumentCollator } from '@backstage/search-common';\nimport fetch from 'cross-fetch';\nimport unescape from 'lodash/unescape';\nimport { Logger } from 'winston';\nimport pLimit from 'p-limit';\nimport { CatalogApi, CatalogClient } from '@backstage/catalog-client';\n\ninterface MkSearchIndexDoc {\n title: string;\n text: string;\n location: string;\n}\n\nexport interface TechDocsDocument extends IndexableDocument {\n kind: string;\n namespace: string;\n name: string;\n lifecycle: string;\n owner: string;\n}\n\nexport class DefaultTechDocsCollator implements DocumentCollator {\n protected discovery: PluginEndpointDiscovery;\n protected locationTemplate: string;\n private readonly logger: Logger;\n private readonly catalogClient: CatalogApi;\n private readonly parallelismLimit: number;\n public readonly type: string = 'techdocs';\n\n constructor({\n discovery,\n locationTemplate,\n logger,\n catalogClient,\n parallelismLimit = 10,\n }: {\n discovery: PluginEndpointDiscovery;\n logger: Logger;\n locationTemplate?: string;\n catalogClient?: CatalogApi;\n parallelismLimit?: number;\n }) {\n this.discovery = discovery;\n this.locationTemplate =\n locationTemplate || '/docs/:namespace/:kind/:name/:path';\n this.logger = logger;\n this.catalogClient =\n catalogClient || new CatalogClient({ discoveryApi: discovery });\n this.parallelismLimit = parallelismLimit;\n }\n\n async execute() {\n const limit = pLimit(this.parallelismLimit);\n const techDocsBaseUrl = await this.discovery.getBaseUrl('techdocs');\n const entities = await this.catalogClient.getEntities({\n fields: [\n 'kind',\n 'namespace',\n 'metadata.annotations',\n 'metadata.name',\n 'metadata.namespace',\n 'spec.type',\n 'spec.lifecycle',\n 'relations',\n ],\n });\n const docPromises = entities.items\n .filter(it => it.metadata?.annotations?.['backstage.io/techdocs-ref'])\n .map((entity: Entity) =>\n limit(async (): Promise<TechDocsDocument[]> => {\n const entityInfo = {\n kind: entity.kind,\n namespace: entity.metadata.namespace || 'default',\n name: entity.metadata.name,\n };\n\n try {\n const searchIndexResponse = await fetch(\n DefaultTechDocsCollator.constructDocsIndexUrl(\n techDocsBaseUrl,\n entityInfo,\n ),\n );\n const searchIndex = await searchIndexResponse.json();\n\n return searchIndex.docs.map((doc: MkSearchIndexDoc) => ({\n title: 
unescape(doc.title),\n text: unescape(doc.text || ''),\n location: this.applyArgsToFormat(this.locationTemplate, {\n ...entityInfo,\n path: doc.location,\n }),\n ...entityInfo,\n componentType: entity.spec?.type?.toString() || 'other',\n lifecycle: (entity.spec?.lifecycle as string) || '',\n owner:\n entity.relations?.find(r => r.type === RELATION_OWNED_BY)\n ?.target?.name || '',\n }));\n } catch (e) {\n this.logger.warn(\n `Failed to retrieve tech docs search index for entity ${entityInfo.namespace}/${entityInfo.kind}/${entityInfo.name}`,\n e,\n );\n return [];\n }\n }),\n );\n return (await Promise.all(docPromises)).flat();\n }\n\n protected applyArgsToFormat(\n format: string,\n args: Record<string, string>,\n ): string {\n let formatted = format;\n for (const [key, value] of Object.entries(args)) {\n formatted = formatted.replace(`:${key}`, value);\n }\n return formatted;\n }\n\n private static constructDocsIndexUrl(\n techDocsBaseUrl: string,\n entityInfo: { kind: string; namespace: string; name: string },\n ) {\n return `${techDocsBaseUrl}/static/docs/${entityInfo.namespace}/${entityInfo.kind}/${entityInfo.name}/search/search_index.json`;\n }\n}\n"],"names":["stringifyEntityRef","ENTITY_DEFAULT_NAMESPACE","NotModifiedError","os","fs","path","getLocationForEntity","UrlPreparer","winston","PassThrough","NotFoundError","Router","catalogClient","CatalogClient","ScmIntegrations","fetch","pLimit","unescape","RELATION_OWNED_BY"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAiBA,MAAM,oBAAoB;2BAMQ;AAAA,EAIhC,YAAY,WAAmB;AAC7B,SAAK,YAAY;AACjB,SAAK,oBAAoB;AAAA;AAAA,EAG3B,iBAAuB;AACrB,SAAK,kBAAkB,KAAK,aAAa,KAAK;AAAA;AAAA,EAGhD,iBAAqC;AACnC,WAAO,KAAK,kBAAkB,KAAK;AAAA;AAAA;MAO1B,uBAAuB,CAAC,cAAsB;AACzD,QAAM,cAAc,IAAI,qBAAqB,WAAW;AACxD,MAAI,aAAa;AAEf,QAAI,KAAK,QAAQ,cAAc,KAAK,KAAM;AACxC,aAAO;AAAA;AAAA;AAGX,SAAO;AAAA;;kBCFgB;AAAA,EAUvB,YAAY;AAAA,IACV;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,KACuB;AACvB,SAAK,WAAW,UAAU,IAAI;AAC9B,SAAK,YAAY,WAAW,IAAI;AAChC,SAAK,YAAY;AACjB,SAAK,SAAS;AACd,SAAK,SAAS;AACd,SAAK,SAAS;AACd,SAAK,kBAAkB;AACvB,SAAK,YAAY;AAAA;AAAA,QAON,QAA0B;AApFzC;AAqFI,QAAI,CAAC,KAAK,OAAO,SAAS,KAAK;AAC7B,YAAM,IAAI,MACR;AAAA;AAQJ,SAAK,OAAO,KACV,0CAA0CA,gCACxC,KAAK;AAMT,QAAI;AACJ,QAAI,MAAM,KAAK,UAAU,qBAAqB,KAAK,SAAS;AAC1D,UAAI;AACF,qBACE,OAAM,KAAK,UAAU,sBAAsB;AAAA,UACzC,WACE,WAAK,OAAO,SAAS,cAArB,YAAkCC;AAAA,UACpC,MAAM,KAAK,OAAO;AAAA,UAClB,MAAM,KAAK,OAAO,SAAS;AAAA,YAE7B;AAAA,eACK,KAAP;AAEA,aAAK,OAAO,KACV,6EAA6E;AAAA;AAAA;AAKnF,QAAI;AACJ,QAAI;AACJ,QAAI;AACF,YAAM,mBAAmB,MAAM,KAAK,SAAS,QAAQ,KAAK,QAAQ;AAAA,QAChE,MAAM;AAAA,QACN,QAAQ,KAAK;AAAA;AAGf,oBAAc,iBAAiB;AAC/B,gBAAU,iBAAiB;AAAA,aACpB,KAAP;AACA,UAAI,eAAeC,yBAAkB;AAGnC,YAAI,qBAAqB,KAAK,OAAO,SAAS,KAAK;AACnD,aAAK,OAAO,MACV,YAAYF,gCACV,KAAK;AAGT,eAAO;AAAA;AAET,YAAM,IAAI,MAAM,IAAI;AAAA;AAGtB,SAAK,OAAO,KACV,qCAAqCA,gCACnC,KAAK,sBACS;AAOlB,SAAK,OAAO,KACV,2CAA2CA,gCACzC,KAAK;AAIT,UAAM,aAAa,KAAK,OAAO,kBAC7B;AAEF,UAAM,aAAa,cAAcG,uBAAG;AAEpC,UAAM,qBAAqBC,uBAAG,aAAa;AAC3C,UAAM,YAAY,MAAMA,uBAAG,QACzBC,yBAAK,KAAK,oBAAoB;AAGhC,UAAM,2BAA2BC,oCAC/B,KAAK,QACL,KAAK;AAEP,UAAM,KAAK,UAAU,IAAI;AAAA,MACvB,UAAU;AAAA,MACV;AAAA,MACA;AAAA,MACA,MAAM;AAAA,MACN,QAAQ,KAAK;AAAA,MACb,WAAW,KAAK;AAAA;AAMlB,QAAI,KAAK,oBAAoBC,4BAAa;AACxC,WAAK,OAAO,MACV,+BAA+B;AAEjC,UAAI;AAEF,+BAAG,OAAO;AAAA,eACH,OAAP;AACA,aAAK,OAAO,MAAM,qCAAqC,MAAM;AAAA;AAAA;AAQjE,SAAK,OAAO,KACV,2CAA2CP,gCACzC,KAAK;AAIT,UAAM,KAAK,UAAU,QAAQ;AAAA,MAC3B,QAAQ,KAAK;AAAA,MACb,WAAW;AAAA;AAGb,QAAI;AAEF,6BAAG,OAAO;AACV,WAAK,OAAO,MACV,gCAAgC;AAAA,aAE3B,OAAP;AACA,WAAK,OAAO,MAAM,s
CAAsC,MAAM;AAAA;AAIhE,QAAI,qBAAqB,KAAK,OAAO,SAAS,KAAK;AAEnD,WAAO;AAAA;AAAA;;uBClMmB;AAAA,EAM5B,YAAY;AAAA,IACV;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,KAMC;AACD,SAAK,SAAS;AACd,SAAK,SAAS;AACd,SAAK,YAAY;AACjB,SAAK,kBAAkB;AAAA;AAAA,QAGnB,OAAO;AAAA,IACX,iBAAiB,CAAE,KAAK,OAAO;AAAA,IAC/B;AAAA,IACA;AAAA,IACA;AAAA,KAMC;AAED,UAAM,aAAaQ,mBAAQ,aAAa;AAAA,MACtC,OAAO,QAAQ,IAAI,aAAa;AAAA,MAChC,QAAQA,mBAAQ,OAAO,QACrBA,mBAAQ,OAAO,YACfA,mBAAQ,OAAO,aACfA,mBAAQ,OAAO;AAAA,MAEjB,aAAa;AAAA;AAIf,UAAM,YAAY,IAAIC;AACtB,cAAU,GAAG,QAAQ,OAAM,SAAQ;AACjC,UAAI,KAAK,WAAW;AAAA;AAGtB,eAAW,IAAI,IAAID,mBAAQ,WAAW,OAAO,CAAE,QAAQ;AAGvD,QAAI,CAAC,qBAAqB,OAAO,SAAS,MAAO;AAC/C,aAAO,CAAE,SAAS;AAClB;AAAA;AAGF,QAAI,YAAY;AAEhB,QAAI;AACF,YAAM,cAAc,IAAI,YAAY;AAAA,QAClC;AAAA,QACA;AAAA,QACA,WAAW,KAAK;AAAA,QAChB,QAAQ;AAAA,QACR;AAAA,QACA,QAAQ,KAAK;AAAA,QACb,iBAAiB,KAAK;AAAA,QACtB;AAAA;AAGF,YAAM,UAAU,MAAM,YAAY;AAElC,UAAI,CAAC,SAAS;AACZ,eAAO,CAAE,SAAS;AAClB;AAAA;AAAA,aAEK,GAAP;AACA,YAAM,MAAM,kCAAkC,EAAE;AAChD,iBAAW,MAAM;AACjB,WAAK,OAAO,MAAM,KAAK;AACvB,YAAM;AACN;AAAA;AAMF,aAAS,UAAU,GAAG,UAAU,GAAG,WAAW;AAC5C,UAAI,MAAM,KAAK,UAAU,qBAAqB,SAAS;AACrD,oBAAY;AACZ;AAAA;AAEF,YAAM,IAAI,QAAQ,OAAK,WAAW,GAAG;AAAA;AAEvC,QAAI,CAAC,WAAW;AACd,WAAK,OAAO,MACV;AAEF,YACE,IAAIE,qBACF;AAGJ;AAAA;AAGF,WAAO,CAAE,SAAS;AAAA;AAAA;;AC1EtB,6BACE,KACqC;AACrC,SAAQ,IAAqC,cAAc;AAAA;4BAI3D,SACyB;AACzB,QAAM,SAASC;AACf,QAAM,CAAE,WAAW,QAAQ,QAAQ,aAAc;AACjD,QAAMC,kBAAgB,IAAIC,4BAAc,CAAE,cAAc;AACxD,QAAM,kBAAkBC,4BAAgB,WAAW;AACnD,QAAM,mBAAmB,IAAI,iBAAiB;AAAA,IAC5C;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA;AAGF,SAAO,IAAI,6CAA6C,OAAO,KAAK,QAAQ;AAC1E,UAAM,CAAE,MAAM,WAAW,QAAS,IAAI;AACtC,UAAM,aAAa,CAAE,MAAM,WAAW;AAEtC,QAAI;AACF,YAAM,mBAAmB,MAAM,UAAU,sBACvC;AAGF,UAAI,KAAK;AAAA,aACF,KAAP;AACA,aAAO,KACL,+BAA+Bd,gCAC7B,2BACe;AAEnB,YAAM,IAAIU,qBACR,+BAA+BV,gCAAmB,gBAClD;AAAA;AAAA;AAKN,SAAO,IAAI,2CAA2C,OAAO,KAAK,QAAQ;AACxE,UAAM,aAAa,MAAM,UAAU,WAAW;AAE9C,UAAM,CAAE,MAAM,WAAW,QAAS,IAAI;AACtC,UAAM,aAAa,CAAE,MAAM,WAAW;AAEtC,QAAI;AACF,YAAM,QAAQ,eAAe,IAAI,QAAQ;AAEzC,YAAM,SAAU,MACd,OAAMe,0BACJ,GAAG,+BAA+B,QAAQ,aAAa,QACvD;AAAA,QACE,SAAS,QAAQ,CAAE,eAAe,UAAU,WAAY;AAAA,UAG5D;AAEF,YAAM,mBAAmBT,oCAAqB,QAAQ;AACtD,UAAI,KAAK,IAAK,QAAQ;AAAA,aACf,KAAP;AACA,aAAO,KACL,+BAA+BN,gCAC7B,2BACe;AAEnB,YAAM,IAAIU,qBACR,+BAA+BV,gCAAmB,gBAClD;AAAA;AAAA;AASN,SAAO,IAAI,gCAAgC,OAAO,KAAK,QAAQ;AAtJjE;AAuJI,UAAM,CAAE,MAAM,WAAW,QAAS,IAAI;AACtC,UAAM,QAAQ,eAAe,IAAI,QAAQ;AAEzC,UAAM,SAAS,MAAMY,gBAAc,gBACjC,CAAE,MAAM,WAAW,OACnB,CAAE;AAGJ,QAAI,yCAAS,aAAR,mBAAkB,MAAK;AAC1B,YAAM,IAAIF,qBAAc;AAAA;AAG1B,QAAI;AACJ,QAAI,IAAI,OAAO,cAAc,qBAAqB;AAChD,cAAQ,KACN;AAEF,wBAAkB,mBAAmB;AAAA,WAChC;AACL,wBAAkB,kBAAkB;AAAA;AAMtC,QAAI,OAAO,UAAU,wBAAwB,SAAS;AACpD,sBAAgB,OAAO,CAAE,SAAS;AAClC;AAAA;AAIF,QAAI,oBAAoB,UAAU;AAChC,YAAM,CAAE,WAAW,cAAe;AAElC,YAAM,iBAAiB,OAAO;AAAA,QAC5B;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA;AAEF;AAAA;AAGF,oBAAgB,MACd,IAAI,MACF;AAAA;AAMN,SAAO,IAAI,gBAAgB,UAAU;AAErC,SAAO;AAAA;AAGT,wBAAwB,QAAqC;AA/M7D;AAgNE,SAAO,uCAAQ,MAAM,2BAAd,mBAAuC;AAAA;2BAW9C,KAC0B;AA5N5B;AA8NE,MAAI,UAAU,KAAK;AAAA,IACjB,YAAY;AAAA,IACZ,iBAAiB;AAAA,IACjB,gBAAgB;AAAA;AAIlB,YAAI,WAAJ,mBAAY,GAAG,SAAS,MAAM;AAC5B,QAAI;AAAA;AAIN,QAAM,OAAO,CAAC,MAAkC,SAAc;AAC5D,QAAI,MAAM,UAAU;AAAA,QAAe,KAAK,UAAU;AAAA;AAAA;AAGlD,QAAI,IAAI,OAAO;AACb,UAAI;AAAA;AAAA;AAIR,SAAO;AAAA,IACL,KAAK,UAAQ;AACX,WAAK,OAAO;AAAA;AAAA,IAGd,OAAO,OAAK;AACV,WAAK,SAAS,EAAE;AAChB,UAAI;AAAA;AAAA,IAGN,QAAQ,YAAU;AAChB,WAAK,UAAU;AACf,UAAI;AAAA;AAAA;AAAA;4BAaR,KAC0B;AAC1B,SAAO;AAAA,IACL,KAAK,MAAM;AAAA;AAAA,IACX,OAAO,OAAK;AACV,YAAM;AAAA;AAAA,IAER,QAAQ,CAAC,CAAE,aAAc;AACvB,UAAI,CAAC,SAAS;AACZ,cAAM,IAAIR;AAAA;AAGZ,UACG,OAAO,KACP,KAAK,CAAE,SAAS;A
AAA;AAAA;AAAA;;8BCnPwC;AAAA,EAQ/D,YAAY;AAAA,IACV;AAAA,IACA;AAAA,IACA;AAAA,mBACAU;AAAA,IACA,mBAAmB;AAAA,KAOlB;AAda,gBAAe;AAe7B,SAAK,YAAY;AACjB,SAAK,mBACH,oBAAoB;AACtB,SAAK,SAAS;AACd,SAAK,gBACHA,mBAAiB,IAAIC,4BAAc,CAAE,cAAc;AACrD,SAAK,mBAAmB;AAAA;AAAA,QAGpB,UAAU;AACd,UAAM,QAAQG,2BAAO,KAAK;AAC1B,UAAM,kBAAkB,MAAM,KAAK,UAAU,WAAW;AACxD,UAAM,WAAW,MAAM,KAAK,cAAc,YAAY;AAAA,MACpD,QAAQ;AAAA,QACN;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA;AAAA;AAGJ,UAAM,cAAc,SAAS,MAC1B,OAAO,QAAG;AArFjB;AAqFoB,4BAAG,aAAH,mBAAa,gBAAb,mBAA2B;AAAA,OACxC,IAAI,CAAC,WACJ,MAAM,YAAyC;AAC7C,YAAM,aAAa;AAAA,QACjB,MAAM,OAAO;AAAA,QACb,WAAW,OAAO,SAAS,aAAa;AAAA,QACxC,MAAM,OAAO,SAAS;AAAA;AAGxB,UAAI;AACF,cAAM,sBAAsB,MAAMD,0BAChC,wBAAwB,sBACtB,iBACA;AAGJ,cAAM,cAAc,MAAM,oBAAoB;AAE9C,eAAO,YAAY,KAAK,IAAI,CAAC,QAAuB;AAvGhE;AAuGoE;AAAA,YACtD,OAAOE,6BAAS,IAAI;AAAA,YACpB,MAAMA,6BAAS,IAAI,QAAQ;AAAA,YAC3B,UAAU,KAAK,kBAAkB,KAAK,kBAAkB;AAAA,iBACnD;AAAA,cACH,MAAM,IAAI;AAAA;AAAA,eAET;AAAA,YACH,eAAe,oBAAO,SAAP,mBAAa,SAAb,mBAAmB,eAAc;AAAA,YAChD,WAAY,cAAO,SAAP,mBAAa,cAAwB;AAAA,YACjD,OACE,0BAAO,cAAP,mBAAkB,KAAK,OAAK,EAAE,SAASC,oCAAvC,mBACI,WADJ,mBACY,SAAQ;AAAA;AAAA;AAAA,eAEjB,GAAP;AACA,aAAK,OAAO,KACV,wDAAwD,WAAW,aAAa,WAAW,QAAQ,WAAW,QAC9G;AAEF,eAAO;AAAA;AAAA;AAIf,WAAQ,OAAM,QAAQ,IAAI,cAAc;AAAA;AAAA,EAGhC,kBACR,QACA,MACQ;AACR,QAAI,YAAY;AAChB,eAAW,CAAC,KAAK,UAAU,OAAO,QAAQ,OAAO;AAC/C,kBAAY,UAAU,QAAQ,IAAI,OAAO;AAAA;AAE3C,WAAO;AAAA;AAAA,SAGM,sBACb,iBACA,YACA;AACA,WAAO,GAAG,+BAA+B,WAAW,aAAa,WAAW,QAAQ,WAAW;AAAA;AAAA;;;;;;;;;;;;;"}
+
{"version":3,"file":"index.cjs.js","sources":["../src/DocsBuilder/BuildMetadataStorage.ts","../src/DocsBuilder/builder.ts","../src/service/DocsSynchronizer.ts","../src/service/router.ts","../src/search/DefaultTechDocsCollator.ts"],"sourcesContent":["/*\n * Copyright 2020 The Backstage Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\n// Entity uid: unix timestamp\nconst lastUpdatedRecord = {} as Record<string, number>;\n\n/**\n * Store timestamps of the most recent TechDocs update of each Entity. This is\n * used to avoid checking for an update on each and every request to TechDocs.\n */\nexport class BuildMetadataStorage {\n private entityUid: string;\n private lastUpdatedRecord: Record<string, number>;\n\n constructor(entityUid: string) {\n this.entityUid = entityUid;\n this.lastUpdatedRecord = lastUpdatedRecord;\n }\n\n setLastUpdated(): void {\n this.lastUpdatedRecord[this.entityUid] = Date.now();\n }\n\n getLastUpdated(): number | undefined {\n return this.lastUpdatedRecord[this.entityUid];\n }\n}\n\n/**\n * Return false if a check for update has happened in last 60 seconds.\n */\nexport const shouldCheckForUpdate = (entityUid: string) => {\n const lastUpdated = new BuildMetadataStorage(entityUid).getLastUpdated();\n if (lastUpdated) {\n // The difference is in milliseconds\n if (Date.now() - lastUpdated < 60 * 1000) {\n return false;\n }\n }\n return true;\n};\n","/*\n * Copyright 2020 The Backstage Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nimport {\n Entity,\n ENTITY_DEFAULT_NAMESPACE,\n stringifyEntityRef,\n} from '@backstage/catalog-model';\nimport { Config } from '@backstage/config';\nimport { NotModifiedError } from '@backstage/errors';\nimport { ScmIntegrationRegistry } from '@backstage/integration';\nimport {\n GeneratorBase,\n GeneratorBuilder,\n getLocationForEntity,\n PreparerBase,\n PreparerBuilder,\n PublisherBase,\n UrlPreparer,\n} from '@backstage/techdocs-common';\nimport fs from 'fs-extra';\nimport os from 'os';\nimport path from 'path';\nimport { Writable } from 'stream';\nimport { Logger } from 'winston';\nimport { BuildMetadataStorage } from './BuildMetadataStorage';\n\ntype DocsBuilderArguments = {\n preparers: PreparerBuilder;\n generators: GeneratorBuilder;\n publisher: PublisherBase;\n entity: Entity;\n logger: Logger;\n config: Config;\n scmIntegrations: ScmIntegrationRegistry;\n logStream?: Writable;\n};\n\nexport class DocsBuilder {\n private preparer: PreparerBase;\n private generator: 
GeneratorBase;\n private publisher: PublisherBase;\n private entity: Entity;\n private logger: Logger;\n private config: Config;\n private scmIntegrations: ScmIntegrationRegistry;\n private logStream: Writable | undefined;\n\n constructor({\n preparers,\n generators,\n publisher,\n entity,\n logger,\n config,\n scmIntegrations,\n logStream,\n }: DocsBuilderArguments) {\n this.preparer = preparers.get(entity);\n this.generator = generators.get(entity);\n this.publisher = publisher;\n this.entity = entity;\n this.logger = logger;\n this.config = config;\n this.scmIntegrations = scmIntegrations;\n this.logStream = logStream;\n }\n\n /**\n * Build the docs and return whether they have been newly generated or have been cached\n * @returns true, if the docs have been built. false, if the cached docs are still up-to-date.\n */\n public async build(): Promise<boolean> {\n if (!this.entity.metadata.uid) {\n throw new Error(\n 'Trying to build documentation for entity not in software catalog',\n );\n }\n\n /**\n * Prepare (and cache check)\n */\n\n this.logger.info(\n `Step 1 of 3: Preparing docs for entity ${stringifyEntityRef(\n this.entity,\n )}`,\n );\n\n // If available, use the etag stored in techdocs_metadata.json to\n // check if docs are outdated and need to be regenerated.\n let storedEtag: string | undefined;\n if (await this.publisher.hasDocsBeenGenerated(this.entity)) {\n try {\n storedEtag = (\n await this.publisher.fetchTechDocsMetadata({\n namespace:\n this.entity.metadata.namespace ?? ENTITY_DEFAULT_NAMESPACE,\n kind: this.entity.kind,\n name: this.entity.metadata.name,\n })\n ).etag;\n } catch (err) {\n // Proceed with a fresh build\n this.logger.warn(\n `Unable to read techdocs_metadata.json, proceeding with fresh build, error ${err}.`,\n );\n }\n }\n\n let preparedDir: string;\n let newEtag: string;\n try {\n const preparerResponse = await this.preparer.prepare(this.entity, {\n etag: storedEtag,\n logger: this.logger,\n });\n\n preparedDir = preparerResponse.preparedDir;\n newEtag = preparerResponse.etag;\n } catch (err) {\n if (err instanceof NotModifiedError) {\n // No need to prepare anymore since cache is valid.\n // Set last check happened to now\n new BuildMetadataStorage(this.entity.metadata.uid).setLastUpdated();\n this.logger.debug(\n `Docs for ${stringifyEntityRef(\n this.entity,\n )} are unmodified. 
Using cache, skipping generate and prepare`,\n );\n return false;\n }\n throw new Error(err.message);\n }\n\n this.logger.info(\n `Prepare step completed for entity ${stringifyEntityRef(\n this.entity,\n )}, stored at ${preparedDir}`,\n );\n\n /**\n * Generate\n */\n\n this.logger.info(\n `Step 2 of 3: Generating docs for entity ${stringifyEntityRef(\n this.entity,\n )}`,\n );\n\n const workingDir = this.config.getOptionalString(\n 'backend.workingDirectory',\n );\n const tmpdirPath = workingDir || os.tmpdir();\n // Fixes a problem with macOS returning a path that is a symlink\n const tmpdirResolvedPath = fs.realpathSync(tmpdirPath);\n const outputDir = await fs.mkdtemp(\n path.join(tmpdirResolvedPath, 'techdocs-tmp-'),\n );\n\n const parsedLocationAnnotation = getLocationForEntity(\n this.entity,\n this.scmIntegrations,\n );\n await this.generator.run({\n inputDir: preparedDir,\n outputDir,\n parsedLocationAnnotation,\n etag: newEtag,\n logger: this.logger,\n logStream: this.logStream,\n });\n\n // Remove Prepared directory since it is no longer needed.\n // Caveat: Can not remove prepared directory in case of git preparer since the\n // local git repository is used to get etag on subsequent requests.\n if (this.preparer instanceof UrlPreparer) {\n this.logger.debug(\n `Removing prepared directory ${preparedDir} since the site has been generated`,\n );\n try {\n // Not a blocker hence no need to await this.\n fs.remove(preparedDir);\n } catch (error) {\n this.logger.debug(`Error removing prepared directory ${error.message}`);\n }\n }\n\n /**\n * Publish\n */\n\n this.logger.info(\n `Step 3 of 3: Publishing docs for entity ${stringifyEntityRef(\n this.entity,\n )}`,\n );\n\n await this.publisher.publish({\n entity: this.entity,\n directory: outputDir,\n });\n\n try {\n // Not a blocker hence no need to await this.\n fs.remove(outputDir);\n this.logger.debug(\n `Removing generated directory ${outputDir} since the site has been published`,\n );\n } catch (error) {\n this.logger.debug(`Error removing generated directory ${error.message}`);\n }\n\n // Update the last check time for the entity\n new BuildMetadataStorage(this.entity.metadata.uid).setLastUpdated();\n\n return true;\n }\n}\n","/*\n * Copyright 2021 The Backstage Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { Entity } from '@backstage/catalog-model';\nimport { Config } from '@backstage/config';\nimport { NotFoundError } from '@backstage/errors';\nimport { ScmIntegrationRegistry } from '@backstage/integration';\nimport {\n GeneratorBuilder,\n PreparerBuilder,\n PublisherBase,\n} from '@backstage/techdocs-common';\nimport { PassThrough } from 'stream';\nimport * as winston from 'winston';\nimport { DocsBuilder, shouldCheckForUpdate } from '../DocsBuilder';\n\nexport type DocsSynchronizerSyncOpts = {\n log: (message: string) => void;\n error: (e: Error) => void;\n finish: (result: { updated: boolean }) => void;\n};\n\nexport class DocsSynchronizer {\n private readonly publisher: PublisherBase;\n 
private readonly logger: winston.Logger;\n private readonly config: Config;\n private readonly scmIntegrations: ScmIntegrationRegistry;\n\n constructor({\n publisher,\n logger,\n config,\n scmIntegrations,\n }: {\n publisher: PublisherBase;\n logger: winston.Logger;\n config: Config;\n scmIntegrations: ScmIntegrationRegistry;\n }) {\n this.config = config;\n this.logger = logger;\n this.publisher = publisher;\n this.scmIntegrations = scmIntegrations;\n }\n\n async doSync({\n responseHandler: { log, error, finish },\n entity,\n preparers,\n generators,\n }: {\n responseHandler: DocsSynchronizerSyncOpts;\n entity: Entity;\n preparers: PreparerBuilder;\n generators: GeneratorBuilder;\n }) {\n // create a new logger to log data to the caller\n const taskLogger = winston.createLogger({\n level: process.env.LOG_LEVEL || 'info',\n format: winston.format.combine(\n winston.format.colorize(),\n winston.format.timestamp(),\n winston.format.simple(),\n ),\n defaultMeta: {},\n });\n\n // create an in-memory stream to forward logs to the event-stream\n const logStream = new PassThrough();\n logStream.on('data', async data => {\n log(data.toString().trim());\n });\n\n taskLogger.add(new winston.transports.Stream({ stream: logStream }));\n\n // check if the last update check was too recent\n if (!shouldCheckForUpdate(entity.metadata.uid!)) {\n finish({ updated: false });\n return;\n }\n\n let foundDocs = false;\n\n try {\n const docsBuilder = new DocsBuilder({\n preparers,\n generators,\n publisher: this.publisher,\n logger: taskLogger,\n entity,\n config: this.config,\n scmIntegrations: this.scmIntegrations,\n logStream,\n });\n\n const updated = await docsBuilder.build();\n\n if (!updated) {\n finish({ updated: false });\n return;\n }\n } catch (e) {\n const msg = `Failed to build the docs page: ${e.message}`;\n taskLogger.error(msg);\n this.logger.error(msg, e);\n error(e);\n return;\n }\n\n // With a maximum of ~5 seconds wait, check if the files got published and if docs will be fetched\n // on the user's page. If not, respond with a message asking them to check back later.\n // The delay here is to make sure GCS/AWS/etc. registers newly uploaded files which is usually <1 second\n for (let attempt = 0; attempt < 5; attempt++) {\n if (await this.publisher.hasDocsBeenGenerated(entity)) {\n foundDocs = true;\n break;\n }\n await new Promise(r => setTimeout(r, 1000));\n }\n if (!foundDocs) {\n this.logger.error(\n 'Published files are taking longer to show up in storage. Something went wrong.',\n );\n error(\n new NotFoundError(\n 'Sorry! It took too long for the generated docs to show up in storage. 
Check back later.',\n ),\n );\n return;\n }\n\n finish({ updated: true });\n }\n}\n","/*\n * Copyright 2020 The Backstage Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nimport { PluginEndpointDiscovery } from '@backstage/backend-common';\nimport { CatalogClient } from '@backstage/catalog-client';\nimport { Entity, stringifyEntityRef } from '@backstage/catalog-model';\nimport { Config } from '@backstage/config';\nimport { NotFoundError, NotModifiedError } from '@backstage/errors';\nimport {\n GeneratorBuilder,\n getLocationForEntity,\n PreparerBuilder,\n PublisherBase,\n} from '@backstage/techdocs-common';\nimport fetch from 'cross-fetch';\nimport express, { Response } from 'express';\nimport Router from 'express-promise-router';\nimport { Knex } from 'knex';\nimport { Logger } from 'winston';\nimport { ScmIntegrations } from '@backstage/integration';\nimport { DocsSynchronizer, DocsSynchronizerSyncOpts } from './DocsSynchronizer';\n\n/**\n * All of the required dependencies for running TechDocs in the \"out-of-the-box\"\n * deployment configuration (prepare/generate/publish all in the Backend).\n */\ntype OutOfTheBoxDeploymentOptions = {\n preparers: PreparerBuilder;\n generators: GeneratorBuilder;\n publisher: PublisherBase;\n logger: Logger;\n discovery: PluginEndpointDiscovery;\n database?: Knex; // TODO: Make database required when we're implementing database stuff.\n config: Config;\n};\n\n/**\n * Required dependencies for running TechDocs in the \"recommended\" deployment\n * configuration (prepare/generate handled externally in CI/CD).\n */\ntype RecommendedDeploymentOptions = {\n publisher: PublisherBase;\n logger: Logger;\n discovery: PluginEndpointDiscovery;\n config: Config;\n};\n\n/**\n * One of the two deployment configurations must be provided.\n */\ntype RouterOptions =\n | RecommendedDeploymentOptions\n | OutOfTheBoxDeploymentOptions;\n\n/**\n * Typeguard to help createRouter() understand when we are in a \"recommended\"\n * deployment vs. 
when we are in an out-of-the-box deployment configuration.\n */\nfunction isOutOfTheBoxOption(\n opt: RouterOptions,\n): opt is OutOfTheBoxDeploymentOptions {\n return (opt as OutOfTheBoxDeploymentOptions).preparers !== undefined;\n}\n\nexport async function createRouter(\n options: RouterOptions,\n): Promise<express.Router> {\n const router = Router();\n const { publisher, config, logger, discovery } = options;\n const catalogClient = new CatalogClient({ discoveryApi: discovery });\n const scmIntegrations = ScmIntegrations.fromConfig(config);\n const docsSynchronizer = new DocsSynchronizer({\n publisher,\n logger,\n config,\n scmIntegrations,\n });\n\n router.get('/metadata/techdocs/:namespace/:kind/:name', async (req, res) => {\n const { kind, namespace, name } = req.params;\n const entityName = { kind, namespace, name };\n\n try {\n const techdocsMetadata = await publisher.fetchTechDocsMetadata(\n entityName,\n );\n\n res.json(techdocsMetadata);\n } catch (err) {\n logger.info(\n `Unable to get metadata for '${stringifyEntityRef(\n entityName,\n )}' with error ${err}`,\n );\n throw new NotFoundError(\n `Unable to get metadata for '${stringifyEntityRef(entityName)}'`,\n err,\n );\n }\n });\n\n router.get('/metadata/entity/:namespace/:kind/:name', async (req, res) => {\n const catalogUrl = await discovery.getBaseUrl('catalog');\n\n const { kind, namespace, name } = req.params;\n const entityName = { kind, namespace, name };\n\n try {\n const token = getBearerToken(req.headers.authorization);\n // TODO: Consider using the catalog client here\n const entity = (await (\n await fetch(\n `${catalogUrl}/entities/by-name/${kind}/${namespace}/${name}`,\n {\n headers: token ? { Authorization: `Bearer ${token}` } : {},\n },\n )\n ).json()) as Entity;\n\n const locationMetadata = getLocationForEntity(entity, scmIntegrations);\n res.json({ ...entity, locationMetadata });\n } catch (err) {\n logger.info(\n `Unable to get metadata for '${stringifyEntityRef(\n entityName,\n )}' with error ${err}`,\n );\n throw new NotFoundError(\n `Unable to get metadata for '${stringifyEntityRef(entityName)}'`,\n err,\n );\n }\n });\n\n // Check if docs are the latest version and trigger rebuilds if not\n // Responds with an event-stream that closes after the build finished\n // Responds with an immediate success if rebuild not needed\n // If a build is required, responds with a success when finished\n router.get('/sync/:namespace/:kind/:name', async (req, res) => {\n const { kind, namespace, name } = req.params;\n const token = getBearerToken(req.headers.authorization);\n\n const entity = await catalogClient.getEntityByName(\n { kind, namespace, name },\n { token },\n );\n\n if (!entity?.metadata?.uid) {\n throw new NotFoundError('Entity metadata UID missing');\n }\n\n let responseHandler: DocsSynchronizerSyncOpts;\n if (req.header('accept') !== 'text/event-stream') {\n console.warn(\n \"The call to /sync/:namespace/:kind/:name wasn't done by an EventSource. This behavior is deprecated and will be removed soon. Make sure to update the @backstage/plugin-techdocs package in the frontend to the latest version.\",\n );\n responseHandler = createHttpResponse(res);\n } else {\n responseHandler = createEventStream(res);\n }\n\n // techdocs-backend will only try to build documentation for an entity if techdocs.builder is set to 'local'\n // If set to 'external', it will assume that an external process (e.g. 
CI/CD pipeline\n // of the repository) is responsible for building and publishing documentation to the storage provider\n if (config.getString('techdocs.builder') !== 'local') {\n responseHandler.finish({ updated: false });\n return;\n }\n\n // Set the synchronization and build process if \"out-of-the-box\" configuration is provided.\n if (isOutOfTheBoxOption(options)) {\n const { preparers, generators } = options;\n\n await docsSynchronizer.doSync({\n responseHandler,\n entity,\n preparers,\n generators,\n });\n return;\n }\n\n responseHandler.error(\n new Error(\n \"Invalid configuration. 'techdocs.builder' was set to 'local' but no 'preparer' was provided to the router initialization.\",\n ),\n );\n });\n\n // Route middleware which serves files from the storage set in the publisher.\n router.use('/static/docs', publisher.docsRouter());\n\n return router;\n}\n\nfunction getBearerToken(header?: string): string | undefined {\n return header?.match(/(?:Bearer)\\s+(\\S+)/i)?.[1];\n}\n\n/**\n * Create an event-stream response that emits the events 'log', 'error', and 'finish'.\n *\n * @param res the response to write the event-stream to\n * @returns A tuple of <log, error, finish> callbacks to emit messages. A call to 'error' or 'finish'\n * will close the event-stream.\n */\nexport function createEventStream(\n res: Response<any, any>,\n): DocsSynchronizerSyncOpts {\n // Mandatory headers and http status to keep connection open\n res.writeHead(200, {\n Connection: 'keep-alive',\n 'Cache-Control': 'no-cache',\n 'Content-Type': 'text/event-stream',\n });\n\n // client closes connection\n res.socket?.on('close', () => {\n res.end();\n });\n\n // write the event to the stream\n const send = (type: 'error' | 'finish' | 'log', data: any) => {\n res.write(`event: ${type}\\ndata: ${JSON.stringify(data)}\\n\\n`);\n\n // res.flush() is only available with the compression middleware\n if (res.flush) {\n res.flush();\n }\n };\n\n return {\n log: data => {\n send('log', data);\n },\n\n error: e => {\n send('error', e.message);\n res.end();\n },\n\n finish: result => {\n send('finish', result);\n res.end();\n },\n };\n}\n\n/**\n * Create a HTTP response. This is used for the legacy non-event-stream implementation of the sync endpoint.\n *\n * @param res the response to write the event-stream to\n * @returns A tuple of <log, error, finish> callbacks to emit messages. 
A call to 'error' or 'finish'\n * will close the event-stream.\n */\nexport function createHttpResponse(\n res: Response<any, any>,\n): DocsSynchronizerSyncOpts {\n return {\n log: () => {},\n error: e => {\n throw e;\n },\n finish: ({ updated }) => {\n if (!updated) {\n throw new NotModifiedError();\n }\n\n res\n .status(201)\n .json({ message: 'Docs updated or did not need updating' });\n },\n };\n}\n","/*\n * Copyright 2021 The Backstage Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\n\nimport { PluginEndpointDiscovery } from '@backstage/backend-common';\nimport { Entity, RELATION_OWNED_BY } from '@backstage/catalog-model';\nimport { DocumentCollator } from '@backstage/search-common';\nimport fetch from 'cross-fetch';\nimport unescape from 'lodash/unescape';\nimport { Logger } from 'winston';\nimport pLimit from 'p-limit';\nimport { Config } from '@backstage/config';\nimport { CatalogApi, CatalogClient } from '@backstage/catalog-client';\nimport { TechDocsDocument } from '@backstage/techdocs-common';\n\ninterface MkSearchIndexDoc {\n title: string;\n text: string;\n location: string;\n}\n\nexport type TechDocsCollatorOptions = {\n discovery: PluginEndpointDiscovery;\n logger: Logger;\n locationTemplate?: string;\n catalogClient?: CatalogApi;\n parallelismLimit?: number;\n legacyPathCasing?: boolean;\n};\n\ntype EntityInfo = {\n name: string;\n namespace: string;\n kind: string;\n};\n\nexport class DefaultTechDocsCollator implements DocumentCollator {\n protected discovery: PluginEndpointDiscovery;\n protected locationTemplate: string;\n private readonly logger: Logger;\n private readonly catalogClient: CatalogApi;\n private readonly parallelismLimit: number;\n private readonly legacyPathCasing: boolean;\n public readonly type: string = 'techdocs';\n\n /**\n * @deprecated use static fromConfig method instead.\n */\n constructor({\n discovery,\n locationTemplate,\n logger,\n catalogClient,\n parallelismLimit = 10,\n legacyPathCasing = false,\n }: TechDocsCollatorOptions) {\n this.discovery = discovery;\n this.locationTemplate =\n locationTemplate || '/docs/:namespace/:kind/:name/:path';\n this.logger = logger;\n this.catalogClient =\n catalogClient || new CatalogClient({ discoveryApi: discovery });\n this.parallelismLimit = parallelismLimit;\n this.legacyPathCasing = legacyPathCasing;\n }\n\n static fromConfig(config: Config, options: TechDocsCollatorOptions) {\n const legacyPathCasing =\n config.getOptionalBoolean(\n 'techdocs.legacyUseCaseSensitiveTripletPaths',\n ) || false;\n return new DefaultTechDocsCollator({ ...options, legacyPathCasing });\n }\n\n async execute() {\n const limit = pLimit(this.parallelismLimit);\n const techDocsBaseUrl = await this.discovery.getBaseUrl('techdocs');\n const entities = await this.catalogClient.getEntities({\n fields: [\n 'kind',\n 'namespace',\n 'metadata.annotations',\n 'metadata.name',\n 'metadata.namespace',\n 'spec.type',\n 'spec.lifecycle',\n 'relations',\n ],\n });\n const docPromises = entities.items\n .filter(it => 
it.metadata?.annotations?.['backstage.io/techdocs-ref'])\n .map((entity: Entity) =>\n limit(async (): Promise<TechDocsDocument[]> => {\n const entityInfo = DefaultTechDocsCollator.handleEntityInfoCasing(\n this.legacyPathCasing,\n {\n kind: entity.kind,\n namespace: entity.metadata.namespace || 'default',\n name: entity.metadata.name,\n },\n );\n\n try {\n const searchIndexResponse = await fetch(\n DefaultTechDocsCollator.constructDocsIndexUrl(\n techDocsBaseUrl,\n entityInfo,\n ),\n );\n const searchIndex = await searchIndexResponse.json();\n\n return searchIndex.docs.map((doc: MkSearchIndexDoc) => ({\n title: unescape(doc.title),\n text: unescape(doc.text || ''),\n location: this.applyArgsToFormat(this.locationTemplate, {\n ...entityInfo,\n path: doc.location,\n }),\n path: doc.location,\n ...entityInfo,\n componentType: entity.spec?.type?.toString() || 'other',\n lifecycle: (entity.spec?.lifecycle as string) || '',\n owner:\n entity.relations?.find(r => r.type === RELATION_OWNED_BY)\n ?.target?.name || '',\n }));\n } catch (e) {\n this.logger.debug(\n `Failed to retrieve tech docs search index for entity ${entityInfo.namespace}/${entityInfo.kind}/${entityInfo.name}`,\n e,\n );\n return [];\n }\n }),\n );\n return (await Promise.all(docPromises)).flat();\n }\n\n protected applyArgsToFormat(\n format: string,\n args: Record<string, string>,\n ): string {\n let formatted = format;\n for (const [key, value] of Object.entries(args)) {\n formatted = formatted.replace(`:${key}`, value);\n }\n return formatted;\n }\n\n private static constructDocsIndexUrl(\n techDocsBaseUrl: string,\n entityInfo: { kind: string; namespace: string; name: string },\n ) {\n return `${techDocsBaseUrl}/static/docs/${entityInfo.namespace}/${entityInfo.kind}/${entityInfo.name}/search/search_index.json`;\n }\n\n private static handleEntityInfoCasing(\n legacyPaths: boolean,\n entityInfo: EntityInfo,\n ): EntityInfo {\n return legacyPaths\n ? 
entityInfo\n : Object.entries(entityInfo).reduce((acc, [key, value]) => {\n return { ...acc, [key]: value.toLocaleLowerCase('en-US') };\n }, {} as EntityInfo);\n }\n}\n"],"names":["stringifyEntityRef","ENTITY_DEFAULT_NAMESPACE","NotModifiedError","os","fs","path","getLocationForEntity","UrlPreparer","winston","PassThrough","NotFoundError","Router","catalogClient","CatalogClient","ScmIntegrations","fetch","pLimit","unescape","RELATION_OWNED_BY"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAiBA,MAAM,oBAAoB;2BAMQ;AAAA,EAIhC,YAAY,WAAmB;AAC7B,SAAK,YAAY;AACjB,SAAK,oBAAoB;AAAA;AAAA,EAG3B,iBAAuB;AACrB,SAAK,kBAAkB,KAAK,aAAa,KAAK;AAAA;AAAA,EAGhD,iBAAqC;AACnC,WAAO,KAAK,kBAAkB,KAAK;AAAA;AAAA;MAO1B,uBAAuB,CAAC,cAAsB;AACzD,QAAM,cAAc,IAAI,qBAAqB,WAAW;AACxD,MAAI,aAAa;AAEf,QAAI,KAAK,QAAQ,cAAc,KAAK,KAAM;AACxC,aAAO;AAAA;AAAA;AAGX,SAAO;AAAA;;kBCFgB;AAAA,EAUvB,YAAY;AAAA,IACV;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,KACuB;AACvB,SAAK,WAAW,UAAU,IAAI;AAC9B,SAAK,YAAY,WAAW,IAAI;AAChC,SAAK,YAAY;AACjB,SAAK,SAAS;AACd,SAAK,SAAS;AACd,SAAK,SAAS;AACd,SAAK,kBAAkB;AACvB,SAAK,YAAY;AAAA;AAAA,QAON,QAA0B;AApFzC;AAqFI,QAAI,CAAC,KAAK,OAAO,SAAS,KAAK;AAC7B,YAAM,IAAI,MACR;AAAA;AAQJ,SAAK,OAAO,KACV,0CAA0CA,gCACxC,KAAK;AAMT,QAAI;AACJ,QAAI,MAAM,KAAK,UAAU,qBAAqB,KAAK,SAAS;AAC1D,UAAI;AACF,qBACE,OAAM,KAAK,UAAU,sBAAsB;AAAA,UACzC,WACE,WAAK,OAAO,SAAS,cAArB,YAAkCC;AAAA,UACpC,MAAM,KAAK,OAAO;AAAA,UAClB,MAAM,KAAK,OAAO,SAAS;AAAA,YAE7B;AAAA,eACK,KAAP;AAEA,aAAK,OAAO,KACV,6EAA6E;AAAA;AAAA;AAKnF,QAAI;AACJ,QAAI;AACJ,QAAI;AACF,YAAM,mBAAmB,MAAM,KAAK,SAAS,QAAQ,KAAK,QAAQ;AAAA,QAChE,MAAM;AAAA,QACN,QAAQ,KAAK;AAAA;AAGf,oBAAc,iBAAiB;AAC/B,gBAAU,iBAAiB;AAAA,aACpB,KAAP;AACA,UAAI,eAAeC,yBAAkB;AAGnC,YAAI,qBAAqB,KAAK,OAAO,SAAS,KAAK;AACnD,aAAK,OAAO,MACV,YAAYF,gCACV,KAAK;AAGT,eAAO;AAAA;AAET,YAAM,IAAI,MAAM,IAAI;AAAA;AAGtB,SAAK,OAAO,KACV,qCAAqCA,gCACnC,KAAK,sBACS;AAOlB,SAAK,OAAO,KACV,2CAA2CA,gCACzC,KAAK;AAIT,UAAM,aAAa,KAAK,OAAO,kBAC7B;AAEF,UAAM,aAAa,cAAcG,uBAAG;AAEpC,UAAM,qBAAqBC,uBAAG,aAAa;AAC3C,UAAM,YAAY,MAAMA,uBAAG,QACzBC,yBAAK,KAAK,oBAAoB;AAGhC,UAAM,2BAA2BC,oCAC/B,KAAK,QACL,KAAK;AAEP,UAAM,KAAK,UAAU,IAAI;AAAA,MACvB,UAAU;AAAA,MACV;AAAA,MACA;AAAA,MACA,MAAM;AAAA,MACN,QAAQ,KAAK;AAAA,MACb,WAAW,KAAK;AAAA;AAMlB,QAAI,KAAK,oBAAoBC,4BAAa;AACxC,WAAK,OAAO,MACV,+BAA+B;AAEjC,UAAI;AAEF,+BAAG,OAAO;AAAA,eACH,OAAP;AACA,aAAK,OAAO,MAAM,qCAAqC,MAAM;AAAA;AAAA;AAQjE,SAAK,OAAO,KACV,2CAA2CP,gCACzC,KAAK;AAIT,UAAM,KAAK,UAAU,QAAQ;AAAA,MAC3B,QAAQ,KAAK;AAAA,MACb,WAAW;AAAA;AAGb,QAAI;AAEF,6BAAG,OAAO;AACV,WAAK,OAAO,MACV,gCAAgC;AAAA,aAE3B,OAAP;AACA,WAAK,OAAO,MAAM,sCAAsC,MAAM;AAAA;AAIhE,QAAI,qBAAqB,KAAK,OAAO,SAAS,KAAK;AAEnD,WAAO;AAAA;AAAA;;uBClMmB;AAAA,EAM5B,YAAY;AAAA,IACV;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,KAMC;AACD,SAAK,SAAS;AACd,SAAK,SAAS;AACd,SAAK,YAAY;AACjB,SAAK,kBAAkB;AAAA;AAAA,QAGnB,OAAO;AAAA,IACX,iBAAiB,CAAE,KAAK,OAAO;AAAA,IAC/B;AAAA,IACA;AAAA,IACA;AAAA,KAMC;AAED,UAAM,aAAaQ,mBAAQ,aAAa;AAAA,MACtC,OAAO,QAAQ,IAAI,aAAa;AAAA,MAChC,QAAQA,mBAAQ,OAAO,QACrBA,mBAAQ,OAAO,YACfA,mBAAQ,OAAO,aACfA,mBAAQ,OAAO;AAAA,MAEjB,aAAa;AAAA;AAIf,UAAM,YAAY,IAAIC;AACtB,cAAU,GAAG,QAAQ,OAAM,SAAQ;AACjC,UAAI,KAAK,WAAW;AAAA;AAGtB,eAAW,IAAI,IAAID,mBAAQ,WAAW,OAAO,CAAE,QAAQ;AAGvD,QAAI,CAAC,qBAAqB,OAAO,SAAS,MAAO;AAC/C,aAAO,CAAE,SAAS;AAClB;AAAA;AAGF,QAAI,YAAY;AAEhB,QAAI;AACF,YAAM,cAAc,IAAI,YAAY;AAAA,QAClC;AAAA,QACA;AAAA,QACA,WAAW,KAAK;AAAA,QAChB,QAAQ;AAAA,QACR;AAAA,QACA,QAAQ,KAAK;AAAA,QACb,iBAAiB,KAAK;AAAA,QACtB;AAAA;AAGF,YAAM,UAAU,MAAM,YAAY;AAElC,UAAI,CAAC,SAAS;AACZ,eAAO,CAAE,SAAS;AAClB;AAAA;AAAA,aAEK,GAAP;AACA,YAAM,MAAM,kCAAkC,EAAE;AAChD,iBAAW,MAAM;AACjB,WAAK,OAAO,MAAM,KAAK;AACv
B,YAAM;AACN;AAAA;AAMF,aAAS,UAAU,GAAG,UAAU,GAAG,WAAW;AAC5C,UAAI,MAAM,KAAK,UAAU,qBAAqB,SAAS;AACrD,oBAAY;AACZ;AAAA;AAEF,YAAM,IAAI,QAAQ,OAAK,WAAW,GAAG;AAAA;AAEvC,QAAI,CAAC,WAAW;AACd,WAAK,OAAO,MACV;AAEF,YACE,IAAIE,qBACF;AAGJ;AAAA;AAGF,WAAO,CAAE,SAAS;AAAA;AAAA;;AC1EtB,6BACE,KACqC;AACrC,SAAQ,IAAqC,cAAc;AAAA;4BAI3D,SACyB;AACzB,QAAM,SAASC;AACf,QAAM,CAAE,WAAW,QAAQ,QAAQ,aAAc;AACjD,QAAMC,kBAAgB,IAAIC,4BAAc,CAAE,cAAc;AACxD,QAAM,kBAAkBC,4BAAgB,WAAW;AACnD,QAAM,mBAAmB,IAAI,iBAAiB;AAAA,IAC5C;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA;AAGF,SAAO,IAAI,6CAA6C,OAAO,KAAK,QAAQ;AAC1E,UAAM,CAAE,MAAM,WAAW,QAAS,IAAI;AACtC,UAAM,aAAa,CAAE,MAAM,WAAW;AAEtC,QAAI;AACF,YAAM,mBAAmB,MAAM,UAAU,sBACvC;AAGF,UAAI,KAAK;AAAA,aACF,KAAP;AACA,aAAO,KACL,+BAA+Bd,gCAC7B,2BACe;AAEnB,YAAM,IAAIU,qBACR,+BAA+BV,gCAAmB,gBAClD;AAAA;AAAA;AAKN,SAAO,IAAI,2CAA2C,OAAO,KAAK,QAAQ;AACxE,UAAM,aAAa,MAAM,UAAU,WAAW;AAE9C,UAAM,CAAE,MAAM,WAAW,QAAS,IAAI;AACtC,UAAM,aAAa,CAAE,MAAM,WAAW;AAEtC,QAAI;AACF,YAAM,QAAQ,eAAe,IAAI,QAAQ;AAEzC,YAAM,SAAU,MACd,OAAMe,0BACJ,GAAG,+BAA+B,QAAQ,aAAa,QACvD;AAAA,QACE,SAAS,QAAQ,CAAE,eAAe,UAAU,WAAY;AAAA,UAG5D;AAEF,YAAM,mBAAmBT,oCAAqB,QAAQ;AACtD,UAAI,KAAK,IAAK,QAAQ;AAAA,aACf,KAAP;AACA,aAAO,KACL,+BAA+BN,gCAC7B,2BACe;AAEnB,YAAM,IAAIU,qBACR,+BAA+BV,gCAAmB,gBAClD;AAAA;AAAA;AASN,SAAO,IAAI,gCAAgC,OAAO,KAAK,QAAQ;AAtJjE;AAuJI,UAAM,CAAE,MAAM,WAAW,QAAS,IAAI;AACtC,UAAM,QAAQ,eAAe,IAAI,QAAQ;AAEzC,UAAM,SAAS,MAAMY,gBAAc,gBACjC,CAAE,MAAM,WAAW,OACnB,CAAE;AAGJ,QAAI,yCAAS,aAAR,mBAAkB,MAAK;AAC1B,YAAM,IAAIF,qBAAc;AAAA;AAG1B,QAAI;AACJ,QAAI,IAAI,OAAO,cAAc,qBAAqB;AAChD,cAAQ,KACN;AAEF,wBAAkB,mBAAmB;AAAA,WAChC;AACL,wBAAkB,kBAAkB;AAAA;AAMtC,QAAI,OAAO,UAAU,wBAAwB,SAAS;AACpD,sBAAgB,OAAO,CAAE,SAAS;AAClC;AAAA;AAIF,QAAI,oBAAoB,UAAU;AAChC,YAAM,CAAE,WAAW,cAAe;AAElC,YAAM,iBAAiB,OAAO;AAAA,QAC5B;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA;AAEF;AAAA;AAGF,oBAAgB,MACd,IAAI,MACF;AAAA;AAMN,SAAO,IAAI,gBAAgB,UAAU;AAErC,SAAO;AAAA;AAGT,wBAAwB,QAAqC;AA/M7D;AAgNE,SAAO,uCAAQ,MAAM,2BAAd,mBAAuC;AAAA;2BAW9C,KAC0B;AA5N5B;AA8NE,MAAI,UAAU,KAAK;AAAA,IACjB,YAAY;AAAA,IACZ,iBAAiB;AAAA,IACjB,gBAAgB;AAAA;AAIlB,YAAI,WAAJ,mBAAY,GAAG,SAAS,MAAM;AAC5B,QAAI;AAAA;AAIN,QAAM,OAAO,CAAC,MAAkC,SAAc;AAC5D,QAAI,MAAM,UAAU;AAAA,QAAe,KAAK,UAAU;AAAA;AAAA;AAGlD,QAAI,IAAI,OAAO;AACb,UAAI;AAAA;AAAA;AAIR,SAAO;AAAA,IACL,KAAK,UAAQ;AACX,WAAK,OAAO;AAAA;AAAA,IAGd,OAAO,OAAK;AACV,WAAK,SAAS,EAAE;AAChB,UAAI;AAAA;AAAA,IAGN,QAAQ,YAAU;AAChB,WAAK,UAAU;AACf,UAAI;AAAA;AAAA;AAAA;4BAaR,KAC0B;AAC1B,SAAO;AAAA,IACL,KAAK,MAAM;AAAA;AAAA,IACX,OAAO,OAAK;AACV,YAAM;AAAA;AAAA,IAER,QAAQ,CAAC,CAAE,aAAc;AACvB,UAAI,CAAC,SAAS;AACZ,cAAM,IAAIR;AAAA;AAGZ,UACG,OAAO,KACP,KAAK,CAAE,SAAS;AAAA;AAAA;AAAA;;8BC1OwC;AAAA,EAY/D,YAAY;AAAA,IACV;AAAA,IACA;AAAA,IACA;AAAA,mBACAU;AAAA,IACA,mBAAmB;AAAA,IACnB,mBAAmB;AAAA,KACO;AAZZ,gBAAe;AAa7B,SAAK,YAAY;AACjB,SAAK,mBACH,oBAAoB;AACtB,SAAK,SAAS;AACd,SAAK,gBACHA,mBAAiB,IAAIC,4BAAc,CAAE,cAAc;AACrD,SAAK,mBAAmB;AACxB,SAAK,mBAAmB;AAAA;AAAA,SAGnB,WAAW,QAAgB,SAAkC;AAClE,UAAM,mBACJ,OAAO,mBACL,kDACG;AACP,WAAO,IAAI,wBAAwB,IAAK,SAAS;AAAA;AAAA,QAG7C,UAAU;AACd,UAAM,QAAQG,2BAAO,KAAK;AAC1B,UAAM,kBAAkB,MAAM,KAAK,UAAU,WAAW;AACxD,UAAM,WAAW,MAAM,KAAK,cAAc,YAAY;AAAA,MACpD,QAAQ;AAAA,QACN;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA,QACA;AAAA;AAAA;AAGJ,UAAM,cAAc,SAAS,MAC1B,OAAO,QAAG;AAtGjB;AAsGoB,4BAAG,aAAH,mBAAa,gBAAb,mBAA2B;AAAA,OACxC,IAAI,CAAC,WACJ,MAAM,YAAyC;AAC7C,YAAM,aAAa,wBAAwB,uBACzC,KAAK,kBACL;AAAA,QACE,MAAM,OAAO;AAAA,QACb,WAAW,OAAO,SAAS,aAAa;AAAA,QACxC,MAAM,OAAO,SAAS;AAAA;AAI1B,UAAI;AACF,cAAM,sBAAsB,MAAMD,0BAChC,wBAAwB,sBACtB,iBACA;AAGJ,cAAM,cAAc,MAAM,oBAAoB;AAE9C,eAAO,YAAY,KAAK,IAAI,CAAC,QAAuB;AA3HhE;AA2HoE;AAAA,YACtD,OAAOE,6BAAS,I
AAI;AAAA,YACpB,MAAMA,6BAAS,IAAI,QAAQ;AAAA,YAC3B,UAAU,KAAK,kBAAkB,KAAK,kBAAkB;AAAA,iBACnD;AAAA,cACH,MAAM,IAAI;AAAA;AAAA,YAEZ,MAAM,IAAI;AAAA,eACP;AAAA,YACH,eAAe,oBAAO,SAAP,mBAAa,SAAb,mBAAmB,eAAc;AAAA,YAChD,WAAY,cAAO,SAAP,mBAAa,cAAwB;AAAA,YACjD,OACE,0BAAO,cAAP,mBAAkB,KAAK,OAAK,EAAE,SAASC,oCAAvC,mBACI,WADJ,mBACY,SAAQ;AAAA;AAAA;AAAA,eAEjB,GAAP;AACA,aAAK,OAAO,MACV,wDAAwD,WAAW,aAAa,WAAW,QAAQ,WAAW,QAC9G;AAEF,eAAO;AAAA;AAAA;AAIf,WAAQ,OAAM,QAAQ,IAAI,cAAc;AAAA;AAAA,EAGhC,kBACR,QACA,MACQ;AACR,QAAI,YAAY;AAChB,eAAW,CAAC,KAAK,UAAU,OAAO,QAAQ,OAAO;AAC/C,kBAAY,UAAU,QAAQ,IAAI,OAAO;AAAA;AAE3C,WAAO;AAAA;AAAA,SAGM,sBACb,iBACA,YACA;AACA,WAAO,GAAG,+BAA+B,WAAW,aAAa,WAAW,QAAQ,WAAW;AAAA;AAAA,SAGlF,uBACb,aACA,YACY;AACZ,WAAO,cACH,aACA,OAAO,QAAQ,YAAY,OAAO,CAAC,KAAK,CAAC,KAAK,WAAW;AACvD,aAAO,IAAK,MAAM,MAAM,MAAM,kBAAkB;AAAA,OAC/C;AAAA;AAAA;;;;;;;;;;;;;"}
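The router sources embedded in the source map above show that `GET /sync/:namespace/:kind/:name` now answers with a `text/event-stream` whose `log`, `error`, and `finish` events are written by `createEventStream`. As a rough sketch of that wire protocol only (not code shipped in this package), a browser-side consumer might look like the following; the backend base URL and entity triplet are illustrative placeholders:

```ts
// Sketch of a client for the sync endpoint's event-stream, assuming it runs
// in a browser (or with an EventSource polyfill). The base URL and the
// entity triplet below are placeholders, not values from this package.
const techdocsApi = 'http://localhost:7000/api/techdocs'; // assumed backend URL
const source = new EventSource(
  `${techdocsApi}/sync/default/component/example-docs`,
);

// 'log' events forward individual build log lines while docs are generated.
source.addEventListener('log', event => {
  console.log('build log:', JSON.parse((event as MessageEvent).data));
});

// 'finish' reports whether the docs were rebuilt, then the stream closes.
source.addEventListener('finish', event => {
  const { updated } = JSON.parse((event as MessageEvent).data);
  console.log(updated ? 'docs were rebuilt' : 'docs were already up to date');
  source.close();
});

// Server-sent 'error' events carry the failure message. Note that EventSource
// also fires a plain 'error' event on connection problems, which has no data.
source.addEventListener('error', event => {
  const data = (event as MessageEvent).data;
  if (data !== undefined) {
    console.error('sync failed:', JSON.parse(data));
  }
  source.close();
});
```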
package/dist/index.d.ts
CHANGED
@@ -1,11 +1,12 @@
 import { PluginEndpointDiscovery } from '@backstage/backend-common';
 import { Config } from '@backstage/config';
-import { PreparerBuilder, GeneratorBuilder, PublisherBase } from '@backstage/techdocs-common';
+import { PreparerBuilder, GeneratorBuilder, PublisherBase, TechDocsDocument } from '@backstage/techdocs-common';
 export * from '@backstage/techdocs-common';
+export { TechDocsDocument } from '@backstage/techdocs-common';
 import express from 'express';
 import { Knex } from 'knex';
 import { Logger } from 'winston';
-import {
+import { DocumentCollator } from '@backstage/search-common';
 import { CatalogApi } from '@backstage/catalog-client';
 
 /**
@@ -37,30 +38,31 @@ declare type RecommendedDeploymentOptions = {
 declare type RouterOptions = RecommendedDeploymentOptions | OutOfTheBoxDeploymentOptions;
 declare function createRouter(options: RouterOptions): Promise<express.Router>;
 
-
-
-
-
-
-
-
+declare type TechDocsCollatorOptions = {
+    discovery: PluginEndpointDiscovery;
+    logger: Logger;
+    locationTemplate?: string;
+    catalogClient?: CatalogApi;
+    parallelismLimit?: number;
+    legacyPathCasing?: boolean;
+};
 declare class DefaultTechDocsCollator implements DocumentCollator {
     protected discovery: PluginEndpointDiscovery;
     protected locationTemplate: string;
     private readonly logger;
     private readonly catalogClient;
     private readonly parallelismLimit;
+    private readonly legacyPathCasing;
     readonly type: string;
-
-
-
-
-
-    parallelismLimit?: number;
-    });
+    /**
+     * @deprecated use static fromConfig method instead.
+     */
+    constructor({ discovery, locationTemplate, logger, catalogClient, parallelismLimit, legacyPathCasing, }: TechDocsCollatorOptions);
+    static fromConfig(config: Config, options: TechDocsCollatorOptions): DefaultTechDocsCollator;
     execute(): Promise<TechDocsDocument[]>;
     protected applyArgsToFormat(format: string, args: Record<string, string>): string;
     private static constructDocsIndexUrl;
+    private static handleEntityInfoCasing;
 }
 
-export { DefaultTechDocsCollator,
+export { DefaultTechDocsCollator, TechDocsCollatorOptions, createRouter };
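The typings above expose the collator's new construction path. A minimal usage sketch against only the declarations in this diff is shown below; `config`, `logger`, and `discovery` are assumed to come from the backend plugin environment, and wiring the returned documents into a search index builder is deliberately left out:

```ts
import { PluginEndpointDiscovery } from '@backstage/backend-common';
import { Config } from '@backstage/config';
import { Logger } from 'winston';
import { DefaultTechDocsCollator } from '@backstage/plugin-techdocs-backend';

// Minimal sketch, assuming config/logger/discovery are supplied by the
// backend plugin environment; a real setup would register the collator
// with a search index builder instead of calling execute() directly.
export async function collateTechDocs(
  config: Config,
  logger: Logger,
  discovery: PluginEndpointDiscovery,
) {
  // fromConfig reads techdocs.legacyUseCaseSensitiveTripletPaths so that
  // document locations use the new lowercase entity-triplet casing unless
  // the legacy behavior is explicitly enabled.
  const collator = DefaultTechDocsCollator.fromConfig(config, {
    discovery,
    logger,
    parallelismLimit: 10, // optional, defaults to 10
  });

  // execute() fetches each entity's search_index.json from the techdocs
  // endpoint and maps it to TechDocsDocument entries.
  const documents = await collator.execute();
  logger.info(`Collated ${documents.length} TechDocs documents`);
  return documents;
}
```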
package/package.json
CHANGED
@@ -1,6 +1,7 @@
 {
   "name": "@backstage/plugin-techdocs-backend",
-  "
+  "description": "The Backstage backend plugin that renders technical documentation for your components",
+  "version": "0.10.3",
   "main": "dist/index.cjs.js",
   "types": "dist/index.d.ts",
   "license": "Apache-2.0",
@@ -30,14 +31,14 @@
     "clean": "backstage-cli clean"
   },
   "dependencies": {
-    "@backstage/backend-common": "^0.
-    "@backstage/catalog-client": "^0.
-    "@backstage/catalog-model": "^0.9.
-    "@backstage/config": "^0.1.
+    "@backstage/backend-common": "^0.9.4",
+    "@backstage/catalog-client": "^0.4.0",
+    "@backstage/catalog-model": "^0.9.3",
+    "@backstage/config": "^0.1.10",
     "@backstage/errors": "^0.1.1",
-    "@backstage/integration": "^0.6.
-    "@backstage/search-common": "^0.
-    "@backstage/techdocs-common": "^0.
+    "@backstage/integration": "^0.6.5",
+    "@backstage/search-common": "^0.2.0",
+    "@backstage/techdocs-common": "^0.10.1",
     "@types/express": "^4.17.6",
     "cross-fetch": "^3.0.6",
     "dockerode": "^3.2.1",
@@ -50,7 +51,7 @@
     "winston": "^3.2.1"
   },
   "devDependencies": {
-    "@backstage/cli": "^0.7.
+    "@backstage/cli": "^0.7.13",
     "@backstage/test-utils": "^0.1.17",
     "@types/dockerode": "^3.2.1",
     "msw": "^0.29.0",
@@ -61,5 +62,5 @@
     "config.d.ts"
   ],
   "configSchema": "config.d.ts",
-  "gitHead": "
+  "gitHead": "2fd2dbc5d2fad9da5f623126920958970aea469b"
 }