@backstage/plugin-techdocs-node 1.12.12-next.1 → 1.12.12
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +34 -0
- package/dist/extensions.cjs.js +22 -0
- package/dist/extensions.cjs.js.map +1 -0
- package/dist/helpers.cjs.js +80 -0
- package/dist/helpers.cjs.js.map +1 -0
- package/dist/index.cjs.js +29 -2602
- package/dist/index.cjs.js.map +1 -1
- package/dist/index.d.ts +11 -1
- package/dist/stages/generate/DockerContainerRunner.cjs.js +99 -0
- package/dist/stages/generate/DockerContainerRunner.cjs.js.map +1 -0
- package/dist/stages/generate/generators.cjs.js +42 -0
- package/dist/stages/generate/generators.cjs.js.map +1 -0
- package/dist/stages/generate/helpers.cjs.js +265 -0
- package/dist/stages/generate/helpers.cjs.js.map +1 -0
- package/dist/stages/generate/index.cjs.js +15 -0
- package/dist/stages/generate/index.cjs.js.map +1 -0
- package/dist/stages/generate/mkdocsPatchers.cjs.js +96 -0
- package/dist/stages/generate/mkdocsPatchers.cjs.js.map +1 -0
- package/dist/stages/generate/techdocs.cjs.js +169 -0
- package/dist/stages/generate/techdocs.cjs.js.map +1 -0
- package/dist/stages/prepare/dir.cjs.js +63 -0
- package/dist/stages/prepare/dir.cjs.js.map +1 -0
- package/dist/stages/prepare/preparers.cjs.js +54 -0
- package/dist/stages/prepare/preparers.cjs.js.map +1 -0
- package/dist/stages/prepare/url.cjs.js +46 -0
- package/dist/stages/prepare/url.cjs.js.map +1 -0
- package/dist/stages/publish/awsS3.cjs.js +436 -0
- package/dist/stages/publish/awsS3.cjs.js.map +1 -0
- package/dist/stages/publish/azureBlobStorage.cjs.js +337 -0
- package/dist/stages/publish/azureBlobStorage.cjs.js.map +1 -0
- package/dist/stages/publish/googleStorage.cjs.js +288 -0
- package/dist/stages/publish/googleStorage.cjs.js.map +1 -0
- package/dist/stages/publish/helpers.cjs.js +138 -0
- package/dist/stages/publish/helpers.cjs.js.map +1 -0
- package/dist/stages/publish/local.cjs.js +248 -0
- package/dist/stages/publish/local.cjs.js.map +1 -0
- package/dist/stages/publish/migrations/GoogleMigration.cjs.js +52 -0
- package/dist/stages/publish/migrations/GoogleMigration.cjs.js.map +1 -0
- package/dist/stages/publish/openStackSwift.cjs.js +286 -0
- package/dist/stages/publish/openStackSwift.cjs.js.map +1 -0
- package/dist/stages/publish/publish.cjs.js +100 -0
- package/dist/stages/publish/publish.cjs.js.map +1 -0
- package/package.json +13 -13

package/dist/stages/publish/azureBlobStorage.cjs.js
@@ -0,0 +1,337 @@
+'use strict';
+
+var identity = require('@azure/identity');
+var storageBlob = require('@azure/storage-blob');
+var errors = require('@backstage/errors');
+var JSON5 = require('json5');
+var createLimiter = require('p-limit');
+var path = require('path');
+var helpers = require('./helpers.cjs.js');
+
+function _interopDefaultCompat (e) { return e && typeof e === 'object' && 'default' in e ? e : { default: e }; }
+
+var JSON5__default = /*#__PURE__*/_interopDefaultCompat(JSON5);
+var createLimiter__default = /*#__PURE__*/_interopDefaultCompat(createLimiter);
+var path__default = /*#__PURE__*/_interopDefaultCompat(path);
+
+const BATCH_CONCURRENCY = 3;
+class AzureBlobStoragePublish {
+  storageClient;
+  containerName;
+  legacyPathCasing;
+  logger;
+  constructor(options) {
+    this.storageClient = options.storageClient;
+    this.containerName = options.containerName;
+    this.legacyPathCasing = options.legacyPathCasing;
+    this.logger = options.logger;
+  }
+  static fromConfig(config, logger) {
+    let storageClient;
+    let containerName = "";
+    try {
+      containerName = config.getString(
+        "techdocs.publisher.azureBlobStorage.containerName"
+      );
+    } catch (error) {
+      throw new Error(
+        "Since techdocs.publisher.type is set to 'azureBlobStorage' in your app config, techdocs.publisher.azureBlobStorage.containerName is required."
+      );
+    }
+    const legacyPathCasing = config.getOptionalBoolean(
+      "techdocs.legacyUseCaseSensitiveTripletPaths"
+    ) || false;
+    const connectionStringKey = "techdocs.publisher.azureBlobStorage.connectionString";
+    const connectionString = config.getOptionalString(connectionStringKey);
+    if (connectionString) {
+      logger.info(
+        `Using '${connectionStringKey}' configuration to create storage client`
+      );
+      storageClient = storageBlob.BlobServiceClient.fromConnectionString(connectionString);
+    } else {
+      let accountName = "";
+      try {
+        accountName = config.getString(
+          "techdocs.publisher.azureBlobStorage.credentials.accountName"
+        );
+      } catch (error) {
+        throw new Error(
+          "Since techdocs.publisher.type is set to 'azureBlobStorage' in your app config, techdocs.publisher.azureBlobStorage.credentials.accountName is required."
+        );
+      }
+      const accountKey = config.getOptionalString(
+        "techdocs.publisher.azureBlobStorage.credentials.accountKey"
+      );
+      let credential;
+      if (accountKey) {
+        credential = new storageBlob.StorageSharedKeyCredential(accountName, accountKey);
+      } else {
+        credential = new identity.DefaultAzureCredential();
+      }
+      storageClient = new storageBlob.BlobServiceClient(
+        `https://${accountName}.blob.core.windows.net`,
+        credential
+      );
+    }
+    return new AzureBlobStoragePublish({
+      storageClient,
+      containerName,
+      legacyPathCasing,
+      logger
+    });
+  }
+  async getReadiness() {
+    try {
+      const response = await this.storageClient.getContainerClient(this.containerName).getProperties();
+      if (response._response.status === 200) {
+        return {
+          isAvailable: true
+        };
+      }
+      if (response._response.status >= 400) {
+        this.logger.error(
+          `Failed to retrieve metadata from ${response._response.request.url} with status code ${response._response.status}.`
+        );
+      }
+    } catch (e) {
+      errors.assertError(e);
+      this.logger.error(`from Azure Blob Storage client library: ${e.message}`);
+    }
+    this.logger.error(
+      `Could not retrieve metadata about the Azure Blob Storage container ${this.containerName}. Make sure that the Azure project and container exist and the access key is setup correctly techdocs.publisher.azureBlobStorage.credentials defined in app config has correct permissions. Refer to https://backstage.io/docs/features/techdocs/using-cloud-storage`
+    );
+    return { isAvailable: false };
+  }
+  /**
+   * Upload all the files from the generated `directory` to the Azure Blob Storage container.
+   * Directory structure used in the container is - entityNamespace/entityKind/entityName/index.html
+   */
+  async publish({
+    entity,
+    directory
+  }) {
+    const objects = [];
+    const useLegacyPathCasing = this.legacyPathCasing;
+    const remoteFolder = helpers.getCloudPathForLocalPath(
+      entity,
+      void 0,
+      useLegacyPathCasing
+    );
+    let existingFiles = [];
+    try {
+      existingFiles = await this.getAllBlobsFromContainer({
+        prefix: remoteFolder,
+        maxPageSize: BATCH_CONCURRENCY
+      });
+    } catch (e) {
+      errors.assertError(e);
+      this.logger.error(
+        `Unable to list files for Entity ${entity.metadata.name}: ${e.message}`
+      );
+    }
+    let absoluteFilesToUpload;
+    let container;
+    try {
+      absoluteFilesToUpload = await helpers.getFileTreeRecursively(directory);
+      container = this.storageClient.getContainerClient(this.containerName);
+      const failedOperations = [];
+      await helpers.bulkStorageOperation(
+        async (absoluteFilePath) => {
+          const relativeFilePath = path__default.default.normalize(
+            path__default.default.relative(directory, absoluteFilePath)
+          );
+          const remotePath = helpers.getCloudPathForLocalPath(
+            entity,
+            relativeFilePath,
+            useLegacyPathCasing
+          );
+          objects.push(remotePath);
+          const response = await container.getBlockBlobClient(remotePath).uploadFile(absoluteFilePath);
+          if (response._response.status >= 400) {
+            failedOperations.push(
+              new Error(
+                `Upload failed for ${absoluteFilePath} with status code ${response._response.status}`
+              )
+            );
+          }
+          return response;
+        },
+        absoluteFilesToUpload,
+        { concurrencyLimit: BATCH_CONCURRENCY }
+      );
+      if (failedOperations.length > 0) {
+        throw new Error(
+          failedOperations.map((r) => r.message).filter(Boolean).join(" ")
+        );
+      }
+      this.logger.info(
+        `Successfully uploaded all the generated files for Entity ${entity.metadata.name}. Total number of files: ${absoluteFilesToUpload.length}`
+      );
+    } catch (e) {
+      const errorMessage = `Unable to upload file(s) to Azure. ${e}`;
+      this.logger.error(errorMessage);
+      throw new Error(errorMessage);
+    }
+    try {
+      const relativeFilesToUpload = absoluteFilesToUpload.map(
+        (absoluteFilePath) => helpers.getCloudPathForLocalPath(
+          entity,
+          path__default.default.relative(directory, absoluteFilePath),
+          useLegacyPathCasing
+        )
+      );
+      const staleFiles = helpers.getStaleFiles(relativeFilesToUpload, existingFiles);
+      await helpers.bulkStorageOperation(
+        async (relativeFilePath) => {
+          return await container.deleteBlob(relativeFilePath);
+        },
+        staleFiles,
+        { concurrencyLimit: BATCH_CONCURRENCY }
+      );
+      this.logger.info(
+        `Successfully deleted stale files for Entity ${entity.metadata.name}. Total number of files: ${staleFiles.length}`
+      );
+    } catch (error) {
+      const errorMessage = `Unable to delete file(s) from Azure. ${error}`;
+      this.logger.error(errorMessage);
+    }
+    return { objects };
+  }
+  download(containerName, blobPath) {
+    return new Promise((resolve, reject) => {
+      const fileStreamChunks = [];
+      this.storageClient.getContainerClient(containerName).getBlockBlobClient(blobPath).download().then((res) => {
+        const body = res.readableStreamBody;
+        if (!body) {
+          reject(new Error(`Unable to parse the response data`));
+          return;
+        }
+        body.on("error", reject).on("data", (chunk) => {
+          fileStreamChunks.push(chunk);
+        }).on("end", () => {
+          resolve(Buffer.concat(fileStreamChunks));
+        });
+      }).catch(reject);
+    });
+  }
+  async fetchTechDocsMetadata(entityName) {
+    const entityTriplet = `${entityName.namespace}/${entityName.kind}/${entityName.name}`;
+    const entityRootDir = this.legacyPathCasing ? entityTriplet : helpers.lowerCaseEntityTriplet(entityTriplet);
+    try {
+      const techdocsMetadataJson = await this.download(
+        this.containerName,
+        `${entityRootDir}/techdocs_metadata.json`
+      );
+      if (!techdocsMetadataJson) {
+        throw new Error(
+          `Unable to parse the techdocs metadata file ${entityRootDir}/techdocs_metadata.json.`
+        );
+      }
+      const techdocsMetadata = JSON5__default.default.parse(
+        techdocsMetadataJson.toString("utf-8")
+      );
+      return techdocsMetadata;
+    } catch (e) {
+      throw new errors.ForwardedError("TechDocs metadata fetch failed", e);
+    }
+  }
+  /**
+   * Express route middleware to serve static files on a route in techdocs-backend.
+   */
+  docsRouter() {
+    return (req, res) => {
+      const decodedUri = decodeURI(req.path.replace(/^\//, ""));
+      const filePath = this.legacyPathCasing ? decodedUri : helpers.lowerCaseEntityTripletInStoragePath(decodedUri);
+      const fileExtension = path__default.default.extname(filePath);
+      const responseHeaders = helpers.getHeadersForFileExtension(fileExtension);
+      this.download(this.containerName, filePath).then((fileContent) => {
+        for (const [headerKey, headerValue] of Object.entries(
+          responseHeaders
+        )) {
+          res.setHeader(headerKey, headerValue);
+        }
+        res.send(fileContent);
+      }).catch((e) => {
+        this.logger.warn(
+          `TechDocs Azure router failed to serve content from container ${this.containerName} at path ${filePath}: ${e.message}`
+        );
+        res.status(404).send("File Not Found");
+      });
+    };
+  }
+  /**
+   * A helper function which checks if index.html of an Entity's docs site is available. This
+   * can be used to verify if there are any pre-generated docs available to serve.
+   */
+  hasDocsBeenGenerated(entity) {
+    const entityTriplet = `${entity.metadata.namespace}/${entity.kind}/${entity.metadata.name}`;
+    const entityRootDir = this.legacyPathCasing ? entityTriplet : helpers.lowerCaseEntityTriplet(entityTriplet);
+    return this.storageClient.getContainerClient(this.containerName).getBlockBlobClient(`${entityRootDir}/index.html`).exists();
+  }
+  async renameBlob(originalName, newName, removeOriginal = false) {
+    const container = this.storageClient.getContainerClient(this.containerName);
+    const blob = container.getBlobClient(newName);
+    const { url } = container.getBlobClient(originalName);
+    const response = await blob.beginCopyFromURL(url);
+    await response.pollUntilDone();
+    if (removeOriginal) {
+      await container.deleteBlob(originalName);
+    }
+  }
+  async renameBlobToLowerCase(originalPath, removeOriginal) {
+    let newPath;
+    try {
+      newPath = helpers.lowerCaseEntityTripletInStoragePath(originalPath);
+    } catch (e) {
+      errors.assertError(e);
+      this.logger.warn(e.message);
+      return;
+    }
+    if (originalPath === newPath) return;
+    try {
+      this.logger.debug(`Migrating ${originalPath}`);
+      await this.renameBlob(originalPath, newPath, removeOriginal);
+    } catch (e) {
+      errors.assertError(e);
+      this.logger.warn(`Unable to migrate ${originalPath}: ${e.message}`);
+    }
+  }
+  async migrateDocsCase({
+    removeOriginal = false,
+    concurrency = 25
+  }) {
+    const promises = [];
+    const limiter = createLimiter__default.default(concurrency);
+    const container = this.storageClient.getContainerClient(this.containerName);
+    for await (const blob of container.listBlobsFlat()) {
+      promises.push(
+        limiter(
+          this.renameBlobToLowerCase.bind(this),
+          blob.name,
+          removeOriginal
+        )
+      );
+    }
+    await Promise.all(promises);
+  }
+  async getAllBlobsFromContainer({
+    prefix,
+    maxPageSize
+  }) {
+    const blobs = [];
+    const container = this.storageClient.getContainerClient(this.containerName);
+    let iterator = container.listBlobsFlat({ prefix }).byPage({ maxPageSize });
+    let response = (await iterator.next()).value;
+    do {
+      for (const blob of response?.segment?.blobItems ?? []) {
+        blobs.push(blob.name);
+      }
+      iterator = container.listBlobsFlat({ prefix }).byPage({ continuationToken: response.continuationToken, maxPageSize });
+      response = (await iterator.next()).value;
+    } while (response && response.continuationToken);
+    return blobs;
+  }
+}
+
+exports.AzureBlobStoragePublish = AzureBlobStoragePublish;
+//# sourceMappingURL=azureBlobStorage.cjs.js.map

package/dist/stages/publish/azureBlobStorage.cjs.js.map
@@ -0,0 +1 @@
+{"version":3,"file":"azureBlobStorage.cjs.js","sources":["../../../src/stages/publish/azureBlobStorage.ts"],"sourcesContent":[…],"names":[…],"mappings":"…"}
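
For orientation, here is a minimal usage sketch (not part of the diff above) showing how the compiled AzureBlobStoragePublish class could be wired up. It assumes the class is re-exported from the package root, and the container name, account name, and console-backed logger stub below are all placeholders, not values from this package.

// A minimal sketch, assuming AzureBlobStoragePublish is re-exported from the
// package root. containerName and accountName are placeholder values.
import { ConfigReader } from '@backstage/config';
import { AzureBlobStoragePublish } from '@backstage/plugin-techdocs-node';

const config = new ConfigReader({
  techdocs: {
    publisher: {
      type: 'azureBlobStorage',
      azureBlobStorage: {
        containerName: 'techdocs-site', // placeholder container
        // Without a connectionString, credentials.accountName is required and
        // DefaultAzureCredential (or accountKey, if set) is used, per the
        // fromConfig logic in the compiled code above.
        credentials: { accountName: 'examplestorageacct' }, // placeholder account
      },
    },
  },
});

// Console-backed stand-in for a LoggerService-compatible logger.
const logger: any = {
  info: console.log, warn: console.warn, error: console.error,
  debug: console.debug, child: () => logger,
};

const publisher = AzureBlobStoragePublish.fromConfig(config, logger);
// Resolves { isAvailable: true } only when container metadata can be fetched.
publisher.getReadiness().then(({ isAvailable }) => console.log(isAvailable));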
package/dist/stages/publish/googleStorage.cjs.js
@@ -0,0 +1,288 @@
+'use strict';
+
+var errors = require('@backstage/errors');
+var storage = require('@google-cloud/storage');
+var JSON5 = require('json5');
+var path = require('path');
+var helpers = require('./helpers.cjs.js');
+var GoogleMigration = require('./migrations/GoogleMigration.cjs.js');
+
+function _interopDefaultCompat (e) { return e && typeof e === 'object' && 'default' in e ? e : { default: e }; }
+
+var JSON5__default = /*#__PURE__*/_interopDefaultCompat(JSON5);
+var path__default = /*#__PURE__*/_interopDefaultCompat(path);
+
+class GoogleGCSPublish {
+  storageClient;
+  bucketName;
+  legacyPathCasing;
+  logger;
+  bucketRootPath;
+  constructor(options) {
+    this.storageClient = options.storageClient;
+    this.bucketName = options.bucketName;
+    this.legacyPathCasing = options.legacyPathCasing;
+    this.logger = options.logger;
+    this.bucketRootPath = options.bucketRootPath;
+  }
+  static fromConfig(config, logger, options) {
+    let bucketName = "";
+    try {
+      bucketName = config.getString("techdocs.publisher.googleGcs.bucketName");
+    } catch (error) {
+      throw new Error(
+        "Since techdocs.publisher.type is set to 'googleGcs' in your app config, techdocs.publisher.googleGcs.bucketName is required."
+      );
+    }
+    const bucketRootPath = helpers.normalizeExternalStorageRootPath(
+      config.getOptionalString("techdocs.publisher.googleGcs.bucketRootPath") || ""
+    );
+    const credentials = config.getOptionalString(
+      "techdocs.publisher.googleGcs.credentials"
+    );
+    const projectId = config.getOptionalString(
+      "techdocs.publisher.googleGcs.projectId"
+    );
+    let credentialsJson = {};
+    if (credentials) {
+      try {
+        credentialsJson = JSON.parse(credentials);
+      } catch (err) {
+        throw new Error(
+          "Error in parsing techdocs.publisher.googleGcs.credentials config to JSON."
+        );
+      }
+    }
+    const clientOpts = options ?? {};
+    if (projectId) {
+      clientOpts.projectId = projectId;
+    }
+    const storageClient = new storage.Storage({
+      ...credentials && {
+        projectId: credentialsJson.project_id,
+        credentials: credentialsJson
+      },
+      ...clientOpts
+    });
+    const legacyPathCasing = config.getOptionalBoolean(
+      "techdocs.legacyUseCaseSensitiveTripletPaths"
+    ) || false;
+    return new GoogleGCSPublish({
+      storageClient,
+      bucketName,
+      legacyPathCasing,
+      logger,
+      bucketRootPath
+    });
+  }
+  /**
+   * Check if the defined bucket exists. Being able to connect means the configuration is good
+   * and the storage client will work.
+   */
+  async getReadiness() {
+    try {
+      await this.storageClient.bucket(this.bucketName).getMetadata();
+      this.logger.info(
+        `Successfully connected to the GCS bucket ${this.bucketName}.`
+      );
+      return {
+        isAvailable: true
+      };
+    } catch (err) {
+      errors.assertError(err);
+      this.logger.error(
+        `Could not retrieve metadata about the GCS bucket ${this.bucketName}. Make sure the bucket exists. Also make sure that authentication is setup either by explicitly defining techdocs.publisher.googleGcs.credentials in app config or by using environment variables. Refer to https://backstage.io/docs/features/techdocs/using-cloud-storage`
+      );
+      this.logger.error(`from GCS client library: ${err.message}`);
+      return { isAvailable: false };
+    }
+  }
+  /**
+   * Upload all the files from the generated `directory` to the GCS bucket.
+   * Directory structure used in the bucket is - entityNamespace/entityKind/entityName/index.html
+   */
+  async publish({
+    entity,
+    directory
+  }) {
+    const objects = [];
+    const useLegacyPathCasing = this.legacyPathCasing;
+    const bucket = this.storageClient.bucket(this.bucketName);
+    const bucketRootPath = this.bucketRootPath;
+    let existingFiles = [];
+    try {
+      const remoteFolder = helpers.getCloudPathForLocalPath(
+        entity,
+        void 0,
+        useLegacyPathCasing,
+        bucketRootPath
+      );
+      existingFiles = await this.getFilesForFolder(remoteFolder);
+    } catch (e) {
+      errors.assertError(e);
+      this.logger.error(
+        `Unable to list files for Entity ${entity.metadata.name}: ${e.message}`
+      );
+    }
+    let absoluteFilesToUpload;
+    try {
+      absoluteFilesToUpload = await helpers.getFileTreeRecursively(directory);
+      await helpers.bulkStorageOperation(
+        async (absoluteFilePath) => {
+          const relativeFilePath = path__default.default.relative(directory, absoluteFilePath);
+          const destination = helpers.getCloudPathForLocalPath(
+            entity,
+            relativeFilePath,
+            useLegacyPathCasing,
+            bucketRootPath
+          );
+          objects.push(destination);
+          return await bucket.upload(absoluteFilePath, { destination });
+        },
+        absoluteFilesToUpload,
+        { concurrencyLimit: 10 }
+      );
+      this.logger.info(
+        `Successfully uploaded all the generated files for Entity ${entity.metadata.name}. Total number of files: ${absoluteFilesToUpload.length}`
+      );
+    } catch (e) {
+      const errorMessage = `Unable to upload file(s) to Google Cloud Storage. ${e}`;
+      this.logger.error(errorMessage);
+      throw new Error(errorMessage);
+    }
+    try {
+      const relativeFilesToUpload = absoluteFilesToUpload.map(
+        (absoluteFilePath) => helpers.getCloudPathForLocalPath(
+          entity,
+          path__default.default.relative(directory, absoluteFilePath),
+          useLegacyPathCasing,
+          bucketRootPath
+        )
+      );
+      const staleFiles = helpers.getStaleFiles(relativeFilesToUpload, existingFiles);
+      await helpers.bulkStorageOperation(
+        async (relativeFilePath) => {
+          return await bucket.file(relativeFilePath).delete();
+        },
+        staleFiles,
+        { concurrencyLimit: 10 }
+      );
+      this.logger.info(
+        `Successfully deleted stale files for Entity ${entity.metadata.name}. Total number of files: ${staleFiles.length}`
+      );
+    } catch (error) {
+      const errorMessage = `Unable to delete file(s) from Google Cloud Storage. ${error}`;
+      this.logger.error(errorMessage);
+    }
+    return { objects };
+  }
+  fetchTechDocsMetadata(entityName) {
+    return new Promise((resolve, reject) => {
+      const entityTriplet = `${entityName.namespace}/${entityName.kind}/${entityName.name}`;
+      const entityDir = this.legacyPathCasing ? entityTriplet : helpers.lowerCaseEntityTriplet(entityTriplet);
+      const entityRootDir = path__default.default.posix.join(this.bucketRootPath, entityDir);
+      if (!helpers.isValidContentPath(this.bucketRootPath, entityRootDir)) {
+        this.logger.error(
+          `Invalid content path found while fetching TechDocs metadata: ${entityRootDir}`
+        );
+        reject(new Error(`Metadata Not Found`));
+      }
+      const fileStreamChunks = [];
+      this.storageClient.bucket(this.bucketName).file(`${entityRootDir}/techdocs_metadata.json`).createReadStream().on("error", (err) => {
+        this.logger.error(err.message);
+        reject(err);
+      }).on("data", (chunk) => {
+        fileStreamChunks.push(chunk);
+      }).on("end", () => {
+        const techdocsMetadataJson = Buffer.concat(fileStreamChunks).toString("utf-8");
+        resolve(JSON5__default.default.parse(techdocsMetadataJson));
+      });
+    });
+  }
+  /**
+   * Express route middleware to serve static files on a route in techdocs-backend.
+   */
+  docsRouter() {
+    return (req, res) => {
+      const decodedUri = decodeURI(req.path.replace(/^\//, ""));
+      const filePathNoRoot = this.legacyPathCasing ? decodedUri : helpers.lowerCaseEntityTripletInStoragePath(decodedUri);
+      const filePath = path__default.default.posix.join(this.bucketRootPath, filePathNoRoot);
+      if (!helpers.isValidContentPath(this.bucketRootPath, filePath)) {
+        this.logger.error(
+          `Attempted to fetch TechDocs content for a file outside of the bucket root: ${filePathNoRoot}`
+        );
+        res.status(404).send("File Not Found");
+        return;
+      }
+      const fileExtension = path__default.default.extname(filePath);
+      const responseHeaders = helpers.getHeadersForFileExtension(fileExtension);
+      this.storageClient.bucket(this.bucketName).file(filePath).createReadStream().on("pipe", () => {
+        res.writeHead(200, responseHeaders);
+      }).on("error", (err) => {
+        this.logger.warn(
+          `TechDocs Google GCS router failed to serve content from bucket ${this.bucketName} at path ${filePath}: ${err.message}`
+        );
+        if (!res.headersSent) {
+          res.status(404).send("File Not Found");
+        } else {
+          res.destroy();
+        }
+      }).pipe(res);
+    };
+  }
+  /**
+   * A helper function which checks if index.html of an Entity's docs site is available. This
+   * can be used to verify if there are any pre-generated docs available to serve.
+   */
+  async hasDocsBeenGenerated(entity) {
+    return new Promise((resolve) => {
+      const entityTriplet = `${entity.metadata.namespace}/${entity.kind}/${entity.metadata.name}`;
+      const entityDir = this.legacyPathCasing ? entityTriplet : helpers.lowerCaseEntityTriplet(entityTriplet);
+      const entityRootDir = path__default.default.posix.join(this.bucketRootPath, entityDir);
+      if (!helpers.isValidContentPath(this.bucketRootPath, entityRootDir)) {
+        this.logger.error(
+          `Invalid content path found while checking if docs have been generated: ${entityRootDir}`
+        );
+        resolve(false);
+      }
+      this.storageClient.bucket(this.bucketName).file(`${entityRootDir}/index.html`).exists().then((response) => {
+        resolve(response[0]);
+      }).catch(() => {
+        resolve(false);
+      });
+    });
+  }
+  migrateDocsCase({ removeOriginal = false, concurrency = 25 }) {
+    return new Promise((resolve, reject) => {
+      const allFileMetadata = this.storageClient.bucket(this.bucketName).getFilesStream();
+      const migrateFiles = new GoogleMigration.MigrateWriteStream(
+        this.logger,
+        removeOriginal,
+        concurrency
+      );
+      migrateFiles.on("finish", resolve).on("error", reject);
+      allFileMetadata.pipe(migrateFiles).on("error", (error) => {
+        migrateFiles.destroy();
+        reject(error);
+      });
+    });
+  }
+  getFilesForFolder(folder) {
+    const fileMetadataStream = this.storageClient.bucket(this.bucketName).getFilesStream({ prefix: folder });
+    return new Promise((resolve, reject) => {
+      const files = [];
+      fileMetadataStream.on("error", (error) => {
+        reject(error);
+      });
+      fileMetadataStream.on("data", (file) => {
+        files.push(file.name);
+      });
+      fileMetadataStream.on("end", () => {
+        resolve(files);
+      });
+    });
+  }
+}
+
+exports.GoogleGCSPublish = GoogleGCSPublish;
+//# sourceMappingURL=googleStorage.cjs.js.map
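
Likewise, a hedged sketch (not part of the diff) of publishing generated docs through GoogleGCSPublish. The bucket name, entity, directory, and logger stub are placeholders; since techdocs.publisher.googleGcs.credentials is optional in the code above, application-default credentials are assumed here.

// A minimal sketch, assuming GoogleGCSPublish is re-exported from the package
// root and application-default credentials are available in the environment.
import { ConfigReader } from '@backstage/config';
import { GoogleGCSPublish } from '@backstage/plugin-techdocs-node';

const config = new ConfigReader({
  techdocs: {
    publisher: {
      type: 'googleGcs',
      googleGcs: { bucketName: 'example-techdocs-bucket' }, // placeholder bucket
    },
  },
});

// Console-backed stand-in for a LoggerService-compatible logger.
const logger: any = {
  info: console.log, warn: console.warn, error: console.error,
  debug: console.debug, child: () => logger,
};

const publisher = GoogleGCSPublish.fromConfig(config, logger);

// publish() uploads every file under `directory` to the lower-cased
// <bucketRootPath>/<namespace>/<kind>/<name>/... prefix (unless
// techdocs.legacyUseCaseSensitiveTripletPaths is set), then deletes blobs
// that exist remotely but are no longer present locally (stale files).
await publisher.publish({
  entity: {
    apiVersion: 'backstage.io/v1alpha1',
    kind: 'Component',
    metadata: { namespace: 'default', name: 'example-service' },
  },
  directory: '/tmp/generated-docs', // placeholder: output of the generate stage
});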