@backstage/backend-defaults 0.3.0-next.2 → 0.3.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (72)
  1. package/CHANGELOG.md +62 -0
  2. package/auth/package.json +6 -0
  3. package/cache/package.json +1 -1
  4. package/config.d.ts +277 -0
  5. package/database/package.json +1 -1
  6. package/discovery/package.json +1 -1
  7. package/dist/auth.cjs.js +1025 -0
  8. package/dist/auth.cjs.js.map +1 -0
  9. package/dist/auth.d.ts +14 -0
  10. package/dist/cache.cjs.js.map +1 -1
  11. package/dist/cache.d.ts +31 -37
  12. package/dist/cjs/config-BDOwXIyo.cjs.js +64 -0
  13. package/dist/cjs/config-BDOwXIyo.cjs.js.map +1 -0
  14. package/dist/cjs/createConfigSecretEnumerator-DShyoWWL.cjs.js +33 -0
  15. package/dist/cjs/createConfigSecretEnumerator-DShyoWWL.cjs.js.map +1 -0
  16. package/dist/cjs/helpers-D2f1CG0o.cjs.js +53 -0
  17. package/dist/cjs/helpers-D2f1CG0o.cjs.js.map +1 -0
  18. package/dist/database.cjs.js +59 -145
  19. package/dist/database.cjs.js.map +1 -1
  20. package/dist/database.d.ts +7 -2
  21. package/dist/discovery.cjs.js +6 -6
  22. package/dist/discovery.cjs.js.map +1 -1
  23. package/dist/discovery.d.ts +9 -1
  24. package/dist/httpAuth.cjs.js +192 -0
  25. package/dist/httpAuth.cjs.js.map +1 -0
  26. package/dist/httpAuth.d.ts +15 -0
  27. package/dist/httpRouter.cjs.js +191 -0
  28. package/dist/httpRouter.cjs.js.map +1 -0
  29. package/dist/httpRouter.d.ts +55 -0
  30. package/dist/index.cjs.js +14 -8
  31. package/dist/index.cjs.js.map +1 -1
  32. package/dist/lifecycle.cjs.js.map +1 -1
  33. package/dist/lifecycle.d.ts +5 -1
  34. package/dist/logger.cjs.js +17 -0
  35. package/dist/logger.cjs.js.map +1 -0
  36. package/dist/logger.d.ts +14 -0
  37. package/dist/permissions.cjs.js.map +1 -1
  38. package/dist/permissions.d.ts +6 -0
  39. package/dist/rootConfig.cjs.js +3 -0
  40. package/dist/rootConfig.cjs.js.map +1 -1
  41. package/dist/rootConfig.d.ts +17 -2
  42. package/dist/rootHttpRouter.cjs.js +629 -0
  43. package/dist/rootHttpRouter.cjs.js.map +1 -0
  44. package/dist/rootHttpRouter.d.ts +283 -0
  45. package/dist/rootLifecycle.cjs.js.map +1 -1
  46. package/dist/rootLifecycle.d.ts +5 -1
  47. package/dist/rootLogger.cjs.js +143 -0
  48. package/dist/rootLogger.cjs.js.map +1 -0
  49. package/dist/rootLogger.d.ts +58 -0
  50. package/dist/scheduler.cjs.js +11 -40
  51. package/dist/scheduler.cjs.js.map +1 -1
  52. package/dist/scheduler.d.ts +19 -2
  53. package/dist/urlReader.cjs.js +2932 -2
  54. package/dist/urlReader.cjs.js.map +1 -1
  55. package/dist/urlReader.d.ts +422 -4
  56. package/dist/userInfo.cjs.js +70 -0
  57. package/dist/userInfo.cjs.js.map +1 -0
  58. package/dist/userInfo.d.ts +14 -0
  59. package/httpAuth/package.json +6 -0
  60. package/httpRouter/package.json +6 -0
  61. package/lifecycle/package.json +1 -1
  62. package/logger/package.json +6 -0
  63. package/migrations/auth/20240327104803_public_keys.js +50 -0
  64. package/package.json +103 -11
  65. package/permissions/package.json +1 -1
  66. package/rootConfig/package.json +1 -1
  67. package/rootHttpRouter/package.json +6 -0
  68. package/rootLifecycle/package.json +1 -1
  69. package/rootLogger/package.json +6 -0
  70. package/scheduler/package.json +1 -1
  71. package/urlReader/package.json +1 -1
  72. package/userInfo/package.json +6 -0
@@ -1,7 +1,2924 @@
1
1
  'use strict';
2
2
 
3
- var backendCommon = require('@backstage/backend-common');
3
+ var integration = require('@backstage/integration');
4
+ var fetch = require('node-fetch');
5
+ var minimatch = require('minimatch');
6
+ var stream = require('stream');
7
+ var errors = require('@backstage/errors');
8
+ var getRawBody = require('raw-body');
9
+ var parseGitUrl = require('git-url-parse');
10
+ var lodash = require('lodash');
11
+ var base64Stream = require('base64-stream');
12
+ var integrationAwsNode = require('@backstage/integration-aws-node');
13
+ var credentialProviders = require('@aws-sdk/credential-providers');
14
+ var clientS3 = require('@aws-sdk/client-s3');
15
+ var abortController = require('@aws-sdk/abort-controller');
16
+ var posix = require('path/posix');
17
+ var platformPath = require('path');
18
+ var os = require('os');
19
+ var concatStream = require('concat-stream');
20
+ var fs = require('fs-extra');
21
+ var tar = require('tar');
22
+ var util = require('util');
23
+ var archiver = require('archiver');
24
+ var yauzl = require('yauzl');
4
25
  var backendPluginApi = require('@backstage/backend-plugin-api');
26
+ var GoogleCloud = require('@google-cloud/storage');
27
+ var clientCodecommit = require('@aws-sdk/client-codecommit');
28
+
29
// Wrap a CommonJS export so that it can be consumed like an ES module:
// modules that already carry a `default` key pass through untouched, any
// other value is boxed as `{ default: value }`.
function _interopDefaultCompat(e) {
  if (e && typeof e === 'object' && 'default' in e) {
    return e;
  }
  return { default: e };
}
30
+
31
// Build a frozen, namespace-like object from a CommonJS export, mirroring
// what `import * as ns` would produce. Named exports are exposed through
// live getters so later mutations of the source module remain visible, and
// the original export is attached as `default`. Exports that already have a
// `default` key are returned unchanged.
function _interopNamespaceCompat(e) {
  if (e && typeof e === 'object' && 'default' in e) {
    return e;
  }
  const n = Object.create(null);
  if (e) {
    for (const k of Object.keys(e)) {
      if (k === 'default') {
        continue;
      }
      const d = Object.getOwnPropertyDescriptor(e, k);
      // Preserve accessor properties as-is; wrap data properties in a
      // getter so reads stay live against the source module.
      Object.defineProperty(n, k, d.get ? d : {
        enumerable: true,
        get() {
          return e[k];
        },
      });
    }
  }
  n.default = e;
  return Object.freeze(n);
}
48
+
49
+ var fetch__default = /*#__PURE__*/_interopDefaultCompat(fetch);
50
+ var getRawBody__default = /*#__PURE__*/_interopDefaultCompat(getRawBody);
51
+ var parseGitUrl__default = /*#__PURE__*/_interopDefaultCompat(parseGitUrl);
52
+ var platformPath__default = /*#__PURE__*/_interopDefaultCompat(platformPath);
53
+ var os__default = /*#__PURE__*/_interopDefaultCompat(os);
54
+ var concatStream__default = /*#__PURE__*/_interopDefaultCompat(concatStream);
55
+ var fs__default = /*#__PURE__*/_interopDefaultCompat(fs);
56
+ var tar__default = /*#__PURE__*/_interopDefaultCompat(tar);
57
+ var archiver__default = /*#__PURE__*/_interopDefaultCompat(archiver);
58
+ var yauzl__default = /*#__PURE__*/_interopDefaultCompat(yauzl);
59
+ var GoogleCloud__namespace = /*#__PURE__*/_interopNamespaceCompat(GoogleCloud);
60
+
61
class ReadUrlResponseFactory {
  /**
   * Resolves a ReadUrlResponse from a Readable stream.
   *
   * The returned response lets the caller consume the payload either as a
   * buffer or as a stream — but never both; mixing the two throws a
   * ConflictError.
   */
  static async fromReadable(stream, options) {
    // Shared error instance thrown when a caller mixes buffer() and stream().
    const mixedUseError = new errors.ConflictError(
      "Cannot use buffer() and stream() from the same ReadUrlResponse"
    );
    let bufferRequested = false;
    let streamRequested = false;
    let bufferPromise;
    const readAsBuffer = () => {
      bufferRequested = true;
      if (streamRequested) {
        throw mixedUseError;
      }
      // Memoize so repeated buffer() calls share a single read of the stream.
      if (!bufferPromise) {
        bufferPromise = getRawBody__default.default(stream);
      }
      return bufferPromise;
    };
    const readAsStream = () => {
      streamRequested = true;
      if (bufferRequested) {
        throw mixedUseError;
      }
      return stream;
    };
    return {
      buffer: readAsBuffer,
      stream: readAsStream,
      etag: options?.etag,
      lastModifiedAt: options?.lastModifiedAt,
    };
  }
  /**
   * Resolves a ReadUrlResponse from an old-style NodeJS.ReadableStream by
   * first converting it into a modern Readable.
   */
  static async fromNodeJSReadable(oldStyleStream, options) {
    const readable = stream.Readable.from(oldStyleStream);
    return ReadUrlResponseFactory.fromReadable(readable, options);
  }
}
100
+
101
/**
 * A UrlReader that reads files, trees and search results from Azure DevOps
 * hosts through the Azure DevOps REST API.
 *
 * Credentials are resolved per request URL through the credentials provider
 * supplied in `deps`.
 */
class AzureUrlReader {
  constructor(integration, deps) {
    // Azure integration config for a single host.
    this.integration = integration;
    // deps: { treeResponseFactory, credentialsProvider }
    this.deps = deps;
  }
  /**
   * Creates one AzureUrlReader per configured Azure integration, each paired
   * with a predicate that selects URLs by exact host match.
   */
  static factory = ({ config, treeResponseFactory }) => {
    const integrations = integration.ScmIntegrations.fromConfig(config);
    const credentialProvider = integration.DefaultAzureDevOpsCredentialsProvider.fromIntegrations(integrations);
    return integrations.azure.list().map((integration) => {
      const reader = new AzureUrlReader(integration, {
        treeResponseFactory,
        credentialsProvider: credentialProvider
      });
      const predicate = (url) => url.host === integration.config.host;
      return { reader, predicate };
    });
  };
  /** Reads a single file URL and resolves its entire contents as a buffer. */
  async read(url) {
    const response = await this.readUrl(url);
    return response.buffer();
  }
  /**
   * Reads a single file URL, returning a response that can be consumed
   * either as a buffer or as a stream.
   *
   * Throws NotFoundError on 404, a generic Error on any other failure.
   */
  async readUrl(url, options) {
    const { signal } = options ?? {};
    const builtUrl = integration.getAzureFileFetchUrl(url);
    let response;
    try {
      // Credentials are looked up per URL rather than per reader.
      const credentials = await this.deps.credentialsProvider.getCredentials({
        url: builtUrl
      });
      response = await fetch__default.default(builtUrl, {
        headers: credentials?.headers,
        // TODO(freben): The signal cast is there because pre-3.x versions of
        // node-fetch have a very slightly deviating AbortSignal type signature.
        // The difference does not affect us in practice however. The cast can
        // be removed after we support ESM for CLI dependencies and migrate to
        // version 3 of node-fetch.
        // https://github.com/backstage/backstage/issues/8242
        ...signal && { signal }
      });
    } catch (e) {
      throw new Error(`Unable to read ${url}, ${e}`);
    }
    // NOTE(review): status 203 is rejected even though it is a 2xx —
    // presumably Azure answers 203 with a sign-in page on failed auth;
    // confirm against actual Azure DevOps behavior.
    if (response.ok && response.status !== 203) {
      return ReadUrlResponseFactory.fromNodeJSReadable(response.body);
    }
    const message = `${url} could not be read as ${builtUrl}, ${response.status} ${response.statusText}`;
    if (response.status === 404) {
      throw new errors.NotFoundError(message);
    }
    throw new Error(message);
  }
  /**
   * Downloads the repository (optionally scoped by the URL's `path` query
   * parameter) as a zip archive and returns it as a tree response.
   *
   * The latest commit id acts as the etag; a matching `options.etag`
   * short-circuits with NotModifiedError.
   */
  async readTree(url, options) {
    const { etag, filter, signal } = options ?? {};
    const credentials = await this.deps.credentialsProvider.getCredentials({
      url
    });
    const commitsAzureResponse = await fetch__default.default(integration.getAzureCommitsUrl(url), {
      headers: credentials?.headers
    });
    if (!commitsAzureResponse.ok) {
      const message = `Failed to read tree from ${url}, ${commitsAzureResponse.status} ${commitsAzureResponse.statusText}`;
      if (commitsAzureResponse.status === 404) {
        throw new errors.NotFoundError(message);
      }
      throw new Error(message);
    }
    // First entry of the commits listing is the most recent commit.
    const commitSha = (await commitsAzureResponse.json()).value[0].commitId;
    if (etag && etag === commitSha) {
      throw new errors.NotModifiedError();
    }
    const archiveAzureResponse = await fetch__default.default(integration.getAzureDownloadUrl(url), {
      headers: {
        ...credentials?.headers,
        Accept: "application/zip"
      },
      // TODO(freben): The signal cast is there because pre-3.x versions of
      // node-fetch have a very slightly deviating AbortSignal type signature.
      // The difference does not affect us in practice however. The cast can be
      // removed after we support ESM for CLI dependencies and migrate to
      // version 3 of node-fetch.
      // https://github.com/backstage/backstage/issues/8242
      ...signal && { signal }
    });
    if (!archiveAzureResponse.ok) {
      const message = `Failed to read tree from ${url}, ${archiveAzureResponse.status} ${archiveAzureResponse.statusText}`;
      if (archiveAzureResponse.status === 404) {
        throw new errors.NotFoundError(message);
      }
      throw new Error(message);
    }
    let subpath;
    const path = new URL(url).searchParams.get("path");
    if (path) {
      // Only the last segment of the requested path is used.
      // NOTE(review): assumes the downloaded zip nests entries under the
      // last path segment — confirm against getAzureDownloadUrl behavior.
      subpath = path.split("/").filter(Boolean).slice(-1)[0];
    }
    return await this.deps.treeResponseFactory.fromZipArchive({
      stream: stream.Readable.from(archiveAzureResponse.body),
      etag: commitSha,
      filter,
      subpath
    });
  }
  /**
   * Treats the URL's `path` query parameter as a glob, reads the matching
   * tree, and returns the matched files with resolved URLs. Without a
   * `path` parameter every file matches.
   */
  async search(url, options) {
    const treeUrl = new URL(url);
    const path = treeUrl.searchParams.get("path");
    // Leading slashes are stripped so the glob matches archive-relative paths.
    const matcher = path && new minimatch.Minimatch(path.replace(/^\/+/, ""));
    treeUrl.searchParams.delete("path");
    const tree = await this.readTree(treeUrl.toString(), {
      etag: options?.etag,
      signal: options?.signal,
      filter: (p) => matcher ? matcher.match(p) : true
    });
    const files = await tree.files();
    return {
      etag: tree.etag,
      files: files.map((file) => ({
        url: this.integration.resolveUrl({
          url: `/${file.path}`,
          base: url
        }),
        content: file.content,
        lastModifiedAt: file.lastModifiedAt
      }))
    };
  }
  /** Log-friendly description, e.g. `azure{host=dev.azure.com,authed=true}`. */
  toString() {
    const { host, credentials } = this.integration.config;
    return `azure{host=${host},authed=${Boolean(
      credentials !== void 0 && credentials.length > 0
    )}}`;
  }
}
233
+
234
// Convert an HTTP Last-Modified header value into a Date, or undefined when
// the header is absent or empty.
function parseLastModified(value) {
  return value ? new Date(value) : undefined;
}
240
+
241
/**
 * A UrlReader for Bitbucket Cloud (bitbucket.org).
 *
 * Uses the Bitbucket Cloud REST API to read single files, download
 * repository archives as trees, and run glob-based searches.
 */
class BitbucketCloudUrlReader {
  constructor(integration, deps) {
    this.integration = integration;
    // deps: { treeResponseFactory }
    this.deps = deps;
    const { host, username, appPassword } = integration.config;
    // A username without an app password cannot authenticate; fail fast at
    // construction time instead of on the first request.
    if (username && !appPassword) {
      throw new Error(
        `Bitbucket Cloud integration for '${host}' has configured a username but is missing a required appPassword.`
      );
    }
  }
  /** Creates one reader per configured Bitbucket Cloud integration, matched by host. */
  static factory = ({ config, treeResponseFactory }) => {
    const integrations = integration.ScmIntegrations.fromConfig(config);
    return integrations.bitbucketCloud.list().map((integration) => {
      const reader = new BitbucketCloudUrlReader(integration, {
        treeResponseFactory
      });
      const predicate = (url) => url.host === integration.config.host;
      return { reader, predicate };
    });
  };
  /** Reads a single file URL and resolves its entire contents as a buffer. */
  async read(url) {
    const response = await this.readUrl(url);
    return response.buffer();
  }
  /**
   * Reads a single file URL with optional caching hints.
   *
   * `etag` maps to If-None-Match and `lastModifiedAfter` to
   * If-Modified-Since; a 304 answer raises NotModifiedError, a 404 raises
   * NotFoundError.
   */
  async readUrl(url, options) {
    const { etag, lastModifiedAfter, signal } = options ?? {};
    const bitbucketUrl = integration.getBitbucketCloudFileFetchUrl(
      url,
      this.integration.config
    );
    const requestOptions = integration.getBitbucketCloudRequestOptions(
      this.integration.config
    );
    let response;
    try {
      response = await fetch__default.default(bitbucketUrl.toString(), {
        headers: {
          ...requestOptions.headers,
          ...etag && { "If-None-Match": etag },
          ...lastModifiedAfter && {
            "If-Modified-Since": lastModifiedAfter.toUTCString()
          }
        },
        // TODO(freben): The signal cast is there because pre-3.x versions of
        // node-fetch have a very slightly deviating AbortSignal type signature.
        // The difference does not affect us in practice however. The cast can be
        // removed after we support ESM for CLI dependencies and migrate to
        // version 3 of node-fetch.
        // https://github.com/backstage/backstage/issues/8242
        ...signal && { signal }
      });
    } catch (e) {
      throw new Error(`Unable to read ${url}, ${e}`);
    }
    if (response.status === 304) {
      throw new errors.NotModifiedError();
    }
    if (response.ok) {
      // Propagate the server's caching metadata so callers can reuse it.
      return ReadUrlResponseFactory.fromNodeJSReadable(response.body, {
        etag: response.headers.get("ETag") ?? void 0,
        lastModifiedAt: parseLastModified(
          response.headers.get("Last-Modified")
        )
      });
    }
    const message = `${url} could not be read as ${bitbucketUrl}, ${response.status} ${response.statusText}`;
    if (response.status === 404) {
      throw new errors.NotFoundError(message);
    }
    throw new Error(message);
  }
  /**
   * Downloads the repository as a tar archive and returns it as a tree
   * response scoped to the URL's file path.
   *
   * The latest commit's 12-char short hash is the etag; a matching
   * `options.etag` raises NotModifiedError before any download happens.
   */
  async readTree(url, options) {
    const { filepath } = parseGitUrl__default.default(url);
    const lastCommitShortHash = await this.getLastCommitShortHash(url);
    if (options?.etag && options.etag === lastCommitShortHash) {
      throw new errors.NotModifiedError();
    }
    const downloadUrl = await integration.getBitbucketCloudDownloadUrl(
      url,
      this.integration.config
    );
    const archiveResponse = await fetch__default.default(
      downloadUrl,
      integration.getBitbucketCloudRequestOptions(this.integration.config)
    );
    if (!archiveResponse.ok) {
      const message = `Failed to read tree from ${url}, ${archiveResponse.status} ${archiveResponse.statusText}`;
      if (archiveResponse.status === 404) {
        throw new errors.NotFoundError(message);
      }
      throw new Error(message);
    }
    return await this.deps.treeResponseFactory.fromTarArchive({
      stream: stream.Readable.from(archiveResponse.body),
      subpath: filepath,
      etag: lastCommitShortHash,
      filter: options?.filter
    });
  }
  /**
   * Treats the URL's file path as a glob, reads the surrounding tree, and
   * returns matching files with URLs resolved against the original URL.
   */
  async search(url, options) {
    const { filepath } = parseGitUrl__default.default(url);
    const matcher = new minimatch.Minimatch(filepath);
    // Strip the glob part off the URL to obtain the tree root.
    const treeUrl = lodash.trimEnd(url.replace(filepath, ""), "/");
    const tree = await this.readTree(treeUrl, {
      etag: options?.etag,
      filter: (path) => matcher.match(path)
    });
    const files = await tree.files();
    return {
      etag: tree.etag,
      files: files.map((file) => ({
        url: this.integration.resolveUrl({
          url: `/${file.path}`,
          base: url
        }),
        content: file.content,
        lastModifiedAt: file.lastModifiedAt
      }))
    };
  }
  /** Log-friendly description, e.g. `bitbucketCloud{host=bitbucket.org,authed=true}`. */
  toString() {
    const { host, username, appPassword } = this.integration.config;
    const authed = Boolean(username && appPassword);
    return `bitbucketCloud{host=${host},authed=${authed}}`;
  }
  /**
   * Looks up the latest commit on the URL's branch (falling back to the
   * repo's default branch) and returns the first 12 characters of its hash.
   * Throws NotFoundError / Error on API failures or unexpected responses.
   */
  async getLastCommitShortHash(url) {
    const { name: repoName, owner: project, ref } = parseGitUrl__default.default(url);
    let branch = ref;
    if (!branch) {
      branch = await integration.getBitbucketCloudDefaultBranch(
        url,
        this.integration.config
      );
    }
    const commitsApiUrl = `${this.integration.config.apiBaseUrl}/repositories/${project}/${repoName}/commits/${branch}`;
    const commitsResponse = await fetch__default.default(
      commitsApiUrl,
      integration.getBitbucketCloudRequestOptions(this.integration.config)
    );
    if (!commitsResponse.ok) {
      const message = `Failed to retrieve commits from ${commitsApiUrl}, ${commitsResponse.status} ${commitsResponse.statusText}`;
      if (commitsResponse.status === 404) {
        throw new errors.NotFoundError(message);
      }
      throw new Error(message);
    }
    const commits = await commitsResponse.json();
    // Newest commit comes first in the listing.
    if (commits && commits.values && commits.values.length > 0 && commits.values[0].hash) {
      return commits.values[0].hash.substring(0, 12);
    }
    throw new Error(`Failed to read response from ${commitsApiUrl}`);
  }
}
395
+
396
/**
 * A UrlReader for the deprecated unified `integrations.bitbucket` config,
 * covering both Bitbucket Cloud (bitbucket.org) and self-hosted Bitbucket
 * Server hosts. Logs a deprecation warning pointing at the split config key.
 */
class BitbucketUrlReader {
  constructor(integration, logger, deps) {
    this.integration = integration;
    // deps: { treeResponseFactory }
    this.deps = deps;
    const { host, token, username, appPassword } = integration.config;
    // Point users at the replacement key matching their host type.
    const replacement = host === "bitbucket.org" ? "bitbucketCloud" : "bitbucketServer";
    logger.warn(
      `[Deprecated] Please migrate from "integrations.bitbucket" to "integrations.${replacement}".`
    );
    // Without a token, a username requires an app password to authenticate.
    if (!token && username && !appPassword) {
      throw new Error(
        `Bitbucket integration for '${host}' has configured a username but is missing a required appPassword.`
      );
    }
  }
  /**
   * Creates readers only for legacy `bitbucket` integrations whose host is
   * not already covered by a bitbucketCloud or bitbucketServer integration,
   * so the newer readers take precedence.
   */
  static factory = ({ config, logger, treeResponseFactory }) => {
    const integrations = integration.ScmIntegrations.fromConfig(config);
    return integrations.bitbucket.list().filter(
      (item) => !integrations.bitbucketCloud.byHost(item.config.host) && !integrations.bitbucketServer.byHost(item.config.host)
    ).map((integration) => {
      const reader = new BitbucketUrlReader(integration, logger, {
        treeResponseFactory
      });
      const predicate = (url) => url.host === integration.config.host;
      return { reader, predicate };
    });
  };
  /** Reads a single file URL and resolves its entire contents as a buffer. */
  async read(url) {
    const response = await this.readUrl(url);
    return response.buffer();
  }
  /**
   * Reads a single file URL with optional caching hints (`etag` →
   * If-None-Match, `lastModifiedAfter` → If-Modified-Since). 304 raises
   * NotModifiedError, 404 raises NotFoundError.
   */
  async readUrl(url, options) {
    const { etag, lastModifiedAfter, signal } = options ?? {};
    const bitbucketUrl = integration.getBitbucketFileFetchUrl(url, this.integration.config);
    const requestOptions = integration.getBitbucketRequestOptions(this.integration.config);
    let response;
    try {
      response = await fetch__default.default(bitbucketUrl.toString(), {
        headers: {
          ...requestOptions.headers,
          ...etag && { "If-None-Match": etag },
          ...lastModifiedAfter && {
            "If-Modified-Since": lastModifiedAfter.toUTCString()
          }
        },
        // TODO(freben): The signal cast is there because pre-3.x versions of
        // node-fetch have a very slightly deviating AbortSignal type signature.
        // The difference does not affect us in practice however. The cast can be
        // removed after we support ESM for CLI dependencies and migrate to
        // version 3 of node-fetch.
        // https://github.com/backstage/backstage/issues/8242
        ...signal && { signal }
      });
    } catch (e) {
      throw new Error(`Unable to read ${url}, ${e}`);
    }
    if (response.status === 304) {
      throw new errors.NotModifiedError();
    }
    if (response.ok) {
      // Propagate the server's caching metadata so callers can reuse it.
      return ReadUrlResponseFactory.fromNodeJSReadable(response.body, {
        etag: response.headers.get("ETag") ?? void 0,
        lastModifiedAt: parseLastModified(
          response.headers.get("Last-Modified")
        )
      });
    }
    const message = `${url} could not be read as ${bitbucketUrl}, ${response.status} ${response.statusText}`;
    if (response.status === 404) {
      throw new errors.NotFoundError(message);
    }
    throw new Error(message);
  }
  /**
   * Downloads the repository as a tar archive and returns it as a tree
   * response scoped to the URL's file path. The latest commit's 12-char
   * short hash is the etag; a matching `options.etag` raises
   * NotModifiedError before any download happens.
   */
  async readTree(url, options) {
    const { filepath } = parseGitUrl__default.default(url);
    const lastCommitShortHash = await this.getLastCommitShortHash(url);
    if (options?.etag && options.etag === lastCommitShortHash) {
      throw new errors.NotModifiedError();
    }
    const downloadUrl = await integration.getBitbucketDownloadUrl(
      url,
      this.integration.config
    );
    const archiveBitbucketResponse = await fetch__default.default(
      downloadUrl,
      integration.getBitbucketRequestOptions(this.integration.config)
    );
    if (!archiveBitbucketResponse.ok) {
      const message = `Failed to read tree from ${url}, ${archiveBitbucketResponse.status} ${archiveBitbucketResponse.statusText}`;
      if (archiveBitbucketResponse.status === 404) {
        throw new errors.NotFoundError(message);
      }
      throw new Error(message);
    }
    return await this.deps.treeResponseFactory.fromTarArchive({
      stream: stream.Readable.from(archiveBitbucketResponse.body),
      subpath: filepath,
      etag: lastCommitShortHash,
      filter: options?.filter
    });
  }
  /**
   * Treats the URL's file path as a glob, reads the surrounding tree, and
   * returns matching files with URLs resolved against the original URL.
   */
  async search(url, options) {
    const { filepath } = parseGitUrl__default.default(url);
    const matcher = new minimatch.Minimatch(filepath);
    // Strip the glob part off the URL to obtain the tree root.
    const treeUrl = lodash.trimEnd(url.replace(filepath, ""), "/");
    const tree = await this.readTree(treeUrl, {
      etag: options?.etag,
      filter: (path) => matcher.match(path)
    });
    const files = await tree.files();
    return {
      etag: tree.etag,
      files: files.map((file) => ({
        url: this.integration.resolveUrl({
          url: `/${file.path}`,
          base: url
        }),
        content: file.content,
        lastModifiedAt: file.lastModifiedAt
      }))
    };
  }
  /** Log-friendly description; authed when a token OR username+appPassword is set. */
  toString() {
    const { host, token, username, appPassword } = this.integration.config;
    let authed = Boolean(token);
    if (!authed) {
      authed = Boolean(username && appPassword);
    }
    return `bitbucket{host=${host},authed=${authed}}`;
  }
  /**
   * Looks up the latest commit hash (first 12 characters) via the commits
   * API, using either the hosted (bitbucket.org) or server-style endpoint
   * and response shape depending on the URL's host.
   */
  async getLastCommitShortHash(url) {
    const { resource, name: repoName, owner: project, ref } = parseGitUrl__default.default(url);
    let branch = ref;
    if (!branch) {
      branch = await integration.getBitbucketDefaultBranch(url, this.integration.config);
    }
    // bitbucket.org (hosted) and Bitbucket Server expose different APIs.
    const isHosted = resource === "bitbucket.org";
    // NOTE(review): the server-style URL lists commits without filtering by
    // `branch`, so the hash reflects the repo's default listing — confirm
    // whether that is intended for non-default branches.
    const commitsApiUrl = isHosted ? `${this.integration.config.apiBaseUrl}/repositories/${project}/${repoName}/commits/${branch}` : `${this.integration.config.apiBaseUrl}/projects/${project}/repos/${repoName}/commits`;
    const commitsResponse = await fetch__default.default(
      commitsApiUrl,
      integration.getBitbucketRequestOptions(this.integration.config)
    );
    if (!commitsResponse.ok) {
      const message = `Failed to retrieve commits from ${commitsApiUrl}, ${commitsResponse.status} ${commitsResponse.statusText}`;
      if (commitsResponse.status === 404) {
        throw new errors.NotFoundError(message);
      }
      throw new Error(message);
    }
    const commits = await commitsResponse.json();
    // Hosted responses carry `hash`, server responses carry `id`.
    if (isHosted) {
      if (commits && commits.values && commits.values.length > 0 && commits.values[0].hash) {
        return commits.values[0].hash.substring(0, 12);
      }
    } else {
      if (commits && commits.values && commits.values.length > 0 && commits.values[0].id) {
        return commits.values[0].id.substring(0, 12);
      }
    }
    throw new Error(`Failed to read response from ${commitsApiUrl}`);
  }
}
558
+
559
/**
 * A UrlReader for self-hosted Bitbucket Server / Data Center hosts.
 *
 * Uses the Bitbucket Server REST API to read single files, download
 * repository archives as trees, and run glob-based searches.
 */
class BitbucketServerUrlReader {
  constructor(integration, deps) {
    this.integration = integration;
    // deps: { treeResponseFactory }
    this.deps = deps;
  }
  /** Creates one reader per configured Bitbucket Server integration, matched by host. */
  static factory = ({ config, treeResponseFactory }) => {
    const integrations = integration.ScmIntegrations.fromConfig(config);
    return integrations.bitbucketServer.list().map((integration) => {
      const reader = new BitbucketServerUrlReader(integration, {
        treeResponseFactory
      });
      const predicate = (url) => url.host === integration.config.host;
      return { reader, predicate };
    });
  };
  /** Reads a single file URL and resolves its entire contents as a buffer. */
  async read(url) {
    const response = await this.readUrl(url);
    return response.buffer();
  }
  /**
   * Reads a single file URL with optional caching hints (`etag` →
   * If-None-Match, `lastModifiedAfter` → If-Modified-Since). 304 raises
   * NotModifiedError, 404 raises NotFoundError.
   */
  async readUrl(url, options) {
    const { etag, lastModifiedAfter, signal } = options ?? {};
    const bitbucketUrl = integration.getBitbucketServerFileFetchUrl(
      url,
      this.integration.config
    );
    const requestOptions = integration.getBitbucketServerRequestOptions(
      this.integration.config
    );
    let response;
    try {
      response = await fetch__default.default(bitbucketUrl.toString(), {
        headers: {
          ...requestOptions.headers,
          ...etag && { "If-None-Match": etag },
          ...lastModifiedAfter && {
            "If-Modified-Since": lastModifiedAfter.toUTCString()
          }
        },
        // TODO(freben): The signal cast is there because pre-3.x versions of
        // node-fetch have a very slightly deviating AbortSignal type signature.
        // The difference does not affect us in practice however. The cast can be
        // removed after we support ESM for CLI dependencies and migrate to
        // version 3 of node-fetch.
        // https://github.com/backstage/backstage/issues/8242
        ...signal && { signal }
      });
    } catch (e) {
      throw new Error(`Unable to read ${url}, ${e}`);
    }
    if (response.status === 304) {
      throw new errors.NotModifiedError();
    }
    if (response.ok) {
      // Propagate the server's caching metadata so callers can reuse it.
      return ReadUrlResponseFactory.fromNodeJSReadable(response.body, {
        etag: response.headers.get("ETag") ?? void 0,
        lastModifiedAt: parseLastModified(
          response.headers.get("Last-Modified")
        )
      });
    }
    const message = `${url} could not be read as ${bitbucketUrl}, ${response.status} ${response.statusText}`;
    if (response.status === 404) {
      throw new errors.NotFoundError(message);
    }
    throw new Error(message);
  }
  /**
   * Downloads the repository as a tar archive and returns it as a tree
   * response scoped to the URL's file path. The latest commit's 12-char
   * short hash is the etag; a matching `options.etag` raises
   * NotModifiedError before any download happens.
   */
  async readTree(url, options) {
    const { filepath } = parseGitUrl__default.default(url);
    const lastCommitShortHash = await this.getLastCommitShortHash(url);
    if (options?.etag && options.etag === lastCommitShortHash) {
      throw new errors.NotModifiedError();
    }
    const downloadUrl = await integration.getBitbucketServerDownloadUrl(
      url,
      this.integration.config
    );
    const archiveResponse = await fetch__default.default(
      downloadUrl,
      integration.getBitbucketServerRequestOptions(this.integration.config)
    );
    if (!archiveResponse.ok) {
      const message = `Failed to read tree from ${url}, ${archiveResponse.status} ${archiveResponse.statusText}`;
      if (archiveResponse.status === 404) {
        throw new errors.NotFoundError(message);
      }
      throw new Error(message);
    }
    return await this.deps.treeResponseFactory.fromTarArchive({
      stream: stream.Readable.from(archiveResponse.body),
      subpath: filepath,
      etag: lastCommitShortHash,
      filter: options?.filter
    });
  }
  /**
   * Treats the URL's file path as a glob, reads the surrounding tree, and
   * returns matching files with URLs resolved against the original URL.
   */
  async search(url, options) {
    const { filepath } = parseGitUrl__default.default(url);
    const matcher = new minimatch.Minimatch(filepath);
    // Strip the glob part off the URL to obtain the tree root.
    const treeUrl = lodash.trimEnd(url.replace(filepath, ""), "/");
    const tree = await this.readTree(treeUrl, {
      etag: options?.etag,
      filter: (path) => matcher.match(path)
    });
    const files = await tree.files();
    return {
      etag: tree.etag,
      files: files.map((file) => ({
        url: this.integration.resolveUrl({
          url: `/${file.path}`,
          base: url
        }),
        content: file.content,
        lastModifiedAt: file.lastModifiedAt
      }))
    };
  }
  /** Log-friendly description, e.g. `bitbucketServer{host=...,authed=true}`. */
  toString() {
    const { host, token } = this.integration.config;
    const authed = Boolean(token);
    return `bitbucketServer{host=${host},authed=${authed}}`;
  }
  /**
   * Looks up the latest commit on the URL's branch (or the repo's default
   * branch) via the branches API and returns the first 12 characters of its
   * commit hash.
   *
   * Throws NotFoundError on 404, and a descriptive Error when no matching
   * branch (or no latest commit) can be found in the response.
   */
  async getLastCommitShortHash(url) {
    const { name: repoName, owner: project, ref: branch } = parseGitUrl__default.default(url);
    // With a branch, filter the branch list by name; otherwise ask for the
    // default branch directly (which returns a single branch object).
    const branchParameter = branch ? `?filterText=${encodeURIComponent(branch)}` : "/default";
    const branchListUrl = `${this.integration.config.apiBaseUrl}/projects/${project}/repos/${repoName}/branches${branchParameter}`;
    const branchListResponse = await fetch__default.default(
      branchListUrl,
      integration.getBitbucketServerRequestOptions(this.integration.config)
    );
    if (!branchListResponse.ok) {
      const message = `Failed to retrieve branch list from ${branchListUrl}, ${branchListResponse.status} ${branchListResponse.statusText}`;
      if (branchListResponse.status === 404) {
        throw new errors.NotFoundError(message);
      }
      throw new Error(message);
    }
    const branchMatches = await branchListResponse.json();
    if (branchMatches && branchMatches.size > 0) {
      // `filterText` matches substrings, so pick the exact displayId match.
      // Fix: previously `exactBranchMatch.latestCommit` was dereferenced
      // unconditionally and crashed with a TypeError when the filter
      // returned only partial matches; now we fall through to the
      // descriptive error below instead.
      const exactBranchMatch = branchMatches.values.find(
        (branchDetails) => branchDetails.displayId === branch
      );
      if (exactBranchMatch?.latestCommit) {
        return exactBranchMatch.latestCommit.substring(0, 12);
      }
    }
    if (!branch && branchMatches?.latestCommit) {
      return branchMatches.latestCommit.substring(0, 12);
    }
    throw new Error(
      `Failed to find Last Commit using ${branch ? `branch "${branch}"` : "default branch"} in response from ${branchListUrl}`
    );
  }
}
709
+
710
class GerritUrlReader {
  /**
   * UrlReader implementation for Gerrit hosts. Single files are fetched via
   * the Gerrit REST file-contents API (which returns base64); whole trees are
   * fetched as tar archives from the associated Gitiles instance.
   */
  constructor(integration, deps) {
    this.integration = integration;
    this.deps = deps;
  }
  // Builds one reader per configured Gerrit integration. Note that URL
  // matching is done against the host of the configured gitilesBaseUrl,
  // not the Gerrit API host. Returns [] when no gerrit config exists.
  static factory = ({ config, treeResponseFactory }) => {
    const integrations = integration.ScmIntegrations.fromConfig(config);
    if (!integrations.gerrit) {
      return [];
    }
    return integrations.gerrit.list().map((integration) => {
      const reader = new GerritUrlReader(integration, { treeResponseFactory });
      const predicate = (url) => {
        const gitilesUrl = new URL(integration.config.gitilesBaseUrl);
        return url.host === gitilesUrl.host;
      };
      return { reader, predicate };
    });
  };
  // Convenience wrapper around readUrl that resolves the file to a Buffer.
  async read(url) {
    const response = await this.readUrl(url);
    return response.buffer();
  }
  /**
   * Reads a single file. The returned object exposes lazy `buffer`/`stream`
   * accessors; the response body is base64-encoded by Gerrit and decoded here.
   * Throws NotFoundError on 404, a generic Error on network failure or any
   * other non-OK status.
   */
  async readUrl(url, options) {
    const apiUrl = integration.getGerritFileContentsApiUrl(this.integration.config, url);
    let response;
    try {
      response = await fetch__default.default(apiUrl, {
        method: "GET",
        ...integration.getGerritRequestOptions(this.integration.config),
        // TODO(freben): The signal cast is there because pre-3.x versions of
        // node-fetch have a very slightly deviating AbortSignal type signature.
        // The difference does not affect us in practice however. The cast can
        // be removed after we support ESM for CLI dependencies and migrate to
        // version 3 of node-fetch.
        // https://github.com/backstage/backstage/issues/8242
        signal: options?.signal
      });
    } catch (e) {
      throw new Error(`Unable to read gerrit file ${url}, ${e}`);
    }
    if (response.ok) {
      // The body text is read at most once and cached in the closure, so
      // buffer() may be called repeatedly. NOTE(review): buffer() and
      // stream() both consume the same underlying response body — callers
      // are expected to use only one of them.
      let responseBody;
      return {
        buffer: async () => {
          if (responseBody === void 0) {
            responseBody = await response.text();
          }
          return Buffer.from(responseBody, "base64");
        },
        stream: () => {
          // Decode the base64 payload on the fly while streaming.
          const readable = stream.Readable.from(response.body);
          return readable.pipe(new base64Stream.Base64Decode());
        }
      };
    }
    if (response.status === 404) {
      throw new errors.NotFoundError(`File ${url} not found.`);
    }
    throw new Error(
      `${url} could not be read as ${apiUrl}, ${response.status} ${response.statusText}`
    );
  }
  /**
   * Reads a tree. First resolves the branch's current revision via the Gerrit
   * branch API (used as the etag), then delegates the actual archive download
   * to readTreeFromGitiles. Throws NotModifiedError when the caller's etag
   * already matches the branch revision.
   */
  async readTree(url, options) {
    const apiUrl = integration.getGerritBranchApiUrl(this.integration.config, url);
    let response;
    try {
      response = await fetch__default.default(apiUrl, {
        method: "GET",
        ...integration.getGerritRequestOptions(this.integration.config)
      });
    } catch (e) {
      throw new Error(`Unable to read branch state ${url}, ${e}`);
    }
    if (response.status === 404) {
      throw new errors.NotFoundError(`Not found: ${url}`);
    }
    if (!response.ok) {
      throw new Error(
        `${url} could not be read as ${apiUrl}, ${response.status} ${response.statusText}`
      );
    }
    // Gerrit prefixes JSON responses with a magic line; this helper strips it.
    const branchInfo = await integration.parseGerritJsonResponse(response);
    if (options?.etag === branchInfo.revision) {
      throw new errors.NotModifiedError();
    }
    return this.readTreeFromGitiles(url, branchInfo.revision, options);
  }
  // search is intentionally unsupported for Gerrit.
  async search() {
    throw new Error("GerritReader does not implement search");
  }
  // Diagnostic description; reports whether a password is configured.
  toString() {
    const { host, password } = this.integration.config;
    return `gerrit{host=${host},authed=${Boolean(password)}}`;
  }
  /**
   * Downloads a (sub)tree as a tar archive from Gitiles and turns it into a
   * tree response. `revision` is used as the resulting etag. The archive is
   * scoped to filePath server-side, so no client-side subpath stripping is
   * needed (stripFirstDirectory: false).
   */
  async readTreeFromGitiles(url, revision, options) {
    const { branch, filePath, project } = integration.parseGerritGitilesUrl(
      this.integration.config,
      url
    );
    const archiveUrl = integration.buildGerritGitilesArchiveUrl(
      this.integration.config,
      project,
      branch,
      filePath
    );
    const archiveResponse = await fetch__default.default(archiveUrl, {
      ...integration.getGerritRequestOptions(this.integration.config),
      // TODO(freben): The signal cast is there because pre-3.x versions of
      // node-fetch have a very slightly deviating AbortSignal type signature.
      // The difference does not affect us in practice however. The cast can
      // be removed after we support ESM for CLI dependencies and migrate to
      // version 3 of node-fetch.
      // https://github.com/backstage/backstage/issues/8242
      signal: options?.signal
    });
    if (archiveResponse.status === 404) {
      throw new errors.NotFoundError(`Not found: ${archiveUrl}`);
    }
    if (!archiveResponse.ok) {
      throw new Error(
        `${url} could not be read as ${archiveUrl}, ${archiveResponse.status} ${archiveResponse.statusText}`
      );
    }
    return await this.deps.treeResponseFactory.fromTarArchive({
      stream: archiveResponse.body,
      etag: revision,
      filter: options?.filter,
      stripFirstDirectory: false
    });
  }
}
842
+
843
class GithubUrlReader {
  /**
   * UrlReader implementation for GitHub / GitHub Enterprise, using the GitHub
   * REST API for file reads, tarball downloads for trees, and the git trees
   * API for glob searches.
   *
   * Requires the integration to have an apiBaseUrl or rawBaseUrl configured;
   * fails fast in the constructor otherwise.
   */
  constructor(integration, deps) {
    this.integration = integration;
    this.deps = deps;
    if (!integration.config.apiBaseUrl && !integration.config.rawBaseUrl) {
      throw new Error(
        `GitHub integration '${integration.title}' must configure an explicit apiBaseUrl or rawBaseUrl`
      );
    }
  }
  // One reader per configured github integration, all sharing a single
  // credentials provider. URLs are matched on exact host equality.
  static factory = ({ config, treeResponseFactory }) => {
    const integrations = integration.ScmIntegrations.fromConfig(config);
    const credentialsProvider = integration.DefaultGithubCredentialsProvider.fromIntegrations(integrations);
    return integrations.github.list().map((integration) => {
      const reader = new GithubUrlReader(integration, {
        treeResponseFactory,
        credentialsProvider
      });
      const predicate = (url) => url.host === integration.config.host;
      return { reader, predicate };
    });
  };
  // Convenience wrapper around readUrl that resolves the file to a Buffer.
  async read(url) {
    const response = await this.readUrl(url);
    return response.buffer();
  }
  // Resolves credentials for a URL. An explicit options.token takes
  // precedence over the shared credentials provider and is sent as a
  // Bearer token.
  getCredentials = async (url, options) => {
    if (options?.token) {
      return {
        headers: {
          Authorization: `Bearer ${options.token}`
        },
        type: "token",
        token: options.token
      };
    }
    return await this.deps.credentialsProvider.getCredentials({
      url
    });
  };
  /**
   * Reads a single file via the GitHub contents API in raw mode
   * (Accept: application/vnd.github.v3.raw). Conditional-request headers
   * (If-None-Match / If-Modified-Since) are forwarded from options; the
   * resulting etag and Last-Modified are surfaced on the response.
   * Non-OK statuses are translated by fetchResponse.
   */
  async readUrl(url, options) {
    const credentials = await this.getCredentials(url, options);
    const ghUrl = integration.getGithubFileFetchUrl(
      url,
      this.integration.config,
      credentials
    );
    const response = await this.fetchResponse(ghUrl, {
      headers: {
        ...credentials?.headers,
        ...options?.etag && { "If-None-Match": options.etag },
        ...options?.lastModifiedAfter && {
          "If-Modified-Since": options.lastModifiedAfter.toUTCString()
        },
        Accept: "application/vnd.github.v3.raw"
      },
      // TODO(freben): The signal cast is there because pre-3.x versions of
      // node-fetch have a very slightly deviating AbortSignal type signature.
      // The difference does not affect us in practice however. The cast can
      // be removed after we support ESM for CLI dependencies and migrate to
      // version 3 of node-fetch.
      // https://github.com/backstage/backstage/issues/8242
      signal: options?.signal
    });
    return ReadUrlResponseFactory.fromNodeJSReadable(response.body, {
      etag: response.headers.get("ETag") ?? void 0,
      lastModifiedAt: parseLastModified(response.headers.get("Last-Modified"))
    });
  }
  /**
   * Reads a tree. The repo's current commit SHA doubles as the etag;
   * NotModifiedError is thrown when the caller already has it. The actual
   * download happens through the repo's archive_url tarball endpoint.
   */
  async readTree(url, options) {
    const repoDetails = await this.getRepoDetails(url);
    const commitSha = repoDetails.commitSha;
    if (options?.etag && options.etag === commitSha) {
      throw new errors.NotModifiedError();
    }
    const { filepath } = parseGitUrl__default.default(url);
    const { headers } = await this.getCredentials(url, options);
    return this.doReadTree(
      repoDetails.repo.archive_url,
      commitSha,
      filepath,
      // TODO(freben): The signal cast is there because pre-3.x versions of
      // node-fetch have a very slightly deviating AbortSignal type signature.
      // The difference does not affect us in practice however. The cast can be
      // removed after we support ESM for CLI dependencies and migrate to
      // version 3 of node-fetch.
      // https://github.com/backstage/backstage/issues/8242
      { headers, signal: options?.signal },
      options
    );
  }
  /**
   * Glob-searches a repo at its current commit (which is also the etag).
   * Delegates to doSearch, which prefers the git trees API and falls back
   * to a full tarball download when the tree listing is truncated.
   */
  async search(url, options) {
    const repoDetails = await this.getRepoDetails(url);
    const commitSha = repoDetails.commitSha;
    if (options?.etag && options.etag === commitSha) {
      throw new errors.NotModifiedError();
    }
    const { filepath } = parseGitUrl__default.default(url);
    const { headers } = await this.getCredentials(url, options);
    const files = await this.doSearch(
      url,
      repoDetails.repo.trees_url,
      repoDetails.repo.archive_url,
      commitSha,
      filepath,
      { headers, signal: options?.signal }
    );
    return { files, etag: commitSha };
  }
  // Diagnostic description; reports whether a token is configured.
  toString() {
    const { host, token } = this.integration.config;
    return `github{host=${host},authed=${Boolean(token)}}`;
  }
  // Downloads the repo tarball for `sha` (expanding GitHub's URI template
  // placeholders) and converts it into a tree response scoped to `subpath`.
  async doReadTree(archiveUrl, sha, subpath, init, options) {
    const archive = await this.fetchResponse(
      archiveUrl.replace("{archive_format}", "tarball").replace("{/ref}", `/${sha}`),
      init
    );
    return await this.deps.treeResponseFactory.fromTarArchive({
      // TODO(Rugvip): Underlying implementation of fetch will be node-fetch, we probably want
      // to stick to using that in exclusively backend code.
      stream: stream.Readable.from(archive.body),
      subpath,
      etag: sha,
      filter: options?.filter
    });
  }
  /**
   * Implements glob search. Fast path: list the whole tree recursively via
   * the trees API and fetch matching blobs individually (content arrives
   * base64-encoded). If GitHub reports the recursive listing as truncated,
   * fall back to downloading the full tarball and filtering locally.
   */
  async doSearch(url, treesUrl, archiveUrl, sha, query, init) {
    // Maps a repo-relative file path back to a browsable URL by keeping the
    // first four path segments (owner/repo/blob-or-tree/ref) of the query URL.
    function pathToUrl(path) {
      const updated = new URL(url);
      const base = updated.pathname.split("/").slice(1, 5).join("/");
      updated.pathname = `${base}/${path}`;
      return updated.toString();
    }
    const matcher = new minimatch.Minimatch(query.replace(/^\/+/, ""));
    const recursiveTree = await this.fetchJson(
      treesUrl.replace("{/sha}", `/${sha}?recursive=true`),
      init
    );
    if (!recursiveTree.truncated) {
      const matching = recursiveTree.tree.filter(
        (item) => item.type === "blob" && item.path && item.url && matcher.match(item.path)
      );
      return matching.map((item) => ({
        url: pathToUrl(item.path),
        content: async () => {
          const blob = await this.fetchJson(item.url, init);
          return Buffer.from(blob.content, "base64");
        }
      }));
    }
    // Truncated listing: stream the tarball instead and filter while reading.
    const tree = await this.doReadTree(archiveUrl, sha, "", init, {
      filter: (path) => matcher.match(path)
    });
    const files = await tree.files();
    return files.map((file) => ({
      url: pathToUrl(file.path),
      content: file.content,
      lastModifiedAt: file.lastModifiedAt
    }));
  }
  /**
   * Resolves the current commit SHA and repository metadata for a URL via
   * the commit status endpoint. When the URL carries no ref, the repo's
   * default branch is looked up first (an extra API round trip).
   */
  async getRepoDetails(url) {
    const parsed = parseGitUrl__default.default(url);
    const { ref, full_name } = parsed;
    const credentials = await this.deps.credentialsProvider.getCredentials({
      url
    });
    const { headers } = credentials;
    const commitStatus = await this.fetchJson(
      `${this.integration.config.apiBaseUrl}/repos/${full_name}/commits/${ref || await this.getDefaultBranch(full_name, credentials)}/status?per_page=0`,
      { headers }
    );
    return {
      commitSha: commitStatus.sha,
      repo: commitStatus.repository
    };
  }
  // Fetches the repository's default branch name from the repos API.
  async getDefaultBranch(repoFullName, credentials) {
    const repo = await this.fetchJson(
      `${this.integration.config.apiBaseUrl}/repos/${repoFullName}`,
      { headers: credentials.headers }
    );
    return repo.default_branch;
  }
  /**
   * fetch wrapper with shared error translation: 304 -> NotModifiedError,
   * 404 -> NotFoundError, otherwise a generic Error. When GitHub signals
   * rate limiting, that fact is appended to the message (note: only for
   * statuses other than 304/404, which are thrown before the check).
   */
  async fetchResponse(url, init) {
    const urlAsString = url.toString();
    const response = await fetch__default.default(urlAsString, init);
    if (!response.ok) {
      let message = `Request failed for ${urlAsString}, ${response.status} ${response.statusText}`;
      if (response.status === 304) {
        throw new errors.NotModifiedError();
      }
      if (response.status === 404) {
        throw new errors.NotFoundError(message);
      }
      if (this.integration.parseRateLimitInfo(response).isRateLimited) {
        message += " (rate limit exceeded)";
      }
      throw new Error(message);
    }
    return response;
  }
  // fetchResponse + JSON body decode.
  async fetchJson(url, init) {
    const response = await this.fetchResponse(url, init);
    return await response.json();
  }
}
1050
+
1051
class GitlabUrlReader {
  /**
   * UrlReader implementation for GitLab, supporting file reads (including job
   * artifact URLs), tree archives, and glob search via readTree.
   */
  constructor(integration, deps) {
    this.integration = integration;
    this.deps = deps;
  }
  // One reader per configured gitlab integration; URLs are matched on exact
  // host equality.
  static factory = ({ config, treeResponseFactory }) => {
    const integrations = integration.ScmIntegrations.fromConfig(config);
    return integrations.gitlab.list().map((integration) => {
      const reader = new GitlabUrlReader(integration, {
        treeResponseFactory
      });
      const predicate = (url) => url.host === integration.config.host;
      return { reader, predicate };
    });
  };
  // Convenience wrapper around readUrl that resolves the file to a Buffer.
  async read(url) {
    const response = await this.readUrl(url);
    return response.buffer();
  }
  /**
   * Reads a single file. The target URL is first rewritten into a GitLab API
   * URL (artifact URLs get special handling, see getGitlabFetchUrl).
   * Conditional headers are forwarded; 304 -> NotModifiedError,
   * 404 -> NotFoundError, other non-OK -> Error.
   */
  async readUrl(url, options) {
    const { etag, lastModifiedAfter, signal } = options ?? {};
    const builtUrl = await this.getGitlabFetchUrl(url);
    let response;
    try {
      response = await fetch__default.default(builtUrl, {
        headers: {
          ...integration.getGitLabRequestOptions(this.integration.config).headers,
          ...etag && { "If-None-Match": etag },
          ...lastModifiedAfter && {
            "If-Modified-Since": lastModifiedAfter.toUTCString()
          }
        },
        // TODO(freben): The signal cast is there because pre-3.x versions of
        // node-fetch have a very slightly deviating AbortSignal type signature.
        // The difference does not affect us in practice however. The cast can be
        // removed after we support ESM for CLI dependencies and migrate to
        // version 3 of node-fetch.
        // https://github.com/backstage/backstage/issues/8242
        ...signal && { signal }
      });
    } catch (e) {
      throw new Error(`Unable to read ${url}, ${e}`);
    }
    if (response.status === 304) {
      throw new errors.NotModifiedError();
    }
    if (response.ok) {
      return ReadUrlResponseFactory.fromNodeJSReadable(response.body, {
        etag: response.headers.get("ETag") ?? void 0,
        lastModifiedAt: parseLastModified(
          response.headers.get("Last-Modified")
        )
      });
    }
    const message = `${url} could not be read as ${builtUrl}, ${response.status} ${response.statusText}`;
    if (response.status === 404) {
      throw new errors.NotFoundError(message);
    }
    throw new Error(message);
  }
  /**
   * Reads a tree. Three sequential API calls: resolve the project (for the
   * default branch), resolve the latest commit on the branch/path (used as
   * the etag), then download the archive. NotModifiedError is thrown when
   * the caller's etag matches the latest commit SHA.
   */
  async readTree(url, options) {
    const { etag, signal } = options ?? {};
    const { ref, full_name, filepath } = parseGitUrl__default.default(url);
    let repoFullName = full_name;
    // When the instance is hosted under a relative path (e.g. /gitlab), strip
    // that prefix from the parsed project path before calling the API.
    const relativePath = integration.getGitLabIntegrationRelativePath(
      this.integration.config
    );
    if (relativePath) {
      const rectifiedRelativePath = `${lodash.trimStart(relativePath, "/")}/`;
      repoFullName = full_name.replace(rectifiedRelativePath, "");
    }
    const projectGitlabResponse = await fetch__default.default(
      new URL(
        `${this.integration.config.apiBaseUrl}/projects/${encodeURIComponent(
          repoFullName
        )}`
      ).toString(),
      integration.getGitLabRequestOptions(this.integration.config)
    );
    if (!projectGitlabResponse.ok) {
      const msg = `Failed to read tree from ${url}, ${projectGitlabResponse.status} ${projectGitlabResponse.statusText}`;
      if (projectGitlabResponse.status === 404) {
        throw new errors.NotFoundError(msg);
      }
      throw new Error(msg);
    }
    const projectGitlabResponseJson = await projectGitlabResponse.json();
    // URL ref wins; otherwise fall back to the project's default branch.
    const branch = ref || projectGitlabResponseJson.default_branch;
    const commitsReqParams = new URLSearchParams();
    commitsReqParams.set("ref_name", branch);
    if (!!filepath) {
      commitsReqParams.set("path", filepath);
    }
    const commitsGitlabResponse = await fetch__default.default(
      new URL(
        `${this.integration.config.apiBaseUrl}/projects/${encodeURIComponent(
          repoFullName
        )}/repository/commits?${commitsReqParams.toString()}`
      ).toString(),
      {
        ...integration.getGitLabRequestOptions(this.integration.config),
        // TODO(freben): The signal cast is there because pre-3.x versions of
        // node-fetch have a very slightly deviating AbortSignal type signature.
        // The difference does not affect us in practice however. The cast can
        // be removed after we support ESM for CLI dependencies and migrate to
        // version 3 of node-fetch.
        // https://github.com/backstage/backstage/issues/8242
        ...signal && { signal }
      }
    );
    if (!commitsGitlabResponse.ok) {
      const message = `Failed to read tree (branch) from ${url}, ${commitsGitlabResponse.status} ${commitsGitlabResponse.statusText}`;
      if (commitsGitlabResponse.status === 404) {
        throw new errors.NotFoundError(message);
      }
      throw new Error(message);
    }
    // First commit in the listing is the newest; empty string when none.
    const commitSha = (await commitsGitlabResponse.json())[0]?.id ?? "";
    if (etag && etag === commitSha) {
      throw new errors.NotModifiedError();
    }
    const archiveReqParams = new URLSearchParams();
    archiveReqParams.set("sha", branch);
    if (!!filepath) {
      archiveReqParams.set("path", filepath);
    }
    const archiveGitLabResponse = await fetch__default.default(
      `${this.integration.config.apiBaseUrl}/projects/${encodeURIComponent(
        repoFullName
      )}/repository/archive?${archiveReqParams.toString()}`,
      {
        ...integration.getGitLabRequestOptions(this.integration.config),
        // TODO(freben): The signal cast is there because pre-3.x versions of
        // node-fetch have a very slightly deviating AbortSignal type signature.
        // The difference does not affect us in practice however. The cast can
        // be removed after we support ESM for CLI dependencies and migrate to
        // version 3 of node-fetch.
        // https://github.com/backstage/backstage/issues/8242
        ...signal && { signal }
      }
    );
    if (!archiveGitLabResponse.ok) {
      const message = `Failed to read tree (archive) from ${url}, ${archiveGitLabResponse.status} ${archiveGitLabResponse.statusText}`;
      if (archiveGitLabResponse.status === 404) {
        throw new errors.NotFoundError(message);
      }
      throw new Error(message);
    }
    return await this.deps.treeResponseFactory.fromTarArchive({
      stream: stream.Readable.from(archiveGitLabResponse.body),
      subpath: filepath,
      etag: commitSha,
      filter: options?.filter
    });
  }
  /**
   * Glob search implemented on top of readTree: the glob's static prefix is
   * used to narrow the downloaded tree, then paths are matched with minimatch
   * and mapped back to resolved URLs.
   */
  async search(url, options) {
    const { filepath } = parseGitUrl__default.default(url);
    const staticPart = this.getStaticPart(filepath);
    const matcher = new minimatch.Minimatch(filepath);
    const treeUrl = lodash.trimEnd(url.replace(filepath, staticPart), `/`);
    const pathPrefix = staticPart ? `${staticPart}/` : "";
    const tree = await this.readTree(treeUrl, {
      etag: options?.etag,
      signal: options?.signal,
      filter: (path) => matcher.match(`${pathPrefix}${path}`)
    });
    const files = await tree.files();
    return {
      etag: tree.etag,
      files: files.map((file) => ({
        url: this.integration.resolveUrl({
          url: `/${pathPrefix}${file.path}`,
          base: url
        }),
        content: file.content,
        lastModifiedAt: file.lastModifiedAt
      }))
    };
  }
  /**
   * This function splits the input globPattern string into segments using the path separator /. It then iterates over
   * the segments from the end of the array towards the beginning, checking if the concatenated string up to that
   * segment matches the original globPattern using the minimatch function. If a match is found, it continues iterating.
   * If no match is found, it returns the concatenated string up to the current segment, which is the static part of the
   * glob pattern.
   *
   * E.g. `catalog/foo/*.yaml` will return `catalog/foo`.
   *
   * @param globPattern the glob pattern
   * @private
   */
  getStaticPart(globPattern) {
    const segments = globPattern.split("/");
    let i = segments.length;
    while (i > 0 && new minimatch.Minimatch(segments.slice(0, i).join("/")).match(globPattern)) {
      i--;
    }
    return segments.slice(0, i).join("/");
  }
  // Diagnostic description; reports whether a token is configured.
  toString() {
    const { host, token } = this.integration.config;
    return `gitlab{host=${host},authed=${Boolean(token)}}`;
  }
  // Rewrites a target URL into its API fetch URL. Job artifact URLs take a
  // dedicated translation path; everything else goes through the standard
  // file-fetch URL builder.
  async getGitlabFetchUrl(target) {
    const targetUrl = new URL(target);
    if (targetUrl.pathname.includes("/-/jobs/artifacts/")) {
      return this.getGitlabArtifactFetchUrl(targetUrl).then(
        (value) => value.toString()
      );
    }
    return integration.getGitLabFileFetchUrl(target, this.integration.config);
  }
  // convert urls of the form:
  // https://example.com/<namespace>/<project>/-/jobs/artifacts/<ref>/raw/<path_to_file>?job=<job_name>
  // to urls of the form:
  // https://example.com/api/v4/projects/:id/jobs/artifacts/:ref_name/raw/*artifact_path?job=<job_name>
  async getGitlabArtifactFetchUrl(target) {
    if (!target.pathname.includes("/-/jobs/artifacts/")) {
      throw new Error("Unable to process url as an GitLab artifact");
    }
    try {
      const [namespaceAndProject, ref] = target.pathname.split("/-/jobs/artifacts/");
      const projectPath = new URL(target);
      projectPath.pathname = namespaceAndProject;
      const projectId = await this.resolveProjectToId(projectPath);
      const relativePath = integration.getGitLabIntegrationRelativePath(
        this.integration.config
      );
      // Cloning `target` preserves the original query string (?job=...).
      const newUrl = new URL(target);
      newUrl.pathname = `${relativePath}/api/v4/projects/${projectId}/jobs/artifacts/${ref}`;
      return newUrl;
    } catch (e) {
      throw new Error(
        `Unable to translate GitLab artifact URL: ${target}, ${e}`
      );
    }
  }
  // Resolves a namespace/project path to its numeric project id via the
  // projects API (artifact endpoints require the id, not the path).
  async resolveProjectToId(pathToProject) {
    let project = pathToProject.pathname;
    const relativePath = integration.getGitLabIntegrationRelativePath(
      this.integration.config
    );
    if (relativePath) {
      project = project.replace(relativePath, "");
    }
    project = project.replace(/^\//, "");
    const result = await fetch__default.default(
      `${pathToProject.origin}${relativePath}/api/v4/projects/${encodeURIComponent(project)}`,
      integration.getGitLabRequestOptions(this.integration.config)
    );
    const data = await result.json();
    if (!result.ok) {
      throw new Error(`Gitlab error: ${data.error}, ${data.error_description}`);
    }
    return Number(data.id);
  }
}
1308
+
1309
class GiteaUrlReader {
  /**
   * UrlReader implementation for Gitea, using the Gitea REST API for file
   * contents (base64-encoded), repository archives for trees, and the latest
   * commit SHA as the tree etag.
   */
  constructor(integration, deps) {
    this.integration = integration;
    this.deps = deps;
  }
  // One reader per configured gitea integration; URLs are matched on exact
  // host equality.
  static factory = ({ config, treeResponseFactory }) => {
    return integration.ScmIntegrations.fromConfig(config).gitea.list().map((integration) => {
      const reader = new GiteaUrlReader(integration, { treeResponseFactory });
      const predicate = (url) => {
        return url.host === integration.config.host;
      };
      return { reader, predicate };
    });
  };
  // Convenience wrapper around readUrl that resolves the file to a Buffer.
  async read(url) {
    const response = await this.readUrl(url);
    return response.buffer();
  }
  /**
   * Reads a single file via the Gitea contents API. The API returns JSON with
   * an `encoding` field; only base64 is supported. Error mapping:
   * 404 -> NotFoundError, 304 -> NotModifiedError, 403 -> AuthenticationError,
   * otherwise a generic Error.
   */
  async readUrl(url, options) {
    let response;
    const blobUrl = integration.getGiteaFileContentsUrl(this.integration.config, url);
    try {
      response = await fetch__default.default(blobUrl, {
        method: "GET",
        ...integration.getGiteaRequestOptions(this.integration.config),
        signal: options?.signal
      });
    } catch (e) {
      throw new Error(`Unable to read ${blobUrl}, ${e}`);
    }
    if (response.ok) {
      const { encoding, content } = await response.json();
      if (encoding === "base64") {
        return ReadUrlResponseFactory.fromReadable(
          stream.Readable.from(Buffer.from(content, "base64")),
          {
            etag: response.headers.get("ETag") ?? void 0,
            lastModifiedAt: parseLastModified(
              response.headers.get("Last-Modified")
            )
          }
        );
      }
      throw new Error(`Unknown encoding: ${encoding}`);
    }
    const message = `${url} could not be read as ${blobUrl}, ${response.status} ${response.statusText}`;
    if (response.status === 404) {
      throw new errors.NotFoundError(message);
    }
    if (response.status === 304) {
      throw new errors.NotModifiedError();
    }
    if (response.status === 403) {
      throw new errors.AuthenticationError();
    }
    throw new Error(message);
  }
  /**
   * Reads a tree as a tar archive. The latest commit SHA is used as the etag;
   * NotModifiedError is thrown when the caller already has it.
   *
   * Fix: the archive response status is now checked before handing the body
   * to the tar parser. Previously a 404/500 error page was streamed straight
   * into fromTarArchive, producing an opaque parse failure instead of a
   * meaningful NotFoundError/Error.
   */
  async readTree(url, options) {
    const lastCommitHash = await this.getLastCommitHash(url);
    if (options?.etag && options.etag === lastCommitHash) {
      throw new errors.NotModifiedError();
    }
    const archiveUri = integration.getGiteaArchiveUrl(this.integration.config, url);
    let response;
    try {
      response = await fetch__default.default(archiveUri, {
        method: "GET",
        ...integration.getGiteaRequestOptions(this.integration.config),
        signal: options?.signal
      });
    } catch (e) {
      throw new Error(`Unable to read ${archiveUri}, ${e}`);
    }
    if (!response.ok) {
      const message = `Failed to read tree from ${archiveUri}, ${response.status} ${response.statusText}`;
      if (response.status === 404) {
        throw new errors.NotFoundError(message);
      }
      throw new Error(message);
    }
    const parsedUri = integration.parseGiteaUrl(this.integration.config, url);
    return this.deps.treeResponseFactory.fromTarArchive({
      stream: stream.Readable.from(response.body),
      subpath: parsedUri.path,
      etag: lastCommitHash,
      filter: options?.filter
    });
  }
  // search is intentionally unsupported for Gitea.
  search() {
    throw new Error("GiteaUrlReader search not implemented.");
  }
  // Diagnostic description; reports whether a password is configured.
  toString() {
    const { host } = this.integration.config;
    return `gitea{host=${host},authed=${Boolean(
      this.integration.config.password
    )}}`;
  }
  // Fetches the SHA of the latest commit on the target branch; used as the
  // etag for readTree.
  async getLastCommitHash(url) {
    const commitUri = integration.getGiteaLatestCommitUrl(this.integration.config, url);
    const response = await fetch__default.default(
      commitUri,
      integration.getGiteaRequestOptions(this.integration.config)
    );
    if (!response.ok) {
      const message = `Failed to retrieve latest commit information from ${commitUri}, ${response.status} ${response.statusText}`;
      if (response.status === 404) {
        throw new errors.NotFoundError(message);
      }
      throw new Error(message);
    }
    return (await response.json()).sha;
  }
}
1415
+
1416
class HarnessUrlReader {
  /**
   * UrlReader implementation for Harness Code, using the Harness API for raw
   * file contents and zip archives for trees, with the latest commit SHA as
   * the tree etag.
   */
  constructor(integration, deps) {
    this.integration = integration;
    this.deps = deps;
  }
  // One reader per configured harness integration; URLs are matched on exact
  // host equality.
  static factory = ({ config, treeResponseFactory }) => {
    return integration.ScmIntegrations.fromConfig(config).harness.list().map((integration) => {
      const reader = new HarnessUrlReader(integration, {
        treeResponseFactory
      });
      const predicate = (url) => {
        return url.host === integration.config.host;
      };
      return { reader, predicate };
    });
  };
  // Convenience wrapper around readUrl that resolves the file to a Buffer.
  async read(url) {
    const response = await this.readUrl(url);
    return response.buffer();
  }
  /**
   * Reads a single file via the Harness raw-contents endpoint. Error mapping:
   * 404 -> NotFoundError, 304 -> NotModifiedError, 403 -> AuthenticationError,
   * otherwise a generic Error.
   *
   * Fix: the body was previously wrapped in `{ data: response.body }` and
   * guarded by `if (jsonResponse)` — an always-truthy check whose else branch
   * (`Unknown json`) was unreachable dead code. The body is now streamed
   * directly. The failure message is also brought in line with the other
   * readers ("could not be read as"), replacing the garbled "${url} x
   * ${blobUrl}" text.
   */
  async readUrl(url, options) {
    let response;
    const blobUrl = integration.getHarnessFileContentsUrl(this.integration.config, url);
    try {
      response = await fetch__default.default(blobUrl, {
        method: "GET",
        ...integration.getHarnessRequestOptions(this.integration.config),
        signal: options?.signal
      });
    } catch (e) {
      throw new Error(`Unable to read ${blobUrl}, ${e}`);
    }
    if (response.ok) {
      return ReadUrlResponseFactory.fromReadable(
        stream.Readable.from(response.body),
        {
          etag: response.headers.get("ETag") ?? void 0
        }
      );
    }
    const message = `${url} could not be read as ${blobUrl}, ${response.status} ${response.statusText}`;
    if (response.status === 404) {
      throw new errors.NotFoundError(message);
    }
    if (response.status === 304) {
      throw new errors.NotModifiedError();
    }
    if (response.status === 403) {
      throw new errors.AuthenticationError();
    }
    throw new Error(message);
  }
  /**
   * Reads a tree as a zip archive. The latest commit SHA is used as the etag;
   * NotModifiedError is thrown when the caller already has it.
   *
   * Fix: the archive response status is now checked before handing the body
   * to the zip parser. Previously a 404/500 error page was streamed straight
   * into fromZipArchive, producing an opaque parse failure instead of a
   * meaningful NotFoundError/Error.
   */
  async readTree(url, options) {
    const lastCommitHash = await this.getLastCommitHash(url);
    if (options?.etag && options.etag === lastCommitHash) {
      throw new errors.NotModifiedError();
    }
    const archiveUri = integration.getHarnessArchiveUrl(this.integration.config, url);
    let response;
    try {
      response = await fetch__default.default(archiveUri, {
        method: "GET",
        ...integration.getHarnessRequestOptions(this.integration.config),
        signal: options?.signal
      });
    } catch (e) {
      throw new Error(`Unable to read ${archiveUri}, ${e}`);
    }
    if (!response.ok) {
      const message = `Failed to read tree from ${archiveUri}, ${response.status} ${response.statusText}`;
      if (response.status === 404) {
        throw new errors.NotFoundError(message);
      }
      throw new Error(message);
    }
    const parsedUri = integration.parseHarnessUrl(this.integration.config, url);
    return this.deps.treeResponseFactory.fromZipArchive({
      stream: stream.Readable.from(response.body),
      subpath: parsedUri.path,
      etag: lastCommitHash,
      filter: options?.filter
    });
  }
  // search is intentionally unsupported for Harness.
  search() {
    throw new Error("HarnessUrlReader search not implemented.");
  }
  // Diagnostic description; reports whether a token or API key is configured.
  toString() {
    const { host } = this.integration.config;
    return `harness{host=${host},authed=${Boolean(
      this.integration.config.token || this.integration.config.apiKey
    )}}`;
  }
  // Fetches the SHA of the latest commit on the target branch; used as the
  // etag for readTree.
  async getLastCommitHash(url) {
    const commitUri = integration.getHarnessLatestCommitUrl(this.integration.config, url);
    const response = await fetch__default.default(
      commitUri,
      integration.getHarnessRequestOptions(this.integration.config)
    );
    if (!response.ok) {
      const message = `Failed to retrieve latest commit information from ${commitUri}, ${response.status} ${response.statusText}`;
      if (response.status === 404) {
        throw new errors.NotFoundError(message);
      }
      throw new Error(message);
    }
    return (await response.json()).latest_commit.sha;
  }
}
1521
+
1522
const DEFAULT_REGION = "us-east-1";
/**
 * Parses an AWS S3 (or S3-compatible) URL into its bucket, object key and region.
 *
 * Supports both virtual-hosted-style URLs (bucket.s3.region.amazonaws.com/key)
 * and path-style URLs (s3.amazonaws.com/bucket/key), for the official AWS hosts
 * as well as custom S3-compatible hosts configured on the integration.
 *
 * @param url - the full object URL to parse
 * @param config - the awsS3 integration config (host, s3ForcePathStyle, ...)
 * @returns an object with `path` (object key), `bucket` and `region`
 * @throws Error when the URL does not match the expected S3 URL shapes
 */
function parseUrl$1(url, config) {
  const parsedUrl = new URL(url);
  const pathname = parsedUrl.pathname.substring(1);
  const host = parsedUrl.host;

  // Shared handling for path-style URLs, where the bucket is the first path
  // segment: "<bucket>/<key...>".
  const splitPathStyle = (region) => {
    const firstSlash = pathname.indexOf("/");
    if (firstSlash < 0) {
      throw new Error(
        `Invalid path-style AWS S3 URL ${url}, does not contain bucket in the path`
      );
    }
    return {
      path: pathname.substring(firstSlash + 1),
      bucket: pathname.substring(0, firstSlash),
      region
    };
  };

  if (config.host === "amazonaws.com" || config.host === "amazonaws.com.cn") {
    // Official AWS hosts: extract optional bucket and region from the hostname.
    const match = host.match(
      /^(?:([a-z0-9.-]+)\.)?s3(?:[.-]([a-z0-9-]+))?\.amazonaws\.com(\.cn)?$/
    );
    if (!match) {
      throw new Error(`Invalid AWS S3 URL ${url}`);
    }
    const [, hostBucket, hostRegion] = match;
    const region = hostRegion ?? DEFAULT_REGION;
    if (config.s3ForcePathStyle || !hostBucket) {
      return splitPathStyle(region);
    }
    return {
      path: pathname,
      bucket: hostBucket,
      region
    };
  }

  // Custom S3-compatible host. If the URL host equals the configured host
  // exactly, the bucket must be in the path; otherwise the bucket is the
  // hostname prefix before ".<config.host>".
  if (config.s3ForcePathStyle || host.length === config.host.length) {
    return splitPathStyle(DEFAULT_REGION);
  }
  return {
    path: pathname,
    bucket: host.substring(0, host.length - config.host.length - 1),
    region: DEFAULT_REGION
  };
}
1574
/**
 * A UrlReader that reads files and file trees from AWS S3 buckets.
 *
 * URLs are parsed by `parseUrl$1` into bucket/key/region; credentials come
 * from the provided credentials manager, optionally overridden by static
 * keys and/or an assumed role from the integration config.
 */
class AwsS3UrlReader {
  /**
   * @param credsManager - resolves AWS credential providers for the config
   * @param integration - the single awsS3 integration this reader serves
   * @param deps - injected helpers; currently only `treeResponseFactory`
   */
  constructor(credsManager, integration, deps) {
    this.credsManager = credsManager;
    this.integration = integration;
    this.deps = deps;
  }
  /**
   * Creates one reader per configured awsS3 integration. Each reader's
   * predicate claims URLs whose host ends with the integration's host.
   */
  static factory = ({ config, treeResponseFactory }) => {
    const integrations = integration.ScmIntegrations.fromConfig(config);
    const credsManager = integrationAwsNode.DefaultAwsCredentialsManager.fromConfig(config);
    return integrations.awsS3.list().map((integration) => {
      const reader = new AwsS3UrlReader(credsManager, integration, {
        treeResponseFactory
      });
      const predicate = (url) => url.host.endsWith(integration.config.host);
      return { reader, predicate };
    });
  };
  /**
   * If accessKeyId and secretAccessKey are missing, the standard credentials provider chain will be used:
   * https://docs.aws.amazon.com/AWSJavaSDK/latest/javadoc/com/amazonaws/auth/DefaultAWSCredentialsProviderChain.html
   */
  static buildStaticCredentials(accessKeyId, secretAccessKey) {
    return async () => {
      return {
        accessKeyId,
        secretAccessKey
      };
    };
  }
  /**
   * Resolves the credential provider to use: static keys from the integration
   * config when present, the default chain otherwise, and — when a roleArn is
   * configured — temporary credentials obtained by assuming that role.
   */
  static async buildCredentials(credsManager, region, integration) {
    if (!integration) {
      return (await credsManager.getCredentialProvider()).sdkCredentialProvider;
    }
    const accessKeyId = integration.config.accessKeyId;
    const secretAccessKey = integration.config.secretAccessKey;
    let explicitCredentials;
    if (accessKeyId && secretAccessKey) {
      explicitCredentials = AwsS3UrlReader.buildStaticCredentials(
        accessKeyId,
        secretAccessKey
      );
    } else {
      explicitCredentials = (await credsManager.getCredentialProvider()).sdkCredentialProvider;
    }
    const roleArn = integration.config.roleArn;
    if (roleArn) {
      // Assume the configured role, using the explicit credentials as master.
      return credentialProviders.fromTemporaryCredentials({
        masterCredentials: explicitCredentials,
        params: {
          RoleSessionName: "backstage-aws-s3-url-reader",
          RoleArn: roleArn,
          ExternalId: integration.config.externalId
        },
        clientConfig: { region }
      });
    }
    return explicitCredentials;
  }
  /** Builds an S3 client for the given region using the resolved credentials. */
  async buildS3Client(credsManager, region, integration) {
    const credentials = await AwsS3UrlReader.buildCredentials(
      credsManager,
      region,
      integration
    );
    const s3 = new clientS3.S3Client({
      region,
      credentials,
      endpoint: integration.config.endpoint,
      forcePathStyle: integration.config.s3ForcePathStyle
    });
    return s3;
  }
  /**
   * Buffers an S3 object body stream fully, then re-exposes it as a fresh
   * Readable. Rejects with a ForwardedError if the source stream errors.
   */
  async retrieveS3ObjectData(stream$1) {
    return new Promise((resolve, reject) => {
      try {
        const chunks = [];
        stream$1.on("data", (chunk) => chunks.push(chunk));
        stream$1.on(
          "error",
          (e) => reject(new errors.ForwardedError("Unable to read stream", e))
        );
        stream$1.on("end", () => resolve(stream.Readable.from(Buffer.concat(chunks))));
      } catch (e) {
        throw new errors.ForwardedError("Unable to parse the response data", e);
      }
    });
  }
  /** Reads a single object and returns its contents as a Buffer. */
  async read(url) {
    const response = await this.readUrl(url);
    return response.buffer();
  }
  /**
   * Reads a single object. Honors `etag` (If-None-Match) and
   * `lastModifiedAfter` (If-Modified-Since); a 304 from S3 is surfaced as a
   * NotModifiedError. Any other failure is wrapped in a ForwardedError.
   */
  async readUrl(url, options) {
    const { etag, lastModifiedAfter } = options ?? {};
    try {
      const { path, bucket, region } = parseUrl$1(url, this.integration.config);
      const s3Client = await this.buildS3Client(
        this.credsManager,
        region,
        this.integration
      );
      const abortController$1 = new abortController.AbortController();
      const params = {
        Bucket: bucket,
        Key: path,
        ...etag && { IfNoneMatch: etag },
        ...lastModifiedAfter && {
          IfModifiedSince: lastModifiedAfter
        }
      };
      // Bridge the caller's AbortSignal to the SDK's abort controller.
      options?.signal?.addEventListener("abort", () => abortController$1.abort());
      const getObjectCommand = new clientS3.GetObjectCommand(params);
      const response = await s3Client.send(getObjectCommand, {
        abortSignal: abortController$1.signal
      });
      const s3ObjectData = await this.retrieveS3ObjectData(
        response.Body
      );
      return ReadUrlResponseFactory.fromReadable(s3ObjectData, {
        etag: response.ETag,
        lastModifiedAt: response.LastModified
      });
    } catch (e) {
      // The SDK reports conditional-request misses as HTTP 304 metadata.
      if (e.$metadata && e.$metadata.httpStatusCode === 304) {
        throw new errors.NotModifiedError();
      }
      throw new errors.ForwardedError("Could not retrieve file from S3", e);
    }
  }
  /**
   * Lists all objects under the URL's key prefix (paginating with
   * ListObjectsV2), downloads each one, and assembles a tree response.
   */
  async readTree(url, options) {
    try {
      const { path, bucket, region } = parseUrl$1(url, this.integration.config);
      const s3Client = await this.buildS3Client(
        this.credsManager,
        region,
        this.integration
      );
      const abortController$1 = new abortController.AbortController();
      // Register the abort bridge once, up front. Previously this was done
      // inside the pagination loop, adding a new listener for every page.
      options?.signal?.addEventListener(
        "abort",
        () => abortController$1.abort()
      );
      const allObjects = [];
      const responses = [];
      let continuationToken;
      let output;
      do {
        const listObjectsV2Command = new clientS3.ListObjectsV2Command({
          Bucket: bucket,
          ContinuationToken: continuationToken,
          Prefix: path
        });
        output = await s3Client.send(listObjectsV2Command, {
          abortSignal: abortController$1.signal
        });
        if (output.Contents) {
          output.Contents.forEach((contents) => {
            allObjects.push(contents.Key);
          });
        }
        continuationToken = output.NextContinuationToken;
      } while (continuationToken);
      for (let i = 0; i < allObjects.length; i++) {
        const getObjectCommand = new clientS3.GetObjectCommand({
          Bucket: bucket,
          Key: String(allObjects[i])
        });
        const response = await s3Client.send(getObjectCommand);
        const s3ObjectData = await this.retrieveS3ObjectData(
          response.Body
        );
        responses.push({
          data: s3ObjectData,
          // Tree paths are relative to the requested prefix.
          path: posix.relative(path, String(allObjects[i])),
          lastModifiedAt: response?.LastModified ?? void 0
        });
      }
      return await this.deps.treeResponseFactory.fromReadableArray(responses);
    } catch (e) {
      throw new errors.ForwardedError("Could not retrieve file tree from S3", e);
    }
  }
  /** Search is not supported for S3. */
  async search() {
    throw new Error("AwsS3Reader does not implement search");
  }
  toString() {
    const secretAccessKey = this.integration.config.secretAccessKey;
    return `awsS3{host=${this.integration.config.host},authed=${Boolean(
      secretAccessKey
    )}}`;
  }
}
1765
+
1766
/** Returns true when `num` lies within the inclusive [lo, hi] range. */
const isInRange = (num, range) => {
  const [lo, hi] = range;
  return lo <= num && num <= hi;
};
/**
 * Parses a port spec from config into an inclusive [start, end] range.
 * Accepts either a single port ("8080") or a dash range ("8000-9000").
 * @throws Error for malformed or non-positive ranges
 */
const parsePortRange = (port) => {
  if (!port.includes("-")) {
    const single = parseInt(port, 10);
    return [single, single];
  }
  // NaN and 0 bounds are dropped here, which then fails the length check.
  const bounds = port.split("-").map((part) => parseInt(part, 10)).filter(Boolean);
  if (bounds.length !== 2)
    throw new Error(`Port range is not valid: ${port}`);
  const [lower, upper] = bounds;
  if (lower <= 0 || upper <= 0 || lower > upper)
    throw new Error(`Port range is not valid: [${lower}, ${upper}]`);
  return bounds;
};
/**
 * Builds a URL predicate for the optional port part of an allow-list host.
 * With no port spec, only URLs without an explicit port match. With a spec,
 * the URL's explicit port — or the protocol default (80/443) — must fall
 * within the parsed range.
 */
const parsePortPredicate = (port) => {
  if (!port) {
    return (url) => !url.port;
  }
  const range = parsePortRange(port);
  return (url) => {
    if (url.port)
      return isInRange(parseInt(url.port, 10), range);
    if (url.protocol === "http:")
      return isInRange(80, range);
    if (url.protocol === "https:")
      return isInRange(443, range);
    return false;
  };
};
1798
/**
 * A plain HTTP(S) UrlReader guarded by the `backend.reading.allow` host list.
 */
class FetchUrlReader {
  /**
   * The factory creates a single reader that will be used for reading any URL that's listed
   * in configuration at `backend.reading.allow`. The allow list contains a list of objects describing
   * targets to allow, containing the following fields:
   *
   * `host`:
   * Either full hostnames to match, or subdomain wildcard matchers with a leading '*'.
   * For example 'example.com' and '*.example.com' are valid values, 'prod.*.example.com' is not.
   *
   * `paths`:
   * An optional list of paths which are allowed. If the list is omitted all paths are allowed.
   */
  static factory = ({ config }) => {
    const allowConfigs = config.getOptionalConfigArray("backend.reading.allow") ?? [];
    const predicates = allowConfigs.map((allowConfig) => {
      const paths = allowConfig.getOptionalStringArray("paths");
      // With no explicit path list, every path is permitted.
      let checkPath = (_url) => true;
      if (paths) {
        checkPath = (url) => {
          const targetPath = platformPath__default.default.posix.normalize(url.pathname);
          return paths.some(
            (allowedPath) => targetPath.startsWith(allowedPath)
          );
        };
      }
      const [hostname, port] = allowConfig.getString("host").split(":");
      const checkPort = parsePortPredicate(port);
      // '*.example.com' matches any subdomain; otherwise require an exact host.
      const checkHost = hostname.startsWith("*.")
        ? (url) => url.hostname.endsWith(hostname.slice(1))
        : (url) => url.hostname === hostname;
      return (url) => checkHost(url) && checkPath(url) && checkPort(url);
    });
    const reader = new FetchUrlReader();
    const predicate = (url) => predicates.some((p) => p(url));
    return [{ reader, predicate }];
  };
  /** Reads a URL and returns its body as a Buffer. */
  async read(url) {
    const response = await this.readUrl(url);
    return response.buffer();
  }
  /**
   * Fetches the URL, forwarding caching hints (`etag`, `lastModifiedAfter`)
   * as conditional-request headers and `token` as a bearer Authorization
   * header. A 304 response becomes NotModifiedError, a 404 NotFoundError.
   */
  async readUrl(url, options) {
    const headers = {};
    if (options?.etag) {
      headers["If-None-Match"] = options.etag;
    }
    if (options?.lastModifiedAfter) {
      headers["If-Modified-Since"] = options.lastModifiedAfter.toUTCString();
    }
    if (options?.token) {
      headers.Authorization = `Bearer ${options.token}`;
    }
    let response;
    try {
      // TODO(freben): The signal cast is there because pre-3.x versions of
      // node-fetch have a very slightly deviating AbortSignal type signature.
      // The difference does not affect us in practice however. The cast can
      // be removed after we support ESM for CLI dependencies and migrate to
      // version 3 of node-fetch.
      // https://github.com/backstage/backstage/issues/8242
      response = await fetch__default.default(url, {
        headers,
        signal: options?.signal
      });
    } catch (e) {
      throw new Error(`Unable to read ${url}, ${e}`);
    }
    if (response.status === 304) {
      throw new errors.NotModifiedError();
    }
    if (response.ok) {
      return ReadUrlResponseFactory.fromNodeJSReadable(response.body, {
        etag: response.headers.get("ETag") ?? void 0,
        lastModifiedAt: parseLastModified(
          response.headers.get("Last-Modified")
        )
      });
    }
    const message = `could not read ${url}, ${response.status} ${response.statusText}`;
    if (response.status === 404) {
      throw new errors.NotFoundError(message);
    }
    throw new Error(message);
  }
  /** Not supported for plain URL fetching. */
  async readTree() {
    throw new Error("FetchUrlReader does not implement readTree");
  }
  /** Not supported for plain URL fetching. */
  async search() {
    throw new Error("FetchUrlReader does not implement search");
  }
  toString() {
    return "fetch{}";
  }
}
1886
+
1887
/**
 * Builds the error message used when no registered reader claims a URL.
 */
function notAllowedMessage(url) {
  const hint = "You may need to configure an integration for the target host, or add it to the configured list of allowed hosts at 'backend.reading.allow'";
  return `Reading from '${url}' is not allowed. ${hint}`;
}
1890
/**
 * A UrlReader that multiplexes between a set of registered readers, choosing
 * the first one whose predicate matches the parsed URL. Throws a
 * NotAllowedError when no reader claims the URL.
 */
class UrlReaderPredicateMux {
  readers = [];
  /** Registers a `{ reader, predicate }` tuple. Order of registration wins. */
  register(tuple) {
    this.readers.push(tuple);
  }
  /**
   * Returns the first registered reader whose predicate matches `url`.
   * @throws NotAllowedError when no predicate matches
   */
  #readerFor(url) {
    const parsed = new URL(url);
    for (const { predicate, reader } of this.readers) {
      if (predicate(parsed)) {
        return reader;
      }
    }
    throw new errors.NotAllowedError(notAllowedMessage(url));
  }
  async readUrl(url, options) {
    return this.#readerFor(url).readUrl(url, options);
  }
  async readTree(url, options) {
    return await this.#readerFor(url).readTree(url, options);
  }
  async search(url, options) {
    return await this.#readerFor(url).search(url, options);
  }
  toString() {
    // Fixed: the closing '}' of the template was previously missing.
    return `predicateMux{readers=${this.readers.map((t) => t.reader).join(",")}}`;
  }
}
1926
+
1927
const pipeline$2 = util.promisify(stream.pipeline);

// Matches a leading "<dirname>/" segment (one or more non-slash chars + slash).
const directoryNameRegex = /^[^\/]+\//;

/**
 * Strips the first directory segment from a relative path, e.g.
 * "repo-main/docs/x.md" -> "docs/x.md". Paths with no such leading segment
 * (including absolute paths starting with "/") are returned unchanged.
 */
function stripFirstDirectoryFromPath(path) {
  const stripped = path.replace(directoryNameRegex, "");
  return stripped;
}
1932
/**
 * Collects a readable stream into a single Buffer via concat-stream.
 * Resolves with the concatenated Buffer, rejects if the pipeline fails.
 */
const streamToBuffer = (stream) => {
  return new Promise((resolve, reject) => {
    // Route the pipeline's rejection to reject() explicitly instead of using
    // an async promise executor (whose own rejection would be swallowed).
    pipeline$2(stream, concatStream__default.default(resolve)).catch(reject);
  });
};
1941
+
1942
// tar's streaming parser class; emits one "entry" event per archive member.
const TarParseStream = tar.Parse;
const pipeline$1 = util.promisify(stream.pipeline);
// A ReadTreeResponse backed by a tar archive stream. The stream may only be
// consumed once; files()/archive()/dir() are therefore mutually exclusive.
// By default the archive's top-level directory (e.g. the "<repo>-<branch>/"
// wrapper that forges add) is stripped from every entry path.
class TarArchiveResponse {
  // stream: the tarball as a readable stream (consumed exactly once)
  // subPath: optional prefix to select a subtree; normalized to end with "/"
  // workDir: base directory for temporary extraction targets
  // etag: caching identifier surfaced to the caller
  // filter: optional (path, { size }) => boolean entry filter
  // stripFirstDirectory: whether to drop the archive's top-level directory
  constructor(stream, subPath, workDir, etag, filter, stripFirstDirectory = true) {
    this.stream = stream;
    this.subPath = subPath;
    this.workDir = workDir;
    this.etag = etag;
    this.filter = filter;
    this.stripFirstDirectory = stripFirstDirectory;
    if (subPath) {
      // Normalize to a trailing slash so prefix matching on entry paths works.
      if (!subPath.endsWith("/")) {
        this.subPath += "/";
      }
      if (subPath.startsWith("/")) {
        throw new TypeError(
          `TarArchiveResponse subPath must not start with a /, got '${subPath}'`
        );
      }
    }
    // NOTE(review): redundant — etag was already assigned above.
    this.etag = etag;
  }
  read = false;
  // Make sure the input stream is only read once
  onlyOnce() {
    if (this.read) {
      throw new Error("Response has already been read");
    }
    this.read = true;
  }
  // Streams through the tarball and collects matching file entries. Each
  // returned item exposes its content lazily as a Buffer-resolving thunk.
  async files() {
    this.onlyOnce();
    const files = Array();
    const parser = new TarParseStream();
    parser.on("entry", (entry) => {
      if (entry.type === "Directory") {
        entry.resume();
        return;
      }
      const relativePath = this.stripFirstDirectory ? stripFirstDirectoryFromPath(entry.path) : entry.path;
      if (this.subPath) {
        if (!relativePath.startsWith(this.subPath)) {
          entry.resume();
          return;
        }
      }
      const path = relativePath.slice(this.subPath.length);
      if (this.filter) {
        // entry.remain — tar's remaining byte count for the entry; presumably
        // the full entry size at this point — TODO confirm against tar docs.
        if (!this.filter(path, { size: entry.remain })) {
          entry.resume();
          return;
        }
      }
      // NOTE(review): async promise executor without a reject path — a
      // pipeline failure here would surface as an unhandled rejection.
      const content = new Promise(async (resolve) => {
        await pipeline$1(entry, concatStream__default.default(resolve));
      });
      files.push({
        path,
        content: () => content
      });
      entry.resume();
    });
    await pipeline$1(this.stream, parser);
    return files;
  }
  // Returns the (possibly re-packed) archive as a stream. Without a subPath
  // the original stream is handed back untouched; with one, the subtree is
  // extracted to a temp dir and re-tarred.
  async archive() {
    if (!this.subPath) {
      this.onlyOnce();
      return this.stream;
    }
    const tmpDir = await this.dir();
    try {
      const data = await new Promise(async (resolve) => {
        await pipeline$1(
          tar__default.default.create({ cwd: tmpDir }, [""]),
          concatStream__default.default(resolve)
        );
      });
      return stream.Readable.from(data);
    } finally {
      // Always clean up the temporary extraction directory.
      await fs__default.default.remove(tmpDir);
    }
  }
  // Extracts the (filtered) tree to options.targetDir or a fresh temp dir
  // under workDir, returning the directory path.
  async dir(options) {
    this.onlyOnce();
    const dir = options?.targetDir ?? await fs__default.default.mkdtemp(platformPath__default.default.join(this.workDir, "backstage-"));
    // Number of leading path components tar should strip during extraction:
    // the subPath depth, plus one for the archive's own top-level directory.
    let strip = this.subPath ? this.subPath.split("/").length : 1;
    if (!this.stripFirstDirectory) {
      strip--;
    }
    let filterError = void 0;
    await pipeline$1(
      this.stream,
      tar__default.default.extract({
        strip,
        cwd: dir,
        filter: (path, stat) => {
          // Once a user filter has thrown, skip everything else.
          if (filterError) {
            return false;
          }
          const relativePath = this.stripFirstDirectory ? stripFirstDirectoryFromPath(path) : path;
          if (this.subPath && !relativePath.startsWith(this.subPath)) {
            return false;
          }
          if (this.filter) {
            const innerPath = path.split("/").slice(strip).join("/");
            try {
              return this.filter(innerPath, { size: stat.size });
            } catch (error) {
              // Remember the error; it is rethrown after extraction finishes.
              filterError = error;
              return false;
            }
          }
          return true;
        }
      })
    );
    if (filterError) {
      // Only remove the directory if we created it ourselves.
      if (!options?.targetDir) {
        await fs__default.default.remove(dir).catch(() => {
        });
      }
      throw filterError;
    }
    return dir;
  }
}
2069
+
2070
// A ReadTreeResponse backed by a zip archive stream. Because yauzl needs
// random access, the stream is first spooled to a temporary file on disk.
// The stream may only be consumed once.
class ZipArchiveResponse {
  // stream: the zip data as a readable stream (consumed exactly once)
  // subPath: optional prefix selecting a subtree; normalized to end with "/"
  // workDir: base directory for temporary files and extraction targets
  // etag: caching identifier surfaced to the caller
  // filter: optional (path, { size }) => boolean entry filter
  constructor(stream, subPath, workDir, etag, filter) {
    this.stream = stream;
    this.subPath = subPath;
    this.workDir = workDir;
    this.etag = etag;
    this.filter = filter;
    if (subPath) {
      // Normalize to a trailing slash so prefix matching on entry names works.
      if (!subPath.endsWith("/")) {
        this.subPath += "/";
      }
      if (subPath.startsWith("/")) {
        throw new TypeError(
          `ZipArchiveResponse subPath must not start with a /, got '${subPath}'`
        );
      }
    }
    // NOTE(review): redundant — etag was already assigned above.
    this.etag = etag;
  }
  read = false;
  // Make sure the input stream is only read once
  onlyOnce() {
    if (this.read) {
      throw new Error("Response has already been read");
    }
    this.read = true;
  }
  // File path relative to the root extracted directory or a sub directory if subpath is set.
  getInnerPath(path) {
    return path.slice(this.subPath.length);
  }
  // Applies the subPath prefix check and the optional user filter to an entry.
  shouldBeIncluded(entry) {
    if (this.subPath) {
      if (!entry.fileName.startsWith(this.subPath)) {
        return false;
      }
    }
    if (this.filter) {
      return this.filter(this.getInnerPath(entry.fileName), {
        size: entry.uncompressedSize
      });
    }
    return true;
  }
  // Spools the zip stream to a temp file (yauzl requires a seekable file) and
  // returns its name plus a cleanup callback that removes the temp dir.
  async streamToTemporaryFile(stream) {
    const tmpDir = await fs__default.default.mkdtemp(
      platformPath__default.default.join(this.workDir, "backstage-tmp")
    );
    const tmpFile = platformPath__default.default.join(tmpDir, "tmp.zip");
    const writeStream = fs__default.default.createWriteStream(tmpFile);
    return new Promise((resolve, reject) => {
      writeStream.on("error", reject);
      writeStream.on("finish", () => {
        // NOTE(review): end() after 'finish' looks redundant — the stream has
        // already finished by the time this handler runs.
        writeStream.end();
        resolve({
          fileName: tmpFile,
          cleanup: () => fs__default.default.rm(tmpDir, { recursive: true })
        });
      });
      stream.pipe(writeStream);
    });
  }
  // Iterates lazily over every included (non-directory) entry in the zip file,
  // invoking callback(entry, readStream) for each. Resolves once the archive
  // has been fully traversed; rejects on any open/read error.
  forEveryZipEntry(zip, callback) {
    return new Promise((resolve, reject) => {
      yauzl__default.default.open(zip, { lazyEntries: true }, (err, zipfile) => {
        if (err || !zipfile) {
          reject(err || new Error(`Failed to open zip file ${zip}`));
          return;
        }
        zipfile.on("entry", async (entry) => {
          // Directory markers end with "/" and carry no content.
          if (!entry.fileName.endsWith("/") && this.shouldBeIncluded(entry)) {
            zipfile.openReadStream(entry, async (openErr, readStream) => {
              if (openErr || !readStream) {
                reject(
                  openErr || new Error(`Failed to open zip entry ${entry.fileName}`)
                );
                return;
              }
              await callback(entry, readStream);
              // Pull the next entry only after the callback completed.
              zipfile.readEntry();
            });
          } else {
            zipfile.readEntry();
          }
        });
        zipfile.once("end", () => resolve());
        zipfile.on("error", (e) => reject(e));
        // Kick off lazy iteration.
        zipfile.readEntry();
      });
    });
  }
  // Collects all matching entries, buffering each entry's content eagerly
  // (the content() accessor resolves an already-collected Buffer).
  async files() {
    this.onlyOnce();
    const files = Array();
    const temporary = await this.streamToTemporaryFile(this.stream);
    await this.forEveryZipEntry(temporary.fileName, async (entry, content) => {
      files.push({
        path: this.getInnerPath(entry.fileName),
        content: async () => await streamToBuffer(content),
        // lastModFileTime — presumably an epoch-compatible timestamp from
        // yauzl; TODO confirm units against the yauzl API docs.
        lastModifiedAt: entry.lastModFileTime ? new Date(entry.lastModFileTime) : void 0
      });
    });
    await temporary.cleanup();
    return files;
  }
  // Returns the archive as a stream; when a subPath is set, a new zip is
  // assembled containing only the subtree (with the prefix stripped).
  async archive() {
    this.onlyOnce();
    if (!this.subPath) {
      return this.stream;
    }
    const archive = archiver__default.default("zip");
    const temporary = await this.streamToTemporaryFile(this.stream);
    await this.forEveryZipEntry(temporary.fileName, async (entry, content) => {
      archive.append(await streamToBuffer(content), {
        name: this.getInnerPath(entry.fileName)
      });
    });
    // NOTE(review): finalize() returns a promise that is not awaited here; the
    // archive stream is returned while finalization may still be in flight.
    archive.finalize();
    await temporary.cleanup();
    return archive;
  }
  // Extracts the (filtered) tree to options.targetDir or a fresh temp dir
  // under workDir, returning the directory path. Paths are confined to the
  // target directory via resolveSafeChildPath (zip-slip protection).
  async dir(options) {
    this.onlyOnce();
    const dir = options?.targetDir ?? await fs__default.default.mkdtemp(platformPath__default.default.join(this.workDir, "backstage-"));
    const temporary = await this.streamToTemporaryFile(this.stream);
    await this.forEveryZipEntry(temporary.fileName, async (entry, content) => {
      const entryPath = this.getInnerPath(entry.fileName);
      const dirname = platformPath__default.default.dirname(entryPath);
      if (dirname) {
        await fs__default.default.mkdirp(backendPluginApi.resolveSafeChildPath(dir, dirname));
      }
      return new Promise(async (resolve, reject) => {
        const file = fs__default.default.createWriteStream(backendPluginApi.resolveSafeChildPath(dir, entryPath));
        file.on("finish", resolve);
        content.on("error", reject);
        content.pipe(file);
      });
    });
    await temporary.cleanup();
    return dir;
  }
}
2212
+
2213
const pipeline = util.promisify(stream.pipeline);
/**
 * A ReadTreeResponse built from an in-memory array of
 * `{ path, data, lastModifiedAt }` entries (as produced e.g. by the S3
 * reader), where `data` is a readable stream per file. May only be
 * consumed once; files()/archive()/dir() are mutually exclusive.
 */
class ReadableArrayResponse {
  /**
   * @param stream - array of `{ path, data, lastModifiedAt }` entries
   * @param workDir - base directory for temporary extraction targets
   * @param etag - caching identifier surfaced to the caller
   */
  constructor(stream, workDir, etag) {
    this.stream = stream;
    this.workDir = workDir;
    // Fixed: etag was previously assigned twice in a row.
    this.etag = etag;
  }
  read = false;
  // Make sure the input stream is only read once
  onlyOnce() {
    if (this.read) {
      throw new Error("Response has already been read");
    }
    this.read = true;
  }
  /**
   * Lists all file entries; directory markers (paths ending in "/") are
   * skipped. Contents are read lazily via the returned `content` thunk.
   */
  async files() {
    this.onlyOnce();
    const files = Array();
    for (const entry of this.stream) {
      if (!entry.path.endsWith("/")) {
        files.push({
          path: entry.path,
          content: () => getRawBody__default.default(entry.data),
          lastModifiedAt: entry?.lastModifiedAt
        });
      }
    }
    return files;
  }
  /**
   * Materializes the entries into a temp directory and tars it up,
   * returning the tarball as a Readable. The temp dir is always removed.
   */
  async archive() {
    const tmpDir = await this.dir();
    try {
      const data = await new Promise((resolve, reject) => {
        // Reject explicitly on pipeline failure instead of using an async
        // executor whose rejection would previously have gone unhandled.
        pipeline(
          tar__default.default.create({ cwd: tmpDir }, [""]),
          concatStream__default.default(resolve)
        ).catch(reject);
      });
      return stream.Readable.from(data);
    } finally {
      await fs__default.default.remove(tmpDir);
    }
  }
  /**
   * Writes all file entries to options.targetDir or a fresh temp dir under
   * workDir, creating intermediate directories, and returns the directory.
   */
  async dir(options) {
    this.onlyOnce();
    const dir = options?.targetDir ?? await fs__default.default.mkdtemp(platformPath__default.default.join(this.workDir, "backstage-"));
    for (const entry of this.stream) {
      if (!entry.path.endsWith("/")) {
        const filePath = platformPath__default.default.join(dir, entry.path);
        await fs__default.default.mkdir(platformPath.dirname(filePath), { recursive: true });
        await pipeline(entry.data, fs__default.default.createWriteStream(filePath));
      }
    }
    return dir;
  }
}
2270
+
2271
/**
 * Default factory for ReadTreeResponse implementations, wrapping tar/zip
 * streams and readable arrays. Temporary files go under the configured
 * `backend.workingDirectory`, falling back to the OS temp dir.
 */
class DefaultReadTreeResponseFactory {
  constructor(workDir) {
    this.workDir = workDir;
  }
  /** Creates a factory from root config. */
  static create(options) {
    const workDir = options.config.getOptionalString("backend.workingDirectory") ?? os__default.default.tmpdir();
    return new DefaultReadTreeResponseFactory(workDir);
  }
  /** Wraps a tar stream; subpath defaults to "", stripFirstDirectory to true. */
  async fromTarArchive(options) {
    const { stream, subpath, etag, filter, stripFirstDirectory } = options;
    return new TarArchiveResponse(
      stream,
      subpath ?? "",
      this.workDir,
      etag,
      filter,
      stripFirstDirectory ?? true
    );
  }
  /** Wraps a zip stream; subpath defaults to "". */
  async fromZipArchive(options) {
    const { stream, subpath, etag, filter } = options;
    return new ZipArchiveResponse(stream, subpath ?? "", this.workDir, etag, filter);
  }
  /** Wraps an in-memory array of file entries; the etag is always empty. */
  async fromReadableArray(options) {
    return new ReadableArrayResponse(options, this.workDir, "");
  }
}
2303
+
2304
// Inlined copy of this package's package.json, bundled at build time and
// exposed below as `packageinfo` (used e.g. for user-agent strings).
const name = "@backstage/backend-defaults";
const version = "0.3.0";
const description = "Backend defaults used by Backstage backend apps";
const backstage = { role: "node-library" };
const publishConfig = { access: "public" };
const keywords = ["backstage"];
const homepage = "https://backstage.io";
const repository = {
  type: "git",
  url: "https://github.com/backstage/backstage",
  directory: "packages/backend-defaults"
};
const license = "Apache-2.0";
const exports$1 = {
  ".": "./src/index.ts",
  "./auth": "./src/entrypoints/auth/index.ts",
  "./cache": "./src/entrypoints/cache/index.ts",
  "./database": "./src/entrypoints/database/index.ts",
  "./discovery": "./src/entrypoints/discovery/index.ts",
  "./httpAuth": "./src/entrypoints/httpAuth/index.ts",
  "./httpRouter": "./src/entrypoints/httpRouter/index.ts",
  "./lifecycle": "./src/entrypoints/lifecycle/index.ts",
  "./logger": "./src/entrypoints/logger/index.ts",
  "./permissions": "./src/entrypoints/permissions/index.ts",
  "./rootConfig": "./src/entrypoints/rootConfig/index.ts",
  "./rootHttpRouter": "./src/entrypoints/rootHttpRouter/index.ts",
  "./rootLifecycle": "./src/entrypoints/rootLifecycle/index.ts",
  "./rootLogger": "./src/entrypoints/rootLogger/index.ts",
  "./scheduler": "./src/entrypoints/scheduler/index.ts",
  "./urlReader": "./src/entrypoints/urlReader/index.ts",
  "./userInfo": "./src/entrypoints/userInfo/index.ts",
  "./package.json": "./package.json"
};
const main = "src/index.ts";
const types = "src/index.ts";
const typesVersions = {
  "*": {
    auth: ["src/entrypoints/auth/index.ts"],
    cache: ["src/entrypoints/cache/index.ts"],
    database: ["src/entrypoints/database/index.ts"],
    discovery: ["src/entrypoints/discovery/index.ts"],
    httpAuth: ["src/entrypoints/httpAuth/index.ts"],
    httpRouter: ["src/entrypoints/httpRouter/index.ts"],
    lifecycle: ["src/entrypoints/lifecycle/index.ts"],
    logger: ["src/entrypoints/logger/index.ts"],
    permissions: ["src/entrypoints/permissions/index.ts"],
    rootConfig: ["src/entrypoints/rootConfig/index.ts"],
    rootHttpRouter: ["src/entrypoints/rootHttpRouter/index.ts"],
    rootLifecycle: ["src/entrypoints/rootLifecycle/index.ts"],
    rootLogger: ["src/entrypoints/rootLogger/index.ts"],
    scheduler: ["src/entrypoints/scheduler/index.ts"],
    urlReader: ["src/entrypoints/urlReader/index.ts"],
    userInfo: ["src/entrypoints/userInfo/index.ts"],
    "package.json": ["package.json"]
  }
};
const files = ["config.d.ts", "dist", "migrations"];
const scripts = {
  build: "backstage-cli package build",
  clean: "backstage-cli package clean",
  lint: "backstage-cli package lint",
  prepack: "backstage-cli package prepack",
  postpack: "backstage-cli package postpack",
  start: "backstage-cli package start",
  test: "backstage-cli package test"
};
const dependencies = {
  "@aws-sdk/abort-controller": "^3.347.0",
  "@aws-sdk/client-codecommit": "^3.350.0",
  "@aws-sdk/client-s3": "^3.350.0",
  "@aws-sdk/credential-providers": "^3.350.0",
  "@aws-sdk/types": "^3.347.0",
  "@backstage/backend-app-api": "workspace:^",
  "@backstage/backend-common": "workspace:^",
  "@backstage/backend-dev-utils": "workspace:^",
  "@backstage/backend-plugin-api": "workspace:^",
  "@backstage/cli-common": "workspace:^",
  "@backstage/config": "workspace:^",
  "@backstage/config-loader": "workspace:^",
  "@backstage/errors": "workspace:^",
  "@backstage/integration": "workspace:^",
  "@backstage/integration-aws-node": "workspace:^",
  "@backstage/plugin-auth-node": "workspace:^",
  "@backstage/plugin-events-node": "workspace:^",
  "@backstage/plugin-permission-node": "workspace:^",
  "@backstage/types": "workspace:^",
  "@google-cloud/storage": "^7.0.0",
  "@keyv/memcache": "^1.3.5",
  "@keyv/redis": "^2.5.3",
  "@manypkg/get-packages": "^1.1.3",
  "@octokit/rest": "^19.0.3",
  "@opentelemetry/api": "^1.3.0",
  "@types/cors": "^2.8.6",
  "@types/express": "^4.17.6",
  archiver: "^6.0.0",
  "base64-stream": "^1.0.0",
  "better-sqlite3": "^9.0.0",
  compression: "^1.7.4",
  "concat-stream": "^2.0.0",
  cookie: "^0.6.0",
  cors: "^2.8.5",
  cron: "^3.0.0",
  express: "^4.17.1",
  "express-promise-router": "^4.1.0",
  "fs-extra": "^11.2.0",
  "git-url-parse": "^14.0.0",
  helmet: "^6.0.0",
  "isomorphic-git": "^1.23.0",
  jose: "^5.0.0",
  keyv: "^4.5.2",
  knex: "^3.0.0",
  lodash: "^4.17.21",
  logform: "^2.3.2",
  luxon: "^3.0.0",
  minimatch: "^9.0.0",
  minimist: "^1.2.5",
  morgan: "^1.10.0",
  mysql2: "^3.0.0",
  "node-fetch": "^2.6.7",
  "node-forge": "^1.3.1",
  "p-limit": "^3.1.0",
  "path-to-regexp": "^6.2.1",
  pg: "^8.11.3",
  "pg-connection-string": "^2.3.0",
  "raw-body": "^2.4.1",
  selfsigned: "^2.0.0",
  stoppable: "^1.1.0",
  tar: "^6.1.12",
  "triple-beam": "^1.4.1",
  uuid: "^9.0.0",
  winston: "^3.2.1",
  "winston-transport": "^4.5.0",
  yauzl: "^3.0.0",
  yn: "^4.0.0",
  zod: "^3.22.4"
};
const devDependencies = {
  "@aws-sdk/util-stream-node": "^3.350.0",
  "@backstage/backend-plugin-api": "workspace:^",
  "@backstage/backend-test-utils": "workspace:^",
  "@backstage/cli": "workspace:^",
  "@types/http-errors": "^2.0.0",
  "@types/morgan": "^1.9.0",
  "@types/node-forge": "^1.3.0",
  "@types/stoppable": "^1.1.0",
  "aws-sdk-client-mock": "^4.0.0",
  "http-errors": "^2.0.0",
  msw: "^1.0.0",
  supertest: "^6.1.3",
  "wait-for-expect": "^3.0.2"
};
const configSchema = "config.d.ts";
// Assembled view matching the original package.json shape.
const packageinfo = {
  name,
  version,
  description,
  backstage,
  publishConfig,
  keywords,
  homepage,
  repository,
  license,
  exports: exports$1,
  main,
  types,
  typesVersions,
  files,
  scripts,
  dependencies,
  devDependencies,
  configSchema
};
2520
+
2521
// Host that every valid Google Cloud Storage browser URL must use.
const GOOGLE_GCS_HOST = "storage.cloud.google.com";

/**
 * Splits a GCS browser URL into its host, bucket name and object key.
 *
 * @param url - e.g. `https://storage.cloud.google.com/<bucket>/<key...>`
 * @throws Error when the URL's host is not storage.cloud.google.com.
 */
const parseURL = (url) => {
  const { host, pathname } = new URL(url);
  if (host !== GOOGLE_GCS_HOST) {
    throw new Error(`not a valid GCS URL: ${url}`);
  }
  // pathname is "/<bucket>/<key parts...>"; segment 0 is the empty string
  // before the leading slash.
  const segments = pathname.split("/");
  return {
    host,
    bucket: segments[1],
    key: segments.slice(2).join("/")
  };
};
2534
/**
 * A UrlReaderService implementation for objects stored in Google Cloud
 * Storage buckets. Supports single-object reads and prefix-based search;
 * readTree is not implemented.
 */
class GoogleGcsUrlReader {
  constructor(integration, storage) {
    this.integration = integration;
    this.storage = storage;
  }
  // Registers a reader only when `integrations.googleGcs` is configured.
  static factory = ({ config, logger }) => {
    if (!config.has("integrations.googleGcs")) {
      return [];
    }
    const gcsConfig = integration.readGoogleGcsIntegrationConfig(
      config.getConfig("integrations.googleGcs")
    );
    const userAgent = `backstage/backend-defaults.GoogleGcsUrlReader/${packageinfo.version}`;
    let storage;
    if (gcsConfig.clientEmail && gcsConfig.privateKey) {
      storage = new GoogleCloud__namespace.Storage({
        credentials: {
          client_email: gcsConfig.clientEmail || void 0,
          private_key: gcsConfig.privateKey || void 0
        },
        userAgent
      });
    } else {
      // Fall back to the ambient credential chain (ADC) when no explicit
      // service-account credentials are configured.
      logger.info(
        "googleGcs credentials not found in config. Using default credentials provider."
      );
      storage = new GoogleCloud__namespace.Storage({ userAgent });
    }
    return [
      {
        reader: new GoogleGcsUrlReader(gcsConfig, storage),
        predicate: (url) => url.host === GOOGLE_GCS_HOST
      }
    ];
  };
  // Opens a streaming download for the object addressed by a GCS URL.
  readStreamFromUrl(url) {
    const { bucket, key } = parseURL(url);
    return this.storage.bucket(bucket).file(key).createReadStream();
  }
  // Buffers the whole object into memory; failures are wrapped with the URL.
  async read(url) {
    try {
      return await getRawBody__default.default(this.readStreamFromUrl(url));
    } catch (error) {
      throw new Error(`unable to read gcs file from ${url}, ${error}`);
    }
  }
  async readUrl(url, _options) {
    return ReadUrlResponseFactory.fromReadable(this.readStreamFromUrl(url));
  }
  async readTree() {
    throw new Error("GcsUrlReader does not implement readTree");
  }
  // Supports only key patterns with a single "*" at the very end, i.e.
  // prefix searches.
  async search(url) {
    const { bucket, key: pattern } = parseURL(url);
    const isPrefixPattern =
      pattern.endsWith("*") && pattern.indexOf("*") === pattern.length - 1;
    if (!isPrefixPattern) {
      throw new Error("GcsUrlReader only supports prefix-based searches");
    }
    const [matches] = await this.storage.bucket(bucket).getFiles({
      autoPaginate: true,
      prefix: pattern.slice(0, -1) // drop the single trailing "*"
    });
    return {
      files: matches.map((file) => {
        const fullUrl = `https://${GOOGLE_GCS_HOST}/${bucket}/${file.name}`;
        return {
          url: fullUrl,
          content: async () => {
            const readResponse = await this.readUrl(fullUrl);
            return readResponse.buffer();
          }
        };
      }),
      // TODO etag is not implemented yet.
      etag: "NOT/IMPLEMENTED"
    };
  }
  toString() {
    const authed = Boolean(this.integration.privateKey);
    return `googleGcs{host=${GOOGLE_GCS_HOST},authed=${authed}}`;
  }
}
2616
+
2617
/**
 * Parses an AWS CodeCommit console URL into its region, repository name,
 * file path and commit specifier.
 *
 * @param url - A `*.console.aws.amazon.com/codesuite/codecommit/...` URL.
 * @param requireGitPath - When true, the URL must point at a concrete file
 *   (i.e. contain a `/browse/.../--/<path>` segment).
 * @throws Error for edit URLs, unexpected hosts, and (when requireGitPath
 *   is set) URLs without a full file path.
 */
function parseUrl(url, requireGitPath = false) {
  const { host, pathname } = new URL(url);
  if (pathname.includes("/files/edit/")) {
    throw new Error(
      "Please provide the view url to yaml file from CodeCommit, not the edit url"
    );
  }
  if (requireGitPath && !pathname.includes("/browse/")) {
    throw new Error("Please provide full path to yaml file from CodeCommit");
  }
  // The region is the first host label, e.g. "eu-west-1.console.aws.amazon.com".
  const hostMatch = host.match(/^([^\.]+)\.console\.aws\.amazon\.com$/);
  if (!hostMatch) {
    throw new Error(
      `Invalid AWS CodeCommit URL (unexpected host format): ${url}`
    );
  }
  const [, region] = hostMatch;
  const pathMatch = pathname.match(
    /^\/codesuite\/codecommit\/repositories\/([^\/]+)\/browse\/((.*)\/)?--\/(.*)$/
  );
  if (pathMatch) {
    const [, repositoryName, , commitSpecifier, path] = pathMatch;
    return {
      region,
      repositoryName,
      path,
      // the commitSpecifier is passed to AWS SDK which does not allow empty strings so replace empty string with undefined
      commitSpecifier: commitSpecifier === "" ? void 0 : commitSpecifier
    };
  }
  if (requireGitPath) {
    throw new Error(
      `Invalid AWS CodeCommit URL (unexpected path format): ${url}`
    );
  }
  // Repository-root URL without a "--/<path>" segment: derive the repository
  // name (and optional commit specifier) and default the path to "/".
  const stripped = pathname
    .split("/--/")[0]
    .replace("/codesuite/codecommit/repositories/", "");
  const [rawRepo, rawCommit] = stripped.split("/browse");
  const trimSlashes = (s) => s.replace(/^\/|\/$/g, "");
  return {
    region,
    repositoryName: trimSlashes(rawRepo),
    path: "/",
    // Empty or absent commit segments become undefined (see note above).
    commitSpecifier: rawCommit ? trimSlashes(rawCommit) : void 0
  };
}
2663
/**
 * A UrlReaderService implementation for AWS CodeCommit console URLs.
 * Supports readUrl and readTree; search is not implemented.
 */
class AwsCodeCommitUrlReader {
  constructor(credsManager, integration, deps) {
    this.credsManager = credsManager;
    this.integration = integration;
    this.deps = deps;
  }
  // One reader per configured awsCodeCommit integration, matched on the
  // integration host plus the CodeCommit console path prefix.
  static factory = ({ config, treeResponseFactory }) => {
    const integrations = integration.ScmIntegrations.fromConfig(config);
    const credsManager =
      integrationAwsNode.DefaultAwsCredentialsManager.fromConfig(config);
    return integrations.awsCodeCommit.list().map((codeCommitIntegration) => ({
      reader: new AwsCodeCommitUrlReader(credsManager, codeCommitIntegration, {
        treeResponseFactory
      }),
      predicate: (url) =>
        url.host.endsWith(codeCommitIntegration.config.host) &&
        url.pathname.startsWith("/codesuite/codecommit")
    }));
  };
  /**
   * If accessKeyId and secretAccessKey are missing, the standard credentials provider chain will be used:
   * https://docs.aws.amazon.com/AWSJavaSDK/latest/javadoc/com/amazonaws/auth/DefaultAWSCredentialsProviderChain.html
   */
  static buildStaticCredentials(accessKeyId, secretAccessKey) {
    return async () => ({ accessKeyId, secretAccessKey });
  }
  // Resolves an SDK credential provider for the integration, optionally
  // assuming a role via temporary credentials when roleArn is configured.
  static async buildCredentials(credsManager, region, integration) {
    if (!integration) {
      return (await credsManager.getCredentialProvider()).sdkCredentialProvider;
    }
    const { accessKeyId, secretAccessKey } = integration.config;
    const baseCredentials =
      accessKeyId && secretAccessKey
        ? AwsCodeCommitUrlReader.buildStaticCredentials(
            accessKeyId,
            secretAccessKey
          )
        : (await credsManager.getCredentialProvider()).sdkCredentialProvider;
    const roleArn = integration.config.roleArn;
    if (!roleArn) {
      return baseCredentials;
    }
    return credentialProviders.fromTemporaryCredentials({
      masterCredentials: baseCredentials,
      params: {
        RoleSessionName: "backstage-aws-code-commit-url-reader",
        RoleArn: roleArn,
        ExternalId: integration.config.externalId
      },
      clientConfig: { region }
    });
  }
  // Creates a CodeCommit client for the given region with resolved credentials.
  async buildCodeCommitClient(credsManager, region, integration) {
    const credentials = await AwsCodeCommitUrlReader.buildCredentials(
      credsManager,
      region,
      integration
    );
    return new clientCodecommit.CodeCommitClient({ region, credentials });
  }
  // Fetches a single file; honors options.signal for cancellation and
  // options.etag for NotModified short-circuiting (etag == commit id).
  async readUrl(url, options) {
    try {
      const { path, repositoryName, region, commitSpecifier } = parseUrl(
        url,
        true
      );
      const codeCommitClient = await this.buildCodeCommitClient(
        this.credsManager,
        region,
        this.integration
      );
      const aborter = new abortController.AbortController();
      options?.signal?.addEventListener("abort", () => aborter.abort());
      const response = await codeCommitClient.send(
        new clientCodecommit.GetFileCommand({
          repositoryName,
          commitSpecifier,
          filePath: path
        }),
        { abortSignal: aborter.signal }
      );
      if (options?.etag && options.etag === response.commitId) {
        throw new errors.NotModifiedError();
      }
      return ReadUrlResponseFactory.fromReadable(
        stream.Readable.from([response?.fileContent]),
        { etag: response.commitId }
      );
    } catch (e) {
      if (e.$metadata?.httpStatusCode === 304) {
        throw new errors.NotModifiedError();
      }
      if (e.name === "NotModifiedError") {
        throw new errors.NotModifiedError();
      }
      throw new errors.ForwardedError(
        "Could not retrieve file from CodeCommit",
        e
      );
    }
  }
  // Recursively collects the absolute paths of all files under `path`.
  // Throws NotModifiedError when the provided etag matches the current commit.
  async readTreePath(
    codeCommitClient,
    abortSignal,
    path,
    repositoryName,
    commitSpecifier,
    etag
  ) {
    const response = await codeCommitClient.send(
      new clientCodecommit.GetFolderCommand({
        folderPath: path,
        repositoryName,
        commitSpecifier
      }),
      { abortSignal }
    );
    if (etag && etag === response.commitId) {
      throw new errors.NotModifiedError();
    }
    const output = [];
    for (const file of response.files ?? []) {
      if (file.absolutePath) {
        output.push(file.absolutePath);
      }
    }
    for (const subFolder of response.subFolders ?? []) {
      if (subFolder.absolutePath) {
        output.push(
          ...(await this.readTreePath(
            codeCommitClient,
            abortSignal,
            subFolder.absolutePath,
            repositoryName,
            commitSpecifier,
            etag
          ))
        );
      }
    }
    return output;
  }
  // Downloads every file under the URL's path, one request per file, and
  // assembles them into a tree response with paths relative to that root.
  async readTree(url, options) {
    try {
      const { path, repositoryName, region, commitSpecifier } = parseUrl(url);
      const codeCommitClient = await this.buildCodeCommitClient(
        this.credsManager,
        region,
        this.integration
      );
      const aborter = new abortController.AbortController();
      options?.signal?.addEventListener("abort", () => aborter.abort());
      const allFiles = await this.readTreePath(
        codeCommitClient,
        aborter.signal,
        path,
        repositoryName,
        commitSpecifier,
        options?.etag
      );
      const basePath = path.startsWith("/") ? path : `/${path}`;
      const responses = [];
      for (const filePath of allFiles) {
        const response = await codeCommitClient.send(
          new clientCodecommit.GetFileCommand({
            repositoryName,
            filePath: String(filePath),
            commitSpecifier
          })
        );
        responses.push({
          data: stream.Readable.from([response?.fileContent]),
          path: posix.relative(
            basePath,
            filePath.startsWith("/") ? filePath : `/${filePath}`
          )
        });
      }
      return await this.deps.treeResponseFactory.fromReadableArray(responses);
    } catch (e) {
      if (e.name === "NotModifiedError") {
        throw new errors.NotModifiedError();
      }
      throw new errors.ForwardedError(
        "Could not retrieve file tree from CodeCommit",
        e
      );
    }
  }
  async search() {
    throw new Error("AwsCodeCommitReader does not implement search");
  }
  toString() {
    const authed = Boolean(this.integration.config.secretAccessKey);
    return `awsCodeCommit{host=${this.integration.config.host},authed=${authed}}`;
  }
}
2874
+
2875
/**
 * Assembles a {@link @backstage/backend-plugin-api#UrlReaderService} from
 * individual reader factories.
 */
class UrlReaders {
  /**
   * Creates a custom {@link @backstage/backend-plugin-api#UrlReaderService} wrapper for your own set of factories.
   */
  static create(options) {
    const { logger, config, factories } = options;
    const treeResponseFactory = DefaultReadTreeResponseFactory.create({
      config
    });
    const mux = new UrlReaderPredicateMux();
    // Each factory may register zero or more reader/predicate tuples.
    for (const factory of factories ?? []) {
      for (const tuple of factory({ config, logger, treeResponseFactory })) {
        mux.register(tuple);
      }
    }
    return mux;
  }
  /**
   * Creates a {@link @backstage/backend-plugin-api#UrlReaderService} wrapper that includes all the default factories
   * from this package.
   *
   * Any additional factories passed will be loaded before the default ones.
   */
  static default(options) {
    const { logger, config, factories = [] } = options;
    return UrlReaders.create({
      logger,
      config,
      factories: [
        ...factories,
        AzureUrlReader.factory,
        BitbucketCloudUrlReader.factory,
        BitbucketServerUrlReader.factory,
        BitbucketUrlReader.factory,
        GerritUrlReader.factory,
        GithubUrlReader.factory,
        GiteaUrlReader.factory,
        GitlabUrlReader.factory,
        GoogleGcsUrlReader.factory,
        HarnessUrlReader.factory,
        AwsS3UrlReader.factory,
        AwsCodeCommitUrlReader.factory,
        FetchUrlReader.factory
      ]
    });
  }
}
5
2922
 
6
2923
  const urlReaderServiceFactory = backendPluginApi.createServiceFactory({
7
2924
  service: backendPluginApi.coreServices.urlReader,
@@ -10,12 +2927,25 @@ const urlReaderServiceFactory = backendPluginApi.createServiceFactory({
10
2927
  logger: backendPluginApi.coreServices.logger
11
2928
  },
12
2929
  async factory({ config, logger }) {
13
- return backendCommon.UrlReaders.default({
2930
+ return UrlReaders.default({
14
2931
  config,
15
2932
  logger
16
2933
  });
17
2934
  }
18
2935
  });
19
2936
 
2937
// Public re-exports of the bundled URL reader implementations. Kept as
// individual `exports.X = X` assignments so that Node's CJS named-export
// detection continues to pick them up.
exports.AwsS3UrlReader = AwsS3UrlReader;
exports.AzureUrlReader = AzureUrlReader;
exports.BitbucketCloudUrlReader = BitbucketCloudUrlReader;
exports.BitbucketServerUrlReader = BitbucketServerUrlReader;
exports.BitbucketUrlReader = BitbucketUrlReader;
exports.FetchUrlReader = FetchUrlReader;
exports.GerritUrlReader = GerritUrlReader;
exports.GiteaUrlReader = GiteaUrlReader;
exports.GithubUrlReader = GithubUrlReader;
exports.GitlabUrlReader = GitlabUrlReader;
exports.HarnessUrlReader = HarnessUrlReader;
exports.ReadUrlResponseFactory = ReadUrlResponseFactory;
exports.UrlReaders = UrlReaders;
exports.urlReaderServiceFactory = urlReaderServiceFactory;
21
2951
  //# sourceMappingURL=urlReader.cjs.js.map