@backstage/backend-defaults 0.3.0-next.2 → 0.3.0-next.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,7 +1,2982 @@
  'use strict';
 
- var backendCommon = require('@backstage/backend-common');
+ var integration = require('@backstage/integration');
+ var fetch = require('node-fetch');
+ var minimatch = require('minimatch');
+ var stream = require('stream');
+ var errors = require('@backstage/errors');
+ var getRawBody = require('raw-body');
+ var parseGitUrl = require('git-url-parse');
+ var lodash = require('lodash');
+ var base64Stream = require('base64-stream');
+ var concatStream = require('concat-stream');
+ var fs = require('fs-extra');
+ var os = require('os');
+ var platformPath = require('path');
+ var tar = require('tar');
+ var util = require('util');
+ var isomorphicGit = require('isomorphic-git');
+ var http = require('isomorphic-git/http/node');
+ var integrationAwsNode = require('@backstage/integration-aws-node');
+ var credentialProviders = require('@aws-sdk/credential-providers');
+ var clientS3 = require('@aws-sdk/client-s3');
+ var abortController = require('@aws-sdk/abort-controller');
+ var posix = require('path/posix');
+ var archiver = require('archiver');
+ var yauzl = require('yauzl');
  var backendPluginApi = require('@backstage/backend-plugin-api');
+ var GoogleCloud = require('@google-cloud/storage');
+ var clientCodecommit = require('@aws-sdk/client-codecommit');
+
+ function _interopDefaultCompat (e) { return e && typeof e === 'object' && 'default' in e ? e : { default: e }; }
+
+ function _interopNamespaceCompat(e) {
+   if (e && typeof e === 'object' && 'default' in e) return e;
+   var n = Object.create(null);
+   if (e) {
+     Object.keys(e).forEach(function (k) {
+       if (k !== 'default') {
+         var d = Object.getOwnPropertyDescriptor(e, k);
+         Object.defineProperty(n, k, d.get ? d : {
+           enumerable: true,
+           get: function () { return e[k]; }
+         });
+       }
+     });
+   }
+   n.default = e;
+   return Object.freeze(n);
+ }
+
+ var fetch__default = /*#__PURE__*/_interopDefaultCompat(fetch);
+ var getRawBody__default = /*#__PURE__*/_interopDefaultCompat(getRawBody);
+ var parseGitUrl__default = /*#__PURE__*/_interopDefaultCompat(parseGitUrl);
+ var concatStream__default = /*#__PURE__*/_interopDefaultCompat(concatStream);
+ var fs__default = /*#__PURE__*/_interopDefaultCompat(fs);
+ var os__default = /*#__PURE__*/_interopDefaultCompat(os);
+ var platformPath__default = /*#__PURE__*/_interopDefaultCompat(platformPath);
+ var tar__default = /*#__PURE__*/_interopDefaultCompat(tar);
+ var isomorphicGit__default = /*#__PURE__*/_interopDefaultCompat(isomorphicGit);
+ var http__default = /*#__PURE__*/_interopDefaultCompat(http);
+ var archiver__default = /*#__PURE__*/_interopDefaultCompat(archiver);
+ var yauzl__default = /*#__PURE__*/_interopDefaultCompat(yauzl);
+ var GoogleCloud__namespace = /*#__PURE__*/_interopNamespaceCompat(GoogleCloud);
+
+ class ReadUrlResponseFactory {
+   /**
+    * Resolves a ReadUrlResponse from a Readable stream.
+    */
+   static async fromReadable(stream, options) {
+     let buffer;
+     const conflictError = new errors.ConflictError(
+       "Cannot use buffer() and stream() from the same ReadUrlResponse"
+     );
+     let hasCalledStream = false;
+     let hasCalledBuffer = false;
+     return {
+       buffer: () => {
+         hasCalledBuffer = true;
+         if (hasCalledStream)
+           throw conflictError;
+         if (buffer)
+           return buffer;
+         buffer = getRawBody__default.default(stream);
+         return buffer;
+       },
+       stream: () => {
+         hasCalledStream = true;
+         if (hasCalledBuffer)
+           throw conflictError;
+         return stream;
+       },
+       etag: options?.etag,
+       lastModifiedAt: options?.lastModifiedAt
+     };
+   }
+   /**
+    * Resolves a ReadUrlResponse from an old-style NodeJS.ReadableStream.
+    */
+   static async fromNodeJSReadable(oldStyleStream, options) {
+     const readable = stream.Readable.from(oldStyleStream);
+     return ReadUrlResponseFactory.fromReadable(readable, options);
+   }
+ }
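
A quick aside on the API above: a minimal usage sketch (illustrative only; the input stream and etag are made-up values, and buffer() and stream() are mutually exclusive on a single response):

    const { Readable } = require('stream');

    // hypothetical input stream
    const readable = Readable.from(Buffer.from('hello'));
    const response = await ReadUrlResponseFactory.fromReadable(readable, {
      etag: 'abc123',
    });
    const contents = await response.buffer(); // resolves to a Buffer via raw-body
    // calling response.stream() after buffer() on the same response throws errors.ConflictError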
+
+ class AzureUrlReader {
+   constructor(integration, deps) {
+     this.integration = integration;
+     this.deps = deps;
+   }
+   static factory = ({ config, treeResponseFactory }) => {
+     const integrations = integration.ScmIntegrations.fromConfig(config);
+     const credentialProvider = integration.DefaultAzureDevOpsCredentialsProvider.fromIntegrations(integrations);
+     return integrations.azure.list().map((integration) => {
+       const reader = new AzureUrlReader(integration, {
+         treeResponseFactory,
+         credentialsProvider: credentialProvider
+       });
+       const predicate = (url) => url.host === integration.config.host;
+       return { reader, predicate };
+     });
+   };
+   async read(url) {
+     const response = await this.readUrl(url);
+     return response.buffer();
+   }
+   async readUrl(url, options) {
+     const { signal } = options ?? {};
+     const builtUrl = integration.getAzureFileFetchUrl(url);
+     let response;
+     try {
+       const credentials = await this.deps.credentialsProvider.getCredentials({
+         url: builtUrl
+       });
+       response = await fetch__default.default(builtUrl, {
+         headers: credentials?.headers,
+         // TODO(freben): The signal cast is there because pre-3.x versions of
+         // node-fetch have a very slightly deviating AbortSignal type signature.
+         // The difference does not affect us in practice however. The cast can
+         // be removed after we support ESM for CLI dependencies and migrate to
+         // version 3 of node-fetch.
+         // https://github.com/backstage/backstage/issues/8242
+         ...signal && { signal }
+       });
+     } catch (e) {
+       throw new Error(`Unable to read ${url}, ${e}`);
+     }
+     if (response.ok && response.status !== 203) {
+       return ReadUrlResponseFactory.fromNodeJSReadable(response.body);
+     }
+     const message = `${url} could not be read as ${builtUrl}, ${response.status} ${response.statusText}`;
+     if (response.status === 404) {
+       throw new errors.NotFoundError(message);
+     }
+     throw new Error(message);
+   }
+   async readTree(url, options) {
+     const { etag, filter, signal } = options ?? {};
+     const credentials = await this.deps.credentialsProvider.getCredentials({
+       url
+     });
+     const commitsAzureResponse = await fetch__default.default(integration.getAzureCommitsUrl(url), {
+       headers: credentials?.headers
+     });
+     if (!commitsAzureResponse.ok) {
+       const message = `Failed to read tree from ${url}, ${commitsAzureResponse.status} ${commitsAzureResponse.statusText}`;
+       if (commitsAzureResponse.status === 404) {
+         throw new errors.NotFoundError(message);
+       }
+       throw new Error(message);
+     }
+     const commitSha = (await commitsAzureResponse.json()).value[0].commitId;
+     if (etag && etag === commitSha) {
+       throw new errors.NotModifiedError();
+     }
+     const archiveAzureResponse = await fetch__default.default(integration.getAzureDownloadUrl(url), {
+       headers: {
+         ...credentials?.headers,
+         Accept: "application/zip"
+       },
+       // TODO(freben): The signal cast is there because pre-3.x versions of
+       // node-fetch have a very slightly deviating AbortSignal type signature.
+       // The difference does not affect us in practice however. The cast can be
+       // removed after we support ESM for CLI dependencies and migrate to
+       // version 3 of node-fetch.
+       // https://github.com/backstage/backstage/issues/8242
+       ...signal && { signal }
+     });
+     if (!archiveAzureResponse.ok) {
+       const message = `Failed to read tree from ${url}, ${archiveAzureResponse.status} ${archiveAzureResponse.statusText}`;
+       if (archiveAzureResponse.status === 404) {
+         throw new errors.NotFoundError(message);
+       }
+       throw new Error(message);
+     }
+     let subpath;
+     const path = new URL(url).searchParams.get("path");
+     if (path) {
+       subpath = path.split("/").filter(Boolean).slice(-1)[0];
+     }
+     return await this.deps.treeResponseFactory.fromZipArchive({
+       stream: stream.Readable.from(archiveAzureResponse.body),
+       etag: commitSha,
+       filter,
+       subpath
+     });
+   }
+   async search(url, options) {
+     const treeUrl = new URL(url);
+     const path = treeUrl.searchParams.get("path");
+     const matcher = path && new minimatch.Minimatch(path.replace(/^\/+/, ""));
+     treeUrl.searchParams.delete("path");
+     const tree = await this.readTree(treeUrl.toString(), {
+       etag: options?.etag,
+       signal: options?.signal,
+       filter: (p) => matcher ? matcher.match(p) : true
+     });
+     const files = await tree.files();
+     return {
+       etag: tree.etag,
+       files: files.map((file) => ({
+         url: this.integration.resolveUrl({
+           url: `/${file.path}`,
+           base: url
+         }),
+         content: file.content,
+         lastModifiedAt: file.lastModifiedAt
+       }))
+     };
+   }
+   toString() {
+     const { host, credentials } = this.integration.config;
+     return `azure{host=${host},authed=${Boolean(
+       credentials !== void 0 && credentials.length > 0
+     )}}`;
+   }
+ }
+
+ function parseLastModified(value) {
+   if (!value) {
+     return void 0;
+   }
+   return new Date(value);
+ }
+
+ class BitbucketCloudUrlReader {
+   constructor(integration, deps) {
+     this.integration = integration;
+     this.deps = deps;
+     const { host, username, appPassword } = integration.config;
+     if (username && !appPassword) {
+       throw new Error(
+         `Bitbucket Cloud integration for '${host}' has configured a username but is missing a required appPassword.`
+       );
+     }
+   }
+   static factory = ({ config, treeResponseFactory }) => {
+     const integrations = integration.ScmIntegrations.fromConfig(config);
+     return integrations.bitbucketCloud.list().map((integration) => {
+       const reader = new BitbucketCloudUrlReader(integration, {
+         treeResponseFactory
+       });
+       const predicate = (url) => url.host === integration.config.host;
+       return { reader, predicate };
+     });
+   };
+   async read(url) {
+     const response = await this.readUrl(url);
+     return response.buffer();
+   }
+   async readUrl(url, options) {
+     const { etag, lastModifiedAfter, signal } = options ?? {};
+     const bitbucketUrl = integration.getBitbucketCloudFileFetchUrl(
+       url,
+       this.integration.config
+     );
+     const requestOptions = integration.getBitbucketCloudRequestOptions(
+       this.integration.config
+     );
+     let response;
+     try {
+       response = await fetch__default.default(bitbucketUrl.toString(), {
+         headers: {
+           ...requestOptions.headers,
+           ...etag && { "If-None-Match": etag },
+           ...lastModifiedAfter && {
+             "If-Modified-Since": lastModifiedAfter.toUTCString()
+           }
+         },
+         // TODO(freben): The signal cast is there because pre-3.x versions of
+         // node-fetch have a very slightly deviating AbortSignal type signature.
+         // The difference does not affect us in practice however. The cast can be
+         // removed after we support ESM for CLI dependencies and migrate to
+         // version 3 of node-fetch.
+         // https://github.com/backstage/backstage/issues/8242
+         ...signal && { signal }
+       });
+     } catch (e) {
+       throw new Error(`Unable to read ${url}, ${e}`);
+     }
+     if (response.status === 304) {
+       throw new errors.NotModifiedError();
+     }
+     if (response.ok) {
+       return ReadUrlResponseFactory.fromNodeJSReadable(response.body, {
+         etag: response.headers.get("ETag") ?? void 0,
+         lastModifiedAt: parseLastModified(
+           response.headers.get("Last-Modified")
+         )
+       });
+     }
+     const message = `${url} could not be read as ${bitbucketUrl}, ${response.status} ${response.statusText}`;
+     if (response.status === 404) {
+       throw new errors.NotFoundError(message);
+     }
+     throw new Error(message);
+   }
+   async readTree(url, options) {
+     const { filepath } = parseGitUrl__default.default(url);
+     const lastCommitShortHash = await this.getLastCommitShortHash(url);
+     if (options?.etag && options.etag === lastCommitShortHash) {
+       throw new errors.NotModifiedError();
+     }
+     const downloadUrl = await integration.getBitbucketCloudDownloadUrl(
+       url,
+       this.integration.config
+     );
+     const archiveResponse = await fetch__default.default(
+       downloadUrl,
+       integration.getBitbucketCloudRequestOptions(this.integration.config)
+     );
+     if (!archiveResponse.ok) {
+       const message = `Failed to read tree from ${url}, ${archiveResponse.status} ${archiveResponse.statusText}`;
+       if (archiveResponse.status === 404) {
+         throw new errors.NotFoundError(message);
+       }
+       throw new Error(message);
+     }
+     return await this.deps.treeResponseFactory.fromTarArchive({
+       stream: stream.Readable.from(archiveResponse.body),
+       subpath: filepath,
+       etag: lastCommitShortHash,
+       filter: options?.filter
+     });
+   }
+   async search(url, options) {
+     const { filepath } = parseGitUrl__default.default(url);
+     const matcher = new minimatch.Minimatch(filepath);
+     const treeUrl = lodash.trimEnd(url.replace(filepath, ""), "/");
+     const tree = await this.readTree(treeUrl, {
+       etag: options?.etag,
+       filter: (path) => matcher.match(path)
+     });
+     const files = await tree.files();
+     return {
+       etag: tree.etag,
+       files: files.map((file) => ({
+         url: this.integration.resolveUrl({
+           url: `/${file.path}`,
+           base: url
+         }),
+         content: file.content,
+         lastModifiedAt: file.lastModifiedAt
+       }))
+     };
+   }
+   toString() {
+     const { host, username, appPassword } = this.integration.config;
+     const authed = Boolean(username && appPassword);
+     return `bitbucketCloud{host=${host},authed=${authed}}`;
+   }
+   async getLastCommitShortHash(url) {
+     const { name: repoName, owner: project, ref } = parseGitUrl__default.default(url);
+     let branch = ref;
+     if (!branch) {
+       branch = await integration.getBitbucketCloudDefaultBranch(
+         url,
+         this.integration.config
+       );
+     }
+     const commitsApiUrl = `${this.integration.config.apiBaseUrl}/repositories/${project}/${repoName}/commits/${branch}`;
+     const commitsResponse = await fetch__default.default(
+       commitsApiUrl,
+       integration.getBitbucketCloudRequestOptions(this.integration.config)
+     );
+     if (!commitsResponse.ok) {
+       const message = `Failed to retrieve commits from ${commitsApiUrl}, ${commitsResponse.status} ${commitsResponse.statusText}`;
+       if (commitsResponse.status === 404) {
+         throw new errors.NotFoundError(message);
+       }
+       throw new Error(message);
+     }
+     const commits = await commitsResponse.json();
+     if (commits && commits.values && commits.values.length > 0 && commits.values[0].hash) {
+       return commits.values[0].hash.substring(0, 12);
+     }
+     throw new Error(`Failed to read response from ${commitsApiUrl}`);
+   }
+ }
+
+ class BitbucketUrlReader {
+   constructor(integration, logger, deps) {
+     this.integration = integration;
+     this.deps = deps;
+     const { host, token, username, appPassword } = integration.config;
+     const replacement = host === "bitbucket.org" ? "bitbucketCloud" : "bitbucketServer";
+     logger.warn(
+       `[Deprecated] Please migrate from "integrations.bitbucket" to "integrations.${replacement}".`
+     );
+     if (!token && username && !appPassword) {
+       throw new Error(
+         `Bitbucket integration for '${host}' has configured a username but is missing a required appPassword.`
+       );
+     }
+   }
+   static factory = ({ config, logger, treeResponseFactory }) => {
+     const integrations = integration.ScmIntegrations.fromConfig(config);
+     return integrations.bitbucket.list().filter(
+       (item) => !integrations.bitbucketCloud.byHost(item.config.host) && !integrations.bitbucketServer.byHost(item.config.host)
+     ).map((integration) => {
+       const reader = new BitbucketUrlReader(integration, logger, {
+         treeResponseFactory
+       });
+       const predicate = (url) => url.host === integration.config.host;
+       return { reader, predicate };
+     });
+   };
+   async read(url) {
+     const response = await this.readUrl(url);
+     return response.buffer();
+   }
+   async readUrl(url, options) {
+     const { etag, lastModifiedAfter, signal } = options ?? {};
+     const bitbucketUrl = integration.getBitbucketFileFetchUrl(url, this.integration.config);
+     const requestOptions = integration.getBitbucketRequestOptions(this.integration.config);
+     let response;
+     try {
+       response = await fetch__default.default(bitbucketUrl.toString(), {
+         headers: {
+           ...requestOptions.headers,
+           ...etag && { "If-None-Match": etag },
+           ...lastModifiedAfter && {
+             "If-Modified-Since": lastModifiedAfter.toUTCString()
+           }
+         },
+         // TODO(freben): The signal cast is there because pre-3.x versions of
+         // node-fetch have a very slightly deviating AbortSignal type signature.
+         // The difference does not affect us in practice however. The cast can be
+         // removed after we support ESM for CLI dependencies and migrate to
+         // version 3 of node-fetch.
+         // https://github.com/backstage/backstage/issues/8242
+         ...signal && { signal }
+       });
+     } catch (e) {
+       throw new Error(`Unable to read ${url}, ${e}`);
+     }
+     if (response.status === 304) {
+       throw new errors.NotModifiedError();
+     }
+     if (response.ok) {
+       return ReadUrlResponseFactory.fromNodeJSReadable(response.body, {
+         etag: response.headers.get("ETag") ?? void 0,
+         lastModifiedAt: parseLastModified(
+           response.headers.get("Last-Modified")
+         )
+       });
+     }
+     const message = `${url} could not be read as ${bitbucketUrl}, ${response.status} ${response.statusText}`;
+     if (response.status === 404) {
+       throw new errors.NotFoundError(message);
+     }
+     throw new Error(message);
+   }
+   async readTree(url, options) {
+     const { filepath } = parseGitUrl__default.default(url);
+     const lastCommitShortHash = await this.getLastCommitShortHash(url);
+     if (options?.etag && options.etag === lastCommitShortHash) {
+       throw new errors.NotModifiedError();
+     }
+     const downloadUrl = await integration.getBitbucketDownloadUrl(
+       url,
+       this.integration.config
+     );
+     const archiveBitbucketResponse = await fetch__default.default(
+       downloadUrl,
+       integration.getBitbucketRequestOptions(this.integration.config)
+     );
+     if (!archiveBitbucketResponse.ok) {
+       const message = `Failed to read tree from ${url}, ${archiveBitbucketResponse.status} ${archiveBitbucketResponse.statusText}`;
+       if (archiveBitbucketResponse.status === 404) {
+         throw new errors.NotFoundError(message);
+       }
+       throw new Error(message);
+     }
+     return await this.deps.treeResponseFactory.fromTarArchive({
+       stream: stream.Readable.from(archiveBitbucketResponse.body),
+       subpath: filepath,
+       etag: lastCommitShortHash,
+       filter: options?.filter
+     });
+   }
+   async search(url, options) {
+     const { filepath } = parseGitUrl__default.default(url);
+     const matcher = new minimatch.Minimatch(filepath);
+     const treeUrl = lodash.trimEnd(url.replace(filepath, ""), "/");
+     const tree = await this.readTree(treeUrl, {
+       etag: options?.etag,
+       filter: (path) => matcher.match(path)
+     });
+     const files = await tree.files();
+     return {
+       etag: tree.etag,
+       files: files.map((file) => ({
+         url: this.integration.resolveUrl({
+           url: `/${file.path}`,
+           base: url
+         }),
+         content: file.content,
+         lastModifiedAt: file.lastModifiedAt
+       }))
+     };
+   }
+   toString() {
+     const { host, token, username, appPassword } = this.integration.config;
+     let authed = Boolean(token);
+     if (!authed) {
+       authed = Boolean(username && appPassword);
+     }
+     return `bitbucket{host=${host},authed=${authed}}`;
+   }
+   async getLastCommitShortHash(url) {
+     const { resource, name: repoName, owner: project, ref } = parseGitUrl__default.default(url);
+     let branch = ref;
+     if (!branch) {
+       branch = await integration.getBitbucketDefaultBranch(url, this.integration.config);
+     }
+     const isHosted = resource === "bitbucket.org";
+     const commitsApiUrl = isHosted ? `${this.integration.config.apiBaseUrl}/repositories/${project}/${repoName}/commits/${branch}` : `${this.integration.config.apiBaseUrl}/projects/${project}/repos/${repoName}/commits`;
+     const commitsResponse = await fetch__default.default(
+       commitsApiUrl,
+       integration.getBitbucketRequestOptions(this.integration.config)
+     );
+     if (!commitsResponse.ok) {
+       const message = `Failed to retrieve commits from ${commitsApiUrl}, ${commitsResponse.status} ${commitsResponse.statusText}`;
+       if (commitsResponse.status === 404) {
+         throw new errors.NotFoundError(message);
+       }
+       throw new Error(message);
+     }
+     const commits = await commitsResponse.json();
+     if (isHosted) {
+       if (commits && commits.values && commits.values.length > 0 && commits.values[0].hash) {
+         return commits.values[0].hash.substring(0, 12);
+       }
+     } else {
+       if (commits && commits.values && commits.values.length > 0 && commits.values[0].id) {
+         return commits.values[0].id.substring(0, 12);
+       }
+     }
+     throw new Error(`Failed to read response from ${commitsApiUrl}`);
+   }
+ }
+
+ class BitbucketServerUrlReader {
+   constructor(integration, deps) {
+     this.integration = integration;
+     this.deps = deps;
+   }
+   static factory = ({ config, treeResponseFactory }) => {
+     const integrations = integration.ScmIntegrations.fromConfig(config);
+     return integrations.bitbucketServer.list().map((integration) => {
+       const reader = new BitbucketServerUrlReader(integration, {
+         treeResponseFactory
+       });
+       const predicate = (url) => url.host === integration.config.host;
+       return { reader, predicate };
+     });
+   };
+   async read(url) {
+     const response = await this.readUrl(url);
+     return response.buffer();
+   }
+   async readUrl(url, options) {
+     const { etag, lastModifiedAfter, signal } = options ?? {};
+     const bitbucketUrl = integration.getBitbucketServerFileFetchUrl(
+       url,
+       this.integration.config
+     );
+     const requestOptions = integration.getBitbucketServerRequestOptions(
+       this.integration.config
+     );
+     let response;
+     try {
+       response = await fetch__default.default(bitbucketUrl.toString(), {
+         headers: {
+           ...requestOptions.headers,
+           ...etag && { "If-None-Match": etag },
+           ...lastModifiedAfter && {
+             "If-Modified-Since": lastModifiedAfter.toUTCString()
+           }
+         },
+         // TODO(freben): The signal cast is there because pre-3.x versions of
+         // node-fetch have a very slightly deviating AbortSignal type signature.
+         // The difference does not affect us in practice however. The cast can be
+         // removed after we support ESM for CLI dependencies and migrate to
+         // version 3 of node-fetch.
+         // https://github.com/backstage/backstage/issues/8242
+         ...signal && { signal }
+       });
+     } catch (e) {
+       throw new Error(`Unable to read ${url}, ${e}`);
+     }
+     if (response.status === 304) {
+       throw new errors.NotModifiedError();
+     }
+     if (response.ok) {
+       return ReadUrlResponseFactory.fromNodeJSReadable(response.body, {
+         etag: response.headers.get("ETag") ?? void 0,
+         lastModifiedAt: parseLastModified(
+           response.headers.get("Last-Modified")
+         )
+       });
+     }
+     const message = `${url} could not be read as ${bitbucketUrl}, ${response.status} ${response.statusText}`;
+     if (response.status === 404) {
+       throw new errors.NotFoundError(message);
+     }
+     throw new Error(message);
+   }
+   async readTree(url, options) {
+     const { filepath } = parseGitUrl__default.default(url);
+     const lastCommitShortHash = await this.getLastCommitShortHash(url);
+     if (options?.etag && options.etag === lastCommitShortHash) {
+       throw new errors.NotModifiedError();
+     }
+     const downloadUrl = await integration.getBitbucketServerDownloadUrl(
+       url,
+       this.integration.config
+     );
+     const archiveResponse = await fetch__default.default(
+       downloadUrl,
+       integration.getBitbucketServerRequestOptions(this.integration.config)
+     );
+     if (!archiveResponse.ok) {
+       const message = `Failed to read tree from ${url}, ${archiveResponse.status} ${archiveResponse.statusText}`;
+       if (archiveResponse.status === 404) {
+         throw new errors.NotFoundError(message);
+       }
+       throw new Error(message);
+     }
+     return await this.deps.treeResponseFactory.fromTarArchive({
+       stream: stream.Readable.from(archiveResponse.body),
+       subpath: filepath,
+       etag: lastCommitShortHash,
+       filter: options?.filter
+     });
+   }
+   async search(url, options) {
+     const { filepath } = parseGitUrl__default.default(url);
+     const matcher = new minimatch.Minimatch(filepath);
+     const treeUrl = lodash.trimEnd(url.replace(filepath, ""), "/");
+     const tree = await this.readTree(treeUrl, {
+       etag: options?.etag,
+       filter: (path) => matcher.match(path)
+     });
+     const files = await tree.files();
+     return {
+       etag: tree.etag,
+       files: files.map((file) => ({
+         url: this.integration.resolveUrl({
+           url: `/${file.path}`,
+           base: url
+         }),
+         content: file.content,
+         lastModifiedAt: file.lastModifiedAt
+       }))
+     };
+   }
+   toString() {
+     const { host, token } = this.integration.config;
+     const authed = Boolean(token);
+     return `bitbucketServer{host=${host},authed=${authed}}`;
+   }
+   async getLastCommitShortHash(url) {
+     const { name: repoName, owner: project, ref: branch } = parseGitUrl__default.default(url);
+     const branchParameter = branch ? `?filterText=${encodeURIComponent(branch)}` : "/default";
+     const branchListUrl = `${this.integration.config.apiBaseUrl}/projects/${project}/repos/${repoName}/branches${branchParameter}`;
+     const branchListResponse = await fetch__default.default(
+       branchListUrl,
+       integration.getBitbucketServerRequestOptions(this.integration.config)
+     );
+     if (!branchListResponse.ok) {
+       const message = `Failed to retrieve branch list from ${branchListUrl}, ${branchListResponse.status} ${branchListResponse.statusText}`;
+       if (branchListResponse.status === 404) {
+         throw new errors.NotFoundError(message);
+       }
+       throw new Error(message);
+     }
+     const branchMatches = await branchListResponse.json();
+     if (branchMatches && branchMatches.size > 0) {
+       const exactBranchMatch = branchMatches.values.filter(
+         (branchDetails) => branchDetails.displayId === branch
+       )[0];
+       return exactBranchMatch.latestCommit.substring(0, 12);
+     }
+     if (!branch && branchMatches) {
+       return branchMatches.latestCommit.substring(0, 12);
+     }
+     throw new Error(
+       `Failed to find Last Commit using ${branch ? `branch "${branch}"` : "default branch"} in response from ${branchListUrl}`
+     );
+   }
+ }
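
All of the readers in this diff follow the same factory contract: factory({ config, treeResponseFactory }) returns { reader, predicate } pairs, where the predicate receives a URL object and decides whether that reader handles it. A schematic consumer, as a sketch (the config and treeResponseFactory values are assumed to exist already; this wiring is illustrative, not the package's own):

    // hypothetical wiring that mirrors the { reader, predicate } tuples above
    const tuples = [
      ...AzureUrlReader.factory({ config, treeResponseFactory }),
      ...BitbucketCloudUrlReader.factory({ config, treeResponseFactory }),
    ];

    function readerFor(url) {
      const match = tuples.find(({ predicate }) => predicate(new URL(url)));
      if (!match) throw new Error(`No reader matches ${url}`);
      return match.reader;
    }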
+
+ function isAuthCallbackOptions(options) {
+   return "onAuth" in options;
+ }
+ class Git {
+   constructor(config) {
+     this.config = config;
+     this.onAuth = config.onAuth;
+     this.headers = {
+       "user-agent": "git/@isomorphic-git",
+       ...config.token ? { Authorization: `Bearer ${config.token}` } : {}
+     };
+   }
+   headers;
+   /** https://isomorphic-git.org/docs/en/clone */
+   async clone(options) {
+     const { url, dir, ref, depth, noCheckout } = options;
+     this.config.logger?.info(`Cloning repo {dir=${dir},url=${url}}`);
+     try {
+       return await isomorphicGit__default.default.clone({
+         fs: fs__default.default,
+         http: http__default.default,
+         url,
+         dir,
+         ref,
+         singleBranch: true,
+         depth: depth ?? 1,
+         noCheckout,
+         onProgress: this.onProgressHandler(),
+         headers: this.headers,
+         onAuth: this.onAuth
+       });
+     } catch (ex) {
+       this.config.logger?.error(`Failed to clone repo {dir=${dir},url=${url}}`);
+       if (ex.data) {
+         throw new Error(`${ex.message} {data=${JSON.stringify(ex.data)}}`);
+       }
+       throw ex;
+     }
+   }
+   onAuth;
+   onProgressHandler = () => {
+     let currentPhase = "";
+     return (event) => {
+       if (currentPhase !== event.phase) {
+         currentPhase = event.phase;
+         this.config.logger?.info(event.phase);
+       }
+       const total = event.total ? `${Math.round(event.loaded / event.total * 100)}%` : event.loaded;
+       this.config.logger?.debug(`status={${event.phase},total={${total}}}`);
+     };
+   };
+   static fromAuth = (options) => {
+     if (isAuthCallbackOptions(options)) {
+       const { onAuth, logger: logger2 } = options;
+       return new Git({ onAuth, logger: logger2 });
+     }
+     const { username, password, token, logger } = options;
+     return new Git({ onAuth: () => ({ username, password }), token, logger });
+   };
+ }
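
A minimal sketch of using the Git helper above with the username/password flavor of fromAuth (all values here are placeholders; the logger only needs info/error/debug methods, so console works for a quick test):

    const git = Git.fromAuth({
      username: 'some-user',      // placeholder
      password: 'some-password',  // placeholder
      logger: console,
    });
    await git.clone({
      url: 'https://example.com/org/repo.git', // placeholder
      dir: '/tmp/checkout',
      ref: 'main',
      depth: 1,
    });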
+
+ const pipeline$3 = util.promisify(stream.pipeline);
+ const GITILES_BASE_URL_DEPRECATION_MESSSAGE = `A gitilesBaseUrl must be provided for the gerrit integration to work. You can disable this check by setting DISABLE_GERRIT_GITILES_REQUIREMENT=1 but this will be removed in a future release. If you are not able to use the gitiles gerrit plugin, please open an issue towards https://github.com/backstage/backstage`;
+ const createTemporaryDirectory = async (workDir) => await fs__default.default.mkdtemp(platformPath.join(workDir, "/gerrit-clone-"));
+ class GerritUrlReader {
+   constructor(integration, deps, workDir) {
+     this.integration = integration;
+     this.deps = deps;
+     this.workDir = workDir;
+   }
+   static factory = ({ config, treeResponseFactory }) => {
+     const integrations = integration.ScmIntegrations.fromConfig(config);
+     if (!integrations.gerrit) {
+       return [];
+     }
+     const workDir = config.getOptionalString("backend.workingDirectory") ?? os__default.default.tmpdir();
+     return integrations.gerrit.list().map((integration) => {
+       if (integration.config.gitilesBaseUrl === integration.config.baseUrl && process.env.DISABLE_GERRIT_GITILES_REQUIREMENT === void 0) {
+         throw new Error(GITILES_BASE_URL_DEPRECATION_MESSSAGE);
+       }
+       const reader = new GerritUrlReader(
+         integration,
+         { treeResponseFactory },
+         workDir
+       );
+       const predicate = (url) => {
+         const gitilesUrl = new URL(integration.config.gitilesBaseUrl);
+         return url.host === gitilesUrl.host;
+       };
+       return { reader, predicate };
+     });
+   };
+   async read(url) {
+     const response = await this.readUrl(url);
+     return response.buffer();
+   }
+   async readUrl(url, options) {
+     const apiUrl = integration.getGerritFileContentsApiUrl(this.integration.config, url);
+     let response;
+     try {
+       response = await fetch__default.default(apiUrl, {
+         method: "GET",
+         ...integration.getGerritRequestOptions(this.integration.config),
+         // TODO(freben): The signal cast is there because pre-3.x versions of
+         // node-fetch have a very slightly deviating AbortSignal type signature.
+         // The difference does not affect us in practice however. The cast can
+         // be removed after we support ESM for CLI dependencies and migrate to
+         // version 3 of node-fetch.
+         // https://github.com/backstage/backstage/issues/8242
+         signal: options?.signal
+       });
+     } catch (e) {
+       throw new Error(`Unable to read gerrit file ${url}, ${e}`);
+     }
+     if (response.ok) {
+       let responseBody;
+       return {
+         buffer: async () => {
+           if (responseBody === void 0) {
+             responseBody = await response.text();
+           }
+           return Buffer.from(responseBody, "base64");
+         },
+         stream: () => {
+           const readable = stream.Readable.from(response.body);
+           return readable.pipe(new base64Stream.Base64Decode());
+         }
+       };
+     }
+     if (response.status === 404) {
+       throw new errors.NotFoundError(`File ${url} not found.`);
+     }
+     throw new Error(
+       `${url} could not be read as ${apiUrl}, ${response.status} ${response.statusText}`
+     );
+   }
+   async readTree(url, options) {
+     const apiUrl = integration.getGerritBranchApiUrl(this.integration.config, url);
+     let response;
+     try {
+       response = await fetch__default.default(apiUrl, {
+         method: "GET",
+         ...integration.getGerritRequestOptions(this.integration.config)
+       });
+     } catch (e) {
+       throw new Error(`Unable to read branch state ${url}, ${e}`);
+     }
+     if (response.status === 404) {
+       throw new errors.NotFoundError(`Not found: ${url}`);
+     }
+     if (!response.ok) {
+       throw new Error(
+         `${url} could not be read as ${apiUrl}, ${response.status} ${response.statusText}`
+       );
+     }
+     const branchInfo = await integration.parseGerritJsonResponse(response);
+     if (options?.etag === branchInfo.revision) {
+       throw new errors.NotModifiedError();
+     }
+     if (this.integration.config.gitilesBaseUrl !== this.integration.config.baseUrl) {
+       return this.readTreeFromGitiles(url, branchInfo.revision, options);
+     }
+     return this.readTreeFromGitClone(url, branchInfo.revision, options);
+   }
+   async search() {
+     throw new Error("GerritReader does not implement search");
+   }
+   toString() {
+     const { host, password } = this.integration.config;
+     return `gerrit{host=${host},authed=${Boolean(password)}}`;
+   }
+   async readTreeFromGitClone(url, revision, options) {
+     const { filePath } = integration.parseGerritGitilesUrl(this.integration.config, url);
+     const git = Git.fromAuth({
+       username: this.integration.config.username,
+       password: this.integration.config.password
+     });
+     const tempDir = await createTemporaryDirectory(this.workDir);
+     const cloneUrl = integration.getGerritCloneRepoUrl(this.integration.config, url);
+     try {
+       await git.clone({
+         url: cloneUrl,
+         dir: platformPath.join(tempDir, "repo"),
+         ref: revision,
+         depth: 1
+       });
+       const data = await new Promise(async (resolve) => {
+         await pipeline$3(
+           tar__default.default.create({ cwd: tempDir }, [""]),
+           concatStream__default.default(resolve)
+         );
+       });
+       const tarArchive = stream.Readable.from(data);
+       return await this.deps.treeResponseFactory.fromTarArchive({
+         stream: tarArchive,
+         subpath: filePath === "/" ? void 0 : filePath,
+         etag: revision,
+         filter: options?.filter
+       });
+     } catch (error) {
+       throw new Error(`Could not clone ${cloneUrl}: ${error}`);
+     } finally {
+       await fs__default.default.rm(tempDir, { recursive: true, force: true });
+     }
+   }
+   async readTreeFromGitiles(url, revision, options) {
+     const { branch, filePath, project } = integration.parseGerritGitilesUrl(
+       this.integration.config,
+       url
+     );
+     const archiveUrl = integration.buildGerritGitilesArchiveUrl(
+       this.integration.config,
+       project,
+       branch,
+       filePath
+     );
+     const archiveResponse = await fetch__default.default(archiveUrl, {
+       ...integration.getGerritRequestOptions(this.integration.config),
+       // TODO(freben): The signal cast is there because pre-3.x versions of
+       // node-fetch have a very slightly deviating AbortSignal type signature.
+       // The difference does not affect us in practice however. The cast can
+       // be removed after we support ESM for CLI dependencies and migrate to
+       // version 3 of node-fetch.
+       // https://github.com/backstage/backstage/issues/8242
+       signal: options?.signal
+     });
+     if (archiveResponse.status === 404) {
+       throw new errors.NotFoundError(`Not found: ${archiveUrl}`);
+     }
+     if (!archiveResponse.ok) {
+       throw new Error(
+         `${url} could not be read as ${archiveUrl}, ${archiveResponse.status} ${archiveResponse.statusText}`
+       );
+     }
+     return await this.deps.treeResponseFactory.fromTarArchive({
+       stream: archiveResponse.body,
+       etag: revision,
+       filter: options?.filter,
+       stripFirstDirectory: false
+     });
+   }
+ }
+
+ class GithubUrlReader {
+   constructor(integration, deps) {
+     this.integration = integration;
+     this.deps = deps;
+     if (!integration.config.apiBaseUrl && !integration.config.rawBaseUrl) {
+       throw new Error(
+         `GitHub integration '${integration.title}' must configure an explicit apiBaseUrl or rawBaseUrl`
+       );
+     }
+   }
+   static factory = ({ config, treeResponseFactory }) => {
+     const integrations = integration.ScmIntegrations.fromConfig(config);
+     const credentialsProvider = integration.DefaultGithubCredentialsProvider.fromIntegrations(integrations);
+     return integrations.github.list().map((integration) => {
+       const reader = new GithubUrlReader(integration, {
+         treeResponseFactory,
+         credentialsProvider
+       });
+       const predicate = (url) => url.host === integration.config.host;
+       return { reader, predicate };
+     });
+   };
+   async read(url) {
+     const response = await this.readUrl(url);
+     return response.buffer();
+   }
+   getCredentials = async (url, options) => {
+     if (options?.token) {
+       return {
+         headers: {
+           Authorization: `Bearer ${options.token}`
+         },
+         type: "token",
+         token: options.token
+       };
+     }
+     return await this.deps.credentialsProvider.getCredentials({
+       url
+     });
+   };
+   async readUrl(url, options) {
+     const credentials = await this.getCredentials(url, options);
+     const ghUrl = integration.getGithubFileFetchUrl(
+       url,
+       this.integration.config,
+       credentials
+     );
+     const response = await this.fetchResponse(ghUrl, {
+       headers: {
+         ...credentials?.headers,
+         ...options?.etag && { "If-None-Match": options.etag },
+         ...options?.lastModifiedAfter && {
+           "If-Modified-Since": options.lastModifiedAfter.toUTCString()
+         },
+         Accept: "application/vnd.github.v3.raw"
+       },
+       // TODO(freben): The signal cast is there because pre-3.x versions of
+       // node-fetch have a very slightly deviating AbortSignal type signature.
+       // The difference does not affect us in practice however. The cast can
+       // be removed after we support ESM for CLI dependencies and migrate to
+       // version 3 of node-fetch.
+       // https://github.com/backstage/backstage/issues/8242
+       signal: options?.signal
+     });
+     return ReadUrlResponseFactory.fromNodeJSReadable(response.body, {
+       etag: response.headers.get("ETag") ?? void 0,
+       lastModifiedAt: parseLastModified(response.headers.get("Last-Modified"))
+     });
+   }
+   async readTree(url, options) {
+     const repoDetails = await this.getRepoDetails(url);
+     const commitSha = repoDetails.commitSha;
+     if (options?.etag && options.etag === commitSha) {
+       throw new errors.NotModifiedError();
+     }
+     const { filepath } = parseGitUrl__default.default(url);
+     const { headers } = await this.getCredentials(url, options);
+     return this.doReadTree(
+       repoDetails.repo.archive_url,
+       commitSha,
+       filepath,
+       // TODO(freben): The signal cast is there because pre-3.x versions of
+       // node-fetch have a very slightly deviating AbortSignal type signature.
+       // The difference does not affect us in practice however. The cast can be
+       // removed after we support ESM for CLI dependencies and migrate to
+       // version 3 of node-fetch.
+       // https://github.com/backstage/backstage/issues/8242
+       { headers, signal: options?.signal },
+       options
+     );
+   }
+   async search(url, options) {
+     const repoDetails = await this.getRepoDetails(url);
+     const commitSha = repoDetails.commitSha;
+     if (options?.etag && options.etag === commitSha) {
+       throw new errors.NotModifiedError();
+     }
+     const { filepath } = parseGitUrl__default.default(url);
+     const { headers } = await this.getCredentials(url, options);
+     const files = await this.doSearch(
+       url,
+       repoDetails.repo.trees_url,
+       repoDetails.repo.archive_url,
+       commitSha,
+       filepath,
+       { headers, signal: options?.signal }
+     );
+     return { files, etag: commitSha };
+   }
+   toString() {
+     const { host, token } = this.integration.config;
+     return `github{host=${host},authed=${Boolean(token)}}`;
+   }
+   async doReadTree(archiveUrl, sha, subpath, init, options) {
+     const archive = await this.fetchResponse(
+       archiveUrl.replace("{archive_format}", "tarball").replace("{/ref}", `/${sha}`),
+       init
+     );
+     return await this.deps.treeResponseFactory.fromTarArchive({
+       // TODO(Rugvip): Underlying implementation of fetch will be node-fetch, we probably want
+       // to stick to using that in exclusively backend code.
+       stream: stream.Readable.from(archive.body),
+       subpath,
+       etag: sha,
+       filter: options?.filter
+     });
+   }
+   async doSearch(url, treesUrl, archiveUrl, sha, query, init) {
+     function pathToUrl(path) {
+       const updated = new URL(url);
+       const base = updated.pathname.split("/").slice(1, 5).join("/");
+       updated.pathname = `${base}/${path}`;
+       return updated.toString();
+     }
+     const matcher = new minimatch.Minimatch(query.replace(/^\/+/, ""));
+     const recursiveTree = await this.fetchJson(
+       treesUrl.replace("{/sha}", `/${sha}?recursive=true`),
+       init
+     );
+     if (!recursiveTree.truncated) {
+       const matching = recursiveTree.tree.filter(
+         (item) => item.type === "blob" && item.path && item.url && matcher.match(item.path)
+       );
+       return matching.map((item) => ({
+         url: pathToUrl(item.path),
+         content: async () => {
+           const blob = await this.fetchJson(item.url, init);
+           return Buffer.from(blob.content, "base64");
+         }
+       }));
+     }
+     const tree = await this.doReadTree(archiveUrl, sha, "", init, {
+       filter: (path) => matcher.match(path)
+     });
+     const files = await tree.files();
+     return files.map((file) => ({
+       url: pathToUrl(file.path),
+       content: file.content,
+       lastModifiedAt: file.lastModifiedAt
+     }));
+   }
+   async getRepoDetails(url) {
+     const parsed = parseGitUrl__default.default(url);
+     const { ref, full_name } = parsed;
+     const credentials = await this.deps.credentialsProvider.getCredentials({
+       url
+     });
+     const { headers } = credentials;
+     const commitStatus = await this.fetchJson(
+       `${this.integration.config.apiBaseUrl}/repos/${full_name}/commits/${ref || await this.getDefaultBranch(full_name, credentials)}/status?per_page=0`,
+       { headers }
+     );
+     return {
+       commitSha: commitStatus.sha,
+       repo: commitStatus.repository
+     };
+   }
+   async getDefaultBranch(repoFullName, credentials) {
+     const repo = await this.fetchJson(
+       `${this.integration.config.apiBaseUrl}/repos/${repoFullName}`,
+       { headers: credentials.headers }
+     );
+     return repo.default_branch;
+   }
+   async fetchResponse(url, init) {
+     const urlAsString = url.toString();
+     const response = await fetch__default.default(urlAsString, init);
+     if (!response.ok) {
+       let message = `Request failed for ${urlAsString}, ${response.status} ${response.statusText}`;
+       if (response.status === 304) {
+         throw new errors.NotModifiedError();
+       }
+       if (response.status === 404) {
+         throw new errors.NotFoundError(message);
+       }
+       if (this.integration.parseRateLimitInfo(response).isRateLimited) {
+         message += " (rate limit exceeded)";
+       }
+       throw new Error(message);
+     }
+     return response;
+   }
+   async fetchJson(url, init) {
+     const response = await this.fetchResponse(url, init);
+     return await response.json();
+   }
+ }
+
+ class GitlabUrlReader {
+   constructor(integration, deps) {
+     this.integration = integration;
+     this.deps = deps;
+   }
+   static factory = ({ config, treeResponseFactory }) => {
+     const integrations = integration.ScmIntegrations.fromConfig(config);
+     return integrations.gitlab.list().map((integration) => {
+       const reader = new GitlabUrlReader(integration, {
+         treeResponseFactory
+       });
+       const predicate = (url) => url.host === integration.config.host;
+       return { reader, predicate };
+     });
+   };
+   async read(url) {
+     const response = await this.readUrl(url);
+     return response.buffer();
+   }
+   async readUrl(url, options) {
+     const { etag, lastModifiedAfter, signal } = options ?? {};
+     const builtUrl = await this.getGitlabFetchUrl(url);
+     let response;
+     try {
+       response = await fetch__default.default(builtUrl, {
+         headers: {
+           ...integration.getGitLabRequestOptions(this.integration.config).headers,
+           ...etag && { "If-None-Match": etag },
+           ...lastModifiedAfter && {
+             "If-Modified-Since": lastModifiedAfter.toUTCString()
+           }
+         },
+         // TODO(freben): The signal cast is there because pre-3.x versions of
+         // node-fetch have a very slightly deviating AbortSignal type signature.
+         // The difference does not affect us in practice however. The cast can be
+         // removed after we support ESM for CLI dependencies and migrate to
+         // version 3 of node-fetch.
+         // https://github.com/backstage/backstage/issues/8242
+         ...signal && { signal }
+       });
+     } catch (e) {
+       throw new Error(`Unable to read ${url}, ${e}`);
+     }
+     if (response.status === 304) {
+       throw new errors.NotModifiedError();
+     }
+     if (response.ok) {
+       return ReadUrlResponseFactory.fromNodeJSReadable(response.body, {
+         etag: response.headers.get("ETag") ?? void 0,
+         lastModifiedAt: parseLastModified(
+           response.headers.get("Last-Modified")
+         )
+       });
+     }
+     const message = `${url} could not be read as ${builtUrl}, ${response.status} ${response.statusText}`;
+     if (response.status === 404) {
+       throw new errors.NotFoundError(message);
+     }
+     throw new Error(message);
+   }
+   async readTree(url, options) {
+     const { etag, signal } = options ?? {};
+     const { ref, full_name, filepath } = parseGitUrl__default.default(url);
+     let repoFullName = full_name;
+     const relativePath = integration.getGitLabIntegrationRelativePath(
+       this.integration.config
+     );
+     if (relativePath) {
+       const rectifiedRelativePath = `${lodash.trimStart(relativePath, "/")}/`;
+       repoFullName = full_name.replace(rectifiedRelativePath, "");
+     }
+     const projectGitlabResponse = await fetch__default.default(
+       new URL(
+         `${this.integration.config.apiBaseUrl}/projects/${encodeURIComponent(
+           repoFullName
+         )}`
+       ).toString(),
+       integration.getGitLabRequestOptions(this.integration.config)
+     );
+     if (!projectGitlabResponse.ok) {
+       const msg = `Failed to read tree from ${url}, ${projectGitlabResponse.status} ${projectGitlabResponse.statusText}`;
+       if (projectGitlabResponse.status === 404) {
+         throw new errors.NotFoundError(msg);
+       }
+       throw new Error(msg);
+     }
+     const projectGitlabResponseJson = await projectGitlabResponse.json();
+     const branch = ref || projectGitlabResponseJson.default_branch;
+     const commitsReqParams = new URLSearchParams();
+     commitsReqParams.set("ref_name", branch);
+     if (!!filepath) {
+       commitsReqParams.set("path", filepath);
+     }
+     const commitsGitlabResponse = await fetch__default.default(
+       new URL(
+         `${this.integration.config.apiBaseUrl}/projects/${encodeURIComponent(
+           repoFullName
+         )}/repository/commits?${commitsReqParams.toString()}`
+       ).toString(),
+       {
+         ...integration.getGitLabRequestOptions(this.integration.config),
+         // TODO(freben): The signal cast is there because pre-3.x versions of
+         // node-fetch have a very slightly deviating AbortSignal type signature.
+         // The difference does not affect us in practice however. The cast can
+         // be removed after we support ESM for CLI dependencies and migrate to
+         // version 3 of node-fetch.
+         // https://github.com/backstage/backstage/issues/8242
+         ...signal && { signal }
+       }
+     );
+     if (!commitsGitlabResponse.ok) {
+       const message = `Failed to read tree (branch) from ${url}, ${commitsGitlabResponse.status} ${commitsGitlabResponse.statusText}`;
+       if (commitsGitlabResponse.status === 404) {
+         throw new errors.NotFoundError(message);
+       }
+       throw new Error(message);
+     }
+     const commitSha = (await commitsGitlabResponse.json())[0]?.id ?? "";
+     if (etag && etag === commitSha) {
+       throw new errors.NotModifiedError();
+     }
+     const archiveReqParams = new URLSearchParams();
+     archiveReqParams.set("sha", branch);
+     if (!!filepath) {
+       archiveReqParams.set("path", filepath);
+     }
+     const archiveGitLabResponse = await fetch__default.default(
+       `${this.integration.config.apiBaseUrl}/projects/${encodeURIComponent(
+         repoFullName
+       )}/repository/archive?${archiveReqParams.toString()}`,
+       {
+         ...integration.getGitLabRequestOptions(this.integration.config),
+         // TODO(freben): The signal cast is there because pre-3.x versions of
+         // node-fetch have a very slightly deviating AbortSignal type signature.
+         // The difference does not affect us in practice however. The cast can
+         // be removed after we support ESM for CLI dependencies and migrate to
+         // version 3 of node-fetch.
+         // https://github.com/backstage/backstage/issues/8242
+         ...signal && { signal }
+       }
+     );
+     if (!archiveGitLabResponse.ok) {
+       const message = `Failed to read tree (archive) from ${url}, ${archiveGitLabResponse.status} ${archiveGitLabResponse.statusText}`;
+       if (archiveGitLabResponse.status === 404) {
+         throw new errors.NotFoundError(message);
+       }
+       throw new Error(message);
+     }
+     return await this.deps.treeResponseFactory.fromTarArchive({
+       stream: stream.Readable.from(archiveGitLabResponse.body),
+       subpath: filepath,
+       etag: commitSha,
+       filter: options?.filter
+     });
+   }
+   async search(url, options) {
+     const { filepath } = parseGitUrl__default.default(url);
+     const staticPart = this.getStaticPart(filepath);
+     const matcher = new minimatch.Minimatch(filepath);
+     const treeUrl = lodash.trimEnd(url.replace(filepath, staticPart), `/`);
+     const pathPrefix = staticPart ? `${staticPart}/` : "";
+     const tree = await this.readTree(treeUrl, {
+       etag: options?.etag,
+       signal: options?.signal,
+       filter: (path) => matcher.match(`${pathPrefix}${path}`)
+     });
+     const files = await tree.files();
+     return {
+       etag: tree.etag,
+       files: files.map((file) => ({
+         url: this.integration.resolveUrl({
+           url: `/${pathPrefix}${file.path}`,
+           base: url
+         }),
+         content: file.content,
+         lastModifiedAt: file.lastModifiedAt
+       }))
+     };
+   }
+   /**
+    * This function splits the input globPattern string into segments using the path separator /. It then iterates over
+    * the segments from the end of the array towards the beginning, checking if the concatenated string up to that
+    * segment matches the original globPattern using the minimatch function. If a match is found, it continues iterating.
+    * If no match is found, it returns the concatenated string up to the current segment, which is the static part of the
+    * glob pattern.
+    *
+    * E.g. `catalog/foo/*.yaml` will return `catalog/foo`.
+    *
+    * @param globPattern the glob pattern
+    * @private
+    */
+   getStaticPart(globPattern) {
+     const segments = globPattern.split("/");
+     let i = segments.length;
+     while (i > 0 && new minimatch.Minimatch(segments.slice(0, i).join("/")).match(globPattern)) {
+       i--;
+     }
+     return segments.slice(0, i).join("/");
+   }
+   toString() {
+     const { host, token } = this.integration.config;
+     return `gitlab{host=${host},authed=${Boolean(token)}}`;
+   }
+   async getGitlabFetchUrl(target) {
+     const targetUrl = new URL(target);
+     if (targetUrl.pathname.includes("/-/jobs/artifacts/")) {
+       return this.getGitlabArtifactFetchUrl(targetUrl).then(
+         (value) => value.toString()
+       );
+     }
+     return integration.getGitLabFileFetchUrl(target, this.integration.config);
+   }
+   // convert urls of the form:
+   // https://example.com/<namespace>/<project>/-/jobs/artifacts/<ref>/raw/<path_to_file>?job=<job_name>
+   // to urls of the form:
+   // https://example.com/api/v4/projects/:id/jobs/artifacts/:ref_name/raw/*artifact_path?job=<job_name>
+   async getGitlabArtifactFetchUrl(target) {
+     if (!target.pathname.includes("/-/jobs/artifacts/")) {
+       throw new Error("Unable to process url as an GitLab artifact");
+     }
+     try {
+       const [namespaceAndProject, ref] = target.pathname.split("/-/jobs/artifacts/");
+       const projectPath = new URL(target);
+       projectPath.pathname = namespaceAndProject;
+       const projectId = await this.resolveProjectToId(projectPath);
+       const relativePath = integration.getGitLabIntegrationRelativePath(
+         this.integration.config
+       );
+       const newUrl = new URL(target);
+       newUrl.pathname = `${relativePath}/api/v4/projects/${projectId}/jobs/artifacts/${ref}`;
+       return newUrl;
+     } catch (e) {
+       throw new Error(
+         `Unable to translate GitLab artifact URL: ${target}, ${e}`
+       );
+     }
+   }
+   async resolveProjectToId(pathToProject) {
+     let project = pathToProject.pathname;
+     const relativePath = integration.getGitLabIntegrationRelativePath(
+       this.integration.config
+     );
+     if (relativePath) {
+       project = project.replace(relativePath, "");
+     }
+     project = project.replace(/^\//, "");
+     const result = await fetch__default.default(
+       `${pathToProject.origin}${relativePath}/api/v4/projects/${encodeURIComponent(project)}`,
+       integration.getGitLabRequestOptions(this.integration.config)
+     );
+     const data = await result.json();
+     if (!result.ok) {
+       throw new Error(`Gitlab error: ${data.error}, ${data.error_description}`);
+     }
+     return Number(data.id);
+   }
+ }
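
To make the getStaticPart behavior documented above concrete, a few worked inputs (derived by tracing the code, not taken from the package's docs):

    getStaticPart('catalog/foo/*.yaml');   // 'catalog/foo'  (last segment is a glob)
    getStaticPart('**/catalog-info.yaml'); // ''             (the whole pattern is dynamic)
    getStaticPart('catalog/foo/bar.yaml'); // 'catalog/foo'  (a literal pattern matches itself, so its last segment is dropped too)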
+
+ class GiteaUrlReader {
+   constructor(integration, deps) {
+     this.integration = integration;
+     this.deps = deps;
+   }
+   static factory = ({ config, treeResponseFactory }) => {
+     return integration.ScmIntegrations.fromConfig(config).gitea.list().map((integration) => {
+       const reader = new GiteaUrlReader(integration, { treeResponseFactory });
+       const predicate = (url) => {
+         return url.host === integration.config.host;
+       };
+       return { reader, predicate };
+     });
+   };
+   async read(url) {
+     const response = await this.readUrl(url);
+     return response.buffer();
+   }
+   async readUrl(url, options) {
+     let response;
+     const blobUrl = integration.getGiteaFileContentsUrl(this.integration.config, url);
+     try {
+       response = await fetch__default.default(blobUrl, {
+         method: "GET",
+         ...integration.getGiteaRequestOptions(this.integration.config),
+         signal: options?.signal
+       });
+     } catch (e) {
+       throw new Error(`Unable to read ${blobUrl}, ${e}`);
+     }
+     if (response.ok) {
+       const { encoding, content } = await response.json();
+       if (encoding === "base64") {
+         return ReadUrlResponseFactory.fromReadable(
+           stream.Readable.from(Buffer.from(content, "base64")),
+           {
+             etag: response.headers.get("ETag") ?? void 0,
+             lastModifiedAt: parseLastModified(
+               response.headers.get("Last-Modified")
+             )
+           }
+         );
+       }
+       throw new Error(`Unknown encoding: ${encoding}`);
+     }
+     const message = `${url} could not be read as ${blobUrl}, ${response.status} ${response.statusText}`;
+     if (response.status === 404) {
+       throw new errors.NotFoundError(message);
+     }
+     if (response.status === 304) {
+       throw new errors.NotModifiedError();
+     }
+     if (response.status === 403) {
+       throw new errors.AuthenticationError();
+     }
+     throw new Error(message);
+   }
+   async readTree(url, options) {
+     const lastCommitHash = await this.getLastCommitHash(url);
+     if (options?.etag && options.etag === lastCommitHash) {
+       throw new errors.NotModifiedError();
+     }
+     const archiveUri = integration.getGiteaArchiveUrl(this.integration.config, url);
+     let response;
+     try {
+       response = await fetch__default.default(archiveUri, {
+         method: "GET",
+         ...integration.getGiteaRequestOptions(this.integration.config),
+         signal: options?.signal
+       });
+     } catch (e) {
+       throw new Error(`Unable to read ${archiveUri}, ${e}`);
+     }
+     const parsedUri = integration.parseGiteaUrl(this.integration.config, url);
+     return this.deps.treeResponseFactory.fromTarArchive({
+       stream: stream.Readable.from(response.body),
+       subpath: parsedUri.path,
+       etag: lastCommitHash,
+       filter: options?.filter
+     });
+   }
+   search() {
+     throw new Error("GiteaUrlReader search not implemented.");
+   }
+   toString() {
+     const { host } = this.integration.config;
+     return `gitea{host=${host},authed=${Boolean(
+       this.integration.config.password
+     )}}`;
+   }
+   async getLastCommitHash(url) {
+     const commitUri = integration.getGiteaLatestCommitUrl(this.integration.config, url);
+     const response = await fetch__default.default(
+       commitUri,
+       integration.getGiteaRequestOptions(this.integration.config)
+     );
+     if (!response.ok) {
+       const message = `Failed to retrieve latest commit information from ${commitUri}, ${response.status} ${response.statusText}`;
+       if (response.status === 404) {
+         throw new errors.NotFoundError(message);
+       }
+       throw new Error(message);
+     }
+     return (await response.json()).sha;
+   }
+ }
+
+ class HarnessUrlReader {
+   constructor(integration, deps) {
+     this.integration = integration;
+     this.deps = deps;
+   }
+   static factory = ({ config, treeResponseFactory }) => {
+     return integration.ScmIntegrations.fromConfig(config).harness.list().map((integration) => {
+       const reader = new HarnessUrlReader(integration, {
1538
+ treeResponseFactory
1539
+ });
1540
+ const predicate = (url) => {
1541
+ return url.host === integration.config.host;
1542
+ };
1543
+ return { reader, predicate };
1544
+ });
1545
+ };
1546
+ async read(url) {
1547
+ const response = await this.readUrl(url);
1548
+ return response.buffer();
1549
+ }
1550
+ async readUrl(url, options) {
1551
+ let response;
1552
+ const blobUrl = integration.getHarnessFileContentsUrl(this.integration.config, url);
1553
+ try {
1554
+ response = await fetch__default.default(blobUrl, {
1555
+ method: "GET",
1556
+ ...integration.getHarnessRequestOptions(this.integration.config),
1557
+ signal: options?.signal
1558
+ });
1559
+ } catch (e) {
1560
+ throw new Error(`Unable to read ${blobUrl}, ${e}`);
1561
+ }
1562
+ if (response.ok) {
1563
+ return ReadUrlResponseFactory.fromReadable(
1564
+ stream.Readable.from(response.body),
1565
+ {
1566
+ etag: response.headers.get("ETag") ?? void 0
1567
+ }
1568
+ );
1573
+ }
1574
+ const message = `${url} could not be read as ${blobUrl}, ${response.status} ${response.statusText}`;
1575
+ if (response.status === 404) {
1576
+ throw new errors.NotFoundError(message);
1577
+ }
1578
+ if (response.status === 304) {
1579
+ throw new errors.NotModifiedError();
1580
+ }
1581
+ if (response.status === 403) {
1582
+ throw new errors.AuthenticationError();
1583
+ }
1584
+ throw new Error(message);
1585
+ }
1586
+ async readTree(url, options) {
1587
+ const lastCommitHash = await this.getLastCommitHash(url);
1588
+ if (options?.etag && options.etag === lastCommitHash) {
1589
+ throw new errors.NotModifiedError();
1590
+ }
1591
+ const archiveUri = integration.getHarnessArchiveUrl(this.integration.config, url);
1592
+ let response;
1593
+ try {
1594
+ response = await fetch__default.default(archiveUri, {
1595
+ method: "GET",
1596
+ ...integration.getHarnessRequestOptions(this.integration.config),
1597
+ signal: options?.signal
1598
+ });
1599
+ } catch (e) {
1600
+ throw new Error(`Unable to read ${archiveUri}, ${e}`);
1601
+ }
1602
+ const parsedUri = integration.parseHarnessUrl(this.integration.config, url);
1603
+ return this.deps.treeResponseFactory.fromZipArchive({
1604
+ stream: stream.Readable.from(response.body),
1605
+ subpath: parsedUri.path,
1606
+ etag: lastCommitHash,
1607
+ filter: options?.filter
1608
+ });
1609
+ }
1610
+ search() {
1611
+ throw new Error("HarnessUrlReader search not implemented.");
1612
+ }
1613
+ toString() {
1614
+ const { host } = this.integration.config;
1615
+ return `harness{host=${host},authed=${Boolean(
1616
+ this.integration.config.token || this.integration.config.apiKey
1617
+ )}}`;
1618
+ }
1619
+ async getLastCommitHash(url) {
1620
+ const commitUri = integration.getHarnessLatestCommitUrl(this.integration.config, url);
1621
+ const response = await fetch__default.default(
1622
+ commitUri,
1623
+ integration.getHarnessRequestOptions(this.integration.config)
1624
+ );
1625
+ if (!response.ok) {
1626
+ const message = `Failed to retrieve latest commit information from ${commitUri}, ${response.status} ${response.statusText}`;
1627
+ if (response.status === 404) {
1628
+ throw new errors.NotFoundError(message);
1629
+ }
1630
+ throw new Error(message);
1631
+ }
1632
+ return (await response.json()).latest_commit.sha;
1633
+ }
1634
+ }
1635
+
1636
+ const DEFAULT_REGION = "us-east-1";
1637
+ function parseUrl$1(url, config) {
1638
+ const parsedUrl = new URL(url);
1639
+ const pathname = parsedUrl.pathname.substring(1);
1640
+ const host = parsedUrl.host;
1641
+ if (config.host === "amazonaws.com" || config.host === "amazonaws.com.cn") {
1642
+ const match = host.match(
1643
+ /^(?:([a-z0-9.-]+)\.)?s3(?:[.-]([a-z0-9-]+))?\.amazonaws\.com(\.cn)?$/
1644
+ );
1645
+ if (!match) {
1646
+ throw new Error(`Invalid AWS S3 URL ${url}`);
1647
+ }
1648
+ const [, hostBucket, hostRegion] = match;
1649
+ if (config.s3ForcePathStyle || !hostBucket) {
1650
+ const slashIndex = pathname.indexOf("/");
1651
+ if (slashIndex < 0) {
1652
+ throw new Error(
1653
+ `Invalid path-style AWS S3 URL ${url}, does not contain bucket in the path`
1654
+ );
1655
+ }
1656
+ return {
1657
+ path: pathname.substring(slashIndex + 1),
1658
+ bucket: pathname.substring(0, slashIndex),
1659
+ region: hostRegion ?? DEFAULT_REGION
1660
+ };
1661
+ }
1662
+ return {
1663
+ path: pathname,
1664
+ bucket: hostBucket,
1665
+ region: hostRegion ?? DEFAULT_REGION
1666
+ };
1667
+ }
1668
+ const usePathStyle = config.s3ForcePathStyle || host.length === config.host.length;
1669
+ if (usePathStyle) {
1670
+ const slashIndex = pathname.indexOf("/");
1671
+ if (slashIndex < 0) {
1672
+ throw new Error(
1673
+ `Invalid path-style AWS S3 URL ${url}, does not contain bucket in the path`
1674
+ );
1675
+ }
1676
+ return {
1677
+ path: pathname.substring(slashIndex + 1),
1678
+ bucket: pathname.substring(0, slashIndex),
1679
+ region: DEFAULT_REGION
1680
+ };
1681
+ }
1682
+ return {
1683
+ path: pathname,
1684
+ bucket: host.substring(0, host.length - config.host.length - 1),
1685
+ region: DEFAULT_REGION
1686
+ };
1687
+ }
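+ // Editor's sketch (hypothetical bucket/key names) of the parsing above:
+ //   parseUrl$1('https://my-bucket.s3.us-east-2.amazonaws.com/a/b.yaml', { host: 'amazonaws.com' })
+ //     => { path: 'a/b.yaml', bucket: 'my-bucket', region: 'us-east-2' }  // virtual-hosted style
+ //   parseUrl$1('https://s3.amazonaws.com/my-bucket/a/b.yaml', { host: 'amazonaws.com' })
+ //     => { path: 'a/b.yaml', bucket: 'my-bucket', region: 'us-east-1' }  // path style, default region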
1688
+ class AwsS3UrlReader {
1689
+ constructor(credsManager, integration, deps) {
1690
+ this.credsManager = credsManager;
1691
+ this.integration = integration;
1692
+ this.deps = deps;
1693
+ }
1694
+ static factory = ({ config, treeResponseFactory }) => {
1695
+ const integrations = integration.ScmIntegrations.fromConfig(config);
1696
+ const credsManager = integrationAwsNode.DefaultAwsCredentialsManager.fromConfig(config);
1697
+ return integrations.awsS3.list().map((integration) => {
1698
+ const reader = new AwsS3UrlReader(credsManager, integration, {
1699
+ treeResponseFactory
1700
+ });
1701
+ const predicate = (url) => url.host.endsWith(integration.config.host);
1702
+ return { reader, predicate };
1703
+ });
1704
+ };
1705
+ /**
1706
+ * If accessKeyId and secretAccessKey are missing, the standard credentials provider chain will be used:
1707
+ * https://docs.aws.amazon.com/AWSJavaSDK/latest/javadoc/com/amazonaws/auth/DefaultAWSCredentialsProviderChain.html
1708
+ */
1709
+ static buildStaticCredentials(accessKeyId, secretAccessKey) {
1710
+ return async () => {
1711
+ return {
1712
+ accessKeyId,
1713
+ secretAccessKey
1714
+ };
1715
+ };
1716
+ }
1717
+ static async buildCredentials(credsManager, region, integration) {
1718
+ if (!integration) {
1719
+ return (await credsManager.getCredentialProvider()).sdkCredentialProvider;
1720
+ }
1721
+ const accessKeyId = integration.config.accessKeyId;
1722
+ const secretAccessKey = integration.config.secretAccessKey;
1723
+ let explicitCredentials;
1724
+ if (accessKeyId && secretAccessKey) {
1725
+ explicitCredentials = AwsS3UrlReader.buildStaticCredentials(
1726
+ accessKeyId,
1727
+ secretAccessKey
1728
+ );
1729
+ } else {
1730
+ explicitCredentials = (await credsManager.getCredentialProvider()).sdkCredentialProvider;
1731
+ }
1732
+ const roleArn = integration.config.roleArn;
1733
+ if (roleArn) {
1734
+ return credentialProviders.fromTemporaryCredentials({
1735
+ masterCredentials: explicitCredentials,
1736
+ params: {
1737
+ RoleSessionName: "backstage-aws-s3-url-reader",
1738
+ RoleArn: roleArn,
1739
+ ExternalId: integration.config.externalId
1740
+ },
1741
+ clientConfig: { region }
1742
+ });
1743
+ }
1744
+ return explicitCredentials;
1745
+ }
1746
+ async buildS3Client(credsManager, region, integration) {
1747
+ const credentials = await AwsS3UrlReader.buildCredentials(
1748
+ credsManager,
1749
+ region,
1750
+ integration
1751
+ );
1752
+ const s3 = new clientS3.S3Client({
1753
+ region,
1754
+ credentials,
1755
+ endpoint: integration.config.endpoint,
1756
+ forcePathStyle: integration.config.s3ForcePathStyle
1757
+ });
1758
+ return s3;
1759
+ }
1760
+ async retrieveS3ObjectData(stream$1) {
1761
+ return new Promise((resolve, reject) => {
1762
+ try {
1763
+ const chunks = [];
1764
+ stream$1.on("data", (chunk) => chunks.push(chunk));
1765
+ stream$1.on(
1766
+ "error",
1767
+ (e) => reject(new errors.ForwardedError("Unable to read stream", e))
1768
+ );
1769
+ stream$1.on("end", () => resolve(stream.Readable.from(Buffer.concat(chunks))));
1770
+ } catch (e) {
1771
+ throw new errors.ForwardedError("Unable to parse the response data", e);
1772
+ }
1773
+ });
1774
+ }
1775
+ async read(url) {
1776
+ const response = await this.readUrl(url);
1777
+ return response.buffer();
1778
+ }
1779
+ async readUrl(url, options) {
1780
+ const { etag, lastModifiedAfter } = options ?? {};
1781
+ try {
1782
+ const { path, bucket, region } = parseUrl$1(url, this.integration.config);
1783
+ const s3Client = await this.buildS3Client(
1784
+ this.credsManager,
1785
+ region,
1786
+ this.integration
1787
+ );
1788
+ const abortController$1 = new abortController.AbortController();
1789
+ const params = {
1790
+ Bucket: bucket,
1791
+ Key: path,
1792
+ ...etag && { IfNoneMatch: etag },
1793
+ ...lastModifiedAfter && {
1794
+ IfModifiedSince: lastModifiedAfter
1795
+ }
1796
+ };
1797
+ options?.signal?.addEventListener("abort", () => abortController$1.abort());
1798
+ const getObjectCommand = new clientS3.GetObjectCommand(params);
1799
+ const response = await s3Client.send(getObjectCommand, {
1800
+ abortSignal: abortController$1.signal
1801
+ });
1802
+ const s3ObjectData = await this.retrieveS3ObjectData(
1803
+ response.Body
1804
+ );
1805
+ return ReadUrlResponseFactory.fromReadable(s3ObjectData, {
1806
+ etag: response.ETag,
1807
+ lastModifiedAt: response.LastModified
1808
+ });
1809
+ } catch (e) {
1810
+ if (e.$metadata && e.$metadata.httpStatusCode === 304) {
1811
+ throw new errors.NotModifiedError();
1812
+ }
1813
+ throw new errors.ForwardedError("Could not retrieve file from S3", e);
1814
+ }
1815
+ }
1816
+ async readTree(url, options) {
1817
+ try {
1818
+ const { path, bucket, region } = parseUrl$1(url, this.integration.config);
1819
+ const s3Client = await this.buildS3Client(
1820
+ this.credsManager,
1821
+ region,
1822
+ this.integration
1823
+ );
1824
+ const abortController$1 = new abortController.AbortController();
1825
+ const allObjects = [];
1826
+ const responses = [];
1827
+ let continuationToken;
1828
+ let output;
1829
+ do {
1830
+ const listObjectsV2Command = new clientS3.ListObjectsV2Command({
1831
+ Bucket: bucket,
1832
+ ContinuationToken: continuationToken,
1833
+ Prefix: path
1834
+ });
1835
+ options?.signal?.addEventListener(
1836
+ "abort",
1837
+ () => abortController$1.abort()
1838
+ );
1839
+ output = await s3Client.send(listObjectsV2Command, {
1840
+ abortSignal: abortController$1.signal
1841
+ });
1842
+ if (output.Contents) {
1843
+ output.Contents.forEach((contents) => {
1844
+ allObjects.push(contents.Key);
1845
+ });
1846
+ }
1847
+ continuationToken = output.NextContinuationToken;
1848
+ } while (continuationToken);
1849
+ for (let i = 0; i < allObjects.length; i++) {
1850
+ const getObjectCommand = new clientS3.GetObjectCommand({
1851
+ Bucket: bucket,
1852
+ Key: String(allObjects[i])
1853
+ });
1854
+ const response = await s3Client.send(getObjectCommand);
1855
+ const s3ObjectData = await this.retrieveS3ObjectData(
1856
+ response.Body
1857
+ );
1858
+ responses.push({
1859
+ data: s3ObjectData,
1860
+ path: posix.relative(path, String(allObjects[i])),
1861
+ lastModifiedAt: response?.LastModified ?? void 0
1862
+ });
1863
+ }
1864
+ return await this.deps.treeResponseFactory.fromReadableArray(responses);
1865
+ } catch (e) {
1866
+ throw new errors.ForwardedError("Could not retrieve file tree from S3", e);
1867
+ }
1868
+ }
1869
+ async search() {
1870
+ throw new Error("AwsS3Reader does not implement search");
1871
+ }
1872
+ toString() {
1873
+ const secretAccessKey = this.integration.config.secretAccessKey;
1874
+ return `awsS3{host=${this.integration.config.host},authed=${Boolean(
1875
+ secretAccessKey
1876
+ )}}`;
1877
+ }
1878
+ }
1879
+
1880
+ const isInRange = (num, [start, end]) => {
1881
+ return num >= start && num <= end;
1882
+ };
1883
+ const parsePortRange = (port) => {
1884
+ const isRange = port.includes("-");
1885
+ if (isRange) {
1886
+ const range = port.split("-").map((v) => parseInt(v, 10)).filter(Boolean);
1887
+ if (range.length !== 2)
1888
+ throw new Error(`Port range is not valid: ${port}`);
1889
+ const [start, end] = range;
1890
+ if (start <= 0 || end <= 0 || start > end)
1891
+ throw new Error(`Port range is not valid: [${start}, ${end}]`);
1892
+ return range;
1893
+ }
1894
+ const parsedPort = parseInt(port, 10);
1895
+ return [parsedPort, parsedPort];
1896
+ };
1897
+ const parsePortPredicate = (port) => {
1898
+ if (port) {
1899
+ const range = parsePortRange(port);
1900
+ return (url) => {
1901
+ if (url.port)
1902
+ return isInRange(parseInt(url.port, 10), range);
1903
+ if (url.protocol === "http:")
1904
+ return isInRange(80, range);
1905
+ if (url.protocol === "https:")
1906
+ return isInRange(443, range);
1907
+ return false;
1908
+ };
1909
+ }
1910
+ return (url) => !url.port;
1911
+ };
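+ // Editor's sketch of the predicates above (hosts are hypothetical):
+ //   const inRange = parsePortPredicate('8000-9000');
+ //   inRange(new URL('https://example.com:8080')); // true
+ //   inRange(new URL('https://example.com'));      // false, https defaults to 443
+ //   parsePortPredicate(undefined)(new URL('https://example.com')); // true, no port expected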
1912
+ class FetchUrlReader {
1913
+ /**
1914
+ * The factory creates a single reader that will be used for reading any URL that's listed
1915
+ * in configuration at `backend.reading.allow`. The allow list contains a list of objects describing
1916
+ * targets to allow, containing the following fields:
1917
+ *
1918
+ * `host`:
1919
+ * Either full hostnames to match, or subdomain wildcard matchers with a leading '*'.
1920
+ * For example 'example.com' and '*.example.com' are valid values, 'prod.*.example.com' is not.
1921
+ *
1922
+ * `paths`:
1923
+ * An optional list of paths that are allowed. If the list is omitted, all paths are allowed.
1924
+ */
1925
+ static factory = ({ config }) => {
1926
+ const predicates = config.getOptionalConfigArray("backend.reading.allow")?.map((allowConfig) => {
1927
+ const paths = allowConfig.getOptionalStringArray("paths");
1928
+ const checkPath = paths ? (url) => {
1929
+ const targetPath = platformPath__default.default.posix.normalize(url.pathname);
1930
+ return paths.some(
1931
+ (allowedPath) => targetPath.startsWith(allowedPath)
1932
+ );
1933
+ } : (_url) => true;
1934
+ const host = allowConfig.getString("host");
1935
+ const [hostname, port] = host.split(":");
1936
+ const checkPort = parsePortPredicate(port);
1937
+ if (hostname.startsWith("*.")) {
1938
+ const suffix = hostname.slice(1);
1939
+ return (url) => url.hostname.endsWith(suffix) && checkPath(url) && checkPort(url);
1940
+ }
1941
+ return (url) => url.hostname === hostname && checkPath(url) && checkPort(url);
1942
+ }) ?? [];
1943
+ const reader = new FetchUrlReader();
1944
+ const predicate = (url) => predicates.some((p) => p(url));
1945
+ return [{ reader, predicate }];
1946
+ };
1947
+ async read(url) {
1948
+ const response = await this.readUrl(url);
1949
+ return response.buffer();
1950
+ }
1951
+ async readUrl(url, options) {
1952
+ let response;
1953
+ try {
1954
+ response = await fetch__default.default(url, {
1955
+ headers: {
1956
+ ...options?.etag && { "If-None-Match": options.etag },
1957
+ ...options?.lastModifiedAfter && {
1958
+ "If-Modified-Since": options.lastModifiedAfter.toUTCString()
1959
+ },
1960
+ ...options?.token && { Authorization: `Bearer ${options.token}` }
1961
+ },
1962
+ // TODO(freben): The signal cast is there because pre-3.x versions of
1963
+ // node-fetch have a very slightly deviating AbortSignal type signature.
1964
+ // The difference does not affect us in practice however. The cast can
1965
+ // be removed after we support ESM for CLI dependencies and migrate to
1966
+ // version 3 of node-fetch.
1967
+ // https://github.com/backstage/backstage/issues/8242
1968
+ signal: options?.signal
1969
+ });
1970
+ } catch (e) {
1971
+ throw new Error(`Unable to read ${url}, ${e}`);
1972
+ }
1973
+ if (response.status === 304) {
1974
+ throw new errors.NotModifiedError();
1975
+ }
1976
+ if (response.ok) {
1977
+ return ReadUrlResponseFactory.fromNodeJSReadable(response.body, {
1978
+ etag: response.headers.get("ETag") ?? void 0,
1979
+ lastModifiedAt: parseLastModified(
1980
+ response.headers.get("Last-Modified")
1981
+ )
1982
+ });
1983
+ }
1984
+ const message = `could not read ${url}, ${response.status} ${response.statusText}`;
1985
+ if (response.status === 404) {
1986
+ throw new errors.NotFoundError(message);
1987
+ }
1988
+ throw new Error(message);
1989
+ }
1990
+ async readTree() {
1991
+ throw new Error("FetchUrlReader does not implement readTree");
1992
+ }
1993
+ async search() {
1994
+ throw new Error("FetchUrlReader does not implement search");
1995
+ }
1996
+ toString() {
1997
+ return "fetch{}";
1998
+ }
1999
+ }
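+ // Editor's sketch of using the factory directly, assuming ConfigReader from
+ // @backstage/config (the host and paths below are hypothetical):
+ //
+ //   const { ConfigReader } = require('@backstage/config');
+ //   const config = new ConfigReader({
+ //     backend: { reading: { allow: [{ host: '*.example.com', paths: ['/docs/'] }] } },
+ //   });
+ //   const [{ reader, predicate }] = FetchUrlReader.factory({ config });
+ //   predicate(new URL('https://docs.example.com/docs/index.md')); // true
+ //   const response = await reader.readUrl('https://docs.example.com/docs/index.md');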
2000
+
2001
+ function notAllowedMessage(url) {
2002
+ return `Reading from '${url}' is not allowed. You may need to configure an integration for the target host, or add it to the configured list of allowed hosts at 'backend.reading.allow'`;
2003
+ }
2004
+ class UrlReaderPredicateMux {
2005
+ readers = [];
2006
+ register(tuple) {
2007
+ this.readers.push(tuple);
2008
+ }
2009
+ async readUrl(url, options) {
2010
+ const parsed = new URL(url);
2011
+ for (const { predicate, reader } of this.readers) {
2012
+ if (predicate(parsed)) {
2013
+ return reader.readUrl(url, options);
2014
+ }
2015
+ }
2016
+ throw new errors.NotAllowedError(notAllowedMessage(url));
2017
+ }
2018
+ async readTree(url, options) {
2019
+ const parsed = new URL(url);
2020
+ for (const { predicate, reader } of this.readers) {
2021
+ if (predicate(parsed)) {
2022
+ return await reader.readTree(url, options);
2023
+ }
2024
+ }
2025
+ throw new errors.NotAllowedError(notAllowedMessage(url));
2026
+ }
2027
+ async search(url, options) {
2028
+ const parsed = new URL(url);
2029
+ for (const { predicate, reader } of this.readers) {
2030
+ if (predicate(parsed)) {
2031
+ return await reader.search(url, options);
2032
+ }
2033
+ }
2034
+ throw new errors.NotAllowedError(notAllowedMessage(url));
2035
+ }
2036
+ toString() {
2037
+ return `predicateMux{readers=${this.readers.map((t) => t.reader).join(",")}}`;
2038
+ }
2039
+ }
2040
+
2041
+ const pipeline$2 = util.promisify(stream.pipeline);
2042
+ const directoryNameRegex = /^[^\/]+\//;
2043
+ function stripFirstDirectoryFromPath(path) {
2044
+ return path.replace(directoryNameRegex, "");
2045
+ }
2046
+ const streamToBuffer = (stream) => {
2047
+ return new Promise(async (resolve, reject) => {
2048
+ try {
2049
+ await pipeline$2(stream, concatStream__default.default(resolve));
2050
+ } catch (ex) {
2051
+ reject(ex);
2052
+ }
2053
+ });
2054
+ };
2055
+
2056
+ const TarParseStream = tar.Parse;
2057
+ const pipeline$1 = util.promisify(stream.pipeline);
2058
+ class TarArchiveResponse {
2059
+ constructor(stream, subPath, workDir, etag, filter, stripFirstDirectory = true) {
2060
+ this.stream = stream;
2061
+ this.subPath = subPath;
2062
+ this.workDir = workDir;
2063
+ this.etag = etag;
2064
+ this.filter = filter;
2065
+ this.stripFirstDirectory = stripFirstDirectory;
2066
+ if (subPath) {
2067
+ if (!subPath.endsWith("/")) {
2068
+ this.subPath += "/";
2069
+ }
2070
+ if (subPath.startsWith("/")) {
2071
+ throw new TypeError(
2072
+ `TarArchiveResponse subPath must not start with a /, got '${subPath}'`
2073
+ );
2074
+ }
2075
+ }
2077
+ }
2078
+ read = false;
2079
+ // Make sure the input stream is only read once
2080
+ onlyOnce() {
2081
+ if (this.read) {
2082
+ throw new Error("Response has already been read");
2083
+ }
2084
+ this.read = true;
2085
+ }
2086
+ async files() {
2087
+ this.onlyOnce();
2088
+ const files = Array();
2089
+ const parser = new TarParseStream();
2090
+ parser.on("entry", (entry) => {
2091
+ if (entry.type === "Directory") {
2092
+ entry.resume();
2093
+ return;
2094
+ }
2095
+ const relativePath = this.stripFirstDirectory ? stripFirstDirectoryFromPath(entry.path) : entry.path;
2096
+ if (this.subPath) {
2097
+ if (!relativePath.startsWith(this.subPath)) {
2098
+ entry.resume();
2099
+ return;
2100
+ }
2101
+ }
2102
+ const path = relativePath.slice(this.subPath.length);
2103
+ if (this.filter) {
2104
+ if (!this.filter(path, { size: entry.remain })) {
2105
+ entry.resume();
2106
+ return;
2107
+ }
2108
+ }
2109
+ const content = new Promise(async (resolve) => {
2110
+ await pipeline$1(entry, concatStream__default.default(resolve));
2111
+ });
2112
+ files.push({
2113
+ path,
2114
+ content: () => content
2115
+ });
2116
+ entry.resume();
2117
+ });
2118
+ await pipeline$1(this.stream, parser);
2119
+ return files;
2120
+ }
2121
+ async archive() {
2122
+ if (!this.subPath) {
2123
+ this.onlyOnce();
2124
+ return this.stream;
2125
+ }
2126
+ const tmpDir = await this.dir();
2127
+ try {
2128
+ const data = await new Promise(async (resolve) => {
2129
+ await pipeline$1(
2130
+ tar__default.default.create({ cwd: tmpDir }, [""]),
2131
+ concatStream__default.default(resolve)
2132
+ );
2133
+ });
2134
+ return stream.Readable.from(data);
2135
+ } finally {
2136
+ await fs__default.default.remove(tmpDir);
2137
+ }
2138
+ }
2139
+ async dir(options) {
2140
+ this.onlyOnce();
2141
+ const dir = options?.targetDir ?? await fs__default.default.mkdtemp(platformPath__default.default.join(this.workDir, "backstage-"));
2142
+ let strip = this.subPath ? this.subPath.split("/").length : 1;
2143
+ if (!this.stripFirstDirectory) {
2144
+ strip--;
2145
+ }
2146
+ let filterError = void 0;
2147
+ await pipeline$1(
2148
+ this.stream,
2149
+ tar__default.default.extract({
2150
+ strip,
2151
+ cwd: dir,
2152
+ filter: (path, stat) => {
2153
+ if (filterError) {
2154
+ return false;
2155
+ }
2156
+ const relativePath = this.stripFirstDirectory ? stripFirstDirectoryFromPath(path) : path;
2157
+ if (this.subPath && !relativePath.startsWith(this.subPath)) {
2158
+ return false;
2159
+ }
2160
+ if (this.filter) {
2161
+ const innerPath = path.split("/").slice(strip).join("/");
2162
+ try {
2163
+ return this.filter(innerPath, { size: stat.size });
2164
+ } catch (error) {
2165
+ filterError = error;
2166
+ return false;
2167
+ }
2168
+ }
2169
+ return true;
2170
+ }
2171
+ })
2172
+ );
2173
+ if (filterError) {
2174
+ if (!options?.targetDir) {
2175
+ await fs__default.default.remove(dir).catch(() => {
2176
+ });
2177
+ }
2178
+ throw filterError;
2179
+ }
2180
+ return dir;
2181
+ }
2182
+ }
2183
+
2184
+ class ZipArchiveResponse {
2185
+ constructor(stream, subPath, workDir, etag, filter) {
2186
+ this.stream = stream;
2187
+ this.subPath = subPath;
2188
+ this.workDir = workDir;
2189
+ this.etag = etag;
2190
+ this.filter = filter;
2191
+ if (subPath) {
2192
+ if (!subPath.endsWith("/")) {
2193
+ this.subPath += "/";
2194
+ }
2195
+ if (subPath.startsWith("/")) {
2196
+ throw new TypeError(
2197
+ `ZipArchiveResponse subPath must not start with a /, got '${subPath}'`
2198
+ );
2199
+ }
2200
+ }
2202
+ }
2203
+ read = false;
2204
+ // Make sure the input stream is only read once
2205
+ onlyOnce() {
2206
+ if (this.read) {
2207
+ throw new Error("Response has already been read");
2208
+ }
2209
+ this.read = true;
2210
+ }
2211
+ // File path relative to the root extracted directory, or to a subdirectory if subPath is set.
2212
+ getInnerPath(path) {
2213
+ return path.slice(this.subPath.length);
2214
+ }
2215
+ shouldBeIncluded(entry) {
2216
+ if (this.subPath) {
2217
+ if (!entry.fileName.startsWith(this.subPath)) {
2218
+ return false;
2219
+ }
2220
+ }
2221
+ if (this.filter) {
2222
+ return this.filter(this.getInnerPath(entry.fileName), {
2223
+ size: entry.uncompressedSize
2224
+ });
2225
+ }
2226
+ return true;
2227
+ }
2228
+ async streamToTemporaryFile(stream) {
2229
+ const tmpDir = await fs__default.default.mkdtemp(
2230
+ platformPath__default.default.join(this.workDir, "backstage-tmp")
2231
+ );
2232
+ const tmpFile = platformPath__default.default.join(tmpDir, "tmp.zip");
2233
+ const writeStream = fs__default.default.createWriteStream(tmpFile);
2234
+ return new Promise((resolve, reject) => {
2235
+ writeStream.on("error", reject);
2236
+ writeStream.on("finish", () => {
2237
+ writeStream.end();
2238
+ resolve({
2239
+ fileName: tmpFile,
2240
+ cleanup: () => fs__default.default.rm(tmpDir, { recursive: true })
2241
+ });
2242
+ });
2243
+ stream.pipe(writeStream);
2244
+ });
2245
+ }
2246
+ forEveryZipEntry(zip, callback) {
2247
+ return new Promise((resolve, reject) => {
2248
+ yauzl__default.default.open(zip, { lazyEntries: true }, (err, zipfile) => {
2249
+ if (err || !zipfile) {
2250
+ reject(err || new Error(`Failed to open zip file ${zip}`));
2251
+ return;
2252
+ }
2253
+ zipfile.on("entry", async (entry) => {
2254
+ if (!entry.fileName.endsWith("/") && this.shouldBeIncluded(entry)) {
2255
+ zipfile.openReadStream(entry, async (openErr, readStream) => {
2256
+ if (openErr || !readStream) {
2257
+ reject(
2258
+ openErr || new Error(`Failed to open zip entry ${entry.fileName}`)
2259
+ );
2260
+ return;
2261
+ }
2262
+ await callback(entry, readStream);
2263
+ zipfile.readEntry();
2264
+ });
2265
+ } else {
2266
+ zipfile.readEntry();
2267
+ }
2268
+ });
2269
+ zipfile.once("end", () => resolve());
2270
+ zipfile.on("error", (e) => reject(e));
2271
+ zipfile.readEntry();
2272
+ });
2273
+ });
2274
+ }
2275
+ async files() {
2276
+ this.onlyOnce();
2277
+ const files = Array();
2278
+ const temporary = await this.streamToTemporaryFile(this.stream);
2279
+ await this.forEveryZipEntry(temporary.fileName, async (entry, content) => {
2280
+ files.push({
2281
+ path: this.getInnerPath(entry.fileName),
2282
+ content: async () => await streamToBuffer(content),
2283
+ lastModifiedAt: entry.lastModFileTime ? new Date(entry.lastModFileTime) : void 0
2284
+ });
2285
+ });
2286
+ await temporary.cleanup();
2287
+ return files;
2288
+ }
2289
+ async archive() {
2290
+ this.onlyOnce();
2291
+ if (!this.subPath) {
2292
+ return this.stream;
2293
+ }
2294
+ const archive = archiver__default.default("zip");
2295
+ const temporary = await this.streamToTemporaryFile(this.stream);
2296
+ await this.forEveryZipEntry(temporary.fileName, async (entry, content) => {
2297
+ archive.append(await streamToBuffer(content), {
2298
+ name: this.getInnerPath(entry.fileName)
2299
+ });
2300
+ });
2301
+ archive.finalize();
2302
+ await temporary.cleanup();
2303
+ return archive;
2304
+ }
2305
+ async dir(options) {
2306
+ this.onlyOnce();
2307
+ const dir = options?.targetDir ?? await fs__default.default.mkdtemp(platformPath__default.default.join(this.workDir, "backstage-"));
2308
+ const temporary = await this.streamToTemporaryFile(this.stream);
2309
+ await this.forEveryZipEntry(temporary.fileName, async (entry, content) => {
2310
+ const entryPath = this.getInnerPath(entry.fileName);
2311
+ const dirname = platformPath__default.default.dirname(entryPath);
2312
+ if (dirname) {
2313
+ await fs__default.default.mkdirp(backendPluginApi.resolveSafeChildPath(dir, dirname));
2314
+ }
2315
+ return new Promise(async (resolve, reject) => {
2316
+ const file = fs__default.default.createWriteStream(backendPluginApi.resolveSafeChildPath(dir, entryPath));
2317
+ file.on("finish", resolve);
2318
+ content.on("error", reject);
2319
+ content.pipe(file);
2320
+ });
2321
+ });
2322
+ await temporary.cleanup();
2323
+ return dir;
2324
+ }
2325
+ }
2326
+
2327
+ const pipeline = util.promisify(stream.pipeline);
2328
+ class ReadableArrayResponse {
2329
+ constructor(stream, workDir, etag) {
2330
+ this.stream = stream;
2331
+ this.workDir = workDir;
2332
+ this.etag = etag;
2334
+ }
2335
+ read = false;
2336
+ // Make sure the input stream is only read once
2337
+ onlyOnce() {
2338
+ if (this.read) {
2339
+ throw new Error("Response has already been read");
2340
+ }
2341
+ this.read = true;
2342
+ }
2343
+ async files() {
2344
+ this.onlyOnce();
2345
+ const files = Array();
2346
+ for (let i = 0; i < this.stream.length; i++) {
2347
+ if (!this.stream[i].path.endsWith("/")) {
2348
+ files.push({
2349
+ path: this.stream[i].path,
2350
+ content: () => getRawBody__default.default(this.stream[i].data),
2351
+ lastModifiedAt: this.stream[i]?.lastModifiedAt
2352
+ });
2353
+ }
2354
+ }
2355
+ return files;
2356
+ }
2357
+ async archive() {
2358
+ const tmpDir = await this.dir();
2359
+ try {
2360
+ const data = await new Promise(async (resolve) => {
2361
+ await pipeline(
2362
+ tar__default.default.create({ cwd: tmpDir }, [""]),
2363
+ concatStream__default.default(resolve)
2364
+ );
2365
+ });
2366
+ return stream.Readable.from(data);
2367
+ } finally {
2368
+ await fs__default.default.remove(tmpDir);
2369
+ }
2370
+ }
2371
+ async dir(options) {
2372
+ this.onlyOnce();
2373
+ const dir = options?.targetDir ?? await fs__default.default.mkdtemp(platformPath__default.default.join(this.workDir, "backstage-"));
2374
+ for (let i = 0; i < this.stream.length; i++) {
2375
+ if (!this.stream[i].path.endsWith("/")) {
2376
+ const filePath = platformPath__default.default.join(dir, this.stream[i].path);
2377
+ await fs__default.default.mkdir(platformPath.dirname(filePath), { recursive: true });
2378
+ await pipeline(this.stream[i].data, fs__default.default.createWriteStream(filePath));
2379
+ }
2380
+ }
2381
+ return dir;
2382
+ }
2383
+ }
2384
+
2385
+ class DefaultReadTreeResponseFactory {
2386
+ constructor(workDir) {
2387
+ this.workDir = workDir;
2388
+ }
2389
+ static create(options) {
2390
+ return new DefaultReadTreeResponseFactory(
2391
+ options.config.getOptionalString("backend.workingDirectory") ?? os__default.default.tmpdir()
2392
+ );
2393
+ }
2394
+ async fromTarArchive(options) {
2395
+ return new TarArchiveResponse(
2396
+ options.stream,
2397
+ options.subpath ?? "",
2398
+ this.workDir,
2399
+ options.etag,
2400
+ options.filter,
2401
+ options.stripFirstDirectory ?? true
2402
+ );
2403
+ }
2404
+ async fromZipArchive(options) {
2405
+ return new ZipArchiveResponse(
2406
+ options.stream,
2407
+ options.subpath ?? "",
2408
+ this.workDir,
2409
+ options.etag,
2410
+ options.filter
2411
+ );
2412
+ }
2413
+ async fromReadableArray(options) {
2414
+ return new ReadableArrayResponse(options, this.workDir, "");
2415
+ }
2416
+ }
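+ // Editor's sketch of the factory above (the tarball stream, subpath, and etag
+ // are hypothetical; config is any Backstage root config):
+ //
+ //   const factory = DefaultReadTreeResponseFactory.create({ config });
+ //   const res = await factory.fromTarArchive({ stream: tarball, subpath: 'docs', etag: 'abc123' });
+ //   const files = await res.files(); // [{ path, content: () => Promise<Buffer> }, ...]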
2417
+
2418
+ var name = "@backstage/backend-defaults";
2419
+ var version = "0.3.0-next.3";
2420
+ var description = "Backend defaults used by Backstage backend apps";
2421
+ var backstage = {
2422
+ role: "node-library"
2423
+ };
2424
+ var publishConfig = {
2425
+ access: "public"
2426
+ };
2427
+ var keywords = [
2428
+ "backstage"
2429
+ ];
2430
+ var homepage = "https://backstage.io";
2431
+ var repository = {
2432
+ type: "git",
2433
+ url: "https://github.com/backstage/backstage",
2434
+ directory: "packages/backend-defaults"
2435
+ };
2436
+ var license = "Apache-2.0";
2437
+ var exports$1 = {
2438
+ ".": "./src/index.ts",
2439
+ "./cache": "./src/entrypoints/cache/index.ts",
2440
+ "./database": "./src/entrypoints/database/index.ts",
2441
+ "./discovery": "./src/entrypoints/discovery/index.ts",
2442
+ "./lifecycle": "./src/entrypoints/lifecycle/index.ts",
2443
+ "./permissions": "./src/entrypoints/permissions/index.ts",
2444
+ "./rootConfig": "./src/entrypoints/rootConfig/index.ts",
2445
+ "./rootLifecycle": "./src/entrypoints/rootLifecycle/index.ts",
2446
+ "./scheduler": "./src/entrypoints/scheduler/index.ts",
2447
+ "./urlReader": "./src/entrypoints/urlReader/index.ts",
2448
+ "./package.json": "./package.json"
2449
+ };
2450
+ var main = "src/index.ts";
2451
+ var types = "src/index.ts";
2452
+ var typesVersions = {
2453
+ "*": {
2454
+ cache: [
2455
+ "src/entrypoints/cache/index.ts"
2456
+ ],
2457
+ database: [
2458
+ "src/entrypoints/database/index.ts"
2459
+ ],
2460
+ discovery: [
2461
+ "src/entrypoints/discovery/index.ts"
2462
+ ],
2463
+ lifecycle: [
2464
+ "src/entrypoints/lifecycle/index.ts"
2465
+ ],
2466
+ permissions: [
2467
+ "src/entrypoints/permissions/index.ts"
2468
+ ],
2469
+ rootConfig: [
2470
+ "src/entrypoints/rootConfig/index.ts"
2471
+ ],
2472
+ rootLifecycle: [
2473
+ "src/entrypoints/rootLifecycle/index.ts"
2474
+ ],
2475
+ scheduler: [
2476
+ "src/entrypoints/scheduler/index.ts"
2477
+ ],
2478
+ urlReader: [
2479
+ "src/entrypoints/urlReader/index.ts"
2480
+ ],
2481
+ "package.json": [
2482
+ "package.json"
2483
+ ]
2484
+ }
2485
+ };
2486
+ var files = [
2487
+ "config.d.ts",
2488
+ "dist",
2489
+ "migrations"
2490
+ ];
2491
+ var scripts = {
2492
+ build: "backstage-cli package build",
2493
+ clean: "backstage-cli package clean",
2494
+ lint: "backstage-cli package lint",
2495
+ prepack: "backstage-cli package prepack",
2496
+ postpack: "backstage-cli package postpack",
2497
+ start: "backstage-cli package start",
2498
+ test: "backstage-cli package test"
2499
+ };
2500
+ var dependencies = {
2501
+ "@aws-sdk/abort-controller": "^3.347.0",
2502
+ "@aws-sdk/client-codecommit": "^3.350.0",
2503
+ "@aws-sdk/client-s3": "^3.350.0",
2504
+ "@aws-sdk/credential-providers": "^3.350.0",
2505
+ "@aws-sdk/types": "^3.347.0",
2506
+ "@backstage/backend-app-api": "workspace:^",
2507
+ "@backstage/backend-common": "workspace:^",
2508
+ "@backstage/backend-dev-utils": "workspace:^",
2509
+ "@backstage/backend-plugin-api": "workspace:^",
2510
+ "@backstage/config": "workspace:^",
2511
+ "@backstage/config-loader": "workspace:^",
2512
+ "@backstage/errors": "workspace:^",
2513
+ "@backstage/integration": "workspace:^",
2514
+ "@backstage/integration-aws-node": "workspace:^",
2515
+ "@backstage/plugin-events-node": "workspace:^",
2516
+ "@backstage/plugin-permission-node": "workspace:^",
2517
+ "@backstage/types": "workspace:^",
2518
+ "@google-cloud/storage": "^7.0.0",
2519
+ "@keyv/memcache": "^1.3.5",
2520
+ "@keyv/redis": "^2.5.3",
2521
+ "@octokit/rest": "^19.0.3",
2522
+ "@opentelemetry/api": "^1.3.0",
2523
+ archiver: "^6.0.0",
2524
+ "base64-stream": "^1.0.0",
2525
+ "better-sqlite3": "^9.0.0",
2526
+ "concat-stream": "^2.0.0",
2527
+ cron: "^3.0.0",
2528
+ "fs-extra": "^11.2.0",
2529
+ "git-url-parse": "^14.0.0",
2530
+ "isomorphic-git": "^1.23.0",
2531
+ keyv: "^4.5.2",
2532
+ knex: "^3.0.0",
2533
+ lodash: "^4.17.21",
2534
+ luxon: "^3.0.0",
2535
+ minimatch: "^9.0.0",
2536
+ mysql2: "^3.0.0",
2537
+ "node-fetch": "^2.6.7",
2538
+ "p-limit": "^3.1.0",
2539
+ pg: "^8.11.3",
2540
+ "pg-connection-string": "^2.3.0",
2541
+ "raw-body": "^2.4.1",
2542
+ tar: "^6.1.12",
2543
+ uuid: "^9.0.0",
2544
+ yauzl: "^3.0.0",
2545
+ yn: "^4.0.0",
2546
+ zod: "^3.22.4"
2547
+ };
2548
+ var devDependencies = {
2549
+ "@aws-sdk/util-stream-node": "^3.350.0",
2550
+ "@backstage/backend-plugin-api": "workspace:^",
2551
+ "@backstage/backend-test-utils": "workspace:^",
2552
+ "@backstage/cli": "workspace:^",
2553
+ "aws-sdk-client-mock": "^4.0.0",
2554
+ msw: "^1.0.0",
2555
+ "wait-for-expect": "^3.0.2"
2556
+ };
2557
+ var configSchema = "config.d.ts";
2558
+ var packageinfo = {
2559
+ name: name,
2560
+ version: version,
2561
+ description: description,
2562
+ backstage: backstage,
2563
+ publishConfig: publishConfig,
2564
+ keywords: keywords,
2565
+ homepage: homepage,
2566
+ repository: repository,
2567
+ license: license,
2568
+ exports: exports$1,
2569
+ main: main,
2570
+ types: types,
2571
+ typesVersions: typesVersions,
2572
+ files: files,
2573
+ scripts: scripts,
2574
+ dependencies: dependencies,
2575
+ devDependencies: devDependencies,
2576
+ configSchema: configSchema
2577
+ };
2578
+
2579
+ const GOOGLE_GCS_HOST = "storage.cloud.google.com";
2580
+ const parseURL = (url) => {
2581
+ const { host, pathname } = new URL(url);
2582
+ if (host !== GOOGLE_GCS_HOST) {
2583
+ throw new Error(`not a valid GCS URL: ${url}`);
2584
+ }
2585
+ const [, bucket, ...key] = pathname.split("/");
2586
+ return {
2587
+ host,
2588
+ bucket,
2589
+ key: key.join("/")
2590
+ };
2591
+ };
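+ // Editor's note, e.g. (hypothetical bucket and object):
+ //   parseURL('https://storage.cloud.google.com/my-bucket/docs/catalog-info.yaml')
+ //   => { host: 'storage.cloud.google.com', bucket: 'my-bucket', key: 'docs/catalog-info.yaml' }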
2592
+ class GoogleGcsUrlReader {
2593
+ constructor(integration, storage) {
2594
+ this.integration = integration;
2595
+ this.storage = storage;
2596
+ }
2597
+ static factory = ({ config, logger }) => {
2598
+ if (!config.has("integrations.googleGcs")) {
2599
+ return [];
2600
+ }
2601
+ const gcsConfig = integration.readGoogleGcsIntegrationConfig(
2602
+ config.getConfig("integrations.googleGcs")
2603
+ );
2604
+ let storage;
2605
+ if (!gcsConfig.clientEmail || !gcsConfig.privateKey) {
2606
+ logger.info(
2607
+ "googleGcs credentials not found in config. Using default credentials provider."
2608
+ );
2609
+ storage = new GoogleCloud__namespace.Storage({
2610
+ userAgent: `backstage/backend-defaults.GoogleGcsUrlReader/${packageinfo.version}`
2611
+ });
2612
+ } else {
2613
+ storage = new GoogleCloud__namespace.Storage({
2614
+ credentials: {
2615
+ client_email: gcsConfig.clientEmail || void 0,
2616
+ private_key: gcsConfig.privateKey || void 0
2617
+ },
2618
+ userAgent: `backstage/backend-defaults.GoogleGcsUrlReader/${packageinfo.version}`
2619
+ });
2620
+ }
2621
+ const reader = new GoogleGcsUrlReader(gcsConfig, storage);
2622
+ const predicate = (url) => url.host === GOOGLE_GCS_HOST;
2623
+ return [{ reader, predicate }];
2624
+ };
2625
+ readStreamFromUrl(url) {
2626
+ const { bucket, key } = parseURL(url);
2627
+ return this.storage.bucket(bucket).file(key).createReadStream();
2628
+ }
2629
+ async read(url) {
2630
+ try {
2631
+ return await getRawBody__default.default(this.readStreamFromUrl(url));
2632
+ } catch (error) {
2633
+ throw new Error(`unable to read gcs file from ${url}, ${error}`);
2634
+ }
2635
+ }
2636
+ async readUrl(url, _options) {
2637
+ const stream = this.readStreamFromUrl(url);
2638
+ return ReadUrlResponseFactory.fromReadable(stream);
2639
+ }
2640
+ async readTree() {
2641
+ throw new Error("GcsUrlReader does not implement readTree");
2642
+ }
2643
+ async search(url) {
2644
+ const { bucket, key: pattern } = parseURL(url);
2645
+ if (!pattern.endsWith("*") || pattern.indexOf("*") !== pattern.length - 1) {
2646
+ throw new Error("GcsUrlReader only supports prefix-based searches");
2647
+ }
2648
+ const [files] = await this.storage.bucket(bucket).getFiles({
2649
+ autoPaginate: true,
2650
+ prefix: pattern.split("*").join("")
2651
+ });
2652
+ return {
2653
+ files: files.map((file) => {
2654
+ const fullUrl = ["https:/", GOOGLE_GCS_HOST, bucket, file.name].join(
2655
+ "/"
2656
+ );
2657
+ return {
2658
+ url: fullUrl,
2659
+ content: async () => {
2660
+ const readResponse = await this.readUrl(fullUrl);
2661
+ return readResponse.buffer();
2662
+ }
2663
+ };
2664
+ }),
2665
+ // TODO etag is not implemented yet.
2666
+ etag: "NOT/IMPLEMENTED"
2667
+ };
2668
+ }
2669
+ toString() {
2670
+ const key = this.integration.privateKey;
2671
+ return `googleGcs{host=${GOOGLE_GCS_HOST},authed=${Boolean(key)}}`;
2672
+ }
2673
+ }
2674
+
2675
+ function parseUrl(url, requireGitPath = false) {
2676
+ const parsedUrl = new URL(url);
2677
+ if (parsedUrl.pathname.includes("/files/edit/")) {
2678
+ throw new Error(
2679
+ "Please provide the view url to yaml file from CodeCommit, not the edit url"
2680
+ );
2681
+ }
2682
+ if (requireGitPath && !parsedUrl.pathname.includes("/browse/")) {
2683
+ throw new Error("Please provide full path to yaml file from CodeCommit");
2684
+ }
2685
+ const hostMatch = parsedUrl.host.match(
2686
+ /^([^\.]+)\.console\.aws\.amazon\.com$/
2687
+ );
2688
+ if (!hostMatch) {
2689
+ throw new Error(
2690
+ `Invalid AWS CodeCommit URL (unexpected host format): ${url}`
2691
+ );
2692
+ }
2693
+ const [, region] = hostMatch;
2694
+ const pathMatch = parsedUrl.pathname.match(
2695
+ /^\/codesuite\/codecommit\/repositories\/([^\/]+)\/browse\/((.*)\/)?--\/(.*)$/
2696
+ );
2697
+ if (!pathMatch) {
2698
+ if (!requireGitPath) {
2699
+ const pathname = parsedUrl.pathname.split("/--/")[0].replace("/codesuite/codecommit/repositories/", "");
2700
+ const [repositoryName2, commitSpecifier2] = pathname.split("/browse");
2701
+ return {
2702
+ region,
2703
+ repositoryName: repositoryName2.replace(/^\/|\/$/g, ""),
2704
+ path: "/",
2705
+ commitSpecifier: commitSpecifier2 === "" ? void 0 : commitSpecifier2?.replace(/^\/|\/$/g, "")
2706
+ };
2707
+ }
2708
+ throw new Error(
2709
+ `Invalid AWS CodeCommit URL (unexpected path format): ${url}`
2710
+ );
2711
+ }
2712
+ const [, repositoryName, , commitSpecifier, path] = pathMatch;
2713
+ return {
2714
+ region,
2715
+ repositoryName,
2716
+ path,
2717
+ // the commitSpecifier is passed to the AWS SDK, which does not allow empty strings, so an empty string is replaced with undefined
2718
+ commitSpecifier: commitSpecifier === "" ? void 0 : commitSpecifier
2719
+ };
2720
+ }
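+ // Editor's note, a hypothetical console URL and its parse per the logic above:
+ //   parseUrl('https://eu-west-1.console.aws.amazon.com/codesuite/codecommit' +
+ //            '/repositories/my-repo/browse/refs/heads/main/--/catalog-info.yaml')
+ //   => { region: 'eu-west-1', repositoryName: 'my-repo',
+ //        commitSpecifier: 'refs/heads/main', path: 'catalog-info.yaml' }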
2721
+ class AwsCodeCommitUrlReader {
2722
+ constructor(credsManager, integration, deps) {
2723
+ this.credsManager = credsManager;
2724
+ this.integration = integration;
2725
+ this.deps = deps;
2726
+ }
2727
+ static factory = ({ config, treeResponseFactory }) => {
2728
+ const integrations = integration.ScmIntegrations.fromConfig(config);
2729
+ const credsManager = integrationAwsNode.DefaultAwsCredentialsManager.fromConfig(config);
2730
+ return integrations.awsCodeCommit.list().map((integration) => {
2731
+ const reader = new AwsCodeCommitUrlReader(credsManager, integration, {
2732
+ treeResponseFactory
2733
+ });
2734
+ const predicate = (url) => {
2735
+ return url.host.endsWith(integration.config.host) && url.pathname.startsWith("/codesuite/codecommit");
2736
+ };
2737
+ return { reader, predicate };
2738
+ });
2739
+ };
2740
+ /**
2741
+ * If accessKeyId and secretAccessKey are missing, the standard credentials provider chain will be used:
2742
+ * https://docs.aws.amazon.com/AWSJavaSDK/latest/javadoc/com/amazonaws/auth/DefaultAWSCredentialsProviderChain.html
2743
+ */
2744
+ static buildStaticCredentials(accessKeyId, secretAccessKey) {
2745
+ return async () => {
2746
+ return {
2747
+ accessKeyId,
2748
+ secretAccessKey
2749
+ };
2750
+ };
2751
+ }
2752
+ static async buildCredentials(credsManager, region, integration) {
2753
+ if (!integration) {
2754
+ return (await credsManager.getCredentialProvider()).sdkCredentialProvider;
2755
+ }
2756
+ const accessKeyId = integration.config.accessKeyId;
2757
+ const secretAccessKey = integration.config.secretAccessKey;
2758
+ let explicitCredentials;
2759
+ if (accessKeyId && secretAccessKey) {
2760
+ explicitCredentials = AwsCodeCommitUrlReader.buildStaticCredentials(
2761
+ accessKeyId,
2762
+ secretAccessKey
2763
+ );
2764
+ } else {
2765
+ explicitCredentials = (await credsManager.getCredentialProvider()).sdkCredentialProvider;
2766
+ }
2767
+ const roleArn = integration.config.roleArn;
2768
+ if (roleArn) {
2769
+ return credentialProviders.fromTemporaryCredentials({
2770
+ masterCredentials: explicitCredentials,
2771
+ params: {
2772
+ RoleSessionName: "backstage-aws-code-commit-url-reader",
2773
+ RoleArn: roleArn,
2774
+ ExternalId: integration.config.externalId
2775
+ },
2776
+ clientConfig: { region }
2777
+ });
2778
+ }
2779
+ return explicitCredentials;
2780
+ }
2781
+ async buildCodeCommitClient(credsManager, region, integration) {
2782
+ const credentials = await AwsCodeCommitUrlReader.buildCredentials(
2783
+ credsManager,
2784
+ region,
2785
+ integration
2786
+ );
2787
+ const codeCommit = new clientCodecommit.CodeCommitClient({
2788
+ region,
2789
+ credentials
2790
+ });
2791
+ return codeCommit;
2792
+ }
2793
+ async readUrl(url, options) {
2794
+ try {
2795
+ const { path, repositoryName, region, commitSpecifier } = parseUrl(
2796
+ url,
2797
+ true
2798
+ );
2799
+ const codeCommitClient = await this.buildCodeCommitClient(
2800
+ this.credsManager,
2801
+ region,
2802
+ this.integration
2803
+ );
2804
+ const abortController$1 = new abortController.AbortController();
2805
+ const input = {
2806
+ repositoryName,
2807
+ commitSpecifier,
2808
+ filePath: path
2809
+ };
2810
+ options?.signal?.addEventListener("abort", () => abortController$1.abort());
2811
+ const getObjectCommand = new clientCodecommit.GetFileCommand(input);
2812
+ const response = await codeCommitClient.send(
2813
+ getObjectCommand,
2814
+ {
2815
+ abortSignal: abortController$1.signal
2816
+ }
2817
+ );
2818
+ if (options?.etag && options.etag === response.commitId) {
2819
+ throw new errors.NotModifiedError();
2820
+ }
2821
+ return ReadUrlResponseFactory.fromReadable(
2822
+ stream.Readable.from([response?.fileContent]),
2823
+ {
2824
+ etag: response.commitId
2825
+ }
2826
+ );
2827
+ } catch (e) {
2828
+ if (e.$metadata && e.$metadata.httpStatusCode === 304) {
2829
+ throw new errors.NotModifiedError();
2830
+ }
2831
+ if (e.name && e.name === "NotModifiedError") {
2832
+ throw new errors.NotModifiedError();
2833
+ }
2834
+ throw new errors.ForwardedError("Could not retrieve file from CodeCommit", e);
2835
+ }
2836
+ }
2837
+ async readTreePath(codeCommitClient, abortSignal, path, repositoryName, commitSpecifier, etag) {
2838
+ const getFolderCommand = new clientCodecommit.GetFolderCommand({
2839
+ folderPath: path,
2840
+ repositoryName,
2841
+ commitSpecifier
2842
+ });
2843
+ const response = await codeCommitClient.send(getFolderCommand, {
2844
+ abortSignal
2845
+ });
2846
+ if (etag && etag === response.commitId) {
2847
+ throw new errors.NotModifiedError();
2848
+ }
2849
+ const output = [];
2850
+ if (response.files) {
2851
+ response.files.forEach((file) => {
2852
+ if (file.absolutePath) {
2853
+ output.push(file.absolutePath);
2854
+ }
2855
+ });
2856
+ }
2857
+ if (!response.subFolders) {
2858
+ return output;
2859
+ }
2860
+ for (const subFolder of response.subFolders) {
2861
+ if (subFolder.absolutePath) {
2862
+ output.push(
2863
+ ...await this.readTreePath(
2864
+ codeCommitClient,
2865
+ abortSignal,
2866
+ subFolder.absolutePath,
2867
+ repositoryName,
2868
+ commitSpecifier,
2869
+ etag
2870
+ )
2871
+ );
2872
+ }
2873
+ }
2874
+ return output;
2875
+ }
2876
+ async readTree(url, options) {
2877
+ try {
2878
+ const { path, repositoryName, region, commitSpecifier } = parseUrl(url);
2879
+ const codeCommitClient = await this.buildCodeCommitClient(
2880
+ this.credsManager,
2881
+ region,
2882
+ this.integration
2883
+ );
2884
+ const abortController$1 = new abortController.AbortController();
2885
+ options?.signal?.addEventListener("abort", () => abortController$1.abort());
2886
+ const allFiles = await this.readTreePath(
2887
+ codeCommitClient,
2888
+ abortController$1.signal,
2889
+ path,
2890
+ repositoryName,
2891
+ commitSpecifier,
2892
+ options?.etag
2893
+ );
2894
+ const responses = [];
2895
+ for (let i = 0; i < allFiles.length; i++) {
2896
+ const getFileCommand = new clientCodecommit.GetFileCommand({
2897
+ repositoryName,
2898
+ filePath: String(allFiles[i]),
2899
+ commitSpecifier
2900
+ });
2901
+ const response = await codeCommitClient.send(getFileCommand);
2902
+ const objectData = await stream.Readable.from([response?.fileContent]);
2903
+ responses.push({
2904
+ data: objectData,
2905
+ path: posix.relative(
2906
+ path.startsWith("/") ? path : `/${path}`,
2907
+ allFiles[i].startsWith("/") ? allFiles[i] : `/${allFiles[i]}`
2908
+ )
2909
+ });
2910
+ }
2911
+ return await this.deps.treeResponseFactory.fromReadableArray(responses);
2912
+ } catch (e) {
2913
+ if (e.name && e.name === "NotModifiedError") {
2914
+ throw new errors.NotModifiedError();
2915
+ }
2916
+ throw new errors.ForwardedError(
2917
+ "Could not retrieve file tree from CodeCommit",
2918
+ e
2919
+ );
2920
+ }
2921
+ }
2922
+ async search() {
2923
+ throw new Error("AwsCodeCommitReader does not implement search");
2924
+ }
2925
+ toString() {
2926
+ const secretAccessKey = this.integration.config.secretAccessKey;
2927
+ return `awsCodeCommit{host=${this.integration.config.host},authed=${Boolean(
2928
+ secretAccessKey
2929
+ )}}`;
2930
+ }
2931
+ }
2932
+
2933
+ class UrlReaders {
2934
+ /**
2935
+ * Creates a custom {@link @backstage/backend-plugin-api#UrlReaderService} wrapper for your own set of factories.
2936
+ */
2937
+ static create(options) {
2938
+ const { logger, config, factories } = options;
2939
+ const mux = new UrlReaderPredicateMux();
2940
+ const treeResponseFactory = DefaultReadTreeResponseFactory.create({
2941
+ config
2942
+ });
2943
+ for (const factory of factories ?? []) {
2944
+ const tuples = factory({ config, logger, treeResponseFactory });
2945
+ for (const tuple of tuples) {
2946
+ mux.register(tuple);
2947
+ }
2948
+ }
2949
+ return mux;
2950
+ }
2951
+ /**
2952
+ * Creates a {@link @backstage/backend-plugin-api#UrlReaderService} wrapper that includes all the default factories
2953
+ * from this package.
2954
+ *
2955
+ * Any additional factories passed will be loaded before the default ones.
2956
+ */
2957
+ static default(options) {
2958
+ const { logger, config, factories = [] } = options;
2959
+ return UrlReaders.create({
2960
+ logger,
2961
+ config,
2962
+ factories: factories.concat([
2963
+ AzureUrlReader.factory,
2964
+ BitbucketCloudUrlReader.factory,
2965
+ BitbucketServerUrlReader.factory,
2966
+ BitbucketUrlReader.factory,
2967
+ GerritUrlReader.factory,
2968
+ GithubUrlReader.factory,
2969
+ GiteaUrlReader.factory,
2970
+ GitlabUrlReader.factory,
2971
+ GoogleGcsUrlReader.factory,
2972
+ HarnessUrlReader.factory,
2973
+ AwsS3UrlReader.factory,
2974
+ AwsCodeCommitUrlReader.factory,
2975
+ FetchUrlReader.factory
2976
+ ])
2977
+ });
2978
+ }
2979
+ }
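+ // Editor's sketch of manual wiring (logger and config come from the caller;
+ // the URL is hypothetical and must match one of the registered predicates):
+ //
+ //   const reader = UrlReaders.default({ logger, config });
+ //   const response = await reader.readUrl('https://github.com/org/repo/blob/main/catalog-info.yaml');
+ //   const contents = await response.buffer();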
5
2980
 
6
2981
  const urlReaderServiceFactory = backendPluginApi.createServiceFactory({
7
2982
  service: backendPluginApi.coreServices.urlReader,
@@ -10,12 +2985,25 @@ const urlReaderServiceFactory = backendPluginApi.createServiceFactory({
10
2985
  logger: backendPluginApi.coreServices.logger
11
2986
  },
12
2987
  async factory({ config, logger }) {
13
- return backendCommon.UrlReaders.default({
2988
+ return UrlReaders.default({
14
2989
  config,
15
2990
  logger
16
2991
  });
17
2992
  }
18
2993
  });
19
2994
 
2995
+ exports.AwsS3UrlReader = AwsS3UrlReader;
2996
+ exports.AzureUrlReader = AzureUrlReader;
2997
+ exports.BitbucketCloudUrlReader = BitbucketCloudUrlReader;
2998
+ exports.BitbucketServerUrlReader = BitbucketServerUrlReader;
2999
+ exports.BitbucketUrlReader = BitbucketUrlReader;
3000
+ exports.FetchUrlReader = FetchUrlReader;
3001
+ exports.GerritUrlReader = GerritUrlReader;
3002
+ exports.GiteaUrlReader = GiteaUrlReader;
3003
+ exports.GithubUrlReader = GithubUrlReader;
3004
+ exports.GitlabUrlReader = GitlabUrlReader;
3005
+ exports.HarnessUrlReader = HarnessUrlReader;
3006
+ exports.ReadUrlResponseFactory = ReadUrlResponseFactory;
3007
+ exports.UrlReaders = UrlReaders;
20
3008
  exports.urlReaderServiceFactory = urlReaderServiceFactory;
21
3009
  //# sourceMappingURL=urlReader.cjs.js.map