@backstage/backend-defaults 0.5.1-next.0 → 0.5.1-next.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +46 -0
- package/auth/package.json +1 -1
- package/cache/package.json +1 -1
- package/database/package.json +1 -1
- package/discovery/package.json +1 -1
- package/dist/CreateBackend.cjs.js +49 -0
- package/dist/CreateBackend.cjs.js.map +1 -0
- package/dist/PackageDiscoveryService.cjs.js +109 -0
- package/dist/PackageDiscoveryService.cjs.js.map +1 -0
- package/dist/auth.cjs.js +2 -996
- package/dist/auth.cjs.js.map +1 -1
- package/dist/cache.cjs.js +4 -204
- package/dist/cache.cjs.js.map +1 -1
- package/dist/database.cjs.js +4 -957
- package/dist/database.cjs.js.map +1 -1
- package/dist/database.d.ts +4 -1
- package/dist/discovery.cjs.js +4 -92
- package/dist/discovery.cjs.js.map +1 -1
- package/dist/discoveryFeatureLoader.cjs.js +19 -0
- package/dist/discoveryFeatureLoader.cjs.js.map +1 -0
- package/dist/entrypoints/auth/DefaultAuthService.cjs.js +130 -0
- package/dist/entrypoints/auth/DefaultAuthService.cjs.js.map +1 -0
- package/dist/entrypoints/auth/JwksClient.cjs.js +49 -0
- package/dist/entrypoints/auth/JwksClient.cjs.js.map +1 -0
- package/dist/entrypoints/auth/authServiceFactory.cjs.js +57 -0
- package/dist/entrypoints/auth/authServiceFactory.cjs.js.map +1 -0
- package/dist/entrypoints/auth/external/ExternalTokenHandler.cjs.js +78 -0
- package/dist/entrypoints/auth/external/ExternalTokenHandler.cjs.js.map +1 -0
- package/dist/entrypoints/auth/external/helpers.cjs.js +92 -0
- package/dist/entrypoints/auth/external/helpers.cjs.js.map +1 -0
- package/dist/entrypoints/auth/external/jwks.cjs.js +63 -0
- package/dist/entrypoints/auth/external/jwks.cjs.js.map +1 -0
- package/dist/entrypoints/auth/external/legacy.cjs.js +73 -0
- package/dist/entrypoints/auth/external/legacy.cjs.js.map +1 -0
- package/dist/entrypoints/auth/external/static.cjs.js +33 -0
- package/dist/entrypoints/auth/external/static.cjs.js.map +1 -0
- package/dist/{cjs/helpers-D2f1CG0o.cjs.js → entrypoints/auth/helpers.cjs.js} +1 -1
- package/dist/entrypoints/auth/helpers.cjs.js.map +1 -0
- package/dist/entrypoints/auth/plugin/PluginTokenHandler.cjs.js +147 -0
- package/dist/entrypoints/auth/plugin/PluginTokenHandler.cjs.js.map +1 -0
- package/dist/entrypoints/auth/plugin/keys/DatabaseKeyStore.cjs.js +73 -0
- package/dist/entrypoints/auth/plugin/keys/DatabaseKeyStore.cjs.js.map +1 -0
- package/dist/entrypoints/auth/plugin/keys/DatabasePluginKeySource.cjs.js +75 -0
- package/dist/entrypoints/auth/plugin/keys/DatabasePluginKeySource.cjs.js.map +1 -0
- package/dist/entrypoints/auth/plugin/keys/StaticConfigPluginKeySource.cjs.js +91 -0
- package/dist/entrypoints/auth/plugin/keys/StaticConfigPluginKeySource.cjs.js.map +1 -0
- package/dist/entrypoints/auth/plugin/keys/createPluginKeySource.cjs.js +29 -0
- package/dist/entrypoints/auth/plugin/keys/createPluginKeySource.cjs.js.map +1 -0
- package/dist/entrypoints/auth/user/UserTokenHandler.cjs.js +110 -0
- package/dist/entrypoints/auth/user/UserTokenHandler.cjs.js.map +1 -0
- package/dist/entrypoints/cache/CacheClient.cjs.js +50 -0
- package/dist/entrypoints/cache/CacheClient.cjs.js.map +1 -0
- package/dist/entrypoints/cache/CacheManager.cjs.js +147 -0
- package/dist/entrypoints/cache/CacheManager.cjs.js.map +1 -0
- package/dist/entrypoints/cache/cacheServiceFactory.cjs.js +22 -0
- package/dist/entrypoints/cache/cacheServiceFactory.cjs.js.map +1 -0
- package/dist/entrypoints/cache/types.cjs.js +10 -0
- package/dist/entrypoints/cache/types.cjs.js.map +1 -0
- package/dist/entrypoints/database/DatabaseManager.cjs.js +173 -0
- package/dist/entrypoints/database/DatabaseManager.cjs.js.map +1 -0
- package/dist/entrypoints/database/connectors/defaultNameOverride.cjs.js +14 -0
- package/dist/entrypoints/database/connectors/defaultNameOverride.cjs.js.map +1 -0
- package/dist/entrypoints/database/connectors/defaultSchemaOverride.cjs.js +12 -0
- package/dist/entrypoints/database/connectors/defaultSchemaOverride.cjs.js.map +1 -0
- package/dist/entrypoints/database/connectors/mergeDatabaseConfig.cjs.js +10 -0
- package/dist/entrypoints/database/connectors/mergeDatabaseConfig.cjs.js.map +1 -0
- package/dist/entrypoints/database/connectors/mysql.cjs.js +278 -0
- package/dist/entrypoints/database/connectors/mysql.cjs.js.map +1 -0
- package/dist/entrypoints/database/connectors/postgres.cjs.js +304 -0
- package/dist/entrypoints/database/connectors/postgres.cjs.js.map +1 -0
- package/dist/entrypoints/database/connectors/sqlite3.cjs.js +251 -0
- package/dist/entrypoints/database/connectors/sqlite3.cjs.js.map +1 -0
- package/dist/entrypoints/database/databaseServiceFactory.cjs.js +36 -0
- package/dist/entrypoints/database/databaseServiceFactory.cjs.js.map +1 -0
- package/dist/entrypoints/discovery/HostDiscovery.cjs.js +86 -0
- package/dist/entrypoints/discovery/HostDiscovery.cjs.js.map +1 -0
- package/dist/entrypoints/discovery/discoveryServiceFactory.cjs.js +17 -0
- package/dist/entrypoints/discovery/discoveryServiceFactory.cjs.js.map +1 -0
- package/dist/entrypoints/httpAuth/httpAuthServiceFactory.cjs.js +192 -0
- package/dist/entrypoints/httpAuth/httpAuthServiceFactory.cjs.js.map +1 -0
- package/dist/entrypoints/httpRouter/createAuthIntegrationRouter.cjs.js +19 -0
- package/dist/entrypoints/httpRouter/createAuthIntegrationRouter.cjs.js.map +1 -0
- package/dist/entrypoints/httpRouter/createCookieAuthRefreshMiddleware.cjs.js +26 -0
- package/dist/entrypoints/httpRouter/createCookieAuthRefreshMiddleware.cjs.js.map +1 -0
- package/dist/entrypoints/httpRouter/createCredentialsBarrier.cjs.js +63 -0
- package/dist/entrypoints/httpRouter/createCredentialsBarrier.cjs.js.map +1 -0
- package/dist/entrypoints/httpRouter/createLifecycleMiddleware.cjs.js +52 -0
- package/dist/entrypoints/httpRouter/createLifecycleMiddleware.cjs.js.map +1 -0
- package/dist/entrypoints/httpRouter/httpRouterServiceFactory.cjs.js +48 -0
- package/dist/entrypoints/httpRouter/httpRouterServiceFactory.cjs.js.map +1 -0
- package/dist/entrypoints/lifecycle/lifecycleServiceFactory.cjs.js +88 -0
- package/dist/entrypoints/lifecycle/lifecycleServiceFactory.cjs.js.map +1 -0
- package/dist/entrypoints/logger/loggerServiceFactory.cjs.js +17 -0
- package/dist/entrypoints/logger/loggerServiceFactory.cjs.js.map +1 -0
- package/dist/entrypoints/permissions/permissionsServiceFactory.cjs.js +22 -0
- package/dist/entrypoints/permissions/permissionsServiceFactory.cjs.js.map +1 -0
- package/dist/{cjs/createConfigSecretEnumerator-DShyoWWL.cjs.js → entrypoints/rootConfig/createConfigSecretEnumerator.cjs.js} +1 -1
- package/dist/entrypoints/rootConfig/createConfigSecretEnumerator.cjs.js.map +1 -0
- package/dist/entrypoints/rootConfig/rootConfigServiceFactory.cjs.js +26 -0
- package/dist/entrypoints/rootConfig/rootConfigServiceFactory.cjs.js.map +1 -0
- package/dist/entrypoints/rootHealth/rootHealthServiceFactory.cjs.js +41 -0
- package/dist/entrypoints/rootHealth/rootHealthServiceFactory.cjs.js.map +1 -0
- package/dist/entrypoints/rootHttpRouter/DefaultRootHttpRouter.cjs.js +77 -0
- package/dist/entrypoints/rootHttpRouter/DefaultRootHttpRouter.cjs.js.map +1 -0
- package/dist/entrypoints/rootHttpRouter/createHealthRouter.cjs.js +29 -0
- package/dist/entrypoints/rootHttpRouter/createHealthRouter.cjs.js.map +1 -0
- package/dist/entrypoints/rootHttpRouter/http/MiddlewareFactory.cjs.js +187 -0
- package/dist/entrypoints/rootHttpRouter/http/MiddlewareFactory.cjs.js.map +1 -0
- package/dist/entrypoints/rootHttpRouter/http/applyInternalErrorFilter.cjs.js +28 -0
- package/dist/entrypoints/rootHttpRouter/http/applyInternalErrorFilter.cjs.js.map +1 -0
- package/dist/{cjs/config-BDOwXIyo.cjs.js → entrypoints/rootHttpRouter/http/config.cjs.js} +1 -1
- package/dist/entrypoints/rootHttpRouter/http/config.cjs.js.map +1 -0
- package/dist/entrypoints/rootHttpRouter/http/createHttpServer.cjs.js +88 -0
- package/dist/entrypoints/rootHttpRouter/http/createHttpServer.cjs.js.map +1 -0
- package/dist/entrypoints/rootHttpRouter/http/getGeneratedCertificate.cjs.js +130 -0
- package/dist/entrypoints/rootHttpRouter/http/getGeneratedCertificate.cjs.js.map +1 -0
- package/dist/entrypoints/rootHttpRouter/http/readCorsOptions.cjs.js +51 -0
- package/dist/entrypoints/rootHttpRouter/http/readCorsOptions.cjs.js.map +1 -0
- package/dist/entrypoints/rootHttpRouter/http/readHelmetOptions.cjs.js +62 -0
- package/dist/entrypoints/rootHttpRouter/http/readHelmetOptions.cjs.js.map +1 -0
- package/dist/entrypoints/rootHttpRouter/rootHttpRouterServiceFactory.cjs.js +73 -0
- package/dist/entrypoints/rootHttpRouter/rootHttpRouterServiceFactory.cjs.js.map +1 -0
- package/dist/entrypoints/rootLifecycle/rootLifecycleServiceFactory.cjs.js +76 -0
- package/dist/entrypoints/rootLifecycle/rootLifecycleServiceFactory.cjs.js.map +1 -0
- package/dist/entrypoints/rootLogger/WinstonLogger.cjs.js +114 -0
- package/dist/entrypoints/rootLogger/WinstonLogger.cjs.js.map +1 -0
- package/dist/entrypoints/rootLogger/rootLoggerServiceFactory.cjs.js +30 -0
- package/dist/entrypoints/rootLogger/rootLoggerServiceFactory.cjs.js.map +1 -0
- package/dist/entrypoints/scheduler/database/migrateBackendTasks.cjs.js +18 -0
- package/dist/entrypoints/scheduler/database/migrateBackendTasks.cjs.js.map +1 -0
- package/dist/entrypoints/scheduler/database/tables.cjs.js +8 -0
- package/dist/entrypoints/scheduler/database/tables.cjs.js.map +1 -0
- package/dist/entrypoints/scheduler/lib/DefaultSchedulerService.cjs.js +37 -0
- package/dist/entrypoints/scheduler/lib/DefaultSchedulerService.cjs.js.map +1 -0
- package/dist/entrypoints/scheduler/lib/LocalTaskWorker.cjs.js +105 -0
- package/dist/entrypoints/scheduler/lib/LocalTaskWorker.cjs.js.map +1 -0
- package/dist/entrypoints/scheduler/lib/PluginTaskSchedulerImpl.cjs.js +138 -0
- package/dist/entrypoints/scheduler/lib/PluginTaskSchedulerImpl.cjs.js.map +1 -0
- package/dist/entrypoints/scheduler/lib/PluginTaskSchedulerJanitor.cjs.js +59 -0
- package/dist/entrypoints/scheduler/lib/PluginTaskSchedulerJanitor.cjs.js.map +1 -0
- package/dist/entrypoints/scheduler/lib/TaskWorker.cjs.js +275 -0
- package/dist/entrypoints/scheduler/lib/TaskWorker.cjs.js.map +1 -0
- package/dist/entrypoints/scheduler/lib/types.cjs.js +60 -0
- package/dist/entrypoints/scheduler/lib/types.cjs.js.map +1 -0
- package/dist/entrypoints/scheduler/lib/util.cjs.js +66 -0
- package/dist/entrypoints/scheduler/lib/util.cjs.js.map +1 -0
- package/dist/entrypoints/scheduler/schedulerServiceFactory.cjs.js +19 -0
- package/dist/entrypoints/scheduler/schedulerServiceFactory.cjs.js.map +1 -0
- package/dist/entrypoints/urlReader/lib/AwsCodeCommitUrlReader.cjs.js +274 -0
- package/dist/entrypoints/urlReader/lib/AwsCodeCommitUrlReader.cjs.js.map +1 -0
- package/dist/entrypoints/urlReader/lib/AwsS3UrlReader.cjs.js +261 -0
- package/dist/entrypoints/urlReader/lib/AwsS3UrlReader.cjs.js.map +1 -0
- package/dist/entrypoints/urlReader/lib/AzureUrlReader.cjs.js +148 -0
- package/dist/entrypoints/urlReader/lib/AzureUrlReader.cjs.js.map +1 -0
- package/dist/entrypoints/urlReader/lib/BitbucketCloudUrlReader.cjs.js +174 -0
- package/dist/entrypoints/urlReader/lib/BitbucketCloudUrlReader.cjs.js.map +1 -0
- package/dist/entrypoints/urlReader/lib/BitbucketServerUrlReader.cjs.js +170 -0
- package/dist/entrypoints/urlReader/lib/BitbucketServerUrlReader.cjs.js.map +1 -0
- package/dist/entrypoints/urlReader/lib/BitbucketUrlReader.cjs.js +182 -0
- package/dist/entrypoints/urlReader/lib/BitbucketUrlReader.cjs.js.map +1 -0
- package/dist/entrypoints/urlReader/lib/FetchUrlReader.cjs.js +132 -0
- package/dist/entrypoints/urlReader/lib/FetchUrlReader.cjs.js.map +1 -0
- package/dist/entrypoints/urlReader/lib/GerritUrlReader.cjs.js +147 -0
- package/dist/entrypoints/urlReader/lib/GerritUrlReader.cjs.js.map +1 -0
- package/dist/entrypoints/urlReader/lib/GiteaUrlReader.cjs.js +122 -0
- package/dist/entrypoints/urlReader/lib/GiteaUrlReader.cjs.js.map +1 -0
- package/dist/entrypoints/urlReader/lib/GithubUrlReader.cjs.js +226 -0
- package/dist/entrypoints/urlReader/lib/GithubUrlReader.cjs.js.map +1 -0
- package/dist/entrypoints/urlReader/lib/GitlabUrlReader.cjs.js +277 -0
- package/dist/entrypoints/urlReader/lib/GitlabUrlReader.cjs.js.map +1 -0
- package/dist/entrypoints/urlReader/lib/GoogleGcsUrlReader.cjs.js +129 -0
- package/dist/entrypoints/urlReader/lib/GoogleGcsUrlReader.cjs.js.map +1 -0
- package/dist/entrypoints/urlReader/lib/HarnessUrlReader.cjs.js +120 -0
- package/dist/entrypoints/urlReader/lib/HarnessUrlReader.cjs.js.map +1 -0
- package/dist/entrypoints/urlReader/lib/ReadUrlResponseFactory.cjs.js +49 -0
- package/dist/entrypoints/urlReader/lib/ReadUrlResponseFactory.cjs.js.map +1 -0
- package/dist/entrypoints/urlReader/lib/UrlReaderPredicateMux.cjs.js +46 -0
- package/dist/entrypoints/urlReader/lib/UrlReaderPredicateMux.cjs.js.map +1 -0
- package/dist/entrypoints/urlReader/lib/UrlReaders.cjs.js +68 -0
- package/dist/entrypoints/urlReader/lib/UrlReaders.cjs.js.map +1 -0
- package/dist/entrypoints/urlReader/lib/tree/ReadTreeResponseFactory.cjs.js +46 -0
- package/dist/entrypoints/urlReader/lib/tree/ReadTreeResponseFactory.cjs.js.map +1 -0
- package/dist/entrypoints/urlReader/lib/tree/ReadableArrayResponse.cjs.js +78 -0
- package/dist/entrypoints/urlReader/lib/tree/ReadableArrayResponse.cjs.js.map +1 -0
- package/dist/entrypoints/urlReader/lib/tree/TarArchiveResponse.cjs.js +147 -0
- package/dist/entrypoints/urlReader/lib/tree/TarArchiveResponse.cjs.js.map +1 -0
- package/dist/entrypoints/urlReader/lib/tree/ZipArchiveResponse.cjs.js +161 -0
- package/dist/entrypoints/urlReader/lib/tree/ZipArchiveResponse.cjs.js.map +1 -0
- package/dist/entrypoints/urlReader/lib/tree/util.cjs.js +28 -0
- package/dist/entrypoints/urlReader/lib/tree/util.cjs.js.map +1 -0
- package/dist/entrypoints/urlReader/lib/util.cjs.js +11 -0
- package/dist/entrypoints/urlReader/lib/util.cjs.js.map +1 -0
- package/dist/entrypoints/urlReader/urlReaderServiceFactory.cjs.js +29 -0
- package/dist/entrypoints/urlReader/urlReaderServiceFactory.cjs.js.map +1 -0
- package/dist/entrypoints/userInfo/DefaultUserInfoService.cjs.js +59 -0
- package/dist/entrypoints/userInfo/DefaultUserInfoService.cjs.js.map +1 -0
- package/dist/entrypoints/userInfo/userInfoServiceFactory.cjs.js +17 -0
- package/dist/entrypoints/userInfo/userInfoServiceFactory.cjs.js.map +1 -0
- package/dist/httpAuth.cjs.js +3 -187
- package/dist/httpAuth.cjs.js.map +1 -1
- package/dist/httpRouter.cjs.js +2 -166
- package/dist/httpRouter.cjs.js.map +1 -1
- package/dist/index.cjs.js +4 -160
- package/dist/index.cjs.js.map +1 -1
- package/dist/lib/escapeRegExp.cjs.js +8 -0
- package/dist/lib/escapeRegExp.cjs.js.map +1 -0
- package/dist/lifecycle.cjs.js +3 -58
- package/dist/lifecycle.cjs.js.map +1 -1
- package/dist/logger.cjs.js +3 -12
- package/dist/logger.cjs.js.map +1 -1
- package/dist/package.json.cjs.js +252 -0
- package/dist/package.json.cjs.js.map +1 -0
- package/dist/permissions.cjs.js +3 -17
- package/dist/permissions.cjs.js.map +1 -1
- package/dist/rootConfig.cjs.js +4 -22
- package/dist/rootConfig.cjs.js.map +1 -1
- package/dist/rootHealth.cjs.js +3 -35
- package/dist/rootHealth.cjs.js.map +1 -1
- package/dist/rootHttpRouter.cjs.js +15 -651
- package/dist/rootHttpRouter.cjs.js.map +1 -1
- package/dist/rootLifecycle.cjs.js +3 -70
- package/dist/rootLifecycle.cjs.js.map +1 -1
- package/dist/rootLogger.cjs.js +4 -137
- package/dist/rootLogger.cjs.js.map +1 -1
- package/dist/scheduler.cjs.js +4 -693
- package/dist/scheduler.cjs.js.map +1 -1
- package/dist/scheduler.d.ts +2 -1
- package/dist/urlReader.cjs.js +32 -2962
- package/dist/urlReader.cjs.js.map +1 -1
- package/dist/userInfo.cjs.js +2 -64
- package/dist/userInfo.cjs.js.map +1 -1
- package/httpAuth/package.json +1 -1
- package/httpRouter/package.json +1 -1
- package/lifecycle/package.json +1 -1
- package/logger/package.json +1 -1
- package/package.json +20 -20
- package/permissions/package.json +1 -1
- package/rootConfig/package.json +1 -1
- package/rootHealth/package.json +1 -1
- package/rootHttpRouter/package.json +1 -1
- package/rootLifecycle/package.json +1 -1
- package/rootLogger/package.json +1 -1
- package/scheduler/package.json +1 -1
- package/urlReader/package.json +1 -1
- package/userInfo/package.json +1 -1
- package/dist/cjs/config-BDOwXIyo.cjs.js.map +0 -1
- package/dist/cjs/createConfigSecretEnumerator-DShyoWWL.cjs.js.map +0 -1
- package/dist/cjs/helpers-D2f1CG0o.cjs.js.map +0 -1
package/dist/urlReader.cjs.js
CHANGED
@@ -1,2965 +1,35 @@
  'use strict';
  
- var
- var
- var
- var
- var
- var
- var
- var
- var
- var
- var
- var
- var
- var
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-     Object.keys(e).forEach(function (k) {
-       if (k !== 'default') {
-         var d = Object.getOwnPropertyDescriptor(e, k);
-         Object.defineProperty(n, k, d.get ? d : {
-           enumerable: true,
-           get: function () { return e[k]; }
-         });
-       }
-     });
-   }
-   n.default = e;
-   return Object.freeze(n);
- }
-
- var fetch__default = /*#__PURE__*/_interopDefaultCompat(fetch);
- var getRawBody__default = /*#__PURE__*/_interopDefaultCompat(getRawBody);
- var parseGitUrl__default = /*#__PURE__*/_interopDefaultCompat(parseGitUrl);
- var platformPath__default = /*#__PURE__*/_interopDefaultCompat(platformPath);
- var os__default = /*#__PURE__*/_interopDefaultCompat(os);
- var concatStream__default = /*#__PURE__*/_interopDefaultCompat(concatStream);
- var fs__default = /*#__PURE__*/_interopDefaultCompat(fs);
- var tar__default = /*#__PURE__*/_interopDefaultCompat(tar);
- var archiver__default = /*#__PURE__*/_interopDefaultCompat(archiver);
- var yauzl__default = /*#__PURE__*/_interopDefaultCompat(yauzl);
- var GoogleCloud__namespace = /*#__PURE__*/_interopNamespaceCompat(GoogleCloud);
-
- class ReadUrlResponseFactory {
-   /**
-    * Resolves a ReadUrlResponse from a Readable stream.
-    */
-   static async fromReadable(stream, options) {
-     let buffer;
-     const conflictError = new errors.ConflictError(
-       "Cannot use buffer() and stream() from the same ReadUrlResponse"
-     );
-     let hasCalledStream = false;
-     let hasCalledBuffer = false;
-     return {
-       buffer: () => {
-         hasCalledBuffer = true;
-         if (hasCalledStream) throw conflictError;
-         if (buffer) return buffer;
-         buffer = getRawBody__default.default(stream);
-         return buffer;
-       },
-       stream: () => {
-         hasCalledStream = true;
-         if (hasCalledBuffer) throw conflictError;
-         return stream;
-       },
-       etag: options?.etag,
-       lastModifiedAt: options?.lastModifiedAt
-     };
-   }
-   /**
-    * Resolves a ReadUrlResponse from an old-style NodeJS.ReadableStream.
-    */
-   static async fromNodeJSReadable(oldStyleStream, options) {
-     const readable = stream.Readable.from(oldStyleStream);
-     return ReadUrlResponseFactory.fromReadable(readable, options);
-   }
- }
-
- class AzureUrlReader {
-   constructor(integration, deps) {
-     this.integration = integration;
-     this.deps = deps;
-   }
-   static factory = ({ config, treeResponseFactory }) => {
-     const integrations = integration.ScmIntegrations.fromConfig(config);
-     const credentialProvider = integration.DefaultAzureDevOpsCredentialsProvider.fromIntegrations(integrations);
-     return integrations.azure.list().map((integration) => {
-       const reader = new AzureUrlReader(integration, {
-         treeResponseFactory,
-         credentialsProvider: credentialProvider
-       });
-       const predicate = (url) => url.host === integration.config.host;
-       return { reader, predicate };
-     });
-   };
-   async read(url) {
-     const response = await this.readUrl(url);
-     return response.buffer();
-   }
-   async readUrl(url, options) {
-     const { signal } = options ?? {};
-     const builtUrl = integration.getAzureFileFetchUrl(url);
-     let response;
-     try {
-       const credentials = await this.deps.credentialsProvider.getCredentials({
-         url: builtUrl
-       });
-       response = await fetch__default.default(builtUrl, {
-         headers: credentials?.headers,
-         // TODO(freben): The signal cast is there because pre-3.x versions of
-         // node-fetch have a very slightly deviating AbortSignal type signature.
-         // The difference does not affect us in practice however. The cast can
-         // be removed after we support ESM for CLI dependencies and migrate to
-         // version 3 of node-fetch.
-         // https://github.com/backstage/backstage/issues/8242
-         ...signal && { signal }
-       });
-     } catch (e) {
-       throw new Error(`Unable to read ${url}, ${e}`);
-     }
-     if (response.ok && response.status !== 203) {
-       return ReadUrlResponseFactory.fromNodeJSReadable(response.body);
-     }
-     const message = `${url} could not be read as ${builtUrl}, ${response.status} ${response.statusText}`;
-     if (response.status === 404) {
-       throw new errors.NotFoundError(message);
-     }
-     throw new Error(message);
-   }
-   async readTree(url, options) {
-     const { etag, filter, signal } = options ?? {};
-     const credentials = await this.deps.credentialsProvider.getCredentials({
-       url
-     });
-     const commitsAzureResponse = await fetch__default.default(integration.getAzureCommitsUrl(url), {
-       headers: credentials?.headers
-     });
-     if (!commitsAzureResponse.ok) {
-       const message = `Failed to read tree from ${url}, ${commitsAzureResponse.status} ${commitsAzureResponse.statusText}`;
-       if (commitsAzureResponse.status === 404) {
-         throw new errors.NotFoundError(message);
-       }
-       throw new Error(message);
-     }
-     const commitSha = (await commitsAzureResponse.json()).value[0].commitId;
-     if (etag && etag === commitSha) {
-       throw new errors.NotModifiedError();
-     }
-     const archiveAzureResponse = await fetch__default.default(integration.getAzureDownloadUrl(url), {
-       headers: {
-         ...credentials?.headers,
-         Accept: "application/zip"
-       },
-       // TODO(freben): The signal cast is there because pre-3.x versions of
-       // node-fetch have a very slightly deviating AbortSignal type signature.
-       // The difference does not affect us in practice however. The cast can be
-       // removed after we support ESM for CLI dependencies and migrate to
-       // version 3 of node-fetch.
-       // https://github.com/backstage/backstage/issues/8242
-       ...signal && { signal }
-     });
-     if (!archiveAzureResponse.ok) {
-       const message = `Failed to read tree from ${url}, ${archiveAzureResponse.status} ${archiveAzureResponse.statusText}`;
-       if (archiveAzureResponse.status === 404) {
-         throw new errors.NotFoundError(message);
-       }
-       throw new Error(message);
-     }
-     let subpath;
-     const path = new URL(url).searchParams.get("path");
-     if (path) {
-       subpath = path.split("/").filter(Boolean).slice(-1)[0];
-     }
-     return await this.deps.treeResponseFactory.fromZipArchive({
-       stream: stream.Readable.from(archiveAzureResponse.body),
-       etag: commitSha,
-       filter,
-       subpath
-     });
-   }
-   async search(url, options) {
-     const treeUrl = new URL(url);
-     const path = treeUrl.searchParams.get("path");
-     const matcher = path && new minimatch.Minimatch(path.replace(/^\/+/, ""));
-     treeUrl.searchParams.delete("path");
-     const tree = await this.readTree(treeUrl.toString(), {
-       etag: options?.etag,
-       signal: options?.signal,
-       filter: (p) => matcher ? matcher.match(p) : true
-     });
-     const files = await tree.files();
-     return {
-       etag: tree.etag,
-       files: files.map((file) => ({
-         url: this.integration.resolveUrl({
-           url: `/${file.path}`,
-           base: url
-         }),
-         content: file.content,
-         lastModifiedAt: file.lastModifiedAt
-       }))
-     };
-   }
-   toString() {
-     const { host, credentials } = this.integration.config;
-     return `azure{host=${host},authed=${Boolean(
-       credentials !== void 0 && credentials.length > 0
-     )}}`;
-   }
- }
-
- function parseLastModified(value) {
-   if (!value) {
-     return void 0;
-   }
-   return new Date(value);
- }
-
- class BitbucketCloudUrlReader {
-   constructor(integration, deps) {
-     this.integration = integration;
-     this.deps = deps;
-     const { host, username, appPassword } = integration.config;
-     if (username && !appPassword) {
-       throw new Error(
-         `Bitbucket Cloud integration for '${host}' has configured a username but is missing a required appPassword.`
-       );
-     }
-   }
-   static factory = ({ config, treeResponseFactory }) => {
-     const integrations = integration.ScmIntegrations.fromConfig(config);
-     return integrations.bitbucketCloud.list().map((integration) => {
-       const reader = new BitbucketCloudUrlReader(integration, {
-         treeResponseFactory
-       });
-       const predicate = (url) => url.host === integration.config.host;
-       return { reader, predicate };
-     });
-   };
-   async read(url) {
-     const response = await this.readUrl(url);
-     return response.buffer();
-   }
-   async readUrl(url, options) {
-     const { etag, lastModifiedAfter, signal } = options ?? {};
-     const bitbucketUrl = integration.getBitbucketCloudFileFetchUrl(
-       url,
-       this.integration.config
-     );
-     const requestOptions = integration.getBitbucketCloudRequestOptions(
-       this.integration.config
-     );
-     let response;
-     try {
-       response = await fetch__default.default(bitbucketUrl.toString(), {
-         headers: {
-           ...requestOptions.headers,
-           ...etag && { "If-None-Match": etag },
-           ...lastModifiedAfter && {
-             "If-Modified-Since": lastModifiedAfter.toUTCString()
-           }
-         },
-         // TODO(freben): The signal cast is there because pre-3.x versions of
-         // node-fetch have a very slightly deviating AbortSignal type signature.
-         // The difference does not affect us in practice however. The cast can be
-         // removed after we support ESM for CLI dependencies and migrate to
-         // version 3 of node-fetch.
-         // https://github.com/backstage/backstage/issues/8242
-         ...signal && { signal }
-       });
-     } catch (e) {
-       throw new Error(`Unable to read ${url}, ${e}`);
-     }
-     if (response.status === 304) {
-       throw new errors.NotModifiedError();
-     }
-     if (response.ok) {
-       return ReadUrlResponseFactory.fromNodeJSReadable(response.body, {
-         etag: response.headers.get("ETag") ?? void 0,
-         lastModifiedAt: parseLastModified(
-           response.headers.get("Last-Modified")
-         )
-       });
-     }
-     const message = `${url} could not be read as ${bitbucketUrl}, ${response.status} ${response.statusText}`;
-     if (response.status === 404) {
-       throw new errors.NotFoundError(message);
-     }
-     throw new Error(message);
-   }
-   async readTree(url, options) {
-     const { filepath } = parseGitUrl__default.default(url);
-     const lastCommitShortHash = await this.getLastCommitShortHash(url);
-     if (options?.etag && options.etag === lastCommitShortHash) {
-       throw new errors.NotModifiedError();
-     }
-     const downloadUrl = await integration.getBitbucketCloudDownloadUrl(
-       url,
-       this.integration.config
-     );
-     const archiveResponse = await fetch__default.default(
-       downloadUrl,
-       integration.getBitbucketCloudRequestOptions(this.integration.config)
-     );
-     if (!archiveResponse.ok) {
-       const message = `Failed to read tree from ${url}, ${archiveResponse.status} ${archiveResponse.statusText}`;
-       if (archiveResponse.status === 404) {
-         throw new errors.NotFoundError(message);
-       }
-       throw new Error(message);
-     }
-     return await this.deps.treeResponseFactory.fromTarArchive({
-       stream: stream.Readable.from(archiveResponse.body),
-       subpath: filepath,
-       etag: lastCommitShortHash,
-       filter: options?.filter
-     });
-   }
-   async search(url, options) {
-     const { filepath } = parseGitUrl__default.default(url);
-     const matcher = new minimatch.Minimatch(filepath);
-     const treeUrl = lodash.trimEnd(url.replace(filepath, ""), "/");
-     const tree = await this.readTree(treeUrl, {
-       etag: options?.etag,
-       filter: (path) => matcher.match(path)
-     });
-     const files = await tree.files();
-     return {
-       etag: tree.etag,
-       files: files.map((file) => ({
-         url: this.integration.resolveUrl({
-           url: `/${file.path}`,
-           base: url
-         }),
-         content: file.content,
-         lastModifiedAt: file.lastModifiedAt
-       }))
-     };
-   }
-   toString() {
-     const { host, username, appPassword } = this.integration.config;
-     const authed = Boolean(username && appPassword);
-     return `bitbucketCloud{host=${host},authed=${authed}}`;
-   }
-   async getLastCommitShortHash(url) {
-     const { name: repoName, owner: project, ref } = parseGitUrl__default.default(url);
-     let branch = ref;
-     if (!branch) {
-       branch = await integration.getBitbucketCloudDefaultBranch(
-         url,
-         this.integration.config
-       );
-     }
-     const commitsApiUrl = `${this.integration.config.apiBaseUrl}/repositories/${project}/${repoName}/commits/${branch}`;
-     const commitsResponse = await fetch__default.default(
-       commitsApiUrl,
-       integration.getBitbucketCloudRequestOptions(this.integration.config)
-     );
-     if (!commitsResponse.ok) {
-       const message = `Failed to retrieve commits from ${commitsApiUrl}, ${commitsResponse.status} ${commitsResponse.statusText}`;
-       if (commitsResponse.status === 404) {
-         throw new errors.NotFoundError(message);
-       }
-       throw new Error(message);
-     }
-     const commits = await commitsResponse.json();
-     if (commits && commits.values && commits.values.length > 0 && commits.values[0].hash) {
-       return commits.values[0].hash.substring(0, 12);
-     }
-     throw new Error(`Failed to read response from ${commitsApiUrl}`);
-   }
- }
-
- class BitbucketUrlReader {
-   constructor(integration, logger, deps) {
-     this.integration = integration;
-     this.deps = deps;
-     const { host, token, username, appPassword } = integration.config;
-     const replacement = host === "bitbucket.org" ? "bitbucketCloud" : "bitbucketServer";
-     logger.warn(
-       `[Deprecated] Please migrate from "integrations.bitbucket" to "integrations.${replacement}".`
-     );
-     if (!token && username && !appPassword) {
-       throw new Error(
-         `Bitbucket integration for '${host}' has configured a username but is missing a required appPassword.`
-       );
-     }
-   }
-   static factory = ({ config, logger, treeResponseFactory }) => {
-     const integrations = integration.ScmIntegrations.fromConfig(config);
-     return integrations.bitbucket.list().filter(
-       (item) => !integrations.bitbucketCloud.byHost(item.config.host) && !integrations.bitbucketServer.byHost(item.config.host)
-     ).map((integration) => {
-       const reader = new BitbucketUrlReader(integration, logger, {
-         treeResponseFactory
-       });
-       const predicate = (url) => url.host === integration.config.host;
-       return { reader, predicate };
-     });
-   };
-   async read(url) {
-     const response = await this.readUrl(url);
-     return response.buffer();
-   }
-   async readUrl(url, options) {
-     const { etag, lastModifiedAfter, signal } = options ?? {};
-     const bitbucketUrl = integration.getBitbucketFileFetchUrl(url, this.integration.config);
-     const requestOptions = integration.getBitbucketRequestOptions(this.integration.config);
-     let response;
-     try {
-       response = await fetch__default.default(bitbucketUrl.toString(), {
-         headers: {
-           ...requestOptions.headers,
-           ...etag && { "If-None-Match": etag },
-           ...lastModifiedAfter && {
-             "If-Modified-Since": lastModifiedAfter.toUTCString()
-           }
-         },
-         // TODO(freben): The signal cast is there because pre-3.x versions of
-         // node-fetch have a very slightly deviating AbortSignal type signature.
-         // The difference does not affect us in practice however. The cast can be
-         // removed after we support ESM for CLI dependencies and migrate to
-         // version 3 of node-fetch.
-         // https://github.com/backstage/backstage/issues/8242
-         ...signal && { signal }
-       });
-     } catch (e) {
-       throw new Error(`Unable to read ${url}, ${e}`);
-     }
-     if (response.status === 304) {
-       throw new errors.NotModifiedError();
-     }
-     if (response.ok) {
-       return ReadUrlResponseFactory.fromNodeJSReadable(response.body, {
-         etag: response.headers.get("ETag") ?? void 0,
-         lastModifiedAt: parseLastModified(
-           response.headers.get("Last-Modified")
-         )
-       });
-     }
-     const message = `${url} could not be read as ${bitbucketUrl}, ${response.status} ${response.statusText}`;
-     if (response.status === 404) {
-       throw new errors.NotFoundError(message);
-     }
-     throw new Error(message);
-   }
-   async readTree(url, options) {
-     const { filepath } = parseGitUrl__default.default(url);
-     const lastCommitShortHash = await this.getLastCommitShortHash(url);
-     if (options?.etag && options.etag === lastCommitShortHash) {
-       throw new errors.NotModifiedError();
-     }
-     const downloadUrl = await integration.getBitbucketDownloadUrl(
-       url,
-       this.integration.config
-     );
-     const archiveBitbucketResponse = await fetch__default.default(
-       downloadUrl,
-       integration.getBitbucketRequestOptions(this.integration.config)
-     );
-     if (!archiveBitbucketResponse.ok) {
-       const message = `Failed to read tree from ${url}, ${archiveBitbucketResponse.status} ${archiveBitbucketResponse.statusText}`;
-       if (archiveBitbucketResponse.status === 404) {
-         throw new errors.NotFoundError(message);
-       }
-       throw new Error(message);
-     }
-     return await this.deps.treeResponseFactory.fromTarArchive({
-       stream: stream.Readable.from(archiveBitbucketResponse.body),
-       subpath: filepath,
-       etag: lastCommitShortHash,
-       filter: options?.filter
-     });
-   }
-   async search(url, options) {
-     const { filepath } = parseGitUrl__default.default(url);
-     const matcher = new minimatch.Minimatch(filepath);
-     const treeUrl = lodash.trimEnd(url.replace(filepath, ""), "/");
-     const tree = await this.readTree(treeUrl, {
-       etag: options?.etag,
-       filter: (path) => matcher.match(path)
-     });
-     const files = await tree.files();
-     return {
-       etag: tree.etag,
-       files: files.map((file) => ({
-         url: this.integration.resolveUrl({
-           url: `/${file.path}`,
-           base: url
-         }),
-         content: file.content,
-         lastModifiedAt: file.lastModifiedAt
-       }))
-     };
-   }
-   toString() {
-     const { host, token, username, appPassword } = this.integration.config;
-     let authed = Boolean(token);
-     if (!authed) {
-       authed = Boolean(username && appPassword);
-     }
-     return `bitbucket{host=${host},authed=${authed}}`;
-   }
-   async getLastCommitShortHash(url) {
-     const { resource, name: repoName, owner: project, ref } = parseGitUrl__default.default(url);
-     let branch = ref;
-     if (!branch) {
-       branch = await integration.getBitbucketDefaultBranch(url, this.integration.config);
-     }
-     const isHosted = resource === "bitbucket.org";
-     const commitsApiUrl = isHosted ? `${this.integration.config.apiBaseUrl}/repositories/${project}/${repoName}/commits/${branch}` : `${this.integration.config.apiBaseUrl}/projects/${project}/repos/${repoName}/commits`;
-     const commitsResponse = await fetch__default.default(
-       commitsApiUrl,
-       integration.getBitbucketRequestOptions(this.integration.config)
-     );
-     if (!commitsResponse.ok) {
-       const message = `Failed to retrieve commits from ${commitsApiUrl}, ${commitsResponse.status} ${commitsResponse.statusText}`;
-       if (commitsResponse.status === 404) {
-         throw new errors.NotFoundError(message);
-       }
-       throw new Error(message);
-     }
-     const commits = await commitsResponse.json();
-     if (isHosted) {
-       if (commits && commits.values && commits.values.length > 0 && commits.values[0].hash) {
-         return commits.values[0].hash.substring(0, 12);
-       }
-     } else {
-       if (commits && commits.values && commits.values.length > 0 && commits.values[0].id) {
-         return commits.values[0].id.substring(0, 12);
-       }
-     }
-     throw new Error(`Failed to read response from ${commitsApiUrl}`);
-   }
- }
-
- class BitbucketServerUrlReader {
-   constructor(integration, deps) {
-     this.integration = integration;
-     this.deps = deps;
-   }
-   static factory = ({ config, treeResponseFactory }) => {
-     const integrations = integration.ScmIntegrations.fromConfig(config);
-     return integrations.bitbucketServer.list().map((integration) => {
-       const reader = new BitbucketServerUrlReader(integration, {
-         treeResponseFactory
-       });
-       const predicate = (url) => url.host === integration.config.host;
-       return { reader, predicate };
-     });
-   };
-   async read(url) {
-     const response = await this.readUrl(url);
-     return response.buffer();
-   }
-   async readUrl(url, options) {
-     const { etag, lastModifiedAfter, signal } = options ?? {};
-     const bitbucketUrl = integration.getBitbucketServerFileFetchUrl(
-       url,
-       this.integration.config
-     );
-     const requestOptions = integration.getBitbucketServerRequestOptions(
-       this.integration.config
-     );
-     let response;
-     try {
-       response = await fetch__default.default(bitbucketUrl.toString(), {
-         headers: {
-           ...requestOptions.headers,
-           ...etag && { "If-None-Match": etag },
-           ...lastModifiedAfter && {
-             "If-Modified-Since": lastModifiedAfter.toUTCString()
-           }
-         },
-         // TODO(freben): The signal cast is there because pre-3.x versions of
-         // node-fetch have a very slightly deviating AbortSignal type signature.
-         // The difference does not affect us in practice however. The cast can be
-         // removed after we support ESM for CLI dependencies and migrate to
-         // version 3 of node-fetch.
-         // https://github.com/backstage/backstage/issues/8242
-         ...signal && { signal }
-       });
-     } catch (e) {
-       throw new Error(`Unable to read ${url}, ${e}`);
-     }
-     if (response.status === 304) {
-       throw new errors.NotModifiedError();
-     }
-     if (response.ok) {
-       return ReadUrlResponseFactory.fromNodeJSReadable(response.body, {
-         etag: response.headers.get("ETag") ?? void 0,
-         lastModifiedAt: parseLastModified(
-           response.headers.get("Last-Modified")
-         )
-       });
-     }
-     const message = `${url} could not be read as ${bitbucketUrl}, ${response.status} ${response.statusText}`;
-     if (response.status === 404) {
-       throw new errors.NotFoundError(message);
-     }
-     throw new Error(message);
-   }
-   async readTree(url, options) {
-     const { filepath } = parseGitUrl__default.default(url);
-     const lastCommitShortHash = await this.getLastCommitShortHash(url);
-     if (options?.etag && options.etag === lastCommitShortHash) {
-       throw new errors.NotModifiedError();
-     }
-     const downloadUrl = await integration.getBitbucketServerDownloadUrl(
-       url,
-       this.integration.config
-     );
-     const archiveResponse = await fetch__default.default(
-       downloadUrl,
-       integration.getBitbucketServerRequestOptions(this.integration.config)
-     );
-     if (!archiveResponse.ok) {
-       const message = `Failed to read tree from ${url}, ${archiveResponse.status} ${archiveResponse.statusText}`;
-       if (archiveResponse.status === 404) {
-         throw new errors.NotFoundError(message);
-       }
-       throw new Error(message);
-     }
-     return await this.deps.treeResponseFactory.fromTarArchive({
-       stream: stream.Readable.from(archiveResponse.body),
-       subpath: filepath,
-       etag: lastCommitShortHash,
-       filter: options?.filter
-     });
-   }
-   async search(url, options) {
-     const { filepath } = parseGitUrl__default.default(url);
-     const matcher = new minimatch.Minimatch(filepath);
-     const treeUrl = lodash.trimEnd(url.replace(filepath, ""), "/");
-     const tree = await this.readTree(treeUrl, {
-       etag: options?.etag,
-       filter: (path) => matcher.match(path)
-     });
-     const files = await tree.files();
-     return {
-       etag: tree.etag,
-       files: files.map((file) => ({
-         url: this.integration.resolveUrl({
-           url: `/${file.path}`,
-           base: url
-         }),
-         content: file.content,
-         lastModifiedAt: file.lastModifiedAt
-       }))
-     };
-   }
-   toString() {
-     const { host, token } = this.integration.config;
-     const authed = Boolean(token);
-     return `bitbucketServer{host=${host},authed=${authed}}`;
-   }
-   async getLastCommitShortHash(url) {
-     const { name: repoName, owner: project, ref: branch } = parseGitUrl__default.default(url);
-     const branchParameter = branch ? `?filterText=${encodeURIComponent(branch)}` : "/default";
-     const branchListUrl = `${this.integration.config.apiBaseUrl}/projects/${project}/repos/${repoName}/branches${branchParameter}`;
-     const branchListResponse = await fetch__default.default(
-       branchListUrl,
-       integration.getBitbucketServerRequestOptions(this.integration.config)
-     );
-     if (!branchListResponse.ok) {
-       const message = `Failed to retrieve branch list from ${branchListUrl}, ${branchListResponse.status} ${branchListResponse.statusText}`;
-       if (branchListResponse.status === 404) {
-         throw new errors.NotFoundError(message);
-       }
-       throw new Error(message);
-     }
-     const branchMatches = await branchListResponse.json();
-     if (branchMatches && branchMatches.size > 0) {
-       const exactBranchMatch = branchMatches.values.filter(
-         (branchDetails) => branchDetails.displayId === branch
-       )[0];
-       return exactBranchMatch.latestCommit.substring(0, 12);
-     }
-     if (!branch && branchMatches) {
-       return branchMatches.latestCommit.substring(0, 12);
-     }
-     throw new Error(
-       `Failed to find Last Commit using ${branch ? `branch "${branch}"` : "default branch"} in response from ${branchListUrl}`
-     );
-   }
- }
-
- class GerritUrlReader {
-   constructor(integration, deps) {
-     this.integration = integration;
-     this.deps = deps;
-   }
-   static factory = ({ config, treeResponseFactory }) => {
-     const integrations = integration.ScmIntegrations.fromConfig(config);
-     if (!integrations.gerrit) {
-       return [];
-     }
-     return integrations.gerrit.list().map((integration) => {
-       const reader = new GerritUrlReader(integration, { treeResponseFactory });
-       const predicate = (url) => {
-         const gitilesUrl = new URL(integration.config.gitilesBaseUrl);
-         return url.host === gitilesUrl.host;
-       };
-       return { reader, predicate };
-     });
-   };
-   async read(url) {
-     const response = await this.readUrl(url);
-     return response.buffer();
-   }
-   async readUrl(url, options) {
-     const apiUrl = integration.getGerritFileContentsApiUrl(this.integration.config, url);
-     let response;
-     try {
-       response = await fetch__default.default(apiUrl, {
-         method: "GET",
-         ...integration.getGerritRequestOptions(this.integration.config),
-         // TODO(freben): The signal cast is there because pre-3.x versions of
-         // node-fetch have a very slightly deviating AbortSignal type signature.
-         // The difference does not affect us in practice however. The cast can
-         // be removed after we support ESM for CLI dependencies and migrate to
-         // version 3 of node-fetch.
-         // https://github.com/backstage/backstage/issues/8242
-         signal: options?.signal
-       });
-     } catch (e) {
-       throw new Error(`Unable to read gerrit file ${url}, ${e}`);
-     }
-     if (response.ok) {
-       let responseBody;
-       return {
-         buffer: async () => {
-           if (responseBody === void 0) {
-             responseBody = await response.text();
-           }
-           return Buffer.from(responseBody, "base64");
-         },
-         stream: () => {
-           const readable = stream.Readable.from(response.body);
-           return readable.pipe(new base64Stream.Base64Decode());
-         }
-       };
-     }
-     if (response.status === 404) {
-       throw new errors.NotFoundError(`File ${url} not found.`);
-     }
-     throw new Error(
-       `${url} could not be read as ${apiUrl}, ${response.status} ${response.statusText}`
-     );
-   }
-   async readTree(url, options) {
-     const apiUrl = integration.getGerritBranchApiUrl(this.integration.config, url);
-     let response;
-     try {
-       response = await fetch__default.default(apiUrl, {
-         method: "GET",
-         ...integration.getGerritRequestOptions(this.integration.config)
-       });
-     } catch (e) {
-       throw new Error(`Unable to read branch state ${url}, ${e}`);
-     }
-     if (response.status === 404) {
-       throw new errors.NotFoundError(`Not found: ${url}`);
-     }
-     if (!response.ok) {
-       throw new Error(
-         `${url} could not be read as ${apiUrl}, ${response.status} ${response.statusText}`
-       );
-     }
-     const branchInfo = await integration.parseGerritJsonResponse(response);
-     if (options?.etag === branchInfo.revision) {
-       throw new errors.NotModifiedError();
-     }
-     return this.readTreeFromGitiles(url, branchInfo.revision, options);
-   }
-   async search() {
-     throw new Error("GerritReader does not implement search");
-   }
-   toString() {
-     const { host, password } = this.integration.config;
-     return `gerrit{host=${host},authed=${Boolean(password)}}`;
-   }
-   async readTreeFromGitiles(url, revision, options) {
-     const { branch, filePath, project } = integration.parseGerritGitilesUrl(
-       this.integration.config,
-       url
-     );
-     const archiveUrl = integration.buildGerritGitilesArchiveUrl(
-       this.integration.config,
-       project,
-       branch,
-       filePath
-     );
-     const archiveResponse = await fetch__default.default(archiveUrl, {
-       ...integration.getGerritRequestOptions(this.integration.config),
-       // TODO(freben): The signal cast is there because pre-3.x versions of
-       // node-fetch have a very slightly deviating AbortSignal type signature.
-       // The difference does not affect us in practice however. The cast can
-       // be removed after we support ESM for CLI dependencies and migrate to
-       // version 3 of node-fetch.
-       // https://github.com/backstage/backstage/issues/8242
-       signal: options?.signal
-     });
-     if (archiveResponse.status === 404) {
-       throw new errors.NotFoundError(`Not found: ${archiveUrl}`);
-     }
-     if (!archiveResponse.ok) {
-       throw new Error(
-         `${url} could not be read as ${archiveUrl}, ${archiveResponse.status} ${archiveResponse.statusText}`
-       );
-     }
-     return await this.deps.treeResponseFactory.fromTarArchive({
-       stream: archiveResponse.body,
-       etag: revision,
-       filter: options?.filter,
-       stripFirstDirectory: false
-     });
-   }
- }
-
- class GithubUrlReader {
-   constructor(integration, deps) {
-     this.integration = integration;
-     this.deps = deps;
-     if (!integration.config.apiBaseUrl && !integration.config.rawBaseUrl) {
-       throw new Error(
-         `GitHub integration '${integration.title}' must configure an explicit apiBaseUrl or rawBaseUrl`
-       );
-     }
-   }
-   static factory = ({ config, treeResponseFactory }) => {
-     const integrations = integration.ScmIntegrations.fromConfig(config);
-     const credentialsProvider = integration.DefaultGithubCredentialsProvider.fromIntegrations(integrations);
-     return integrations.github.list().map((integration) => {
-       const reader = new GithubUrlReader(integration, {
-         treeResponseFactory,
-         credentialsProvider
-       });
-       const predicate = (url) => url.host === integration.config.host;
-       return { reader, predicate };
-     });
-   };
-   async read(url) {
-     const response = await this.readUrl(url);
-     return response.buffer();
-   }
-   getCredentials = async (url, options) => {
-     if (options?.token) {
-       return {
-         headers: {
-           Authorization: `Bearer ${options.token}`
-         },
-         type: "token",
-         token: options.token
-       };
-     }
-     return await this.deps.credentialsProvider.getCredentials({
-       url
-     });
-   };
-   async readUrl(url, options) {
-     const credentials = await this.getCredentials(url, options);
-     const ghUrl = integration.getGithubFileFetchUrl(
-       url,
-       this.integration.config,
-       credentials
-     );
-     const response = await this.fetchResponse(ghUrl, {
-       headers: {
-         ...credentials?.headers,
-         ...options?.etag && { "If-None-Match": options.etag },
-         ...options?.lastModifiedAfter && {
-           "If-Modified-Since": options.lastModifiedAfter.toUTCString()
-         },
-         Accept: "application/vnd.github.v3.raw"
-       },
-       // TODO(freben): The signal cast is there because pre-3.x versions of
-       // node-fetch have a very slightly deviating AbortSignal type signature.
-       // The difference does not affect us in practice however. The cast can
-       // be removed after we support ESM for CLI dependencies and migrate to
-       // version 3 of node-fetch.
-       // https://github.com/backstage/backstage/issues/8242
-       signal: options?.signal
-     });
-     return ReadUrlResponseFactory.fromNodeJSReadable(response.body, {
-       etag: response.headers.get("ETag") ?? void 0,
-       lastModifiedAt: parseLastModified(response.headers.get("Last-Modified"))
-     });
-   }
-   async readTree(url, options) {
-     const repoDetails = await this.getRepoDetails(url);
-     const commitSha = repoDetails.commitSha;
-     if (options?.etag && options.etag === commitSha) {
-       throw new errors.NotModifiedError();
-     }
-     const { filepath } = parseGitUrl__default.default(url);
-     const { headers } = await this.getCredentials(url, options);
-     return this.doReadTree(
-       repoDetails.repo.archive_url,
-       commitSha,
-       filepath,
-       // TODO(freben): The signal cast is there because pre-3.x versions of
-       // node-fetch have a very slightly deviating AbortSignal type signature.
-       // The difference does not affect us in practice however. The cast can be
-       // removed after we support ESM for CLI dependencies and migrate to
-       // version 3 of node-fetch.
-       // https://github.com/backstage/backstage/issues/8242
-       { headers, signal: options?.signal },
-       options
-     );
-   }
-   async search(url, options) {
-     const repoDetails = await this.getRepoDetails(url);
-     const commitSha = repoDetails.commitSha;
-     if (options?.etag && options.etag === commitSha) {
-       throw new errors.NotModifiedError();
-     }
-     const { filepath } = parseGitUrl__default.default(url);
-     const { headers } = await this.getCredentials(url, options);
-     const files = await this.doSearch(
-       url,
-       repoDetails.repo.trees_url,
-       repoDetails.repo.archive_url,
-       commitSha,
-       filepath,
-       { headers, signal: options?.signal }
-     );
-     return { files, etag: commitSha };
-   }
-   toString() {
-     const { host, token } = this.integration.config;
-     return `github{host=${host},authed=${Boolean(token)}}`;
-   }
-   async doReadTree(archiveUrl, sha, subpath, init, options) {
-     const archive = await this.fetchResponse(
-       archiveUrl.replace("{archive_format}", "tarball").replace("{/ref}", `/${sha}`),
-       init
-     );
-     return await this.deps.treeResponseFactory.fromTarArchive({
-       // TODO(Rugvip): Underlying implementation of fetch will be node-fetch, we probably want
-       // to stick to using that in exclusively backend code.
-       stream: stream.Readable.from(archive.body),
-       subpath,
-       etag: sha,
-       filter: options?.filter
-     });
-   }
-   async doSearch(url, treesUrl, archiveUrl, sha, query, init) {
-     function pathToUrl(path) {
-       const updated = new URL(url);
-       const base = updated.pathname.split("/").slice(1, 5).join("/");
-       updated.pathname = `${base}/${path}`;
-       return updated.toString();
-     }
-     const matcher = new minimatch.Minimatch(query.replace(/^\/+/, ""));
-     const recursiveTree = await this.fetchJson(
-       treesUrl.replace("{/sha}", `/${sha}?recursive=true`),
-       init
-     );
-     if (!recursiveTree.truncated) {
-       const matching = recursiveTree.tree.filter(
-         (item) => item.type === "blob" && item.path && item.url && matcher.match(item.path)
-       );
-       return matching.map((item) => ({
-         url: pathToUrl(item.path),
-         content: async () => {
-           const blob = await this.fetchJson(item.url, init);
-           return Buffer.from(blob.content, "base64");
-         }
-       }));
-     }
-     const tree = await this.doReadTree(archiveUrl, sha, "", init, {
-       filter: (path) => matcher.match(path)
-     });
-     const files = await tree.files();
-     return files.map((file) => ({
-       url: pathToUrl(file.path),
-       content: file.content,
-       lastModifiedAt: file.lastModifiedAt
-     }));
-   }
-   async getRepoDetails(url) {
-     const parsed = parseGitUrl__default.default(url);
-     const { ref, full_name } = parsed;
-     const credentials = await this.deps.credentialsProvider.getCredentials({
-       url
-     });
-     const { headers } = credentials;
-     const commitStatus = await this.fetchJson(
-       `${this.integration.config.apiBaseUrl}/repos/${full_name}/commits/${ref || await this.getDefaultBranch(full_name, credentials)}/status?per_page=0`,
|
|
1010
|
-
{ headers }
|
|
1011
|
-
);
|
|
1012
|
-
return {
|
|
1013
|
-
commitSha: commitStatus.sha,
|
|
1014
|
-
repo: commitStatus.repository
|
|
1015
|
-
};
|
|
1016
|
-
}
|
|
1017
|
-
async getDefaultBranch(repoFullName, credentials) {
|
|
1018
|
-
const repo = await this.fetchJson(
|
|
1019
|
-
`${this.integration.config.apiBaseUrl}/repos/${repoFullName}`,
|
|
1020
|
-
{ headers: credentials.headers }
|
|
1021
|
-
);
|
|
1022
|
-
return repo.default_branch;
|
|
1023
|
-
}
|
|
1024
|
-
async fetchResponse(url, init) {
|
|
1025
|
-
const urlAsString = url.toString();
|
|
1026
|
-
const response = await fetch__default.default(urlAsString, init);
|
|
1027
|
-
if (!response.ok) {
|
|
1028
|
-
let message = `Request failed for ${urlAsString}, ${response.status} ${response.statusText}`;
|
|
1029
|
-
if (response.status === 304) {
|
|
1030
|
-
throw new errors.NotModifiedError();
|
|
1031
|
-
}
|
|
1032
|
-
if (response.status === 404) {
|
|
1033
|
-
throw new errors.NotFoundError(message);
|
|
1034
|
-
}
|
|
1035
|
-
if (this.integration.parseRateLimitInfo(response).isRateLimited) {
|
|
1036
|
-
message += " (rate limit exceeded)";
|
|
1037
|
-
}
|
|
1038
|
-
throw new Error(message);
|
|
1039
|
-
}
|
|
1040
|
-
return response;
|
|
1041
|
-
}
|
|
1042
|
-
async fetchJson(url, init) {
|
|
1043
|
-
const response = await this.fetchResponse(url, init);
|
|
1044
|
-
return await response.json();
|
|
1045
|
-
}
|
|
1046
|
-
}
|
|
1047
|
-
|
|
1048
|
-
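// Illustrative sketch (not part of the published bundle): readTree() uses the
// latest commit SHA as its etag, so the SHA returned from one call can be fed
// back to skip unchanged trees. The `config` and `treeResponseFactory` values
// are assumed to come from the surrounding backend wiring, and at least one
// GitHub integration is assumed to be configured.
async function exampleGithubReadTree(config, treeResponseFactory, previousEtag) {
  const [{ reader }] = GithubUrlReader.factory({ config, treeResponseFactory });
  try {
    // Throws NotModifiedError when the latest commit SHA equals previousEtag.
    const tree = await reader.readTree("https://github.com/backstage/backstage", {
      etag: previousEtag
    });
    return { etag: tree.etag, files: await tree.files() };
  } catch (e) {
    if (e instanceof errors.NotModifiedError) {
      return { etag: previousEtag, files: void 0 };
    }
    throw e;
  }
}
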
class GitlabUrlReader {
  constructor(integration, deps) {
    this.integration = integration;
    this.deps = deps;
  }
  static factory = ({ config, treeResponseFactory }) => {
    const integrations = integration.ScmIntegrations.fromConfig(config);
    return integrations.gitlab.list().map((integration) => {
      const reader = new GitlabUrlReader(integration, {
        treeResponseFactory
      });
      const predicate = (url) => url.host === integration.config.host;
      return { reader, predicate };
    });
  };
  async read(url) {
    const response = await this.readUrl(url);
    return response.buffer();
  }
  async readUrl(url, options) {
    const { etag, lastModifiedAfter, signal, token } = options ?? {};
    const builtUrl = await this.getGitlabFetchUrl(url);
    let response;
    try {
      response = await fetch__default.default(builtUrl, {
        headers: {
          ...integration.getGitLabRequestOptions(this.integration.config, token).headers,
          ...etag && { "If-None-Match": etag },
          ...lastModifiedAfter && {
            "If-Modified-Since": lastModifiedAfter.toUTCString()
          }
        },
        // TODO(freben): The signal cast is there because pre-3.x versions of
        // node-fetch have a very slightly deviating AbortSignal type signature.
        // The difference does not affect us in practice however. The cast can be
        // removed after we support ESM for CLI dependencies and migrate to
        // version 3 of node-fetch.
        // https://github.com/backstage/backstage/issues/8242
        ...signal && { signal }
      });
    } catch (e) {
      throw new Error(`Unable to read ${url}, ${e}`);
    }
    if (response.status === 304) {
      throw new errors.NotModifiedError();
    }
    if (response.ok) {
      return ReadUrlResponseFactory.fromNodeJSReadable(response.body, {
        etag: response.headers.get("ETag") ?? void 0,
        lastModifiedAt: parseLastModified(
          response.headers.get("Last-Modified")
        )
      });
    }
    const message = `${url} could not be read as ${builtUrl}, ${response.status} ${response.statusText}`;
    if (response.status === 404) {
      throw new errors.NotFoundError(message);
    }
    throw new Error(message);
  }
  async readTree(url, options) {
    const { etag, signal, token } = options ?? {};
    const { ref, full_name, filepath } = parseGitUrl__default.default(url);
    let repoFullName = full_name;
    const relativePath = integration.getGitLabIntegrationRelativePath(
      this.integration.config
    );
    if (relativePath) {
      const rectifiedRelativePath = `${lodash.trimStart(relativePath, "/")}/`;
      repoFullName = full_name.replace(rectifiedRelativePath, "");
    }
    const projectGitlabResponse = await fetch__default.default(
      new URL(
        `${this.integration.config.apiBaseUrl}/projects/${encodeURIComponent(
          repoFullName
        )}`
      ).toString(),
      integration.getGitLabRequestOptions(this.integration.config, token)
    );
    if (!projectGitlabResponse.ok) {
      const msg = `Failed to read tree from ${url}, ${projectGitlabResponse.status} ${projectGitlabResponse.statusText}`;
      if (projectGitlabResponse.status === 404) {
        throw new errors.NotFoundError(msg);
      }
      throw new Error(msg);
    }
    const projectGitlabResponseJson = await projectGitlabResponse.json();
    const branch = ref || projectGitlabResponseJson.default_branch;
    const commitsReqParams = new URLSearchParams();
    commitsReqParams.set("ref_name", branch);
    if (!!filepath) {
      commitsReqParams.set("path", filepath);
    }
    const commitsGitlabResponse = await fetch__default.default(
      new URL(
        `${this.integration.config.apiBaseUrl}/projects/${encodeURIComponent(
          repoFullName
        )}/repository/commits?${commitsReqParams.toString()}`
      ).toString(),
      {
        ...integration.getGitLabRequestOptions(this.integration.config, token),
        // TODO(freben): The signal cast is there because pre-3.x versions of
        // node-fetch have a very slightly deviating AbortSignal type signature.
        // The difference does not affect us in practice however. The cast can
        // be removed after we support ESM for CLI dependencies and migrate to
        // version 3 of node-fetch.
        // https://github.com/backstage/backstage/issues/8242
        ...signal && { signal }
      }
    );
    if (!commitsGitlabResponse.ok) {
      const message = `Failed to read tree (branch) from ${url}, ${commitsGitlabResponse.status} ${commitsGitlabResponse.statusText}`;
      if (commitsGitlabResponse.status === 404) {
        throw new errors.NotFoundError(message);
      }
      throw new Error(message);
    }
    const commitSha = (await commitsGitlabResponse.json())[0]?.id ?? "";
    if (etag && etag === commitSha) {
      throw new errors.NotModifiedError();
    }
    const archiveReqParams = new URLSearchParams();
    archiveReqParams.set("sha", branch);
    if (!!filepath) {
      archiveReqParams.set("path", filepath);
    }
    const archiveGitLabResponse = await fetch__default.default(
      `${this.integration.config.apiBaseUrl}/projects/${encodeURIComponent(
        repoFullName
      )}/repository/archive?${archiveReqParams.toString()}`,
      {
        ...integration.getGitLabRequestOptions(this.integration.config, token),
        // TODO(freben): The signal cast is there because pre-3.x versions of
        // node-fetch have a very slightly deviating AbortSignal type signature.
        // The difference does not affect us in practice however. The cast can
        // be removed after we support ESM for CLI dependencies and migrate to
        // version 3 of node-fetch.
        // https://github.com/backstage/backstage/issues/8242
        ...signal && { signal }
      }
    );
    if (!archiveGitLabResponse.ok) {
      const message = `Failed to read tree (archive) from ${url}, ${archiveGitLabResponse.status} ${archiveGitLabResponse.statusText}`;
      if (archiveGitLabResponse.status === 404) {
        throw new errors.NotFoundError(message);
      }
      throw new Error(message);
    }
    return await this.deps.treeResponseFactory.fromTarArchive({
      stream: stream.Readable.from(archiveGitLabResponse.body),
      subpath: filepath,
      etag: commitSha,
      filter: options?.filter
    });
  }
  async search(url, options) {
    const { filepath } = parseGitUrl__default.default(url);
    const staticPart = this.getStaticPart(filepath);
    const matcher = new minimatch.Minimatch(filepath);
    const treeUrl = lodash.trimEnd(url.replace(filepath, staticPart), `/`);
    const pathPrefix = staticPart ? `${staticPart}/` : "";
    const tree = await this.readTree(treeUrl, {
      etag: options?.etag,
      signal: options?.signal,
      filter: (path) => matcher.match(`${pathPrefix}${path}`)
    });
    const files = await tree.files();
    return {
      etag: tree.etag,
      files: files.map((file) => ({
        url: this.integration.resolveUrl({
          url: `/${pathPrefix}${file.path}`,
          base: url
        }),
        content: file.content,
        lastModifiedAt: file.lastModifiedAt
      }))
    };
  }
  /**
   * This function splits the input globPattern string into segments using the path separator /. It then iterates over
   * the segments from the end of the array towards the beginning, checking if the concatenated string up to that
   * segment matches the original globPattern using the minimatch function. If a match is found, it continues iterating.
   * If no match is found, it returns the concatenated string up to the current segment, which is the static part of the
   * glob pattern.
   *
   * E.g. `catalog/foo/*.yaml` will return `catalog/foo`.
   *
   * @param globPattern the glob pattern
   * @private
   */
  getStaticPart(globPattern) {
    const segments = globPattern.split("/");
    let i = segments.length;
    while (i > 0 && new minimatch.Minimatch(segments.slice(0, i).join("/")).match(globPattern)) {
      i--;
    }
    return segments.slice(0, i).join("/");
  }
  toString() {
    const { host, token } = this.integration.config;
    return `gitlab{host=${host},authed=${Boolean(token)}}`;
  }
  async getGitlabFetchUrl(target) {
    const targetUrl = new URL(target);
    if (targetUrl.pathname.includes("/-/jobs/artifacts/")) {
      return this.getGitlabArtifactFetchUrl(targetUrl).then(
        (value) => value.toString()
      );
    }
    return integration.getGitLabFileFetchUrl(target, this.integration.config);
  }
  // convert urls of the form:
  // https://example.com/<namespace>/<project>/-/jobs/artifacts/<ref>/raw/<path_to_file>?job=<job_name>
  // to urls of the form:
  // https://example.com/api/v4/projects/:id/jobs/artifacts/:ref_name/raw/*artifact_path?job=<job_name>
  async getGitlabArtifactFetchUrl(target) {
    if (!target.pathname.includes("/-/jobs/artifacts/")) {
      throw new Error("Unable to process url as an GitLab artifact");
    }
    try {
      const [namespaceAndProject, ref] = target.pathname.split("/-/jobs/artifacts/");
      const projectPath = new URL(target);
      projectPath.pathname = namespaceAndProject;
      const projectId = await this.resolveProjectToId(projectPath);
      const relativePath = integration.getGitLabIntegrationRelativePath(
        this.integration.config
      );
      const newUrl = new URL(target);
      newUrl.pathname = `${relativePath}/api/v4/projects/${projectId}/jobs/artifacts/${ref}`;
      return newUrl;
    } catch (e) {
      throw new Error(
        `Unable to translate GitLab artifact URL: ${target}, ${e}`
      );
    }
  }
  async resolveProjectToId(pathToProject) {
    let project = pathToProject.pathname;
    const relativePath = integration.getGitLabIntegrationRelativePath(
      this.integration.config
    );
    if (relativePath) {
      project = project.replace(relativePath, "");
    }
    project = project.replace(/^\//, "");
    const result = await fetch__default.default(
      `${pathToProject.origin}${relativePath}/api/v4/projects/${encodeURIComponent(project)}`,
      integration.getGitLabRequestOptions(this.integration.config)
    );
    const data = await result.json();
    if (!result.ok) {
      throw new Error(`Gitlab error: ${data.error}, ${data.error_description}`);
    }
    return Number(data.id);
  }
}

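// Illustrative sketch (not part of the published bundle): what getStaticPart()
// returns, per its docstring above. Note that even a fully literal path loses
// its last segment, because a literal string matches itself as a glob and the
// loop then strips one more segment. The `reader` value is assumed to come
// from GitlabUrlReader.factory().
function exampleGitlabStaticPart(reader) {
  // "catalog/foo" is the longest prefix that is not itself a match of the glob.
  const globbed = reader.getStaticPart("catalog/foo/*.yaml"); // => "catalog/foo"
  // A literal path also yields its parent directory, not the full path.
  const literal = reader.getStaticPart("catalog/foo/bar.yaml"); // => "catalog/foo"
  return { globbed, literal };
}
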
class GiteaUrlReader {
  constructor(integration, deps) {
    this.integration = integration;
    this.deps = deps;
  }
  static factory = ({ config, treeResponseFactory }) => {
    return integration.ScmIntegrations.fromConfig(config).gitea.list().map((integration) => {
      const reader = new GiteaUrlReader(integration, { treeResponseFactory });
      const predicate = (url) => {
        return url.host === integration.config.host;
      };
      return { reader, predicate };
    });
  };
  async read(url) {
    const response = await this.readUrl(url);
    return response.buffer();
  }
  async readUrl(url, options) {
    let response;
    const blobUrl = integration.getGiteaFileContentsUrl(this.integration.config, url);
    try {
      response = await fetch__default.default(blobUrl, {
        method: "GET",
        ...integration.getGiteaRequestOptions(this.integration.config),
        signal: options?.signal
      });
    } catch (e) {
      throw new Error(`Unable to read ${blobUrl}, ${e}`);
    }
    if (response.ok) {
      const { encoding, content } = await response.json();
      if (encoding === "base64") {
        return ReadUrlResponseFactory.fromReadable(
          stream.Readable.from(Buffer.from(content, "base64")),
          {
            etag: response.headers.get("ETag") ?? void 0,
            lastModifiedAt: parseLastModified(
              response.headers.get("Last-Modified")
            )
          }
        );
      }
      throw new Error(`Unknown encoding: ${encoding}`);
    }
    const message = `${url} could not be read as ${blobUrl}, ${response.status} ${response.statusText}`;
    if (response.status === 404) {
      throw new errors.NotFoundError(message);
    }
    if (response.status === 304) {
      throw new errors.NotModifiedError();
    }
    if (response.status === 403) {
      throw new errors.AuthenticationError();
    }
    throw new Error(message);
  }
  async readTree(url, options) {
    const lastCommitHash = await this.getLastCommitHash(url);
    if (options?.etag && options.etag === lastCommitHash) {
      throw new errors.NotModifiedError();
    }
    const archiveUri = integration.getGiteaArchiveUrl(this.integration.config, url);
    let response;
    try {
      response = await fetch__default.default(archiveUri, {
        method: "GET",
        ...integration.getGiteaRequestOptions(this.integration.config),
        signal: options?.signal
      });
    } catch (e) {
      throw new Error(`Unable to read ${archiveUri}, ${e}`);
    }
    const parsedUri = integration.parseGiteaUrl(this.integration.config, url);
    return this.deps.treeResponseFactory.fromTarArchive({
      stream: stream.Readable.from(response.body),
      subpath: parsedUri.path,
      etag: lastCommitHash,
      filter: options?.filter
    });
  }
  search() {
    throw new Error("GiteaUrlReader search not implemented.");
  }
  toString() {
    const { host } = this.integration.config;
    return `gitea{host=${host},authed=${Boolean(
      this.integration.config.password
    )}}`;
  }
  async getLastCommitHash(url) {
    const commitUri = integration.getGiteaLatestCommitUrl(this.integration.config, url);
    const response = await fetch__default.default(
      commitUri,
      integration.getGiteaRequestOptions(this.integration.config)
    );
    if (!response.ok) {
      const message = `Failed to retrieve latest commit information from ${commitUri}, ${response.status} ${response.statusText}`;
      if (response.status === 404) {
        throw new errors.NotFoundError(message);
      }
      throw new Error(message);
    }
    return (await response.json()).sha;
  }
}

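// Illustrative sketch (not part of the published bundle): GiteaUrlReader maps
// HTTP statuses onto Backstage error types, so callers can branch on them.
// The `reader` value is assumed to come from GiteaUrlReader.factory().
async function exampleGiteaRead(reader, url) {
  try {
    const response = await reader.readUrl(url);
    return await response.buffer(); // file contents, decoded from base64
  } catch (e) {
    if (e instanceof errors.NotFoundError) return void 0; // 404
    if (e instanceof errors.AuthenticationError) throw e; // 403
    // 304 surfaces as NotModifiedError; anything else as a plain Error.
    throw e;
  }
}
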
class HarnessUrlReader {
  constructor(integration, deps) {
    this.integration = integration;
    this.deps = deps;
  }
  static factory = ({ config, treeResponseFactory }) => {
    return integration.ScmIntegrations.fromConfig(config).harness.list().map((integration) => {
      const reader = new HarnessUrlReader(integration, {
        treeResponseFactory
      });
      const predicate = (url) => {
        return url.host === integration.config.host;
      };
      return { reader, predicate };
    });
  };
  async read(url) {
    const response = await this.readUrl(url);
    return response.buffer();
  }
  async readUrl(url, options) {
    let response;
    const blobUrl = integration.getHarnessFileContentsUrl(this.integration.config, url);
    try {
      response = await fetch__default.default(blobUrl, {
        method: "GET",
        ...integration.getHarnessRequestOptions(this.integration.config),
        signal: options?.signal
      });
    } catch (e) {
      throw new Error(`Unable to read ${blobUrl}, ${e}`);
    }
    if (response.ok) {
      const jsonResponse = { data: response.body };
      if (jsonResponse) {
        return ReadUrlResponseFactory.fromReadable(
          stream.Readable.from(jsonResponse.data),
          {
            etag: response.headers.get("ETag") ?? void 0
          }
        );
      }
      throw new Error(`Unknown json: ${jsonResponse}`);
    }
    const message = `${url} x ${blobUrl}, ${response.status} ${response.statusText}`;
    if (response.status === 404) {
      throw new errors.NotFoundError(message);
    }
    if (response.status === 304) {
      throw new errors.NotModifiedError();
    }
    if (response.status === 403) {
      throw new errors.AuthenticationError();
    }
    throw new Error(message);
  }
  async readTree(url, options) {
    const lastCommitHash = await this.getLastCommitHash(url);
    if (options?.etag && options.etag === lastCommitHash) {
      throw new errors.NotModifiedError();
    }
    const archiveUri = integration.getHarnessArchiveUrl(this.integration.config, url);
    let response;
    try {
      response = await fetch__default.default(archiveUri, {
        method: "GET",
        ...integration.getHarnessRequestOptions(this.integration.config),
        signal: options?.signal
      });
    } catch (e) {
      throw new Error(`Unable to read ${archiveUri}, ${e}`);
    }
    const parsedUri = integration.parseHarnessUrl(this.integration.config, url);
    return this.deps.treeResponseFactory.fromZipArchive({
      stream: stream.Readable.from(response.body),
      subpath: parsedUri.path,
      etag: lastCommitHash,
      filter: options?.filter
    });
  }
  search() {
    throw new Error("HarnessUrlReader search not implemented.");
  }
  toString() {
    const { host } = this.integration.config;
    return `harness{host=${host},authed=${Boolean(
      this.integration.config.token || this.integration.config.apiKey
    )}}`;
  }
  async getLastCommitHash(url) {
    const commitUri = integration.getHarnessLatestCommitUrl(this.integration.config, url);
    const response = await fetch__default.default(
      commitUri,
      integration.getHarnessRequestOptions(this.integration.config)
    );
    if (!response.ok) {
      const message = `Failed to retrieve latest commit information from ${commitUri}, ${response.status} ${response.statusText}`;
      if (response.status === 404) {
        throw new errors.NotFoundError(message);
      }
      throw new Error(message);
    }
    return (await response.json()).latest_commit.sha;
  }
}

const DEFAULT_REGION = "us-east-1";
function parseUrl$1(url, config) {
  const parsedUrl = new URL(url);
  const pathname = parsedUrl.pathname.substring(1);
  const host = parsedUrl.host;
  if (config.host === "amazonaws.com" || config.host === "amazonaws.com.cn") {
    const match = host.match(
      /^(?:([a-z0-9.-]+)\.)?s3(?:[.-]([a-z0-9-]+))?\.amazonaws\.com(\.cn)?$/
    );
    if (!match) {
      throw new Error(`Invalid AWS S3 URL ${url}`);
    }
    const [, hostBucket, hostRegion] = match;
    if (config.s3ForcePathStyle || !hostBucket) {
      const slashIndex = pathname.indexOf("/");
      if (slashIndex < 0) {
        throw new Error(
          `Invalid path-style AWS S3 URL ${url}, does not contain bucket in the path`
        );
      }
      return {
        path: pathname.substring(slashIndex + 1),
        bucket: pathname.substring(0, slashIndex),
        region: hostRegion ?? DEFAULT_REGION
      };
    }
    return {
      path: pathname,
      bucket: hostBucket,
      region: hostRegion ?? DEFAULT_REGION
    };
  }
  const usePathStyle = config.s3ForcePathStyle || host.length === config.host.length;
  if (usePathStyle) {
    const slashIndex = pathname.indexOf("/");
    if (slashIndex < 0) {
      throw new Error(
        `Invalid path-style AWS S3 URL ${url}, does not contain bucket in the path`
      );
    }
    return {
      path: pathname.substring(slashIndex + 1),
      bucket: pathname.substring(0, slashIndex),
      region: DEFAULT_REGION
    };
  }
  return {
    path: pathname,
    bucket: host.substring(0, host.length - config.host.length - 1),
    region: DEFAULT_REGION
  };
}
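// Illustrative sketch (not part of the published bundle): the two URL shapes
// parseUrl$1 accepts for the default amazonaws.com host. The region falls back
// to DEFAULT_REGION ("us-east-1") when the host does not carry one.
function exampleParseS3Urls() {
  const config = { host: "amazonaws.com" };
  // Virtual-hosted-style: bucket and region are read from the host.
  const hosted = parseUrl$1(
    "https://my-bucket.s3.us-west-2.amazonaws.com/some/key.yaml",
    config
  ); // => { path: "some/key.yaml", bucket: "my-bucket", region: "us-west-2" }
  // Path-style: the bucket is the first path segment.
  const pathStyle = parseUrl$1(
    "https://s3.amazonaws.com/my-bucket/some/key.yaml",
    config
  ); // => { path: "some/key.yaml", bucket: "my-bucket", region: "us-east-1" }
  return { hosted, pathStyle };
}
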
class AwsS3UrlReader {
  constructor(credsManager, integration, deps) {
    this.credsManager = credsManager;
    this.integration = integration;
    this.deps = deps;
  }
  static factory = ({ config, treeResponseFactory }) => {
    const integrations = integration.ScmIntegrations.fromConfig(config);
    const credsManager = integrationAwsNode.DefaultAwsCredentialsManager.fromConfig(config);
    return integrations.awsS3.list().map((integration) => {
      const reader = new AwsS3UrlReader(credsManager, integration, {
        treeResponseFactory
      });
      const predicate = (url) => url.host.endsWith(integration.config.host);
      return { reader, predicate };
    });
  };
  /**
   * If accessKeyId and secretAccessKey are missing, the standard credentials provider chain will be used:
   * https://docs.aws.amazon.com/AWSJavaSDK/latest/javadoc/com/amazonaws/auth/DefaultAWSCredentialsProviderChain.html
   */
  static buildStaticCredentials(accessKeyId, secretAccessKey) {
    return async () => {
      return {
        accessKeyId,
        secretAccessKey
      };
    };
  }
  static async buildCredentials(credsManager, region, integration) {
    if (!integration) {
      return (await credsManager.getCredentialProvider()).sdkCredentialProvider;
    }
    const accessKeyId = integration.config.accessKeyId;
    const secretAccessKey = integration.config.secretAccessKey;
    let explicitCredentials;
    if (accessKeyId && secretAccessKey) {
      explicitCredentials = AwsS3UrlReader.buildStaticCredentials(
        accessKeyId,
        secretAccessKey
      );
    } else {
      explicitCredentials = (await credsManager.getCredentialProvider()).sdkCredentialProvider;
    }
    const roleArn = integration.config.roleArn;
    if (roleArn) {
      return credentialProviders.fromTemporaryCredentials({
        masterCredentials: explicitCredentials,
        params: {
          RoleSessionName: "backstage-aws-s3-url-reader",
          RoleArn: roleArn,
          ExternalId: integration.config.externalId
        },
        clientConfig: { region }
      });
    }
    return explicitCredentials;
  }
  async buildS3Client(credsManager, region, integration) {
    const credentials = await AwsS3UrlReader.buildCredentials(
      credsManager,
      region,
      integration
    );
    const s3 = new clientS3.S3Client({
      customUserAgent: "backstage-aws-s3-url-reader",
      region,
      credentials,
      endpoint: integration.config.endpoint,
      forcePathStyle: integration.config.s3ForcePathStyle
    });
    return s3;
  }
  async retrieveS3ObjectData(stream$1) {
    return new Promise((resolve, reject) => {
      try {
        const chunks = [];
        stream$1.on("data", (chunk) => chunks.push(chunk));
        stream$1.on(
          "error",
          (e) => reject(new errors.ForwardedError("Unable to read stream", e))
        );
        stream$1.on("end", () => resolve(stream.Readable.from(Buffer.concat(chunks))));
      } catch (e) {
        throw new errors.ForwardedError("Unable to parse the response data", e);
      }
    });
  }
  async read(url) {
    const response = await this.readUrl(url);
    return response.buffer();
  }
  async readUrl(url, options) {
    const { etag, lastModifiedAfter } = options ?? {};
    try {
      const { path, bucket, region } = parseUrl$1(url, this.integration.config);
      const s3Client = await this.buildS3Client(
        this.credsManager,
        region,
        this.integration
      );
      const abortController$1 = new abortController.AbortController();
      const params = {
        Bucket: bucket,
        Key: path,
        ...etag && { IfNoneMatch: etag },
        ...lastModifiedAfter && {
          IfModifiedSince: lastModifiedAfter
        }
      };
      options?.signal?.addEventListener("abort", () => abortController$1.abort());
      const getObjectCommand = new clientS3.GetObjectCommand(params);
      const response = await s3Client.send(getObjectCommand, {
        abortSignal: abortController$1.signal
      });
      const s3ObjectData = await this.retrieveS3ObjectData(
        response.Body
      );
      return ReadUrlResponseFactory.fromReadable(s3ObjectData, {
        etag: response.ETag,
        lastModifiedAt: response.LastModified
      });
    } catch (e) {
      if (e.$metadata && e.$metadata.httpStatusCode === 304) {
        throw new errors.NotModifiedError();
      }
      throw new errors.ForwardedError("Could not retrieve file from S3", e);
    }
  }
  async readTree(url, options) {
    try {
      const { path, bucket, region } = parseUrl$1(url, this.integration.config);
      const s3Client = await this.buildS3Client(
        this.credsManager,
        region,
        this.integration
      );
      const abortController$1 = new abortController.AbortController();
      const allObjects = [];
      const responses = [];
      let continuationToken;
      let output;
      do {
        const listObjectsV2Command = new clientS3.ListObjectsV2Command({
          Bucket: bucket,
          ContinuationToken: continuationToken,
          Prefix: path
        });
        options?.signal?.addEventListener(
          "abort",
          () => abortController$1.abort()
        );
        output = await s3Client.send(listObjectsV2Command, {
          abortSignal: abortController$1.signal
        });
        if (output.Contents) {
          output.Contents.forEach((contents) => {
            allObjects.push(contents.Key);
          });
        }
        continuationToken = output.NextContinuationToken;
      } while (continuationToken);
      for (let i = 0; i < allObjects.length; i++) {
        const getObjectCommand = new clientS3.GetObjectCommand({
          Bucket: bucket,
          Key: String(allObjects[i])
        });
        const response = await s3Client.send(getObjectCommand);
        const s3ObjectData = await this.retrieveS3ObjectData(
          response.Body
        );
        responses.push({
          data: s3ObjectData,
          path: posix.relative(path, String(allObjects[i])),
          lastModifiedAt: response?.LastModified ?? void 0
        });
      }
      return await this.deps.treeResponseFactory.fromReadableArray(responses);
    } catch (e) {
      throw new errors.ForwardedError("Could not retrieve file tree from S3", e);
    }
  }
  async search() {
    throw new Error("AwsS3Reader does not implement search");
  }
  toString() {
    const secretAccessKey = this.integration.config.secretAccessKey;
    return `awsS3{host=${this.integration.config.host},authed=${Boolean(
      secretAccessKey
    )}}`;
  }
}

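// Illustrative sketch (not part of the published bundle): the credential
// resolution order implemented by AwsS3UrlReader.buildCredentials above.
// The `credsManager` and `integration` values are assumed to come from the
// factory's wiring.
async function exampleResolveS3Credentials(credsManager, integration) {
  // 1. No integration config: fall back to the SDK's default provider chain.
  // 2. accessKeyId + secretAccessKey present: static credentials.
  // 3. roleArn present: assume that role on top of whichever base was chosen,
  //    optionally passing externalId.
  const provider = await AwsS3UrlReader.buildCredentials(
    credsManager,
    "us-east-1",
    integration
  );
  return provider; // an async credential provider usable by S3Client
}
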
const isInRange = (num, [start, end]) => {
  return num >= start && num <= end;
};
const parsePortRange = (port) => {
  const isRange = port.includes("-");
  if (isRange) {
    const range = port.split("-").map((v) => parseInt(v, 10)).filter(Boolean);
    if (range.length !== 2) throw new Error(`Port range is not valid: ${port}`);
    const [start, end] = range;
    if (start <= 0 || end <= 0 || start > end)
      throw new Error(`Port range is not valid: [${start}, ${end}]`);
    return range;
  }
  const parsedPort = parseInt(port, 10);
  return [parsedPort, parsedPort];
};
const parsePortPredicate = (port) => {
  if (port) {
    const range = parsePortRange(port);
    return (url) => {
      if (url.port) return isInRange(parseInt(url.port, 10), range);
      if (url.protocol === "http:") return isInRange(80, range);
      if (url.protocol === "https:") return isInRange(443, range);
      return false;
    };
  }
  return (url) => !url.port;
};
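// Illustrative sketch (not part of the published bundle): how the port
// predicate treats explicit ports, protocol defaults, and missing ports.
function examplePortPredicates() {
  const inRange = parsePortPredicate("8000-9000");
  const a = inRange(new URL("https://example.com:8443/x")); // true: 8443 is in range
  const b = inRange(new URL("https://example.com/x")); // false: https defaults to 443
  const noPort = parsePortPredicate(void 0);
  const c = noPort(new URL("https://example.com/x")); // true: no explicit port
  const d = noPort(new URL("https://example.com:8443/x")); // false: port given
  return { a, b, c, d };
}
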
class FetchUrlReader {
  /**
   * The factory creates a single reader that will be used for reading any URL that's listed
   * in configuration at `backend.reading.allow`. The allow list contains a list of objects describing
   * targets to allow, containing the following fields:
   *
   * `host`:
   * Either full hostnames to match, or subdomain wildcard matchers with a leading '*'.
   * For example 'example.com' and '*.example.com' are valid values, 'prod.*.example.com' is not.
   *
   * `paths`:
   * An optional list of paths which are allowed. If the list is omitted all paths are allowed.
   */
  static factory = ({ config }) => {
    const predicates = config.getOptionalConfigArray("backend.reading.allow")?.map((allowConfig) => {
      const paths = allowConfig.getOptionalStringArray("paths");
      const checkPath = paths ? (url) => {
        const targetPath = platformPath__default.default.posix.normalize(url.pathname);
        return paths.some(
          (allowedPath) => targetPath.startsWith(allowedPath)
        );
      } : (_url) => true;
      const host = allowConfig.getString("host");
      const [hostname, port] = host.split(":");
      const checkPort = parsePortPredicate(port);
      if (hostname.startsWith("*.")) {
        const suffix = hostname.slice(1);
        return (url) => url.hostname.endsWith(suffix) && checkPath(url) && checkPort(url);
      }
      return (url) => url.hostname === hostname && checkPath(url) && checkPort(url);
    }) ?? [];
    const reader = new FetchUrlReader();
    const predicate = (url) => predicates.some((p) => p(url));
    return [{ reader, predicate }];
  };
  async read(url) {
    const response = await this.readUrl(url);
    return response.buffer();
  }
  async readUrl(url, options) {
    let response;
    try {
      response = await fetch__default.default(url, {
        headers: {
          ...options?.etag && { "If-None-Match": options.etag },
          ...options?.lastModifiedAfter && {
            "If-Modified-Since": options.lastModifiedAfter.toUTCString()
          },
          ...options?.token && { Authorization: `Bearer ${options.token}` }
        },
        // TODO(freben): The signal cast is there because pre-3.x versions of
        // node-fetch have a very slightly deviating AbortSignal type signature.
        // The difference does not affect us in practice however. The cast can
        // be removed after we support ESM for CLI dependencies and migrate to
        // version 3 of node-fetch.
        // https://github.com/backstage/backstage/issues/8242
        signal: options?.signal
      });
    } catch (e) {
      throw new Error(`Unable to read ${url}, ${e}`);
    }
    if (response.status === 304) {
      throw new errors.NotModifiedError();
    }
    if (response.ok) {
      return ReadUrlResponseFactory.fromNodeJSReadable(response.body, {
        etag: response.headers.get("ETag") ?? void 0,
        lastModifiedAt: parseLastModified(
          response.headers.get("Last-Modified")
        )
      });
    }
    const message = `could not read ${url}, ${response.status} ${response.statusText}`;
    if (response.status === 404) {
      throw new errors.NotFoundError(message);
    }
    throw new Error(message);
  }
  async readTree() {
    throw new Error("FetchUrlReader does not implement readTree");
  }
  async search() {
    throw new Error("FetchUrlReader does not implement search");
  }
  toString() {
    return "fetch{}";
  }
}

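// Illustrative sketch (not part of the published bundle): wildcard host
// matching in FetchUrlReader.factory. With host "*.example.com" the suffix
// becomes ".example.com", so subdomains match but the apex domain does not.
// The `config` value is assumed to be a root Config whose
// backend.reading.allow contains [{ host: "*.example.com" }].
function exampleFetchAllowList(config) {
  const [{ predicate }] = FetchUrlReader.factory({ config });
  const sub = predicate(new URL("https://docs.example.com/readme.md")); // true
  const apex = predicate(new URL("https://example.com/readme.md")); // false: no leading dot
  const other = predicate(new URL("https://example.org/readme.md")); // false
  return { sub, apex, other };
}
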
function notAllowedMessage(url) {
  return `Reading from '${url}' is not allowed. You may need to configure an integration for the target host, or add it to the configured list of allowed hosts at 'backend.reading.allow'`;
}
class UrlReaderPredicateMux {
  readers = [];
  register(tuple) {
    this.readers.push(tuple);
  }
  async readUrl(url, options) {
    const parsed = new URL(url);
    for (const { predicate, reader } of this.readers) {
      if (predicate(parsed)) {
        return reader.readUrl(url, options);
      }
    }
    throw new errors.NotAllowedError(notAllowedMessage(url));
  }
  async readTree(url, options) {
    const parsed = new URL(url);
    for (const { predicate, reader } of this.readers) {
      if (predicate(parsed)) {
        return await reader.readTree(url, options);
      }
    }
    throw new errors.NotAllowedError(notAllowedMessage(url));
  }
  async search(url, options) {
    const parsed = new URL(url);
    for (const { predicate, reader } of this.readers) {
      if (predicate(parsed)) {
        return await reader.search(url, options);
      }
    }
    throw new errors.NotAllowedError(notAllowedMessage(url));
  }
  toString() {
    return `predicateMux{readers=${this.readers.map((t) => t.reader).join(",")}`;
  }
}

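// Illustrative sketch (not part of the published bundle): the mux walks its
// registered tuples in order and dispatches to the first reader whose
// predicate accepts the parsed URL; with no match it throws NotAllowedError.
// The tuple arguments are assumed to come from the reader factories above.
async function exampleMuxDispatch(githubTuple, fetchTuple) {
  const mux = new UrlReaderPredicateMux();
  mux.register(githubTuple); // consulted first when both predicates accept
  mux.register(fetchTuple);
  try {
    return await mux.readUrl("https://github.com/backstage/backstage/blob/master/README.md");
  } catch (e) {
    if (e instanceof errors.NotAllowedError) {
      return void 0; // no registered predicate accepted the host
    }
    throw e;
  }
}
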
const pipeline$2 = util.promisify(stream.pipeline);
const directoryNameRegex = /^[^\/]+\//;
function stripFirstDirectoryFromPath(path) {
  return path.replace(directoryNameRegex, "");
}
const streamToBuffer = (stream) => {
  return new Promise(async (resolve, reject) => {
    try {
      await pipeline$2(stream, concatStream__default.default(resolve));
    } catch (ex) {
      reject(ex);
    }
  });
};

const TarParseStream = tar.Parse;
const pipeline$1 = util.promisify(stream.pipeline);
class TarArchiveResponse {
  constructor(stream, subPath, workDir, etag, filter, stripFirstDirectory = true) {
    this.stream = stream;
    this.subPath = subPath;
    this.workDir = workDir;
    this.etag = etag;
    this.filter = filter;
    this.stripFirstDirectory = stripFirstDirectory;
    if (subPath) {
      if (!subPath.endsWith("/")) {
        this.subPath += "/";
      }
      if (subPath.startsWith("/")) {
        throw new TypeError(
          `TarArchiveResponse subPath must not start with a /, got '${subPath}'`
        );
      }
    }
    this.etag = etag;
  }
  read = false;
  // Make sure the input stream is only read once
  onlyOnce() {
    if (this.read) {
      throw new Error("Response has already been read");
    }
    this.read = true;
  }
  async files() {
    this.onlyOnce();
    const files = Array();
    const parser = new TarParseStream();
    parser.on("entry", (entry) => {
      if (entry.type === "Directory") {
        entry.resume();
        return;
      }
      const relativePath = this.stripFirstDirectory ? stripFirstDirectoryFromPath(entry.path) : entry.path;
      if (this.subPath) {
        if (!relativePath.startsWith(this.subPath)) {
          entry.resume();
          return;
        }
      }
      const path = relativePath.slice(this.subPath.length);
      if (this.filter) {
        if (!this.filter(path, { size: entry.remain })) {
          entry.resume();
          return;
        }
      }
      const content = new Promise(async (resolve) => {
        await pipeline$1(entry, concatStream__default.default(resolve));
      });
      files.push({
        path,
        content: () => content
      });
      entry.resume();
    });
    await pipeline$1(this.stream, parser);
    return files;
  }
  async archive() {
    if (!this.subPath) {
      this.onlyOnce();
      return this.stream;
    }
    const tmpDir = await this.dir();
    try {
      const data = await new Promise(async (resolve) => {
        await pipeline$1(
          tar__default.default.create({ cwd: tmpDir }, [""]),
          concatStream__default.default(resolve)
        );
      });
      return stream.Readable.from(data);
    } finally {
      await fs__default.default.remove(tmpDir);
    }
  }
  async dir(options) {
    this.onlyOnce();
    const dir = options?.targetDir ?? await fs__default.default.mkdtemp(platformPath__default.default.join(this.workDir, "backstage-"));
    let strip = this.subPath ? this.subPath.split("/").length : 1;
    if (!this.stripFirstDirectory) {
      strip--;
    }
    let filterError = void 0;
    await pipeline$1(
      this.stream,
      tar__default.default.extract({
        strip,
        cwd: dir,
        filter: (path, stat) => {
          if (filterError) {
            return false;
          }
          const relativePath = this.stripFirstDirectory ? stripFirstDirectoryFromPath(path) : path;
          if (this.subPath && !relativePath.startsWith(this.subPath)) {
            return false;
          }
          if (this.filter) {
            const innerPath = path.split("/").slice(strip).join("/");
            try {
              return this.filter(innerPath, { size: stat.size });
            } catch (error) {
              filterError = error;
              return false;
            }
          }
          return true;
        }
      })
    );
    if (filterError) {
      if (!options?.targetDir) {
        await fs__default.default.remove(dir).catch(() => {
        });
      }
      throw filterError;
    }
    return dir;
  }
}

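// Illustrative sketch (not part of the published bundle): a TarArchiveResponse
// is single-use. Whichever of files(), archive() or dir() runs first consumes
// the underlying stream, and any later call throws. The `response` value is
// assumed to come from fromTarArchive() below.
async function exampleTarResponseOnce(response) {
  const files = await response.files(); // consumes the stream
  try {
    await response.dir();
  } catch (e) {
    // Error: "Response has already been read"
  }
  return files;
}
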
class ZipArchiveResponse {
  constructor(stream, subPath, workDir, etag, filter) {
    this.stream = stream;
    this.subPath = subPath;
    this.workDir = workDir;
    this.etag = etag;
    this.filter = filter;
    if (subPath) {
      if (!subPath.endsWith("/")) {
        this.subPath += "/";
      }
      if (subPath.startsWith("/")) {
        throw new TypeError(
          `ZipArchiveResponse subPath must not start with a /, got '${subPath}'`
        );
      }
    }
    this.etag = etag;
  }
  read = false;
  // Make sure the input stream is only read once
  onlyOnce() {
    if (this.read) {
      throw new Error("Response has already been read");
    }
    this.read = true;
  }
  // File path relative to the root extracted directory or a sub directory if subpath is set.
  getInnerPath(path) {
    return path.slice(this.subPath.length);
  }
  shouldBeIncluded(entry) {
    if (this.subPath) {
      if (!entry.fileName.startsWith(this.subPath)) {
        return false;
      }
    }
    if (this.filter) {
      return this.filter(this.getInnerPath(entry.fileName), {
        size: entry.uncompressedSize
      });
    }
    return true;
  }
  async streamToTemporaryFile(stream) {
    const tmpDir = await fs__default.default.mkdtemp(
      platformPath__default.default.join(this.workDir, "backstage-tmp")
    );
    const tmpFile = platformPath__default.default.join(tmpDir, "tmp.zip");
    const writeStream = fs__default.default.createWriteStream(tmpFile);
    return new Promise((resolve, reject) => {
      writeStream.on("error", reject);
      writeStream.on("finish", () => {
        writeStream.end();
        resolve({
          fileName: tmpFile,
          cleanup: () => fs__default.default.rm(tmpDir, { recursive: true })
        });
      });
      stream.pipe(writeStream);
    });
  }
  forEveryZipEntry(zip, callback) {
    return new Promise((resolve, reject) => {
      yauzl__default.default.open(zip, { lazyEntries: true }, (err, zipfile) => {
        if (err || !zipfile) {
          reject(err || new Error(`Failed to open zip file ${zip}`));
          return;
        }
        zipfile.on("entry", async (entry) => {
          if (!entry.fileName.endsWith("/") && this.shouldBeIncluded(entry)) {
            zipfile.openReadStream(entry, async (openErr, readStream) => {
              if (openErr || !readStream) {
                reject(
                  openErr || new Error(`Failed to open zip entry ${entry.fileName}`)
                );
                return;
              }
              await callback(entry, readStream);
              zipfile.readEntry();
            });
          } else {
            zipfile.readEntry();
          }
        });
        zipfile.once("end", () => resolve());
        zipfile.on("error", (e) => reject(e));
        zipfile.readEntry();
      });
    });
  }
  async files() {
    this.onlyOnce();
    const files = Array();
    const temporary = await this.streamToTemporaryFile(this.stream);
    await this.forEveryZipEntry(temporary.fileName, async (entry, content) => {
      files.push({
        path: this.getInnerPath(entry.fileName),
        content: async () => await streamToBuffer(content),
        lastModifiedAt: entry.lastModFileTime ? new Date(entry.lastModFileTime) : void 0
      });
    });
    await temporary.cleanup();
    return files;
  }
  async archive() {
    this.onlyOnce();
    if (!this.subPath) {
      return this.stream;
    }
    const archive = archiver__default.default("zip");
    const temporary = await this.streamToTemporaryFile(this.stream);
    await this.forEveryZipEntry(temporary.fileName, async (entry, content) => {
      archive.append(await streamToBuffer(content), {
        name: this.getInnerPath(entry.fileName)
      });
    });
    archive.finalize();
    await temporary.cleanup();
    return archive;
  }
  async dir(options) {
    this.onlyOnce();
    const dir = options?.targetDir ?? await fs__default.default.mkdtemp(platformPath__default.default.join(this.workDir, "backstage-"));
    const temporary = await this.streamToTemporaryFile(this.stream);
    await this.forEveryZipEntry(temporary.fileName, async (entry, content) => {
      const entryPath = this.getInnerPath(entry.fileName);
      const dirname = platformPath__default.default.dirname(entryPath);
      if (dirname) {
        await fs__default.default.mkdirp(backendPluginApi.resolveSafeChildPath(dir, dirname));
      }
      return new Promise(async (resolve, reject) => {
        const file = fs__default.default.createWriteStream(backendPluginApi.resolveSafeChildPath(dir, entryPath));
        file.on("finish", resolve);
        content.on("error", reject);
        content.pipe(file);
      });
    });
    await temporary.cleanup();
    return dir;
  }
}

const pipeline = util.promisify(stream.pipeline);
class ReadableArrayResponse {
  constructor(stream, workDir, etag) {
    this.stream = stream;
    this.workDir = workDir;
    this.etag = etag;
    this.etag = etag;
  }
  read = false;
  // Make sure the input stream is only read once
  onlyOnce() {
    if (this.read) {
      throw new Error("Response has already been read");
    }
    this.read = true;
  }
  async files() {
    this.onlyOnce();
    const files = Array();
    for (let i = 0; i < this.stream.length; i++) {
      if (!this.stream[i].path.endsWith("/")) {
        files.push({
          path: this.stream[i].path,
          content: () => getRawBody__default.default(this.stream[i].data),
          lastModifiedAt: this.stream[i]?.lastModifiedAt
        });
      }
    }
    return files;
  }
  async archive() {
    const tmpDir = await this.dir();
    try {
      const data = await new Promise(async (resolve) => {
        await pipeline(
          tar__default.default.create({ cwd: tmpDir }, [""]),
          concatStream__default.default(resolve)
        );
      });
      return stream.Readable.from(data);
    } finally {
      await fs__default.default.remove(tmpDir);
    }
  }
  async dir(options) {
    this.onlyOnce();
    const dir = options?.targetDir ?? await fs__default.default.mkdtemp(platformPath__default.default.join(this.workDir, "backstage-"));
    for (let i = 0; i < this.stream.length; i++) {
      if (!this.stream[i].path.endsWith("/")) {
        const filePath = platformPath__default.default.join(dir, this.stream[i].path);
        await fs__default.default.mkdir(platformPath.dirname(filePath), { recursive: true });
        await pipeline(this.stream[i].data, fs__default.default.createWriteStream(filePath));
      }
    }
    return dir;
  }
}

class DefaultReadTreeResponseFactory {
|
|
2266
|
-
constructor(workDir) {
|
|
2267
|
-
this.workDir = workDir;
|
|
2268
|
-
}
|
|
2269
|
-
static create(options) {
|
|
2270
|
-
return new DefaultReadTreeResponseFactory(
|
|
2271
|
-
options.config.getOptionalString("backend.workingDirectory") ?? os__default.default.tmpdir()
|
|
2272
|
-
);
|
|
2273
|
-
}
|
|
2274
|
-
async fromTarArchive(options) {
|
|
2275
|
-
return new TarArchiveResponse(
|
|
2276
|
-
options.stream,
|
|
2277
|
-
options.subpath ?? "",
|
|
2278
|
-
this.workDir,
|
|
2279
|
-
options.etag,
|
|
2280
|
-
options.filter,
|
|
2281
|
-
options.stripFirstDirectory ?? true
|
|
2282
|
-
);
|
|
2283
|
-
}
|
|
2284
|
-
async fromZipArchive(options) {
|
|
2285
|
-
return new ZipArchiveResponse(
|
|
2286
|
-
options.stream,
|
|
2287
|
-
options.subpath ?? "",
|
|
2288
|
-
this.workDir,
|
|
2289
|
-
options.etag,
|
|
2290
|
-
options.filter
|
|
2291
|
-
);
|
|
2292
|
-
}
|
|
2293
|
-
async fromReadableArray(options) {
|
|
2294
|
-
return new ReadableArrayResponse(options, this.workDir, "");
|
|
2295
|
-
}
|
|
2296
|
-
}
|
|
2297
|
-
|
|
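All of the response classes above enforce a single-read contract through onlyOnce and expose the same files/archive/dir trio that backs UrlReaderService.readTree. A hedged consumption sketch (the reader instance and URL are assumptions, not taken from this diff):

import type { UrlReaderService } from '@backstage/backend-plugin-api';

async function listTree(reader: UrlReaderService, url: string) {
  const response = await reader.readTree(url);
  // A response may be consumed exactly once: files(), archive(), or dir().
  for (const file of await response.files()) {
    const content = await file.content(); // Buffer
    console.log(file.path, content.length);
  }
}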
-var name = "@backstage/backend-defaults";
-var version = "0.5.1-next.0";
-var description = "Backend defaults used by Backstage backend apps";
-var backstage = {
-  role: "node-library"
-};
-var publishConfig = {
-  access: "public"
-};
-var keywords = [
-  "backstage"
-];
-var homepage = "https://backstage.io";
-var repository = {
-  type: "git",
-  url: "https://github.com/backstage/backstage",
-  directory: "packages/backend-defaults"
-};
-var license = "Apache-2.0";
-var exports$1 = {
-  ".": "./src/index.ts",
-  "./auth": "./src/entrypoints/auth/index.ts",
-  "./cache": "./src/entrypoints/cache/index.ts",
-  "./database": "./src/entrypoints/database/index.ts",
-  "./discovery": "./src/entrypoints/discovery/index.ts",
-  "./httpAuth": "./src/entrypoints/httpAuth/index.ts",
-  "./httpRouter": "./src/entrypoints/httpRouter/index.ts",
-  "./lifecycle": "./src/entrypoints/lifecycle/index.ts",
-  "./logger": "./src/entrypoints/logger/index.ts",
-  "./permissions": "./src/entrypoints/permissions/index.ts",
-  "./rootConfig": "./src/entrypoints/rootConfig/index.ts",
-  "./rootHealth": "./src/entrypoints/rootHealth/index.ts",
-  "./rootHttpRouter": "./src/entrypoints/rootHttpRouter/index.ts",
-  "./rootLifecycle": "./src/entrypoints/rootLifecycle/index.ts",
-  "./rootLogger": "./src/entrypoints/rootLogger/index.ts",
-  "./scheduler": "./src/entrypoints/scheduler/index.ts",
-  "./urlReader": "./src/entrypoints/urlReader/index.ts",
-  "./userInfo": "./src/entrypoints/userInfo/index.ts",
-  "./package.json": "./package.json"
-};
-var main = "src/index.ts";
-var types = "src/index.ts";
-var typesVersions = {
-  "*": {
-    auth: [
-      "src/entrypoints/auth/index.ts"
-    ],
-    cache: [
-      "src/entrypoints/cache/index.ts"
-    ],
-    database: [
-      "src/entrypoints/database/index.ts"
-    ],
-    discovery: [
-      "src/entrypoints/discovery/index.ts"
-    ],
-    httpAuth: [
-      "src/entrypoints/httpAuth/index.ts"
-    ],
-    httpRouter: [
-      "src/entrypoints/httpRouter/index.ts"
-    ],
-    lifecycle: [
-      "src/entrypoints/lifecycle/index.ts"
-    ],
-    logger: [
-      "src/entrypoints/logger/index.ts"
-    ],
-    permissions: [
-      "src/entrypoints/permissions/index.ts"
-    ],
-    rootConfig: [
-      "src/entrypoints/rootConfig/index.ts"
-    ],
-    rootHealth: [
-      "src/entrypoints/rootHealth/index.ts"
-    ],
-    rootHttpRouter: [
-      "src/entrypoints/rootHttpRouter/index.ts"
-    ],
-    rootLifecycle: [
-      "src/entrypoints/rootLifecycle/index.ts"
-    ],
-    rootLogger: [
-      "src/entrypoints/rootLogger/index.ts"
-    ],
-    scheduler: [
-      "src/entrypoints/scheduler/index.ts"
-    ],
-    urlReader: [
-      "src/entrypoints/urlReader/index.ts"
-    ],
-    userInfo: [
-      "src/entrypoints/userInfo/index.ts"
-    ],
-    "package.json": [
-      "package.json"
-    ]
-  }
-};
-var files = [
-  "config.d.ts",
-  "dist",
-  "migrations"
-];
-var scripts = {
-  build: "backstage-cli package build",
-  clean: "backstage-cli package clean",
-  lint: "backstage-cli package lint",
-  prepack: "backstage-cli package prepack",
-  postpack: "backstage-cli package postpack",
-  start: "backstage-cli package start",
-  test: "backstage-cli package test"
-};
-var dependencies = {
-  "@aws-sdk/abort-controller": "^3.347.0",
-  "@aws-sdk/client-codecommit": "^3.350.0",
-  "@aws-sdk/client-s3": "^3.350.0",
-  "@aws-sdk/credential-providers": "^3.350.0",
-  "@aws-sdk/types": "^3.347.0",
-  "@backstage/backend-app-api": "workspace:^",
-  "@backstage/backend-common": "^0.25.0",
-  "@backstage/backend-dev-utils": "workspace:^",
-  "@backstage/backend-plugin-api": "workspace:^",
-  "@backstage/cli-common": "workspace:^",
-  "@backstage/cli-node": "workspace:^",
-  "@backstage/config": "workspace:^",
-  "@backstage/config-loader": "workspace:^",
-  "@backstage/errors": "workspace:^",
-  "@backstage/integration": "workspace:^",
-  "@backstage/integration-aws-node": "workspace:^",
-  "@backstage/plugin-auth-node": "workspace:^",
-  "@backstage/plugin-events-node": "workspace:^",
-  "@backstage/plugin-permission-node": "workspace:^",
-  "@backstage/types": "workspace:^",
-  "@google-cloud/storage": "^7.0.0",
-  "@keyv/memcache": "^1.3.5",
-  "@keyv/redis": "^2.5.3",
-  "@manypkg/get-packages": "^1.1.3",
-  "@octokit/rest": "^19.0.3",
-  "@opentelemetry/api": "^1.3.0",
-  "@types/cors": "^2.8.6",
-  "@types/express": "^4.17.6",
-  archiver: "^7.0.0",
-  "base64-stream": "^1.0.0",
-  "better-sqlite3": "^11.0.0",
-  compression: "^1.7.4",
-  "concat-stream": "^2.0.0",
-  cookie: "^0.6.0",
-  cors: "^2.8.5",
-  cron: "^3.0.0",
-  express: "^4.17.1",
-  "express-promise-router": "^4.1.0",
-  "fs-extra": "^11.2.0",
-  "git-url-parse": "^14.0.0",
-  helmet: "^6.0.0",
-  "isomorphic-git": "^1.23.0",
-  jose: "^5.0.0",
-  keyv: "^4.5.2",
-  knex: "^3.0.0",
-  lodash: "^4.17.21",
-  logform: "^2.3.2",
-  luxon: "^3.0.0",
-  minimatch: "^9.0.0",
-  minimist: "^1.2.5",
-  morgan: "^1.10.0",
-  mysql2: "^3.0.0",
-  "node-fetch": "^2.7.0",
-  "node-forge": "^1.3.1",
-  "p-limit": "^3.1.0",
-  "path-to-regexp": "^8.0.0",
-  pg: "^8.11.3",
-  "pg-connection-string": "^2.3.0",
-  "pg-format": "^1.0.4",
-  "raw-body": "^2.4.1",
-  selfsigned: "^2.0.0",
-  stoppable: "^1.1.0",
-  tar: "^6.1.12",
-  "triple-beam": "^1.4.1",
-  uuid: "^9.0.0",
-  winston: "^3.2.1",
-  "winston-transport": "^4.5.0",
-  yauzl: "^3.0.0",
-  yn: "^4.0.0",
-  zod: "^3.22.4"
-};
-var devDependencies = {
-  "@aws-sdk/util-stream-node": "^3.350.0",
-  "@backstage/backend-plugin-api": "workspace:^",
-  "@backstage/backend-test-utils": "workspace:^",
-  "@backstage/cli": "workspace:^",
-  "@types/archiver": "^6.0.0",
-  "@types/base64-stream": "^1.0.2",
-  "@types/concat-stream": "^2.0.0",
-  "@types/http-errors": "^2.0.0",
-  "@types/morgan": "^1.9.0",
-  "@types/node-forge": "^1.3.0",
-  "@types/pg-format": "^1.0.5",
-  "@types/stoppable": "^1.1.0",
-  "@types/yauzl": "^2.10.0",
-  "aws-sdk-client-mock": "^4.0.0",
-  "http-errors": "^2.0.0",
-  msw: "^1.0.0",
-  supertest: "^7.0.0",
-  "wait-for-expect": "^3.0.2"
-};
-var configSchema = "config.d.ts";
-var packageinfo = {
-  name: name,
-  version: version,
-  description: description,
-  backstage: backstage,
-  publishConfig: publishConfig,
-  keywords: keywords,
-  homepage: homepage,
-  repository: repository,
-  license: license,
-  exports: exports$1,
-  main: main,
-  types: types,
-  typesVersions: typesVersions,
-  files: files,
-  scripts: scripts,
-  dependencies: dependencies,
-  devDependencies: devDependencies,
-  configSchema: configSchema
-};
-
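The block above is the package.json inlined into the bundle as packageinfo, so runtime code can read its own version without a filesystem lookup; the readers below use it for user-agent strings. A sketch of the same pattern, assuming a build setup that can import JSON:

import packageinfo from '../package.json';

// Mirrors the userAgent strings built further down in this file.
export function buildUserAgent(component: string): string {
  return `backstage/backend-defaults.${component}/${packageinfo.version}`;
}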
-const GOOGLE_GCS_HOST = "storage.cloud.google.com";
-const parseURL = (url) => {
-  const { host, pathname } = new URL(url);
-  if (host !== GOOGLE_GCS_HOST) {
-    throw new Error(`not a valid GCS URL: ${url}`);
-  }
-  const [, bucket, ...key] = pathname.split("/");
-  return {
-    host,
-    bucket,
-    key: key.join("/")
-  };
-};
-class GoogleGcsUrlReader {
-  constructor(integration, storage) {
-    this.integration = integration;
-    this.storage = storage;
-  }
-  static factory = ({ config, logger }) => {
-    if (!config.has("integrations.googleGcs")) {
-      return [];
-    }
-    const gcsConfig = integration.readGoogleGcsIntegrationConfig(
-      config.getConfig("integrations.googleGcs")
-    );
-    let storage;
-    if (!gcsConfig.clientEmail || !gcsConfig.privateKey) {
-      logger.info(
-        "googleGcs credentials not found in config. Using default credentials provider."
-      );
-      storage = new GoogleCloud__namespace.Storage({
-        userAgent: `backstage/backend-defaults.GoogleGcsUrlReader/${packageinfo.version}`
-      });
-    } else {
-      storage = new GoogleCloud__namespace.Storage({
-        credentials: {
-          client_email: gcsConfig.clientEmail || void 0,
-          private_key: gcsConfig.privateKey || void 0
-        },
-        userAgent: `backstage/backend-defaults.GoogleGcsUrlReader/${packageinfo.version}`
-      });
-    }
-    const reader = new GoogleGcsUrlReader(gcsConfig, storage);
-    const predicate = (url) => url.host === GOOGLE_GCS_HOST;
-    return [{ reader, predicate }];
-  };
-  readStreamFromUrl(url) {
-    const { bucket, key } = parseURL(url);
-    return this.storage.bucket(bucket).file(key).createReadStream();
-  }
-  async read(url) {
-    try {
-      return await getRawBody__default.default(this.readStreamFromUrl(url));
-    } catch (error) {
-      throw new Error(`unable to read gcs file from ${url}, ${error}`);
-    }
-  }
-  async readUrl(url, _options) {
-    const stream = this.readStreamFromUrl(url);
-    return ReadUrlResponseFactory.fromReadable(stream);
-  }
-  async readTree() {
-    throw new Error("GcsUrlReader does not implement readTree");
-  }
-  async search(url) {
-    const { bucket, key: pattern } = parseURL(url);
-    if (!pattern.endsWith("*") || pattern.indexOf("*") !== pattern.length - 1) {
-      throw new Error("GcsUrlReader only supports prefix-based searches");
-    }
-    const [files] = await this.storage.bucket(bucket).getFiles({
-      autoPaginate: true,
-      prefix: pattern.split("*").join("")
-    });
-    return {
-      files: files.map((file) => {
-        const fullUrl = ["https:/", GOOGLE_GCS_HOST, bucket, file.name].join(
-          "/"
-        );
-        return {
-          url: fullUrl,
-          content: async () => {
-            const readResponse = await this.readUrl(fullUrl);
-            return readResponse.buffer();
-          }
-        };
-      }),
-      // TODO etag is not implemented yet.
-      etag: "NOT/IMPLEMENTED"
-    };
-  }
-  toString() {
-    const key = this.integration.privateKey;
-    return `googleGcs{host=${GOOGLE_GCS_HOST},authed=${Boolean(key)}}`;
-  }
-}
-
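The factory above only activates when integrations.googleGcs is present in config, and falls back to the default Google credentials provider when clientEmail or privateKey is missing. A hedged configuration sketch, with keys as read by readGoogleGcsIntegrationConfig and placeholder values:

// app-config.yaml (shown as comments):
// integrations:
//   googleGcs:
//     clientEmail: gcs-reader@my-project.iam.gserviceaccount.com  # optional
//     privateKey: ${GCS_PRIVATE_KEY}                              # optional
//
// URLs handled by this reader then take the form:
//   https://storage.cloud.google.com/<bucket>/<path/to/file>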
-function parseUrl(url, requireGitPath = false) {
-  const parsedUrl = new URL(url);
-  if (parsedUrl.pathname.includes("/files/edit/")) {
-    throw new Error(
-      "Please provide the view url to yaml file from CodeCommit, not the edit url"
-    );
-  }
-  if (requireGitPath && !parsedUrl.pathname.includes("/browse/")) {
-    throw new Error("Please provide full path to yaml file from CodeCommit");
-  }
-  const hostMatch = parsedUrl.host.match(
-    /^([^\.]+)\.console\.aws\.amazon\.com$/
-  );
-  if (!hostMatch) {
-    throw new Error(
-      `Invalid AWS CodeCommit URL (unexpected host format): ${url}`
-    );
-  }
-  const [, region] = hostMatch;
-  const pathMatch = parsedUrl.pathname.match(
-    /^\/codesuite\/codecommit\/repositories\/([^\/]+)\/browse\/((.*)\/)?--\/(.*)$/
-  );
-  if (!pathMatch) {
-    if (!requireGitPath) {
-      const pathname = parsedUrl.pathname.split("/--/")[0].replace("/codesuite/codecommit/repositories/", "");
-      const [repositoryName2, commitSpecifier2] = pathname.split("/browse");
-      return {
-        region,
-        repositoryName: repositoryName2.replace(/^\/|\/$/g, ""),
-        path: "/",
-        commitSpecifier: commitSpecifier2 === "" ? void 0 : commitSpecifier2?.replace(/^\/|\/$/g, "")
-      };
-    }
-    throw new Error(
-      `Invalid AWS CodeCommit URL (unexpected path format): ${url}`
-    );
-  }
-  const [, repositoryName, , commitSpecifier, path] = pathMatch;
-  return {
-    region,
-    repositoryName,
-    path,
-    // the commitSpecifier is passed to AWS SDK which does not allow empty strings so replace empty string with undefined
-    commitSpecifier: commitSpecifier === "" ? void 0 : commitSpecifier
-  };
-}
-class AwsCodeCommitUrlReader {
-  constructor(credsManager, integration, deps) {
-    this.credsManager = credsManager;
-    this.integration = integration;
-    this.deps = deps;
-  }
-  static factory = ({ config, treeResponseFactory }) => {
-    const integrations = integration.ScmIntegrations.fromConfig(config);
-    const credsManager = integrationAwsNode.DefaultAwsCredentialsManager.fromConfig(config);
-    return integrations.awsCodeCommit.list().map((integration) => {
-      const reader = new AwsCodeCommitUrlReader(credsManager, integration, {
-        treeResponseFactory
-      });
-      const predicate = (url) => {
-        return url.host.endsWith(integration.config.host) && url.pathname.startsWith("/codesuite/codecommit");
-      };
-      return { reader, predicate };
-    });
-  };
-  /**
-   * If accessKeyId and secretAccessKey are missing, the standard credentials provider chain will be used:
-   * https://docs.aws.amazon.com/AWSJavaSDK/latest/javadoc/com/amazonaws/auth/DefaultAWSCredentialsProviderChain.html
-   */
-  static buildStaticCredentials(accessKeyId, secretAccessKey) {
-    return async () => {
-      return {
-        accessKeyId,
-        secretAccessKey
-      };
-    };
-  }
-  static async buildCredentials(credsManager, region, integration) {
-    if (!integration) {
-      return (await credsManager.getCredentialProvider()).sdkCredentialProvider;
-    }
-    const accessKeyId = integration.config.accessKeyId;
-    const secretAccessKey = integration.config.secretAccessKey;
-    let explicitCredentials;
-    if (accessKeyId && secretAccessKey) {
-      explicitCredentials = AwsCodeCommitUrlReader.buildStaticCredentials(
-        accessKeyId,
-        secretAccessKey
-      );
-    } else {
-      explicitCredentials = (await credsManager.getCredentialProvider()).sdkCredentialProvider;
-    }
-    const roleArn = integration.config.roleArn;
-    if (roleArn) {
-      return credentialProviders.fromTemporaryCredentials({
-        masterCredentials: explicitCredentials,
-        params: {
-          RoleSessionName: "backstage-aws-code-commit-url-reader",
-          RoleArn: roleArn,
-          ExternalId: integration.config.externalId
-        },
-        clientConfig: { region }
-      });
-    }
-    return explicitCredentials;
-  }
-  async buildCodeCommitClient(credsManager, region, integration) {
-    const credentials = await AwsCodeCommitUrlReader.buildCredentials(
-      credsManager,
-      region,
-      integration
-    );
-    const codeCommit = new clientCodecommit.CodeCommitClient({
-      customUserAgent: "backstage-aws-codecommit-url-reader",
-      region,
-      credentials
-    });
-    return codeCommit;
-  }
-  async readUrl(url, options) {
-    try {
-      const { path, repositoryName, region, commitSpecifier } = parseUrl(
-        url,
-        true
-      );
-      const codeCommitClient = await this.buildCodeCommitClient(
-        this.credsManager,
-        region,
-        this.integration
-      );
-      const abortController$1 = new abortController.AbortController();
-      const input = {
-        repositoryName,
-        commitSpecifier,
-        filePath: path
-      };
-      options?.signal?.addEventListener("abort", () => abortController$1.abort());
-      const getObjectCommand = new clientCodecommit.GetFileCommand(input);
-      const response = await codeCommitClient.send(
-        getObjectCommand,
-        {
-          abortSignal: abortController$1.signal
-        }
-      );
-      if (options?.etag && options.etag === response.commitId) {
-        throw new errors.NotModifiedError();
-      }
-      return ReadUrlResponseFactory.fromReadable(
-        stream.Readable.from([response?.fileContent]),
-        {
-          etag: response.commitId
-        }
-      );
-    } catch (e) {
-      if (e.$metadata && e.$metadata.httpStatusCode === 304) {
-        throw new errors.NotModifiedError();
-      }
-      if (e.name && e.name === "NotModifiedError") {
-        throw new errors.NotModifiedError();
-      }
-      throw new errors.ForwardedError("Could not retrieve file from CodeCommit", e);
-    }
-  }
-  async readTreePath(codeCommitClient, abortSignal, path, repositoryName, commitSpecifier, etag) {
-    const getFolderCommand = new clientCodecommit.GetFolderCommand({
-      folderPath: path,
-      repositoryName,
-      commitSpecifier
-    });
-    const response = await codeCommitClient.send(getFolderCommand, {
-      abortSignal
-    });
-    if (etag && etag === response.commitId) {
-      throw new errors.NotModifiedError();
-    }
-    const output = [];
-    if (response.files) {
-      response.files.forEach((file) => {
-        if (file.absolutePath) {
-          output.push(file.absolutePath);
-        }
-      });
-    }
-    if (!response.subFolders) {
-      return output;
-    }
-    for (const subFolder of response.subFolders) {
-      if (subFolder.absolutePath) {
-        output.push(
-          ...await this.readTreePath(
-            codeCommitClient,
-            abortSignal,
-            subFolder.absolutePath,
-            repositoryName,
-            commitSpecifier,
-            etag
-          )
-        );
-      }
-    }
-    return output;
-  }
-  async readTree(url, options) {
-    try {
-      const { path, repositoryName, region, commitSpecifier } = parseUrl(url);
-      const codeCommitClient = await this.buildCodeCommitClient(
-        this.credsManager,
-        region,
-        this.integration
-      );
-      const abortController$1 = new abortController.AbortController();
-      options?.signal?.addEventListener("abort", () => abortController$1.abort());
-      const allFiles = await this.readTreePath(
-        codeCommitClient,
-        abortController$1.signal,
-        path,
-        repositoryName,
-        commitSpecifier,
-        options?.etag
-      );
-      const responses = [];
-      for (let i = 0; i < allFiles.length; i++) {
-        const getFileCommand = new clientCodecommit.GetFileCommand({
-          repositoryName,
-          filePath: String(allFiles[i]),
-          commitSpecifier
-        });
-        const response = await codeCommitClient.send(getFileCommand);
-        const objectData = await stream.Readable.from([response?.fileContent]);
-        responses.push({
-          data: objectData,
-          path: posix.relative(
-            path.startsWith("/") ? path : `/${path}`,
-            allFiles[i].startsWith("/") ? allFiles[i] : `/${allFiles[i]}`
-          )
-        });
-      }
-      return await this.deps.treeResponseFactory.fromReadableArray(responses);
-    } catch (e) {
-      if (e.name && e.name === "NotModifiedError") {
-        throw new errors.NotModifiedError();
-      }
-      throw new errors.ForwardedError(
-        "Could not retrieve file tree from CodeCommit",
-        e
-      );
-    }
-  }
-  async search() {
-    throw new Error("AwsCodeCommitReader does not implement search");
-  }
-  toString() {
-    const secretAccessKey = this.integration.config.secretAccessKey;
-    return `awsCodeCommit{host=${this.integration.config.host},authed=${Boolean(
-      secretAccessKey
-    )}}`;
-  }
-}
-
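parseUrl above splits AWS console "browse" URLs into region, repository, commit specifier, and file path. A worked example against the host and path regexes (repository, region, and file names are made up):

const url =
  'https://eu-west-1.console.aws.amazon.com/codesuite/codecommit/repositories/my-repo/browse/refs/heads/main/--/catalog-info.yaml';
// parseUrl(url, true) yields:
//   region:          'eu-west-1'
//   repositoryName:  'my-repo'
//   commitSpecifier: 'refs/heads/main'
//   path:            'catalog-info.yaml'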
-class UrlReaders {
-  /**
-   * Creates a custom {@link @backstage/backend-plugin-api#UrlReaderService} wrapper for your own set of factories.
-   */
-  static create(options) {
-    const { logger, config, factories } = options;
-    const mux = new UrlReaderPredicateMux();
-    const treeResponseFactory = DefaultReadTreeResponseFactory.create({
-      config
-    });
-    for (const factory of factories ?? []) {
-      const tuples = factory({ config, logger, treeResponseFactory });
-      for (const tuple of tuples) {
-        mux.register(tuple);
-      }
-    }
-    return mux;
-  }
-  /**
-   * Creates a {@link @backstage/backend-plugin-api#UrlReaderService} wrapper that includes all the default factories
-   * from this package.
-   *
-   * Any additional factories passed will be loaded before the default ones.
-   */
-  static default(options) {
-    const { logger, config, factories = [] } = options;
-    return UrlReaders.create({
-      logger,
-      config,
-      factories: factories.concat([
-        AzureUrlReader.factory,
-        BitbucketCloudUrlReader.factory,
-        BitbucketServerUrlReader.factory,
-        BitbucketUrlReader.factory,
-        GerritUrlReader.factory,
-        GithubUrlReader.factory,
-        GiteaUrlReader.factory,
-        GitlabUrlReader.factory,
-        GoogleGcsUrlReader.factory,
-        HarnessUrlReader.factory,
-        AwsS3UrlReader.factory,
-        AwsCodeCommitUrlReader.factory,
-        FetchUrlReader.factory
-      ])
-    });
-  }
-}
-
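UrlReaders.default above concatenates any caller-supplied factories ahead of the built-in ones, so custom readers take precedence. A minimal sketch, assuming config is a Config and logger a LoggerService obtained from an existing backend (in the new backend system this wiring is done by urlReaderServiceFactory below):

import type { Config } from '@backstage/config';
import type { LoggerService } from '@backstage/backend-plugin-api';
import { UrlReaders } from '@backstage/backend-defaults/urlReader';

async function fetchFile(config: Config, logger: LoggerService) {
  const reader = UrlReaders.default({ config, logger });
  const response = await reader.readUrl(
    'https://github.com/backstage/backstage/blob/master/catalog-info.yaml',
  );
  return (await response.buffer()).toString('utf8');
}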
-const urlReaderFactoriesServiceRef = backendPluginApi.createServiceRef({
-  id: "core.urlReader.factories",
-  scope: "plugin",
-  multiton: true
-});
-const urlReaderServiceFactory = backendPluginApi.createServiceFactory({
-  service: backendPluginApi.coreServices.urlReader,
-  deps: {
-    config: backendPluginApi.coreServices.rootConfig,
-    logger: backendPluginApi.coreServices.logger,
-    factories: urlReaderFactoriesServiceRef
-  },
-  async factory({ config, logger, factories }) {
-    return UrlReaders.default({
-      config,
-      logger,
-      factories
-    });
-  }
-});
-
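Because urlReaderFactoriesServiceRef is declared as a plugin-scoped multiton, extra reader factories can be contributed alongside the defaults instead of replacing the whole service. A hedged registration sketch; MyCustomUrlReader is hypothetical and the shape follows createServiceFactory from @backstage/backend-plugin-api:

import { createServiceFactory } from '@backstage/backend-plugin-api';
import { urlReaderFactoriesServiceRef } from '@backstage/backend-defaults/urlReader';

// `backend` is assumed to come from createBackend() elsewhere.
backend.add(
  createServiceFactory({
    service: urlReaderFactoriesServiceRef,
    deps: {},
    factory: () => MyCustomUrlReader.factory, // hypothetical custom reader
  }),
);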
-exports.AwsS3UrlReader = AwsS3UrlReader;
-exports.AzureUrlReader = AzureUrlReader;
-exports.BitbucketCloudUrlReader = BitbucketCloudUrlReader;
-exports.BitbucketServerUrlReader = BitbucketServerUrlReader;
-exports.BitbucketUrlReader = BitbucketUrlReader;
-exports.FetchUrlReader = FetchUrlReader;
-exports.GerritUrlReader = GerritUrlReader;
-exports.GiteaUrlReader = GiteaUrlReader;
-exports.GithubUrlReader = GithubUrlReader;
-exports.GitlabUrlReader = GitlabUrlReader;
-exports.HarnessUrlReader = HarnessUrlReader;
-exports.ReadUrlResponseFactory = ReadUrlResponseFactory;
-exports.UrlReaders = UrlReaders;
-exports.urlReaderFactoriesServiceRef = urlReaderFactoriesServiceRef;
-exports.urlReaderServiceFactory = urlReaderServiceFactory;
+var AzureUrlReader = require('./entrypoints/urlReader/lib/AzureUrlReader.cjs.js');
+var BitbucketCloudUrlReader = require('./entrypoints/urlReader/lib/BitbucketCloudUrlReader.cjs.js');
+var BitbucketUrlReader = require('./entrypoints/urlReader/lib/BitbucketUrlReader.cjs.js');
+var BitbucketServerUrlReader = require('./entrypoints/urlReader/lib/BitbucketServerUrlReader.cjs.js');
+var GerritUrlReader = require('./entrypoints/urlReader/lib/GerritUrlReader.cjs.js');
+var GithubUrlReader = require('./entrypoints/urlReader/lib/GithubUrlReader.cjs.js');
+var GitlabUrlReader = require('./entrypoints/urlReader/lib/GitlabUrlReader.cjs.js');
+var GiteaUrlReader = require('./entrypoints/urlReader/lib/GiteaUrlReader.cjs.js');
+var HarnessUrlReader = require('./entrypoints/urlReader/lib/HarnessUrlReader.cjs.js');
+var AwsS3UrlReader = require('./entrypoints/urlReader/lib/AwsS3UrlReader.cjs.js');
+var FetchUrlReader = require('./entrypoints/urlReader/lib/FetchUrlReader.cjs.js');
+var ReadUrlResponseFactory = require('./entrypoints/urlReader/lib/ReadUrlResponseFactory.cjs.js');
+var UrlReaders = require('./entrypoints/urlReader/lib/UrlReaders.cjs.js');
+var urlReaderServiceFactory = require('./entrypoints/urlReader/urlReaderServiceFactory.cjs.js');
+
+
+
+exports.AzureUrlReader = AzureUrlReader.AzureUrlReader;
+exports.BitbucketCloudUrlReader = BitbucketCloudUrlReader.BitbucketCloudUrlReader;
+exports.BitbucketUrlReader = BitbucketUrlReader.BitbucketUrlReader;
+exports.BitbucketServerUrlReader = BitbucketServerUrlReader.BitbucketServerUrlReader;
+exports.GerritUrlReader = GerritUrlReader.GerritUrlReader;
+exports.GithubUrlReader = GithubUrlReader.GithubUrlReader;
+exports.GitlabUrlReader = GitlabUrlReader.GitlabUrlReader;
+exports.GiteaUrlReader = GiteaUrlReader.GiteaUrlReader;
+exports.HarnessUrlReader = HarnessUrlReader.HarnessUrlReader;
+exports.AwsS3UrlReader = AwsS3UrlReader.AwsS3UrlReader;
+exports.FetchUrlReader = FetchUrlReader.FetchUrlReader;
+exports.ReadUrlResponseFactory = ReadUrlResponseFactory.ReadUrlResponseFactory;
+exports.UrlReaders = UrlReaders.UrlReaders;
+exports.urlReaderFactoriesServiceRef = urlReaderServiceFactory.urlReaderFactoriesServiceRef;
+exports.urlReaderServiceFactory = urlReaderServiceFactory.urlReaderServiceFactory;
 //# sourceMappingURL=urlReader.cjs.js.map
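Net effect of this file's diff: the monolithic urlReader bundle is replaced by thin re-exports over per-module files under dist/entrypoints/urlReader, matching the ./urlReader subpath declared in the package.json above. Consumers import from the subpath, for example:

import {
  UrlReaders,
  urlReaderServiceFactory,
  urlReaderFactoriesServiceRef,
} from '@backstage/backend-defaults/urlReader';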