@backstage/backend-defaults 0.5.1-next.1 → 0.5.1-next.2
This diff compares two publicly released versions of this package as they appear in their respective public registries. It is provided for informational purposes only.
- package/CHANGELOG.md +25 -0
- package/auth/package.json +1 -1
- package/cache/package.json +1 -1
- package/database/package.json +1 -1
- package/discovery/package.json +1 -1
- package/dist/CreateBackend.cjs.js +49 -0
- package/dist/CreateBackend.cjs.js.map +1 -0
- package/dist/PackageDiscoveryService.cjs.js +109 -0
- package/dist/PackageDiscoveryService.cjs.js.map +1 -0
- package/dist/auth.cjs.js +2 -996
- package/dist/auth.cjs.js.map +1 -1
- package/dist/cache.cjs.js +4 -204
- package/dist/cache.cjs.js.map +1 -1
- package/dist/database.cjs.js +4 -957
- package/dist/database.cjs.js.map +1 -1
- package/dist/database.d.ts +4 -1
- package/dist/discovery.cjs.js +4 -92
- package/dist/discovery.cjs.js.map +1 -1
- package/dist/discoveryFeatureLoader.cjs.js +19 -0
- package/dist/discoveryFeatureLoader.cjs.js.map +1 -0
- package/dist/entrypoints/auth/DefaultAuthService.cjs.js +130 -0
- package/dist/entrypoints/auth/DefaultAuthService.cjs.js.map +1 -0
- package/dist/entrypoints/auth/JwksClient.cjs.js +49 -0
- package/dist/entrypoints/auth/JwksClient.cjs.js.map +1 -0
- package/dist/entrypoints/auth/authServiceFactory.cjs.js +57 -0
- package/dist/entrypoints/auth/authServiceFactory.cjs.js.map +1 -0
- package/dist/entrypoints/auth/external/ExternalTokenHandler.cjs.js +78 -0
- package/dist/entrypoints/auth/external/ExternalTokenHandler.cjs.js.map +1 -0
- package/dist/entrypoints/auth/external/helpers.cjs.js +92 -0
- package/dist/entrypoints/auth/external/helpers.cjs.js.map +1 -0
- package/dist/entrypoints/auth/external/jwks.cjs.js +63 -0
- package/dist/entrypoints/auth/external/jwks.cjs.js.map +1 -0
- package/dist/entrypoints/auth/external/legacy.cjs.js +73 -0
- package/dist/entrypoints/auth/external/legacy.cjs.js.map +1 -0
- package/dist/entrypoints/auth/external/static.cjs.js +33 -0
- package/dist/entrypoints/auth/external/static.cjs.js.map +1 -0
- package/dist/{cjs/helpers-D2f1CG0o.cjs.js → entrypoints/auth/helpers.cjs.js} +1 -1
- package/dist/entrypoints/auth/helpers.cjs.js.map +1 -0
- package/dist/entrypoints/auth/plugin/PluginTokenHandler.cjs.js +147 -0
- package/dist/entrypoints/auth/plugin/PluginTokenHandler.cjs.js.map +1 -0
- package/dist/entrypoints/auth/plugin/keys/DatabaseKeyStore.cjs.js +73 -0
- package/dist/entrypoints/auth/plugin/keys/DatabaseKeyStore.cjs.js.map +1 -0
- package/dist/entrypoints/auth/plugin/keys/DatabasePluginKeySource.cjs.js +75 -0
- package/dist/entrypoints/auth/plugin/keys/DatabasePluginKeySource.cjs.js.map +1 -0
- package/dist/entrypoints/auth/plugin/keys/StaticConfigPluginKeySource.cjs.js +91 -0
- package/dist/entrypoints/auth/plugin/keys/StaticConfigPluginKeySource.cjs.js.map +1 -0
- package/dist/entrypoints/auth/plugin/keys/createPluginKeySource.cjs.js +29 -0
- package/dist/entrypoints/auth/plugin/keys/createPluginKeySource.cjs.js.map +1 -0
- package/dist/entrypoints/auth/user/UserTokenHandler.cjs.js +110 -0
- package/dist/entrypoints/auth/user/UserTokenHandler.cjs.js.map +1 -0
- package/dist/entrypoints/cache/CacheClient.cjs.js +50 -0
- package/dist/entrypoints/cache/CacheClient.cjs.js.map +1 -0
- package/dist/entrypoints/cache/CacheManager.cjs.js +147 -0
- package/dist/entrypoints/cache/CacheManager.cjs.js.map +1 -0
- package/dist/entrypoints/cache/cacheServiceFactory.cjs.js +22 -0
- package/dist/entrypoints/cache/cacheServiceFactory.cjs.js.map +1 -0
- package/dist/entrypoints/cache/types.cjs.js +10 -0
- package/dist/entrypoints/cache/types.cjs.js.map +1 -0
- package/dist/entrypoints/database/DatabaseManager.cjs.js +173 -0
- package/dist/entrypoints/database/DatabaseManager.cjs.js.map +1 -0
- package/dist/entrypoints/database/connectors/defaultNameOverride.cjs.js +14 -0
- package/dist/entrypoints/database/connectors/defaultNameOverride.cjs.js.map +1 -0
- package/dist/entrypoints/database/connectors/defaultSchemaOverride.cjs.js +12 -0
- package/dist/entrypoints/database/connectors/defaultSchemaOverride.cjs.js.map +1 -0
- package/dist/entrypoints/database/connectors/mergeDatabaseConfig.cjs.js +10 -0
- package/dist/entrypoints/database/connectors/mergeDatabaseConfig.cjs.js.map +1 -0
- package/dist/entrypoints/database/connectors/mysql.cjs.js +278 -0
- package/dist/entrypoints/database/connectors/mysql.cjs.js.map +1 -0
- package/dist/entrypoints/database/connectors/postgres.cjs.js +304 -0
- package/dist/entrypoints/database/connectors/postgres.cjs.js.map +1 -0
- package/dist/entrypoints/database/connectors/sqlite3.cjs.js +251 -0
- package/dist/entrypoints/database/connectors/sqlite3.cjs.js.map +1 -0
- package/dist/entrypoints/database/databaseServiceFactory.cjs.js +36 -0
- package/dist/entrypoints/database/databaseServiceFactory.cjs.js.map +1 -0
- package/dist/entrypoints/discovery/HostDiscovery.cjs.js +86 -0
- package/dist/entrypoints/discovery/HostDiscovery.cjs.js.map +1 -0
- package/dist/entrypoints/discovery/discoveryServiceFactory.cjs.js +17 -0
- package/dist/entrypoints/discovery/discoveryServiceFactory.cjs.js.map +1 -0
- package/dist/entrypoints/httpAuth/httpAuthServiceFactory.cjs.js +192 -0
- package/dist/entrypoints/httpAuth/httpAuthServiceFactory.cjs.js.map +1 -0
- package/dist/entrypoints/httpRouter/createAuthIntegrationRouter.cjs.js +19 -0
- package/dist/entrypoints/httpRouter/createAuthIntegrationRouter.cjs.js.map +1 -0
- package/dist/entrypoints/httpRouter/createCookieAuthRefreshMiddleware.cjs.js +26 -0
- package/dist/entrypoints/httpRouter/createCookieAuthRefreshMiddleware.cjs.js.map +1 -0
- package/dist/entrypoints/httpRouter/createCredentialsBarrier.cjs.js +63 -0
- package/dist/entrypoints/httpRouter/createCredentialsBarrier.cjs.js.map +1 -0
- package/dist/entrypoints/httpRouter/createLifecycleMiddleware.cjs.js +52 -0
- package/dist/entrypoints/httpRouter/createLifecycleMiddleware.cjs.js.map +1 -0
- package/dist/entrypoints/httpRouter/httpRouterServiceFactory.cjs.js +48 -0
- package/dist/entrypoints/httpRouter/httpRouterServiceFactory.cjs.js.map +1 -0
- package/dist/entrypoints/lifecycle/lifecycleServiceFactory.cjs.js +88 -0
- package/dist/entrypoints/lifecycle/lifecycleServiceFactory.cjs.js.map +1 -0
- package/dist/entrypoints/logger/loggerServiceFactory.cjs.js +17 -0
- package/dist/entrypoints/logger/loggerServiceFactory.cjs.js.map +1 -0
- package/dist/entrypoints/permissions/permissionsServiceFactory.cjs.js +22 -0
- package/dist/entrypoints/permissions/permissionsServiceFactory.cjs.js.map +1 -0
- package/dist/{cjs/createConfigSecretEnumerator-DShyoWWL.cjs.js → entrypoints/rootConfig/createConfigSecretEnumerator.cjs.js} +1 -1
- package/dist/entrypoints/rootConfig/createConfigSecretEnumerator.cjs.js.map +1 -0
- package/dist/entrypoints/rootConfig/rootConfigServiceFactory.cjs.js +26 -0
- package/dist/entrypoints/rootConfig/rootConfigServiceFactory.cjs.js.map +1 -0
- package/dist/entrypoints/rootHealth/rootHealthServiceFactory.cjs.js +41 -0
- package/dist/entrypoints/rootHealth/rootHealthServiceFactory.cjs.js.map +1 -0
- package/dist/entrypoints/rootHttpRouter/DefaultRootHttpRouter.cjs.js +77 -0
- package/dist/entrypoints/rootHttpRouter/DefaultRootHttpRouter.cjs.js.map +1 -0
- package/dist/entrypoints/rootHttpRouter/createHealthRouter.cjs.js +29 -0
- package/dist/entrypoints/rootHttpRouter/createHealthRouter.cjs.js.map +1 -0
- package/dist/entrypoints/rootHttpRouter/http/MiddlewareFactory.cjs.js +187 -0
- package/dist/entrypoints/rootHttpRouter/http/MiddlewareFactory.cjs.js.map +1 -0
- package/dist/entrypoints/rootHttpRouter/http/applyInternalErrorFilter.cjs.js +28 -0
- package/dist/entrypoints/rootHttpRouter/http/applyInternalErrorFilter.cjs.js.map +1 -0
- package/dist/{cjs/config-BDOwXIyo.cjs.js → entrypoints/rootHttpRouter/http/config.cjs.js} +1 -1
- package/dist/entrypoints/rootHttpRouter/http/config.cjs.js.map +1 -0
- package/dist/entrypoints/rootHttpRouter/http/createHttpServer.cjs.js +88 -0
- package/dist/entrypoints/rootHttpRouter/http/createHttpServer.cjs.js.map +1 -0
- package/dist/entrypoints/rootHttpRouter/http/getGeneratedCertificate.cjs.js +130 -0
- package/dist/entrypoints/rootHttpRouter/http/getGeneratedCertificate.cjs.js.map +1 -0
- package/dist/entrypoints/rootHttpRouter/http/readCorsOptions.cjs.js +51 -0
- package/dist/entrypoints/rootHttpRouter/http/readCorsOptions.cjs.js.map +1 -0
- package/dist/entrypoints/rootHttpRouter/http/readHelmetOptions.cjs.js +62 -0
- package/dist/entrypoints/rootHttpRouter/http/readHelmetOptions.cjs.js.map +1 -0
- package/dist/entrypoints/rootHttpRouter/rootHttpRouterServiceFactory.cjs.js +73 -0
- package/dist/entrypoints/rootHttpRouter/rootHttpRouterServiceFactory.cjs.js.map +1 -0
- package/dist/entrypoints/rootLifecycle/rootLifecycleServiceFactory.cjs.js +76 -0
- package/dist/entrypoints/rootLifecycle/rootLifecycleServiceFactory.cjs.js.map +1 -0
- package/dist/entrypoints/rootLogger/WinstonLogger.cjs.js +114 -0
- package/dist/entrypoints/rootLogger/WinstonLogger.cjs.js.map +1 -0
- package/dist/entrypoints/rootLogger/rootLoggerServiceFactory.cjs.js +30 -0
- package/dist/entrypoints/rootLogger/rootLoggerServiceFactory.cjs.js.map +1 -0
- package/dist/entrypoints/scheduler/database/migrateBackendTasks.cjs.js +18 -0
- package/dist/entrypoints/scheduler/database/migrateBackendTasks.cjs.js.map +1 -0
- package/dist/entrypoints/scheduler/database/tables.cjs.js +8 -0
- package/dist/entrypoints/scheduler/database/tables.cjs.js.map +1 -0
- package/dist/entrypoints/scheduler/lib/DefaultSchedulerService.cjs.js +37 -0
- package/dist/entrypoints/scheduler/lib/DefaultSchedulerService.cjs.js.map +1 -0
- package/dist/entrypoints/scheduler/lib/LocalTaskWorker.cjs.js +105 -0
- package/dist/entrypoints/scheduler/lib/LocalTaskWorker.cjs.js.map +1 -0
- package/dist/entrypoints/scheduler/lib/PluginTaskSchedulerImpl.cjs.js +138 -0
- package/dist/entrypoints/scheduler/lib/PluginTaskSchedulerImpl.cjs.js.map +1 -0
- package/dist/entrypoints/scheduler/lib/PluginTaskSchedulerJanitor.cjs.js +59 -0
- package/dist/entrypoints/scheduler/lib/PluginTaskSchedulerJanitor.cjs.js.map +1 -0
- package/dist/entrypoints/scheduler/lib/TaskWorker.cjs.js +275 -0
- package/dist/entrypoints/scheduler/lib/TaskWorker.cjs.js.map +1 -0
- package/dist/entrypoints/scheduler/lib/types.cjs.js +60 -0
- package/dist/entrypoints/scheduler/lib/types.cjs.js.map +1 -0
- package/dist/entrypoints/scheduler/lib/util.cjs.js +66 -0
- package/dist/entrypoints/scheduler/lib/util.cjs.js.map +1 -0
- package/dist/entrypoints/scheduler/schedulerServiceFactory.cjs.js +19 -0
- package/dist/entrypoints/scheduler/schedulerServiceFactory.cjs.js.map +1 -0
- package/dist/entrypoints/urlReader/lib/AwsCodeCommitUrlReader.cjs.js +274 -0
- package/dist/entrypoints/urlReader/lib/AwsCodeCommitUrlReader.cjs.js.map +1 -0
- package/dist/entrypoints/urlReader/lib/AwsS3UrlReader.cjs.js +261 -0
- package/dist/entrypoints/urlReader/lib/AwsS3UrlReader.cjs.js.map +1 -0
- package/dist/entrypoints/urlReader/lib/AzureUrlReader.cjs.js +148 -0
- package/dist/entrypoints/urlReader/lib/AzureUrlReader.cjs.js.map +1 -0
- package/dist/entrypoints/urlReader/lib/BitbucketCloudUrlReader.cjs.js +174 -0
- package/dist/entrypoints/urlReader/lib/BitbucketCloudUrlReader.cjs.js.map +1 -0
- package/dist/entrypoints/urlReader/lib/BitbucketServerUrlReader.cjs.js +170 -0
- package/dist/entrypoints/urlReader/lib/BitbucketServerUrlReader.cjs.js.map +1 -0
- package/dist/entrypoints/urlReader/lib/BitbucketUrlReader.cjs.js +182 -0
- package/dist/entrypoints/urlReader/lib/BitbucketUrlReader.cjs.js.map +1 -0
- package/dist/entrypoints/urlReader/lib/FetchUrlReader.cjs.js +132 -0
- package/dist/entrypoints/urlReader/lib/FetchUrlReader.cjs.js.map +1 -0
- package/dist/entrypoints/urlReader/lib/GerritUrlReader.cjs.js +147 -0
- package/dist/entrypoints/urlReader/lib/GerritUrlReader.cjs.js.map +1 -0
- package/dist/entrypoints/urlReader/lib/GiteaUrlReader.cjs.js +122 -0
- package/dist/entrypoints/urlReader/lib/GiteaUrlReader.cjs.js.map +1 -0
- package/dist/entrypoints/urlReader/lib/GithubUrlReader.cjs.js +226 -0
- package/dist/entrypoints/urlReader/lib/GithubUrlReader.cjs.js.map +1 -0
- package/dist/entrypoints/urlReader/lib/GitlabUrlReader.cjs.js +277 -0
- package/dist/entrypoints/urlReader/lib/GitlabUrlReader.cjs.js.map +1 -0
- package/dist/entrypoints/urlReader/lib/GoogleGcsUrlReader.cjs.js +129 -0
- package/dist/entrypoints/urlReader/lib/GoogleGcsUrlReader.cjs.js.map +1 -0
- package/dist/entrypoints/urlReader/lib/HarnessUrlReader.cjs.js +120 -0
- package/dist/entrypoints/urlReader/lib/HarnessUrlReader.cjs.js.map +1 -0
- package/dist/entrypoints/urlReader/lib/ReadUrlResponseFactory.cjs.js +49 -0
- package/dist/entrypoints/urlReader/lib/ReadUrlResponseFactory.cjs.js.map +1 -0
- package/dist/entrypoints/urlReader/lib/UrlReaderPredicateMux.cjs.js +46 -0
- package/dist/entrypoints/urlReader/lib/UrlReaderPredicateMux.cjs.js.map +1 -0
- package/dist/entrypoints/urlReader/lib/UrlReaders.cjs.js +68 -0
- package/dist/entrypoints/urlReader/lib/UrlReaders.cjs.js.map +1 -0
- package/dist/entrypoints/urlReader/lib/tree/ReadTreeResponseFactory.cjs.js +46 -0
- package/dist/entrypoints/urlReader/lib/tree/ReadTreeResponseFactory.cjs.js.map +1 -0
- package/dist/entrypoints/urlReader/lib/tree/ReadableArrayResponse.cjs.js +78 -0
- package/dist/entrypoints/urlReader/lib/tree/ReadableArrayResponse.cjs.js.map +1 -0
- package/dist/entrypoints/urlReader/lib/tree/TarArchiveResponse.cjs.js +147 -0
- package/dist/entrypoints/urlReader/lib/tree/TarArchiveResponse.cjs.js.map +1 -0
- package/dist/entrypoints/urlReader/lib/tree/ZipArchiveResponse.cjs.js +161 -0
- package/dist/entrypoints/urlReader/lib/tree/ZipArchiveResponse.cjs.js.map +1 -0
- package/dist/entrypoints/urlReader/lib/tree/util.cjs.js +28 -0
- package/dist/entrypoints/urlReader/lib/tree/util.cjs.js.map +1 -0
- package/dist/entrypoints/urlReader/lib/util.cjs.js +11 -0
- package/dist/entrypoints/urlReader/lib/util.cjs.js.map +1 -0
- package/dist/entrypoints/urlReader/urlReaderServiceFactory.cjs.js +29 -0
- package/dist/entrypoints/urlReader/urlReaderServiceFactory.cjs.js.map +1 -0
- package/dist/entrypoints/userInfo/DefaultUserInfoService.cjs.js +59 -0
- package/dist/entrypoints/userInfo/DefaultUserInfoService.cjs.js.map +1 -0
- package/dist/entrypoints/userInfo/userInfoServiceFactory.cjs.js +17 -0
- package/dist/entrypoints/userInfo/userInfoServiceFactory.cjs.js.map +1 -0
- package/dist/httpAuth.cjs.js +3 -187
- package/dist/httpAuth.cjs.js.map +1 -1
- package/dist/httpRouter.cjs.js +2 -166
- package/dist/httpRouter.cjs.js.map +1 -1
- package/dist/index.cjs.js +4 -160
- package/dist/index.cjs.js.map +1 -1
- package/dist/lib/escapeRegExp.cjs.js +8 -0
- package/dist/lib/escapeRegExp.cjs.js.map +1 -0
- package/dist/lifecycle.cjs.js +3 -58
- package/dist/lifecycle.cjs.js.map +1 -1
- package/dist/logger.cjs.js +3 -12
- package/dist/logger.cjs.js.map +1 -1
- package/dist/package.json.cjs.js +252 -0
- package/dist/package.json.cjs.js.map +1 -0
- package/dist/permissions.cjs.js +3 -17
- package/dist/permissions.cjs.js.map +1 -1
- package/dist/rootConfig.cjs.js +4 -22
- package/dist/rootConfig.cjs.js.map +1 -1
- package/dist/rootHealth.cjs.js +3 -35
- package/dist/rootHealth.cjs.js.map +1 -1
- package/dist/rootHttpRouter.cjs.js +15 -651
- package/dist/rootHttpRouter.cjs.js.map +1 -1
- package/dist/rootLifecycle.cjs.js +3 -70
- package/dist/rootLifecycle.cjs.js.map +1 -1
- package/dist/rootLogger.cjs.js +4 -137
- package/dist/rootLogger.cjs.js.map +1 -1
- package/dist/scheduler.cjs.js +4 -693
- package/dist/scheduler.cjs.js.map +1 -1
- package/dist/scheduler.d.ts +2 -1
- package/dist/urlReader.cjs.js +32 -2962
- package/dist/urlReader.cjs.js.map +1 -1
- package/dist/userInfo.cjs.js +2 -64
- package/dist/userInfo.cjs.js.map +1 -1
- package/httpAuth/package.json +1 -1
- package/httpRouter/package.json +1 -1
- package/lifecycle/package.json +1 -1
- package/logger/package.json +1 -1
- package/package.json +13 -13
- package/permissions/package.json +1 -1
- package/rootConfig/package.json +1 -1
- package/rootHealth/package.json +1 -1
- package/rootHttpRouter/package.json +1 -1
- package/rootLifecycle/package.json +1 -1
- package/rootLogger/package.json +1 -1
- package/scheduler/package.json +1 -1
- package/urlReader/package.json +1 -1
- package/userInfo/package.json +1 -1
- package/dist/cjs/config-BDOwXIyo.cjs.js.map +0 -1
- package/dist/cjs/createConfigSecretEnumerator-DShyoWWL.cjs.js.map +0 -1
- package/dist/cjs/helpers-D2f1CG0o.cjs.js.map +0 -1

package/dist/entrypoints/urlReader/lib/ReadUrlResponseFactory.cjs.js

@@ -0,0 +1,49 @@
+'use strict';
+
+var errors = require('@backstage/errors');
+var getRawBody = require('raw-body');
+var stream = require('stream');
+
+function _interopDefaultCompat (e) { return e && typeof e === 'object' && 'default' in e ? e : { default: e }; }
+
+var getRawBody__default = /*#__PURE__*/_interopDefaultCompat(getRawBody);
+
+class ReadUrlResponseFactory {
+  /**
+   * Resolves a ReadUrlResponse from a Readable stream.
+   */
+  static async fromReadable(stream, options) {
+    let buffer;
+    const conflictError = new errors.ConflictError(
+      "Cannot use buffer() and stream() from the same ReadUrlResponse"
+    );
+    let hasCalledStream = false;
+    let hasCalledBuffer = false;
+    return {
+      buffer: () => {
+        hasCalledBuffer = true;
+        if (hasCalledStream) throw conflictError;
+        if (buffer) return buffer;
+        buffer = getRawBody__default.default(stream);
+        return buffer;
+      },
+      stream: () => {
+        hasCalledStream = true;
+        if (hasCalledBuffer) throw conflictError;
+        return stream;
+      },
+      etag: options?.etag,
+      lastModifiedAt: options?.lastModifiedAt
+    };
+  }
+  /**
+   * Resolves a ReadUrlResponse from an old-style NodeJS.ReadableStream.
+   */
+  static async fromNodeJSReadable(oldStyleStream, options) {
+    const readable = stream.Readable.from(oldStyleStream);
+    return ReadUrlResponseFactory.fromReadable(readable, options);
+  }
+}
+
+exports.ReadUrlResponseFactory = ReadUrlResponseFactory;
+//# sourceMappingURL=ReadUrlResponseFactory.cjs.js.map
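
For orientation, a minimal usage sketch of the `ReadUrlResponseFactory` added above, assuming it is imported from this package's public `urlReader` entry point (the import specifier is illustrative). The behavior it exercises follows directly from the hunk: `buffer()` and `stream()` are mutually exclusive on the returned response, and the buffered contents are cached across calls.

```ts
import { Readable } from 'stream';
// Illustrative import; the compiled module added in this diff lives at
// dist/entrypoints/urlReader/lib/ReadUrlResponseFactory.cjs.js.
import { ReadUrlResponseFactory } from '@backstage/backend-defaults/urlReader';

async function main() {
  const response = await ReadUrlResponseFactory.fromReadable(
    Readable.from(Buffer.from('hello world')),
    { etag: 'abc123' },
  );

  // buffer() can be called repeatedly; the same promise is reused.
  const contents = await response.buffer();
  console.log(contents.toString('utf8'), response.etag);

  // Calling stream() after buffer() throws the ConflictError built in the factory.
  // (stream is optional on the response type, hence the optional call.)
  try {
    response.stream?.();
  } catch (error) {
    console.log('expected conflict:', (error as Error).message);
  }
}

main().catch(console.error);
```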

package/dist/entrypoints/urlReader/lib/UrlReaderPredicateMux.cjs.js

@@ -0,0 +1,46 @@
+'use strict';
+
+var errors = require('@backstage/errors');
+
+function notAllowedMessage(url) {
+  return `Reading from '${url}' is not allowed. You may need to configure an integration for the target host, or add it to the configured list of allowed hosts at 'backend.reading.allow'`;
+}
+class UrlReaderPredicateMux {
+  readers = [];
+  register(tuple) {
+    this.readers.push(tuple);
+  }
+  async readUrl(url, options) {
+    const parsed = new URL(url);
+    for (const { predicate, reader } of this.readers) {
+      if (predicate(parsed)) {
+        return reader.readUrl(url, options);
+      }
+    }
+    throw new errors.NotAllowedError(notAllowedMessage(url));
+  }
+  async readTree(url, options) {
+    const parsed = new URL(url);
+    for (const { predicate, reader } of this.readers) {
+      if (predicate(parsed)) {
+        return await reader.readTree(url, options);
+      }
+    }
+    throw new errors.NotAllowedError(notAllowedMessage(url));
+  }
+  async search(url, options) {
+    const parsed = new URL(url);
+    for (const { predicate, reader } of this.readers) {
+      if (predicate(parsed)) {
+        return await reader.search(url, options);
+      }
+    }
+    throw new errors.NotAllowedError(notAllowedMessage(url));
+  }
+  toString() {
+    return `predicateMux{readers=${this.readers.map((t) => t.reader).join(",")}`;
+  }
+}
+
+exports.UrlReaderPredicateMux = UrlReaderPredicateMux;
+//# sourceMappingURL=UrlReaderPredicateMux.cjs.js.map
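
The mux registers `{ predicate, reader }` tuples and routes each call to the first reader whose predicate matches the parsed URL, throwing `NotAllowedError` otherwise. Below is a hedged sketch of what such a tuple looks like; `internalReader` is a hypothetical stand-in for a real `UrlReaderService` implementation and is not part of this package.

```ts
import { UrlReaderService } from '@backstage/backend-plugin-api';

// Hypothetical reader, for illustration only.
const internalReader: UrlReaderService = {
  async readUrl(url) {
    const res = await fetch(url); // Node 18+ global fetch
    const data = Buffer.from(await res.arrayBuffer());
    return { buffer: async () => data };
  },
  async readTree() {
    throw new Error('readTree is not supported by this sketch');
  },
  async search() {
    throw new Error('search is not supported by this sketch');
  },
};

// A predicate tuple of the shape register()'ed on the mux above; the predicate
// receives the already-parsed URL (new URL(url)) and claims matching requests.
const tuple = {
  predicate: (url: URL) => url.host === 'internal.example.com',
  reader: internalReader,
};

console.log(tuple.predicate(new URL('https://internal.example.com/readme.md'))); // true
console.log(tuple.predicate(new URL('https://github.com/backstage/backstage'))); // false
```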

package/dist/entrypoints/urlReader/lib/UrlReaders.cjs.js

@@ -0,0 +1,68 @@
+'use strict';
+
+var UrlReaderPredicateMux = require('./UrlReaderPredicateMux.cjs.js');
+var AzureUrlReader = require('./AzureUrlReader.cjs.js');
+var BitbucketCloudUrlReader = require('./BitbucketCloudUrlReader.cjs.js');
+var BitbucketServerUrlReader = require('./BitbucketServerUrlReader.cjs.js');
+var BitbucketUrlReader = require('./BitbucketUrlReader.cjs.js');
+var GerritUrlReader = require('./GerritUrlReader.cjs.js');
+var GithubUrlReader = require('./GithubUrlReader.cjs.js');
+var GitlabUrlReader = require('./GitlabUrlReader.cjs.js');
+var ReadTreeResponseFactory = require('./tree/ReadTreeResponseFactory.cjs.js');
+var FetchUrlReader = require('./FetchUrlReader.cjs.js');
+var GoogleGcsUrlReader = require('./GoogleGcsUrlReader.cjs.js');
+var AwsS3UrlReader = require('./AwsS3UrlReader.cjs.js');
+var GiteaUrlReader = require('./GiteaUrlReader.cjs.js');
+var AwsCodeCommitUrlReader = require('./AwsCodeCommitUrlReader.cjs.js');
+var HarnessUrlReader = require('./HarnessUrlReader.cjs.js');
+
+class UrlReaders {
+  /**
+   * Creates a custom {@link @backstage/backend-plugin-api#UrlReaderService} wrapper for your own set of factories.
+   */
+  static create(options) {
+    const { logger, config, factories } = options;
+    const mux = new UrlReaderPredicateMux.UrlReaderPredicateMux();
+    const treeResponseFactory = ReadTreeResponseFactory.DefaultReadTreeResponseFactory.create({
+      config
+    });
+    for (const factory of factories ?? []) {
+      const tuples = factory({ config, logger, treeResponseFactory });
+      for (const tuple of tuples) {
+        mux.register(tuple);
+      }
+    }
+    return mux;
+  }
+  /**
+   * Creates a {@link @backstage/backend-plugin-api#UrlReaderService} wrapper that includes all the default factories
+   * from this package.
+   *
+   * Any additional factories passed will be loaded before the default ones.
+   */
+  static default(options) {
+    const { logger, config, factories = [] } = options;
+    return UrlReaders.create({
+      logger,
+      config,
+      factories: factories.concat([
+        AzureUrlReader.AzureUrlReader.factory,
+        BitbucketCloudUrlReader.BitbucketCloudUrlReader.factory,
+        BitbucketServerUrlReader.BitbucketServerUrlReader.factory,
+        BitbucketUrlReader.BitbucketUrlReader.factory,
+        GerritUrlReader.GerritUrlReader.factory,
+        GithubUrlReader.GithubUrlReader.factory,
+        GiteaUrlReader.GiteaUrlReader.factory,
+        GitlabUrlReader.GitlabUrlReader.factory,
+        GoogleGcsUrlReader.GoogleGcsUrlReader.factory,
+        HarnessUrlReader.HarnessUrlReader.factory,
+        AwsS3UrlReader.AwsS3UrlReader.factory,
+        AwsCodeCommitUrlReader.AwsCodeCommitUrlReader.factory,
+        FetchUrlReader.FetchUrlReader.factory
+      ])
+    });
+  }
+}
+
+exports.UrlReaders = UrlReaders;
+//# sourceMappingURL=UrlReaders.cjs.js.map
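
A minimal wiring sketch for the `UrlReaders` helper above, assuming the mock config and logger services from `@backstage/backend-test-utils` (any `RootConfigService`/`LoggerService` would do). As the JSDoc in the hunk notes, any extra factories are tried before the defaults.

```ts
import { UrlReaders } from '@backstage/backend-defaults/urlReader';
import { mockServices } from '@backstage/backend-test-utils';

const reader = UrlReaders.default({
  config: mockServices.rootConfig({
    data: {
      // Allow plain HTTP(S) reads from this host via the FetchUrlReader.
      backend: { reading: { allow: [{ host: 'example.com' }] } },
    },
  }),
  logger: mockServices.logger.mock(),
  // factories: [myCustomReaderFactory], // optional, loaded before the defaults
});

async function main() {
  // Dispatches through the UrlReaderPredicateMux assembled in UrlReaders.create().
  const response = await reader.readUrl('https://example.com/catalog-info.yaml');
  console.log((await response.buffer()).toString('utf8'));
}

main().catch(console.error);
```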

package/dist/entrypoints/urlReader/lib/tree/ReadTreeResponseFactory.cjs.js

@@ -0,0 +1,46 @@
+'use strict';
+
+var os = require('os');
+var TarArchiveResponse = require('./TarArchiveResponse.cjs.js');
+var ZipArchiveResponse = require('./ZipArchiveResponse.cjs.js');
+var ReadableArrayResponse = require('./ReadableArrayResponse.cjs.js');
+
+function _interopDefaultCompat (e) { return e && typeof e === 'object' && 'default' in e ? e : { default: e }; }
+
+var os__default = /*#__PURE__*/_interopDefaultCompat(os);
+
+class DefaultReadTreeResponseFactory {
+  constructor(workDir) {
+    this.workDir = workDir;
+  }
+  static create(options) {
+    return new DefaultReadTreeResponseFactory(
+      options.config.getOptionalString("backend.workingDirectory") ?? os__default.default.tmpdir()
+    );
+  }
+  async fromTarArchive(options) {
+    return new TarArchiveResponse.TarArchiveResponse(
+      options.stream,
+      options.subpath ?? "",
+      this.workDir,
+      options.etag,
+      options.filter,
+      options.stripFirstDirectory ?? true
+    );
+  }
+  async fromZipArchive(options) {
+    return new ZipArchiveResponse.ZipArchiveResponse(
+      options.stream,
+      options.subpath ?? "",
+      this.workDir,
+      options.etag,
+      options.filter
+    );
+  }
+  async fromReadableArray(options) {
+    return new ReadableArrayResponse.ReadableArrayResponse(options, this.workDir, "");
+  }
+}
+
+exports.DefaultReadTreeResponseFactory = DefaultReadTreeResponseFactory;
+//# sourceMappingURL=ReadTreeResponseFactory.cjs.js.map
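
One detail worth noting in the factory above: the working directory used for archive extraction is `backend.workingDirectory` when configured, falling back to the OS temp dir. A small sketch of that lookup (`ConfigReader` is used here purely for illustration):

```ts
import os from 'os';
import { ConfigReader } from '@backstage/config';

const config = new ConfigReader({
  backend: { workingDirectory: '/var/backstage/work' },
});

// Mirrors the selection in DefaultReadTreeResponseFactory.create() above.
const workDir =
  config.getOptionalString('backend.workingDirectory') ?? os.tmpdir();

console.log(workDir); // '/var/backstage/work' here; os.tmpdir() when unset
```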

package/dist/entrypoints/urlReader/lib/tree/ReadableArrayResponse.cjs.js

@@ -0,0 +1,78 @@
+'use strict';
+
+var concatStream = require('concat-stream');
+var platformPath = require('path');
+var getRawBody = require('raw-body');
+var fs = require('fs-extra');
+var util = require('util');
+var tar = require('tar');
+var stream = require('stream');
+
+function _interopDefaultCompat (e) { return e && typeof e === 'object' && 'default' in e ? e : { default: e }; }
+
+var concatStream__default = /*#__PURE__*/_interopDefaultCompat(concatStream);
+var platformPath__default = /*#__PURE__*/_interopDefaultCompat(platformPath);
+var getRawBody__default = /*#__PURE__*/_interopDefaultCompat(getRawBody);
+var fs__default = /*#__PURE__*/_interopDefaultCompat(fs);
+var tar__default = /*#__PURE__*/_interopDefaultCompat(tar);
+
+const pipeline = util.promisify(stream.pipeline);
+class ReadableArrayResponse {
+  constructor(stream, workDir, etag) {
+    this.stream = stream;
+    this.workDir = workDir;
+    this.etag = etag;
+    this.etag = etag;
+  }
+  read = false;
+  // Make sure the input stream is only read once
+  onlyOnce() {
+    if (this.read) {
+      throw new Error("Response has already been read");
+    }
+    this.read = true;
+  }
+  async files() {
+    this.onlyOnce();
+    const files = Array();
+    for (let i = 0; i < this.stream.length; i++) {
+      if (!this.stream[i].path.endsWith("/")) {
+        files.push({
+          path: this.stream[i].path,
+          content: () => getRawBody__default.default(this.stream[i].data),
+          lastModifiedAt: this.stream[i]?.lastModifiedAt
+        });
+      }
+    }
+    return files;
+  }
+  async archive() {
+    const tmpDir = await this.dir();
+    try {
+      const data = await new Promise(async (resolve) => {
+        await pipeline(
+          tar__default.default.create({ cwd: tmpDir }, [""]),
+          concatStream__default.default(resolve)
+        );
+      });
+      return stream.Readable.from(data);
+    } finally {
+      await fs__default.default.remove(tmpDir);
+    }
+  }
+  async dir(options) {
+    this.onlyOnce();
+    const dir = options?.targetDir ?? await fs__default.default.mkdtemp(platformPath__default.default.join(this.workDir, "backstage-"));
+    for (let i = 0; i < this.stream.length; i++) {
+      if (!this.stream[i].path.endsWith("/")) {
+        const filePath = platformPath__default.default.join(dir, this.stream[i].path);
+        await fs__default.default.mkdir(platformPath.dirname(filePath), { recursive: true });
+        await pipeline(this.stream[i].data, fs__default.default.createWriteStream(filePath));
+      }
+    }
+    return dir;
+  }
+}
+
+exports.ReadableArrayResponse = ReadableArrayResponse;
+//# sourceMappingURL=ReadableArrayResponse.cjs.js.map
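
The `stream` argument this class receives is an array of `{ path, data }` entries (optionally with `lastModifiedAt`), and `files()`/`dir()` skip entries whose path ends in `/`. A small sketch of that input shape and filtering, mirroring the loop above without using the internal class:

```ts
import { Readable } from 'stream';

// Shape consumed by ReadableArrayResponse above (one entry per file).
const entries = [
  { path: 'docs/', data: Readable.from('') },             // directory entry: skipped
  { path: 'docs/index.md', data: Readable.from('# Hi') }, // regular file: kept
];

// Mirrors the filtering in files(): trailing-slash paths are treated as directories.
const files = entries.filter(entry => !entry.path.endsWith('/'));
console.log(files.map(f => f.path)); // [ 'docs/index.md' ]
```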

package/dist/entrypoints/urlReader/lib/tree/TarArchiveResponse.cjs.js

@@ -0,0 +1,147 @@
+'use strict';
+
+var concatStream = require('concat-stream');
+var fs = require('fs-extra');
+var platformPath = require('path');
+var stream = require('stream');
+var tar = require('tar');
+var util = require('util');
+var util$1 = require('./util.cjs.js');
+
+function _interopDefaultCompat (e) { return e && typeof e === 'object' && 'default' in e ? e : { default: e }; }
+
+var concatStream__default = /*#__PURE__*/_interopDefaultCompat(concatStream);
+var fs__default = /*#__PURE__*/_interopDefaultCompat(fs);
+var platformPath__default = /*#__PURE__*/_interopDefaultCompat(platformPath);
+var tar__default = /*#__PURE__*/_interopDefaultCompat(tar);
+
+const TarParseStream = tar.Parse;
+const pipeline = util.promisify(stream.pipeline);
+class TarArchiveResponse {
+  constructor(stream, subPath, workDir, etag, filter, stripFirstDirectory = true) {
+    this.stream = stream;
+    this.subPath = subPath;
+    this.workDir = workDir;
+    this.etag = etag;
+    this.filter = filter;
+    this.stripFirstDirectory = stripFirstDirectory;
+    if (subPath) {
+      if (!subPath.endsWith("/")) {
+        this.subPath += "/";
+      }
+      if (subPath.startsWith("/")) {
+        throw new TypeError(
+          `TarArchiveResponse subPath must not start with a /, got '${subPath}'`
+        );
+      }
+    }
+    this.etag = etag;
+  }
+  read = false;
+  // Make sure the input stream is only read once
+  onlyOnce() {
+    if (this.read) {
+      throw new Error("Response has already been read");
+    }
+    this.read = true;
+  }
+  async files() {
+    this.onlyOnce();
+    const files = Array();
+    const parser = new TarParseStream();
+    parser.on("entry", (entry) => {
+      if (entry.type === "Directory") {
+        entry.resume();
+        return;
+      }
+      const relativePath = this.stripFirstDirectory ? util$1.stripFirstDirectoryFromPath(entry.path) : entry.path;
+      if (this.subPath) {
+        if (!relativePath.startsWith(this.subPath)) {
+          entry.resume();
+          return;
+        }
+      }
+      const path = relativePath.slice(this.subPath.length);
+      if (this.filter) {
+        if (!this.filter(path, { size: entry.remain })) {
+          entry.resume();
+          return;
+        }
+      }
+      const content = new Promise(async (resolve) => {
+        await pipeline(entry, concatStream__default.default(resolve));
+      });
+      files.push({
+        path,
+        content: () => content
+      });
+      entry.resume();
+    });
+    await pipeline(this.stream, parser);
+    return files;
+  }
+  async archive() {
+    if (!this.subPath) {
+      this.onlyOnce();
+      return this.stream;
+    }
+    const tmpDir = await this.dir();
+    try {
+      const data = await new Promise(async (resolve) => {
+        await pipeline(
+          tar__default.default.create({ cwd: tmpDir }, [""]),
+          concatStream__default.default(resolve)
+        );
+      });
+      return stream.Readable.from(data);
+    } finally {
+      await fs__default.default.remove(tmpDir);
+    }
+  }
+  async dir(options) {
+    this.onlyOnce();
+    const dir = options?.targetDir ?? await fs__default.default.mkdtemp(platformPath__default.default.join(this.workDir, "backstage-"));
+    let strip = this.subPath ? this.subPath.split("/").length : 1;
+    if (!this.stripFirstDirectory) {
+      strip--;
+    }
+    let filterError = void 0;
+    await pipeline(
+      this.stream,
+      tar__default.default.extract({
+        strip,
+        cwd: dir,
+        filter: (path, stat) => {
+          if (filterError) {
+            return false;
+          }
+          const relativePath = this.stripFirstDirectory ? util$1.stripFirstDirectoryFromPath(path) : path;
+          if (this.subPath && !relativePath.startsWith(this.subPath)) {
+            return false;
+          }
+          if (this.filter) {
+            const innerPath = path.split("/").slice(strip).join("/");
+            try {
+              return this.filter(innerPath, { size: stat.size });
+            } catch (error) {
+              filterError = error;
+              return false;
+            }
+          }
+          return true;
+        }
+      })
+    );
+    if (filterError) {
+      if (!options?.targetDir) {
+        await fs__default.default.remove(dir).catch(() => {
+        });
+      }
+      throw filterError;
+    }
+    return dir;
+  }
+}
+
+exports.TarArchiveResponse = TarArchiveResponse;
+//# sourceMappingURL=TarArchiveResponse.cjs.js.map
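
The `dir()` method above emulates `tar --strip-components`: with `stripFirstDirectory` enabled it drops the archive's top-level directory plus one component per `subPath` segment. A small sketch of that strip count, mirroring the arithmetic in the hunk:

```ts
// Mirrors the strip computation in TarArchiveResponse.dir() above.
// Note: the constructor normalizes a non-empty subPath to end with '/'.
function stripComponents(subPath: string, stripFirstDirectory: boolean): number {
  let strip = subPath ? subPath.split('/').length : 1;
  if (!stripFirstDirectory) {
    strip--;
  }
  return strip;
}

console.log(stripComponents('', true));       // 1: drop only the top-level dir
console.log(stripComponents('docs/', true));  // 2: drop the top-level dir and 'docs'
console.log(stripComponents('docs/', false)); // 1: keep the top-level dir
```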
ACJ,OACiB,EAAA;AACjB,IAAA,IAAA,CAAK,QAAS,EAAA,CAAA;AAEd,IAAM,MAAA,GAAA,GACJ,OAAS,EAAA,SAAA,IACR,MAAMA,mBAAA,CAAG,OAAQ,CAAAC,6BAAA,CAAa,IAAK,CAAA,IAAA,CAAK,OAAS,EAAA,YAAY,CAAC,CAAA,CAAA;AAIjE,IAAI,IAAA,KAAA,GAAQ,KAAK,OAAU,GAAA,IAAA,CAAK,QAAQ,KAAM,CAAA,GAAG,EAAE,MAAS,GAAA,CAAA,CAAA;AAC5D,IAAI,IAAA,CAAC,KAAK,mBAAqB,EAAA;AAC7B,MAAA,KAAA,EAAA,CAAA;AAAA,KACF;AAEA,IAAA,IAAI,WAAiC,GAAA,KAAA,CAAA,CAAA;AACrC,IAAM,MAAA,QAAA;AAAA,MACJ,IAAK,CAAA,MAAA;AAAA,MACLH,qBAAI,OAAQ,CAAA;AAAA,QACV,KAAA;AAAA,QACA,GAAK,EAAA,GAAA;AAAA,QACL,MAAA,EAAQ,CAAC,IAAA,EAAM,IAAS,KAAA;AAEtB,UAAA,IAAI,WAAa,EAAA;AACf,YAAO,OAAA,KAAA,CAAA;AAAA,WACT;AAIA,UAAA,MAAM,YAAe,GAAA,IAAA,CAAK,mBACtB,GAAAF,kCAAA,CAA4B,IAAI,CAChC,GAAA,IAAA,CAAA;AACJ,UAAA,IAAI,KAAK,OAAW,IAAA,CAAC,aAAa,UAAW,CAAA,IAAA,CAAK,OAAO,CAAG,EAAA;AAC1D,YAAO,OAAA,KAAA,CAAA;AAAA,WACT;AACA,UAAA,IAAI,KAAK,MAAQ,EAAA;AACf,YAAM,MAAA,SAAA,GAAY,KAAK,KAAM,CAAA,GAAG,EAAE,KAAM,CAAA,KAAK,CAAE,CAAA,IAAA,CAAK,GAAG,CAAA,CAAA;AACvD,YAAI,IAAA;AACF,cAAA,OAAO,KAAK,MAAO,CAAA,SAAA,EAAW,EAAE,IAAM,EAAA,IAAA,CAAK,MAAM,CAAA,CAAA;AAAA,qBAC1C,KAAO,EAAA;AACd,cAAc,WAAA,GAAA,KAAA,CAAA;AACd,cAAO,OAAA,KAAA,CAAA;AAAA,aACT;AAAA,WACF;AACA,UAAO,OAAA,IAAA,CAAA;AAAA,SACT;AAAA,OACD,CAAA;AAAA,KACH,CAAA;AAEA,IAAA,IAAI,WAAa,EAAA;AAGf,MAAI,IAAA,CAAC,SAAS,SAAW,EAAA;AACvB,QAAA,MAAMI,mBAAG,CAAA,MAAA,CAAO,GAAG,CAAA,CAAE,MAAM,MAAM;AAAA,SAAE,CAAA,CAAA;AAAA,OACrC;AACA,MAAM,MAAA,WAAA,CAAA;AAAA,KACR;AAEA,IAAO,OAAA,GAAA,CAAA;AAAA,GACT;AACF;;;;"}
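For completeness, a hypothetical usage sketch of the class whose compiled output and source map appear above. The constructor and `dir()` signatures are taken from the bundled `sourcesContent`, but `TarArchiveResponse` is an internal module of `@backstage/backend-defaults`, so the import path below is illustrative rather than a supported entry point.

```ts
import { createReadStream } from 'fs';
import os from 'os';
// Illustrative path only; the class is internal to the package, not a public export.
import { TarArchiveResponse } from './TarArchiveResponse';

async function readYamlFromTar(archivePath: string): Promise<string> {
  const response = new TarArchiveResponse(
    createReadStream(archivePath), // tar stream to unpack
    '',                            // subPath: no sub-directory restriction
    os.tmpdir(),                   // workDir used for temporary extraction
    'example-etag',
    path => {
      // A filter that throws is captured during extraction and rethrown by dir().
      if (path.endsWith('.lock')) {
        throw new Error(`unexpected lock file in archive: ${path}`);
      }
      return path.endsWith('.yaml');
    },
  );

  // dir() extracts the filtered tree into a temp dir (removed again if the
  // filter threw) and resolves with the directory path.
  return response.dir();
}
```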