@backstage/plugin-scaffolder-backend 1.5.0-next.1 → 1.6.0-next.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +71 -0
- package/alpha/package.json +1 -1
- package/dist/index.alpha.d.ts +16 -24
- package/dist/index.beta.d.ts +15 -23
- package/dist/index.cjs.js +133 -74
- package/dist/index.cjs.js.map +1 -1
- package/dist/index.d.ts +15 -23
- package/package.json +12 -12
package/dist/index.cjs.js
CHANGED
@@ -283,13 +283,8 @@ async function recursiveReadDir(dir) {
   return files.reduce((a, f) => a.concat(f), []);
 }

-async function fetchContents({
-  reader,
-  integrations,
-  baseUrl,
-  fetchUrl = ".",
-  outputPath
-}) {
+async function fetchContents(options) {
+  const { reader, integrations, baseUrl, fetchUrl = ".", outputPath } = options;
   let fetchUrlIsAbsolute = false;
   try {
     new URL(fetchUrl);
@@ -673,7 +668,7 @@ function createFetchTemplateAction(options) {
   });
 }
 function containsSkippedContent(localOutputPath) {
-  return localOutputPath === "" ||
+  return localOutputPath === "" || localOutputPath.startsWith("/") || localOutputPath.includes("//");
 }

 const createFilesystemDeleteAction = () => {
@@ -1043,7 +1038,7 @@ async function getOctokitOptions(options) {
     previews: ["nebula-preview"]
   };
 }
-async function createGithubRepoWithCollaboratorsAndTopics(client, repo, owner, repoVisibility, description, deleteBranchOnMerge, allowMergeCommit, allowSquashMerge, allowRebaseMerge, access, collaborators, topics, logger) {
+async function createGithubRepoWithCollaboratorsAndTopics(client, repo, owner, repoVisibility, description, homepage, deleteBranchOnMerge, allowMergeCommit, allowSquashMerge, allowRebaseMerge, access, collaborators, topics, logger) {
   const user = await client.rest.users.getByUsername({
     username: owner
   });
@@ -1056,7 +1051,8 @@ async function createGithubRepoWithCollaboratorsAndTopics(client, repo, owner, r
     delete_branch_on_merge: deleteBranchOnMerge,
     allow_merge_commit: allowMergeCommit,
     allow_squash_merge: allowSquashMerge,
-    allow_rebase_merge: allowRebaseMerge
+    allow_rebase_merge: allowRebaseMerge,
+    homepage
   }) : client.rest.repos.createForAuthenticatedUser({
     name: repo,
     private: repoVisibility === "private",
@@ -1064,7 +1060,8 @@ async function createGithubRepoWithCollaboratorsAndTopics(client, repo, owner, r
     delete_branch_on_merge: deleteBranchOnMerge,
     allow_merge_commit: allowMergeCommit,
     allow_squash_merge: allowSquashMerge,
-    allow_rebase_merge: allowRebaseMerge
+    allow_rebase_merge: allowRebaseMerge,
+    homepage
   });
   let newRepo;
   try {
@@ -1335,6 +1332,10 @@ const description = {
   title: "Repository Description",
   type: "string"
 };
+const homepage = {
+  title: "Repository Homepage",
+  type: "string"
+};
 const access = {
   title: "Repository Access",
   description: `Sets an admin collaborator on the repository. Can either be a user reference different from 'owner' in 'repoUrl' or team reference, eg. 'org/team-name'`,
@@ -1473,6 +1474,7 @@ function createGithubRepoCreateAction(options) {
         properties: {
           repoUrl: repoUrl,
           description: description,
+          homepage: homepage,
           access: access,
           requireCodeOwnerReviews: requireCodeOwnerReviews,
           requiredStatusCheckContexts: requiredStatusCheckContexts,
@@ -1498,6 +1500,7 @@ function createGithubRepoCreateAction(options) {
       const {
         repoUrl,
         description,
+        homepage,
         access,
         repoVisibility = "private",
         deleteBranchOnMerge = false,
@@ -1525,6 +1528,7 @@ function createGithubRepoCreateAction(options) {
         owner,
         repoVisibility,
         description,
+        homepage,
         deleteBranchOnMerge,
         allowMergeCommit,
         allowSquashMerge,
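Alongside this, an optional `homepage` input is added to the `github:repo:create` action (and, in the hunks further below, to `publish:github`); it is forwarded unchanged to GitHub's create-repository call. A minimal sketch of a step input using the new field, with hypothetical values:

// Hypothetical input for `github:repo:create` / `publish:github`; only `homepage` is new,
// and it is passed through to the GitHub API as-is.
const input = {
  repoUrl: "github.com?owner=my-org&repo=my-new-service",
  description: "Example service",
  homepage: "https://my-org.example.com/my-new-service",
  repoVisibility: "private"
};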
@@ -2544,34 +2548,6 @@ function createPublishBitbucketServerAction(options) {
   });
 }

-function createPublishFileAction() {
-  return createTemplateAction({
-    id: "publish:file",
-    description: "Writes contents of the workspace to a local directory",
-    schema: {
-      input: {
-        type: "object",
-        required: ["path"],
-        properties: {
-          path: {
-            title: "Path to a directory where the output will be written",
-            type: "string"
-          }
-        }
-      }
-    },
-    async handler(ctx) {
-      const { path: path$1 } = ctx.input;
-      const exists = await fs__default["default"].pathExists(path$1);
-      if (exists) {
-        throw new errors.InputError("Output path already exists");
-      }
-      await fs__default["default"].ensureDir(path.dirname(path$1));
-      await fs__default["default"].copy(ctx.workspacePath, path$1);
-    }
-  });
-}
-
 const createGerritProject = async (config, options) => {
   const { projectName, parent, owner, description } = options;
   const fetchOptions = {
@@ -2840,6 +2816,7 @@ function createPublishGithubAction(options) {
         properties: {
           repoUrl: repoUrl,
           description: description,
+          homepage: homepage,
           access: access,
           requireCodeOwnerReviews: requireCodeOwnerReviews,
           requiredStatusCheckContexts: requiredStatusCheckContexts,
@@ -2872,6 +2849,7 @@ function createPublishGithubAction(options) {
       const {
         repoUrl,
         description,
+        homepage,
         access,
         requireCodeOwnerReviews = false,
         requiredStatusCheckContexts = [],
@@ -2907,6 +2885,7 @@ function createPublishGithubAction(options) {
         owner,
         repoVisibility,
         description,
+        homepage,
         deleteBranchOnMerge,
         allowMergeCommit,
         allowSquashMerge,
@@ -3055,6 +3034,22 @@ const createPublishGithubPullRequestAction = ({
           title: "Authentication Token",
           type: "string",
           description: "The token to use for authorization to GitHub"
+        },
+        reviewers: {
+          title: "Pull Request Reviewers",
+          type: "array",
+          items: {
+            type: "string"
+          },
+          description: "The users that will be added as reviewers to the pull request"
+        },
+        teamReviewers: {
+          title: "Pull Request Team Reviewers",
+          type: "array",
+          items: {
+            type: "string"
+          },
+          description: "The teams that will be added as reviewers to the pull request"
         }
       }
     },
@@ -3084,7 +3079,9 @@ const createPublishGithubPullRequestAction = ({
       draft,
       targetPath,
       sourcePath,
-      token: providedToken
+      token: providedToken,
+      reviewers,
+      teamReviewers
     } = ctx.input;
     const { owner, repo, host } = parseRepoUrl(repoUrl, integrations);
     if (!owner) {
@@ -3132,13 +3129,46 @@ const createPublishGithubPullRequestAction = ({
      if (!response) {
        throw new GithubResponseError("null response from Github");
      }
+      const pullRequestNumber = response.data.number;
+      if (reviewers || teamReviewers) {
+        const pullRequest = { owner, repo, number: pullRequestNumber };
+        await requestReviewersOnPullRequest(
+          pullRequest,
+          reviewers,
+          teamReviewers,
+          client,
+          ctx.logger
+        );
+      }
      ctx.output("remoteUrl", response.data.html_url);
-      ctx.output("pullRequestNumber",
+      ctx.output("pullRequestNumber", pullRequestNumber);
    } catch (e) {
      throw new GithubResponseError("Pull request creation failed", e);
    }
  }
 });
+async function requestReviewersOnPullRequest(pr, reviewers, teamReviewers, client, logger) {
+  var _a, _b, _c, _d;
+  try {
+    const result = await client.rest.pulls.requestReviewers({
+      owner: pr.owner,
+      repo: pr.repo,
+      pull_number: pr.number,
+      reviewers,
+      team_reviewers: teamReviewers
+    });
+    const addedUsers = (_b = (_a = result.data.requested_reviewers) == null ? void 0 : _a.join(", ")) != null ? _b : "";
+    const addedTeams = (_d = (_c = result.data.requested_teams) == null ? void 0 : _c.join(", ")) != null ? _d : "";
+    logger.info(
+      `Added users [${addedUsers}] and teams [${addedTeams}] as reviewers to Pull request ${pr.number}`
+    );
+  } catch (e) {
+    logger.error(
+      `Failure when adding reviewers to Pull request ${pr.number}`,
+      e
+    );
+  }
+}
 };

 function createPublishGitlabAction(options) {
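The `publish:github:pull-request` action now also accepts optional `reviewers` and `teamReviewers` arrays; after the pull request is created they are passed to `client.rest.pulls.requestReviewers`, and any failure there is logged rather than failing the task. A rough sketch of the extended input, with made-up values (the other fields are the action's existing inputs):

// Hypothetical input for `publish:github:pull-request` showing the new reviewer fields.
const input = {
  repoUrl: "github.com?owner=my-org&repo=my-service",
  branchName: "add-feature",
  title: "Add feature",
  description: "Adds the feature",
  reviewers: ["some-user"],        // GitHub usernames
  teamReviewers: ["maintainers"]   // team slugs, passed through as team_reviewers
};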
@@ -3585,6 +3615,9 @@ const migrationsDir = backendCommon.resolvePackagePath(
   "@backstage/plugin-scaffolder-backend",
   "migrations"
 );
+function isPluginDatabaseManager(opt) {
+  return opt.getClient !== void 0;
+}
 const parseSqlDateToIsoString = (input) => {
   if (typeof input === "string") {
     return luxon.DateTime.fromSQL(input, { zone: "UTC" }).toISO();
@@ -3593,13 +3626,33 @@ const parseSqlDateToIsoString = (input) => {
 };
 class DatabaseTaskStore {
   static async create(options) {
-
-
-
-    return new DatabaseTaskStore(
+    const { database } = options;
+    const client = await this.getClient(database);
+    await this.runMigrations(database, client);
+    return new DatabaseTaskStore(client);
   }
-
-
+  static async getClient(database) {
+    if (isPluginDatabaseManager(database)) {
+      return database.getClient();
+    }
+    return database;
+  }
+  static async runMigrations(database, client) {
+    var _a;
+    if (!isPluginDatabaseManager(database)) {
+      await client.migrate.latest({
+        directory: migrationsDir
+      });
+      return;
+    }
+    if (!((_a = database.migrations) == null ? void 0 : _a.skip)) {
+      await client.migrate.latest({
+        directory: migrationsDir
+      });
+    }
+  }
+  constructor(client) {
+    this.db = client;
   }
   async list(options) {
     const queryBuilder = this.db("tasks");
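`DatabaseTaskStore.create` now accepts either a raw Knex client, as before, or a `PluginDatabaseManager`; the duck-type check `isPluginDatabaseManager` looks for a `getClient` method, and when a manager is passed the store resolves the client itself and skips running migrations if `database.migrations.skip` is set. A minimal sketch of the two call shapes (`knexClient` and `env.database` are placeholders for whatever your backend provides):

// Both forms are accepted after this change; the shape is detected at runtime.
const storeFromKnex = await DatabaseTaskStore.create({ database: knexClient });
const storeFromManager = await DatabaseTaskStore.create({ database: env.database });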
@@ -3698,7 +3751,8 @@ class DatabaseTaskStore {
       throw new errors.ConflictError(`No running task with taskId ${taskId} found`);
     }
   }
-  async listStaleTasks(
+  async listStaleTasks(options) {
+    const { timeoutS } = options;
     const rawRows = await this.db("tasks").where("status", "processing").andWhere(
       "last_heartbeat_at",
       "<=",
@@ -3712,11 +3766,8 @@ class DatabaseTaskStore {
     }));
     return { tasks };
   }
-  async completeTask({
-    taskId,
-    status,
-    eventBody
-  }) {
+  async completeTask(options) {
+    const { taskId, status, eventBody } = options;
     let oldStatus;
     if (status === "failed" || status === "completed") {
       oldStatus = "processing";
@@ -3764,10 +3815,8 @@ class DatabaseTaskStore {
       body: serializedBody
     });
   }
-  async listEvents({
-    taskId,
-    after
-  }) {
+  async listEvents(options) {
+    const { taskId, after } = options;
     const rawEvents = await this.db("task_events").where({
       task_id: taskId
     }).andWhere((builder) => {
@@ -4414,9 +4463,7 @@ async function createRouter(options) {
   const integrations = integration.ScmIntegrations.fromConfig(config);
   let taskBroker;
   if (!options.taskBroker) {
-    const databaseTaskStore = await DatabaseTaskStore.create({
-      database: await database.getClient()
-    });
+    const databaseTaskStore = await DatabaseTaskStore.create({ database });
     taskBroker = new StorageTaskBroker(databaseTaskStore, logger);
   } else {
     taskBroker = options.taskBroker;
@@ -4455,7 +4502,10 @@ async function createRouter(options) {
     async (req, res) => {
       var _a, _b;
       const { namespace, kind, name } = req.params;
-      const { token } = parseBearerToken(
+      const { token } = parseBearerToken({
+        header: req.headers.authorization,
+        logger
+      });
       const template = await findTemplate({
         catalogApi: catalogClient,
         entityRef: { kind, namespace, name },
@@ -4496,9 +4546,10 @@ async function createRouter(options) {
       const { kind, namespace, name } = catalogModel.parseEntityRef(templateRef, {
         defaultKind: "template"
       });
-      const { token, entityRef: userEntityRef } = parseBearerToken(
-        req.headers.authorization
-
+      const { token, entityRef: userEntityRef } = parseBearerToken({
+        header: req.headers.authorization,
+        logger
+      });
       const userEntity = userEntityRef ? await catalogClient.getEntityByRef(userEntityRef, { token }) : void 0;
       let auditLog = `Scaffolding task for ${templateRef}`;
       if (userEntityRef) {
@@ -4546,7 +4597,10 @@ async function createRouter(options) {
           namespace,
           name: (_c = template.metadata) == null ? void 0 : _c.name
         }),
-        baseUrl
+        baseUrl,
+        entity: {
+          metadata: template.metadata
+        }
       }
      };
      const result = await taskBroker.dispatch({
@@ -4660,7 +4714,10 @@ data: ${JSON.stringify(event)}
       if (!await pluginScaffolderCommon.templateEntityV1beta3Validator.check(template)) {
         throw new errors.InputError("Input template is not a template");
       }
-      const { token } = parseBearerToken(
+      const { token } = parseBearerToken({
+        header: req.headers.authorization,
+        logger
+      });
       for (const parameters of [(_a = template.spec.parameters) != null ? _a : []].flat()) {
         const result2 = jsonschema.validate(body.values, parameters);
         if (!result2.valid) {
@@ -4707,7 +4764,10 @@ data: ${JSON.stringify(event)}
   app.use("/", router);
   return app;
 }
-function parseBearerToken(
+function parseBearerToken({
+  header,
+  logger
+}) {
   var _a;
   if (!header) {
     return {};
@@ -4728,9 +4788,11 @@ function parseBearerToken(header) {
     if (typeof sub !== "string") {
       throw new TypeError("Expected string sub claim");
     }
+    catalogModel.parseEntityRef(sub);
     return { entityRef: sub, token };
   } catch (e) {
-
+    logger.error(`Invalid authorization header: ${errors.stringifyError(e)}`);
+    return {};
   }
 }

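`parseBearerToken` (an internal helper of the router, not public API) now takes an options object that includes the `logger`, additionally checks that the token's `sub` claim parses as an entity ref, and on failure logs the problem and returns an empty object. A sketch of the call as the router now makes it:

// `req` and `logger` come from the surrounding Express handler and router options.
const { token, entityRef: userEntityRef } = parseBearerToken({
  header: req.headers.authorization,
  logger
});
// Both values are undefined when the header is missing or the token is invalid.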
@@ -4793,12 +4855,10 @@ const scaffolderCatalogModule = backendPluginApi.createBackendModule({
   register(env) {
     env.registerInit({
       deps: {
-
+        catalog: pluginCatalogNode.catalogProcessingExtensionPoint
       },
-      async init({
-
-        new ScaffolderEntitiesProcessor()
-      );
+      async init({ catalog }) {
+        catalog.addProcessor(new ScaffolderEntitiesProcessor());
       }
     });
   }
@@ -4826,7 +4886,6 @@ exports.createPublishAzureAction = createPublishAzureAction;
 exports.createPublishBitbucketAction = createPublishBitbucketAction;
 exports.createPublishBitbucketCloudAction = createPublishBitbucketCloudAction;
 exports.createPublishBitbucketServerAction = createPublishBitbucketServerAction;
-exports.createPublishFileAction = createPublishFileAction;
 exports.createPublishGerritAction = createPublishGerritAction;
 exports.createPublishGerritReviewAction = createPublishGerritReviewAction;
 exports.createPublishGithubAction = createPublishGithubAction;