eas-cli 3.18.0 → 3.18.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +55 -55
- package/build/branch/actions/SelectBranch.d.ts +33 -0
- package/build/branch/actions/SelectBranch.js +67 -0
- package/build/channel/actions/SelectChannel.d.ts +16 -0
- package/build/channel/actions/SelectChannel.js +42 -24
- package/build/channel/branch-mapping.d.ts +17 -1
- package/build/channel/branch-mapping.js +48 -7
- package/build/channel/queries.d.ts +1 -6
- package/build/channel/queries.js +1 -25
- package/build/commands/channel/rollout-preview.d.ts +32 -0
- package/build/commands/channel/rollout-preview.js +109 -0
- package/build/commands/update/republish.js +23 -74
- package/build/graphql/generated.d.ts +192 -18
- package/build/graphql/generated.js +8 -2
- package/build/graphql/queries/BranchQuery.d.ts +3 -2
- package/build/graphql/queries/BranchQuery.js +43 -1
- package/build/graphql/queries/ChannelQuery.d.ts +3 -2
- package/build/graphql/queries/ChannelQuery.js +19 -5
- package/build/graphql/queries/RuntimeQuery.d.ts +6 -0
- package/build/graphql/queries/RuntimeQuery.js +70 -0
- package/build/graphql/types/Runtime.d.ts +1 -0
- package/build/graphql/types/Runtime.js +11 -0
- package/build/graphql/types/UpdateBranch.js +3 -1
- package/build/graphql/types/UpdateBranchBasicInfo.d.ts +1 -0
- package/build/graphql/types/UpdateBranchBasicInfo.js +11 -0
- package/build/rollout/actions/CreateRollout.d.ts +23 -0
- package/build/rollout/actions/CreateRollout.js +153 -0
- package/build/rollout/actions/EditRollout.d.ts +17 -0
- package/build/rollout/actions/EditRollout.js +79 -0
- package/build/rollout/actions/EndRollout.d.ts +24 -0
- package/build/rollout/actions/EndRollout.js +164 -0
- package/build/rollout/actions/ManageRollout.d.ts +24 -0
- package/build/rollout/actions/ManageRollout.js +78 -0
- package/build/rollout/actions/NonInteractiveRollout.d.ts +18 -0
- package/build/rollout/actions/NonInteractiveRollout.js +46 -0
- package/build/rollout/actions/RolloutMainMenu.d.ts +28 -0
- package/build/rollout/actions/RolloutMainMenu.js +110 -0
- package/build/rollout/actions/SelectRollout.js +18 -8
- package/build/rollout/actions/SelectRuntime.d.ts +36 -0
- package/build/rollout/actions/SelectRuntime.js +167 -0
- package/build/rollout/branch-mapping.d.ts +52 -4
- package/build/rollout/branch-mapping.js +68 -19
- package/build/rollout/utils.d.ts +9 -2
- package/build/rollout/utils.js +66 -21
- package/build/update/configure.d.ts +6 -1
- package/build/update/configure.js +18 -8
- package/build/update/republish.d.ts +26 -0
- package/build/update/republish.js +83 -0
- package/build/utils/relay.d.ts +80 -15
- package/build/utils/relay.js +211 -28
- package/oclif.manifest.json +1 -1
- package/package.json +2 -2
package/build/rollout/utils.js
CHANGED
|
@@ -1,36 +1,24 @@
|
|
|
1
1
|
"use strict";
|
|
2
2
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
-
exports.
|
|
3
|
+
exports.promptForRolloutPercentAsync = exports.formatRuntimeWithUpdateGroup = exports.formatBranchWithUpdateGroup = exports.displayRolloutDetails = exports.printRollout = void 0;
|
|
4
4
|
const tslib_1 = require("tslib");
|
|
5
5
|
const chalk_1 = tslib_1.__importDefault(require("chalk"));
|
|
6
6
|
const log_1 = tslib_1.__importDefault(require("../log"));
|
|
7
|
+
const prompts_1 = require("../prompts");
|
|
8
|
+
const utils_1 = require("../update/utils");
|
|
7
9
|
const formatFields_1 = tslib_1.__importDefault(require("../utils/formatFields"));
|
|
8
10
|
const branch_mapping_1 = require("./branch-mapping");
|
|
9
11
|
function printRollout(channel) {
|
|
10
12
|
const rollout = (0, branch_mapping_1.getRollout)(channel);
|
|
11
|
-
|
|
12
|
-
log_1.default.addNewLineIfNone();
|
|
13
|
-
log_1.default.log(chalk_1.default.bold('Rollout:'));
|
|
14
|
-
log_1.default.log((0, formatFields_1.default)([
|
|
15
|
-
{ label: 'Channel', value: channel.name },
|
|
16
|
-
...((0, branch_mapping_1.isConstrainedRollout)(rollout)
|
|
17
|
-
? [{ label: 'Runtime Version', value: rollout.runtimeVersion }]
|
|
18
|
-
: []),
|
|
19
|
-
{
|
|
20
|
-
label: 'Branches',
|
|
21
|
-
value: `${rollout.rolledOutBranch.name} (${rolledOutPercent}%), ${rollout.defaultBranch.name} (${100 - rolledOutPercent}%)`,
|
|
22
|
-
},
|
|
23
|
-
]));
|
|
24
|
-
log_1.default.addNewLineIfNone();
|
|
13
|
+
displayRolloutDetails(channel.name, rollout);
|
|
25
14
|
}
|
|
26
15
|
exports.printRollout = printRollout;
|
|
27
|
-
function
|
|
28
|
-
const rollout = (0, branch_mapping_1.getRollout)(channel);
|
|
16
|
+
function displayRolloutDetails(channelName, rollout) {
|
|
29
17
|
const rolledOutPercent = rollout.percentRolledOut;
|
|
30
|
-
log_1.default.
|
|
31
|
-
log_1.default.log(chalk_1.default.bold('Rollout:'));
|
|
18
|
+
log_1.default.newLine();
|
|
19
|
+
log_1.default.log(chalk_1.default.bold('🚀 Rollout:'));
|
|
32
20
|
log_1.default.log((0, formatFields_1.default)([
|
|
33
|
-
{ label: 'Channel', value:
|
|
21
|
+
{ label: 'Channel', value: channelName },
|
|
34
22
|
...((0, branch_mapping_1.isConstrainedRollout)(rollout)
|
|
35
23
|
? [{ label: 'Runtime Version', value: rollout.runtimeVersion }]
|
|
36
24
|
: []),
|
|
@@ -41,4 +29,61 @@ function printBranch(channel) {
|
|
|
41
29
|
]));
|
|
42
30
|
log_1.default.addNewLineIfNone();
|
|
43
31
|
}
|
|
44
|
-
exports.
|
|
32
|
+
exports.displayRolloutDetails = displayRolloutDetails;
|
|
33
|
+
function formatBranchWithUpdateGroup(maybeUpdateGroup, branch, percentRolledOut) {
|
|
34
|
+
const lines = [];
|
|
35
|
+
lines.push(chalk_1.default.bold(`🍽️ Served by branch ${chalk_1.default.bold(branch.name)} (${percentRolledOut}%)`));
|
|
36
|
+
if (!maybeUpdateGroup) {
|
|
37
|
+
lines.push(`No updates for target runtime`);
|
|
38
|
+
}
|
|
39
|
+
else {
|
|
40
|
+
const [updateGroupDescription] = (0, utils_1.getUpdateGroupDescriptions)([maybeUpdateGroup]);
|
|
41
|
+
lines.push(...formatUpdateGroup(updateGroupDescription));
|
|
42
|
+
}
|
|
43
|
+
return lines.join('\n ');
|
|
44
|
+
}
|
|
45
|
+
exports.formatBranchWithUpdateGroup = formatBranchWithUpdateGroup;
|
|
46
|
+
function formatRuntimeWithUpdateGroup(maybeUpdateGroup, runtime) {
|
|
47
|
+
const lines = [];
|
|
48
|
+
lines.push(chalk_1.default.bold(`🍽️ Served by runtime ${chalk_1.default.bold(runtime.version)}:`));
|
|
49
|
+
if (!maybeUpdateGroup) {
|
|
50
|
+
lines.push(`No updates published for this runtime`);
|
|
51
|
+
}
|
|
52
|
+
else {
|
|
53
|
+
const [updateGroupDescription] = (0, utils_1.getUpdateGroupDescriptions)([maybeUpdateGroup]);
|
|
54
|
+
lines.push(...formatUpdateGroup(updateGroupDescription));
|
|
55
|
+
}
|
|
56
|
+
return lines.join('\n ');
|
|
57
|
+
}
|
|
58
|
+
exports.formatRuntimeWithUpdateGroup = formatRuntimeWithUpdateGroup;
|
|
59
|
+
function formatUpdateGroup(updateGroup) {
|
|
60
|
+
var _a, _b, _c, _d;
|
|
61
|
+
const lines = [];
|
|
62
|
+
const formattedLines = (0, formatFields_1.default)([
|
|
63
|
+
{ label: 'Message', value: (_a = updateGroup.message) !== null && _a !== void 0 ? _a : 'N/A' },
|
|
64
|
+
{ label: 'Runtime Version', value: (_b = updateGroup.runtimeVersion) !== null && _b !== void 0 ? _b : 'N/A' },
|
|
65
|
+
{ label: 'Platforms', value: (_c = updateGroup.platforms) !== null && _c !== void 0 ? _c : 'N/A' },
|
|
66
|
+
{ label: 'Group ID', value: (_d = updateGroup.group) !== null && _d !== void 0 ? _d : 'N/A' },
|
|
67
|
+
]).split('\n');
|
|
68
|
+
lines.push(...formattedLines);
|
|
69
|
+
return lines;
|
|
70
|
+
}
|
|
71
|
+
async function promptForRolloutPercentAsync({ promptMessage, }) {
|
|
72
|
+
const { name: rolloutPercent } = await (0, prompts_1.promptAsync)({
|
|
73
|
+
type: 'text',
|
|
74
|
+
name: 'name',
|
|
75
|
+
format: value => {
|
|
76
|
+
return parseInt(value, 10);
|
|
77
|
+
},
|
|
78
|
+
message: promptMessage,
|
|
79
|
+
initial: 0,
|
|
80
|
+
validate: (rolloutPercent) => {
|
|
81
|
+
const floatValue = parseFloat(rolloutPercent);
|
|
82
|
+
return Number.isInteger(floatValue) && floatValue >= 0 && floatValue <= 100
|
|
83
|
+
? true
|
|
84
|
+
: 'The rollout percentage must be an integer between 0 and 100 inclusive.';
|
|
85
|
+
},
|
|
86
|
+
});
|
|
87
|
+
return rolloutPercent;
|
|
88
|
+
}
|
|
89
|
+
exports.promptForRolloutPercentAsync = promptForRolloutPercentAsync;
|
|
@@ -1,10 +1,15 @@
|
|
|
1
1
|
import { ExpoConfig } from '@expo/config-types';
|
|
2
|
+
import { Workflow } from '@expo/eas-build-job';
|
|
2
3
|
import { ExpoGraphqlClient } from '../commandUtils/context/contextUtils/createGraphqlClient';
|
|
3
4
|
import { RequestedPlatform } from '../platform';
|
|
4
|
-
export declare const
|
|
5
|
+
export declare const DEFAULT_MANAGED_RUNTIME_VERSION_GTE_SDK_49: {
|
|
6
|
+
readonly policy: "appVersion";
|
|
7
|
+
};
|
|
8
|
+
export declare const DEFAULT_MANAGED_RUNTIME_VERSION_LTE_SDK_48: {
|
|
5
9
|
readonly policy: "sdkVersion";
|
|
6
10
|
};
|
|
7
11
|
export declare const DEFAULT_BARE_RUNTIME_VERSION: "1.0.0";
|
|
12
|
+
export declare function getDefaultRuntimeVersion(workflow: Workflow, sdkVersion: string | undefined): NonNullable<ExpoConfig['runtimeVersion']>;
|
|
8
13
|
/**
|
|
9
14
|
* Make sure EAS Build profiles are configured to work with EAS Update by adding channels to build profiles.
|
|
10
15
|
*/
|
|
@@ -1,11 +1,12 @@
|
|
|
1
1
|
"use strict";
|
|
2
2
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
-
exports.ensureUseClassicUpdatesIsRemovedAsync = exports.ensureEASUpdateIsConfiguredAsync = exports.ensureEASUpdateIsConfiguredInEasJsonAsync = exports.DEFAULT_BARE_RUNTIME_VERSION = exports.
|
|
3
|
+
exports.ensureUseClassicUpdatesIsRemovedAsync = exports.ensureEASUpdateIsConfiguredAsync = exports.ensureEASUpdateIsConfiguredInEasJsonAsync = exports.getDefaultRuntimeVersion = exports.DEFAULT_BARE_RUNTIME_VERSION = exports.DEFAULT_MANAGED_RUNTIME_VERSION_LTE_SDK_48 = exports.DEFAULT_MANAGED_RUNTIME_VERSION_GTE_SDK_49 = void 0;
|
|
4
4
|
const tslib_1 = require("tslib");
|
|
5
5
|
const eas_build_job_1 = require("@expo/eas-build-job");
|
|
6
6
|
const eas_json_1 = require("@expo/eas-json");
|
|
7
7
|
const chalk_1 = tslib_1.__importDefault(require("chalk"));
|
|
8
8
|
const fs_extra_1 = tslib_1.__importDefault(require("fs-extra"));
|
|
9
|
+
const semver_1 = tslib_1.__importDefault(require("semver"));
|
|
9
10
|
const api_1 = require("../api");
|
|
10
11
|
const generated_1 = require("../graphql/generated");
|
|
11
12
|
const log_1 = tslib_1.__importStar(require("../log"));
|
|
@@ -16,13 +17,20 @@ const workflow_1 = require("../project/workflow");
|
|
|
16
17
|
const prompts_1 = require("../prompts");
|
|
17
18
|
const UpdatesModule_1 = require("./android/UpdatesModule");
|
|
18
19
|
const UpdatesModule_2 = require("./ios/UpdatesModule");
|
|
19
|
-
exports.
|
|
20
|
+
exports.DEFAULT_MANAGED_RUNTIME_VERSION_GTE_SDK_49 = { policy: 'appVersion' };
|
|
21
|
+
exports.DEFAULT_MANAGED_RUNTIME_VERSION_LTE_SDK_48 = { policy: 'sdkVersion' };
|
|
20
22
|
exports.DEFAULT_BARE_RUNTIME_VERSION = '1.0.0';
|
|
21
|
-
function getDefaultRuntimeVersion(workflow) {
|
|
22
|
-
|
|
23
|
-
|
|
24
|
-
|
|
23
|
+
function getDefaultRuntimeVersion(workflow, sdkVersion) {
|
|
24
|
+
if (workflow === eas_build_job_1.Workflow.GENERIC) {
|
|
25
|
+
return exports.DEFAULT_BARE_RUNTIME_VERSION;
|
|
26
|
+
}
|
|
27
|
+
// Expo Go supports loading appVersion SDK 49 and above
|
|
28
|
+
const hasSupportedSdk = sdkVersion && semver_1.default.satisfies(sdkVersion, '>= 49.0.0');
|
|
29
|
+
return hasSupportedSdk
|
|
30
|
+
? exports.DEFAULT_MANAGED_RUNTIME_VERSION_GTE_SDK_49
|
|
31
|
+
: exports.DEFAULT_MANAGED_RUNTIME_VERSION_LTE_SDK_48;
|
|
25
32
|
}
|
|
33
|
+
exports.getDefaultRuntimeVersion = getDefaultRuntimeVersion;
|
|
26
34
|
function isRuntimeEqual(runtimeVersionA, runtimeVersionB) {
|
|
27
35
|
if (typeof runtimeVersionA === 'string' && typeof runtimeVersionB === 'string') {
|
|
28
36
|
return runtimeVersionA === runtimeVersionB;
|
|
@@ -81,8 +89,10 @@ async function ensureEASUpdatesIsConfiguredInExpoConfigAsync({ exp, projectId, p
|
|
|
81
89
|
let iosRuntimeVersion = (_e = (_d = exp.ios) === null || _d === void 0 ? void 0 : _d.runtimeVersion) !== null && _e !== void 0 ? _e : exp.runtimeVersion;
|
|
82
90
|
if ((['all', 'android'].includes(platform) && !androidRuntimeVersion) ||
|
|
83
91
|
(['all', 'ios'].includes(platform) && !iosRuntimeVersion)) {
|
|
84
|
-
androidRuntimeVersion =
|
|
85
|
-
|
|
92
|
+
androidRuntimeVersion =
|
|
93
|
+
androidRuntimeVersion !== null && androidRuntimeVersion !== void 0 ? androidRuntimeVersion : getDefaultRuntimeVersion(workflows.android, exp.sdkVersion);
|
|
94
|
+
iosRuntimeVersion =
|
|
95
|
+
iosRuntimeVersion !== null && iosRuntimeVersion !== void 0 ? iosRuntimeVersion : getDefaultRuntimeVersion(workflows.ios, exp.sdkVersion);
|
|
86
96
|
if (platform === 'all' && isRuntimeEqual(androidRuntimeVersion, iosRuntimeVersion)) {
|
|
87
97
|
modifyConfig.runtimeVersion = androidRuntimeVersion;
|
|
88
98
|
}
|
|
@@ -0,0 +1,26 @@
|
|
|
1
|
+
import { ExpoConfig } from '@expo/config';
|
|
2
|
+
import { ExpoGraphqlClient } from '../commandUtils/context/contextUtils/createGraphqlClient';
|
|
3
|
+
import { Update } from '../graphql/generated';
|
|
4
|
+
export type UpdateToRepublish = {
|
|
5
|
+
groupId: string;
|
|
6
|
+
branchId: string;
|
|
7
|
+
branchName: string;
|
|
8
|
+
} & Pick<Update, 'message' | 'runtimeVersion' | 'manifestFragment' | 'platform' | 'gitCommitHash' | 'codeSigningInfo'>;
|
|
9
|
+
/**
|
|
10
|
+
* @param updatesToPublish The update group to republish
|
|
11
|
+
* @param targetBranch The branch to repubish the update group on
|
|
12
|
+
*/
|
|
13
|
+
export declare function republishAsync({ graphqlClient, app, updatesToPublish, targetBranch, updateMessage, json, }: {
|
|
14
|
+
graphqlClient: ExpoGraphqlClient;
|
|
15
|
+
app: {
|
|
16
|
+
exp: ExpoConfig;
|
|
17
|
+
projectId: string;
|
|
18
|
+
};
|
|
19
|
+
updatesToPublish: UpdateToRepublish[];
|
|
20
|
+
targetBranch: {
|
|
21
|
+
branchName: string;
|
|
22
|
+
branchId: string;
|
|
23
|
+
};
|
|
24
|
+
updateMessage: string;
|
|
25
|
+
json?: boolean;
|
|
26
|
+
}): Promise<void>;
|
|
@@ -0,0 +1,83 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.republishAsync = void 0;
|
|
4
|
+
const tslib_1 = require("tslib");
|
|
5
|
+
const assert_1 = tslib_1.__importDefault(require("assert"));
|
|
6
|
+
const url_1 = require("../build/utils/url");
|
|
7
|
+
const PublishMutation_1 = require("../graphql/mutations/PublishMutation");
|
|
8
|
+
const log_1 = tslib_1.__importStar(require("../log"));
|
|
9
|
+
const ora_1 = require("../ora");
|
|
10
|
+
const projectUtils_1 = require("../project/projectUtils");
|
|
11
|
+
const formatFields_1 = tslib_1.__importDefault(require("../utils/formatFields"));
|
|
12
|
+
const json_1 = require("../utils/json");
|
|
13
|
+
/**
|
|
14
|
+
* @param updatesToPublish The update group to republish
|
|
15
|
+
* @param targetBranch The branch to repubish the update group on
|
|
16
|
+
*/
|
|
17
|
+
async function republishAsync({ graphqlClient, app, updatesToPublish, targetBranch, updateMessage, json, }) {
|
|
18
|
+
const { branchName: targetBranchName, branchId: targetBranchId } = targetBranch;
|
|
19
|
+
// The update group properties are the same for all updates
|
|
20
|
+
(0, assert_1.default)(updatesToPublish.length > 0, 'Updates to republish must be provided');
|
|
21
|
+
const arbitraryUpdate = updatesToPublish[0];
|
|
22
|
+
const isSameGroup = (update) => update.groupId === arbitraryUpdate.groupId &&
|
|
23
|
+
update.branchId === arbitraryUpdate.branchId &&
|
|
24
|
+
update.branchName === arbitraryUpdate.branchName &&
|
|
25
|
+
update.runtimeVersion === arbitraryUpdate.runtimeVersion;
|
|
26
|
+
(0, assert_1.default)(updatesToPublish.every(isSameGroup), 'All updates must belong to the same update group');
|
|
27
|
+
const { runtimeVersion } = arbitraryUpdate;
|
|
28
|
+
// If codesigning was created for the original update, we need to add it to the republish
|
|
29
|
+
const shouldRepublishWithCodesigning = updatesToPublish.some(update => update.codeSigningInfo);
|
|
30
|
+
if (shouldRepublishWithCodesigning) {
|
|
31
|
+
log_1.default.withTick(`The republished update will be signed with the same codesigning as the original update.`);
|
|
32
|
+
}
|
|
33
|
+
const publishIndicator = (0, ora_1.ora)('Republishing...').start();
|
|
34
|
+
let updatesRepublished;
|
|
35
|
+
try {
|
|
36
|
+
updatesRepublished = await PublishMutation_1.PublishMutation.publishUpdateGroupAsync(graphqlClient, [
|
|
37
|
+
{
|
|
38
|
+
branchId: targetBranchId,
|
|
39
|
+
runtimeVersion,
|
|
40
|
+
message: updateMessage,
|
|
41
|
+
updateInfoGroup: Object.fromEntries(updatesToPublish.map(update => [update.platform, JSON.parse(update.manifestFragment)])),
|
|
42
|
+
gitCommitHash: updatesToPublish[0].gitCommitHash,
|
|
43
|
+
awaitingCodeSigningInfo: shouldRepublishWithCodesigning,
|
|
44
|
+
},
|
|
45
|
+
]);
|
|
46
|
+
if (shouldRepublishWithCodesigning) {
|
|
47
|
+
const codeSigningByPlatform = Object.fromEntries(updatesToPublish.map(update => [update.platform, update.codeSigningInfo]));
|
|
48
|
+
await Promise.all(updatesRepublished.map(async (update) => {
|
|
49
|
+
const codeSigning = codeSigningByPlatform[update.platform];
|
|
50
|
+
if (codeSigning) {
|
|
51
|
+
await PublishMutation_1.PublishMutation.setCodeSigningInfoAsync(graphqlClient, update.id, codeSigning);
|
|
52
|
+
}
|
|
53
|
+
}));
|
|
54
|
+
}
|
|
55
|
+
publishIndicator.succeed('Republished update');
|
|
56
|
+
}
|
|
57
|
+
catch (error) {
|
|
58
|
+
publishIndicator.fail('Failed to republish update');
|
|
59
|
+
throw error;
|
|
60
|
+
}
|
|
61
|
+
if (json) {
|
|
62
|
+
return (0, json_1.printJsonOnlyOutput)(updatesRepublished);
|
|
63
|
+
}
|
|
64
|
+
const updatesRepublishedByPlatform = Object.fromEntries(updatesRepublished.map(update => [update.platform, update]));
|
|
65
|
+
const arbitraryRepublishedUpdate = updatesRepublished[0];
|
|
66
|
+
const updateGroupUrl = (0, url_1.getUpdateGroupUrl)((await (0, projectUtils_1.getOwnerAccountForProjectIdAsync)(graphqlClient, app.projectId)).name, app.exp.slug, arbitraryRepublishedUpdate.group);
|
|
67
|
+
log_1.default.addNewLineIfNone();
|
|
68
|
+
log_1.default.log((0, formatFields_1.default)([
|
|
69
|
+
{ label: 'Branch', value: targetBranchName },
|
|
70
|
+
{ label: 'Runtime version', value: arbitraryRepublishedUpdate.runtimeVersion },
|
|
71
|
+
{ label: 'Platform', value: updatesRepublished.map(update => update.platform).join(', ') },
|
|
72
|
+
{ label: 'Update Group ID', value: arbitraryRepublishedUpdate.group },
|
|
73
|
+
...(updatesRepublishedByPlatform.android
|
|
74
|
+
? [{ label: 'Android update ID', value: updatesRepublishedByPlatform.android.id }]
|
|
75
|
+
: []),
|
|
76
|
+
...(updatesRepublishedByPlatform.ios
|
|
77
|
+
? [{ label: 'iOS update ID', value: updatesRepublishedByPlatform.ios.id }]
|
|
78
|
+
: []),
|
|
79
|
+
{ label: 'Message', value: updateMessage },
|
|
80
|
+
{ label: 'Website link', value: (0, log_1.link)(updateGroupUrl, { dim: false }) },
|
|
81
|
+
]));
|
|
82
|
+
}
|
|
83
|
+
exports.republishAsync = republishAsync;
|
package/build/utils/relay.d.ts
CHANGED
|
@@ -3,31 +3,96 @@ export type Connection<T> = {
|
|
|
3
3
|
edges: Edge<T>[];
|
|
4
4
|
pageInfo: PageInfo;
|
|
5
5
|
};
|
|
6
|
-
type Edge<T> = {
|
|
6
|
+
export type Edge<T> = {
|
|
7
|
+
cursor: string;
|
|
7
8
|
node: T;
|
|
8
9
|
};
|
|
9
10
|
export type QueryParams = {
|
|
10
|
-
first
|
|
11
|
+
first?: number;
|
|
11
12
|
after?: string;
|
|
13
|
+
last?: number;
|
|
14
|
+
before?: string;
|
|
12
15
|
};
|
|
13
16
|
/**
|
|
14
|
-
* Fetches dataset in paginated manner (batch by batch) using GraphQL queries.
|
|
15
17
|
*
|
|
18
|
+
* Pagination that performs client side filtering on the nodes returned from a relay compliant datasource.
|
|
19
|
+
*
|
|
20
|
+
* @param queryParams The query params for the pagination.
|
|
16
21
|
* @param queryAsync A promise based function for querying.
|
|
22
|
+
* @param filterPredicate A predicate function to filter the node.
|
|
23
|
+
* @param beforeEachQuery Optional. A callback function to be called before each query
|
|
24
|
+
* @param afterEachQuery Optional. A callback function to be called after each query.
|
|
25
|
+
* @param internalBatchSize Optional. The batch size of queryAsync. Defaults to 100.
|
|
26
|
+
* @param maxNodesFetched Optional. The maximum number of nodes to fetch. Defaults to 10_000.
|
|
17
27
|
* @param beforeEachQuery Optional. A callback function to be called before each query
|
|
28
|
+
* @args externalQueryParams The query params for the pagination.
|
|
29
|
+
* @args totalNodesFetched The total number of nodes fetched so far.
|
|
30
|
+
* @args dataset The dataset so far.
|
|
18
31
|
* @param afterEachQuery Optional. A callback function to be called after each query.
|
|
19
|
-
* @
|
|
20
|
-
* @
|
|
32
|
+
* @args externalQueryParams The query params for the pagination.
|
|
33
|
+
* @args totalNodesFetched The total number of nodes fetched so far.
|
|
34
|
+
* @args dataset The dataset so far.
|
|
35
|
+
* @args willFetchAgain If the query will fetch again to get a complete page.
|
|
21
36
|
*
|
|
22
|
-
* @return {Promise<T[]>} - A promise that resolves to an array (the dataset).
|
|
23
37
|
* @throws {Error} - If an error occurs during execution of the query or pagination.
|
|
24
38
|
*/
|
|
25
|
-
export declare
|
|
26
|
-
|
|
27
|
-
|
|
28
|
-
|
|
29
|
-
|
|
30
|
-
|
|
31
|
-
|
|
32
|
-
|
|
33
|
-
|
|
39
|
+
export declare class FilterPagination {
|
|
40
|
+
static getPageAsync<T>({ queryParams, queryAsync, filterPredicate, internalBatchSize, maxNodesFetched, beforeEachQuery, afterEachQuery, }: {
|
|
41
|
+
queryParams: QueryParams;
|
|
42
|
+
queryAsync: (queryParams: QueryParams) => Promise<Connection<T>>;
|
|
43
|
+
filterPredicate: (node: T) => boolean;
|
|
44
|
+
internalBatchSize?: number;
|
|
45
|
+
maxNodesFetched?: number;
|
|
46
|
+
beforeEachQuery?: (externalQueryParams: QueryParams, totalNodesFetched: number, dataset: Edge<T>[]) => void;
|
|
47
|
+
afterEachQuery?: (externalQueryParams: QueryParams, totalNodesFetched: number, dataset: Edge<T>[], willFetchAgain: boolean) => void;
|
|
48
|
+
}): Promise<Connection<T>>;
|
|
49
|
+
static isFirstAfter(connectionArgs: QueryParams): connectionArgs is {
|
|
50
|
+
first: number;
|
|
51
|
+
after?: string;
|
|
52
|
+
};
|
|
53
|
+
static isLastBefore(connectionArgs: {
|
|
54
|
+
first?: number;
|
|
55
|
+
after?: string;
|
|
56
|
+
last?: number;
|
|
57
|
+
before?: string;
|
|
58
|
+
}): connectionArgs is {
|
|
59
|
+
last: number;
|
|
60
|
+
before?: string;
|
|
61
|
+
};
|
|
62
|
+
static getFirstItemsAsync<T>({ first, after }: {
|
|
63
|
+
first: number;
|
|
64
|
+
after?: string;
|
|
65
|
+
}, { internalBatchSize, maxNodesFetched, filterPredicate, queryAsync, beforeEachQuery, afterEachQuery, }: {
|
|
66
|
+
internalBatchSize?: number;
|
|
67
|
+
maxNodesFetched: number;
|
|
68
|
+
filterPredicate: (node: T) => boolean;
|
|
69
|
+
queryAsync: (queryParams: QueryParams) => Promise<Connection<T>>;
|
|
70
|
+
beforeEachQuery?: (externalQueryParams: QueryParams, totalNodesFetched: number, dataset: Edge<T>[]) => void;
|
|
71
|
+
afterEachQuery?: (externalQueryParams: QueryParams, totalNodesFetched: number, dataset: Edge<T>[], willFetchAgain: boolean) => void;
|
|
72
|
+
}): Promise<Connection<T>>;
|
|
73
|
+
static getLastItemsAsync<T>({ last, before }: {
|
|
74
|
+
last: number;
|
|
75
|
+
before?: string;
|
|
76
|
+
}, { internalBatchSize, maxNodesFetched, filterPredicate, queryAsync, beforeEachQuery, afterEachQuery, }: {
|
|
77
|
+
internalBatchSize?: number;
|
|
78
|
+
maxNodesFetched: number;
|
|
79
|
+
filterPredicate: (node: T) => boolean;
|
|
80
|
+
queryAsync: (queryParams: QueryParams) => Promise<Connection<T>>;
|
|
81
|
+
beforeEachQuery?: (externalQueryParams: QueryParams, totalNodesFetched: number, dataset: Edge<T>[]) => void;
|
|
82
|
+
afterEachQuery?: (externalQueryParams: QueryParams, totalNodesFetched: number, dataset: Edge<T>[], willFetchAgain: boolean) => void;
|
|
83
|
+
}): Promise<Connection<T>>;
|
|
84
|
+
}
|
|
85
|
+
export declare function selectPaginatedAsync<T>({ queryAsync, getTitleAsync, printedType, pageSize, }: {
|
|
86
|
+
pageSize: number;
|
|
87
|
+
queryAsync: (queryParams: QueryParams) => Promise<Connection<T>>;
|
|
88
|
+
getTitleAsync: (node: T) => Promise<string>;
|
|
89
|
+
printedType: string;
|
|
90
|
+
}): Promise<T | null>;
|
|
91
|
+
export declare const PREV_PAGE_OPTION: {
|
|
92
|
+
value: symbol;
|
|
93
|
+
title: string;
|
|
94
|
+
};
|
|
95
|
+
export declare const NEXT_PAGE_OPTION: {
|
|
96
|
+
value: symbol;
|
|
97
|
+
title: string;
|
|
98
|
+
};
|
package/build/utils/relay.js
CHANGED
|
@@ -1,43 +1,226 @@
|
|
|
1
1
|
"use strict";
|
|
2
2
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
-
exports.
|
|
3
|
+
exports.NEXT_PAGE_OPTION = exports.PREV_PAGE_OPTION = exports.selectPaginatedAsync = exports.FilterPagination = void 0;
|
|
4
|
+
const tslib_1 = require("tslib");
|
|
5
|
+
const assert_1 = tslib_1.__importDefault(require("assert"));
|
|
6
|
+
const prompts_1 = require("../prompts");
|
|
4
7
|
/**
|
|
5
|
-
* Fetches dataset in paginated manner (batch by batch) using GraphQL queries.
|
|
6
8
|
*
|
|
9
|
+
* Pagination that performs client side filtering on the nodes returned from a relay compliant datasource.
|
|
10
|
+
*
|
|
11
|
+
* @param queryParams The query params for the pagination.
|
|
7
12
|
* @param queryAsync A promise based function for querying.
|
|
13
|
+
* @param filterPredicate A predicate function to filter the node.
|
|
14
|
+
* @param beforeEachQuery Optional. A callback function to be called before each query
|
|
15
|
+
* @param afterEachQuery Optional. A callback function to be called after each query.
|
|
16
|
+
* @param internalBatchSize Optional. The batch size of queryAsync. Defaults to 100.
|
|
17
|
+
* @param maxNodesFetched Optional. The maximum number of nodes to fetch. Defaults to 10_000.
|
|
8
18
|
* @param beforeEachQuery Optional. A callback function to be called before each query
|
|
19
|
+
* @args externalQueryParams The query params for the pagination.
|
|
20
|
+
* @args totalNodesFetched The total number of nodes fetched so far.
|
|
21
|
+
* @args dataset The dataset so far.
|
|
9
22
|
* @param afterEachQuery Optional. A callback function to be called after each query.
|
|
10
|
-
* @
|
|
11
|
-
* @
|
|
23
|
+
* @args externalQueryParams The query params for the pagination.
|
|
24
|
+
* @args totalNodesFetched The total number of nodes fetched so far.
|
|
25
|
+
* @args dataset The dataset so far.
|
|
26
|
+
* @args willFetchAgain If the query will fetch again to get a complete page.
|
|
12
27
|
*
|
|
13
|
-
* @return {Promise<T[]>} - A promise that resolves to an array (the dataset).
|
|
14
28
|
* @throws {Error} - If an error occurs during execution of the query or pagination.
|
|
15
29
|
*/
|
|
16
|
-
|
|
17
|
-
|
|
18
|
-
|
|
19
|
-
|
|
20
|
-
|
|
21
|
-
|
|
22
|
-
|
|
23
|
-
|
|
24
|
-
|
|
30
|
+
class FilterPagination {
|
|
31
|
+
static async getPageAsync({ queryParams, queryAsync, filterPredicate, internalBatchSize = 100, maxNodesFetched = 10000, beforeEachQuery, afterEachQuery, }) {
|
|
32
|
+
if (this.isFirstAfter(queryParams)) {
|
|
33
|
+
return await this.getFirstItemsAsync(queryParams, {
|
|
34
|
+
queryAsync,
|
|
35
|
+
filterPredicate,
|
|
36
|
+
internalBatchSize,
|
|
37
|
+
maxNodesFetched,
|
|
38
|
+
beforeEachQuery,
|
|
39
|
+
afterEachQuery,
|
|
40
|
+
});
|
|
41
|
+
}
|
|
42
|
+
else if (this.isLastBefore(queryParams)) {
|
|
43
|
+
return await this.getLastItemsAsync(queryParams, {
|
|
44
|
+
queryAsync,
|
|
45
|
+
filterPredicate,
|
|
46
|
+
internalBatchSize,
|
|
47
|
+
maxNodesFetched,
|
|
48
|
+
beforeEachQuery,
|
|
49
|
+
afterEachQuery,
|
|
50
|
+
});
|
|
25
51
|
}
|
|
26
|
-
|
|
27
|
-
|
|
28
|
-
|
|
29
|
-
|
|
30
|
-
|
|
31
|
-
|
|
32
|
-
|
|
33
|
-
|
|
34
|
-
|
|
35
|
-
|
|
52
|
+
throw new Error('Invalid query params');
|
|
53
|
+
}
|
|
54
|
+
static isFirstAfter(connectionArgs) {
|
|
55
|
+
return 'first' in connectionArgs;
|
|
56
|
+
}
|
|
57
|
+
static isLastBefore(connectionArgs) {
|
|
58
|
+
return 'last' in connectionArgs;
|
|
59
|
+
}
|
|
60
|
+
static async getFirstItemsAsync({ first, after }, { internalBatchSize, maxNodesFetched, filterPredicate, queryAsync, beforeEachQuery, afterEachQuery, }) {
|
|
61
|
+
var _a, _b, _c, _d, _e;
|
|
62
|
+
const limit = first + 1;
|
|
63
|
+
const dataset = [];
|
|
64
|
+
let hasMore = true;
|
|
65
|
+
let afterInternal = after;
|
|
66
|
+
let totalNodesFetched = 0;
|
|
67
|
+
while (hasMore && dataset.length < limit) {
|
|
68
|
+
if (beforeEachQuery) {
|
|
69
|
+
beforeEachQuery({ first, after }, totalNodesFetched, dataset);
|
|
70
|
+
}
|
|
71
|
+
const result = await queryAsync({ first: internalBatchSize, after: afterInternal });
|
|
72
|
+
const { edges: batchEdges, pageInfo } = result;
|
|
73
|
+
const batch = batchEdges.filter(edge => filterPredicate(edge.node));
|
|
74
|
+
const nodesRemaining = limit - dataset.length;
|
|
75
|
+
dataset.push(...batch.slice(0, nodesRemaining));
|
|
76
|
+
hasMore = pageInfo.hasNextPage;
|
|
77
|
+
afterInternal = (_a = pageInfo.endCursor) !== null && _a !== void 0 ? _a : undefined;
|
|
78
|
+
totalNodesFetched += batchEdges.length;
|
|
79
|
+
if (afterEachQuery) {
|
|
80
|
+
afterEachQuery({ first, after }, totalNodesFetched, dataset, hasMore && dataset.length < limit);
|
|
81
|
+
}
|
|
82
|
+
if (totalNodesFetched >= maxNodesFetched) {
|
|
83
|
+
throw new Error(`Max nodes of ${maxNodesFetched} fetched`);
|
|
84
|
+
}
|
|
36
85
|
}
|
|
37
|
-
|
|
38
|
-
|
|
86
|
+
const edges = dataset.slice(0, first);
|
|
87
|
+
return {
|
|
88
|
+
edges,
|
|
89
|
+
pageInfo: {
|
|
90
|
+
hasNextPage: dataset.length > first,
|
|
91
|
+
hasPreviousPage: false,
|
|
92
|
+
startCursor: (_c = (_b = edges[0]) === null || _b === void 0 ? void 0 : _b.cursor) !== null && _c !== void 0 ? _c : null,
|
|
93
|
+
endCursor: (_e = (_d = edges[edges.length - 1]) === null || _d === void 0 ? void 0 : _d.cursor) !== null && _e !== void 0 ? _e : null,
|
|
94
|
+
},
|
|
95
|
+
};
|
|
96
|
+
}
|
|
97
|
+
/**
 * Fetch the last `last` nodes (optionally before cursor `before`) whose node passes
 * `filterPredicate`, paginating backwards through the underlying Relay connection in
 * batches of `internalBatchSize` via `queryAsync({ last, before })`.
 *
 * Returns a Relay-shaped connection. `hasNextPage` is always reported as `false`
 * (we cannot know it cheaply when walking backwards); `hasPreviousPage` is derived
 * from whether one extra matching node beyond `last` was found.
 *
 * Throws once more than `maxNodesFetched` raw nodes have been pulled from the server,
 * as a guard against unbounded pagination over a sparse filter.
 *
 * `beforeEachQuery` / `afterEachQuery` are optional progress hooks (e.g. for spinners).
 */
static async getLastItemsAsync({ last, before }, { internalBatchSize, maxNodesFetched, filterPredicate, queryAsync, beforeEachQuery, afterEachQuery, }) {
    var _a, _b, _c, _d, _e;
    // Over-fetch by one node: finding `last + 1` matches proves a previous page exists.
    const limit = last + 1;
    const dataset = [];
    let hasMore = true;
    let beforeInternal = before;
    let totalNodesFetched = 0;
    while (hasMore && dataset.length < limit) {
        if (beforeEachQuery) {
            beforeEachQuery({ last, before }, totalNodesFetched, dataset);
        }
        const result = await queryAsync({ last: internalBatchSize, before: beforeInternal });
        const { edges: batchEdges, pageInfo } = result;
        // Keep only edges whose node passes the caller's filter.
        const batch = batchEdges.filter(edge => filterPredicate(edge.node));
        const nodesRemaining = limit - dataset.length;
        // relay orders pages from first to last, so we reverse the batch to choose the last n
        const nodesChosen = batch.reverse().slice(0, nodesRemaining);
        dataset.push(...nodesChosen);
        // Walking backwards, so the server's hasPreviousPage tells us whether to continue,
        // and startCursor is the next `before` anchor.
        hasMore = pageInfo.hasPreviousPage;
        beforeInternal = (_a = pageInfo.startCursor) !== null && _a !== void 0 ? _a : undefined;
        // Count raw (unfiltered) nodes against the fetch budget.
        totalNodesFetched += batchEdges.length;
        if (afterEachQuery) {
            afterEachQuery({ last, before }, totalNodesFetched, dataset, hasMore && dataset.length < limit);
        }
        if (totalNodesFetched >= maxNodesFetched) {
            throw new Error(`Max nodes of ${maxNodesFetched} fetched`);
        }
    }
    // we reverse our dataset again to restore the original order of first to last to match relay
    const edges = dataset.slice(0, last).reverse();
    return {
        edges,
        pageInfo: {
            hasNextPage: false,
            // True exactly when the extra (limit = last + 1) node was found.
            hasPreviousPage: dataset.length > last,
            startCursor: (_c = (_b = edges[0]) === null || _b === void 0 ? void 0 : _b.cursor) !== null && _c !== void 0 ? _c : null,
            endCursor: (_e = (_d = edges[edges.length - 1]) === null || _d === void 0 ? void 0 : _d.cursor) !== null && _e !== void 0 ? _e : null,
        },
    };
}
|
|
137
|
+
}
|
|
138
|
+
// Public export: client-side filtered pagination over Relay connections.
exports.FilterPagination = FilterPagination;
|
|
139
|
+
/**
 * Interactively select one node from a paginated Relay connection.
 *
 * Trivial connections short-circuit without prompting: an empty first page
 * resolves to `null`, and a single-item first page resolves to that item
 * directly. Anything larger is handed to the paginated prompt loop.
 */
async function selectPaginatedAsync({ queryAsync, getTitleAsync, printedType, pageSize, }) {
    // Preflight the first page so the 0/1-item cases never show a prompt.
    const preflight = await queryAsync({ first: pageSize });
    const initialEdges = preflight.edges;
    if (initialEdges.length === 0) {
        return null;
    }
    if (initialEdges.length === 1) {
        return initialEdges[0].node;
    }
    return await selectPaginatedInternalAsync({
        queryAsync,
        getTitleAsync,
        printedType,
        queryParams: { first: pageSize },
    });
}
|
|
156
|
+
// Public export: interactive single-item selection over a paginated connection.
exports.selectPaginatedAsync = selectPaginatedAsync;
|
|
157
|
+
// Sentinel prompt choices injected above/below the real nodes to navigate pages.
// Symbol values guarantee they can never collide with a caller-supplied node.
exports.PREV_PAGE_OPTION = {
    value: Symbol('PREV_PAGE'),
    title: '⬆️ Previous page',
};
exports.NEXT_PAGE_OPTION = {
    value: Symbol('NEXT_PAGE'),
    title: '⬇️ Next page',
};
|
|
165
|
+
/**
 * Recursive prompt loop backing selectPaginatedAsync: show one page of nodes
 * plus optional previous/next sentinels, and either return the chosen node or
 * recurse with the cursor params for the adjacent page.
 *
 * `queryParams` must carry `first` (forward paging) or `last` (backward paging);
 * asserts otherwise. The page size (`limit`) is preserved across recursions.
 */
async function selectPaginatedInternalAsync({ queryAsync, getTitleAsync, printedType, queryParams, }) {
    var _a;
    // Page size comes from whichever direction param is present.
    const limit = (_a = queryParams.first) !== null && _a !== void 0 ? _a : queryParams.last;
    (0, assert_1.default)(limit, 'queryParams must have either first or last');
    const connection = await queryAsync(queryParams);
    const { edges, pageInfo } = connection;
    /*
     * The Relay spec has a weird definition on hasNextPage and hasPreviousPage:
     * 'If the client is paginating with last/before, then the server must return true if prior edges
     * exist, otherwise false. If the client is paginating with first/after, then the client may
     * return true if edges prior to after exist, if it can do so efficiently, otherwise may return false.'
     *
     * This means if we are paginating with first/after, we can't rely on pageInfo.hasPreviousPage and vice versa.
     */
    const { endCursor, hasNextPage: serverResponseHasNextPage, startCursor, hasPreviousPage: serverResponseHasPreviousPage, } = pageInfo;
    // NOTE: these are truthy checks, not booleans — `after`/`before` are cursor strings.
    // The presence of a cursor implies we navigated here from an adjacent page.
    const hasPreviousPage = serverResponseHasPreviousPage || queryParams.after;
    const hasNextPage = serverResponseHasNextPage || queryParams.before;
    const nodes = edges.map(edge => edge.node);
    const options = [];
    if (hasPreviousPage) {
        options.push(exports.PREV_PAGE_OPTION);
    }
    // Resolve all display titles in parallel before building the choice list.
    const nodeTitles = await Promise.all(nodes.map(node => getTitleAsync(node)));
    options.push(...nodes.map((node, index) => ({ value: node, title: nodeTitles[index] })));
    if (hasNextPage) {
        options.push(exports.NEXT_PAGE_OPTION);
    }
    const { item: selectedItem } = await (0, prompts_1.promptAsync)({
        type: 'select',
        name: 'item',
        message: `Select a ${printedType}`,
        choices: options.map(option => ({
            value: option.value,
            title: option.title,
        })),
    });
    if (selectedItem === exports.PREV_PAGE_OPTION.value) {
        // Page backwards: anchor `before` on this page's start cursor.
        return await selectPaginatedInternalAsync({
            queryParams: {
                last: limit,
                before: startCursor !== null && startCursor !== void 0 ? startCursor : undefined,
            },
            queryAsync,
            getTitleAsync,
            printedType,
        });
    }
    else if (selectedItem === exports.NEXT_PAGE_OPTION.value) {
        // Page forwards: anchor `after` on this page's end cursor.
        return await selectPaginatedInternalAsync({
            queryParams: {
                first: limit,
                after: endCursor !== null && endCursor !== void 0 ? endCursor : undefined,
            },
            queryAsync,
            getTitleAsync,
            printedType,
        });
    }
    else {
        // A real node was chosen.
        return selectedItem;
    }
}
|
|
43
|
-
exports.getPaginatedDatasetAsync = getPaginatedDatasetAsync;
|