@expo/build-tools 18.0.1 → 18.0.6
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/buildErrors/userErrorHandlers.js +11 -7
- package/dist/customBuildContext.d.ts +2 -0
- package/dist/customBuildContext.js +2 -0
- package/dist/steps/easFunctions.js +4 -0
- package/dist/steps/functions/internalMaestroTest.js +7 -0
- package/dist/steps/functions/maestroResultParser.d.ts +42 -0
- package/dist/steps/functions/maestroResultParser.js +215 -0
- package/dist/steps/functions/readIpaInfo.d.ts +8 -0
- package/dist/steps/functions/readIpaInfo.js +111 -0
- package/dist/steps/functions/reportMaestroTestResults.d.ts +3 -0
- package/dist/steps/functions/reportMaestroTestResults.js +105 -0
- package/dist/steps/functions/uploadToAsc.js +1 -1
- package/dist/steps/utils/ios/AscApiClient.d.ts +3 -3
- package/package.json +7 -4
|
@@ -13,7 +13,7 @@ exports.userErrorHandlers = [
|
|
|
13
13
|
// [!] `React` requires CocoaPods version `>= 1.10.1`, which is not satisfied by your current version, `1.10.0`.
|
|
14
14
|
createError: () => new UserFacingError('EAS_BUILD_UNSUPPORTED_COCOAPODS_VERSION_ERROR', `Your project requires a newer version of CocoaPods. You can update it in the build profile in eas.json by either:
|
|
15
15
|
- changing the current version under key "cocoapods"
|
|
16
|
-
- switching to an image that supports that version under key "image"`, 'https://docs.expo.dev/build-reference/eas-json/'),
|
|
16
|
+
- switching to an image that supports that version under key "image"`, { docsUrl: 'https://docs.expo.dev/build-reference/eas-json/' }),
|
|
17
17
|
},
|
|
18
18
|
{
|
|
19
19
|
platform: eas_build_job_1.Platform.IOS,
|
|
@@ -21,7 +21,7 @@ exports.userErrorHandlers = [
|
|
|
21
21
|
regexp: /Could not find 'bundler' (.*) required by your/,
|
|
22
22
|
// example log:
|
|
23
23
|
// /System/Library/Frameworks/Ruby.framework/Versions/2.6/usr/lib/ruby/2.6.0/rubygems/dependency.rb:313:in `to_specs': Could not find 'bundler' (2.2.3) required by your /Users/expo/project/build/ios/Gemfile.lock. (Gem::MissingSpecVersionError)
|
|
24
|
-
createError: () => new UserFacingError('EAS_BUILD_UNSUPPORTED_BUNDLER_VERSION_ERROR', `Your project requires a different version of the Ruby "bundler" program than the version installed in this EAS Build environment. You can specify which version of "bundler" to install by specifying the version under "build"→[buildProfileName]→"ios"→"bundler" in eas.json.`, 'https://docs.expo.dev/build-reference/eas-json/'),
|
|
24
|
+
createError: () => new UserFacingError('EAS_BUILD_UNSUPPORTED_BUNDLER_VERSION_ERROR', `Your project requires a different version of the Ruby "bundler" program than the version installed in this EAS Build environment. You can specify which version of "bundler" to install by specifying the version under "build"→[buildProfileName]→"ios"→"bundler" in eas.json.`, { docsUrl: 'https://docs.expo.dev/build-reference/eas-json/' }),
|
|
25
25
|
},
|
|
26
26
|
{
|
|
27
27
|
platform: eas_build_job_1.Platform.ANDROID,
|
|
@@ -48,7 +48,7 @@ exports.userErrorHandlers = [
|
|
|
48
48
|
// [11:17:29] [android.dangerous]: withAndroidDangerousBaseMod: Cannot copy google-services.json from /home/expo/workingdir/build/test/test-google-services.json to /home/expo/workingdir/build/android/app/google-services.json. Please make sure the source and destination paths exist.
|
|
49
49
|
// [11:17:29] Error: [android.dangerous]: withAndroidDangerousBaseMod: Cannot copy google-services.json from /home/expo/workingdir/build/test/test-google-services.json to /home/expo/workingdir/build/android/app/google-services.json. Please make sure the source and destination paths exist.
|
|
50
50
|
regexp: /Cannot copy google-services\.json/,
|
|
51
|
-
createError: () => new UserFacingError('EAS_BUILD_MISSING_GOOGLE_SERVICES_JSON_ERROR', '"google-services.json" is missing, make sure that the file exists. Remember that EAS Build only uploads the files tracked by git. Use EAS environment variables to provide EAS Build with the file.', 'https://docs.expo.dev/eas/environment-variables/#file-environment-variables'),
|
|
51
|
+
createError: () => new UserFacingError('EAS_BUILD_MISSING_GOOGLE_SERVICES_JSON_ERROR', '"google-services.json" is missing, make sure that the file exists. Remember that EAS Build only uploads the files tracked by git. Use EAS environment variables to provide EAS Build with the file.', { docsUrl: 'https://docs.expo.dev/eas/environment-variables/#file-environment-variables' }),
|
|
52
52
|
},
|
|
53
53
|
{
|
|
54
54
|
platform: eas_build_job_1.Platform.ANDROID,
|
|
@@ -57,7 +57,7 @@ exports.userErrorHandlers = [
|
|
|
57
57
|
// > File google-services.json is missing. The Google Services Plugin cannot function without it.
|
|
58
58
|
// Searched Location:
|
|
59
59
|
regexp: /File google-services\.json is missing\. The Google Services Plugin cannot function without it/,
|
|
60
|
-
createError: () => new UserFacingError('EAS_BUILD_MISSING_GOOGLE_SERVICES_JSON_ERROR', '"google-services.json" is missing, make sure that the file exists. Remember that EAS Build only uploads the files tracked by git. Use EAS environment variables to provide EAS Build with the file.', 'https://docs.expo.dev/eas/environment-variables/#file-environment-variables'),
|
|
60
|
+
createError: () => new UserFacingError('EAS_BUILD_MISSING_GOOGLE_SERVICES_JSON_ERROR', '"google-services.json" is missing, make sure that the file exists. Remember that EAS Build only uploads the files tracked by git. Use EAS environment variables to provide EAS Build with the file.', { docsUrl: 'https://docs.expo.dev/eas/environment-variables/#file-environment-variables' }),
|
|
61
61
|
},
|
|
62
62
|
{
|
|
63
63
|
platform: eas_build_job_1.Platform.IOS,
|
|
@@ -65,7 +65,7 @@ exports.userErrorHandlers = [
|
|
|
65
65
|
// example log:
|
|
66
66
|
// [08:44:18] ENOENT: no such file or directory, copyfile '/Users/expo/workingdir/build/managed/abc' -> '/Users/expo/workingdir/build/managed/ios/testapp/GoogleService-Info.plist'
|
|
67
67
|
regexp: /ENOENT: no such file or directory, copyfile .*GoogleService-Info.plist/,
|
|
68
|
-
createError: () => new UserFacingError('EAS_BUILD_MISSING_GOOGLE_SERVICES_PLIST_ERROR', '"GoogleService-Info.plist" is missing, make sure that the file exists. Remember that EAS Build only uploads the files tracked by git. Use EAS environment variables to provide EAS Build with the file.', 'https://docs.expo.dev/eas/environment-variables/#file-environment-variables'),
|
|
68
|
+
createError: () => new UserFacingError('EAS_BUILD_MISSING_GOOGLE_SERVICES_PLIST_ERROR', '"GoogleService-Info.plist" is missing, make sure that the file exists. Remember that EAS Build only uploads the files tracked by git. Use EAS environment variables to provide EAS Build with the file.', { docsUrl: 'https://docs.expo.dev/eas/environment-variables/#file-environment-variables' }),
|
|
69
69
|
},
|
|
70
70
|
{
|
|
71
71
|
platform: eas_build_job_1.Platform.IOS,
|
|
@@ -176,9 +176,13 @@ You are seeing this error because either:
|
|
|
176
176
|
regexp: /error: Signing for "[a-zA-Z-0-9_]+" requires a development team/,
|
|
177
177
|
createError: (_, { job }) => 'type' in job && job.type === eas_build_job_1.Workflow.MANAGED
|
|
178
178
|
? new UserFacingError('XCODE_RESOURCE_BUNDLE_CODE_SIGNING_ERROR', `Starting from Xcode 14, resource bundles are signed by default, which requires setting the development team for each resource bundle target.
|
|
179
|
-
To resolve this issue, downgrade to an older Xcode version using the "image" field in eas.json, or upgrade to SDK 46 or higher.`,
|
|
179
|
+
To resolve this issue, downgrade to an older Xcode version using the "image" field in eas.json, or upgrade to SDK 46 or higher.`, {
|
|
180
|
+
docsUrl: 'https://docs.expo.dev/build-reference/infrastructure/#ios-build-server-configurations',
|
|
181
|
+
})
|
|
180
182
|
: new UserFacingError('XCODE_RESOURCE_BUNDLE_CODE_SIGNING_ERROR', `Starting from Xcode 14, resource bundles are signed by default, which requires setting the development team for each resource bundle target.
|
|
181
|
-
To resolve this issue, downgrade to an older Xcode version using the "image" field in eas.json, or turn off signing resource bundles in your Podfile: https://expo.fyi/r/disable-bundle-resource-signing`,
|
|
183
|
+
To resolve this issue, downgrade to an older Xcode version using the "image" field in eas.json, or turn off signing resource bundles in your Podfile: https://expo.fyi/r/disable-bundle-resource-signing`, {
|
|
184
|
+
docsUrl: 'https://docs.expo.dev/build-reference/infrastructure/#ios-build-server-configurations',
|
|
185
|
+
}),
|
|
182
186
|
},
|
|
183
187
|
{
|
|
184
188
|
platform: eas_build_job_1.Platform.ANDROID,
|
|
@@ -1,6 +1,7 @@
|
|
|
1
1
|
import { BuildJob, Env, Job, Metadata, StaticJobInterpolationContext } from '@expo/eas-build-job';
|
|
2
2
|
import { bunyan } from '@expo/logger';
|
|
3
3
|
import { BuildRuntimePlatform, ExternalBuildContextProvider } from '@expo/steps';
|
|
4
|
+
import { Client } from '@urql/core';
|
|
4
5
|
import { ArtifactToUpload, BuildContext } from './context';
|
|
5
6
|
export interface BuilderRuntimeApi {
|
|
6
7
|
uploadArtifact: (spec: {
|
|
@@ -20,6 +21,7 @@ export declare class CustomBuildContext<TJob extends Job = Job> implements Exter
|
|
|
20
21
|
*/
|
|
21
22
|
readonly startTime: Date;
|
|
22
23
|
readonly logger: bunyan;
|
|
24
|
+
readonly graphqlClient: Client;
|
|
23
25
|
readonly runtimeApi: BuilderRuntimeApi;
|
|
24
26
|
job: TJob;
|
|
25
27
|
metadata?: Metadata;
|
|
@@ -34,6 +34,7 @@ class CustomBuildContext {
|
|
|
34
34
|
*/
|
|
35
35
|
startTime;
|
|
36
36
|
logger;
|
|
37
|
+
graphqlClient;
|
|
37
38
|
runtimeApi;
|
|
38
39
|
job;
|
|
39
40
|
metadata;
|
|
@@ -43,6 +44,7 @@ class CustomBuildContext {
|
|
|
43
44
|
this.job = buildCtx.job;
|
|
44
45
|
this.metadata = buildCtx.metadata;
|
|
45
46
|
this.logger = buildCtx.logger.child({ phase: eas_build_job_1.BuildPhase.CUSTOM });
|
|
47
|
+
this.graphqlClient = buildCtx.graphqlClient;
|
|
46
48
|
this.projectSourceDirectory = path_1.default.join(buildCtx.workingdir, 'temporary-custom-build');
|
|
47
49
|
this.projectTargetDirectory = path_1.default.join(buildCtx.workingdir, 'build');
|
|
48
50
|
this.defaultWorkingDirectory = buildCtx.getReactNativeProjectDirectory();
|
|
@@ -20,7 +20,9 @@ const installNodeModules_1 = require("./functions/installNodeModules");
|
|
|
20
20
|
const installPods_1 = require("./functions/installPods");
|
|
21
21
|
const internalMaestroTest_1 = require("./functions/internalMaestroTest");
|
|
22
22
|
const prebuild_1 = require("./functions/prebuild");
|
|
23
|
+
const readIpaInfo_1 = require("./functions/readIpaInfo");
|
|
23
24
|
const repack_1 = require("./functions/repack");
|
|
25
|
+
const reportMaestroTestResults_1 = require("./functions/reportMaestroTestResults");
|
|
24
26
|
const resolveAppleTeamIdFromCredentials_1 = require("./functions/resolveAppleTeamIdFromCredentials");
|
|
25
27
|
const resolveBuildConfig_1 = require("./functions/resolveBuildConfig");
|
|
26
28
|
const restoreBuildCache_1 = require("./functions/restoreBuildCache");
|
|
@@ -44,6 +46,7 @@ function getEasFunctions(ctx) {
|
|
|
44
46
|
(0, useNpmToken_1.createSetUpNpmrcBuildFunction)(),
|
|
45
47
|
(0, installNodeModules_1.createInstallNodeModulesBuildFunction)(),
|
|
46
48
|
(0, prebuild_1.createPrebuildBuildFunction)(),
|
|
49
|
+
(0, readIpaInfo_1.createReadIpaInfoBuildFunction)(),
|
|
47
50
|
(0, downloadBuild_1.createDownloadBuildFunction)(),
|
|
48
51
|
(0, repack_1.createRepackBuildFunction)(),
|
|
49
52
|
(0, restoreCache_1.createRestoreCacheFunction)(),
|
|
@@ -71,6 +74,7 @@ function getEasFunctions(ctx) {
|
|
|
71
74
|
(0, createSubmissionEntity_1.createSubmissionEntityFunction)(),
|
|
72
75
|
(0, uploadToAsc_1.createUploadToAscBuildFunction)(),
|
|
73
76
|
(0, internalMaestroTest_1.createInternalEasMaestroTestFunction)(ctx),
|
|
77
|
+
(0, reportMaestroTestResults_1.createReportMaestroTestResultsFunction)(ctx),
|
|
74
78
|
];
|
|
75
79
|
if (ctx.hasBuildJob()) {
|
|
76
80
|
functions.push(...[
|
|
@@ -74,6 +74,10 @@ function createInternalEasMaestroTestFunction(ctx) {
|
|
|
74
74
|
id: 'test_reports_artifact_id',
|
|
75
75
|
required: false,
|
|
76
76
|
}),
|
|
77
|
+
steps_1.BuildStepOutput.createProvider({
|
|
78
|
+
id: 'junit_report_directory',
|
|
79
|
+
required: false,
|
|
80
|
+
}),
|
|
77
81
|
],
|
|
78
82
|
fn: async (stepCtx, { inputs: _inputs, env, outputs }) => {
|
|
79
83
|
// inputs come in form of { value: unknown }. Here we parse them into a typed and validated object.
|
|
@@ -293,6 +297,9 @@ function createInternalEasMaestroTestFunction(ctx) {
|
|
|
293
297
|
stepCtx.logger.error({ err }, 'Failed to upload reports.');
|
|
294
298
|
}
|
|
295
299
|
}
|
|
300
|
+
if (output_format === 'junit') {
|
|
301
|
+
outputs.junit_report_directory.set(maestroReportsDir);
|
|
302
|
+
}
|
|
296
303
|
const generatedDeviceLogs = await node_fs_1.default.promises.readdir(deviceLogsDir);
|
|
297
304
|
if (generatedDeviceLogs.length === 0) {
|
|
298
305
|
stepCtx.logger.warn('No device logs were successfully collected.');
|
|
@@ -0,0 +1,42 @@
|
|
|
1
|
+
import { z } from 'zod';
|
|
2
|
+
export interface MaestroFlowResult {
|
|
3
|
+
name: string;
|
|
4
|
+
path: string;
|
|
5
|
+
status: 'passed' | 'failed';
|
|
6
|
+
errorMessage: string | null;
|
|
7
|
+
duration: number;
|
|
8
|
+
retryCount: number;
|
|
9
|
+
tags: string[];
|
|
10
|
+
properties: Record<string, string>;
|
|
11
|
+
}
|
|
12
|
+
export declare function extractFlowKey(filename: string, prefix: string): string | null;
|
|
13
|
+
export interface JUnitTestCaseResult {
|
|
14
|
+
name: string;
|
|
15
|
+
status: 'passed' | 'failed';
|
|
16
|
+
duration: number;
|
|
17
|
+
errorMessage: string | null;
|
|
18
|
+
tags: string[];
|
|
19
|
+
properties: Record<string, string>;
|
|
20
|
+
}
|
|
21
|
+
export declare function parseJUnitTestCases(junitDirectory: string): Promise<JUnitTestCaseResult[]>;
|
|
22
|
+
declare const FlowMetadataFileSchema: z.ZodObject<{
|
|
23
|
+
flow_name: z.ZodString;
|
|
24
|
+
flow_file_path: z.ZodString;
|
|
25
|
+
}, z.core.$strip>;
|
|
26
|
+
type FlowMetadata = z.output<typeof FlowMetadataFileSchema>;
|
|
27
|
+
/**
|
|
28
|
+
* Parses an `ai-*.json` file produced by Maestro's TestDebugReporter.
|
|
29
|
+
*
|
|
30
|
+
* The file contains:
|
|
31
|
+
* - `flow_name`: derived from the YAML `config.name` field if present, otherwise
|
|
32
|
+
* the flow filename without extension.
|
|
33
|
+
* See: https://github.com/mobile-dev-inc/Maestro/blob/c0e95fd/maestro-cli/src/main/java/maestro/cli/runner/TestRunner.kt#L70
|
|
34
|
+
* - `flow_file_path`: absolute path to the original flow YAML file.
|
|
35
|
+
* - `outputs`: screenshot defect data (unused here).
|
|
36
|
+
*
|
|
37
|
+
* Filename format: `ai-(flowName).json` where `/` in flowName is replaced with `_`.
|
|
38
|
+
* See: https://github.com/mobile-dev-inc/Maestro/blob/c0e95fd/maestro-cli/src/main/java/maestro/cli/report/TestDebugReporter.kt#L67
|
|
39
|
+
*/
|
|
40
|
+
export declare function parseFlowMetadata(filePath: string): Promise<FlowMetadata | null>;
|
|
41
|
+
export declare function parseMaestroResults(junitDirectory: string, testsDirectory: string, projectRoot: string): Promise<MaestroFlowResult[]>;
|
|
42
|
+
export {};
|
|
@@ -0,0 +1,215 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
3
|
+
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
4
|
+
};
|
|
5
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
6
|
+
exports.extractFlowKey = extractFlowKey;
|
|
7
|
+
exports.parseJUnitTestCases = parseJUnitTestCases;
|
|
8
|
+
exports.parseFlowMetadata = parseFlowMetadata;
|
|
9
|
+
exports.parseMaestroResults = parseMaestroResults;
|
|
10
|
+
const fast_xml_parser_1 = require("fast-xml-parser");
|
|
11
|
+
const promises_1 = __importDefault(require("fs/promises"));
|
|
12
|
+
const path_1 = __importDefault(require("path"));
|
|
13
|
+
const zod_1 = require("zod");
|
|
14
|
+
// Maestro's TestDebugReporter creates timestamped directories, e.g. "2024-06-15_143022"
|
|
15
|
+
const TIMESTAMP_DIR_PATTERN = /^\d{4}-\d{2}-\d{2}_\d{6}$/;
|
|
16
|
+
function extractFlowKey(filename, prefix) {
|
|
17
|
+
const match = filename.match(new RegExp(`^${prefix}-(.+)\\.json$`));
|
|
18
|
+
return match?.[1] ?? null;
|
|
19
|
+
}
|
|
20
|
+
const xmlParser = new fast_xml_parser_1.XMLParser({
|
|
21
|
+
ignoreAttributes: false,
|
|
22
|
+
attributeNamePrefix: '@_',
|
|
23
|
+
// Ensure single-element arrays are always arrays
|
|
24
|
+
isArray: name => ['testsuite', 'testcase', 'property'].includes(name),
|
|
25
|
+
});
|
|
26
|
+
async function parseJUnitTestCases(junitDirectory) {
|
|
27
|
+
let entries;
|
|
28
|
+
try {
|
|
29
|
+
entries = await promises_1.default.readdir(junitDirectory);
|
|
30
|
+
}
|
|
31
|
+
catch {
|
|
32
|
+
return [];
|
|
33
|
+
}
|
|
34
|
+
const xmlFiles = entries.filter(f => f.endsWith('.xml'));
|
|
35
|
+
if (xmlFiles.length === 0) {
|
|
36
|
+
return [];
|
|
37
|
+
}
|
|
38
|
+
const results = [];
|
|
39
|
+
for (const xmlFile of xmlFiles) {
|
|
40
|
+
try {
|
|
41
|
+
const content = await promises_1.default.readFile(path_1.default.join(junitDirectory, xmlFile), 'utf-8');
|
|
42
|
+
const parsed = xmlParser.parse(content);
|
|
43
|
+
const testsuites = parsed?.testsuites?.testsuite;
|
|
44
|
+
if (!Array.isArray(testsuites)) {
|
|
45
|
+
continue;
|
|
46
|
+
}
|
|
47
|
+
for (const suite of testsuites) {
|
|
48
|
+
const testcases = suite?.testcase;
|
|
49
|
+
if (!Array.isArray(testcases)) {
|
|
50
|
+
continue;
|
|
51
|
+
}
|
|
52
|
+
for (const tc of testcases) {
|
|
53
|
+
const name = tc['@_name'];
|
|
54
|
+
if (!name) {
|
|
55
|
+
continue;
|
|
56
|
+
}
|
|
57
|
+
const timeStr = tc['@_time'];
|
|
58
|
+
const timeSeconds = timeStr ? parseFloat(timeStr) : 0;
|
|
59
|
+
const duration = Number.isFinite(timeSeconds) ? Math.round(timeSeconds * 1000) : 0;
|
|
60
|
+
// Use @_status as primary indicator (more robust than checking <failure> presence)
|
|
61
|
+
const status = tc['@_status'] === 'SUCCESS' ? 'passed' : 'failed';
|
|
62
|
+
// Extract error message from <failure> or <error> elements
|
|
63
|
+
const failureText = tc.failure != null
|
|
64
|
+
? typeof tc.failure === 'string'
|
|
65
|
+
? tc.failure
|
|
66
|
+
: (tc.failure?.['#text'] ?? null)
|
|
67
|
+
: null;
|
|
68
|
+
const errorText = tc.error != null
|
|
69
|
+
? typeof tc.error === 'string'
|
|
70
|
+
? tc.error
|
|
71
|
+
: (tc.error?.['#text'] ?? null)
|
|
72
|
+
: null;
|
|
73
|
+
const errorMessage = failureText ?? errorText ?? null;
|
|
74
|
+
// Extract properties
|
|
75
|
+
const rawProperties = tc.properties?.property ?? [];
|
|
76
|
+
const properties = {};
|
|
77
|
+
for (const prop of rawProperties) {
|
|
78
|
+
const propName = prop['@_name'];
|
|
79
|
+
const value = prop['@_value'];
|
|
80
|
+
if (typeof propName !== 'string' || typeof value !== 'string') {
|
|
81
|
+
continue;
|
|
82
|
+
}
|
|
83
|
+
properties[propName] = value;
|
|
84
|
+
}
|
|
85
|
+
// Extract tags from "tags" property (Maestro 2.2.0+, comma-separated)
|
|
86
|
+
const tagsValue = properties['tags'];
|
|
87
|
+
const tags = tagsValue
|
|
88
|
+
? tagsValue
|
|
89
|
+
.split(',')
|
|
90
|
+
.map(t => t.trim())
|
|
91
|
+
.filter(Boolean)
|
|
92
|
+
: [];
|
|
93
|
+
delete properties['tags'];
|
|
94
|
+
results.push({ name, status, duration, errorMessage, tags, properties });
|
|
95
|
+
}
|
|
96
|
+
}
|
|
97
|
+
}
|
|
98
|
+
catch {
|
|
99
|
+
// Skip malformed XML files
|
|
100
|
+
continue;
|
|
101
|
+
}
|
|
102
|
+
}
|
|
103
|
+
return results;
|
|
104
|
+
}
|
|
105
|
+
const FlowMetadataFileSchema = zod_1.z.object({
|
|
106
|
+
flow_name: zod_1.z.string(),
|
|
107
|
+
flow_file_path: zod_1.z.string(),
|
|
108
|
+
});
|
|
109
|
+
/**
|
|
110
|
+
* Parses an `ai-*.json` file produced by Maestro's TestDebugReporter.
|
|
111
|
+
*
|
|
112
|
+
* The file contains:
|
|
113
|
+
* - `flow_name`: derived from the YAML `config.name` field if present, otherwise
|
|
114
|
+
* the flow filename without extension.
|
|
115
|
+
* See: https://github.com/mobile-dev-inc/Maestro/blob/c0e95fd/maestro-cli/src/main/java/maestro/cli/runner/TestRunner.kt#L70
|
|
116
|
+
* - `flow_file_path`: absolute path to the original flow YAML file.
|
|
117
|
+
* - `outputs`: screenshot defect data (unused here).
|
|
118
|
+
*
|
|
119
|
+
* Filename format: `ai-(flowName).json` where `/` in flowName is replaced with `_`.
|
|
120
|
+
* See: https://github.com/mobile-dev-inc/Maestro/blob/c0e95fd/maestro-cli/src/main/java/maestro/cli/report/TestDebugReporter.kt#L67
|
|
121
|
+
*/
|
|
122
|
+
async function parseFlowMetadata(filePath) {
|
|
123
|
+
try {
|
|
124
|
+
const content = await promises_1.default.readFile(filePath, 'utf-8');
|
|
125
|
+
const data = JSON.parse(content);
|
|
126
|
+
return FlowMetadataFileSchema.parse(data);
|
|
127
|
+
}
|
|
128
|
+
catch {
|
|
129
|
+
return null;
|
|
130
|
+
}
|
|
131
|
+
}
|
|
132
|
+
async function parseMaestroResults(junitDirectory, testsDirectory, projectRoot) {
|
|
133
|
+
// 1. Parse JUnit XML files (primary source)
|
|
134
|
+
const junitResults = await parseJUnitTestCases(junitDirectory);
|
|
135
|
+
if (junitResults.length === 0) {
|
|
136
|
+
return [];
|
|
137
|
+
}
|
|
138
|
+
// 2. Parse ai-*.json from debug output for flow_file_path + retryCount
|
|
139
|
+
const flowPathMap = new Map(); // flowName → flowFilePath
|
|
140
|
+
const flowOccurrences = new Map(); // flowName → count
|
|
141
|
+
let entries;
|
|
142
|
+
try {
|
|
143
|
+
entries = await promises_1.default.readdir(testsDirectory);
|
|
144
|
+
}
|
|
145
|
+
catch {
|
|
146
|
+
entries = [];
|
|
147
|
+
}
|
|
148
|
+
const timestampDirs = entries.filter(name => TIMESTAMP_DIR_PATTERN.test(name)).sort();
|
|
149
|
+
for (const dir of timestampDirs) {
|
|
150
|
+
const dirPath = path_1.default.join(testsDirectory, dir);
|
|
151
|
+
let files;
|
|
152
|
+
try {
|
|
153
|
+
files = await promises_1.default.readdir(dirPath);
|
|
154
|
+
}
|
|
155
|
+
catch {
|
|
156
|
+
continue;
|
|
157
|
+
}
|
|
158
|
+
for (const file of files) {
|
|
159
|
+
const flowKey = extractFlowKey(file, 'ai');
|
|
160
|
+
if (!flowKey) {
|
|
161
|
+
continue;
|
|
162
|
+
}
|
|
163
|
+
const metadata = await parseFlowMetadata(path_1.default.join(dirPath, file));
|
|
164
|
+
if (!metadata) {
|
|
165
|
+
continue;
|
|
166
|
+
}
|
|
167
|
+
// Track latest path (last timestamp dir wins)
|
|
168
|
+
flowPathMap.set(metadata.flow_name, metadata.flow_file_path);
|
|
169
|
+
// Count occurrences for retryCount
|
|
170
|
+
flowOccurrences.set(metadata.flow_name, (flowOccurrences.get(metadata.flow_name) ?? 0) + 1);
|
|
171
|
+
}
|
|
172
|
+
}
|
|
173
|
+
// 3. Merge: JUnit results + ai-*.json metadata
|
|
174
|
+
const results = [];
|
|
175
|
+
for (const junit of junitResults) {
|
|
176
|
+
const flowFilePath = flowPathMap.get(junit.name);
|
|
177
|
+
const relativePath = flowFilePath
|
|
178
|
+
? await relativizePathAsync(flowFilePath, projectRoot)
|
|
179
|
+
: junit.name; // fallback: use flow name if ai-*.json not found
|
|
180
|
+
const occurrences = flowOccurrences.get(junit.name) ?? 0;
|
|
181
|
+
const retryCount = Math.max(0, occurrences - 1);
|
|
182
|
+
results.push({
|
|
183
|
+
name: junit.name,
|
|
184
|
+
path: relativePath,
|
|
185
|
+
status: junit.status,
|
|
186
|
+
errorMessage: junit.errorMessage,
|
|
187
|
+
duration: junit.duration,
|
|
188
|
+
retryCount,
|
|
189
|
+
tags: junit.tags,
|
|
190
|
+
properties: junit.properties,
|
|
191
|
+
});
|
|
192
|
+
}
|
|
193
|
+
return results;
|
|
194
|
+
}
|
|
195
|
+
async function relativizePathAsync(flowFilePath, projectRoot) {
|
|
196
|
+
if (!path_1.default.isAbsolute(flowFilePath)) {
|
|
197
|
+
return flowFilePath;
|
|
198
|
+
}
|
|
199
|
+
// Resolve symlinks (e.g., /tmp -> /private/tmp on macOS) for consistent comparison
|
|
200
|
+
let resolvedRoot = projectRoot;
|
|
201
|
+
let resolvedFlow = flowFilePath;
|
|
202
|
+
try {
|
|
203
|
+
resolvedRoot = await promises_1.default.realpath(projectRoot);
|
|
204
|
+
}
|
|
205
|
+
catch { }
|
|
206
|
+
try {
|
|
207
|
+
resolvedFlow = await promises_1.default.realpath(flowFilePath);
|
|
208
|
+
}
|
|
209
|
+
catch { }
|
|
210
|
+
const relative = path_1.default.relative(resolvedRoot, resolvedFlow);
|
|
211
|
+
if (relative.startsWith('..')) {
|
|
212
|
+
return flowFilePath;
|
|
213
|
+
}
|
|
214
|
+
return relative;
|
|
215
|
+
}
|
|
@@ -0,0 +1,8 @@
|
|
|
1
|
+
import { BuildFunction } from '@expo/steps';
|
|
2
|
+
export type IpaInfo = {
|
|
3
|
+
bundleIdentifier: string;
|
|
4
|
+
bundleShortVersion: string;
|
|
5
|
+
bundleVersion: string;
|
|
6
|
+
};
|
|
7
|
+
export declare function createReadIpaInfoBuildFunction(): BuildFunction;
|
|
8
|
+
export declare function readIpaInfoAsync(ipaPath: string): Promise<IpaInfo>;
|
|
@@ -0,0 +1,111 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
3
|
+
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
4
|
+
};
|
|
5
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
6
|
+
exports.createReadIpaInfoBuildFunction = createReadIpaInfoBuildFunction;
|
|
7
|
+
exports.readIpaInfoAsync = readIpaInfoAsync;
|
|
8
|
+
const errors_1 = require("@expo/eas-build-job/dist/errors");
|
|
9
|
+
const steps_1 = require("@expo/steps");
|
|
10
|
+
const fs_extra_1 = __importDefault(require("fs-extra"));
|
|
11
|
+
const node_path_1 = __importDefault(require("node:path"));
|
|
12
|
+
const node_stream_zip_1 = __importDefault(require("node-stream-zip"));
|
|
13
|
+
const zod_1 = require("zod");
|
|
14
|
+
const bplist_parser_1 = __importDefault(require("bplist-parser"));
|
|
15
|
+
const plist_1 = __importDefault(require("plist"));
|
|
16
|
+
const INFO_PLIST_PATH_REGEXP = /^Payload\/[^/]+\.app\/Info\.plist$/;
|
|
17
|
+
function createReadIpaInfoBuildFunction() {
|
|
18
|
+
return new steps_1.BuildFunction({
|
|
19
|
+
namespace: 'eas',
|
|
20
|
+
id: 'read_ipa_info',
|
|
21
|
+
name: 'Read IPA Info',
|
|
22
|
+
__metricsId: 'eas/read_ipa_info',
|
|
23
|
+
inputProviders: [
|
|
24
|
+
steps_1.BuildStepInput.createProvider({
|
|
25
|
+
id: 'ipa_path',
|
|
26
|
+
required: true,
|
|
27
|
+
allowedValueTypeName: steps_1.BuildStepInputValueTypeName.STRING,
|
|
28
|
+
}),
|
|
29
|
+
],
|
|
30
|
+
outputProviders: [
|
|
31
|
+
steps_1.BuildStepOutput.createProvider({
|
|
32
|
+
id: 'bundle_identifier',
|
|
33
|
+
required: true,
|
|
34
|
+
}),
|
|
35
|
+
steps_1.BuildStepOutput.createProvider({
|
|
36
|
+
id: 'bundle_short_version',
|
|
37
|
+
required: true,
|
|
38
|
+
}),
|
|
39
|
+
steps_1.BuildStepOutput.createProvider({
|
|
40
|
+
id: 'bundle_version',
|
|
41
|
+
required: true,
|
|
42
|
+
}),
|
|
43
|
+
],
|
|
44
|
+
fn: async (stepCtx, { inputs, outputs }) => {
|
|
45
|
+
const ipaPathInput = zod_1.z.string().parse(inputs.ipa_path.value);
|
|
46
|
+
const ipaPath = node_path_1.default.resolve(stepCtx.workingDirectory, ipaPathInput);
|
|
47
|
+
if (!(await fs_extra_1.default.pathExists(ipaPath))) {
|
|
48
|
+
throw new errors_1.UserFacingError('EAS_READ_IPA_INFO_FILE_NOT_FOUND', `IPA file not found: ${ipaPath}`);
|
|
49
|
+
}
|
|
50
|
+
const ipaInfo = await readIpaInfoAsync(ipaPath);
|
|
51
|
+
outputs.bundle_identifier.set(ipaInfo.bundleIdentifier);
|
|
52
|
+
outputs.bundle_short_version.set(ipaInfo.bundleShortVersion);
|
|
53
|
+
outputs.bundle_version.set(ipaInfo.bundleVersion);
|
|
54
|
+
},
|
|
55
|
+
});
|
|
56
|
+
}
|
|
57
|
+
async function readIpaInfoAsync(ipaPath) {
|
|
58
|
+
try {
|
|
59
|
+
const infoPlistBuffer = await readInfoPlistBufferFromIpaAsync(ipaPath);
|
|
60
|
+
const infoPlist = parseInfoPlistBuffer(infoPlistBuffer);
|
|
61
|
+
const bundleIdentifier = infoPlist.CFBundleIdentifier;
|
|
62
|
+
if (typeof bundleIdentifier !== 'string') {
|
|
63
|
+
throw new errors_1.UserFacingError('EAS_READ_IPA_INFO_INVALID_INFO_PLIST', 'Failed to read IPA info: Missing or invalid CFBundleIdentifier in Info.plist');
|
|
64
|
+
}
|
|
65
|
+
const bundleShortVersion = infoPlist.CFBundleShortVersionString;
|
|
66
|
+
if (typeof bundleShortVersion !== 'string') {
|
|
67
|
+
throw new errors_1.UserFacingError('EAS_READ_IPA_INFO_INVALID_INFO_PLIST', 'Failed to read IPA info: Missing or invalid CFBundleShortVersionString in Info.plist');
|
|
68
|
+
}
|
|
69
|
+
const bundleVersion = infoPlist.CFBundleVersion;
|
|
70
|
+
if (typeof bundleVersion !== 'string') {
|
|
71
|
+
throw new errors_1.UserFacingError('EAS_READ_IPA_INFO_INVALID_INFO_PLIST', 'Failed to read IPA info: Missing or invalid CFBundleVersion in Info.plist');
|
|
72
|
+
}
|
|
73
|
+
return {
|
|
74
|
+
bundleIdentifier,
|
|
75
|
+
bundleShortVersion,
|
|
76
|
+
bundleVersion,
|
|
77
|
+
};
|
|
78
|
+
}
|
|
79
|
+
catch (error) {
|
|
80
|
+
if (error instanceof errors_1.UserFacingError) {
|
|
81
|
+
throw error;
|
|
82
|
+
}
|
|
83
|
+
throw new errors_1.UserFacingError('EAS_READ_IPA_INFO_FAILED', `Failed to read IPA info: ${error.message}`, { cause: error });
|
|
84
|
+
}
|
|
85
|
+
}
|
|
86
|
+
function parseInfoPlistBuffer(data) {
|
|
87
|
+
const isBinaryPlist = data.subarray(0, 8).toString('ascii') === 'bplist00';
|
|
88
|
+
if (isBinaryPlist) {
|
|
89
|
+
const parsedBinaryPlists = bplist_parser_1.default.parseBuffer(data);
|
|
90
|
+
const parsedBinaryPlist = parsedBinaryPlists[0];
|
|
91
|
+
if (!parsedBinaryPlist || typeof parsedBinaryPlist !== 'object') {
|
|
92
|
+
throw new errors_1.UserFacingError('EAS_READ_IPA_INFO_INVALID_BINARY_PLIST', 'Invalid binary plist in IPA');
|
|
93
|
+
}
|
|
94
|
+
return parsedBinaryPlist;
|
|
95
|
+
}
|
|
96
|
+
return plist_1.default.parse(data.toString('utf8'));
|
|
97
|
+
}
|
|
98
|
+
async function readInfoPlistBufferFromIpaAsync(ipaPath) {
|
|
99
|
+
const zip = new node_stream_zip_1.default.async({ file: ipaPath });
|
|
100
|
+
try {
|
|
101
|
+
const entries = Object.values(await zip.entries());
|
|
102
|
+
const infoPlistEntry = entries.find(entry => INFO_PLIST_PATH_REGEXP.test(entry.name));
|
|
103
|
+
if (!infoPlistEntry) {
|
|
104
|
+
throw new errors_1.UserFacingError('EAS_READ_IPA_INFO_INFO_PLIST_NOT_FOUND', `Failed to read IPA info: Could not find Info.plist in ${ipaPath}`);
|
|
105
|
+
}
|
|
106
|
+
return await zip.entryData(infoPlistEntry.name);
|
|
107
|
+
}
|
|
108
|
+
finally {
|
|
109
|
+
await zip.close();
|
|
110
|
+
}
|
|
111
|
+
}
|
|
@@ -0,0 +1,105 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
3
|
+
exports.createReportMaestroTestResultsFunction = createReportMaestroTestResultsFunction;
|
|
4
|
+
const steps_1 = require("@expo/steps");
|
|
5
|
+
const gql_tada_1 = require("gql.tada");
|
|
6
|
+
const maestroResultParser_1 = require("./maestroResultParser");
|
|
7
|
+
const CREATE_MUTATION = (0, gql_tada_1.graphql)(`
|
|
8
|
+
mutation CreateWorkflowDeviceTestCaseResults($input: CreateWorkflowDeviceTestCaseResultsInput!) {
|
|
9
|
+
workflowDeviceTestCaseResult {
|
|
10
|
+
createWorkflowDeviceTestCaseResults(input: $input) {
|
|
11
|
+
id
|
|
12
|
+
}
|
|
13
|
+
}
|
|
14
|
+
}
|
|
15
|
+
`);
|
|
16
|
+
const FLOW_STATUS_TO_TEST_CASE_RESULT_STATUS = {
|
|
17
|
+
passed: 'PASSED',
|
|
18
|
+
failed: 'FAILED',
|
|
19
|
+
};
|
|
20
|
+
/**
 * Creates the `eas/report_maestro_test_results` build function.
 *
 * Parses Maestro JUnit XML output and uploads per-flow test case results to
 * EAS via GraphQL. The step is best-effort: every failure path logs and
 * returns instead of failing the build.
 *
 * @param ctx custom build context providing the authenticated `graphqlClient`
 * @returns a `BuildFunction` registered under the `eas` namespace
 */
function createReportMaestroTestResultsFunction(ctx) {
    return new steps_1.BuildFunction({
        namespace: 'eas',
        id: 'report_maestro_test_results',
        name: 'Report Maestro Test Results',
        __metricsId: 'eas/report_maestro_test_results',
        inputProviders: [
            steps_1.BuildStepInput.createProvider({
                id: 'junit_report_directory',
                required: false,
                allowedValueTypeName: steps_1.BuildStepInputValueTypeName.STRING,
                // NOTE(review): shares its default with `tests_directory`; confirm
                // Maestro actually writes JUnit XML into ~/.maestro/tests.
                defaultValue: '${{ env.HOME }}/.maestro/tests',
            }),
            steps_1.BuildStepInput.createProvider({
                id: 'tests_directory',
                required: false,
                allowedValueTypeName: steps_1.BuildStepInputValueTypeName.STRING,
                defaultValue: '${{ env.HOME }}/.maestro/tests',
            }),
        ],
        fn: async (stepsCtx, { inputs }) => {
            const { logger } = stepsCtx;
            // Results can only be attached to a workflow job; plain builds skip.
            const workflowJobId = stepsCtx.global.env.__WORKFLOW_JOB_ID;
            if (!workflowJobId) {
                logger.info('Not running in a workflow job, skipping test results report');
                return;
            }
            const junitDirectory = inputs.junit_report_directory.value ?? '';
            if (!junitDirectory) {
                logger.info('No JUnit directory provided, skipping test results report');
                return;
            }
            const testsDirectory = inputs.tests_directory.value;
            try {
                const flowResults = await (0, maestroResultParser_1.parseMaestroResults)(junitDirectory, testsDirectory, stepsCtx.workingDirectory);
                if (flowResults.length === 0) {
                    logger.info('No maestro test results found, skipping report');
                    return;
                }
                // Maestro allows overriding flow names via config, so different flow files can share
                // the same name. JUnit XML only contains names (not file paths), making it impossible
                // to map duplicates back to their original flow files. Skip and let the user fix it.
                // Single-pass Set scan (insertion order = first repeated occurrence),
                // instead of the O(n^2) filter + indexOf idiom.
                const seen = new Set();
                const duplicates = new Set();
                for (const { name } of flowResults) {
                    if (seen.has(name)) {
                        duplicates.add(name);
                    }
                    else {
                        seen.add(name);
                    }
                }
                if (duplicates.size > 0) {
                    logger.error(`Duplicate test case names found in JUnit output: ${[...duplicates].join(', ')}. Skipping report. Ensure each Maestro flow has a unique name.`);
                    return;
                }
                const testCaseResults = flowResults.flatMap(f => {
                    const status = FLOW_STATUS_TO_TEST_CASE_RESULT_STATUS[f.status];
                    // Flows with a status the GraphQL enum doesn't cover are dropped.
                    if (!status) {
                        return [];
                    }
                    return [
                        {
                            name: f.name,
                            path: f.path,
                            status,
                            errorMessage: f.errorMessage,
                            duration: f.duration,
                            retryCount: f.retryCount,
                            tags: f.tags,
                            properties: f.properties,
                        },
                    ];
                });
                const result = await ctx.graphqlClient
                    .mutation(CREATE_MUTATION, {
                    input: {
                        workflowJobId,
                        testCaseResults,
                    },
                })
                    .toPromise();
                if (result.error) {
                    // Use the `err` key (not `error`) so the logger's error serializer
                    // applies, consistent with the catch block below.
                    logger.error({ err: result.error }, 'GraphQL error creating test case results');
                    return;
                }
                logger.info(`Reported ${testCaseResults.length} test case result(s).`);
            }
            catch (error) {
                // Best-effort: a reporting failure must not fail the build step.
                logger.error({ err: error }, 'Failed to create test case results');
            }
        },
    });
}
|
|
@@ -223,7 +223,7 @@ function createUploadToAscBuildFunction() {
|
|
|
223
223
|
}
|
|
224
224
|
stepsCtx.logger.info('Checking build upload status...');
|
|
225
225
|
const waitingForBuildStartedAt = Date.now();
|
|
226
|
-
while (Date.now() - waitingForBuildStartedAt <
|
|
226
|
+
while (Date.now() - waitingForBuildStartedAt < 30 * 60 * 1000 /* 30 minutes */) {
|
|
227
227
|
const { data: { attributes: { state }, }, } = await client.getAsync(`/v1/buildUploads/:id`, { 'fields[buildUploads]': ['state', 'build'], include: ['build'] }, { id: buildUploadId });
|
|
228
228
|
if (state.state === 'AWAITING_UPLOAD' || state.state === 'PROCESSING') {
|
|
229
229
|
stepsCtx.logger.info(`Waiting for build upload to complete... (status = ${state.state})`);
|
|
@@ -38,10 +38,10 @@ declare const GetApi: {
|
|
|
38
38
|
attributes: z.ZodObject<{
|
|
39
39
|
assetDeliveryState: z.ZodObject<{
|
|
40
40
|
state: z.ZodEnum<{
|
|
41
|
+
FAILED: "FAILED";
|
|
41
42
|
AWAITING_UPLOAD: "AWAITING_UPLOAD";
|
|
42
43
|
UPLOAD_COMPLETE: "UPLOAD_COMPLETE";
|
|
43
44
|
COMPLETE: "COMPLETE";
|
|
44
|
-
FAILED: "FAILED";
|
|
45
45
|
}>;
|
|
46
46
|
errors: z.ZodOptional<z.ZodArray<z.ZodObject<{
|
|
47
47
|
code: z.ZodString;
|
|
@@ -76,9 +76,9 @@ declare const GetApi: {
|
|
|
76
76
|
attributes: z.ZodObject<{
|
|
77
77
|
state: z.ZodObject<{
|
|
78
78
|
state: z.ZodEnum<{
|
|
79
|
+
FAILED: "FAILED";
|
|
79
80
|
AWAITING_UPLOAD: "AWAITING_UPLOAD";
|
|
80
81
|
COMPLETE: "COMPLETE";
|
|
81
|
-
FAILED: "FAILED";
|
|
82
82
|
PROCESSING: "PROCESSING";
|
|
83
83
|
}>;
|
|
84
84
|
infos: z.ZodOptional<z.ZodArray<z.ZodObject<{
|
|
@@ -203,10 +203,10 @@ declare const PatchApi: {
|
|
|
203
203
|
attributes: z.ZodObject<{
|
|
204
204
|
assetDeliveryState: z.ZodObject<{
|
|
205
205
|
state: z.ZodEnum<{
|
|
206
|
+
FAILED: "FAILED";
|
|
206
207
|
AWAITING_UPLOAD: "AWAITING_UPLOAD";
|
|
207
208
|
UPLOAD_COMPLETE: "UPLOAD_COMPLETE";
|
|
208
209
|
COMPLETE: "COMPLETE";
|
|
209
|
-
FAILED: "FAILED";
|
|
210
210
|
}>;
|
|
211
211
|
errors: z.ZodOptional<z.ZodArray<z.ZodObject<{
|
|
212
212
|
code: z.ZodString;
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@expo/build-tools",
|
|
3
|
-
"version": "18.0.1",
|
|
3
|
+
"version": "18.0.6",
|
|
4
4
|
"bugs": "https://github.com/expo/eas-cli/issues",
|
|
5
5
|
"license": "BUSL-1.1",
|
|
6
6
|
"author": "Expo <support@expo.io>",
|
|
@@ -38,20 +38,22 @@
|
|
|
38
38
|
"@expo/config": "10.0.6",
|
|
39
39
|
"@expo/config-plugins": "9.0.12",
|
|
40
40
|
"@expo/downloader": "18.0.1",
|
|
41
|
-
"@expo/eas-build-job": "18.0.
|
|
41
|
+
"@expo/eas-build-job": "18.0.2",
|
|
42
42
|
"@expo/env": "^0.4.0",
|
|
43
43
|
"@expo/logger": "18.0.1",
|
|
44
44
|
"@expo/package-manager": "1.9.10",
|
|
45
45
|
"@expo/plist": "^0.2.0",
|
|
46
46
|
"@expo/results": "^1.0.0",
|
|
47
47
|
"@expo/spawn-async": "1.7.2",
|
|
48
|
-
"@expo/steps": "18.0.
|
|
48
|
+
"@expo/steps": "18.0.2",
|
|
49
49
|
"@expo/template-file": "18.0.1",
|
|
50
50
|
"@expo/turtle-spawn": "18.0.1",
|
|
51
51
|
"@expo/xcpretty": "^4.3.1",
|
|
52
52
|
"@google-cloud/storage": "^7.11.2",
|
|
53
53
|
"@urql/core": "^6.0.1",
|
|
54
|
+
"bplist-parser": "0.3.2",
|
|
54
55
|
"fast-glob": "^3.3.2",
|
|
56
|
+
"fast-xml-parser": "^4.4.1",
|
|
55
57
|
"fs-extra": "^11.2.0",
|
|
56
58
|
"gql.tada": "^1.8.13",
|
|
57
59
|
"joi": "^17.13.1",
|
|
@@ -59,6 +61,7 @@
|
|
|
59
61
|
"lodash": "^4.17.21",
|
|
60
62
|
"node-fetch": "^2.7.0",
|
|
61
63
|
"node-forge": "^1.3.1",
|
|
64
|
+
"node-stream-zip": "1.15.0",
|
|
62
65
|
"nullthrows": "^1.1.1",
|
|
63
66
|
"plist": "^3.1.0",
|
|
64
67
|
"promise-limit": "^2.7.0",
|
|
@@ -94,5 +97,5 @@
|
|
|
94
97
|
"typescript": "^5.5.4",
|
|
95
98
|
"uuid": "^9.0.1"
|
|
96
99
|
},
|
|
97
|
-
"gitHead": "
|
|
100
|
+
"gitHead": "df8ebc8f84809e52032661f9f08768650440e5c0"
|
|
98
101
|
}
|