@expo/build-tools 1.0.238 → 1.0.240

package/dist/common/projectSources.d.ts CHANGED
@@ -1,4 +1,6 @@
  import { Job } from '@expo/eas-build-job';
  import { BuildContext } from '../context';
- export declare function prepareProjectSourcesAsync<TJob extends Job>(ctx: BuildContext<TJob>, destinationDirectory?: string): Promise<void>;
+ export declare function prepareProjectSourcesAsync<TJob extends Job>(ctx: BuildContext<TJob>, destinationDirectory?: string): Promise<{
+     handled: boolean;
+ }>;
  export declare function downloadAndUnpackProjectFromTarGzAsync<TJob extends Job>(ctx: BuildContext<TJob>, projectArchiveUrl: string, destinationDirectory: string): Promise<void>;
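With the new declaration, callers receive a { handled: boolean } result instead of void. A minimal consumer sketch (hypothetical, not part of the package; the relative import paths are assumptions):

// Hypothetical consumer, for illustration only. The import paths below are assumptions;
// prepareProjectSourcesAsync and its signature come from the declaration above.
import { Job } from '@expo/eas-build-job';

import { BuildContext } from '../context';
import { prepareProjectSourcesAsync } from './projectSources';

export async function ensureProjectSourcesAsync<TJob extends Job>(
  ctx: BuildContext<TJob>,
  destinationDirectory?: string
): Promise<void> {
  // Every archive type currently resolves to { handled: true }; the field mainly exists
  // so that the implementation's switch over archive types is checked for exhaustiveness.
  const { handled } = await prepareProjectSourcesAsync(ctx, destinationDirectory);
  if (!handled) {
    throw new Error('Project archive type was not handled');
  }
}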
package/dist/common/projectSources.js CHANGED
@@ -23,25 +23,39 @@ async function prepareProjectSourcesAsync(ctx, destinationDirectory = ctx.buildD
          ctx.logger.error({ err: projectArchiveResult.reason }, 'Failed to refresh project archive, falling back to the original one');
      }
      const projectArchive = (_a = projectArchiveResult.value) !== null && _a !== void 0 ? _a : ctx.job.projectArchive;
-     if (projectArchive.type === eas_build_job_1.ArchiveSourceType.GCS) {
-         throw new Error('GCS project sources should be resolved earlier to url');
-     }
-     else if (projectArchive.type === eas_build_job_1.ArchiveSourceType.PATH) {
-         await prepareProjectSourcesLocallyAsync(ctx, projectArchive.path, destinationDirectory); // used in eas build --local
-     }
-     else if (projectArchive.type === eas_build_job_1.ArchiveSourceType.URL) {
-         await downloadAndUnpackProjectFromTarGzAsync(ctx, projectArchive.url, destinationDirectory);
-     }
-     else if (projectArchive.type === eas_build_job_1.ArchiveSourceType.GIT) {
-         await (0, git_1.shallowCloneRepositoryAsync)({
-             logger: ctx.logger,
-             archiveSource: projectArchive,
-             destinationDirectory,
-         });
-     }
-     const uploadResult = await (0, results_1.asyncResult)(uploadProjectMetadataAsync(ctx, { projectDirectory: destinationDirectory }));
-     if (!uploadResult.ok) {
-         ctx.logger.warn(`Failed to upload project metadata: ${uploadResult.reason}`);
+     switch (projectArchive.type) {
+         case eas_build_job_1.ArchiveSourceType.R2:
+         case eas_build_job_1.ArchiveSourceType.GCS: {
+             throw new Error('Remote project sources should be resolved earlier to URL');
+         }
+         case eas_build_job_1.ArchiveSourceType.PATH: {
+             await prepareProjectSourcesLocallyAsync(ctx, projectArchive.path, destinationDirectory); // used in eas build --local
+             return { handled: true };
+         }
+         case eas_build_job_1.ArchiveSourceType.NONE: {
+             // May be used in no-sources jobs like submission jobs.
+             return { handled: true };
+         }
+         case eas_build_job_1.ArchiveSourceType.URL: {
+             await downloadAndUnpackProjectFromTarGzAsync(ctx, projectArchive.url, destinationDirectory);
+             const uploadResult = await (0, results_1.asyncResult)(uploadProjectMetadataAsync(ctx, { projectDirectory: destinationDirectory }));
+             if (!uploadResult.ok) {
+                 ctx.logger.warn(`Failed to upload project metadata: ${uploadResult.reason}`);
+             }
+             return { handled: true };
+         }
+         case eas_build_job_1.ArchiveSourceType.GIT: {
+             await (0, git_1.shallowCloneRepositoryAsync)({
+                 logger: ctx.logger,
+                 archiveSource: projectArchive,
+                 destinationDirectory,
+             });
+             const uploadResult = await (0, results_1.asyncResult)(uploadProjectMetadataAsync(ctx, { projectDirectory: destinationDirectory }));
+             if (!uploadResult.ok) {
+                 ctx.logger.warn(`Failed to upload project metadata: ${uploadResult.reason}`);
+             }
+             return { handled: true };
+         }
      }
  }
  async function downloadAndUnpackProjectFromTarGzAsync(ctx, projectArchiveUrl, destinationDirectory) {
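The TypeScript source embedded in the source map below carries the comment "Return type required to make switch exhaustive": because every case returns { handled: true } and the declared return type does not include undefined, the compiler flags a newly added ArchiveSourceType member (such as R2 or NONE here) that has no corresponding case. A minimal sketch of that pattern, using a stand-in enum rather than the real ArchiveSourceType:

// Stand-in enum for illustration; the package switches over ArchiveSourceType
// from @expo/eas-build-job instead.
enum SourceKind {
  Url = 'URL',
  Git = 'GIT',
  None = 'NONE',
}

// No default branch on purpose: if SourceKind gains a member that is not handled,
// the end of the function becomes reachable and, under strictNullChecks (or
// noImplicitReturns), the compiler rejects the missing return for the declared
// Promise<{ handled: boolean }> type.
async function prepareSources(kind: SourceKind): Promise<{ handled: boolean }> {
  switch (kind) {
    case SourceKind.Url: {
      // download and unpack the tarball here
      return { handled: true };
    }
    case SourceKind.Git: {
      // shallow-clone the repository here
      return { handled: true };
    }
    case SourceKind.None: {
      // nothing to prepare (e.g. jobs that carry no sources)
      return { handled: true };
    }
  }
}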
package/dist/common/projectSources.js.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"projectSources.js","sourceRoot":"","sources":["../../src/common/projectSources.ts"],"names":[],"mappings":";;;;;AAiBA,gEAmCC;AAED,wFAoBC;AA1ED,gDAAwB;AAExB,sEAAuC;AACvC,wDAA0B;AAC1B,uDAAkG;AAElG,kEAA4C;AAC5C,6BAAwB;AACxB,2CAA4C;AAC5C,4DAAoC;AACpC,uCAAmC;AAGnC,sDAAmD;AAEnD,+BAAoD;AAE7C,KAAK,UAAU,0BAA0B,CAC9C,GAAuB,EACvB,oBAAoB,GAAG,GAAG,CAAC,cAAc;;IAEzC,MAAM,oBAAoB,GAAG,MAAM,IAAA,qBAAW,EAAC,8BAA8B,CAAC,GAAG,CAAC,CAAC,CAAC;IAEpF,IAAI,CAAC,oBAAoB,CAAC,EAAE,EAAE,CAAC;QAC7B,GAAG,CAAC,MAAM,CAAC,KAAK,CACd,EAAE,GAAG,EAAE,oBAAoB,CAAC,MAAM,EAAE,EACpC,qEAAqE,CACtE,CAAC;IACJ,CAAC;IAED,MAAM,cAAc,GAAG,MAAA,oBAAoB,CAAC,KAAK,mCAAI,GAAG,CAAC,GAAG,CAAC,cAAc,CAAC;IAE5E,IAAI,cAAc,CAAC,IAAI,KAAK,iCAAiB,CAAC,GAAG,EAAE,CAAC;QAClD,MAAM,IAAI,KAAK,CAAC,uDAAuD,CAAC,CAAC;IAC3E,CAAC;SAAM,IAAI,cAAc,CAAC,IAAI,KAAK,iCAAiB,CAAC,IAAI,EAAE,CAAC;QAC1D,MAAM,iCAAiC,CAAC,GAAG,EAAE,cAAc,CAAC,IAAI,EAAE,oBAAoB,CAAC,CAAC,CAAC,4BAA4B;IACvH,CAAC;SAAM,IAAI,cAAc,CAAC,IAAI,KAAK,iCAAiB,CAAC,GAAG,EAAE,CAAC;QACzD,MAAM,sCAAsC,CAAC,GAAG,EAAE,cAAc,CAAC,GAAG,EAAE,oBAAoB,CAAC,CAAC;IAC9F,CAAC;SAAM,IAAI,cAAc,CAAC,IAAI,KAAK,iCAAiB,CAAC,GAAG,EAAE,CAAC;QACzD,MAAM,IAAA,iCAA2B,EAAC;YAChC,MAAM,EAAE,GAAG,CAAC,MAAM;YAClB,aAAa,EAAE,cAAc;YAC7B,oBAAoB;SACrB,CAAC,CAAC;IACL,CAAC;IAED,MAAM,YAAY,GAAG,MAAM,IAAA,qBAAW,EACpC,0BAA0B,CAAC,GAAG,EAAE,EAAE,gBAAgB,EAAE,oBAAoB,EAAE,CAAC,CAC5E,CAAC;IACF,IAAI,CAAC,YAAY,CAAC,EAAE,EAAE,CAAC;QACrB,GAAG,CAAC,MAAM,CAAC,IAAI,CAAC,sCAAsC,YAAY,CAAC,MAAM,EAAE,CAAC,CAAC;IAC/E,CAAC;AACH,CAAC;AAEM,KAAK,UAAU,sCAAsC,CAC1D,GAAuB,EACvB,iBAAyB,EACzB,oBAA4B;;IAE5B,MAAM,cAAc,GAAG,cAAI,CAAC,IAAI,CAAC,GAAG,CAAC,UAAU,EAAE,gBAAgB,CAAC,CAAC;IACnE,IAAI,CAAC;QACH,MAAM,IAAA,oBAAY,EAAC,iBAAiB,EAAE,cAAc,EAAE,EAAE,KAAK,EAAE,CAAC,EAAE,CAAC,CAAC;IACtE,CAAC;IAAC,OAAO,GAAQ,EAAE,CAAC;QAClB,MAAA,GAAG,CAAC,WAAW,oDAAG,oCAAoC,EAAE,GAAG,EAAE;YAC3D,MAAM,EAAE,EAAE,OAAO,EAAE,GAAG,CAAC,GAAG,CAAC,YAAY,EAAE;SAC1C,CAAC,CAAC;QACH,MAAM,GAAG,CAAC;IACZ,CAAC;IAED,MAAM,gBAAgB,CAAC;QACrB,WAAW,EAAE,oBAAoB;QACjC,MAAM,EAAE,cAAc;QACtB,MAAM,EAAE,GAAG,CAAC,MAAM;KACnB,CAAC,CAAC;AACL,CAAC;AAED,KAAK,UAAU,iCAAiC,CAC9C,GAAuB,EACvB,kBAA0B,EAC1B,oBAA4B;IAE5B,MAAM,cAAc,GAAG,cAAI,CAAC,IAAI,CAAC,GAAG,CAAC,UAAU,EAAE,gBAAgB,CAAC,CAAC;IACnE,MAAM,kBAAE,CAAC,IAAI,CAAC,kBAAkB,EAAE,cAAc,CAAC,CAAC;IAElD,MAAM,gBAAgB,CAAC;QACrB,WAAW,EAAE,oBAAoB;QACjC,MAAM,EAAE,cAAc;QACtB,MAAM,EAAE,GAAG,CAAC,MAAM;KACnB,CAAC,CAAC;AACL,CAAC;AAED,KAAK,UAAU,gBAAgB,CAAC,EAC9B,MAAM,EACN,MAAM,EACN,WAAW,GAKZ;IACC,MAAM,IAAA,sBAAK,EAAC,KAAK,EAAE,CAAC,IAAI,EAAE,WAAW,EAAE,oBAAoB,EAAE,GAAG,EAAE,MAAM,EAAE,MAAM,CAAC,EAAE;QACjF,MAAM;KACP,CAAC,CAAC;AACL,CAAC;AAED,KAAK,UAAU,0BAA0B,CACvC,GAAsB,EACtB,EAAE,gBAAgB,EAAgC;IAElD,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,QAAQ,EAAE,CAAC;QACtB,yBAAyB;QACzB,OAAO;IACT,CAAC;SAAM,IACL,GAAG,CAAC,GAAG,CAAC,cAAc,CAAC,IAAI,KAAK,iCAAiB,CAAC,GAAG;QACrD,GAAG,CAAC,GAAG,CAAC,cAAc,CAAC,gBAAgB,EACvC,CAAC;QACD,4CAA4C;QAC5C,OAAO;IACT,CAAC;IAED,MAAM,KAAK,GAAa,EAAE,CAAC;IAE3B,MAAM,iBAAiB,GAA4C;QACjE,EAAE,GAAG,EAAE,gBAAgB,EAAE,YAAY,EAAE,EAAE,EAAE;KAC5C,CAAC;IAEF,OAAO,iBAAiB,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;QACpC,MAAM,EAAE,GAAG,EAAE,YAAY,EAAE,GAAG,iBAAiB,CAAC,KAAK,EAAG,CAAC;QAEzD,IAAI,YAAY,KAAK,MAAM,EAAE,CAAC;YAC5B,6EAA6E;YAC7E,KAAK,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC;YACvB,SAAS;QACX,CAAC;QAED,MAAM,OAAO,GAAG,MAAM,kBAAE,CAAC,OAAO,CAAC,GAAG,EAAE,EAAE,aAAa,EAAE,IAAI,EAAE,CAAC,CAAC;QAE/D,KAAK,MAAM,KAAK,IAAI,OAAO,EAAE,CAAC;YAC5B,MAAM,QAAQ,GAAG,cAAI,CAAC,IAAI,CAAC,GAAG,EAAE,KAAK,CAAC,IAAI,CAAC,CAAC;YAC5C,MAAM,gBAAgB,GAAG,cAAI,CAAC,IAAI,CAAC,YAAY,EAAE,KAAK,CAAC,IAAI,CAAC,CAAC;YAE7D,IAAI,KAAK,CAAC,WAAW,EAAE,EAAE,CAAC;gBACxB
,iBAAiB,CAAC,IAAI,CAAC,EAAE,GAAG,EAAE,QAAQ,EAAE,YAAY,EAAE,gBAAgB,EAAE,CAAC,CAAC;YAC5E,CAAC;iBAAM,CAAC;gBACN,KAAK,CAAC,IAAI,CAAC,gBAAgB,CAAC,CAAC;YAC/B,CAAC;QACH,CAAC;IACH,CAAC;IACD,MAAM,WAAW,GAAG,KAAK;SACtB,GAAG;IACF,kCAAkC;IAClC,CAAC,CAAC,EAAE,EAAE,CAAC,cAAI,CAAC,IAAI,CAAC,SAAS,EAAE,CAAC,CAAC,CAC/B;SACA,IAAI,EAAE,CAAC,CAAC,+BAA+B;IAE1C,MAAM,MAAM,GAAG,MAAM,GAAG,CAAC,aAAa;SACnC,QAAQ,CACP,IAAA,kBAAO,EAAC;;;;;;OAMP,CAAC,EACF,EAAE,CACH;SACA,SAAS,EAAE,CAAC;IAEf,IAAI,MAAM,CAAC,KAAK,EAAE,CAAC;QACjB,MAAM,MAAM,CAAC,KAAK,CAAC;IACrB,CAAC;IAED,MAAM,aAAa,GAAG,MAAM,CAAC,IAAK,CAAC,aAAa,CAAC,mBAIhD,CAAC;IAEF,MAAM,KAAK,CAAC,aAAa,CAAC,GAAG,EAAE;QAC7B,MAAM,EAAE,KAAK;QACb,IAAI,EAAE,IAAI,CAAC,SAAS,CAAC,EAAE,cAAc,EAAE,WAAW,EAAE,CAAC;QACrD,OAAO,EAAE,aAAa,CAAC,OAAO;KAC/B,CAAC,CAAC;IAEH,MAAM,oBAAoB,GAAG,MAAM,GAAG,CAAC,aAAa;SACjD,QAAQ,CACP,IAAA,kBAAO,EAAC;;;;;;;;;;;;;;OAcP,CAAC,EACF;QACE,OAAO,EAAE,GAAG,CAAC,GAAG,CAAC,YAAY;QAC7B,mBAAmB,EAAE;YACnB,IAAI,EAAE,KAAK;YACX,SAAS,EAAE,aAAa,CAAC,SAAS;SACnC;KACF,CACF;SACA,SAAS,EAAE,CAAC;IAEf,IAAI,oBAAoB,CAAC,KAAK,EAAE,CAAC;QAC/B,MAAM,oBAAoB,CAAC,KAAK,CAAC;IACnC,CAAC;AACH,CAAC;AAED,KAAK,UAAU,8BAA8B,CAAC,GAAsB;;IAClE,MAAM,MAAM,GAAG,IAAA,oBAAU,EAAC,GAAG,CAAC,GAAG,CAAC,YAAY,EAAE,yBAAyB,CAAC,CAAC;IAC3E,MAAM,gBAAgB,GAAG,IAAA,oBAAU,EAAC,GAAG,CAAC,GAAG,CAAC,gBAAgB,EAAE,6BAA6B,CAAC,CAAC;IAC7F,MAAM,gBAAgB,GAAG,IAAA,oBAAU,EACjC,MAAA,GAAG,CAAC,GAAG,CAAC,OAAO,0CAAE,gBAAgB,EACjC,+BAA+B,CAChC,CAAC;IAEF,MAAM,QAAQ,GAAG,MAAM,IAAA,yBAAW,EAChC,IAAI,GAAG,CACL,GAAG,CAAC,GAAG,CAAC,QAAQ;QACd,CAAC,CAAC,qBAAqB,MAAM,2BAA2B;QACxD,CAAC,CAAC,uBAAuB,MAAM,2BAA2B,EAC5D,gBAAgB,CACjB,CAAC,QAAQ,EAAE,EACZ,MAAM,EACN;QACE,OAAO,EAAE;YACP,aAAa,EAAE,UAAU,gBAAgB,EAAE;SAC5C;QACD,OAAO,EAAE,KAAK;QACd,OAAO,EAAE,CAAC;QACV,MAAM,EAAE,GAAG,CAAC,MAAM;KACnB,CACF,CAAC;IAEF,IAAI,CAAC,QAAQ,CAAC,EAAE,EAAE,CAAC;QACjB,MAAM,UAAU,GAAG,MAAM,IAAA,qBAAW,EAAC,QAAQ,CAAC,IAAI,EAAE,CAAC,CAAC;QACtD,MAAM,IAAI,KAAK,CAAC,oCAAoC,QAAQ,CAAC,MAAM,MAAM,UAAU,CAAC,KAAK,EAAE,CAAC,CAAC;IAC/F,CAAC;IAED,MAAM,UAAU,GAAG,MAAM,IAAA,qBAAW,EAAC,QAAQ,CAAC,IAAI,EAAE,CAAC,CAAC;IACtD,IAAI,CAAC,UAAU,CAAC,EAAE,EAAE,CAAC;QACnB,MAAM,IAAI,KAAK,CACb,uCAAuC,QAAQ,CAAC,MAAM,MAAM,UAAU,CAAC,MAAM,EAAE,CAChF,CAAC;IACJ,CAAC;IAED,MAAM,UAAU,GAAG,OAAC,CAAC,MAAM,CAAC,EAAE,IAAI,EAAE,oCAAoB,EAAE,CAAC,CAAC,SAAS,CAAC,UAAU,CAAC,KAAK,CAAC,CAAC;IACxF,IAAI,CAAC,UAAU,CAAC,OAAO,EAAE,CAAC;QACxB,MAAM,IAAI,KAAK,CACb,gCAAgC,QAAQ,CAAC,MAAM,MAAM,OAAC,CAAC,aAAa,CAAC,UAAU,CAAC,KAAK,CAAC,EAAE,CACzF,CAAC;IACJ,CAAC;IAED,OAAO,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC;AAC9B,CAAC","sourcesContent":["import path from 'path';\n\nimport spawn from '@expo/turtle-spawn';\nimport fs from 'fs-extra';\nimport { ArchiveSourceType, Job, ArchiveSource, ArchiveSourceSchemaZ } from '@expo/eas-build-job';\nimport { bunyan } from '@expo/logger';\nimport downloadFile from '@expo/downloader';\nimport { z } from 'zod';\nimport { asyncResult } from '@expo/results';\nimport nullthrows from 'nullthrows';\nimport { graphql } from 'gql.tada';\n\nimport { BuildContext } from '../context';\nimport { turtleFetch } from '../utils/turtleFetch';\n\nimport { shallowCloneRepositoryAsync } from './git';\n\nexport async function prepareProjectSourcesAsync<TJob extends Job>(\n ctx: BuildContext<TJob>,\n destinationDirectory = ctx.buildDirectory\n): Promise<void> {\n const projectArchiveResult = await asyncResult(fetchProjectArchiveSourceAsync(ctx));\n\n if (!projectArchiveResult.ok) {\n ctx.logger.error(\n { err: projectArchiveResult.reason },\n 'Failed to refresh project archive, falling back to the original one'\n );\n }\n\n const projectArchive = 
projectArchiveResult.value ?? ctx.job.projectArchive;\n\n if (projectArchive.type === ArchiveSourceType.GCS) {\n throw new Error('GCS project sources should be resolved earlier to url');\n } else if (projectArchive.type === ArchiveSourceType.PATH) {\n await prepareProjectSourcesLocallyAsync(ctx, projectArchive.path, destinationDirectory); // used in eas build --local\n } else if (projectArchive.type === ArchiveSourceType.URL) {\n await downloadAndUnpackProjectFromTarGzAsync(ctx, projectArchive.url, destinationDirectory);\n } else if (projectArchive.type === ArchiveSourceType.GIT) {\n await shallowCloneRepositoryAsync({\n logger: ctx.logger,\n archiveSource: projectArchive,\n destinationDirectory,\n });\n }\n\n const uploadResult = await asyncResult(\n uploadProjectMetadataAsync(ctx, { projectDirectory: destinationDirectory })\n );\n if (!uploadResult.ok) {\n ctx.logger.warn(`Failed to upload project metadata: ${uploadResult.reason}`);\n }\n}\n\nexport async function downloadAndUnpackProjectFromTarGzAsync<TJob extends Job>(\n ctx: BuildContext<TJob>,\n projectArchiveUrl: string,\n destinationDirectory: string\n): Promise<void> {\n const projectTarball = path.join(ctx.workingdir, 'project.tar.gz');\n try {\n await downloadFile(projectArchiveUrl, projectTarball, { retry: 3 });\n } catch (err: any) {\n ctx.reportError?.('Failed to download project archive', err, {\n extras: { buildId: ctx.env.EAS_BUILD_ID },\n });\n throw err;\n }\n\n await unpackTarGzAsync({\n destination: destinationDirectory,\n source: projectTarball,\n logger: ctx.logger,\n });\n}\n\nasync function prepareProjectSourcesLocallyAsync<TJob extends Job>(\n ctx: BuildContext<TJob>,\n projectArchivePath: string,\n destinationDirectory: string\n): Promise<void> {\n const projectTarball = path.join(ctx.workingdir, 'project.tar.gz');\n await fs.copy(projectArchivePath, projectTarball);\n\n await unpackTarGzAsync({\n destination: destinationDirectory,\n source: projectTarball,\n logger: ctx.logger,\n });\n}\n\nasync function unpackTarGzAsync({\n logger,\n source,\n destination,\n}: {\n logger: bunyan;\n source: string;\n destination: string;\n}): Promise<void> {\n await spawn('tar', ['-C', destination, '--strip-components', '1', '-zxf', source], {\n logger,\n });\n}\n\nasync function uploadProjectMetadataAsync(\n ctx: BuildContext<Job>,\n { projectDirectory }: { projectDirectory: string }\n): Promise<void> {\n if (!ctx.job.platform) {\n // Not a build job, skip.\n return;\n } else if (\n ctx.job.projectArchive.type === ArchiveSourceType.GCS &&\n ctx.job.projectArchive.metadataLocation\n ) {\n // Build already has project metadata, skip.\n return;\n }\n\n const files: string[] = [];\n\n const directoriesToScan: { dir: string; relativePath: string }[] = [\n { dir: projectDirectory, relativePath: '' },\n ];\n\n while (directoriesToScan.length > 0) {\n const { dir, relativePath } = directoriesToScan.shift()!;\n\n if (relativePath === '.git') {\n // Do not include whole `.git` directory in the archive, just that it exists.\n files.push('.git/...');\n continue;\n }\n\n const entries = await fs.readdir(dir, { withFileTypes: true });\n\n for (const entry of entries) {\n const fullPath = path.join(dir, entry.name);\n const relativeFilePath = path.join(relativePath, entry.name);\n\n if (entry.isDirectory()) {\n directoriesToScan.push({ dir: fullPath, relativePath: relativeFilePath });\n } else {\n files.push(relativeFilePath);\n }\n }\n }\n const sortedFiles = files\n .map(\n // Prepend entries with \"project/\"\n (f) => path.join('project', 
f)\n )\n .sort(); // Sort for consistent ordering\n\n const result = await ctx.graphqlClient\n .mutation(\n graphql(`\n mutation {\n uploadSession {\n createUploadSession(type: EAS_BUILD_GCS_PROJECT_METADATA)\n }\n }\n `),\n {}\n )\n .toPromise();\n\n if (result.error) {\n throw result.error;\n }\n\n const uploadSession = result.data!.uploadSession.createUploadSession as {\n url: string;\n bucketKey: string;\n headers: Record<string, string>;\n };\n\n await fetch(uploadSession.url, {\n method: 'PUT',\n body: JSON.stringify({ archiveContent: sortedFiles }),\n headers: uploadSession.headers,\n });\n\n const updateMetadataResult = await ctx.graphqlClient\n .mutation(\n graphql(`\n mutation UpdateTurtleBuildMetadataMutation(\n $buildId: ID!\n $projectMetadataFile: ProjectMetadataFileInput!\n ) {\n build {\n updateBuildMetadata(\n buildId: $buildId\n metadata: { projectMetadataFile: $projectMetadataFile }\n ) {\n id\n }\n }\n }\n `),\n {\n buildId: ctx.env.EAS_BUILD_ID,\n projectMetadataFile: {\n type: 'GCS',\n bucketKey: uploadSession.bucketKey,\n },\n }\n )\n .toPromise();\n\n if (updateMetadataResult.error) {\n throw updateMetadataResult.error;\n }\n}\n\nasync function fetchProjectArchiveSourceAsync(ctx: BuildContext<Job>): Promise<ArchiveSource> {\n const taskId = nullthrows(ctx.env.EAS_BUILD_ID, 'EAS_BUILD_ID is not set');\n const expoApiServerURL = nullthrows(ctx.env.__API_SERVER_URL, '__API_SERVER_URL is not set');\n const robotAccessToken = nullthrows(\n ctx.job.secrets?.robotAccessToken,\n 'robot access token is not set'\n );\n\n const response = await turtleFetch(\n new URL(\n ctx.job.platform\n ? `/v2/turtle-builds/${taskId}/download-project-archive`\n : `/v2/turtle-job-runs/${taskId}/download-project-archive`,\n expoApiServerURL\n ).toString(),\n 'POST',\n {\n headers: {\n Authorization: `Bearer ${robotAccessToken}`,\n },\n timeout: 20000,\n retries: 3,\n logger: ctx.logger,\n }\n );\n\n if (!response.ok) {\n const textResult = await asyncResult(response.text());\n throw new Error(`Unexpected response from server (${response.status}): ${textResult.value}`);\n }\n\n const jsonResult = await asyncResult(response.json());\n if (!jsonResult.ok) {\n throw new Error(\n `Expected JSON response from server (${response.status}): ${jsonResult.reason}`\n );\n }\n\n const dataResult = z.object({ data: ArchiveSourceSchemaZ }).safeParse(jsonResult.value);\n if (!dataResult.success) {\n throw new Error(\n `Unexpected data from server (${response.status}): ${z.prettifyError(dataResult.error)}`\n );\n }\n\n return dataResult.data.data;\n}\n"]}
+ {"version":3,"file":"projectSources.js","sourceRoot":"","sources":["../../src/common/projectSources.ts"],"names":[],"mappings":";;;;;AAiBA,gEA8DC;AAED,wFAoBC;AArGD,gDAAwB;AAExB,sEAAuC;AACvC,wDAA0B;AAC1B,uDAAkG;AAElG,kEAA4C;AAC5C,6BAAwB;AACxB,2CAA4C;AAC5C,4DAAoC;AACpC,uCAAmC;AAGnC,sDAAmD;AAEnD,+BAAoD;AAE7C,KAAK,UAAU,0BAA0B,CAC9C,GAAuB,EACvB,oBAAoB,GAAG,GAAG,CAAC,cAAc;;IAGzC,MAAM,oBAAoB,GAAG,MAAM,IAAA,qBAAW,EAAC,8BAA8B,CAAC,GAAG,CAAC,CAAC,CAAC;IAEpF,IAAI,CAAC,oBAAoB,CAAC,EAAE,EAAE,CAAC;QAC7B,GAAG,CAAC,MAAM,CAAC,KAAK,CACd,EAAE,GAAG,EAAE,oBAAoB,CAAC,MAAM,EAAE,EACpC,qEAAqE,CACtE,CAAC;IACJ,CAAC;IAED,MAAM,cAAc,GAAG,MAAA,oBAAoB,CAAC,KAAK,mCAAI,GAAG,CAAC,GAAG,CAAC,cAAc,CAAC;IAE5E,QAAQ,cAAc,CAAC,IAAI,EAAE,CAAC;QAC5B,KAAK,iCAAiB,CAAC,EAAE,CAAC;QAC1B,KAAK,iCAAiB,CAAC,GAAG,CAAC,CAAC,CAAC;YAC3B,MAAM,IAAI,KAAK,CAAC,0DAA0D,CAAC,CAAC;QAC9E,CAAC;QAED,KAAK,iCAAiB,CAAC,IAAI,CAAC,CAAC,CAAC;YAC5B,MAAM,iCAAiC,CAAC,GAAG,EAAE,cAAc,CAAC,IAAI,EAAE,oBAAoB,CAAC,CAAC,CAAC,4BAA4B;YACrH,OAAO,EAAE,OAAO,EAAE,IAAI,EAAE,CAAC;QAC3B,CAAC;QAED,KAAK,iCAAiB,CAAC,IAAI,CAAC,CAAC,CAAC;YAC5B,uDAAuD;YACvD,OAAO,EAAE,OAAO,EAAE,IAAI,EAAE,CAAC;QAC3B,CAAC;QAED,KAAK,iCAAiB,CAAC,GAAG,CAAC,CAAC,CAAC;YAC3B,MAAM,sCAAsC,CAAC,GAAG,EAAE,cAAc,CAAC,GAAG,EAAE,oBAAoB,CAAC,CAAC;YAE5F,MAAM,YAAY,GAAG,MAAM,IAAA,qBAAW,EACpC,0BAA0B,CAAC,GAAG,EAAE,EAAE,gBAAgB,EAAE,oBAAoB,EAAE,CAAC,CAC5E,CAAC;YACF,IAAI,CAAC,YAAY,CAAC,EAAE,EAAE,CAAC;gBACrB,GAAG,CAAC,MAAM,CAAC,IAAI,CAAC,sCAAsC,YAAY,CAAC,MAAM,EAAE,CAAC,CAAC;YAC/E,CAAC;YAED,OAAO,EAAE,OAAO,EAAE,IAAI,EAAE,CAAC;QAC3B,CAAC;QAED,KAAK,iCAAiB,CAAC,GAAG,CAAC,CAAC,CAAC;YAC3B,MAAM,IAAA,iCAA2B,EAAC;gBAChC,MAAM,EAAE,GAAG,CAAC,MAAM;gBAClB,aAAa,EAAE,cAAc;gBAC7B,oBAAoB;aACrB,CAAC,CAAC;YAEH,MAAM,YAAY,GAAG,MAAM,IAAA,qBAAW,EACpC,0BAA0B,CAAC,GAAG,EAAE,EAAE,gBAAgB,EAAE,oBAAoB,EAAE,CAAC,CAC5E,CAAC;YACF,IAAI,CAAC,YAAY,CAAC,EAAE,EAAE,CAAC;gBACrB,GAAG,CAAC,MAAM,CAAC,IAAI,CAAC,sCAAsC,YAAY,CAAC,MAAM,EAAE,CAAC,CAAC;YAC/E,CAAC;YAED,OAAO,EAAE,OAAO,EAAE,IAAI,EAAE,CAAC;QAC3B,CAAC;IACH,CAAC;AACH,CAAC;AAEM,KAAK,UAAU,sCAAsC,CAC1D,GAAuB,EACvB,iBAAyB,EACzB,oBAA4B;;IAE5B,MAAM,cAAc,GAAG,cAAI,CAAC,IAAI,CAAC,GAAG,CAAC,UAAU,EAAE,gBAAgB,CAAC,CAAC;IACnE,IAAI,CAAC;QACH,MAAM,IAAA,oBAAY,EAAC,iBAAiB,EAAE,cAAc,EAAE,EAAE,KAAK,EAAE,CAAC,EAAE,CAAC,CAAC;IACtE,CAAC;IAAC,OAAO,GAAQ,EAAE,CAAC;QAClB,MAAA,GAAG,CAAC,WAAW,oDAAG,oCAAoC,EAAE,GAAG,EAAE;YAC3D,MAAM,EAAE,EAAE,OAAO,EAAE,GAAG,CAAC,GAAG,CAAC,YAAY,EAAE;SAC1C,CAAC,CAAC;QACH,MAAM,GAAG,CAAC;IACZ,CAAC;IAED,MAAM,gBAAgB,CAAC;QACrB,WAAW,EAAE,oBAAoB;QACjC,MAAM,EAAE,cAAc;QACtB,MAAM,EAAE,GAAG,CAAC,MAAM;KACnB,CAAC,CAAC;AACL,CAAC;AAED,KAAK,UAAU,iCAAiC,CAC9C,GAAuB,EACvB,kBAA0B,EAC1B,oBAA4B;IAE5B,MAAM,cAAc,GAAG,cAAI,CAAC,IAAI,CAAC,GAAG,CAAC,UAAU,EAAE,gBAAgB,CAAC,CAAC;IACnE,MAAM,kBAAE,CAAC,IAAI,CAAC,kBAAkB,EAAE,cAAc,CAAC,CAAC;IAElD,MAAM,gBAAgB,CAAC;QACrB,WAAW,EAAE,oBAAoB;QACjC,MAAM,EAAE,cAAc;QACtB,MAAM,EAAE,GAAG,CAAC,MAAM;KACnB,CAAC,CAAC;AACL,CAAC;AAED,KAAK,UAAU,gBAAgB,CAAC,EAC9B,MAAM,EACN,MAAM,EACN,WAAW,GAKZ;IACC,MAAM,IAAA,sBAAK,EAAC,KAAK,EAAE,CAAC,IAAI,EAAE,WAAW,EAAE,oBAAoB,EAAE,GAAG,EAAE,MAAM,EAAE,MAAM,CAAC,EAAE;QACjF,MAAM;KACP,CAAC,CAAC;AACL,CAAC;AAED,KAAK,UAAU,0BAA0B,CACvC,GAAsB,EACtB,EAAE,gBAAgB,EAAgC;IAElD,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,QAAQ,EAAE,CAAC;QACtB,yBAAyB;QACzB,OAAO;IACT,CAAC;SAAM,IACL,GAAG,CAAC,GAAG,CAAC,cAAc,CAAC,IAAI,KAAK,iCAAiB,CAAC,GAAG;QACrD,GAAG,CAAC,GAAG,CAAC,cAAc,CAAC,gBAAgB,EACvC,CAAC;QACD,4CAA4C;QAC5C,OAAO;IACT,CAAC;IAED,MAAM,KAAK,GAAa,EAAE,CAAC;IAE3B,MAAM,iBAAiB,GAA4C;QACjE,EAAE,GAAG,EAAE,gBAAgB,EAAE,YAAY,EAAE,EAAE,EAAE;KAC5C,CAAC;IAEF,OAAO,iBAAiB,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;QACpC,MAAM,EAAE,GAAG,EAAE
,YAAY,EAAE,GAAG,iBAAiB,CAAC,KAAK,EAAG,CAAC;QAEzD,IAAI,YAAY,KAAK,MAAM,EAAE,CAAC;YAC5B,6EAA6E;YAC7E,KAAK,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC;YACvB,SAAS;QACX,CAAC;QAED,MAAM,OAAO,GAAG,MAAM,kBAAE,CAAC,OAAO,CAAC,GAAG,EAAE,EAAE,aAAa,EAAE,IAAI,EAAE,CAAC,CAAC;QAE/D,KAAK,MAAM,KAAK,IAAI,OAAO,EAAE,CAAC;YAC5B,MAAM,QAAQ,GAAG,cAAI,CAAC,IAAI,CAAC,GAAG,EAAE,KAAK,CAAC,IAAI,CAAC,CAAC;YAC5C,MAAM,gBAAgB,GAAG,cAAI,CAAC,IAAI,CAAC,YAAY,EAAE,KAAK,CAAC,IAAI,CAAC,CAAC;YAE7D,IAAI,KAAK,CAAC,WAAW,EAAE,EAAE,CAAC;gBACxB,iBAAiB,CAAC,IAAI,CAAC,EAAE,GAAG,EAAE,QAAQ,EAAE,YAAY,EAAE,gBAAgB,EAAE,CAAC,CAAC;YAC5E,CAAC;iBAAM,CAAC;gBACN,KAAK,CAAC,IAAI,CAAC,gBAAgB,CAAC,CAAC;YAC/B,CAAC;QACH,CAAC;IACH,CAAC;IACD,MAAM,WAAW,GAAG,KAAK;SACtB,GAAG;IACF,kCAAkC;IAClC,CAAC,CAAC,EAAE,EAAE,CAAC,cAAI,CAAC,IAAI,CAAC,SAAS,EAAE,CAAC,CAAC,CAC/B;SACA,IAAI,EAAE,CAAC,CAAC,+BAA+B;IAE1C,MAAM,MAAM,GAAG,MAAM,GAAG,CAAC,aAAa;SACnC,QAAQ,CACP,IAAA,kBAAO,EAAC;;;;;;OAMP,CAAC,EACF,EAAE,CACH;SACA,SAAS,EAAE,CAAC;IAEf,IAAI,MAAM,CAAC,KAAK,EAAE,CAAC;QACjB,MAAM,MAAM,CAAC,KAAK,CAAC;IACrB,CAAC;IAED,MAAM,aAAa,GAAG,MAAM,CAAC,IAAK,CAAC,aAAa,CAAC,mBAIhD,CAAC;IAEF,MAAM,KAAK,CAAC,aAAa,CAAC,GAAG,EAAE;QAC7B,MAAM,EAAE,KAAK;QACb,IAAI,EAAE,IAAI,CAAC,SAAS,CAAC,EAAE,cAAc,EAAE,WAAW,EAAE,CAAC;QACrD,OAAO,EAAE,aAAa,CAAC,OAAO;KAC/B,CAAC,CAAC;IAEH,MAAM,oBAAoB,GAAG,MAAM,GAAG,CAAC,aAAa;SACjD,QAAQ,CACP,IAAA,kBAAO,EAAC;;;;;;;;;;;;;;OAcP,CAAC,EACF;QACE,OAAO,EAAE,GAAG,CAAC,GAAG,CAAC,YAAY;QAC7B,mBAAmB,EAAE;YACnB,IAAI,EAAE,KAAK;YACX,SAAS,EAAE,aAAa,CAAC,SAAS;SACnC;KACF,CACF;SACA,SAAS,EAAE,CAAC;IAEf,IAAI,oBAAoB,CAAC,KAAK,EAAE,CAAC;QAC/B,MAAM,oBAAoB,CAAC,KAAK,CAAC;IACnC,CAAC;AACH,CAAC;AAED,KAAK,UAAU,8BAA8B,CAAC,GAAsB;;IAClE,MAAM,MAAM,GAAG,IAAA,oBAAU,EAAC,GAAG,CAAC,GAAG,CAAC,YAAY,EAAE,yBAAyB,CAAC,CAAC;IAC3E,MAAM,gBAAgB,GAAG,IAAA,oBAAU,EAAC,GAAG,CAAC,GAAG,CAAC,gBAAgB,EAAE,6BAA6B,CAAC,CAAC;IAC7F,MAAM,gBAAgB,GAAG,IAAA,oBAAU,EACjC,MAAA,GAAG,CAAC,GAAG,CAAC,OAAO,0CAAE,gBAAgB,EACjC,+BAA+B,CAChC,CAAC;IAEF,MAAM,QAAQ,GAAG,MAAM,IAAA,yBAAW,EAChC,IAAI,GAAG,CACL,GAAG,CAAC,GAAG,CAAC,QAAQ;QACd,CAAC,CAAC,qBAAqB,MAAM,2BAA2B;QACxD,CAAC,CAAC,uBAAuB,MAAM,2BAA2B,EAC5D,gBAAgB,CACjB,CAAC,QAAQ,EAAE,EACZ,MAAM,EACN;QACE,OAAO,EAAE;YACP,aAAa,EAAE,UAAU,gBAAgB,EAAE;SAC5C;QACD,OAAO,EAAE,KAAK;QACd,OAAO,EAAE,CAAC;QACV,MAAM,EAAE,GAAG,CAAC,MAAM;KACnB,CACF,CAAC;IAEF,IAAI,CAAC,QAAQ,CAAC,EAAE,EAAE,CAAC;QACjB,MAAM,UAAU,GAAG,MAAM,IAAA,qBAAW,EAAC,QAAQ,CAAC,IAAI,EAAE,CAAC,CAAC;QACtD,MAAM,IAAI,KAAK,CAAC,oCAAoC,QAAQ,CAAC,MAAM,MAAM,UAAU,CAAC,KAAK,EAAE,CAAC,CAAC;IAC/F,CAAC;IAED,MAAM,UAAU,GAAG,MAAM,IAAA,qBAAW,EAAC,QAAQ,CAAC,IAAI,EAAE,CAAC,CAAC;IACtD,IAAI,CAAC,UAAU,CAAC,EAAE,EAAE,CAAC;QACnB,MAAM,IAAI,KAAK,CACb,uCAAuC,QAAQ,CAAC,MAAM,MAAM,UAAU,CAAC,MAAM,EAAE,CAChF,CAAC;IACJ,CAAC;IAED,MAAM,UAAU,GAAG,OAAC,CAAC,MAAM,CAAC,EAAE,IAAI,EAAE,oCAAoB,EAAE,CAAC,CAAC,SAAS,CAAC,UAAU,CAAC,KAAK,CAAC,CAAC;IACxF,IAAI,CAAC,UAAU,CAAC,OAAO,EAAE,CAAC;QACxB,MAAM,IAAI,KAAK,CACb,gCAAgC,QAAQ,CAAC,MAAM,MAAM,OAAC,CAAC,aAAa,CAAC,UAAU,CAAC,KAAK,CAAC,EAAE,CACzF,CAAC;IACJ,CAAC;IAED,OAAO,UAAU,CAAC,IAAI,CAAC,IAAI,CAAC;AAC9B,CAAC","sourcesContent":["import path from 'path';\n\nimport spawn from '@expo/turtle-spawn';\nimport fs from 'fs-extra';\nimport { ArchiveSourceType, Job, ArchiveSource, ArchiveSourceSchemaZ } from '@expo/eas-build-job';\nimport { bunyan } from '@expo/logger';\nimport downloadFile from '@expo/downloader';\nimport { z } from 'zod';\nimport { asyncResult } from '@expo/results';\nimport nullthrows from 'nullthrows';\nimport { graphql } from 'gql.tada';\n\nimport { BuildContext } from '../context';\nimport { turtleFetch } from '../utils/turtleFetch';\n\nimport { 
shallowCloneRepositoryAsync } from './git';\n\nexport async function prepareProjectSourcesAsync<TJob extends Job>(\n ctx: BuildContext<TJob>,\n destinationDirectory = ctx.buildDirectory\n): // Return type required to make switch exhaustive.\nPromise<{ handled: boolean }> {\n const projectArchiveResult = await asyncResult(fetchProjectArchiveSourceAsync(ctx));\n\n if (!projectArchiveResult.ok) {\n ctx.logger.error(\n { err: projectArchiveResult.reason },\n 'Failed to refresh project archive, falling back to the original one'\n );\n }\n\n const projectArchive = projectArchiveResult.value ?? ctx.job.projectArchive;\n\n switch (projectArchive.type) {\n case ArchiveSourceType.R2:\n case ArchiveSourceType.GCS: {\n throw new Error('Remote project sources should be resolved earlier to URL');\n }\n\n case ArchiveSourceType.PATH: {\n await prepareProjectSourcesLocallyAsync(ctx, projectArchive.path, destinationDirectory); // used in eas build --local\n return { handled: true };\n }\n\n case ArchiveSourceType.NONE: {\n // May be used in no-sources jobs like submission jobs.\n return { handled: true };\n }\n\n case ArchiveSourceType.URL: {\n await downloadAndUnpackProjectFromTarGzAsync(ctx, projectArchive.url, destinationDirectory);\n\n const uploadResult = await asyncResult(\n uploadProjectMetadataAsync(ctx, { projectDirectory: destinationDirectory })\n );\n if (!uploadResult.ok) {\n ctx.logger.warn(`Failed to upload project metadata: ${uploadResult.reason}`);\n }\n\n return { handled: true };\n }\n\n case ArchiveSourceType.GIT: {\n await shallowCloneRepositoryAsync({\n logger: ctx.logger,\n archiveSource: projectArchive,\n destinationDirectory,\n });\n\n const uploadResult = await asyncResult(\n uploadProjectMetadataAsync(ctx, { projectDirectory: destinationDirectory })\n );\n if (!uploadResult.ok) {\n ctx.logger.warn(`Failed to upload project metadata: ${uploadResult.reason}`);\n }\n\n return { handled: true };\n }\n }\n}\n\nexport async function downloadAndUnpackProjectFromTarGzAsync<TJob extends Job>(\n ctx: BuildContext<TJob>,\n projectArchiveUrl: string,\n destinationDirectory: string\n): Promise<void> {\n const projectTarball = path.join(ctx.workingdir, 'project.tar.gz');\n try {\n await downloadFile(projectArchiveUrl, projectTarball, { retry: 3 });\n } catch (err: any) {\n ctx.reportError?.('Failed to download project archive', err, {\n extras: { buildId: ctx.env.EAS_BUILD_ID },\n });\n throw err;\n }\n\n await unpackTarGzAsync({\n destination: destinationDirectory,\n source: projectTarball,\n logger: ctx.logger,\n });\n}\n\nasync function prepareProjectSourcesLocallyAsync<TJob extends Job>(\n ctx: BuildContext<TJob>,\n projectArchivePath: string,\n destinationDirectory: string\n): Promise<void> {\n const projectTarball = path.join(ctx.workingdir, 'project.tar.gz');\n await fs.copy(projectArchivePath, projectTarball);\n\n await unpackTarGzAsync({\n destination: destinationDirectory,\n source: projectTarball,\n logger: ctx.logger,\n });\n}\n\nasync function unpackTarGzAsync({\n logger,\n source,\n destination,\n}: {\n logger: bunyan;\n source: string;\n destination: string;\n}): Promise<void> {\n await spawn('tar', ['-C', destination, '--strip-components', '1', '-zxf', source], {\n logger,\n });\n}\n\nasync function uploadProjectMetadataAsync(\n ctx: BuildContext<Job>,\n { projectDirectory }: { projectDirectory: string }\n): Promise<void> {\n if (!ctx.job.platform) {\n // Not a build job, skip.\n return;\n } else if (\n ctx.job.projectArchive.type === ArchiveSourceType.GCS &&\n 
ctx.job.projectArchive.metadataLocation\n ) {\n // Build already has project metadata, skip.\n return;\n }\n\n const files: string[] = [];\n\n const directoriesToScan: { dir: string; relativePath: string }[] = [\n { dir: projectDirectory, relativePath: '' },\n ];\n\n while (directoriesToScan.length > 0) {\n const { dir, relativePath } = directoriesToScan.shift()!;\n\n if (relativePath === '.git') {\n // Do not include whole `.git` directory in the archive, just that it exists.\n files.push('.git/...');\n continue;\n }\n\n const entries = await fs.readdir(dir, { withFileTypes: true });\n\n for (const entry of entries) {\n const fullPath = path.join(dir, entry.name);\n const relativeFilePath = path.join(relativePath, entry.name);\n\n if (entry.isDirectory()) {\n directoriesToScan.push({ dir: fullPath, relativePath: relativeFilePath });\n } else {\n files.push(relativeFilePath);\n }\n }\n }\n const sortedFiles = files\n .map(\n // Prepend entries with \"project/\"\n (f) => path.join('project', f)\n )\n .sort(); // Sort for consistent ordering\n\n const result = await ctx.graphqlClient\n .mutation(\n graphql(`\n mutation {\n uploadSession {\n createUploadSession(type: EAS_BUILD_GCS_PROJECT_METADATA)\n }\n }\n `),\n {}\n )\n .toPromise();\n\n if (result.error) {\n throw result.error;\n }\n\n const uploadSession = result.data!.uploadSession.createUploadSession as {\n url: string;\n bucketKey: string;\n headers: Record<string, string>;\n };\n\n await fetch(uploadSession.url, {\n method: 'PUT',\n body: JSON.stringify({ archiveContent: sortedFiles }),\n headers: uploadSession.headers,\n });\n\n const updateMetadataResult = await ctx.graphqlClient\n .mutation(\n graphql(`\n mutation UpdateTurtleBuildMetadataMutation(\n $buildId: ID!\n $projectMetadataFile: ProjectMetadataFileInput!\n ) {\n build {\n updateBuildMetadata(\n buildId: $buildId\n metadata: { projectMetadataFile: $projectMetadataFile }\n ) {\n id\n }\n }\n }\n `),\n {\n buildId: ctx.env.EAS_BUILD_ID,\n projectMetadataFile: {\n type: 'GCS',\n bucketKey: uploadSession.bucketKey,\n },\n }\n )\n .toPromise();\n\n if (updateMetadataResult.error) {\n throw updateMetadataResult.error;\n }\n}\n\nasync function fetchProjectArchiveSourceAsync(ctx: BuildContext<Job>): Promise<ArchiveSource> {\n const taskId = nullthrows(ctx.env.EAS_BUILD_ID, 'EAS_BUILD_ID is not set');\n const expoApiServerURL = nullthrows(ctx.env.__API_SERVER_URL, '__API_SERVER_URL is not set');\n const robotAccessToken = nullthrows(\n ctx.job.secrets?.robotAccessToken,\n 'robot access token is not set'\n );\n\n const response = await turtleFetch(\n new URL(\n ctx.job.platform\n ? 
`/v2/turtle-builds/${taskId}/download-project-archive`\n : `/v2/turtle-job-runs/${taskId}/download-project-archive`,\n expoApiServerURL\n ).toString(),\n 'POST',\n {\n headers: {\n Authorization: `Bearer ${robotAccessToken}`,\n },\n timeout: 20000,\n retries: 3,\n logger: ctx.logger,\n }\n );\n\n if (!response.ok) {\n const textResult = await asyncResult(response.text());\n throw new Error(`Unexpected response from server (${response.status}): ${textResult.value}`);\n }\n\n const jsonResult = await asyncResult(response.json());\n if (!jsonResult.ok) {\n throw new Error(\n `Expected JSON response from server (${response.status}): ${jsonResult.reason}`\n );\n }\n\n const dataResult = z.object({ data: ArchiveSourceSchemaZ }).safeParse(jsonResult.value);\n if (!dataResult.success) {\n throw new Error(\n `Unexpected data from server (${response.status}): ${z.prettifyError(dataResult.error)}`\n );\n }\n\n return dataResult.data.data;\n}\n"]}
package/dist/utils/outputs.js CHANGED
@@ -17,20 +17,7 @@ async function uploadJobOutputsToWwwAsync(ctx, { logger, expoApiV2BaseUrl }) {
      try {
          const workflowJobId = (0, nullthrows_1.default)(ctx.env.__WORKFLOW_JOB_ID);
          const robotAccessToken = (0, nullthrows_1.default)((_a = ctx.staticContext.job.secrets) === null || _a === void 0 ? void 0 : _a.robotAccessToken);
-         const interpolationContext = {
-             ...ctx.staticContext,
-             env: ctx.env,
-             always: () => true,
-             never: () => false,
-             success: () => !ctx.hasAnyPreviousStepFailed,
-             failure: () => ctx.hasAnyPreviousStepFailed,
-             fromJSON: (json) => JSON.parse(json),
-             toJSON: (value) => JSON.stringify(value),
-             contains: (value, substring) => value.includes(substring),
-             startsWith: (value, prefix) => value.startsWith(prefix),
-             endsWith: (value, suffix) => value.endsWith(suffix),
-             hashFiles: (...patterns) => ctx.hashFiles(...patterns),
-         };
+         const interpolationContext = ctx.getInterpolationContext();
          logger.debug({ dynamicValues: interpolationContext }, 'Using dynamic values');
          const outputs = collectJobOutputs({
              jobOutputDefinitions: ctx.staticContext.job.outputs,
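The inline object removed above now lives behind ctx.getInterpolationContext() (ctx is a BuildStepGlobalContext from @expo/steps, per the source map below). A rough sketch of what such a centralizing helper could look like, based only on the fields visible in the deleted code (the spread of ctx.staticContext is omitted for brevity); the class and type names here are hypothetical, not the @expo/steps implementation:

// Hypothetical sketch, not the @expo/steps API. It only mirrors the helpers that the
// deleted inline object used to build by hand.
type InterpolationFunctions = {
  env: Record<string, string | undefined>;
  always: () => boolean;
  never: () => boolean;
  success: () => boolean;
  failure: () => boolean;
  fromJSON: (json: string) => unknown;
  toJSON: (value: unknown) => string;
  contains: (value: string, substring: string) => boolean;
  startsWith: (value: string, prefix: string) => boolean;
  endsWith: (value: string, suffix: string) => boolean;
  hashFiles: (...patterns: string[]) => string;
};

class ExampleStepContext {
  constructor(
    readonly env: Record<string, string | undefined>,
    readonly hasAnyPreviousStepFailed: boolean,
    private readonly hashFilesImpl: (...patterns: string[]) => string
  ) {}

  // One method builds the interpolation helpers, so callers such as the job-outputs
  // upload above no longer assemble the object themselves.
  getInterpolationContext(): InterpolationFunctions {
    return {
      env: this.env,
      always: () => true,
      never: () => false,
      success: () => !this.hasAnyPreviousStepFailed,
      failure: () => this.hasAnyPreviousStepFailed,
      fromJSON: (json) => JSON.parse(json),
      toJSON: (value) => JSON.stringify(value),
      contains: (value, substring) => value.includes(substring),
      startsWith: (value, prefix) => value.startsWith(prefix),
      endsWith: (value, suffix) => value.endsWith(suffix),
      hashFiles: (...patterns) => this.hashFilesImpl(...patterns),
    };
  }
}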
package/dist/utils/outputs.js.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"outputs.js","sourceRoot":"","sources":["../../src/utils/outputs.ts"],"names":[],"mappings":";;;;;AAOA,gEA+CC;AAGD,8CAiBC;AAzED,uCAA+D;AAE/D,4DAAoC;AAEpC,+CAA4C;AAErC,KAAK,UAAU,0BAA0B,CAC9C,GAA2B,EAC3B,EAAE,MAAM,EAAE,gBAAgB,EAAgD;;IAE1E,IAAI,CAAC,GAAG,CAAC,aAAa,CAAC,GAAG,CAAC,OAAO,EAAE,CAAC;QACnC,MAAM,CAAC,IAAI,CAAC,yCAAyC,CAAC,CAAC;QACvD,OAAO;IACT,CAAC;IAED,IAAI,CAAC;QACH,MAAM,aAAa,GAAG,IAAA,oBAAU,EAAC,GAAG,CAAC,GAAG,CAAC,iBAAiB,CAAC,CAAC;QAC5D,MAAM,gBAAgB,GAAG,IAAA,oBAAU,EAAC,MAAA,GAAG,CAAC,aAAa,CAAC,GAAG,CAAC,OAAO,0CAAE,gBAAgB,CAAC,CAAC;QAErF,MAAM,oBAAoB,GAA4B;YACpD,GAAG,GAAG,CAAC,aAAa;YACpB,GAAG,EAAE,GAAG,CAAC,GAAG;YACZ,MAAM,EAAE,GAAG,EAAE,CAAC,IAAI;YAClB,KAAK,EAAE,GAAG,EAAE,CAAC,KAAK;YAClB,OAAO,EAAE,GAAG,EAAE,CAAC,CAAC,GAAG,CAAC,wBAAwB;YAC5C,OAAO,EAAE,GAAG,EAAE,CAAC,GAAG,CAAC,wBAAwB;YAC3C,QAAQ,EAAE,CAAC,IAAY,EAAE,EAAE,CAAC,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC;YAC5C,MAAM,EAAE,CAAC,KAAc,EAAE,EAAE,CAAC,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC;YACjD,QAAQ,EAAE,CAAC,KAAK,EAAE,SAAS,EAAE,EAAE,CAAC,KAAK,CAAC,QAAQ,CAAC,SAAS,CAAC;YACzD,UAAU,EAAE,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE,CAAC,KAAK,CAAC,UAAU,CAAC,MAAM,CAAC;YACvD,QAAQ,EAAE,CAAC,KAAK,EAAE,MAAM,EAAE,EAAE,CAAC,KAAK,CAAC,QAAQ,CAAC,MAAM,CAAC;YACnD,SAAS,EAAE,CAAC,GAAG,QAAkB,EAAE,EAAE,CAAC,GAAG,CAAC,SAAS,CAAC,GAAG,QAAQ,CAAC;SACjE,CAAC;QACF,MAAM,CAAC,KAAK,CAAC,EAAE,aAAa,EAAE,oBAAoB,EAAE,EAAE,sBAAsB,CAAC,CAAC;QAE9E,MAAM,OAAO,GAAG,iBAAiB,CAAC;YAChC,oBAAoB,EAAE,GAAG,CAAC,aAAa,CAAC,GAAG,CAAC,OAAO;YACnD,oBAAoB;SACrB,CAAC,CAAC;QACH,MAAM,CAAC,IAAI,CAAC,mBAAmB,CAAC,CAAC;QAEjC,MAAM,IAAA,yBAAW,EAAC,IAAI,GAAG,CAAC,aAAa,aAAa,EAAE,EAAE,gBAAgB,CAAC,CAAC,QAAQ,EAAE,EAAE,OAAO,EAAE;YAC7F,IAAI,EAAE,EAAE,OAAO,EAAE;YACjB,OAAO,EAAE;gBACP,aAAa,EAAE,UAAU,gBAAgB,EAAE;aAC5C;YACD,OAAO,EAAE,KAAK;YACd,MAAM;SACP,CAAC,CAAC;IACL,CAAC;IAAC,OAAO,GAAG,EAAE,CAAC;QACb,MAAM,CAAC,KAAK,CAAC,EAAE,GAAG,EAAE,EAAE,0BAA0B,CAAC,CAAC;QAClD,MAAM,GAAG,CAAC;IACZ,CAAC;AACH,CAAC;AAED,kEAAkE;AAClE,SAAgB,iBAAiB,CAAC,EAChC,oBAAoB,EACpB,oBAAoB,GAIrB;IACC,MAAM,UAAU,GAAuC,EAAE,CAAC;IAC1D,KAAK,MAAM,CAAC,SAAS,EAAE,gBAAgB,CAAC,IAAI,MAAM,CAAC,OAAO,CAAC,oBAAoB,CAAC,EAAE,CAAC;QACjF,MAAM,WAAW,GAAG,gBAAgB,CAAC,OAAO,CAAC,kBAAkB,EAAE,CAAC,MAAM,EAAE,UAAU,EAAE,EAAE;;YACtF,OAAO,GAAG,MAAA,IAAA,gBAAQ,EAAC,UAAU,EAAE,oBAAoB,CAAC,mCAAI,EAAE,EAAE,CAAC;QAC/D,CAAC,CAAC,CAAC;QAEH,UAAU,CAAC,SAAS,CAAC,GAAG,WAAW,CAAC;IACtC,CAAC;IAED,OAAO,UAAU,CAAC;AACpB,CAAC","sourcesContent":["import { JobInterpolationContext } from '@expo/eas-build-job';\nimport { BuildStepGlobalContext, jsepEval } from '@expo/steps';\nimport { bunyan } from '@expo/logger';\nimport nullthrows from 'nullthrows';\n\nimport { turtleFetch } from './turtleFetch';\n\nexport async function uploadJobOutputsToWwwAsync(\n ctx: BuildStepGlobalContext,\n { logger, expoApiV2BaseUrl }: { logger: bunyan; expoApiV2BaseUrl: string }\n): Promise<void> {\n if (!ctx.staticContext.job.outputs) {\n logger.info('Job defines no outputs, skipping upload');\n return;\n }\n\n try {\n const workflowJobId = nullthrows(ctx.env.__WORKFLOW_JOB_ID);\n const robotAccessToken = nullthrows(ctx.staticContext.job.secrets?.robotAccessToken);\n\n const interpolationContext: JobInterpolationContext = {\n ...ctx.staticContext,\n env: ctx.env,\n always: () => true,\n never: () => false,\n success: () => !ctx.hasAnyPreviousStepFailed,\n failure: () => ctx.hasAnyPreviousStepFailed,\n fromJSON: (json: string) => JSON.parse(json),\n toJSON: (value: unknown) => JSON.stringify(value),\n contains: (value, substring) => value.includes(substring),\n startsWith: (value, prefix) => value.startsWith(prefix),\n 
endsWith: (value, suffix) => value.endsWith(suffix),\n hashFiles: (...patterns: string[]) => ctx.hashFiles(...patterns),\n };\n logger.debug({ dynamicValues: interpolationContext }, 'Using dynamic values');\n\n const outputs = collectJobOutputs({\n jobOutputDefinitions: ctx.staticContext.job.outputs,\n interpolationContext,\n });\n logger.info('Uploading outputs');\n\n await turtleFetch(new URL(`workflows/${workflowJobId}`, expoApiV2BaseUrl).toString(), 'PATCH', {\n json: { outputs },\n headers: {\n Authorization: `Bearer ${robotAccessToken}`,\n },\n timeout: 20000,\n logger,\n });\n } catch (err) {\n logger.error({ err }, 'Failed to upload outputs');\n throw err;\n }\n}\n\n/** Function we use to get outputs of the whole job from steps. */\nexport function collectJobOutputs({\n jobOutputDefinitions,\n interpolationContext,\n}: {\n jobOutputDefinitions: Record<string, string>;\n interpolationContext: JobInterpolationContext;\n}): Record<string, string | undefined> {\n const jobOutputs: Record<string, string | undefined> = {};\n for (const [outputKey, outputDefinition] of Object.entries(jobOutputDefinitions)) {\n const outputValue = outputDefinition.replace(/\\$\\{\\{(.+?)\\}\\}/g, (_match, expression) => {\n return `${jsepEval(expression, interpolationContext) ?? ''}`;\n });\n\n jobOutputs[outputKey] = outputValue;\n }\n\n return jobOutputs;\n}\n"]}
+ {"version":3,"file":"outputs.js","sourceRoot":"","sources":["../../src/utils/outputs.ts"],"names":[],"mappings":";;;;;AAOA,gEAkCC;AAGD,8CAiBC;AA5DD,uCAA+D;AAE/D,4DAAoC;AAEpC,+CAA4C;AAErC,KAAK,UAAU,0BAA0B,CAC9C,GAA2B,EAC3B,EAAE,MAAM,EAAE,gBAAgB,EAAgD;;IAE1E,IAAI,CAAC,GAAG,CAAC,aAAa,CAAC,GAAG,CAAC,OAAO,EAAE,CAAC;QACnC,MAAM,CAAC,IAAI,CAAC,yCAAyC,CAAC,CAAC;QACvD,OAAO;IACT,CAAC;IAED,IAAI,CAAC;QACH,MAAM,aAAa,GAAG,IAAA,oBAAU,EAAC,GAAG,CAAC,GAAG,CAAC,iBAAiB,CAAC,CAAC;QAC5D,MAAM,gBAAgB,GAAG,IAAA,oBAAU,EAAC,MAAA,GAAG,CAAC,aAAa,CAAC,GAAG,CAAC,OAAO,0CAAE,gBAAgB,CAAC,CAAC;QAErF,MAAM,oBAAoB,GAAG,GAAG,CAAC,uBAAuB,EAAE,CAAC;QAC3D,MAAM,CAAC,KAAK,CAAC,EAAE,aAAa,EAAE,oBAAoB,EAAE,EAAE,sBAAsB,CAAC,CAAC;QAE9E,MAAM,OAAO,GAAG,iBAAiB,CAAC;YAChC,oBAAoB,EAAE,GAAG,CAAC,aAAa,CAAC,GAAG,CAAC,OAAO;YACnD,oBAAoB;SACrB,CAAC,CAAC;QACH,MAAM,CAAC,IAAI,CAAC,mBAAmB,CAAC,CAAC;QAEjC,MAAM,IAAA,yBAAW,EAAC,IAAI,GAAG,CAAC,aAAa,aAAa,EAAE,EAAE,gBAAgB,CAAC,CAAC,QAAQ,EAAE,EAAE,OAAO,EAAE;YAC7F,IAAI,EAAE,EAAE,OAAO,EAAE;YACjB,OAAO,EAAE;gBACP,aAAa,EAAE,UAAU,gBAAgB,EAAE;aAC5C;YACD,OAAO,EAAE,KAAK;YACd,MAAM;SACP,CAAC,CAAC;IACL,CAAC;IAAC,OAAO,GAAG,EAAE,CAAC;QACb,MAAM,CAAC,KAAK,CAAC,EAAE,GAAG,EAAE,EAAE,0BAA0B,CAAC,CAAC;QAClD,MAAM,GAAG,CAAC;IACZ,CAAC;AACH,CAAC;AAED,kEAAkE;AAClE,SAAgB,iBAAiB,CAAC,EAChC,oBAAoB,EACpB,oBAAoB,GAIrB;IACC,MAAM,UAAU,GAAuC,EAAE,CAAC;IAC1D,KAAK,MAAM,CAAC,SAAS,EAAE,gBAAgB,CAAC,IAAI,MAAM,CAAC,OAAO,CAAC,oBAAoB,CAAC,EAAE,CAAC;QACjF,MAAM,WAAW,GAAG,gBAAgB,CAAC,OAAO,CAAC,kBAAkB,EAAE,CAAC,MAAM,EAAE,UAAU,EAAE,EAAE;;YACtF,OAAO,GAAG,MAAA,IAAA,gBAAQ,EAAC,UAAU,EAAE,oBAAoB,CAAC,mCAAI,EAAE,EAAE,CAAC;QAC/D,CAAC,CAAC,CAAC;QAEH,UAAU,CAAC,SAAS,CAAC,GAAG,WAAW,CAAC;IACtC,CAAC;IAED,OAAO,UAAU,CAAC;AACpB,CAAC","sourcesContent":["import { JobInterpolationContext } from '@expo/eas-build-job';\nimport { BuildStepGlobalContext, jsepEval } from '@expo/steps';\nimport { bunyan } from '@expo/logger';\nimport nullthrows from 'nullthrows';\n\nimport { turtleFetch } from './turtleFetch';\n\nexport async function uploadJobOutputsToWwwAsync(\n ctx: BuildStepGlobalContext,\n { logger, expoApiV2BaseUrl }: { logger: bunyan; expoApiV2BaseUrl: string }\n): Promise<void> {\n if (!ctx.staticContext.job.outputs) {\n logger.info('Job defines no outputs, skipping upload');\n return;\n }\n\n try {\n const workflowJobId = nullthrows(ctx.env.__WORKFLOW_JOB_ID);\n const robotAccessToken = nullthrows(ctx.staticContext.job.secrets?.robotAccessToken);\n\n const interpolationContext = ctx.getInterpolationContext();\n logger.debug({ dynamicValues: interpolationContext }, 'Using dynamic values');\n\n const outputs = collectJobOutputs({\n jobOutputDefinitions: ctx.staticContext.job.outputs,\n interpolationContext,\n });\n logger.info('Uploading outputs');\n\n await turtleFetch(new URL(`workflows/${workflowJobId}`, expoApiV2BaseUrl).toString(), 'PATCH', {\n json: { outputs },\n headers: {\n Authorization: `Bearer ${robotAccessToken}`,\n },\n timeout: 20000,\n logger,\n });\n } catch (err) {\n logger.error({ err }, 'Failed to upload outputs');\n throw err;\n }\n}\n\n/** Function we use to get outputs of the whole job from steps. 
*/\nexport function collectJobOutputs({\n jobOutputDefinitions,\n interpolationContext,\n}: {\n jobOutputDefinitions: Record<string, string>;\n interpolationContext: JobInterpolationContext;\n}): Record<string, string | undefined> {\n const jobOutputs: Record<string, string | undefined> = {};\n for (const [outputKey, outputDefinition] of Object.entries(jobOutputDefinitions)) {\n const outputValue = outputDefinition.replace(/\\$\\{\\{(.+?)\\}\\}/g, (_match, expression) => {\n return `${jsepEval(expression, interpolationContext) ?? ''}`;\n });\n\n jobOutputs[outputKey] = outputValue;\n }\n\n return jobOutputs;\n}\n"]}
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
    "name": "@expo/build-tools",
-   "version": "1.0.238",
+   "version": "1.0.240",
    "main": "dist/index.js",
    "types": "dist/index.d.ts",
    "files": [
@@ -31,13 +31,13 @@
      "@expo/config": "10.0.6",
      "@expo/config-plugins": "9.0.12",
      "@expo/downloader": "1.0.221",
-     "@expo/eas-build-job": "1.0.237",
+     "@expo/eas-build-job": "1.0.240",
      "@expo/env": "^0.4.0",
      "@expo/logger": "1.0.221",
      "@expo/package-manager": "1.7.0",
      "@expo/plist": "^0.2.0",
      "@expo/results": "^1.0.0",
-     "@expo/steps": "1.0.237",
+     "@expo/steps": "1.0.240",
      "@expo/template-file": "1.0.221",
      "@expo/turtle-spawn": "1.0.221",
      "@expo/xcpretty": "^4.3.1",
@@ -85,5 +85,5 @@
      "node": "20.14.0",
      "yarn": "1.22.21"
    },
-   "gitHead": "9fb0a1cfda630bdb587a4e91be174b00690965ed"
+   "gitHead": "ce21bbbbe778ee0f7d45344facef45fd264d4292"
  }