@vercel/client 10.2.3-canary.6 → 10.3.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -51,6 +51,43 @@ async function* checkDeploymentStatus(deployment, clientOptions) {
             finishedEvents.add('ready');
             yield { type: 'ready', payload: deploymentUpdate };
         }
+        if (deploymentUpdate.checksState !== undefined) {
+            if (deploymentUpdate.checksState === 'completed' &&
+                !finishedEvents.has('checks-completed')) {
+                finishedEvents.add('checks-completed');
+                if (deploymentUpdate.checksConclusion === 'succeeded') {
+                    yield {
+                        type: 'checks-conclusion-succeeded',
+                        payload: deploymentUpdate,
+                    };
+                }
+                else if (deploymentUpdate.checksConclusion === 'failed') {
+                    yield { type: 'checks-conclusion-failed', payload: deploymentUpdate };
+                }
+                else if (deploymentUpdate.checksConclusion === 'skipped') {
+                    yield {
+                        type: 'checks-conclusion-skipped',
+                        payload: deploymentUpdate,
+                    };
+                }
+                else if (deploymentUpdate.checksConclusion === 'canceled') {
+                    yield {
+                        type: 'checks-conclusion-canceled',
+                        payload: deploymentUpdate,
+                    };
+                }
+            }
+            if (deploymentUpdate.checksState === 'registered' &&
+                !finishedEvents.has('checks-registered')) {
+                finishedEvents.add('checks-registered');
+                yield { type: 'checks-registered', payload: deploymentUpdate };
+            }
+            if (deploymentUpdate.checksState === 'running' &&
+                !finishedEvents.has('checks-running')) {
+                finishedEvents.add('checks-running');
+                yield { type: 'checks-running', payload: deploymentUpdate };
+            }
+        }
         if (ready_state_1.isAliasAssigned(deploymentUpdate)) {
             debug('Deployment alias assigned');
             return yield { type: 'alias-assigned', payload: deploymentUpdate };
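The new checks-* events surface the progress of Vercel Checks to consumers of the deployment iterator. A minimal sketch of handling them, assuming token and path remain valid VercelClientOptions fields as in earlier releases (values below are placeholders):

import { createDeployment } from '@vercel/client';

async function deployAndWaitForChecks() {
  for await (const event of createDeployment({
    token: process.env.VERCEL_TOKEN!, // placeholder credentials
    path: '/path/to/project',
  })) {
    switch (event.type) {
      case 'checks-registered':
      case 'checks-running':
        console.log(`checks update: ${event.type}`);
        break;
      case 'checks-conclusion-failed':
      case 'checks-conclusion-canceled':
        throw new Error(`checks finished with ${event.type}`);
      case 'error':
        throw event.payload;
      case 'alias-assigned':
        return event.payload; // final deployment object
    }
  }
}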
@@ -1,5 +1,5 @@
-import { NowConfig, VercelClientOptions, DeploymentOptions, DeploymentEventType } from './types';
-export default function buildCreateDeployment(): (clientOptions: VercelClientOptions, deploymentOptions?: DeploymentOptions, nowConfig?: NowConfig) => AsyncIterableIterator<{
+import { VercelConfig, VercelClientOptions, DeploymentOptions, DeploymentEventType } from './types';
+export default function buildCreateDeployment(): (clientOptions: VercelClientOptions, deploymentOptions?: DeploymentOptions, nowConfig?: VercelConfig) => AsyncIterableIterator<{
     type: DeploymentEventType;
     payload: any;
 }>;
@@ -1,27 +1,8 @@
 "use strict";
-var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
-}) : (function(o, m, k, k2) {
-    if (k2 === undefined) k2 = k;
-    o[k2] = m[k];
-}));
-var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
-    Object.defineProperty(o, "default", { enumerable: true, value: v });
-}) : function(o, v) {
-    o["default"] = v;
-});
-var __importStar = (this && this.__importStar) || function (mod) {
-    if (mod && mod.__esModule) return mod;
-    var result = {};
-    if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
-    __setModuleDefault(result, mod);
-    return result;
-};
 Object.defineProperty(exports, "__esModule", { value: true });
 const fs_extra_1 = require("fs-extra");
 const path_1 = require("path");
-const hashes_1 = __importStar(require("./utils/hashes"));
+const hashes_1 = require("./utils/hashes");
 const upload_1 = require("./upload");
 const utils_1 = require("./utils");
 const errors_1 = require("./errors");
@@ -72,7 +53,7 @@ function buildCreateDeployment() {
         else {
             debug(`Provided 'path' is a single file`);
         }
-        let { fileList } = await utils_1.buildFileTree(path, clientOptions.isDirectory, debug);
+        let { fileList } = await utils_1.buildFileTree(path, clientOptions, debug);
         let configPath;
         if (!nowConfig) {
             // If the user did not provide a config file, use the one in the root directory.
@@ -102,7 +83,11 @@ function buildCreateDeployment() {
                 payload: 'There are no files inside your deployment.',
            };
         }
-        const files = await hashes_1.default(fileList);
+        const hashedFileMap = await hashes_1.hashes(fileList);
+        const nftFileList = clientOptions.prebuilt
+            ? await hashes_1.resolveNftJsonFiles(hashedFileMap)
+            : [];
+        const files = await hashes_1.hashes(nftFileList, hashedFileMap);
         debug(`Yielding a 'hashes-calculated' event with ${files.size} hashes`);
         yield { type: 'hashes-calculated', payload: hashes_1.mapToObject(files) };
         if (clientOptions.apiUrl) {
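For prebuilt deployments the hashing now runs in two passes: the explicit file list is hashed first, then any files referenced from *.nft.json manifests are resolved and hashed into the same map. A minimal sketch of that flow, assuming hashes and resolveNftJsonFiles keep the signatures shown in this diff (the dist/utils/hashes import path is internal and may change):

import { hashes, resolveNftJsonFiles } from '@vercel/client/dist/utils/hashes';

async function hashFilesForUpload(fileList: string[], prebuilt?: boolean) {
  const hashedFileMap = await hashes(fileList);   // pass 1: explicit file list
  const nftFileList = prebuilt
    ? await resolveNftJsonFiles(hashedFileMap)    // collect files referenced by *.nft.json
    : [];
  // pass 2: hash the referenced files and merge them into the same map
  return hashes(nftFileList, hashedFileMap);
}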
package/dist/index.d.ts CHANGED
@@ -1,6 +1,6 @@
 export { getVercelIgnore, buildFileTree } from './utils/index';
 export declare const createDeployment: (clientOptions: import("./types").VercelClientOptions, deploymentOptions?: import("./types").DeploymentOptions, nowConfig?: import("./types").VercelConfig) => AsyncIterableIterator<{
-    type: "warning" | "error" | "hashes-calculated" | "file-count" | "file-uploaded" | "all-files-uploaded" | "created" | "building" | "ready" | "alias-assigned" | "notice" | "tip" | "canceled";
+    type: "warning" | "error" | "hashes-calculated" | "file-count" | "file-uploaded" | "all-files-uploaded" | "created" | "building" | "ready" | "alias-assigned" | "notice" | "tip" | "canceled" | "checks-registered" | "checks-completed" | "checks-running" | "checks-conclusion-succeeded" | "checks-conclusion-failed" | "checks-conclusion-skipped" | "checks-conclusion-canceled";
     payload: any;
 }>;
 export * from './errors';
package/dist/types.d.ts CHANGED
@@ -1,4 +1,4 @@
-import { Builder, BuilderFunctions } from '@vercel/build-utils';
+import { Builder, BuilderFunctions, ProjectSettings } from '@vercel/build-utils';
 import { Header, Route, Redirect, Rewrite } from '@vercel/routing-utils';
 export { DeploymentEventType } from './utils';
 export interface Dictionary<T> {
@@ -11,6 +11,8 @@ export interface VercelClientOptions {
     teamId?: string;
     apiUrl?: string;
     force?: boolean;
+    prebuilt?: boolean;
+    rootDirectory?: string;
     withCache?: boolean;
     userAgent?: string;
     defaultName?: string;
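The new prebuilt and rootDirectory options let a caller deploy an already-built .output directory. A hedged sketch of using them through the exported createDeployment, assuming token and path remain valid VercelClientOptions fields (values below are placeholders):

import { createDeployment } from '@vercel/client';

async function deployPrebuiltOutput() {
  for await (const event of createDeployment({
    token: process.env.VERCEL_TOKEN!, // placeholder
    path: '/path/to/monorepo',
    prebuilt: true,            // upload the existing .output directory, not the source tree
    rootDirectory: 'apps/web', // project location inside the repository
  })) {
    if (event.type === 'error') throw event.payload;
    if (event.type === 'ready') break;
  }
}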
@@ -88,12 +90,7 @@ export interface VercelConfig {
     scope?: string;
     alias?: string | string[];
     regions?: string[];
-    projectSettings?: {
-        devCommand?: string | null;
-        buildCommand?: string | null;
-        outputDirectory?: string | null;
-        framework?: string | null;
-    };
+    projectSettings?: ProjectSettings;
 }
 /**
  * Options that will be sent to the API.
@@ -118,9 +115,5 @@ export interface DeploymentOptions {
     name?: string;
     public?: boolean;
     meta?: Dictionary<string>;
-    projectSettings?: {
-        devCommand?: string | null;
-        buildCommand?: string | null;
-        outputDirectory?: string | null;
-    };
+    projectSettings?: ProjectSettings;
 }
@@ -15,8 +15,9 @@ export declare const mapToObject: (map: Map<string, DeploymentFile>) => {
 /**
  * Computes hashes for the contents of each file given.
  *
- * @param {Array} of {String} full paths
- * @return {Map}
+ * @param files - absolute file paths
+ * @param map - optional map of files to append
+ * @return Map of hash digest to file object
  */
-declare function hashes(files: string[]): Promise<Map<string, DeploymentFile>>;
-export default hashes;
+export declare function hashes(files: string[], map?: Map<string, DeploymentFile>): Promise<Map<string, DeploymentFile>>;
+export declare function resolveNftJsonFiles(hashedFiles: Map<string, DeploymentFile>): Promise<string[]>;
@@ -3,10 +3,11 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
     return (mod && mod.__esModule) ? mod : { "default": mod };
 };
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.mapToObject = void 0;
+exports.resolveNftJsonFiles = exports.hashes = exports.mapToObject = void 0;
 const crypto_1 = require("crypto");
 const fs_extra_1 = __importDefault(require("fs-extra"));
 const async_sema_1 = require("async-sema");
+const path_1 = require("path");
 /**
  * Computes a hash for the given buf.
  *
@@ -14,9 +15,7 @@ const async_sema_1 = require("async-sema");
  * @return {String} hex digest
  */
 function hash(buf) {
-    return crypto_1.createHash('sha1')
-        .update(buf)
-        .digest('hex');
+    return crypto_1.createHash('sha1').update(buf).digest('hex');
 }
 /**
  * Transforms map to object
@@ -34,11 +33,11 @@ exports.mapToObject = mapToObject;
 /**
  * Computes hashes for the contents of each file given.
  *
- * @param {Array} of {String} full paths
- * @return {Map}
+ * @param files - absolute file paths
+ * @param map - optional map of files to append
+ * @return Map of hash digest to file object
  */
-async function hashes(files) {
-    const map = new Map();
+async function hashes(files, map = new Map()) {
     const semaphore = new async_sema_1.Sema(100);
     await Promise.all(files.map(async (name) => {
         await semaphore.acquire();
@@ -47,7 +46,9 @@ async function hashes(files) {
         const h = hash(data);
         const entry = map.get(h);
         if (entry) {
-            entry.names.push(name);
+            const names = new Set(entry.names);
+            names.add(name);
+            entry.names = [...names];
         }
         else {
             map.set(h, { names: [name], data, mode });
@@ -56,4 +57,29 @@ async function hashes(files) {
     }));
     return map;
 }
-exports.default = hashes;
+exports.hashes = hashes;
+async function resolveNftJsonFiles(hashedFiles) {
+    const semaphore = new async_sema_1.Sema(100);
+    const existingFiles = Array.from(hashedFiles.values());
+    const resolvedFiles = new Set();
+    await Promise.all(existingFiles.map(async (file) => {
+        await semaphore.acquire();
+        const fsPath = file.names[0];
+        if (fsPath.endsWith('.nft.json')) {
+            const json = file.data.toString('utf8');
+            const { version, files } = JSON.parse(json);
+            if (version === 1 || version === 2) {
+                for (let f of files) {
+                    const relPath = typeof f === 'string' ? f : f.input;
+                    resolvedFiles.add(path_1.join(path_1.dirname(fsPath), relPath));
+                }
+            }
+            else {
+                console.error(`Invalid nft.json version: ${version}`);
+            }
+        }
+        semaphore.release();
+    }));
+    return Array.from(resolvedFiles);
+}
+exports.resolveNftJsonFiles = resolveNftJsonFiles;
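Each *.nft.json manifest lists files relative to its own location, so referenced outputs are resolved against the manifest's directory. A worked example of that resolution (the manifest path and entries below are hypothetical):

import { join, dirname } from 'path';

// Hypothetical manifest at .output/server/pages/index.js.nft.json containing:
//   { "version": 2, "files": ["../../chunks/shared.js", { "input": "../../static/data.json" }] }
const manifestPath = '.output/server/pages/index.js.nft.json';
const entries: Array<string | { input: string }> = [
  '../../chunks/shared.js',
  { input: '../../static/data.json' },
];

// Same join(dirname(...), relPath) step as resolveNftJsonFiles above.
const resolved = entries.map(f =>
  join(dirname(manifestPath), typeof f === 'string' ? f : f.input)
);
console.log(resolved);
// => [ '.output/chunks/shared.js', '.output/static/data.json' ]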
@@ -4,16 +4,16 @@ import ignore from 'ignore';
 declare type Ignore = ReturnType<typeof ignore>;
 import { VercelClientOptions, DeploymentOptions, NowConfig } from '../types';
 export declare const API_FILES = "/v2/now/files";
-declare const EVENTS_ARRAY: readonly ["hashes-calculated", "file-count", "file-uploaded", "all-files-uploaded", "created", "building", "ready", "alias-assigned", "warning", "error", "notice", "tip", "canceled"];
+declare const EVENTS_ARRAY: readonly ["hashes-calculated", "file-count", "file-uploaded", "all-files-uploaded", "created", "building", "ready", "alias-assigned", "warning", "error", "notice", "tip", "canceled", "checks-registered", "checks-completed", "checks-running", "checks-conclusion-succeeded", "checks-conclusion-failed", "checks-conclusion-skipped", "checks-conclusion-canceled"];
 export declare type DeploymentEventType = typeof EVENTS_ARRAY[number];
-export declare const EVENTS: Set<"warning" | "error" | "hashes-calculated" | "file-count" | "file-uploaded" | "all-files-uploaded" | "created" | "building" | "ready" | "alias-assigned" | "notice" | "tip" | "canceled">;
+export declare const EVENTS: Set<"warning" | "error" | "hashes-calculated" | "file-count" | "file-uploaded" | "all-files-uploaded" | "created" | "building" | "ready" | "alias-assigned" | "notice" | "tip" | "canceled" | "checks-registered" | "checks-completed" | "checks-running" | "checks-conclusion-succeeded" | "checks-conclusion-failed" | "checks-conclusion-skipped" | "checks-conclusion-canceled">;
 export declare function getApiDeploymentsUrl(metadata?: Pick<DeploymentOptions, 'builds' | 'functions'>): "/v10/now/deployments" | "/v13/now/deployments";
 export declare function parseVercelConfig(filePath?: string): Promise<NowConfig>;
-export declare function buildFileTree(path: string | string[], isDirectory: boolean, debug: Debug): Promise<{
+export declare function buildFileTree(path: string | string[], { isDirectory, prebuilt, rootDirectory, }: Pick<VercelClientOptions, 'isDirectory' | 'prebuilt' | 'rootDirectory'>, debug: Debug): Promise<{
     fileList: string[];
     ignoreList: string[];
 }>;
-export declare function getVercelIgnore(cwd: string | string[]): Promise<{
+export declare function getVercelIgnore(cwd: string | string[], prebuilt?: boolean, rootDirectory?: string): Promise<{
     ig: Ignore;
     ignores: string[];
 }>;
@@ -31,6 +31,14 @@ const EVENTS_ARRAY = [
     'notice',
     'tip',
     'canceled',
+    // Checks events
+    'checks-registered',
+    'checks-completed',
+    'checks-running',
+    'checks-conclusion-succeeded',
+    'checks-conclusion-failed',
+    'checks-conclusion-skipped',
+    'checks-conclusion-canceled',
 ];
 exports.EVENTS = new Set(EVENTS_ARRAY);
 function getApiDeploymentsUrl(metadata) {
@@ -63,10 +71,10 @@ const maybeRead = async function (path, default_) {
         return default_;
     }
 };
-async function buildFileTree(path, isDirectory, debug) {
+async function buildFileTree(path, { isDirectory, prebuilt, rootDirectory, }, debug) {
     const ignoreList = [];
     let fileList;
-    let { ig, ignores } = await getVercelIgnore(path);
+    let { ig, ignores } = await getVercelIgnore(path, prebuilt, rootDirectory);
     debug(`Found ${ignores.length} rules in .vercelignore`);
     debug('Building file tree...');
     if (isDirectory && !Array.isArray(path)) {
@@ -95,34 +103,47 @@ async function buildFileTree(path, isDirectory, debug) {
     return { fileList, ignoreList };
 }
 exports.buildFileTree = buildFileTree;
-async function getVercelIgnore(cwd) {
-    const ignores = [
-        '.hg',
-        '.git',
-        '.gitmodules',
-        '.svn',
-        '.cache',
-        '.next',
-        '.now',
-        '.vercel',
-        '.npmignore',
-        '.dockerignore',
-        '.gitignore',
-        '.*.swp',
-        '.DS_Store',
-        '.wafpicke-*',
-        '.lock-wscript',
-        '.env.local',
-        '.env.*.local',
-        '.venv',
-        'npm-debug.log',
-        'config.gypi',
-        'node_modules',
-        '__pycache__',
-        'venv',
-        'CVS',
-        '.vercel_build_output',
-    ];
+async function getVercelIgnore(cwd, prebuilt, rootDirectory) {
+    let ignores = [];
+    const outputDir = path_1.posix.join(rootDirectory || '', '.output');
+    if (prebuilt) {
+        ignores.push('*');
+        const parts = outputDir.split('/');
+        parts.forEach((_, i) => {
+            const level = parts.slice(0, i + 1).join('/');
+            ignores.push(`!${level}`);
+        });
+        ignores.push(`!${outputDir}/**`);
+    }
+    else {
+        ignores = [
+            '.hg',
+            '.git',
+            '.gitmodules',
+            '.svn',
+            '.cache',
+            '.next',
+            '.now',
+            '.vercel',
+            '.npmignore',
+            '.dockerignore',
+            '.gitignore',
+            '.*.swp',
+            '.DS_Store',
+            '.wafpicke-*',
+            '.lock-wscript',
+            '.env.local',
+            '.env.*.local',
+            '.venv',
+            'npm-debug.log',
+            'config.gypi',
+            'node_modules',
+            '__pycache__',
+            'venv',
+            'CVS',
+            `.output`,
+        ];
+    }
     const cwds = Array.isArray(cwd) ? cwd : [cwd];
     const files = await Promise.all(cwds.map(async (cwd) => {
         const [vercelignore, nowignore] = await Promise.all([
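In prebuilt mode the ignore list inverts: everything is excluded except the .output directory (optionally nested under rootDirectory), with each parent level un-ignored so the directory stays reachable. A worked example mirroring the rule construction above (the rootDirectory value is hypothetical):

import { posix } from 'path';

// Reproduces the prebuilt branch of getVercelIgnore for illustration only.
function prebuiltIgnores(rootDirectory?: string): string[] {
  const ignores: string[] = ['*'];
  const outputDir = posix.join(rootDirectory || '', '.output');
  const parts = outputDir.split('/');
  parts.forEach((_, i) => ignores.push(`!${parts.slice(0, i + 1).join('/')}`));
  ignores.push(`!${outputDir}/**`);
  return ignores;
}

console.log(prebuiltIgnores('apps/web'));
// => [ '*', '!apps', '!apps/web', '!apps/web/.output', '!apps/web/.output/**' ]
console.log(prebuiltIgnores());
// => [ '*', '!.output', '!.output/**' ]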
@@ -182,9 +203,8 @@ const fetch = async (url, token, opts = {}, debugEnabled, useNodeFetch) => {
 exports.fetch = fetch;
 const isWin = process.platform.includes('win');
 const prepareFiles = (files, clientOptions) => {
-    const preparedFiles = [...files.keys()].reduce((acc, sha) => {
-        const next = [...acc];
-        const file = files.get(sha);
+    const preparedFiles = [];
+    for (const [sha, file] of files) {
         for (const name of file.names) {
             let fileName;
             if (clientOptions.isDirectory) {
@@ -199,15 +219,14 @@ const prepareFiles = (files, clientOptions) => {
                 const segments = name.split(path_1.sep);
                 fileName = segments[segments.length - 1];
             }
-            next.push({
+            preparedFiles.push({
                 file: isWin ? fileName.replace(/\\/g, '/') : fileName,
                 size: file.data.byteLength || file.data.length,
                 mode: file.mode,
                 sha,
             });
         }
-        return next;
-    }, []);
+    }
     return preparedFiles;
 };
 exports.prepareFiles = prepareFiles;
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@vercel/client",
-  "version": "10.2.3-canary.6",
+  "version": "10.3.0",
   "main": "dist/index.js",
   "typings": "dist/index.d.ts",
   "homepage": "https://vercel.com",
@@ -40,7 +40,7 @@
     ]
   },
   "dependencies": {
-    "@vercel/build-utils": "2.12.3-canary.6",
+    "@vercel/build-utils": "2.14.0",
     "@zeit/fetch": "5.2.0",
     "async-retry": "1.2.3",
     "async-sema": "3.0.0",
@@ -52,5 +52,5 @@
     "recursive-readdir": "2.2.2",
     "sleep-promise": "8.0.1"
   },
-  "gitHead": "d3d5555d792f3d206c106199e1b9976cf6bc4ac2"
+  "gitHead": "99fa729966c4334aa2d64c592421cc65e1644bdb"
 }