@vercel/client 12.1.11 → 12.2.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,4 +1,7 @@
 "use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
 Object.defineProperty(exports, "__esModule", { value: true });
 const fs_extra_1 = require("fs-extra");
 const path_1 = require("path");
@@ -6,6 +9,9 @@ const hashes_1 = require("./utils/hashes");
 const upload_1 = require("./upload");
 const utils_1 = require("./utils");
 const errors_1 = require("./errors");
+const build_utils_1 = require("@vercel/build-utils");
+const tar_fs_1 = __importDefault(require("tar-fs"));
+const zlib_1 = require("zlib");
 function buildCreateDeployment() {
     return async function* createDeployment(clientOptions, deploymentOptions = {}) {
         const { path } = clientOptions;
@@ -52,7 +58,7 @@ function buildCreateDeployment() {
         else {
             debug(`Provided 'path' is a single file`);
         }
-        const { fileList } = await utils_1.buildFileTree(path, clientOptions, debug);
+        let { fileList } = await utils_1.buildFileTree(path, clientOptions, debug);
         // This is a useful warning because it prevents people
         // from getting confused about a deployment that renders 404.
         if (fileList.length === 0) {
@@ -62,7 +68,32 @@
                 payload: 'There are no files inside your deployment.',
             };
         }
-        const files = await hashes_1.hashes(fileList);
+        // Populate Files -> FileFsRef mapping
+        const workPath = typeof path === 'string' ? path : path[0];
+        let files;
+        if (clientOptions.archive === 'tgz') {
+            debug('Packing tarball');
+            const tarStream = tar_fs_1.default
+                .pack(workPath, {
+                    entries: fileList.map(file => path_1.relative(workPath, file)),
+                })
+                .pipe(zlib_1.createGzip());
+            const tarBuffer = await build_utils_1.streamToBuffer(tarStream);
+            debug('Packed tarball');
+            files = new Map([
+                [
+                    hashes_1.hash(tarBuffer),
+                    {
+                        names: [path_1.join(workPath, '.vercel/source.tgz')],
+                        data: tarBuffer,
+                        mode: 0o666,
+                    },
+                ],
+            ]);
+        }
+        else {
+            files = await hashes_1.hashes(fileList);
+        }
         debug(`Yielding a 'hashes-calculated' event with ${files.size} hashes`);
         yield { type: 'hashes-calculated', payload: hashes_1.mapToObject(files) };
         if (clientOptions.apiUrl) {
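
The `archive: 'tgz'` branch added above packs the resolved file list into one gzipped tarball and deploys it as a single `.vercel/source.tgz` entry instead of hashing every file individually. Below is a minimal standalone sketch of that packing step; `packTarball` and the local `streamToBuffer` are illustrative helpers (the package itself uses `streamToBuffer` from `@vercel/build-utils`), and `esModuleInterop` is assumed for the default import of `tar-fs`.

```ts
// Illustrative sketch of the tarball packing added above; not the package's exact code.
import { Readable } from 'stream';
import { relative } from 'path';
import { createGzip } from 'zlib';
import tar from 'tar-fs';

// Collect a readable stream into a single Buffer (stand-in for
// @vercel/build-utils' streamToBuffer used in the diff).
async function streamToBuffer(stream: Readable): Promise<Buffer> {
  const chunks: Buffer[] = [];
  for await (const chunk of stream) {
    chunks.push(Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk));
  }
  return Buffer.concat(chunks);
}

// Pack only the files the client resolved, as paths relative to workPath,
// then gzip the result so it can be uploaded as `.vercel/source.tgz`.
async function packTarball(workPath: string, fileList: string[]): Promise<Buffer> {
  const tarStream = tar
    .pack(workPath, {
      entries: fileList.map(file => relative(workPath, file)),
    })
    .pipe(createGzip());
  return streamToBuffer(tarStream);
}
```
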
package/dist/types.d.ts CHANGED
@@ -4,6 +4,8 @@ export { DeploymentEventType } from './utils';
 export interface Dictionary<T> {
     [key: string]: T;
 }
+export declare const VALID_ARCHIVE_FORMATS: readonly ["tgz"];
+export declare type ArchiveFormat = typeof VALID_ARCHIVE_FORMATS[number];
 export interface VercelClientOptions {
     token: string;
     path: string | string[];
@@ -18,6 +20,7 @@ export interface VercelClientOptions {
     defaultName?: string;
     isDirectory?: boolean;
     skipAutoDetectionConfirmation?: boolean;
+    archive?: ArchiveFormat;
 }
 /** @deprecated Use VercelClientOptions instead. */
 export declare type NowClientOptions = VercelClientOptions;
@@ -27,6 +30,7 @@ export interface Deployment {
     id: string;
     deploymentId?: string;
     url: string;
+    inspectorUrl: string;
     name: string;
     meta: Dictionary<string | number | boolean>;
     version: 2;
@@ -39,12 +43,14 @@ export interface Deployment {
     ownerId: string;
     readyState: 'INITIALIZING' | 'ANALYZING' | 'BUILDING' | 'DEPLOYING' | 'READY' | 'QUEUED' | 'CANCELED' | 'ERROR';
     state?: 'INITIALIZING' | 'ANALYZING' | 'BUILDING' | 'DEPLOYING' | 'READY' | 'QUEUED' | 'CANCELED' | 'ERROR';
+    ready?: number;
     createdAt: number;
     createdIn: string;
     buildingAt?: number;
     creator?: {
         uid?: string;
         email?: string;
+        name?: string;
         username?: string;
     };
     env: Dictionary<string>;
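
For context, the new `archive` option on `VercelClientOptions` is what callers set to opt into the tarball upload. A hedged usage sketch follows: it assumes `createDeployment` is re-exported from the package root (as in prior releases), that `token` and `path` are the only required options, and the event handling is purely illustrative.

```ts
// Sketch of opting into the new single-tarball upload; values are placeholders.
import { createDeployment } from '@vercel/client';

async function deployAsTarball(): Promise<void> {
  const clientOptions = {
    token: process.env.VERCEL_TOKEN ?? '',
    path: '/path/to/project',
    archive: 'tgz' as const, // ArchiveFormat: currently only 'tgz' per VALID_ARCHIVE_FORMATS
  };
  // createDeployment is an async generator; each event has a `type` and `payload`.
  for await (const event of createDeployment(clientOptions)) {
    console.log(event.type, event.payload);
  }
}
```
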
package/dist/types.js CHANGED
@@ -1,4 +1,5 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.fileNameSymbol = void 0;
+exports.fileNameSymbol = exports.VALID_ARCHIVE_FORMATS = void 0;
+exports.VALID_ARCHIVE_FORMATS = ['tgz'];
 exports.fileNameSymbol = Symbol('fileName');
package/dist/upload.js CHANGED
@@ -5,9 +5,10 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.upload = void 0;
 const https_1 = require("https");
+const stream_1 = require("stream");
+const events_1 = require("events");
 const async_retry_1 = __importDefault(require("async-retry"));
 const async_sema_1 = require("async-sema");
-const fs_extra_1 = __importDefault(require("fs-extra"));
 const utils_1 = require("./utils");
 const errors_1 = require("./errors");
 const deploy_1 = require("./deploy");
@@ -31,13 +32,13 @@ async function* upload(files, clientOptions, deploymentOptions) {
         debug(`Neither 'files', 'token' nor 'teamId are present. Exiting`);
         return;
     }
-    let missingFiles = [];
+    let shas = [];
     debug('Determining necessary files for upload...');
     for await (const event of deploy_1.deploy(files, clientOptions, deploymentOptions)) {
         if (event.type === 'error') {
             if (event.payload.code === 'missing_files') {
-                missingFiles = event.payload.missing;
-                debug(`${missingFiles.length} files are required to upload`);
+                shas = event.payload.missing;
+                debug(`${shas.length} files are required to upload`);
             }
             else {
                 return yield event;
@@ -52,13 +53,19 @@ async function* upload(files, clientOptions, deploymentOptions) {
            yield event;
        }
    }
-    const shas = missingFiles;
-    yield { type: 'file-count', payload: { total: files, missing: shas } };
+    const uploads = shas.map(sha => {
+        return new UploadProgress(sha, files.get(sha));
+    });
+    yield {
+        type: 'file-count',
+        payload: { total: files, missing: shas, uploads },
+    };
     const uploadList = {};
     debug('Building an upload list...');
     const semaphore = new async_sema_1.Sema(50, { capacity: 50 });
     const agent = new https_1.Agent({ keepAlive: true });
-    shas.map((sha) => {
+    shas.forEach((sha, index) => {
+        const uploadProgress = uploads[index];
         uploadList[sha] = async_retry_1.default(async (bail) => {
             const file = files.get(sha);
             if (!file) {
@@ -66,16 +73,25 @@ async function* upload(files, clientOptions, deploymentOptions) {
                 return bail(new Error(`File ${sha} is undefined`));
             }
             await semaphore.acquire();
-            const fPath = file.names[0];
-            let body = null;
-            const stat = await fs_extra_1.default.lstat(fPath);
-            if (stat.isSymbolicLink()) {
-                body = await fs_extra_1.default.readlink(fPath);
-            }
-            else {
-                body = fs_extra_1.default.createReadStream(fPath);
-            }
             const { data } = file;
+            uploadProgress.bytesUploaded = 0;
+            // Split out into chunks
+            const body = new stream_1.Readable();
+            const originalRead = body.read.bind(body);
+            body.read = function (...args) {
+                const chunk = originalRead(...args);
+                if (chunk) {
+                    uploadProgress.bytesUploaded += chunk.length;
+                    uploadProgress.emit('progress');
+                }
+                return chunk;
+            };
+            const chunkSize = 16384; /* 16kb - default Node.js `highWaterMark` */
+            for (let i = 0; i < data.length; i += chunkSize) {
+                const chunk = data.slice(i, i + chunkSize);
+                body.push(chunk);
+            }
+            body.push(null);
             let err;
             let result;
             try {
@@ -117,12 +133,6 @@ async function* upload(files, clientOptions, deploymentOptions) {
                debug(`An unexpected error occurred in upload promise:\n${e}`);
                err = new Error(e);
            }
-            finally {
-                if (body && typeof body !== 'string') {
-                    body.close();
-                    body.destroy();
-                }
-            }
            semaphore.release();
            if (err) {
                if (isClientNetworkError(err)) {
@@ -172,3 +182,11 @@ async function* upload(files, clientOptions, deploymentOptions) {
    }
 }
 exports.upload = upload;
+class UploadProgress extends events_1.EventEmitter {
+    constructor(sha, file) {
+        super();
+        this.sha = sha;
+        this.file = file;
+        this.bytesUploaded = 0;
+    }
+}
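
With the change above, each missing file gets an `UploadProgress` emitter, exposed on the `file-count` event's `uploads` array; its `bytesUploaded` counter advances and a 'progress' event fires as the instrumented `Readable` is consumed. Below is a sketch of how a caller might subscribe to that; the `UploadProgressLike` shape and the reporting logic are assumptions layered on the fields shown in the diff.

```ts
// Consume the per-file progress added above. `sha`, `file`, `bytesUploaded`,
// and the 'progress' event come from the diff; the rest is illustrative.
import { EventEmitter } from 'events';

interface UploadProgressLike extends EventEmitter {
  sha: string;
  bytesUploaded: number;
  file?: { names: string[]; data: Buffer; mode: number };
}

function reportUploadProgress(uploads: UploadProgressLike[]): void {
  for (const upload of uploads) {
    const total = upload.file?.data.length ?? 0;
    upload.on('progress', () => {
      const pct = total > 0 ? Math.round((upload.bytesUploaded / total) * 100) : 0;
      console.log(`${upload.sha}: ${upload.bytesUploaded}/${total} bytes (${pct}%)`);
    });
  }
}
```
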
@@ -4,6 +4,13 @@ export interface DeploymentFile {
     data: Buffer;
     mode: number;
 }
+/**
+ * Computes a hash for the given buf.
+ *
+ * @param {Buffer} file data
+ * @return {String} hex digest
+ */
+export declare function hash(buf: Buffer): string;
 /**
  * Transforms map to object
  * @param map with hashed files
@@ -3,7 +3,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
     return (mod && mod.__esModule) ? mod : { "default": mod };
 };
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.hashes = exports.mapToObject = void 0;
+exports.hashes = exports.mapToObject = exports.hash = void 0;
 const crypto_1 = require("crypto");
 const fs_extra_1 = __importDefault(require("fs-extra"));
 const async_sema_1 = require("async-sema");
@@ -16,6 +16,7 @@ const async_sema_1 = require("async-sema");
 function hash(buf) {
     return crypto_1.createHash('sha1').update(buf).digest('hex');
 }
+exports.hash = hash;
 /**
  * Transforms map to object
  * @param map with hashed files
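
The newly exported `hash` helper (called as `hashes_1.hash(tarBuffer)` in the tarball branch) is a plain SHA-1 hex digest of a buffer, equivalent to the sketch below.

```ts
// Equivalent of the newly exported hash(): SHA-1 hex digest of file contents,
// used as the key under which a file (or the packed tarball) is uploaded.
import { createHash } from 'crypto';

function hash(buf: Buffer): string {
  return createHash('sha1').update(buf).digest('hex');
}

// For example: hash(Buffer.from('hello')) === 'aaf4c61ddcc5e8a2dabede0f3b482cd9aea9434d'
```
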
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@vercel/client",
-  "version": "12.1.11",
+  "version": "12.2.2",
   "main": "dist/index.js",
   "typings": "dist/index.d.ts",
   "homepage": "https://vercel.com",
@@ -31,6 +31,7 @@
     "@types/node": "12.0.4",
     "@types/node-fetch": "2.5.4",
     "@types/recursive-readdir": "2.2.0",
+    "@types/tar-fs": "^2.0.1",
     "typescript": "4.3.4"
   },
   "jest": {
@@ -42,7 +43,7 @@
     ]
   },
   "dependencies": {
-    "@vercel/build-utils": "5.3.0",
+    "@vercel/build-utils": "5.4.0",
     "@vercel/routing-utils": "2.0.2",
     "@zeit/fetch": "5.2.0",
     "async-retry": "1.2.3",
@@ -55,5 +56,5 @@
     "querystring": "^0.2.0",
     "sleep-promise": "8.0.1"
   },
-  "gitHead": "47e3381c6df661168e8be335cc58df03f3cf2414"
+  "gitHead": "e7e0a55b72bc73c26661f7b2a3caa0884a5d1764"
 }