@hubspot/local-dev-lib 0.0.4 → 0.0.6
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/api/customObjects.d.ts +7 -0
- package/api/customObjects.js +48 -0
- package/api/designManager.d.ts +13 -0
- package/api/designManager.js +21 -0
- package/api/fileManager.d.ts +5 -0
- package/api/fileManager.js +62 -0
- package/api/fileMapper.d.ts +10 -0
- package/api/fileMapper.js +104 -0
- package/api/functions.d.ts +4 -0
- package/api/functions.js +32 -0
- package/api/hubdb.d.ts +10 -0
- package/api/hubdb.js +61 -0
- package/api/lighthouseScore.d.ts +5 -0
- package/api/lighthouseScore.js +29 -0
- package/api/localDevAuth.d.ts +12 -0
- package/api/localDevAuth.js +38 -0
- package/api/marketplaceValidation.d.ts +5 -0
- package/api/marketplaceValidation.js +29 -0
- package/api/projects.d.ts +27 -0
- package/api/projects.js +126 -0
- package/api/sandboxHubs.d.ts +6 -0
- package/api/sandboxHubs.js +50 -0
- package/api/sandboxSync.d.ts +4 -0
- package/api/sandboxSync.js +35 -0
- package/api/secrets.d.ts +8 -0
- package/api/secrets.js +40 -0
- package/api/validateHubl.d.ts +2 -0
- package/api/validateHubl.js +18 -0
- package/config/CLIConfiguration.d.ts +22 -16
- package/config/CLIConfiguration.js +39 -9
- package/config/configFile.d.ts +4 -4
- package/config/configFile.js +2 -2
- package/config/configUtils.d.ts +8 -8
- package/config/config_DEPRECATED.d.ts +78 -0
- package/config/config_DEPRECATED.js +636 -0
- package/config/environment.d.ts +2 -3
- package/config/environment.js +11 -17
- package/config/index.d.ts +38 -0
- package/config/index.js +232 -0
- package/constants/api.d.ts +17 -0
- package/constants/api.js +20 -0
- package/constants/auth.d.ts +13 -0
- package/constants/auth.js +8 -1
- package/constants/config.d.ts +0 -4
- package/constants/config.js +1 -5
- package/constants/environments.d.ts +1 -0
- package/constants/environments.js +1 -0
- package/constants/extensions.d.ts +2 -0
- package/constants/extensions.js +4 -1
- package/constants/files.d.ts +16 -0
- package/constants/files.js +17 -1
- package/enums/build.d.ts +36 -0
- package/enums/build.js +39 -0
- package/enums/deploy.d.ts +11 -0
- package/enums/deploy.js +14 -0
- package/enums/project.d.ts +6 -0
- package/enums/project.js +9 -0
- package/errors/HubSpotAuthError.d.ts +8 -2
- package/errors/HubSpotAuthError.js +14 -0
- package/errors/apiErrors.d.ts +9 -0
- package/errors/apiErrors.js +182 -0
- package/errors/errors_DEPRECATED.d.ts +7 -0
- package/errors/errors_DEPRECATED.js +73 -0
- package/errors/fileSystemErrors.d.ts +1 -7
- package/errors/standardErrors.d.ts +7 -1
- package/errors/standardErrors.js +20 -23
- package/http/getAxiosConfig.d.ts +6 -0
- package/http/getAxiosConfig.js +22 -0
- package/http/index.d.ts +18 -0
- package/http/index.js +175 -0
- package/lang/en.lyaml +247 -0
- package/lib/cms/functions.d.ts +13 -0
- package/lib/cms/functions.js +181 -0
- package/lib/cms/handleFieldsJS.d.ts +2 -1
- package/lib/cms/handleFieldsJS.js +4 -3
- package/lib/cms/modules.js +3 -3
- package/lib/cms/processFieldsJs.d.ts +1 -0
- package/lib/cms/processFieldsJs.js +122 -0
- package/lib/cms/templates.d.ts +25 -0
- package/lib/cms/templates.js +62 -0
- package/lib/cms/themes.js +2 -2
- package/lib/cms/uploadFolder.d.ts +18 -0
- package/lib/cms/uploadFolder.js +182 -0
- package/lib/cms/watch.d.ts +20 -0
- package/lib/cms/watch.js +194 -0
- package/lib/customObjects.d.ts +5 -0
- package/lib/customObjects.js +36 -0
- package/lib/environment.d.ts +2 -1
- package/lib/fileMapper.d.ts +13 -0
- package/lib/fileMapper.js +322 -0
- package/lib/github.d.ts +6 -3
- package/lib/github.js +36 -27
- package/lib/gitignore.js +1 -46
- package/lib/hubdb.d.ts +16 -0
- package/lib/hubdb.js +130 -0
- package/lib/ignoreRules.d.ts +3 -0
- package/lib/ignoreRules.js +69 -0
- package/lib/logging/git.d.ts +2 -0
- package/lib/logging/git.js +54 -0
- package/lib/logging/logger.d.ts +44 -0
- package/lib/logging/logger.js +146 -0
- package/lib/logging/logs.d.ts +22 -0
- package/lib/logging/logs.js +82 -0
- package/lib/logging/table.d.ts +3 -0
- package/lib/logging/table.js +47 -0
- package/lib/oauth.d.ts +7 -0
- package/lib/oauth.js +44 -0
- package/lib/path.d.ts +2 -1
- package/lib/path.js +8 -3
- package/lib/personalAccessKey.d.ts +13 -0
- package/lib/personalAccessKey.js +135 -0
- package/lib/sandboxes.d.ts +14 -0
- package/lib/sandboxes.js +71 -0
- package/lib/trackUsage.d.ts +1 -0
- package/lib/trackUsage.js +53 -0
- package/lib/validate.d.ts +2 -0
- package/lib/validate.js +40 -0
- package/models/OAuth2Manager.d.ts +34 -0
- package/models/OAuth2Manager.js +126 -0
- package/package.json +25 -13
- package/types/Accounts.d.ts +66 -21
- package/types/Activity.d.ts +20 -0
- package/types/Activity.js +2 -0
- package/types/Api.d.ts +2 -0
- package/types/Api.js +2 -0
- package/types/Build.d.ts +41 -0
- package/types/Build.js +2 -0
- package/types/CLIOptions.d.ts +5 -0
- package/types/ComponentStructure.d.ts +20 -0
- package/types/ComponentStructure.js +2 -0
- package/types/Config.d.ts +17 -4
- package/types/Deploy.d.ts +42 -0
- package/types/Deploy.js +2 -0
- package/types/Error.d.ts +32 -7
- package/types/FileManager.d.ts +66 -0
- package/types/FileManager.js +2 -0
- package/types/Files.d.ts +33 -1
- package/types/Functions.d.ts +40 -0
- package/types/Functions.js +2 -0
- package/types/Github.d.ts +6 -0
- package/types/Http.d.ts +33 -0
- package/types/Http.js +2 -0
- package/types/Hubdb.d.ts +90 -0
- package/types/Hubdb.js +2 -0
- package/types/HublValidation.d.ts +59 -0
- package/types/HublValidation.js +2 -0
- package/types/Lighthouse.d.ts +25 -0
- package/types/Lighthouse.js +2 -0
- package/types/MarketplaceValidation.d.ts +28 -0
- package/types/MarketplaceValidation.js +2 -0
- package/types/Project.d.ts +38 -0
- package/types/Project.js +2 -0
- package/types/Sandbox.d.ts +165 -0
- package/types/Sandbox.js +2 -0
- package/types/Schemas.d.ts +42 -0
- package/types/Schemas.js +2 -0
- package/utils/{modules.d.ts → cms/modules.d.ts} +1 -1
- package/utils/{modules.js → cms/modules.js} +3 -3
- package/utils/getAccountIdentifier.d.ts +10 -0
- package/utils/getAccountIdentifier.js +40 -0
- package/utils/git.d.ts +7 -2
- package/utils/git.js +54 -7
- package/utils/lang.d.ts +4 -0
- package/utils/lang.js +9 -2
- package/utils/notify.d.ts +1 -0
- package/utils/notify.js +42 -0
- package/utils/objectUtils.d.ts +8 -0
- package/utils/objectUtils.js +33 -0
- package/constants/index.d.ts +0 -16
- package/constants/index.js +0 -12
- package/http/requestOptions.d.ts +0 -20
- package/http/requestOptions.js +0 -27
- package/lib/cms/index.d.ts +0 -10
- package/lib/cms/index.js +0 -13
- package/lib/index.d.ts +0 -11
- package/lib/index.js +0 -14
- /package/utils/{fieldsJS.d.ts → cms/fieldsJS.d.ts} +0 -0
- /package/utils/{fieldsJS.js → cms/fieldsJS.js} +0 -0
|
@@ -0,0 +1,322 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
3
|
+
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
4
|
+
};
|
|
5
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
6
|
+
exports.downloadFileOrFolder = exports.getFileMapperQueryValues = void 0;
|
|
7
|
+
const fs_extra_1 = __importDefault(require("fs-extra"));
|
|
8
|
+
const path_1 = __importDefault(require("path"));
|
|
9
|
+
const p_queue_1 = __importDefault(require("p-queue"));
|
|
10
|
+
const path_2 = require("./path");
|
|
11
|
+
const fileMapper_1 = require("../api/fileMapper");
|
|
12
|
+
const standardErrors_1 = require("../errors/standardErrors");
|
|
13
|
+
const extensions_1 = require("../constants/extensions");
|
|
14
|
+
const files_1 = require("../constants/files");
|
|
15
|
+
const fileSystemErrors_1 = require("../errors/fileSystemErrors");
|
|
16
|
+
const apiErrors_1 = require("../errors/apiErrors");
|
|
17
|
+
const logger_1 = require("../utils/logger");
|
|
18
|
+
const queue = new p_queue_1.default({
|
|
19
|
+
concurrency: 10,
|
|
20
|
+
});
|
|
21
|
+
// True when the path carries a file extension that is neither the module
// folder extension nor the functions folder extension.
function isPathToFile(filepath) {
    const extension = (0, path_2.getExt)(filepath);
    if (!extension) {
        return false;
    }
    return (extension !== extensions_1.MODULE_EXTENSION &&
        extension !== extensions_1.FUNCTIONS_EXTENSION);
}
|
|
25
|
+
// True when the path's extension marks a module folder.
function isPathToModule(filepath) {
    return (0, path_2.getExt)(filepath) === extensions_1.MODULE_EXTENSION;
}
|
|
29
|
+
// True for the remote root: an empty/whitespace-only string, or a lone "/" or "\".
function isPathToRoot(filepath) {
    if (typeof filepath !== 'string') {
        return false;
    }
    const trimmed = filepath.trim();
    return trimmed === '' || trimmed === '/' || trimmed === '\\';
}
|
|
35
|
+
// True when the path points into the shared @hubspot asset space
// (case-insensitive, optionally prefixed with a single slash or backslash).
function isPathToHubspot(filepath) {
    const isString = typeof filepath === 'string';
    return isString && /^(\/|\\)?@hubspot/i.test(filepath.trim());
}
|
|
40
|
+
// The API "buffer" flag is only set when operating in draft mode.
function useApiBuffer(mode) {
    const { draft } = files_1.MODE;
    return mode === draft;
}
|
|
43
|
+
// Builds the axios query params for fileMapper requests from the mode and options.
function getFileMapperQueryValues(mode, { staging, assetVersion }) {
    const params = {
        buffer: useApiBuffer(mode),
        // staging targets environment 2; production is environment 1
        environmentId: staging ? 2 : 1,
        version: assetVersion,
    };
    return { params };
}
exports.getFileMapperQueryValues = getFileMapperQueryValues;
|
|
54
|
+
// Returns a " v<N>" suffix for log output when fetching a versioned @hubspot
// asset; otherwise an empty string.
function getAssetVersionIdentifier(assetVersion, src) {
    const isHubspotAsset = typeof src !== 'undefined' && src.startsWith('@hubspot/');
    if (isHubspotAsset && typeof assetVersion !== 'undefined') {
        return ` v${assetVersion}`;
    }
    return '';
}
|
|
63
|
+
// Throws a typed error when the API returned something that is not an object
// node. Objects pass through silently.
function validateFileMapperNode(node) {
    if (node === Object(node)) {
        // Already an object — nothing to report.
        return;
    }
    let serialized;
    try {
        serialized = JSON.stringify(node, null, 2);
    }
    catch (err) {
        // Unserializable value — report it raw.
        serialized = node;
    }
    (0, standardErrors_1.throwTypeErrorWithMessage)('filemapper.invalidNode', {
        json: JSON.stringify(serialized),
    });
}
|
|
77
|
+
// Classifies a remote source path: module folder, @hubspot asset, plain file,
// account root, or (by elimination) a folder.
function getTypeDataFromPath(src) {
    const isModule = isPathToModule(src);
    const isHubspot = isPathToHubspot(src);
    const isFile = !isModule && isPathToFile(src);
    const isRoot = !isModule && !isFile && isPathToRoot(src);
    return {
        isModule,
        isHubspot,
        isFile,
        isRoot,
        // Anything that is not a plain file is treated as a folder.
        isFolder: !isFile,
    };
}
|
|
91
|
+
// Depth-first walk over a FileMapperNode tree, invoking callback(node, filepath, depth)
// for every node. A callback returning false prunes the walk below that node.
// NOTE: the top-level (depth 0) invocation always returns false by design.
function recurseFolder(node, callback, filepath = '', depth = 0) {
    validateFileMapperNode(node);
    const isRootFolder = node.folder && depth === 0;
    if (isRootFolder) {
        if (!filepath) {
            // Default the local path to the root folder's own name.
            filepath = node.name;
        }
    }
    else {
        filepath = path_1.default.join(filepath, node.name);
    }
    let __break = callback(node, filepath, depth);
    if (__break === false)
        return __break;
    // every() short-circuits as soon as a child subtree signals a break (false).
    __break = node.children.every(childNode => {
        __break = recurseFolder(childNode, callback, filepath, depth + 1);
        return __break !== false;
    });
    return depth === 0 ? false : __break;
}
|
|
111
|
+
// Stamps the downloaded file's access/modified times from the node's metadata,
// falling back to "now"; failures surface as file system errors.
async function writeUtimes(accountId, filepath, node) {
    const fallback = new Date();
    const accessTime = node.createdAt ? new Date(node.createdAt) : fallback;
    const modifiedTime = node.updatedAt ? new Date(node.updatedAt) : fallback;
    try {
        await fs_extra_1.default.utimes(filepath, accessTime, modifiedTime);
    }
    catch (err) {
        (0, fileSystemErrors_1.throwFileSystemError)(err, {
            filepath,
            accountId,
            write: true,
        });
    }
}
|
|
126
|
+
// A write is skipped when the file already exists locally and overwriting was
// not requested.
async function skipExisting(filepath, overwrite = false) {
    if (overwrite) {
        return false;
    }
    // pathExists resolves to a boolean, which is the skip decision itself.
    return fs_extra_1.default.pathExists(filepath);
}
|
|
135
|
+
const filemapperCallbackKeys = ['skippedExisting', 'wroteFolder'];
// Fetches one remote file as a stream, writes it to `filepath`, and stamps its
// timestamps. No-ops on blank source paths and on existing files (unless
// options.overwrite is set); rejects disallowed file extensions.
async function fetchAndWriteFileStream(accountId, srcPath, filepath, mode, options = {}, logCallbacks) {
    const logger = (0, logger_1.makeTypedLogger)(logCallbacks, 'filemapper');
    if (typeof srcPath !== 'string' || !srcPath.trim()) {
        return;
    }
    if (await skipExisting(filepath, options.overwrite)) {
        logger('skippedExisting', { filepath });
        return;
    }
    if (!(0, path_2.isAllowedExtension)(srcPath)) {
        (0, standardErrors_1.throwErrorWithMessage)('filemapper.invalidFileType', { srcPath });
    }
    try {
        const queryValues = getFileMapperQueryValues(mode, options);
        const node = await (0, fileMapper_1.fetchFileStream)(accountId, srcPath, filepath, queryValues);
        await writeUtimes(accountId, filepath, node);
    }
    catch (err) {
        (0, apiErrors_1.throwStatusCodeError)(err, {
            accountId,
            request: srcPath,
        });
    }
}
|
|
159
|
+
// Writes an individual file or folder node to disk (not recursive). File
// content is fetched from the API. Returns true on success, false on failure.
async function writeFileMapperNode(accountId, filepath, node, mode, options = {}, logCallbacks) {
    const logger = (0, logger_1.makeTypedLogger)(logCallbacks, 'filemapper');
    const localFilepath = (0, path_2.convertToLocalFileSystemPath)(path_1.default.resolve(filepath));
    if (await skipExisting(localFilepath, options.overwrite)) {
        logger('skippedExisting', { filepath: localFilepath });
        return true;
    }
    if (node.folder) {
        // Folder node: just create the directory locally.
        try {
            await fs_extra_1.default.ensureDir(localFilepath);
            logger('wroteFolder', { filepath: localFilepath });
            return true;
        }
        catch (err) {
            (0, fileSystemErrors_1.throwFileSystemError)(err, {
                filepath: localFilepath,
                accountId,
                write: true,
            });
            return false;
        }
    }
    // File node: fetch the content and write it.
    try {
        await fetchAndWriteFileStream(accountId, node.path, localFilepath, mode, options, logCallbacks);
        return true;
    }
    catch (err) {
        return false;
    }
}
|
|
191
|
+
// Timeouts surface either as an HTTP 408 status or a socket-level timeout code.
function isTimeout(err) {
    if (!err) {
        return false;
    }
    return err.statusCode === 408 || err.code === 'ESOCKETTIMEDOUT';
}
|
|
194
|
+
// Downloads a single remote file to destPath. The local target is resolved in
// three ways: dest is the CWD (use the source basename), dest looks like a
// file path (use it directly), or dest is a folder (append the source
// basename). Timeouts on @hubspot assets get a dedicated error message.
async function downloadFile(accountId, src, destPath, mode, options = {}, logCallbacks) {
    const logger = (0, logger_1.makeTypedLogger)(logCallbacks, 'filemapper');
    const { isFile, isHubspot } = getTypeDataFromPath(src);
    try {
        if (!isFile) {
            throw new Error(`Invalid request for file: "${src}"`);
        }
        const dest = path_1.default.resolve(destPath);
        const cwd = (0, path_2.getCwd)();
        let filepath;
        if (dest === cwd) {
            // Dest: CWD
            filepath = path_1.default.resolve(cwd, path_1.default.basename(src));
        }
        else if (isPathToFile(dest)) {
            // Dest: file path
            filepath = path_1.default.isAbsolute(dest) ? dest : path_1.default.resolve(cwd, dest);
        }
        else {
            // Dest: folder path
            const name = path_1.default.basename(src);
            filepath = path_1.default.isAbsolute(dest)
                ? path_1.default.resolve(dest, name)
                : path_1.default.resolve(cwd, dest, name);
        }
        const localFsPath = (0, path_2.convertToLocalFileSystemPath)(filepath);
        await fetchAndWriteFileStream(accountId, src, localFsPath, mode, options, logCallbacks);
        // Let any queued work drain before reporting completion.
        await queue.onIdle();
        logger('completedFetch', {
            src,
            version: getAssetVersionIdentifier(options.assetVersion, src),
            dest,
        });
    }
    catch (err) {
        const error = err;
        if (isHubspot && isTimeout(err)) {
            (0, standardErrors_1.throwErrorWithMessage)('filemapper.assetTimeout', {}, error);
        }
        else {
            (0, standardErrors_1.throwErrorWithMessage)('filemapper.failedToFetchFile', { src, dest: destPath }, error);
        }
    }
}
|
|
238
|
+
// Requests a full folder tree from the fileMapper API. @hubspot paths use the
// "default" download endpoint; the account root is requested as '@root'.
// Timeouts on @hubspot assets get a dedicated error message.
async function fetchFolderFromApi(accountId, src, mode, options = {}, logCallbacks) {
    const logger = (0, logger_1.makeTypedLogger)(logCallbacks, 'filemapper');
    const { isRoot, isFolder, isHubspot } = getTypeDataFromPath(src);
    if (!isFolder) {
        (0, standardErrors_1.throwErrorWithMessage)('filemapper.invalidFetchFolderRequest', { src });
    }
    const srcPath = isRoot ? '@root' : src;
    const queryValues = getFileMapperQueryValues(mode, options);
    try {
        const downloadFn = isHubspot ? fileMapper_1.downloadDefault : fileMapper_1.download;
        const node = await downloadFn(accountId, srcPath, queryValues);
        logger('folderFetch', { src, accountId });
        return node;
    }
    catch (err) {
        const error = err;
        if (isHubspot && isTimeout(error)) {
            (0, standardErrors_1.throwErrorWithMessage)('filemapper.assetTimeout', {}, error);
        }
        else {
            (0, apiErrors_1.throwStatusCodeError)(error, {
                accountId,
                request: src,
            });
        }
    }
}
|
|
266
|
+
// Recursively downloads a remote folder: fetches the whole tree from the API,
// then writes every node through the shared concurrency-limited queue. Throws
// when any node fails to write, or when the fetch itself fails.
async function downloadFolder(accountId, src, destPath, mode, options = {}, logCallbacks) {
    const logger = (0, logger_1.makeTypedLogger)(logCallbacks, 'filemapper');
    try {
        const node = await fetchFolderFromApi(accountId, src, mode, options, logCallbacks);
        if (!node) {
            return;
        }
        const dest = path_1.default.resolve(destPath);
        // When downloading into the CWD, nest everything under the folder's own name.
        const rootPath = dest === (0, path_2.getCwd)()
            ? (0, path_2.convertToLocalFileSystemPath)(path_1.default.resolve(dest, node.name))
            : dest;
        let success = true;
        recurseFolder(node, (childNode, filepath) => {
            // Writes run asynchronously on the queue; any failure flips `success`.
            queue.add(async () => {
                const succeeded = await writeFileMapperNode(accountId, filepath || '', childNode, mode, options, logCallbacks);
                if (succeeded === false) {
                    success = false;
                }
            });
            return success;
        }, rootPath);
        // Wait for all queued writes to drain before reporting the outcome.
        await queue.onIdle();
        if (success) {
            logger('completedFolderFetch', {
                src,
                version: getAssetVersionIdentifier(options.assetVersion, src),
                dest,
            });
        }
        else {
            (0, standardErrors_1.throwErrorWithMessage)('filemapper.incompleteFetch', { src });
        }
    }
    catch (err) {
        (0, standardErrors_1.throwErrorWithMessage)('filemapper.failedToFetchFolder', { src, dest: destPath }, err);
    }
}
|
|
303
|
+
/**
 * Fetch a file or folder from the fileMapper API and write it to the local
 * file system. Dispatches to the file or folder download path based on what
 * `src` points at. A falsy `src` is a no-op.
 *
 * @async
 * @param {FileMapperInputArguments} input
 * @returns {Promise}
 */
async function downloadFileOrFolder(accountId, src, dest, mode, options = {}, logCallbacks) {
    if (!src) {
        return;
    }
    const { isFile } = getTypeDataFromPath(src);
    const downloadFn = isFile ? downloadFile : downloadFolder;
    await downloadFn(accountId, src, dest, mode, options, logCallbacks);
}
exports.downloadFileOrFolder = downloadFileOrFolder;
|
package/lib/github.d.ts
CHANGED
|
@@ -1,10 +1,13 @@
|
|
|
1
1
|
import { GITHUB_RELEASE_TYPES } from '../constants/github';
|
|
2
|
+
import { GithubReleaseData } from '../types/Github';
|
|
2
3
|
import { ValueOf } from '../types/Utils';
|
|
3
4
|
import { LogCallbacksArg } from '../types/LogCallbacks';
|
|
4
5
|
declare global {
|
|
5
6
|
var githubToken: string;
|
|
6
7
|
}
|
|
7
|
-
|
|
8
|
+
type RepoPath = `${string}/${string}`;
|
|
9
|
+
export declare function fetchJsonFromRepository(repoPath: RepoPath, filePath: string, ref: string): Promise<JSON>;
|
|
10
|
+
export declare function fetchReleaseData(repoPath: RepoPath, tag?: string): Promise<GithubReleaseData>;
|
|
8
11
|
type CloneGithubRepoOptions = {
|
|
9
12
|
themeVersion?: string;
|
|
10
13
|
projectVersion?: string;
|
|
@@ -12,6 +15,6 @@ type CloneGithubRepoOptions = {
|
|
|
12
15
|
ref?: string;
|
|
13
16
|
};
|
|
14
17
|
declare const cloneGithubRepoCallbackKeys: string[];
|
|
15
|
-
export declare function cloneGithubRepo(dest: string, type: string,
|
|
16
|
-
export declare function downloadGithubRepoContents(
|
|
18
|
+
export declare function cloneGithubRepo(dest: string, type: string, repoPath: RepoPath, sourceDir: string, options?: CloneGithubRepoOptions, logCallbacks?: LogCallbacksArg<typeof cloneGithubRepoCallbackKeys>): Promise<boolean>;
|
|
19
|
+
export declare function downloadGithubRepoContents(repoPath: RepoPath, contentPath: string, dest: string, ref?: string, filter?: (contentPiecePath: string, downloadPath: string) => boolean): Promise<void>;
|
|
17
20
|
export {};
|
package/lib/github.js
CHANGED
|
@@ -3,7 +3,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
|
3
3
|
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
4
4
|
};
|
|
5
5
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
6
|
-
exports.downloadGithubRepoContents = exports.cloneGithubRepo = exports.fetchJsonFromRepository = void 0;
|
|
6
|
+
exports.downloadGithubRepoContents = exports.cloneGithubRepo = exports.fetchReleaseData = exports.fetchJsonFromRepository = void 0;
|
|
7
7
|
const axios_1 = __importDefault(require("axios"));
|
|
8
8
|
const path_1 = __importDefault(require("path"));
|
|
9
9
|
const fs_extra_1 = __importDefault(require("fs-extra"));
|
|
@@ -11,16 +11,16 @@ const logger_1 = require("../utils/logger");
|
|
|
11
11
|
const standardErrors_1 = require("../errors/standardErrors");
|
|
12
12
|
const archive_1 = require("./archive");
|
|
13
13
|
const github_1 = require("../constants/github");
|
|
14
|
-
const
|
|
14
|
+
const getAxiosConfig_1 = require("../http/getAxiosConfig");
|
|
15
15
|
const GITHUB_AUTH_HEADERS = {
|
|
16
16
|
authorization: global && global.githubToken ? `Bearer ${global.githubToken}` : null,
|
|
17
17
|
};
|
|
18
|
-
async function fetchJsonFromRepository(
|
|
18
|
+
async function fetchJsonFromRepository(repoPath, filePath, ref) {
|
|
19
19
|
try {
|
|
20
|
-
const
|
|
21
|
-
(0, logger_1.debug)('github.fetchJsonFromRepository', {
|
|
22
|
-
const { data } = await axios_1.default.get(
|
|
23
|
-
headers: { ...
|
|
20
|
+
const URL = `https://raw.githubusercontent.com/${repoPath}/${ref}/${filePath}`;
|
|
21
|
+
(0, logger_1.debug)('github.fetchJsonFromRepository', { url: URL });
|
|
22
|
+
const { data } = await axios_1.default.get(URL, {
|
|
23
|
+
headers: { ...getAxiosConfig_1.DEFAULT_USER_AGENT_HEADERS, ...GITHUB_AUTH_HEADERS },
|
|
24
24
|
});
|
|
25
25
|
return data;
|
|
26
26
|
}
|
|
@@ -29,17 +29,17 @@ async function fetchJsonFromRepository(repoName, filePath) {
|
|
|
29
29
|
}
|
|
30
30
|
}
|
|
31
31
|
exports.fetchJsonFromRepository = fetchJsonFromRepository;
|
|
32
|
-
async function fetchReleaseData(
|
|
32
|
+
async function fetchReleaseData(repoPath, tag = '') {
|
|
33
33
|
tag = tag.trim().toLowerCase();
|
|
34
34
|
if (tag.length && tag[0] !== 'v') {
|
|
35
35
|
tag = `v${tag}`;
|
|
36
36
|
}
|
|
37
37
|
const URI = tag
|
|
38
|
-
? `https://api.github.com/repos
|
|
39
|
-
: `https://api.github.com/repos
|
|
38
|
+
? `https://api.github.com/repos/${repoPath}/releases/tags/${tag}`
|
|
39
|
+
: `https://api.github.com/repos/${repoPath}/releases/latest`;
|
|
40
40
|
try {
|
|
41
41
|
const { data } = await axios_1.default.get(URI, {
|
|
42
|
-
headers: { ...
|
|
42
|
+
headers: { ...getAxiosConfig_1.DEFAULT_USER_AGENT_HEADERS, ...GITHUB_AUTH_HEADERS },
|
|
43
43
|
});
|
|
44
44
|
return data;
|
|
45
45
|
}
|
|
@@ -48,21 +48,22 @@ async function fetchReleaseData(repoName, tag = '') {
|
|
|
48
48
|
(0, standardErrors_1.throwErrorWithMessage)('github.fetchReleaseData', { tag: tag || 'latest' }, error);
|
|
49
49
|
}
|
|
50
50
|
}
|
|
51
|
-
|
|
51
|
+
exports.fetchReleaseData = fetchReleaseData;
|
|
52
|
+
async function downloadGithubRepoZip(repoPath, tag = '', releaseType = github_1.GITHUB_RELEASE_TYPES.RELEASE, ref) {
|
|
52
53
|
try {
|
|
53
54
|
let zipUrl;
|
|
54
55
|
if (releaseType === github_1.GITHUB_RELEASE_TYPES.REPOSITORY) {
|
|
55
|
-
(0, logger_1.debug)('github.downloadGithubRepoZip.fetching', { releaseType,
|
|
56
|
-
zipUrl = `https://api.github.com/repos
|
|
56
|
+
(0, logger_1.debug)('github.downloadGithubRepoZip.fetching', { releaseType, repoPath });
|
|
57
|
+
zipUrl = `https://api.github.com/repos/${repoPath}/zipball${ref ? `/${ref}` : ''}`;
|
|
57
58
|
}
|
|
58
59
|
else {
|
|
59
|
-
const releaseData = await fetchReleaseData(
|
|
60
|
+
const releaseData = await fetchReleaseData(repoPath, tag);
|
|
60
61
|
zipUrl = releaseData.zipball_url;
|
|
61
62
|
const { name } = releaseData;
|
|
62
63
|
(0, logger_1.debug)('github.downloadGithubRepoZip.fetchingName', { name });
|
|
63
64
|
}
|
|
64
65
|
const { data } = await axios_1.default.get(zipUrl, {
|
|
65
|
-
headers: { ...
|
|
66
|
+
headers: { ...getAxiosConfig_1.DEFAULT_USER_AGENT_HEADERS, ...GITHUB_AUTH_HEADERS },
|
|
66
67
|
});
|
|
67
68
|
(0, logger_1.debug)('github.downloadGithubRepoZip.completed');
|
|
68
69
|
return data;
|
|
@@ -72,11 +73,12 @@ async function downloadGithubRepoZip(repoName, tag = '', releaseType = github_1.
|
|
|
72
73
|
}
|
|
73
74
|
}
|
|
74
75
|
const cloneGithubRepoCallbackKeys = ['success'];
|
|
75
|
-
async function cloneGithubRepo(dest, type,
|
|
76
|
+
async function cloneGithubRepo(dest, type, repoPath, sourceDir, options = {}, logCallbacks) {
|
|
76
77
|
const logger = (0, logger_1.makeTypedLogger)(logCallbacks, 'github.cloneGithubRepo');
|
|
77
78
|
const { themeVersion, projectVersion, releaseType, ref } = options;
|
|
78
79
|
const tag = projectVersion || themeVersion;
|
|
79
|
-
const zip = await downloadGithubRepoZip(
|
|
80
|
+
const zip = await downloadGithubRepoZip(repoPath, tag, releaseType, ref);
|
|
81
|
+
const repoName = repoPath.split('/')[1];
|
|
80
82
|
const success = await (0, archive_1.extractZipArchive)(zip, repoName, dest, { sourceDir });
|
|
81
83
|
if (success) {
|
|
82
84
|
logger('success', { type, dest });
|
|
@@ -84,24 +86,25 @@ async function cloneGithubRepo(dest, type, repoName, sourceDir, options = {}, lo
|
|
|
84
86
|
return success;
|
|
85
87
|
}
|
|
86
88
|
exports.cloneGithubRepo = cloneGithubRepo;
|
|
87
|
-
async function getGitHubRepoContentsAtPath(
|
|
88
|
-
const
|
|
89
|
+
async function getGitHubRepoContentsAtPath(repoPath, path, ref) {
|
|
90
|
+
const refQuery = ref ? `?ref=${ref}` : '';
|
|
91
|
+
const contentsRequestUrl = `https://api.github.com/repos/${repoPath}/contents/${path}${refQuery}`;
|
|
89
92
|
const response = await axios_1.default.get(contentsRequestUrl, {
|
|
90
|
-
headers: { ...
|
|
93
|
+
headers: { ...getAxiosConfig_1.DEFAULT_USER_AGENT_HEADERS, ...GITHUB_AUTH_HEADERS },
|
|
91
94
|
});
|
|
92
95
|
return response.data;
|
|
93
96
|
}
|
|
94
97
|
async function fetchGitHubRepoContentFromDownloadUrl(dest, downloadUrl) {
|
|
95
98
|
const resp = await axios_1.default.get(downloadUrl, {
|
|
96
|
-
headers: { ...
|
|
99
|
+
headers: { ...getAxiosConfig_1.DEFAULT_USER_AGENT_HEADERS, ...GITHUB_AUTH_HEADERS },
|
|
97
100
|
});
|
|
98
101
|
fs_extra_1.default.writeFileSync(dest, resp.data, 'utf8');
|
|
99
102
|
}
|
|
100
|
-
// Writes files from a
|
|
101
|
-
async function downloadGithubRepoContents(
|
|
103
|
+
// Writes files from a public repository to the destination folder
|
|
104
|
+
async function downloadGithubRepoContents(repoPath, contentPath, dest, ref, filter) {
|
|
102
105
|
fs_extra_1.default.ensureDirSync(path_1.default.dirname(dest));
|
|
103
106
|
try {
|
|
104
|
-
const contentsResp = await getGitHubRepoContentsAtPath(
|
|
107
|
+
const contentsResp = await getGitHubRepoContentsAtPath(repoPath, contentPath, ref);
|
|
105
108
|
const downloadContent = async (contentPiece) => {
|
|
106
109
|
const { path: contentPiecePath, download_url } = contentPiece;
|
|
107
110
|
const downloadPath = path_1.default.join(dest, contentPiecePath.replace(contentPath, ''));
|
|
@@ -115,12 +118,18 @@ async function downloadGithubRepoContents(repoName, contentPath, dest, filter) {
|
|
|
115
118
|
});
|
|
116
119
|
return fetchGitHubRepoContentFromDownloadUrl(downloadPath, download_url);
|
|
117
120
|
};
|
|
118
|
-
|
|
121
|
+
let contentPromises;
|
|
122
|
+
if (Array.isArray(contentsResp)) {
|
|
123
|
+
contentPromises = contentsResp.map(downloadContent);
|
|
124
|
+
}
|
|
125
|
+
else {
|
|
126
|
+
contentPromises = [downloadContent(contentsResp)];
|
|
127
|
+
}
|
|
119
128
|
Promise.all(contentPromises);
|
|
120
129
|
}
|
|
121
130
|
catch (e) {
|
|
122
131
|
const error = e;
|
|
123
|
-
if (error
|
|
132
|
+
if (error?.error?.message) {
|
|
124
133
|
(0, standardErrors_1.throwErrorWithMessage)('github.downloadGithubRepoContents', {
|
|
125
134
|
errorMessage: error.error.message,
|
|
126
135
|
}, error);
|
package/lib/gitignore.js
CHANGED
|
@@ -6,58 +6,13 @@ Object.defineProperty(exports, "__esModule", { value: true });
|
|
|
6
6
|
exports.checkAndAddConfigToGitignore = void 0;
|
|
7
7
|
const fs_extra_1 = require("fs-extra");
|
|
8
8
|
const path_1 = __importDefault(require("path"));
|
|
9
|
-
const findup_sync_1 = __importDefault(require("findup-sync"));
|
|
10
9
|
const git_1 = require("../utils/git");
|
|
11
10
|
const config_1 = require("../constants/config");
|
|
12
11
|
const standardErrors_1 = require("../errors/standardErrors");
|
|
13
12
|
const GITIGNORE_FILE = '.gitignore';
|
|
14
|
-
// Get all .gitignore files since they can cascade down directory structures
|
|
15
|
-
function getGitignoreFiles(configPath) {
|
|
16
|
-
const gitDir = (0, git_1.getGitComparisonDir)();
|
|
17
|
-
const files = [];
|
|
18
|
-
if (!gitDir) {
|
|
19
|
-
// Not in git
|
|
20
|
-
return files;
|
|
21
|
-
}
|
|
22
|
-
// Start findup from config dir
|
|
23
|
-
let cwd = configPath && path_1.default.dirname(configPath);
|
|
24
|
-
while (cwd) {
|
|
25
|
-
const ignorePath = (0, findup_sync_1.default)(GITIGNORE_FILE, { cwd });
|
|
26
|
-
const ignorePathComparisonDir = (0, git_1.makeComparisonDir)(ignorePath);
|
|
27
|
-
const gitComparisonDir = (0, git_1.makeComparisonDir)(gitDir);
|
|
28
|
-
if (ignorePath &&
|
|
29
|
-
ignorePathComparisonDir &&
|
|
30
|
-
gitComparisonDir &&
|
|
31
|
-
ignorePathComparisonDir.startsWith(gitComparisonDir)) {
|
|
32
|
-
const file = path_1.default.resolve(ignorePath);
|
|
33
|
-
files.push(file);
|
|
34
|
-
cwd = path_1.default.resolve(path_1.default.dirname(file) + '..');
|
|
35
|
-
}
|
|
36
|
-
else {
|
|
37
|
-
cwd = null;
|
|
38
|
-
}
|
|
39
|
-
}
|
|
40
|
-
return files;
|
|
41
|
-
}
|
|
42
|
-
function checkGitInclusion(configPath) {
|
|
43
|
-
const result = {
|
|
44
|
-
inGit: false,
|
|
45
|
-
configIgnored: false,
|
|
46
|
-
gitignoreFiles: [],
|
|
47
|
-
};
|
|
48
|
-
if ((0, git_1.isConfigPathInGitRepo)(configPath)) {
|
|
49
|
-
result.inGit = true;
|
|
50
|
-
result.gitignoreFiles = getGitignoreFiles(configPath);
|
|
51
|
-
if ((0, git_1.configFilenameIsIgnoredByGitignore)(result.gitignoreFiles, configPath)) {
|
|
52
|
-
// Found ignore statement in .gitignore that matches config filename
|
|
53
|
-
result.configIgnored = true;
|
|
54
|
-
}
|
|
55
|
-
}
|
|
56
|
-
return result;
|
|
57
|
-
}
|
|
58
13
|
function checkAndAddConfigToGitignore(configPath) {
|
|
59
14
|
try {
|
|
60
|
-
const { configIgnored, gitignoreFiles } = checkGitInclusion(configPath);
|
|
15
|
+
const { configIgnored, gitignoreFiles } = (0, git_1.checkGitInclusion)(configPath);
|
|
61
16
|
if (configIgnored)
|
|
62
17
|
return;
|
|
63
18
|
let gitignoreFilePath = gitignoreFiles && gitignoreFiles.length ? gitignoreFiles[0] : null;
|
package/lib/hubdb.d.ts
ADDED
|
@@ -0,0 +1,16 @@
|
|
|
1
|
+
import { Row } from '../types/Hubdb';
|
|
2
|
+
export declare function addRowsToHubDbTable(accountId: number, tableId: string, rows: Array<Row>): Promise<{
|
|
3
|
+
tableId: string;
|
|
4
|
+
rowCount: number;
|
|
5
|
+
}>;
|
|
6
|
+
export declare function createHubDbTable(accountId: number, src: string): Promise<{
|
|
7
|
+
tableId: string;
|
|
8
|
+
rowCount: number;
|
|
9
|
+
}>;
|
|
10
|
+
export declare function updateHubDbTable(accountId: number, tableId: string, src: string): Promise<unknown>;
|
|
11
|
+
export declare function downloadHubDbTable(accountId: number, tableId: string, dest: string): Promise<{
|
|
12
|
+
filePath: string;
|
|
13
|
+
}>;
|
|
14
|
+
export declare function clearHubDbTableRows(accountId: number, tableId: string): Promise<{
|
|
15
|
+
deletedRowCount: number;
|
|
16
|
+
}>;
|