@hubspot/local-dev-lib 1.13.1 → 2.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/api/appsDev.d.ts +6 -5
- package/api/appsDev.js +6 -10
- package/api/customObjects.d.ts +7 -6
- package/api/customObjects.js +13 -16
- package/api/designManager.d.ts +3 -2
- package/api/designManager.js +5 -8
- package/api/developerTestAccounts.d.ts +5 -4
- package/api/developerTestAccounts.js +9 -10
- package/api/fileManager.d.ts +5 -4
- package/api/fileManager.js +9 -9
- package/api/fileMapper.d.ts +8 -8
- package/api/fileMapper.js +16 -16
- package/api/fileTransport.d.ts +4 -3
- package/api/fileTransport.js +5 -5
- package/api/functions.d.ts +6 -5
- package/api/functions.js +11 -14
- package/api/github.d.ts +6 -6
- package/api/github.js +29 -13
- package/api/hubdb.d.ts +9 -8
- package/api/hubdb.js +17 -20
- package/api/lighthouseScore.d.ts +4 -3
- package/api/lighthouseScore.js +7 -10
- package/api/localDevAuth.d.ts +5 -4
- package/api/localDevAuth.js +8 -9
- package/api/marketplaceValidation.d.ts +4 -3
- package/api/marketplaceValidation.js +4 -7
- package/api/projects.d.ts +31 -30
- package/api/projects.js +52 -52
- package/api/sandboxHubs.d.ts +5 -4
- package/api/sandboxHubs.js +9 -10
- package/api/sandboxSync.d.ts +4 -4
- package/api/sandboxSync.js +5 -14
- package/api/secrets.d.ts +5 -4
- package/api/secrets.js +9 -12
- package/api/validateHubl.d.ts +2 -1
- package/api/validateHubl.js +3 -6
- package/config/CLIConfiguration.d.ts +3 -3
- package/config/CLIConfiguration.js +22 -20
- package/config/configFile.js +7 -14
- package/config/config_DEPRECATED.js +5 -2
- package/config/index.js +45 -48
- package/errors/errors_DEPRECATED.js +2 -2
- package/errors/index.d.ts +18 -0
- package/errors/index.js +63 -0
- package/http/addQueryParams.d.ts +2 -0
- package/http/addQueryParams.js +14 -0
- package/http/getAxiosConfig.d.ts +2 -2
- package/http/getAxiosConfig.js +10 -2
- package/http/index.d.ts +9 -14
- package/http/index.js +28 -53
- package/http/unauthed.d.ts +15 -0
- package/http/unauthed.js +38 -0
- package/lang/en.json +1 -2
- package/lang/lang/en.json +1 -2
- package/lib/archive.js +11 -10
- package/lib/cms/functions.d.ts +7 -1
- package/lib/cms/functions.js +22 -18
- package/lib/cms/handleFieldsJS.js +8 -7
- package/lib/cms/modules.js +4 -5
- package/lib/cms/processFieldsJs.js +8 -7
- package/lib/cms/templates.js +2 -3
- package/lib/cms/uploadFolder.js +16 -14
- package/lib/cms/validate.js +1 -1
- package/lib/cms/watch.js +16 -9
- package/lib/customObjects.js +4 -15
- package/lib/fileManager.js +22 -21
- package/lib/fileMapper.js +24 -31
- package/lib/fs.js +2 -2
- package/lib/github.js +21 -21
- package/lib/gitignore.js +2 -2
- package/lib/hubdb.d.ts +3 -2
- package/lib/hubdb.js +11 -9
- package/lib/notify.js +2 -2
- package/lib/oauth.d.ts +1 -1
- package/lib/oauth.js +8 -17
- package/lib/personalAccessKey.js +16 -21
- package/lib/portManager.js +2 -2
- package/lib/trackUsage.js +3 -3
- package/models/FileSystemError.d.ts +6 -0
- package/models/FileSystemError.js +47 -0
- package/models/HubSpotHttpError.d.ts +24 -0
- package/models/HubSpotHttpError.js +197 -0
- package/models/OAuth2Manager.d.ts +1 -2
- package/models/OAuth2Manager.js +13 -28
- package/package.json +2 -1
- package/types/Error.d.ts +7 -5
- package/types/Files.d.ts +4 -4
- package/types/Http.d.ts +6 -10
- package/types/Sandbox.d.ts +0 -5
- package/utils/PortManagerServer.d.ts +3 -3
- package/utils/PortManagerServer.js +9 -9
- package/utils/cms/modules.js +2 -2
- package/utils/detectPort.js +3 -3
- package/errors/apiErrors.d.ts +0 -25
- package/errors/apiErrors.js +0 -176
- package/errors/fileSystemErrors.d.ts +0 -6
- package/errors/fileSystemErrors.js +0 -35
- package/errors/standardErrors.d.ts +0 -20
- package/errors/standardErrors.js +0 -62
- package/lib/developerTestAccounts.d.ts +0 -4
- package/lib/developerTestAccounts.js +0 -35
- package/lib/sandboxes.d.ts +0 -14
- package/lib/sandboxes.js +0 -70
- package/models/HubSpotAuthError.d.ts +0 -12
- package/models/HubSpotAuthError.js +0 -20
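The removed files above (apiErrors, standardErrors, fileSystemErrors, HubSpotAuthError) are replaced in 2.x by a single `package/errors/index` entry point plus the new `HubSpotHttpError` and `FileSystemError` models. A minimal consumer-side sketch of that change, assuming the package exposes `errors` as a subpath import; the guard names and the `data` property are taken from the diffs below, everything else is illustrative:

```js
// Hypothetical 2.x error-handling sketch -- the require path is an assumption;
// isHubSpotHttpError/isAuthError and `error.data` appear in the diffs below.
const { isHubSpotHttpError, isAuthError } = require('@hubspot/local-dev-lib/errors');

async function safeUpload(doUpload) {
  try {
    await doUpload();
  } catch (err) {
    if (isAuthError(err)) {
      throw err; // 2.x rethrows auth failures instead of routing them through helper throwers
    }
    if (isHubSpotHttpError(err)) {
      console.error(err.data); // HubSpotHttpError carries the HTTP response body on `data`
      return;
    }
    throw err;
  }
}
```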
package/lib/archive.js
CHANGED
@@ -8,10 +8,9 @@ const fs_extra_1 = __importDefault(require("fs-extra"));
 const path_1 = require("path");
 const os_1 = require("os");
 const extract_zip_1 = __importDefault(require("extract-zip"));
-const fileSystemErrors_1 = require("../errors/fileSystemErrors");
-const standardErrors_1 = require("../errors/standardErrors");
 const logger_1 = require("./logger");
 const lang_1 = require("../utils/lang");
+const FileSystemError_1 = require("../models/FileSystemError");
 const i18nKey = 'lib.archive';
 async function extractZip(name, zip, hideLogs = false) {
 const result = { extractDir: '', tmpDir: '' };
@@ -31,15 +30,16 @@ async function extractZip(name, zip, hideLogs = false) {
 }
 catch (err) {
 if (tmpZipPath || result.tmpDir) {
-
+throw new FileSystemError_1.FileSystemError({ cause: err }, {
 filepath: tmpZipPath || result.tmpDir,
-
+operation: 'write',
 });
 }
 else {
-(0,
+throw new Error((0, lang_1.i18n)(`${i18nKey}.extractZip.errors.write`), {
+cause: err,
+});
 }
-return result;
 }
 // Extract zip
 try {
@@ -48,7 +48,9 @@ async function extractZip(name, zip, hideLogs = false) {
 result.extractDir = tmpExtractPath;
 }
 catch (err) {
-(0,
+throw new Error((0, lang_1.i18n)(`${i18nKey}.extractZip.errors.extract`), {
+cause: err,
+});
 }
 logger_1.logger.debug((0, lang_1.i18n)(`${i18nKey}.extractZip.success`));
 return result;
@@ -81,12 +83,11 @@ async function copySourceToDest(src, dest, { sourceDir, includesRootDir = true,
 }
 catch (err) {
 logger_1.logger.debug((0, lang_1.i18n)(`${i18nKey}.copySourceToDest.error`, { dest }));
-
+throw new FileSystemError_1.FileSystemError({ cause: err }, {
 filepath: dest,
-
+operation: 'write',
 });
 }
-return false;
 }
 async function cleanupTempDir(tmpDir) {
 if (!tmpDir)
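Across 2.x, the error helpers removed above give way to plain `throw` statements: `new FileSystemError({ cause: err }, { filepath, operation })` for I/O failures and `new Error(i18n(...), { cause: err })` elsewhere. A hedged sketch of catching the new error; the require path, and the assumption that the `{ cause }` option reaches the `Error` base class, are mine — only the constructor shape comes from the diff:

```js
// Sketch only: wraps any 2.x call that can throw the new FileSystemError.
// `extract` is a placeholder; `err.cause` assumes the cause option is forwarded to Error.
const { FileSystemError } = require('@hubspot/local-dev-lib/models/FileSystemError');

async function runWithFsErrorLogging(extract) {
  try {
    await extract();
  } catch (err) {
    if (err instanceof FileSystemError) {
      console.error('file system operation failed:', err.cause);
    }
    throw err;
  }
}
```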
package/lib/cms/functions.d.ts
CHANGED
@@ -1,2 +1,8 @@
-import { FunctionInfo, FunctionOptions } from '../../types/Functions';
+import { FunctionConfig, FunctionConfigInfo, FunctionInfo, FunctionOptions } from '../../types/Functions';
+export declare function isObjectOrFunction(value: object): boolean;
+export declare function createEndpoint(endpointMethod: string, filename: string): {
+method: string;
+file: string;
+};
+export declare function createConfig({ endpointPath, endpointMethod, functionFile, }: FunctionConfigInfo): FunctionConfig;
 export declare function createFunction(functionInfo: FunctionInfo, dest: string, options?: FunctionOptions): Promise<void>;
package/lib/cms/functions.js
CHANGED
@@ -3,27 +3,28 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
 return (mod && mod.__esModule) ? mod : { "default": mod };
 };
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.createFunction = void 0;
+exports.createFunction = exports.createConfig = exports.createEndpoint = exports.isObjectOrFunction = void 0;
 const fs_extra_1 = __importDefault(require("fs-extra"));
 const path_1 = __importDefault(require("path"));
 const findup_sync_1 = __importDefault(require("findup-sync"));
 const path_2 = require("../path");
 const github_1 = require("../github");
 const logger_1 = require("../logger");
-const standardErrors_1 = require("../../errors/standardErrors");
-const fileSystemErrors_1 = require("../../errors/fileSystemErrors");
 const lang_1 = require("../../utils/lang");
+const FileSystemError_1 = require("../../models/FileSystemError");
 const i18nKey = 'lib.cms.functions';
 function isObjectOrFunction(value) {
 const type = typeof value;
 return value != null && (type === 'object' || type === 'function');
 }
+exports.isObjectOrFunction = isObjectOrFunction;
 function createEndpoint(endpointMethod, filename) {
 return {
 method: endpointMethod || 'GET',
 file: filename,
 };
 }
+exports.createEndpoint = createEndpoint;
 function createConfig({ endpointPath, endpointMethod, functionFile, }) {
 return {
 runtime: 'nodejs18.x',
@@ -35,6 +36,7 @@ function createConfig({ endpointPath, endpointMethod, functionFile, }) {
 },
 };
 }
+exports.createConfig = createConfig;
 function writeConfig(configFilePath, config) {
 const configJson = JSON.stringify(config, null, ' ');
 fs_extra_1.default.writeFileSync(configFilePath, configJson);
@@ -48,9 +50,9 @@ function updateExistingConfig(configFilePath, { endpointPath, endpointMethod, fu
 logger_1.logger.debug((0, lang_1.i18n)(`${i18nKey}.updateExistingConfig.unableToReadFile`, {
 configFilePath,
 }));
-
+throw new FileSystemError_1.FileSystemError({ cause: err }, {
 filepath: configFilePath,
-
+operation: 'read',
 });
 }
 let config;
@@ -61,20 +63,22 @@ function updateExistingConfig(configFilePath, { endpointPath, endpointMethod, fu
 logger_1.logger.debug((0, lang_1.i18n)(`${i18nKey}.updateExistingConfig.invalidJSON`, {
 configFilePath,
 }));
-
+throw new FileSystemError_1.FileSystemError({ cause: err }, {
 filepath: configFilePath,
-
+operation: 'read',
 });
 }
 if (!isObjectOrFunction(config)) {
-(0,
+throw new Error((0, lang_1.i18n)(`${i18nKey}.updateExistingConfig.errors.configIsNotObjectError`, {
+configFilePath,
+}));
 }
 if (config.endpoints) {
 if (config.endpoints[endpointPath]) {
-(0,
+throw new Error((0, lang_1.i18n)(`${i18nKey}.updateExistingConfig.errors.endpointAreadyExistsError`, {
 configFilePath,
 endpointPath,
-});
+}));
 }
 else {
 config.endpoints[endpointPath] = createEndpoint(endpointMethod, functionFile);
@@ -92,9 +96,9 @@ function updateExistingConfig(configFilePath, { endpointPath, endpointMethod, fu
 logger_1.logger.debug((0, lang_1.i18n)(`${i18nKey}.updateExistingConfig.couldNotUpdateFile`, {
 configFilePath,
 }));
-
+throw new FileSystemError_1.FileSystemError({ cause: err }, {
 filepath: configFilePath,
-
+operation: 'read',
 });
 }
 }
@@ -106,9 +110,9 @@ async function createFunction(functionInfo, dest, options = {}) {
 nocase: true,
 });
 if (ancestorFunctionsConfig) {
-(0,
+throw new Error((0, lang_1.i18n)(`${i18nKey}.createFunction.errors.nestedConfigError`, {
 ancestorConfigPath: path_1.default.dirname(ancestorFunctionsConfig),
-});
+}));
 }
 const folderName = functionsFolder.endsWith('.functions')
 ? functionsFolder
@@ -129,9 +133,9 @@ async function createFunction(functionInfo, dest, options = {}) {
 const functionFilePath = path_1.default.join(destPath, functionFile);
 const configFilePath = path_1.default.join(destPath, 'serverless.json');
 if (!allowExistingFile && fs_extra_1.default.existsSync(functionFilePath)) {
-(0,
+throw new Error((0, lang_1.i18n)(`${i18nKey}.createFunction.errors.jsFileConflictError`, {
 functionFilePath,
-});
+}));
 }
 await (0, github_1.downloadGithubRepoContents)('HubSpot/cms-sample-assets', 'functions/sample-function.js', functionFilePath);
 logger_1.logger.log((0, lang_1.i18n)(`${i18nKey}.createFunction.createdFunctionFile`, {
@@ -160,9 +164,9 @@ async function createFunction(functionInfo, dest, options = {}) {
 logger_1.logger.debug((0, lang_1.i18n)(`${i18nKey}.createFunction.failedToCreateFile`, {
 configFilePath,
 }));
-
+throw new FileSystemError_1.FileSystemError({ cause: err }, {
 filepath: configFilePath,
-
+operation: 'write',
 });
 }
 logger_1.logger.log((0, lang_1.i18n)(`${i18nKey}.createFunction.createdConfigFile`, {
package/lib/cms/handleFieldsJS.js
CHANGED
@@ -11,7 +11,6 @@ const child_process_1 = require("child_process");
 const escapeRegExp_1 = require("../escapeRegExp");
 const modules_1 = require("../../utils/cms/modules");
 const logger_1 = require("../logger");
-const standardErrors_1 = require("../../errors/standardErrors");
 const lang_1 = require("../../utils/lang");
 const i18nKey = 'lib.cms.handleFieldsJs';
 class FieldsJs {
@@ -69,7 +68,7 @@ class FieldsJs {
 }));
 });
 }).catch((e) => {
-(0,
+throw new Error((0, lang_1.i18n)(`${i18nKey}.convertFieldsJs.errors.errorConverting`, { filePath }), { cause: e });
 });
 }
 /**
@@ -79,9 +78,9 @@ class FieldsJs {
 */
 saveOutput() {
 if (!this.outputPath || !fs_extra_1.default.existsSync(this.outputPath)) {
-(0,
+throw new Error((0, lang_1.i18n)(`${i18nKey}.saveOutput.errors.saveFailed`, {
 path: this.filePath,
-});
+}));
 }
 const relativePath = path_1.default.relative(this.rootWriteDir, path_1.default.dirname(this.outputPath));
 const savePath = path_1.default.join(this.projectDir, relativePath, 'fields.output.json');
@@ -89,7 +88,7 @@ class FieldsJs {
 fs_extra_1.default.copyFileSync(this.outputPath, savePath);
 }
 catch (err) {
-(0,
+throw new Error((0, lang_1.i18n)(`${i18nKey}.saveOutput.errors.saveFailed`, { path: savePath }), { cause: err });
 }
 }
 /**
@@ -131,7 +130,9 @@ function createTmpDirSync(prefix) {
 tmpDir = fs_extra_1.default.mkdtempSync(path_1.default.join(os_1.default.tmpdir(), prefix));
 }
 catch (err) {
-(0,
+throw new Error((0, lang_1.i18n)(`${i18nKey}.createTmpDirSync.errors.writeFailed`), {
+cause: err,
+});
 }
 return tmpDir;
 }
@@ -140,7 +141,7 @@ exports.createTmpDirSync = createTmpDirSync;
 function cleanupTmpDirSync(tmpDir) {
 fs_extra_1.default.rm(tmpDir, { recursive: true }, err => {
 if (err) {
-(0,
+throw new Error((0, lang_1.i18n)(`${i18nKey}.cleanupTmpDirSync.errors.deleteFailed`), { cause: err });
 }
 });
 }
package/lib/cms/modules.js
CHANGED
@@ -9,7 +9,6 @@ const fs_extra_1 = __importDefault(require("fs-extra"));
 const path_2 = require("../path");
 const fs_1 = require("../fs");
 const github_1 = require("../github");
-const standardErrors_1 = require("../../errors/standardErrors");
 const logger_1 = require("../logger");
 const modules_1 = require("../../utils/cms/modules");
 const lang_1 = require("../../utils/lang");
@@ -98,10 +97,10 @@ const updateFileContents = async (file, metaData, getInternalVersion) => {
 }
 catch (error) {
 const { message } = error;
-(0,
+throw new Error((0, lang_1.i18n)(`${i18nKey}.createModule.errors.fileUpdateFailure`, {
 path: file,
 errorMessage: message,
-});
+}));
 }
 };
 async function createModule(moduleDefinition, name, dest, getInternalVersion, options = {
@@ -126,9 +125,9 @@ async function createModule(moduleDefinition, name, dest, getInternalVersion, op
 ? path_1.default.join(dest, folderName)
 : path_1.default.join(dest, `${name}`);
 if (!options.allowExistingDir && fs_extra_1.default.existsSync(destPath)) {
-(0,
+throw new Error((0, lang_1.i18n)(`${i18nKey}.createModule.errors.pathExists`, {
 path: destPath,
-});
+}));
 }
 else {
 logger_1.logger.log((0, lang_1.i18n)(`${i18nKey}.createModule.creatingPath`, {
package/lib/cms/processFieldsJs.js
CHANGED
@@ -9,7 +9,6 @@ const fs_1 = __importDefault(require("fs"));
 const semver_1 = __importDefault(require("semver"));
 const url_1 = require("url");
 const path_2 = require("../path");
-const standardErrors_1 = require("../../errors/standardErrors");
 const lang_1 = require("../../utils/lang");
 const i18nKey = 'lib.cms.processFieldsJs';
 const { dirName, fieldOptions, filePath, writeDir } = process.env;
@@ -25,21 +24,23 @@ console.info((0, lang_1.i18n)(`${i18nKey}.converting`, {
 * This function has optional return type of Promise<Array> | Array. In order to have uniform handling,
 * we wrap the return value of the function in a Promise.resolve(), and then process.
 */
-const fieldsPromise = dynamicImport(filePath).catch(e =>
+const fieldsPromise = dynamicImport(filePath).catch(e => {
+throw e;
+});
 fieldsPromise.then(fieldsFunc => {
 const fieldsFuncType = typeof fieldsFunc;
 if (fieldsFuncType !== 'function') {
-(0,
+throw new Error((0, lang_1.i18n)(`${i18nKey}.errors.notFunction`, {
 path: filePath,
 returned: fieldsFuncType,
-});
+}));
 }
 return Promise.resolve(fieldsFunc(fieldOptions)).then(fields => {
 if (!Array.isArray(fields)) {
-(0,
+throw new Error((0, lang_1.i18n)(`${i18nKey}.errors.notArray`, {
 path: filePath,
 returned: typeof fields,
-});
+}));
 }
 const finalPath = path_1.default.join(writeDir, '/fields.json');
 return fieldsArrayToJson(fields).then(json => {
@@ -89,7 +90,7 @@ async function dynamicImport(filePath) {
 }
 else {
 if ((0, path_2.getExt)(filePath) == 'mjs') {
-(0,
+throw new Error((0, lang_1.i18n)(`${i18nKey}.errors.invalidMjsFile`));
 }
 return require(filePath);
 }
package/lib/cms/templates.js
CHANGED
@@ -7,7 +7,6 @@ exports.TEMPLATE_TYPES = exports.createTemplate = exports.isCodedFile = exports.
 const fs_extra_1 = __importDefault(require("fs-extra"));
 const path_1 = __importDefault(require("path"));
 const github_1 = require("../github");
-const standardErrors_1 = require("../../errors/standardErrors");
 const logger_1 = require("../logger");
 const lang_1 = require("../../utils/lang");
 const i18nKey = 'lib.cms.templates';
@@ -52,9 +51,9 @@ async function createTemplate(name, dest, type = 'page-template', options = { al
 const filename = name.endsWith('.html') ? name : `${name}.html`;
 const filePath = path_1.default.join(dest, filename);
 if (!options.allowExisting && fs_extra_1.default.existsSync(filePath)) {
-(0,
+throw new Error((0, lang_1.i18n)(`${i18nKey}.createTemplate.errors.pathExists`, {
 path: filePath,
-});
+}));
 }
 logger_1.logger.debug((0, lang_1.i18n)(`${i18nKey}.createTemplate.creatingPath`, { path: dest }));
 fs_extra_1.default.mkdirp(dest);
package/lib/cms/uploadFolder.js
CHANGED
@@ -12,11 +12,11 @@ const fileMapper_2 = require("../../api/fileMapper");
 const modules_1 = require("../../utils/cms/modules");
 const escapeRegExp_1 = require("../escapeRegExp");
 const path_2 = require("../path");
-const
-const apiErrors_1 = require("../../errors/apiErrors");
+const errors_1 = require("../../errors");
 const logger_1 = require("../logger");
 const files_1 = require("../../constants/files");
 const lang_1 = require("../../utils/lang");
+const HubSpotHttpError_1 = require("../../models/HubSpotHttpError");
 const i18nKey = 'lib.cms.uploadFolder';
 const queue = new p_queue_1.default({
 concurrency: 10,
@@ -86,17 +86,21 @@ const defaultUploadSuccessCallback = (file, destPath) => logger_1.logger.log((0,
 }));
 const defaultUploadFirstErrorCallback = (file, destPath, error) => {
 logger_1.logger.debug((0, lang_1.i18n)(`${i18nKey}.uploadFolder.failed`, { file, destPath }));
-if (
-logger_1.logger.debug(error.
+if ((0, errors_1.isHubSpotHttpError)(error)) {
+logger_1.logger.debug(error.data);
 }
-else {
+else if (error instanceof Error) {
 logger_1.logger.debug(error.message);
 }
 };
 const defaultUploadRetryCallback = (file, destPath) => logger_1.logger.debug((0, lang_1.i18n)(`${i18nKey}.uploadFolder.retry`, { file, destPath }));
 const defaultUploadFinalErrorCallback = (accountId, file, destPath, error) => {
-
-
+const retryFailed = (0, lang_1.i18n)(`${i18nKey}.uploadFolder.retryFailed`, {
+file,
+destPath,
+});
+logger_1.logger.debug(retryFailed);
+throw new HubSpotHttpError_1.HubSpotHttpError(retryFailed, { cause: error }, {
 accountId,
 request: destPath,
 payload: file,
@@ -140,11 +144,10 @@ async function uploadFolder(accountId, src, dest, fileMapperOptions, commandOpti
 _onSuccessCallback(originalFilePath, destPath);
 }
 catch (err) {
-
-
-throw error;
+if ((0, errors_1.isAuthError)(err)) {
+throw err;
 }
-_onFirstErrorCallback(file, destPath,
+_onFirstErrorCallback(file, destPath, err);
 failures.push({
 file,
 destPath,
@@ -169,9 +172,8 @@ async function uploadFolder(accountId, src, dest, fileMapperOptions, commandOpti
 file,
 };
 }
-catch (
-
-if ((0, standardErrors_1.isFatalError)(error)) {
+catch (error) {
+if ((0, errors_1.isAuthError)(error)) {
 throw error;
 }
 _onFinalErrorCallback(accountId, file, destPath, error);
package/lib/cms/validate.js
CHANGED
@@ -26,7 +26,7 @@ async function lint(accountId, filepath, callback) {
 }
 return result;
 }
-const validation = await (0, validateHubl_1.validateHubl)(accountId, source);
+const { data: validation } = await (0, validateHubl_1.validateHubl)(accountId, source);
 const result = {
 file,
 validation,
package/lib/cms/watch.js
CHANGED
@@ -8,7 +8,6 @@ const path_1 = __importDefault(require("path"));
 const chokidar_1 = __importDefault(require("chokidar"));
 const p_queue_1 = __importDefault(require("p-queue"));
 const debounce_1 = __importDefault(require("debounce"));
-const apiErrors_1 = require("../../errors/apiErrors");
 const handleFieldsJS_1 = require("./handleFieldsJS");
 const uploadFolder_1 = require("./uploadFolder");
 const ignoreRules_1 = require("../ignoreRules");
@@ -20,6 +19,8 @@ const notify_1 = require("../notify");
 const themes_1 = require("./themes");
 const logger_1 = require("../logger");
 const lang_1 = require("../../utils/lang");
+const HubSpotHttpError_1 = require("../../models/HubSpotHttpError");
+const errors_1 = require("../../errors");
 const i18nKey = 'lib.cms.watch';
 const queue = new p_queue_1.default({
 concurrency: 10,
@@ -36,11 +37,14 @@ function _notifyOfThemePreview(filePath, accountId) {
 }
 const notifyOfThemePreview = (0, debounce_1.default)(_notifyOfThemePreview, 1000);
 const defaultOnUploadFileError = (file, dest, accountId) => (error) => {
-
+const uploadFailedMessage = (0, lang_1.i18n)(`${i18nKey}.uploadFailed`, {
 file,
 dest,
-})
-(
+});
+logger_1.logger.debug(uploadFailedMessage);
+throw new HubSpotHttpError_1.HubSpotHttpError(uploadFailedMessage, {
+cause: error,
+}, {
 accountId,
 request: dest,
 payload: file,
@@ -98,14 +102,17 @@ async function deleteRemoteFile(accountId, filePath, remoteFilePath) {
 logger_1.logger.log((0, lang_1.i18n)(`${i18nKey}.deleteSuccess`, { remoteFilePath }));
 notifyOfThemePreview(filePath, accountId);
 })
-.catch(
+.catch(error => {
 logger_1.logger.debug((0, lang_1.i18n)(`${i18nKey}.deleteFailed`, {
 remoteFilePath,
 }));
-(0,
-
-
-
+if ((0, errors_1.isHubSpotHttpError)(error)) {
+error.updateContext({
+accountId,
+request: remoteFilePath,
+});
+}
+throw error;
 });
 });
 }
package/lib/customObjects.js
CHANGED
@@ -9,7 +9,6 @@ const path_1 = __importDefault(require("path"));
 const prettier_1 = __importDefault(require("prettier"));
 const path_2 = require("../lib/path");
 const customObjects_1 = require("../api/customObjects");
-const apiErrors_1 = require("../errors/apiErrors");
 function getResolvedPath(dest, name) {
 if (name)
 return path_1.default.resolve((0, path_2.getCwd)(), dest || '', `${name}.json`);
@@ -24,13 +23,8 @@ async function writeSchemaToDisk(schema, dest) {
 }
 exports.writeSchemaToDisk = writeSchemaToDisk;
 async function downloadSchemas(accountId, dest) {
-
-
-response = await (0, customObjects_1.fetchObjectSchemas)(accountId);
-}
-catch (err) {
-(0, apiErrors_1.throwApiError)(err);
-}
+const axiosResponse = await (0, customObjects_1.fetchObjectSchemas)(accountId);
+const response = axiosResponse.data;
 if (response.results.length) {
 for (const schema of response.results) {
 await writeSchemaToDisk(schema, dest);
@@ -40,13 +34,8 @@ async function downloadSchemas(accountId, dest) {
 }
 exports.downloadSchemas = downloadSchemas;
 async function downloadSchema(accountId, schemaObjectType, dest) {
-
-
-response = await (0, customObjects_1.fetchObjectSchema)(accountId, schemaObjectType);
-}
-catch (err) {
-(0, apiErrors_1.throwApiError)(err);
-}
+const axiosResponse = await (0, customObjects_1.fetchObjectSchema)(accountId, schemaObjectType);
+const response = axiosResponse.data;
 await writeSchemaToDisk(response, dest);
 return response;
 }
package/lib/fileManager.js
CHANGED
@@ -11,13 +11,12 @@ const fileManager_1 = require("../api/fileManager");
 const fs_1 = require("./fs");
 const logger_1 = require("./logger");
 const ignoreRules_1 = require("./ignoreRules");
-const http_1 =
+const http_1 = require("../http");
 const escapeRegExp_1 = require("./escapeRegExp");
 const path_2 = require("./path");
-const apiErrors_1 = require("../errors/apiErrors");
-const standardErrors_1 = require("../errors/standardErrors");
-const fileSystemErrors_1 = require("../errors/fileSystemErrors");
 const lang_1 = require("../utils/lang");
+const errors_1 = require("../errors");
+const FileSystemError_1 = require("../models/FileSystemError");
 const i18nKey = 'lib.fileManager';
 async function uploadFolder(accountId, src, dest) {
 const regex = new RegExp(`^${(0, escapeRegExp_1.escapeRegExp)(src)}`);
@@ -38,13 +37,17 @@ async function uploadFolder(accountId, src, dest) {
 logger_1.logger.log((0, lang_1.i18n)(`${i18nKey}.uploadSuccess`, { file, destPath }));
 }
 catch (err) {
-if ((0,
-
+if ((0, errors_1.isHubSpotHttpError)(err)) {
+err.updateContext({
+filepath: file,
+dest: destPath,
+});
+throw err;
 }
-(0,
+throw new Error((0, lang_1.i18n)(`${i18nKey}.errors.uploadFailed`, {
 file,
 destPath,
-});
+}));
 }
 }
 }
@@ -65,7 +68,7 @@ async function downloadFile(accountId, file, dest, overwrite) {
 if (await skipExisting(overwrite || false, destPath)) {
 return;
 }
-await http_1.
+await http_1.http.getOctetStream(accountId, {
 baseURL: file.url,
 url: '',
 }, destPath);
@@ -76,7 +79,7 @@ async function fetchAllPagedFiles(accountId, folderId, includeArchived) {
 let count = 0;
 let offset = 0;
 while (totalFiles === null || count < totalFiles) {
-const response = await (0, fileManager_1.fetchFiles)(accountId, folderId, offset, includeArchived);
+const { data: response } = await (0, fileManager_1.fetchFiles)(accountId, folderId, offset, includeArchived);
 if (totalFiles === null) {
 totalFiles = response.total_count;
 }
@@ -91,10 +94,10 @@ async function fetchFolderContents(accountId, folder, dest, overwrite, includeAr
 await fs_extra_1.default.ensureDir(dest);
 }
 catch (err) {
-
+throw new FileSystemError_1.FileSystemError({ cause: err }, {
 dest,
 accountId,
-
+operation: 'write',
 });
 }
 const files = await fetchAllPagedFiles(accountId, folder.id, includeArchived);
@@ -105,7 +108,7 @@ async function fetchFolderContents(accountId, folder, dest, overwrite, includeAr
 for (const file of files) {
 await downloadFile(accountId, file, dest, overwrite);
 }
-const { objects: folders } = await (0, fileManager_1.fetchFolders)(accountId, folder.id);
+const { data: { objects: folders }, } = await (0, fileManager_1.fetchFolders)(accountId, folder.id);
 for (const folder of folders) {
 const nestedFolder = path_1.default.join(dest, folder.name);
 await fetchFolderContents(accountId, folder, nestedFolder, overwrite, includeArchived);
@@ -134,10 +137,10 @@ async function downloadFolder(accountId, src, dest, folder, overwrite, includeAr
 // Download a single file and write to local file system.
 async function downloadSingleFile(accountId, src, dest, file, overwrite, includeArchived) {
 if (!includeArchived && file.archived) {
-(0,
+throw new Error((0, lang_1.i18n)(`${i18nKey}.errors.archivedFile`, { src }));
 }
 if (file.hidden) {
-(0,
+throw new Error((0, lang_1.i18n)(`${i18nKey}.errors.hiddenFile`, { src }));
 }
 logger_1.logger.log((0, lang_1.i18n)(`${i18nKey}.fetchFileStarted`, {
 src,
@@ -159,7 +162,7 @@ async function downloadFileOrFolder(accountId, src, dest, overwrite, includeArch
 await downloadFolder(accountId, src, dest, rootFolder, overwrite, includeArchived);
 }
 else {
-const { file, folder } = await (0, fileManager_1.fetchStat)(accountId, src);
+const { data: { file, folder }, } = await (0, fileManager_1.fetchStat)(accountId, src);
 if (file) {
 await downloadSingleFile(accountId, src, dest, file, overwrite, includeArchived);
 }
@@ -169,15 +172,13 @@ async function downloadFileOrFolder(accountId, src, dest, overwrite, includeArch
 }
 }
 catch (err) {
-
-
-(0, apiErrors_1.throwApiError)(err, {
+if ((0, errors_1.isAuthError)(err)) {
+err.updateContext({
 request: src,
 accountId,
 });
 }
-
-(0, standardErrors_1.throwError)(error);
+throw err;
 }
 }
 exports.downloadFileOrFolder = downloadFileOrFolder;