@hubspot/cli 8.0.3-experimental.1 → 8.1.0-beta.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/lang/en.d.ts +0 -8
- package/lang/en.js +0 -8
- package/lib/projects/__tests__/upload.test.js +0 -10
- package/lib/projects/upload.js +0 -9
- package/package.json +2 -2
- package/lib/projects/__tests__/workspaceArchive.test.d.ts +0 -1
- package/lib/projects/__tests__/workspaceArchive.test.js +0 -207
- package/lib/projects/workspaces.d.ts +0 -36
- package/lib/projects/workspaces.js +0 -224
package/lang/en.d.ts
CHANGED
|
@@ -3179,14 +3179,6 @@ export declare const lib: {
|
|
|
3179
3179
|
fileFiltered: (filename: string) => string;
|
|
3180
3180
|
legacyFileDetected: (filename: string, platformVersion: string) => string;
|
|
3181
3181
|
projectDoesNotExist: (accountId: number) => string;
|
|
3182
|
-
workspaceIncluded: (workspaceDir: string, archivePath: string) => string;
|
|
3183
|
-
fileDependencyIncluded: (packageName: string, localPath: string, archivePath: string) => string;
|
|
3184
|
-
malformedPackageJson: (packageJsonPath: string, error: string) => string;
|
|
3185
|
-
workspaceCollision: (archivePath: string, workspaceDir: string, existingWorkspace: string) => string;
|
|
3186
|
-
fileDependencyAlreadyIncluded: (packageName: string, archivePath: string) => string;
|
|
3187
|
-
updatingPackageJsonWorkspaces: (packageJsonPath: string) => string;
|
|
3188
|
-
updatedWorkspaces: (workspaces: string) => string;
|
|
3189
|
-
updatedFileDependency: (packageName: string, relativePath: string) => string;
|
|
3190
3182
|
};
|
|
3191
3183
|
};
|
|
3192
3184
|
importData: {
|
package/lang/en.js
CHANGED
|
@@ -3202,14 +3202,6 @@ export const lib = {
|
|
|
3202
3202
|
fileFiltered: (filename) => `Ignore rule triggered for "${filename}"`,
|
|
3203
3203
|
legacyFileDetected: (filename, platformVersion) => `The ${chalk.bold(filename)} file is not supported on platform version ${chalk.bold(platformVersion)} and will be ignored.`,
|
|
3204
3204
|
projectDoesNotExist: (accountId) => `Upload cancelled. Run ${uiCommandReference('hs project upload')} again to create the project in ${uiAccountDescription(accountId)}.`,
|
|
3205
|
-
workspaceIncluded: (workspaceDir, archivePath) => `Including workspace: ${workspaceDir} → ${archivePath}`,
|
|
3206
|
-
fileDependencyIncluded: (packageName, localPath, archivePath) => `Including file: dependency ${packageName}: ${localPath} → ${archivePath}`,
|
|
3207
|
-
malformedPackageJson: (packageJsonPath, error) => `Skipping malformed package.json at ${packageJsonPath}: ${error}`,
|
|
3208
|
-
workspaceCollision: (archivePath, workspaceDir, existingWorkspace) => `Workspace collision: ${archivePath} from ${workspaceDir} and ${existingWorkspace}`,
|
|
3209
|
-
fileDependencyAlreadyIncluded: (packageName, archivePath) => `file: dependency ${packageName} already included as workspace: ${archivePath}`,
|
|
3210
|
-
updatingPackageJsonWorkspaces: (packageJsonPath) => `Updating package.json workspaces in archive: ${packageJsonPath}`,
|
|
3211
|
-
updatedWorkspaces: (workspaces) => ` Updated workspaces: ${workspaces}`,
|
|
3212
|
-
updatedFileDependency: (packageName, relativePath) => ` Updated dependencies.${packageName}: file:${relativePath}`,
|
|
3213
3205
|
},
|
|
3214
3206
|
},
|
|
3215
3207
|
importData: {
|
|
@@ -13,7 +13,6 @@ import { walk } from '@hubspot/local-dev-lib/fs';
|
|
|
13
13
|
import { uploadProject } from '@hubspot/local-dev-lib/api/projects';
|
|
14
14
|
import { ensureProjectExists } from '../ensureProjectExists.js';
|
|
15
15
|
import { projectContainsHsMetaFiles } from '@hubspot/project-parsing-lib/projects';
|
|
16
|
-
import { findAndParsePackageJsonFiles, collectWorkspaceDirectories, collectFileDependencies, } from '@hubspot/project-parsing-lib/workspaces';
|
|
17
16
|
import { shouldIgnoreFile } from '@hubspot/local-dev-lib/ignoreRules';
|
|
18
17
|
import { getConfigAccountIfExists } from '@hubspot/local-dev-lib/config';
|
|
19
18
|
// Mock dependencies
|
|
@@ -23,11 +22,6 @@ vi.mock('@hubspot/local-dev-lib/fs');
|
|
|
23
22
|
vi.mock('@hubspot/local-dev-lib/api/projects');
|
|
24
23
|
vi.mock('../ensureProjectExists.js');
|
|
25
24
|
vi.mock('@hubspot/project-parsing-lib/projects');
|
|
26
|
-
vi.mock('@hubspot/project-parsing-lib/workspaces', () => ({
|
|
27
|
-
findAndParsePackageJsonFiles: vi.fn(),
|
|
28
|
-
collectWorkspaceDirectories: vi.fn(),
|
|
29
|
-
collectFileDependencies: vi.fn(),
|
|
30
|
-
}));
|
|
31
25
|
vi.mock('@hubspot/local-dev-lib/ignoreRules');
|
|
32
26
|
vi.mock('@hubspot/local-dev-lib/config');
|
|
33
27
|
vi.mock('archiver');
|
|
@@ -128,10 +122,6 @@ describe('lib/projects/upload', () => {
|
|
|
128
122
|
vi.mocked(shouldIgnoreFile).mockReturnValue(false);
|
|
129
123
|
vi.mocked(projectContainsHsMetaFiles).mockResolvedValue(false);
|
|
130
124
|
vi.mocked(isV2Project).mockReturnValue(false);
|
|
131
|
-
// Mock workspace functions to return empty arrays
|
|
132
|
-
vi.mocked(findAndParsePackageJsonFiles).mockResolvedValue([]);
|
|
133
|
-
vi.mocked(collectWorkspaceDirectories).mockResolvedValue([]);
|
|
134
|
-
vi.mocked(collectFileDependencies).mockResolvedValue([]);
|
|
135
125
|
vi.mocked(tmp.fileSync).mockReturnValue({
|
|
136
126
|
name: path.join(tempDir, 'test.zip'),
|
|
137
127
|
fd: 1,
|
package/lib/projects/upload.js
CHANGED
|
@@ -6,7 +6,6 @@ import { uploadProject } from '@hubspot/local-dev-lib/api/projects';
|
|
|
6
6
|
import { shouldIgnoreFile } from '@hubspot/local-dev-lib/ignoreRules';
|
|
7
7
|
import { isTranslationError, translate, } from '@hubspot/project-parsing-lib/translate';
|
|
8
8
|
import { projectContainsHsMetaFiles } from '@hubspot/project-parsing-lib/projects';
|
|
9
|
-
import { findAndParsePackageJsonFiles, collectWorkspaceDirectories, collectFileDependencies, } from '@hubspot/project-parsing-lib/workspaces';
|
|
10
9
|
import SpinniesManager from '../ui/SpinniesManager.js';
|
|
11
10
|
import { uiAccountDescription } from '../ui/index.js';
|
|
12
11
|
import { logError } from '../errorHandlers/index.js';
|
|
@@ -19,7 +18,6 @@ import { EXIT_CODES } from '../enums/exitCodes.js';
|
|
|
19
18
|
import ProjectValidationError from '../errors/ProjectValidationError.js';
|
|
20
19
|
import { walk } from '@hubspot/local-dev-lib/fs';
|
|
21
20
|
import { LEGACY_CONFIG_FILES } from '../constants.js';
|
|
22
|
-
import { archiveWorkspacesAndDependencies } from './workspaces.js';
|
|
23
21
|
async function uploadProjectFiles(accountId, projectName, filePath, uploadMessage, platformVersion, intermediateRepresentation) {
|
|
24
22
|
const accountIdentifier = uiAccountDescription(accountId) || `${accountId}`;
|
|
25
23
|
SpinniesManager.add('upload', {
|
|
@@ -64,11 +62,6 @@ export async function handleProjectUpload({ accountId, projectConfig, projectDir
|
|
|
64
62
|
}
|
|
65
63
|
const tempFile = tmp.fileSync({ postfix: '.zip' });
|
|
66
64
|
uiLogger.debug(lib.projectUpload.handleProjectUpload.compressing(tempFile.name));
|
|
67
|
-
// Find and parse all package.json files once (avoids duplicate filesystem walks)
|
|
68
|
-
const parsedPackageJsons = await findAndParsePackageJsonFiles(srcDir);
|
|
69
|
-
// Collect workspace directories and file: dependencies from parsed data
|
|
70
|
-
const workspaceMappings = await collectWorkspaceDirectories(parsedPackageJsons);
|
|
71
|
-
const fileDependencyMappings = await collectFileDependencies(parsedPackageJsons);
|
|
72
65
|
const output = fs.createWriteStream(tempFile.name);
|
|
73
66
|
const archive = archiver('zip');
|
|
74
67
|
const result = new Promise(resolve => output.on('close', async function () {
|
|
@@ -121,8 +114,6 @@ export async function handleProjectUpload({ accountId, projectConfig, projectDir
|
|
|
121
114
|
}
|
|
122
115
|
return ignored ? false : file;
|
|
123
116
|
});
|
|
124
|
-
// Archive workspaces and file: dependencies
|
|
125
|
-
await archiveWorkspacesAndDependencies(archive, srcDir, projectDir, workspaceMappings, fileDependencyMappings);
|
|
126
117
|
archive.finalize();
|
|
127
118
|
return result;
|
|
128
119
|
}
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@hubspot/cli",
|
|
3
|
-
"version": "8.0.3-experimental.1",
|
|
3
|
+
"version": "8.1.0-beta.0",
|
|
4
4
|
"description": "The official CLI for developing on HubSpot",
|
|
5
5
|
"license": "Apache-2.0",
|
|
6
6
|
"repository": "https://github.com/HubSpot/hubspot-cli",
|
|
@@ -8,7 +8,7 @@
|
|
|
8
8
|
"dependencies": {
|
|
9
9
|
"@hubspot/cms-dev-server": "1.2.16",
|
|
10
10
|
"@hubspot/local-dev-lib": "5.1.1",
|
|
11
|
-
"@hubspot/project-parsing-lib": "0.
|
|
11
|
+
"@hubspot/project-parsing-lib": "0.12.0",
|
|
12
12
|
"@hubspot/serverless-dev-runtime": "7.0.7",
|
|
13
13
|
"@hubspot/ui-extensions-dev-server": "1.1.8",
|
|
14
14
|
"@inquirer/prompts": "7.1.0",
|
|
@@ -1 +0,0 @@
|
|
|
1
|
-
export {};
|
|
@@ -1,207 +0,0 @@
|
|
|
1
|
-
import { describe, it, expect, vi } from 'vitest';
|
|
2
|
-
import path from 'path';
|
|
3
|
-
import fs from 'fs-extra';
|
|
4
|
-
import { computeExternalArchivePath, shortHash, updatePackageJsonInArchive, } from '../workspaces.js';
|
|
5
|
-
describe('computeExternalArchivePath', () => {
|
|
6
|
-
it('places external workspace in _workspaces/ with basename-hash', () => {
|
|
7
|
-
const localPath = '/Users/test/company-libs/utils';
|
|
8
|
-
const result = computeExternalArchivePath(localPath);
|
|
9
|
-
const expectedHash = shortHash(path.resolve(localPath));
|
|
10
|
-
expect(result).toBe(path.join('_workspaces', `utils-${expectedHash}`));
|
|
11
|
-
});
|
|
12
|
-
it('does not include an external/ subdirectory', () => {
|
|
13
|
-
const result = computeExternalArchivePath('/Users/test/libs/utils');
|
|
14
|
-
expect(result).not.toContain('external');
|
|
15
|
-
expect(result.startsWith('_workspaces')).toBe(true);
|
|
16
|
-
});
|
|
17
|
-
it('produces different paths for different directories with same basename', () => {
|
|
18
|
-
const path1 = computeExternalArchivePath('/Users/test/project-a/utils');
|
|
19
|
-
const path2 = computeExternalArchivePath('/Users/test/project-b/utils');
|
|
20
|
-
expect(path1).not.toBe(path2);
|
|
21
|
-
expect(path1).toContain('utils-');
|
|
22
|
-
expect(path2).toContain('utils-');
|
|
23
|
-
});
|
|
24
|
-
it('is deterministic', () => {
|
|
25
|
-
const localPath = '/Users/test/libs/utils';
|
|
26
|
-
expect(computeExternalArchivePath(localPath)).toBe(computeExternalArchivePath(localPath));
|
|
27
|
-
});
|
|
28
|
-
it('produces paths matching _workspaces/<name>-[8 hex chars]', () => {
|
|
29
|
-
const result = computeExternalArchivePath('/Users/test/libs/utils');
|
|
30
|
-
// Normalize path separators for cross-platform compatibility
|
|
31
|
-
const normalized = result.replace(/\\/g, '/');
|
|
32
|
-
expect(normalized).toMatch(/_workspaces\/utils-[a-f0-9]{8}$/);
|
|
33
|
-
});
|
|
34
|
-
it('uses the last path segment as basename', () => {
|
|
35
|
-
const result = computeExternalArchivePath('/Users/test/libs/@company/shared-utils');
|
|
36
|
-
expect(result).toContain('shared-utils-');
|
|
37
|
-
const normalized = result.replace(/\\/g, '/');
|
|
38
|
-
expect(normalized).toMatch(/_workspaces\/shared-utils-[a-f0-9]{8}$/);
|
|
39
|
-
});
|
|
40
|
-
it('never produces paths with .. segments', () => {
|
|
41
|
-
const testCases = [
|
|
42
|
-
'/Users/other/libs/utils',
|
|
43
|
-
'/completely/different/path',
|
|
44
|
-
'/Users/test/other-project/shared',
|
|
45
|
-
];
|
|
46
|
-
testCases.forEach(localPath => {
|
|
47
|
-
expect(computeExternalArchivePath(localPath)).not.toContain('..');
|
|
48
|
-
});
|
|
49
|
-
});
|
|
50
|
-
});
|
|
51
|
-
describe('shortHash', () => {
|
|
52
|
-
it('produces 8-character hex string', () => {
|
|
53
|
-
const hash = shortHash('/some/path');
|
|
54
|
-
expect(hash).toMatch(/^[a-f0-9]{8}$/);
|
|
55
|
-
});
|
|
56
|
-
it('is deterministic', () => {
|
|
57
|
-
const input = '/Users/test/workspace';
|
|
58
|
-
expect(shortHash(input)).toBe(shortHash(input));
|
|
59
|
-
});
|
|
60
|
-
it('produces different hashes for different inputs', () => {
|
|
61
|
-
const hash1 = shortHash('/path/a');
|
|
62
|
-
const hash2 = shortHash('/path/b');
|
|
63
|
-
expect(hash1).not.toBe(hash2);
|
|
64
|
-
});
|
|
65
|
-
});
|
|
66
|
-
describe('updatePackageJsonInArchive', () => {
|
|
67
|
-
const srcDir = '/project/src';
|
|
68
|
-
function createMockArchive() {
|
|
69
|
-
const appended = [];
|
|
70
|
-
const mock = {
|
|
71
|
-
append: (content, opts) => {
|
|
72
|
-
appended.push({ content, name: opts.name });
|
|
73
|
-
return mock;
|
|
74
|
-
},
|
|
75
|
-
};
|
|
76
|
-
return {
|
|
77
|
-
archive: mock,
|
|
78
|
-
getAppended: () => appended,
|
|
79
|
-
};
|
|
80
|
-
}
|
|
81
|
-
it('writes external workspace entries as absolute archive paths', async () => {
|
|
82
|
-
const packageJsonPath = '/project/src/app/functions/package.json';
|
|
83
|
-
const originalPackageJson = {
|
|
84
|
-
name: 'my-app',
|
|
85
|
-
workspaces: ['../../packages/utils'],
|
|
86
|
-
dependencies: {},
|
|
87
|
-
};
|
|
88
|
-
vi.spyOn(fs, 'existsSync').mockReturnValue(true);
|
|
89
|
-
vi.spyOn(fs, 'readFileSync').mockReturnValue(JSON.stringify(originalPackageJson));
|
|
90
|
-
const { archive, getAppended } = createMockArchive();
|
|
91
|
-
const packageWorkspaces = new Map();
|
|
92
|
-
packageWorkspaces.set(packageJsonPath, [
|
|
93
|
-
'/_workspaces/packages-utils-a1b2c3d4',
|
|
94
|
-
'/_workspaces/packages-core-e5f6a7b8',
|
|
95
|
-
]);
|
|
96
|
-
await updatePackageJsonInArchive(archive, srcDir, packageWorkspaces, new Map());
|
|
97
|
-
const appended = getAppended();
|
|
98
|
-
expect(appended).toHaveLength(1);
|
|
99
|
-
const written = JSON.parse(appended[0].content);
|
|
100
|
-
expect(written.workspaces).toEqual([
|
|
101
|
-
'/_workspaces/packages-utils-a1b2c3d4',
|
|
102
|
-
'/_workspaces/packages-core-e5f6a7b8',
|
|
103
|
-
]);
|
|
104
|
-
vi.restoreAllMocks();
|
|
105
|
-
});
|
|
106
|
-
it('preserves internal workspace entries as relative paths', async () => {
|
|
107
|
-
const packageJsonPath = '/project/src/app/functions/package.json';
|
|
108
|
-
const originalPackageJson = {
|
|
109
|
-
name: 'my-app',
|
|
110
|
-
workspaces: ['../packages/utils'],
|
|
111
|
-
};
|
|
112
|
-
vi.spyOn(fs, 'existsSync').mockReturnValue(true);
|
|
113
|
-
vi.spyOn(fs, 'readFileSync').mockReturnValue(JSON.stringify(originalPackageJson));
|
|
114
|
-
const { archive, getAppended } = createMockArchive();
|
|
115
|
-
const packageWorkspaces = new Map();
|
|
116
|
-
packageWorkspaces.set(packageJsonPath, ['../packages/utils']);
|
|
117
|
-
await updatePackageJsonInArchive(archive, srcDir, packageWorkspaces, new Map());
|
|
118
|
-
const written = JSON.parse(getAppended()[0].content);
|
|
119
|
-
expect(written.workspaces).toEqual(['../packages/utils']);
|
|
120
|
-
vi.restoreAllMocks();
|
|
121
|
-
});
|
|
122
|
-
it('writes mixed internal and external workspace entries', async () => {
|
|
123
|
-
const packageJsonPath = '/project/src/app/functions/package.json';
|
|
124
|
-
const originalPackageJson = {
|
|
125
|
-
name: 'my-app',
|
|
126
|
-
workspaces: ['../packages/utils', '/_workspaces/logger-a1b2c3d4'],
|
|
127
|
-
};
|
|
128
|
-
vi.spyOn(fs, 'existsSync').mockReturnValue(true);
|
|
129
|
-
vi.spyOn(fs, 'readFileSync').mockReturnValue(JSON.stringify(originalPackageJson));
|
|
130
|
-
const { archive, getAppended } = createMockArchive();
|
|
131
|
-
const packageWorkspaces = new Map();
|
|
132
|
-
packageWorkspaces.set(packageJsonPath, [
|
|
133
|
-
'../packages/utils',
|
|
134
|
-
'/_workspaces/logger-a1b2c3d4',
|
|
135
|
-
]);
|
|
136
|
-
await updatePackageJsonInArchive(archive, srcDir, packageWorkspaces, new Map());
|
|
137
|
-
const written = JSON.parse(getAppended()[0].content);
|
|
138
|
-
expect(written.workspaces).toEqual([
|
|
139
|
-
'../packages/utils',
|
|
140
|
-
'/_workspaces/logger-a1b2c3d4',
|
|
141
|
-
]);
|
|
142
|
-
vi.restoreAllMocks();
|
|
143
|
-
});
|
|
144
|
-
it('rewrites external file: dependencies as absolute archive paths', async () => {
|
|
145
|
-
const packageJsonPath = '/project/src/app/functions/package.json';
|
|
146
|
-
const originalPackageJson = {
|
|
147
|
-
name: 'my-app',
|
|
148
|
-
dependencies: {
|
|
149
|
-
'@company/logger': 'file:../../external/logger',
|
|
150
|
-
react: '^18.0.0',
|
|
151
|
-
},
|
|
152
|
-
};
|
|
153
|
-
vi.spyOn(fs, 'existsSync').mockReturnValue(true);
|
|
154
|
-
vi.spyOn(fs, 'readFileSync').mockReturnValue(JSON.stringify(originalPackageJson));
|
|
155
|
-
const { archive, getAppended } = createMockArchive();
|
|
156
|
-
const packageFileDeps = new Map();
|
|
157
|
-
packageFileDeps.set(packageJsonPath, new Map([['@company/logger', '_workspaces/logger-a1b2c3d4']]));
|
|
158
|
-
await updatePackageJsonInArchive(archive, srcDir, new Map(), packageFileDeps);
|
|
159
|
-
const appended = getAppended();
|
|
160
|
-
expect(appended).toHaveLength(1);
|
|
161
|
-
const written = JSON.parse(appended[0].content);
|
|
162
|
-
expect(written.dependencies['@company/logger']).toBe('file:/_workspaces/logger-a1b2c3d4');
|
|
163
|
-
expect(written.dependencies['react']).toBe('^18.0.0');
|
|
164
|
-
vi.restoreAllMocks();
|
|
165
|
-
});
|
|
166
|
-
it('leaves internal file: dependencies untouched when not in packageFileDeps', async () => {
|
|
167
|
-
const originalPackageJson = {
|
|
168
|
-
name: 'my-app',
|
|
169
|
-
dependencies: {
|
|
170
|
-
'@internal/utils': 'file:../packages/utils',
|
|
171
|
-
react: '^18.0.0',
|
|
172
|
-
},
|
|
173
|
-
};
|
|
174
|
-
vi.spyOn(fs, 'existsSync').mockReturnValue(true);
|
|
175
|
-
vi.spyOn(fs, 'readFileSync').mockReturnValue(JSON.stringify(originalPackageJson));
|
|
176
|
-
const { archive, getAppended } = createMockArchive();
|
|
177
|
-
await updatePackageJsonInArchive(archive, srcDir, new Map(), new Map());
|
|
178
|
-
// Nothing to update, so no package.json should be appended
|
|
179
|
-
expect(getAppended()).toHaveLength(0);
|
|
180
|
-
vi.restoreAllMocks();
|
|
181
|
-
});
|
|
182
|
-
it('uses same absolute path regardless of package.json depth', async () => {
|
|
183
|
-
const shallowPath = '/project/src/package.json';
|
|
184
|
-
const deepPath = '/project/src/app/functions/nested/package.json';
|
|
185
|
-
const archivePath = '/_workspaces/utils-a1b2c3d4';
|
|
186
|
-
const makePackageJson = () => ({
|
|
187
|
-
name: 'test',
|
|
188
|
-
workspaces: ['placeholder'],
|
|
189
|
-
});
|
|
190
|
-
vi.spyOn(fs, 'existsSync').mockReturnValue(true);
|
|
191
|
-
const { archive: archive1, getAppended: getAppended1 } = createMockArchive();
|
|
192
|
-
vi.spyOn(fs, 'readFileSync').mockReturnValue(JSON.stringify(makePackageJson()));
|
|
193
|
-
const workspaces1 = new Map();
|
|
194
|
-
workspaces1.set(shallowPath, [archivePath]);
|
|
195
|
-
await updatePackageJsonInArchive(archive1, srcDir, workspaces1, new Map());
|
|
196
|
-
const { archive: archive2, getAppended: getAppended2 } = createMockArchive();
|
|
197
|
-
vi.spyOn(fs, 'readFileSync').mockReturnValue(JSON.stringify(makePackageJson()));
|
|
198
|
-
const workspaces2 = new Map();
|
|
199
|
-
workspaces2.set(deepPath, [archivePath]);
|
|
200
|
-
await updatePackageJsonInArchive(archive2, srcDir, workspaces2, new Map());
|
|
201
|
-
const written1 = JSON.parse(getAppended1()[0].content);
|
|
202
|
-
const written2 = JSON.parse(getAppended2()[0].content);
|
|
203
|
-
expect(written1.workspaces).toEqual(['/_workspaces/utils-a1b2c3d4']);
|
|
204
|
-
expect(written2.workspaces).toEqual(['/_workspaces/utils-a1b2c3d4']);
|
|
205
|
-
vi.restoreAllMocks();
|
|
206
|
-
});
|
|
207
|
-
});
|
|
@@ -1,36 +0,0 @@
|
|
|
1
|
-
import archiver from 'archiver';
|
|
2
|
-
import { WorkspaceMapping, FileDependencyMapping } from '@hubspot/project-parsing-lib/workspaces';
|
|
3
|
-
/**
|
|
4
|
-
* Result of archiving workspaces and file dependencies
|
|
5
|
-
*/
|
|
6
|
-
export type WorkspaceArchiveResult = {
|
|
7
|
-
packageWorkspaces: Map<string, string[]>;
|
|
8
|
-
packageFileDeps: Map<string, Map<string, string>>;
|
|
9
|
-
};
|
|
10
|
-
/**
|
|
11
|
-
* Generates a short hash of the input string for use in workspace paths.
|
|
12
|
-
* Uses SHA256 truncated to 8 hex characters (4 billion possibilities).
|
|
13
|
-
*/
|
|
14
|
-
export declare function shortHash(input: string): string;
|
|
15
|
-
/**
|
|
16
|
-
* Determines the archive path for an external workspace or file: dependency.
|
|
17
|
-
* Produces `_workspaces/<basename>-<hash>` with no subdirectory.
|
|
18
|
-
* The hash prevents collisions between different directories with the same basename.
|
|
19
|
-
*/
|
|
20
|
-
export declare function computeExternalArchivePath(absolutePath: string): string;
|
|
21
|
-
/**
|
|
22
|
-
* Updates package.json files in the archive to reflect new workspace and file: dependency paths.
|
|
23
|
-
*
|
|
24
|
-
* Workspace entries in packageWorkspaces are already in final form:
|
|
25
|
-
* - Internal workspaces: relative paths (e.g. "../packages/utils")
|
|
26
|
-
* - External workspaces: absolute paths (e.g. "/_workspaces/logger-abc")
|
|
27
|
-
*
|
|
28
|
-
* Only external file: dependencies appear in packageFileDeps; internal ones
|
|
29
|
-
* keep their original file: references and are left untouched.
|
|
30
|
-
*/
|
|
31
|
-
export declare function updatePackageJsonInArchive(archive: archiver.Archiver, srcDir: string, packageWorkspaces: Map<string, string[]>, packageFileDeps: Map<string, Map<string, string>>): Promise<void>;
|
|
32
|
-
/**
|
|
33
|
-
* Main orchestration function that handles archiving of workspaces and file dependencies.
|
|
34
|
-
* This is the clean integration point for upload.ts.
|
|
35
|
-
*/
|
|
36
|
-
export declare function archiveWorkspacesAndDependencies(archive: archiver.Archiver, srcDir: string, projectDir: string, workspaceMappings: WorkspaceMapping[], fileDependencyMappings: FileDependencyMapping[]): Promise<WorkspaceArchiveResult>;
|
|
@@ -1,224 +0,0 @@
|
|
|
1
|
-
import fs from 'fs-extra';
|
|
2
|
-
import path from 'path';
|
|
3
|
-
import crypto from 'crypto';
|
|
4
|
-
import { shouldIgnoreFile } from '@hubspot/local-dev-lib/ignoreRules';
|
|
5
|
-
import { getPackableFiles, } from '@hubspot/project-parsing-lib/workspaces';
|
|
6
|
-
import { uiLogger } from '../ui/logger.js';
|
|
7
|
-
import { lib } from '../../lang/en.js';
|
|
8
|
-
/**
|
|
9
|
-
* Generates a short hash of the input string for use in workspace paths.
|
|
10
|
-
* Uses SHA256 truncated to 8 hex characters (4 billion possibilities).
|
|
11
|
-
*/
|
|
12
|
-
export function shortHash(input) {
|
|
13
|
-
return crypto.createHash('sha256').update(input).digest('hex').slice(0, 8);
|
|
14
|
-
}
|
|
15
|
-
/**
|
|
16
|
-
* Determines the archive path for an external workspace or file: dependency.
|
|
17
|
-
* Produces `_workspaces/<basename>-<hash>` with no subdirectory.
|
|
18
|
-
* The hash prevents collisions between different directories with the same basename.
|
|
19
|
-
*/
|
|
20
|
-
export function computeExternalArchivePath(absolutePath) {
|
|
21
|
-
const resolved = path.resolve(absolutePath);
|
|
22
|
-
const name = path.basename(resolved);
|
|
23
|
-
return path.join('_workspaces', `${name}-${shortHash(resolved)}`);
|
|
24
|
-
}
|
|
25
|
-
/**
|
|
26
|
-
* Returns true if dir is inside srcDir (i.e. it will already be included
|
|
27
|
-
* in the archive from the srcDir walk and must not be copied again).
|
|
28
|
-
*/
|
|
29
|
-
function isInsideSrcDir(dir, srcDir) {
|
|
30
|
-
const rel = path.relative(path.resolve(srcDir), path.resolve(dir));
|
|
31
|
-
return !rel.startsWith('..') && !path.isAbsolute(rel);
|
|
32
|
-
}
|
|
33
|
-
/**
|
|
34
|
-
* Creates a file filter function for workspace archiving.
|
|
35
|
-
* Filters files based on packable files list and ignore rules.
|
|
36
|
-
*/
|
|
37
|
-
function createWorkspaceFileFilter(packableFiles) {
|
|
38
|
-
return (file) => {
|
|
39
|
-
if (packableFiles.size > 0 && !packableFiles.has(file.name)) {
|
|
40
|
-
uiLogger.debug(lib.projectUpload.handleProjectUpload.fileFiltered(file.name));
|
|
41
|
-
return false;
|
|
42
|
-
}
|
|
43
|
-
const ignored = shouldIgnoreFile(file.name, true);
|
|
44
|
-
if (ignored) {
|
|
45
|
-
uiLogger.debug(lib.projectUpload.handleProjectUpload.fileFiltered(file.name));
|
|
46
|
-
return false;
|
|
47
|
-
}
|
|
48
|
-
return file;
|
|
49
|
-
};
|
|
50
|
-
}
|
|
51
|
-
/**
|
|
52
|
-
* Archives workspace directories and returns mapping information.
|
|
53
|
-
*
|
|
54
|
-
* Internal workspaces (inside srcDir) are not archived — they are already
|
|
55
|
-
* included via the srcDir walk. Their relative paths (from the package.json
|
|
56
|
-
* directory to the workspace directory) are stored directly in the entries.
|
|
57
|
-
*
|
|
58
|
-
* External workspaces (outside srcDir) are copied to `_workspaces/<name>-<hash>`
|
|
59
|
-
* and their absolute archive paths (`/_workspaces/<name>-<hash>`) are stored.
|
|
60
|
-
*/
|
|
61
|
-
async function archiveWorkspaceDirectories(archive, srcDir, workspaceMappings) {
|
|
62
|
-
const externalArchivePaths = new Map(); // resolvedDir -> archivePath
|
|
63
|
-
const archivePathToDir = new Map(); // archivePath -> resolvedDir (collision detection)
|
|
64
|
-
const packageWorkspaceEntries = new Map();
|
|
65
|
-
const externalsToArchive = [];
|
|
66
|
-
for (const mapping of workspaceMappings) {
|
|
67
|
-
const { workspaceDir, sourcePackageJsonPath } = mapping;
|
|
68
|
-
if (!packageWorkspaceEntries.has(sourcePackageJsonPath)) {
|
|
69
|
-
packageWorkspaceEntries.set(sourcePackageJsonPath, []);
|
|
70
|
-
}
|
|
71
|
-
if (isInsideSrcDir(workspaceDir, srcDir)) {
|
|
72
|
-
// Internal: already in archive from srcDir walk.
|
|
73
|
-
// Store the relative path from the package.json directory so npm can resolve it.
|
|
74
|
-
const relPath = path.relative(path.dirname(sourcePackageJsonPath), path.resolve(workspaceDir));
|
|
75
|
-
packageWorkspaceEntries.get(sourcePackageJsonPath).push(relPath);
|
|
76
|
-
}
|
|
77
|
-
else {
|
|
78
|
-
// External: archive to _workspaces/<name>-<hash>.
|
|
79
|
-
const archivePath = computeExternalArchivePath(workspaceDir);
|
|
80
|
-
const resolvedDir = path.resolve(workspaceDir);
|
|
81
|
-
// Detect hash collisions (different dirs mapping to the same archive path)
|
|
82
|
-
const existing = archivePathToDir.get(archivePath);
|
|
83
|
-
if (existing && existing !== resolvedDir) {
|
|
84
|
-
throw new Error(lib.projectUpload.handleProjectUpload.workspaceCollision(archivePath, workspaceDir, existing));
|
|
85
|
-
}
|
|
86
|
-
if (!externalArchivePaths.has(resolvedDir)) {
|
|
87
|
-
externalArchivePaths.set(resolvedDir, archivePath);
|
|
88
|
-
archivePathToDir.set(archivePath, resolvedDir);
|
|
89
|
-
externalsToArchive.push({ dir: workspaceDir, archivePath });
|
|
90
|
-
}
|
|
91
|
-
// Store absolute archive path so DFS can locate it in the project tree
|
|
92
|
-
packageWorkspaceEntries
|
|
93
|
-
.get(sourcePackageJsonPath)
|
|
94
|
-
.push(`/${archivePath}`);
|
|
95
|
-
}
|
|
96
|
-
}
|
|
97
|
-
// Fetch packable files in parallel (I/O optimization)
|
|
98
|
-
const withPackableFiles = await Promise.all(externalsToArchive.map(async (item) => ({
|
|
99
|
-
...item,
|
|
100
|
-
packableFiles: await getPackableFiles(item.dir),
|
|
101
|
-
})));
|
|
102
|
-
// Archive directories sequentially (archiver requires sequential operations)
|
|
103
|
-
for (const { dir, archivePath, packableFiles } of withPackableFiles) {
|
|
104
|
-
uiLogger.log(lib.projectUpload.handleProjectUpload.workspaceIncluded(dir, archivePath));
|
|
105
|
-
archive.directory(dir, archivePath, createWorkspaceFileFilter(packableFiles));
|
|
106
|
-
}
|
|
107
|
-
return { externalArchivePaths, packageWorkspaceEntries };
|
|
108
|
-
}
|
|
109
|
-
/**
|
|
110
|
-
* Archives file: dependencies and returns mapping information.
|
|
111
|
-
*
|
|
112
|
-
* Internal file: dependencies (inside srcDir) are skipped — their original
|
|
113
|
-
* `file:` references in package.json remain valid after upload.
|
|
114
|
-
*
|
|
115
|
-
* External file: dependencies are archived to `_workspaces/<name>-<hash>`
|
|
116
|
-
* and tracked in the returned map so package.json can be rewritten.
|
|
117
|
-
*/
|
|
118
|
-
async function archiveFileDependencies(archive, srcDir, fileDependencyMappings, externalArchivePaths) {
|
|
119
|
-
const packageFileDeps = new Map();
|
|
120
|
-
const toArchive = [];
|
|
121
|
-
for (const mapping of fileDependencyMappings) {
|
|
122
|
-
const { packageName, localPath, sourcePackageJsonPath } = mapping;
|
|
123
|
-
if (isInsideSrcDir(localPath, srcDir)) {
|
|
124
|
-
// Internal: original file: reference stays unchanged, nothing to do
|
|
125
|
-
continue;
|
|
126
|
-
}
|
|
127
|
-
// External: archive to _workspaces/<name>-<hash>
|
|
128
|
-
const archivePath = computeExternalArchivePath(localPath);
|
|
129
|
-
const resolvedPath = path.resolve(localPath);
|
|
130
|
-
if (!packageFileDeps.has(sourcePackageJsonPath)) {
|
|
131
|
-
packageFileDeps.set(sourcePackageJsonPath, new Map());
|
|
132
|
-
}
|
|
133
|
-
packageFileDeps.get(sourcePackageJsonPath).set(packageName, archivePath);
|
|
134
|
-
// Only archive each unique path once
|
|
135
|
-
if (!externalArchivePaths.has(resolvedPath)) {
|
|
136
|
-
externalArchivePaths.set(resolvedPath, archivePath);
|
|
137
|
-
toArchive.push({ localPath, archivePath, packageName });
|
|
138
|
-
}
|
|
139
|
-
}
|
|
140
|
-
// Fetch packable files in parallel (I/O optimization)
|
|
141
|
-
const withPackableFiles = await Promise.all(toArchive.map(async (item) => ({
|
|
142
|
-
...item,
|
|
143
|
-
packableFiles: await getPackableFiles(item.localPath),
|
|
144
|
-
})));
|
|
145
|
-
// Archive directories sequentially (archiver requires sequential operations)
|
|
146
|
-
for (const { localPath, archivePath, packageName, packableFiles, } of withPackableFiles) {
|
|
147
|
-
uiLogger.log(lib.projectUpload.handleProjectUpload.fileDependencyIncluded(packageName, localPath, archivePath));
|
|
148
|
-
archive.directory(localPath, archivePath, createWorkspaceFileFilter(packableFiles));
|
|
149
|
-
}
|
|
150
|
-
return packageFileDeps;
|
|
151
|
-
}
|
|
152
|
-
/**
|
|
153
|
-
* Updates package.json files in the archive to reflect new workspace and file: dependency paths.
|
|
154
|
-
*
|
|
155
|
-
* Workspace entries in packageWorkspaces are already in final form:
|
|
156
|
-
* - Internal workspaces: relative paths (e.g. "../packages/utils")
|
|
157
|
-
* - External workspaces: absolute paths (e.g. "/_workspaces/logger-abc")
|
|
158
|
-
*
|
|
159
|
-
* Only external file: dependencies appear in packageFileDeps; internal ones
|
|
160
|
-
* keep their original file: references and are left untouched.
|
|
161
|
-
*/
|
|
162
|
-
export async function updatePackageJsonInArchive(archive, srcDir, packageWorkspaces, packageFileDeps) {
|
|
163
|
-
// Collect all package.json paths that need updating
|
|
164
|
-
const allPackageJsonPaths = new Set([
|
|
165
|
-
...packageWorkspaces.keys(),
|
|
166
|
-
...packageFileDeps.keys(),
|
|
167
|
-
]);
|
|
168
|
-
for (const packageJsonPath of allPackageJsonPaths) {
|
|
169
|
-
if (!fs.existsSync(packageJsonPath)) {
|
|
170
|
-
continue;
|
|
171
|
-
}
|
|
172
|
-
let packageJson;
|
|
173
|
-
try {
|
|
174
|
-
packageJson = JSON.parse(fs.readFileSync(packageJsonPath, 'utf8'));
|
|
175
|
-
}
|
|
176
|
-
catch (e) {
|
|
177
|
-
uiLogger.warn(lib.projectUpload.handleProjectUpload.malformedPackageJson(packageJsonPath, e instanceof Error ? e.message : String(e)));
|
|
178
|
-
continue;
|
|
179
|
-
}
|
|
180
|
-
const relativePackageJsonPath = path.relative(srcDir, packageJsonPath);
|
|
181
|
-
let modified = false;
|
|
182
|
-
// Update workspaces field — entries are already in their final form
|
|
183
|
-
const workspaceEntries = packageWorkspaces.get(packageJsonPath);
|
|
184
|
-
if (workspaceEntries && packageJson.workspaces) {
|
|
185
|
-
packageJson.workspaces = workspaceEntries;
|
|
186
|
-
modified = true;
|
|
187
|
-
uiLogger.debug(lib.projectUpload.handleProjectUpload.updatingPackageJsonWorkspaces(relativePackageJsonPath));
|
|
188
|
-
uiLogger.debug(lib.projectUpload.handleProjectUpload.updatedWorkspaces(workspaceEntries.join(', ')));
|
|
189
|
-
}
|
|
190
|
-
// Update external file: dependencies; internal ones are left untouched
|
|
191
|
-
const fileDeps = packageFileDeps.get(packageJsonPath);
|
|
192
|
-
if (fileDeps && fileDeps.size > 0 && packageJson.dependencies) {
|
|
193
|
-
for (const [packageName, archivePath] of fileDeps.entries()) {
|
|
194
|
-
if (packageJson.dependencies[packageName]?.startsWith('file:')) {
|
|
195
|
-
packageJson.dependencies[packageName] = `file:/${archivePath}`;
|
|
196
|
-
modified = true;
|
|
197
|
-
uiLogger.debug(lib.projectUpload.handleProjectUpload.updatedFileDependency(packageName, `/${archivePath}`));
|
|
198
|
-
}
|
|
199
|
-
}
|
|
200
|
-
}
|
|
201
|
-
if (modified) {
|
|
202
|
-
// Add modified package.json to archive (will replace the original)
|
|
203
|
-
archive.append(JSON.stringify(packageJson, null, 2), {
|
|
204
|
-
name: relativePackageJsonPath,
|
|
205
|
-
});
|
|
206
|
-
}
|
|
207
|
-
}
|
|
208
|
-
// Ensure all append operations are queued before finalize is called
|
|
209
|
-
// Use setImmediate to yield control and let archiver process the queue
|
|
210
|
-
await new Promise(resolve => setImmediate(resolve));
|
|
211
|
-
}
|
|
212
|
-
/**
|
|
213
|
-
* Main orchestration function that handles archiving of workspaces and file dependencies.
|
|
214
|
-
* This is the clean integration point for upload.ts.
|
|
215
|
-
*/
|
|
216
|
-
export async function archiveWorkspacesAndDependencies(archive, srcDir, projectDir, workspaceMappings, fileDependencyMappings) {
|
|
217
|
-
// Archive workspace directories (internal ones are skipped, externals are copied)
|
|
218
|
-
const { externalArchivePaths, packageWorkspaceEntries } = await archiveWorkspaceDirectories(archive, srcDir, workspaceMappings);
|
|
219
|
-
// Archive external file: dependencies (internals are skipped)
|
|
220
|
-
const packageFileDeps = await archiveFileDependencies(archive, srcDir, fileDependencyMappings, externalArchivePaths);
|
|
221
|
-
// Update package.json files with new paths
|
|
222
|
-
await updatePackageJsonInArchive(archive, srcDir, packageWorkspaceEntries, packageFileDeps);
|
|
223
|
-
return { packageWorkspaces: packageWorkspaceEntries, packageFileDeps };
|
|
224
|
-
}
|