@launchframe/cli 0.1.11 → 1.0.0-beta.2
This diff shows the changes between publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
- package/README.md +40 -11
- package/package.json +5 -5
- package/src/commands/cache.js +102 -0
- package/src/commands/deploy-configure.js +21 -4
- package/src/commands/deploy-init.js +24 -58
- package/src/commands/deploy-set-env.js +68 -91
- package/src/commands/docker-destroy.js +45 -15
- package/src/commands/docker-up.js +42 -16
- package/src/commands/help.js +11 -1
- package/src/commands/init.js +89 -55
- package/src/commands/service.js +64 -40
- package/src/commands/waitlist-deploy.js +2 -2
- package/src/commands/waitlist-logs.js +1 -2
- package/src/commands/waitlist-up.js +50 -15
- package/src/generator.js +13 -4
- package/src/index.js +16 -2
- package/src/prompts.js +12 -0
- package/src/services/registry.js +8 -6
- package/src/services/variant-config.js +135 -37
- package/src/utils/docker-helper.js +66 -44
- package/src/utils/github-access.js +67 -0
- package/src/utils/module-cache.js +274 -0
- package/src/utils/project-helpers.js +1 -1
- package/src/utils/section-replacer.js +32 -15
- package/src/utils/variable-replacer.js +7 -2
- package/src/utils/variant-processor.js +24 -12
package/src/utils/module-cache.js
@@ -0,0 +1,274 @@
+const path = require('path');
+const fs = require('fs-extra');
+const os = require('os');
+const { execSync } = require('child_process');
+const chalk = require('chalk');
+
+const MODULES_REPO = 'git@github.com:launchframe-dev/modules.git';
+const BRANCH = 'main';
+
+/**
+ * Get the cache directory path
+ * Works cross-platform (Linux, Mac, Windows)
+ * @returns {string} Cache directory path
+ */
+function getCacheDir() {
+  const homeDir = os.homedir();
+  // Use same path structure on all platforms
+  // Windows: C:\Users\username\.launchframe\cache\modules
+  // Mac/Linux: /home/username/.launchframe/cache/modules
+  return path.join(homeDir, '.launchframe', 'cache', 'modules');
+}
+
+/**
+ * Check if cache exists and is valid
+ * @returns {boolean} True if cache exists
+ */
+async function cacheExists() {
+  const cacheDir = getCacheDir();
+  const gitDir = path.join(cacheDir, '.git');
+  return await fs.pathExists(gitDir);
+}
+
+/**
+ * Initialize cache with sparse checkout
+ * Clones only the repository structure, no modules yet
+ * @returns {Promise<void>}
+ */
+async function initializeCache() {
+  const cacheDir = getCacheDir();
+
+  console.log(chalk.blue('🔄 Initializing module cache...'));
+
+  try {
+    // Ensure parent directory exists
+    await fs.ensureDir(path.dirname(cacheDir));
+
+    // Sparse clone (only root files, no modules)
+    execSync(
+      `git clone --sparse --depth 1 --branch ${BRANCH} ${MODULES_REPO} "${cacheDir}"`,
+      {
+        stdio: 'pipe', // Hide output
+        timeout: 60000 // 1 minute timeout
+      }
+    );
+
+    // Configure sparse checkout (starts with empty set)
+    execSync('git sparse-checkout init --cone', {
+      cwd: cacheDir,
+      stdio: 'pipe'
+    });
+
+    console.log(chalk.green('✓ Cache initialized'));
+  } catch (error) {
+    // Clean up partial clone on failure
+    await fs.remove(cacheDir);
+    throw new Error(`Failed to initialize cache: ${error.message}`);
+  }
+}
+
+/**
+ * Update cache to latest version from main branch
+ * Requires internet connection
+ * @returns {Promise<void>}
+ */
+async function updateCache() {
+  const cacheDir = getCacheDir();
+
+  console.log(chalk.blue('🔄 Updating module cache...'));
+
+  try {
+    execSync('git pull origin main', {
+      cwd: cacheDir,
+      stdio: 'pipe',
+      timeout: 30000 // 30 seconds
+    });
+
+    console.log(chalk.green('✓ Cache updated'));
+  } catch (error) {
+    throw new Error(`Failed to update cache: ${error.message}`);
+  }
+}
+
+/**
+ * Expand sparse checkout to include specific modules
+ * @param {string[]} moduleNames - Array of module names to expand
+ * @returns {Promise<void>}
+ */
+async function expandModules(moduleNames) {
+  const cacheDir = getCacheDir();
+
+  console.log(chalk.blue(`📦 Loading modules: ${moduleNames.join(', ')}...`));
+
+  try {
+    // Get current sparse checkout list
+    let currentModules = [];
+    try {
+      const output = execSync('git sparse-checkout list', {
+        cwd: cacheDir,
+        stdio: 'pipe',
+        encoding: 'utf8'
+      });
+      currentModules = output.trim().split('\n').filter(Boolean);
+    } catch (error) {
+      // No modules yet, that's fine
+    }
+
+    // Add new modules to the list
+    const allModules = [...new Set([...currentModules, ...moduleNames])];
+
+    // Set sparse checkout to include all modules
+    execSync(`git sparse-checkout set ${allModules.join(' ')}`, {
+      cwd: cacheDir,
+      stdio: 'pipe',
+      timeout: 60000 // 1 minute (may need to download files)
+    });
+
+    console.log(chalk.green('✓ Modules loaded'));
+  } catch (error) {
+    throw new Error(`Failed to expand modules: ${error.message}`);
+  }
+}
+
+/**
+ * Get path to a specific module in the cache
+ * @param {string} moduleName - Module name (e.g., 'backend', 'admin-portal')
+ * @returns {string} Absolute path to module
+ */
+function getModulePath(moduleName) {
+  const cacheDir = getCacheDir();
+  return path.join(cacheDir, moduleName);
+}
+
+/**
+ * Get cache root path
+ * @returns {string} Absolute path to cache root
+ */
+function getCachePath() {
+  return getCacheDir();
+}
+
+/**
+ * Clear the entire module cache
+ * Useful for troubleshooting or forcing fresh download
+ * @returns {Promise<void>}
+ */
+async function clearCache() {
+  const cacheDir = getCacheDir();
+
+  if (await fs.pathExists(cacheDir)) {
+    await fs.remove(cacheDir);
+    console.log(chalk.green('✓ Cache cleared'));
+  } else {
+    console.log(chalk.gray('Cache is already empty'));
+  }
+}
+
+/**
+ * Get cache information (size, last update, modules)
+ * @returns {Promise<{exists: boolean, path: string, size?: number, modules?: string[], lastUpdate?: Date}>}
+ */
+async function getCacheInfo() {
+  const cacheDir = getCacheDir();
+  const info = {
+    exists: false,
+    path: cacheDir
+  };
+
+  if (!(await cacheExists())) {
+    return info;
+  }
+
+  info.exists = true;
+
+  try {
+    // Get cache size (du command works on Unix/Mac, different on Windows)
+    if (process.platform === 'win32') {
+      // Windows: use powershell to get size
+      const output = execSync(
+        `powershell -command "(Get-ChildItem -Path '${cacheDir}' -Recurse | Measure-Object -Property Length -Sum).Sum"`,
+        { encoding: 'utf8', stdio: 'pipe' }
+      );
+      info.size = parseInt(output.trim());
+    } else {
+      // Unix/Mac: use du
+      const output = execSync(`du -sb "${cacheDir}"`, {
+        encoding: 'utf8',
+        stdio: 'pipe'
+      });
+      info.size = parseInt(output.split('\t')[0]);
+    }
+  } catch (error) {
+    // Size calculation failed, not critical
+  }
+
+  try {
+    // Get list of expanded modules
+    const output = execSync('git sparse-checkout list', {
+      cwd: cacheDir,
+      encoding: 'utf8',
+      stdio: 'pipe'
+    });
+    info.modules = output.trim().split('\n').filter(Boolean);
+  } catch (error) {
+    info.modules = [];
+  }
+
+  try {
+    // Get last update time from git log
+    const output = execSync('git log -1 --format=%cd --date=iso', {
+      cwd: cacheDir,
+      encoding: 'utf8',
+      stdio: 'pipe'
+    });
+    info.lastUpdate = new Date(output.trim());
+  } catch (error) {
+    // Last update time failed, not critical
+  }
+
+  return info;
+}
+
+/**
+ * Ensure cache is ready (initialize if needed, update if exists)
+ * This is the main entry point for cache management
+ * @param {string[]} requiredModules - Modules needed for the operation
+ * @returns {Promise<string>} Path to cache root
+ */
+async function ensureCacheReady(requiredModules) {
+  try {
+    if (!(await cacheExists())) {
+      // Cache doesn't exist, initialize it
+      await initializeCache();
+    } else {
+      // Cache exists, update it
+      await updateCache();
+    }
+
+    // Expand sparse checkout to include required modules
+    await expandModules(requiredModules);
+
+    return getCachePath();
+  } catch (error) {
+    // If we fail and it's a network error, provide helpful message
+    if (error.message.includes('Connection') || error.message.includes('timed out')) {
+      throw new Error(
+        'Cannot connect to GitHub. Please check your internet connection and try again.'
+      );
+    }
+    throw error;
+  }
+}
+
+module.exports = {
+  getCacheDir,
+  cacheExists,
+  initializeCache,
+  updateCache,
+  expandModules,
+  getModulePath,
+  getCachePath,
+  clearCache,
+  getCacheInfo,
+  ensureCacheReady
+};
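The new module-cache.js keeps a sparse git clone of the modules repository under ~/.launchframe/cache/modules and only checks out the modules a command actually needs. A minimal usage sketch of the exported API; the calling function and the module names are illustrative, not taken from the package:

// Hypothetical caller; uses only functions exported by module-cache.js above.
const moduleCache = require('./utils/module-cache');

async function prepareModules() {
  // Clone or update the cache, then expand the sparse checkout to the needed modules
  await moduleCache.ensureCacheReady(['backend', 'admin-portal']);

  // Resolve where a cached module lives on disk
  const backendPath = moduleCache.getModulePath('backend');

  // Inspect cache state (path, size, expanded modules, last update)
  const info = await moduleCache.getCacheInfo();
  return { backendPath, info };
}

prepareModules().catch((error) => {
  console.error(error.message);
  process.exit(1);
});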
package/src/utils/section-replacer.js
@@ -9,32 +9,46 @@ const fs = require('fs-extra');
 async function replaceSection(filePath, sectionName, newContent) {
   const content = await fs.readFile(filePath, 'utf8');
 
-  // Try
-  const
-  const
+  // Try all comment formats (// for JS/TS, {/* */} for JSX, # for YAML/Shell)
+  const startMarkerSlash = `// ${sectionName}_START`;
+  const endMarkerSlash = `// ${sectionName}_END`;
   const startMarkerJSX = `{/* ${sectionName}_START */}`;
   const endMarkerJSX = `{/* ${sectionName}_END */}`;
+  const startMarkerHash = `# ${sectionName}_START`;
+  const endMarkerHash = `# ${sectionName}_END`;
 
-  let startIndex = content.indexOf(
-  let endIndex = content.indexOf(
-  let isJSX = false;
+  let startIndex = content.indexOf(startMarkerSlash);
+  let endIndex = content.indexOf(endMarkerSlash);
 
-  // If not found with
+  // If not found with // comments, try JSX comments
   if (startIndex === -1 || endIndex === -1) {
     startIndex = content.indexOf(startMarkerJSX);
     endIndex = content.indexOf(endMarkerJSX);
-
+  }
+
+  // If not found with JSX comments, try # comments (YAML/Shell)
+  if (startIndex === -1 || endIndex === -1) {
+    startIndex = content.indexOf(startMarkerHash);
+    endIndex = content.indexOf(endMarkerHash);
   }
 
   if (startIndex === -1 || endIndex === -1) {
     throw new Error(`Section markers not found: ${sectionName} in ${filePath}`);
   }
 
+  // Find the start of the start marker line (beginning of line, not just the marker)
+  let lineStart = content.lastIndexOf('\n', startIndex - 1);
+  if (lineStart === -1) {
+    lineStart = 0; // Marker is on first line
+  } else {
+    lineStart += 1; // Move past the newline to the start of the line
+  }
+
   // Find the end of the end marker line
   const endLineEnd = content.indexOf('\n', endIndex);
 
-  // Construct new content - exclude both marker lines
-  const before = content.substring(0,
+  // Construct new content - exclude both marker lines (including leading whitespace)
+  const before = content.substring(0, lineStart);
   const after = content.substring(endLineEnd + 1);
   const replaced = before + newContent + after;
 
@@ -50,16 +64,19 @@ async function replaceSection(filePath, sectionName, newContent) {
 async function hasSection(filePath, sectionName) {
   try {
     const content = await fs.readFile(filePath, 'utf8');
-    const
-    const
+    const startMarkerSlash = `// ${sectionName}_START`;
+    const endMarkerSlash = `// ${sectionName}_END`;
     const startMarkerJSX = `{/* ${sectionName}_START */}`;
     const endMarkerJSX = `{/* ${sectionName}_END */}`;
+    const startMarkerHash = `# ${sectionName}_START`;
+    const endMarkerHash = `# ${sectionName}_END`;
 
-    // Check
-    const
+    // Check all comment formats
+    const hasSlash = content.includes(startMarkerSlash) && content.includes(endMarkerSlash);
     const hasJSX = content.includes(startMarkerJSX) && content.includes(endMarkerJSX);
+    const hasHash = content.includes(startMarkerHash) && content.includes(endMarkerHash);
 
-    return
+    return hasSlash || hasJSX || hasHash;
   } catch (error) {
     return false;
   }
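Both helpers in section-replacer.js now fall back through three marker styles: // NAME_START, {/* NAME_START */}, and # NAME_START. A sketch of calling them against a YAML file, assuming both functions are exported from the module; the file path, section name, and replacement block are made up:

// Illustrative only; assumes section-replacer.js exports both helpers.
const { hasSection, replaceSection } = require('./utils/section-replacer');

async function swapComposeSection(newYamlBlock) {
  const file = 'docker-compose.yml';
  // Matches "# WAITLIST_SERVICE_START" ... "# WAITLIST_SERVICE_END" (hash style),
  // after first trying the // and {/* */} styles shown in the diff above.
  if (await hasSection(file, 'WAITLIST_SERVICE')) {
    await replaceSection(file, 'WAITLIST_SERVICE', newYamlBlock);
  }
}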
package/src/utils/variable-replacer.js
@@ -55,7 +55,10 @@ function escapeRegex(string) {
  */
 async function replaceVariablesInFile(filePath, variables) {
   try {
-
+    // Read file content - preserve line endings for shell scripts
+    // Use binary mode to avoid Node.js line ending normalization on Windows
+    const buffer = await fs.readFile(filePath);
+    let content = buffer.toString('utf8');
     let modified = false;
 
     // Replace each variable using regex with negative lookbehind
@@ -74,8 +77,10 @@ async function replaceVariablesInFile(filePath, variables) {
     }
 
     // Only write if changes were made
+    // Write as Buffer to preserve original line endings
     if (modified) {
-
+      const outputBuffer = Buffer.from(content, 'utf8');
+      await fs.writeFile(filePath, outputBuffer);
     }
 
     return modified;
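Per the new comments, variable-replacer.js now reads and writes raw Buffers instead of passing an encoding to fs, so a template's existing line endings (LF or CRLF) are written back unchanged. A stripped-down sketch of that round-trip, using a plain string replacement in place of the package's lookbehind regex; the function name is illustrative:

// Simplified stand-in for replaceVariablesInFile; the real code replaces each
// variable using a regex with a negative lookbehind.
const fs = require('fs-extra');

async function replaceInPlace(filePath, from, to) {
  const buffer = await fs.readFile(filePath);   // no encoding: raw bytes
  let content = buffer.toString('utf8');        // existing \r\n sequences survive as-is
  const modified = content.includes(from);
  if (modified) {
    content = content.split(from).join(to);
    await fs.writeFile(filePath, Buffer.from(content, 'utf8'));
  }
  return modified;
}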
package/src/utils/variant-processor.js
@@ -44,7 +44,7 @@ async function processServiceVariant(
   // Step 1: Copy base template (minimal - B2B + single-tenant)
   console.log(` 📁 Copying base template from ${serviceConfig.base}`);
   await copyDirectory(basePath, destination, {
-    exclude: ['node_modules', '.git', 'dist', '.env'
+    exclude: ['node_modules', '.git', 'dist', '.env']
   });
 
   // Step 2: Determine which variants to apply
@@ -143,29 +143,41 @@ async function cleanupSectionMarkers(serviceName, serviceConfig, appliedVariants
     let content = await fs.readFile(targetFilePath, 'utf-8');
     let modified = false;
 
-    // Remove each unused section marker
+    // Remove each unused section marker (keep content, remove only marker comments)
     for (const sectionName of sectionNames) {
-      // Try
-
-
-
+      // Try all comment formats (// for JS/TS, {/* */} for JSX, # for YAML/Shell)
+      // Capture: START marker, content, END marker - replace with just content
+      // Include leading whitespace before markers to prevent indentation issues
+      const slashPattern = new RegExp(
+        `^[ \\t]*\\/\\/ ${sectionName}_START\\n([\\s\\S]*?)^[ \\t]*\\/\\/ ${sectionName}_END\\n?`,
+        'gm'
      );
      const jsxPattern = new RegExp(
-
-        '
+        `^[ \\t]*\\{\\/\\* ${sectionName}_START \\*\\/\\}\\n([\\s\\S]*?)^[ \\t]*\\{\\/\\* ${sectionName}_END \\*\\/\\}\\n?`,
+        'gm'
+      );
+      const hashPattern = new RegExp(
+        `^[ \\t]*# ${sectionName}_START\\n([\\s\\S]*?)^[ \\t]*# ${sectionName}_END\\n?`,
+        'gm'
      );
 
-      const
-      content = content.replace(
-      if (content !==
+      const beforeSlash = content;
+      content = content.replace(slashPattern, '$1');
+      if (content !== beforeSlash) {
        modified = true;
      }
 
      const beforeJsx = content;
-      content = content.replace(jsxPattern, '');
+      content = content.replace(jsxPattern, '$1');
      if (content !== beforeJsx) {
        modified = true;
      }
+
+      const beforeHash = content;
+      content = content.replace(hashPattern, '$1');
+      if (content !== beforeHash) {
+        modified = true;
+      }
     }
 
     if (modified) {