monorepotime 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.js +106 -0
- package/dist/routes/_temp.js +9 -0
- package/dist/routes/interactiveTerminal.js +145 -0
- package/dist/routes/listworkspacedirs.js +107 -0
- package/dist/routes/newworkspace.js +43 -0
- package/dist/routes/rootPath.js +36 -0
- package/dist/routes/runcmddev.js +61 -0
- package/dist/routes/scafoldrepo.js +156 -0
- package/dist/routes/scanworkspace.js +167 -0
- package/dist/routes/stopcmd.js +203 -0
- package/dist/routes/turborepoexist.js +31 -0
- package/dist/routes/updateworkspace.js +50 -0
- package/dist/routes/vscodeHideShow.js +129 -0
- package/package.json +47 -0
- package/public/assets/index-DjKHu3u0.css +1 -0
- package/public/assets/index-L0znsqFE.js +100 -0
- package/public/index.html +30 -0
- package/public/og-image.png +0 -0
- package/public/vite.svg +1 -0
|
@@ -0,0 +1,167 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
3
|
+
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
4
|
+
};
|
|
5
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
6
|
+
const fs_extra_1 = __importDefault(require("fs-extra"));
|
|
7
|
+
const path_1 = __importDefault(require("path"));
|
|
8
|
+
const fast_glob_1 = __importDefault(require("fast-glob"));
|
|
9
|
+
const express_1 = require("express");
|
|
10
|
+
const START_DIR = process.cwd();
|
|
11
|
+
/**
 * Walks upward from `startDir` looking for the monorepo root: the first
 * ancestor whose package.json declares a "workspaces" field.
 * Falls back to `startDir` itself when no such ancestor exists.
 */
function findMonorepoRoot(startDir) {
    let current = startDir;
    while (true) {
        const parent = path_1.default.dirname(current);
        if (parent === current) {
            break; // reached the filesystem root without finding "workspaces"
        }
        const manifestPath = path_1.default.join(current, "package.json");
        if (fs_extra_1.default.existsSync(manifestPath)) {
            let manifest = null;
            try {
                manifest = fs_extra_1.default.readJsonSync(manifestPath);
            }
            catch (e) {
                // Unreadable or invalid JSON — keep climbing.
            }
            if (manifest && manifest.workspaces) {
                return current;
            }
        }
        current = parent;
    }
    return startDir;
}
|
|
30
|
+
// Monorepo root resolved once at module load.
const ROOT = findMonorepoRoot(START_DIR);
const route = (0, express_1.Router)();
// Glob patterns excluded from every workspace scan (VCS, caches, build output).
const IGNORE = [
    "**/node_modules/**",
    "**/.git/**",
    "**/.turbo/**",
    "**/.next/**",
    "**/.vercel/**",
    "**/.cache/**",
    "**/dist/**",
    "**/build/**",
    "**/out/**",
    "**/coverage/**",
];
|
|
44
|
+
/**
 * Safe JSON reader: resolves to the parsed contents of `file`,
 * or null when the file is missing or contains invalid JSON.
 */
async function readJSON(file) {
    return fs_extra_1.default.readJSON(file).catch(() => null);
}
|
|
55
|
+
/**
 * Checks whether `dir` contains a package.json with a "dev" or "start"
 * script. A script counts even when its value is an empty string — it
 * only has to be present.
 *
 * BUG FIX: the original compared each script against `typeof null` (the
 * string "object"), so `undefined != "object"` was true and every package
 * that merely had a `scripts` object was reported as runnable. Compare
 * loosely against null instead, which matches both null and undefined.
 */
async function isRunnableProject(dir) {
    const pkgPath = path_1.default.join(dir, "package.json");
    if (!(await fs_extra_1.default.pathExists(pkgPath)))
        return false;
    const pkg = await readJSON(pkgPath);
    if (!pkg?.scripts)
        return false;
    // Even an empty dev/start script marks the project as runnable,
    // as long as the key exists.
    return pkg.scripts.dev != null || pkg.scripts.start != null;
}
|
|
72
|
+
/**
 * Expands workspace glob patterns (e.g. "apps/*") into absolute
 * directory paths under ROOT, skipping the IGNORE folders.
 */
async function resolveWorkspaceDirs(workspaces) {
    // Normalize each pattern to exactly one trailing slash so fast-glob
    // treats it as a directory pattern.
    const patterns = workspaces.map((pattern) => `${pattern.replace(/\/$/, "")}/`);
    return (0, fast_glob_1.default)(patterns, {
        cwd: ROOT,
        onlyDirectories: true,
        absolute: true,
        ignore: IGNORE,
    });
}
|
|
83
|
+
/**
 * Scans the workspaces declared by the root package.json. Supports both
 * the array form (`"workspaces": [...]`) and the object form
 * (`"workspaces": { "packages": [...] }`). Returns absolute, de-duplicated
 * paths of runnable projects.
 */
async function scanWorkspaces(rootPkg) {
    const patterns = Array.isArray(rootPkg.workspaces)
        ? rootPkg.workspaces
        : rootPkg.workspaces?.packages ?? [];
    if (!patterns.length)
        return [];
    const candidates = await resolveWorkspaceDirs(patterns);
    const runnable = new Set();
    for (const candidate of candidates) {
        if (await isRunnableProject(candidate)) {
            runnable.add(path_1.default.resolve(candidate));
        }
    }
    return [...runnable];
}
|
|
105
|
+
/**
 * Deep-scan fallback for repos without a "workspaces" declaration:
 * globs every package.json under ROOT (minus IGNORE folders) and keeps
 * the directories whose manifest is runnable.
 */
async function scanRecursively() {
    const manifests = await (0, fast_glob_1.default)("**/package.json", {
        cwd: ROOT,
        absolute: true,
        ignore: IGNORE,
    });
    const runnable = new Set();
    for (const manifest of manifests) {
        const projectDir = path_1.default.dirname(manifest);
        if (await isRunnableProject(projectDir)) {
            runnable.add(path_1.default.resolve(projectDir));
        }
    }
    return [...runnable];
}
|
|
123
|
+
/**
 * GET / — scans the monorepo and responds with
 * `{ root, count, workspace: [...] }`, where each workspace entry carries
 * the project name, absolute path, and the well-known commands read from
 * its package.json scripts (null when a script is not defined).
 *
 * BUG FIX: the original used `x != typeof null` (i.e. `x != "object"`) as
 * a presence test, which yields `undefined` instead of `null` for missing
 * fields (and `null` for a value literally equal to "object"). `?? null`
 * expresses the intent: the value if present, otherwise null.
 */
route.get("/", async (req, res) => {
    res.header("Access-Control-Allow-Origin", "*");
    try {
        const rootPkgPath = path_1.default.join(ROOT, "package.json");
        const rootPkg = await readJSON(rootPkgPath);
        // Prefer declared workspaces; otherwise deep-scan the tree.
        const projects = rootPkg?.workspaces
            ? await scanWorkspaces(rootPkg)
            : await scanRecursively();
        const projectInfos = (await Promise.all(projects.map(async (p) => {
            const pkg = await readJSON(path_1.default.join(p, "package.json"));
            if (!pkg)
                return null; // unreadable manifest — dropped by filter(Boolean)
            const scripts = pkg.scripts ?? {}; // robustness: manifest may lack "scripts"
            return {
                name: pkg.name || path_1.default.basename(p),
                path: p,
                fontawesomeIcon: pkg.fontawesomeIcon ?? null,
                description: pkg.description ?? null,
                devCommand: scripts.dev ?? null,
                startCommand: scripts.start ?? null,
                stopCommand: scripts.stop ?? null,
                buildCommand: scripts.build ?? null,
                cleanCommand: scripts.clean ?? null,
                lintCommand: scripts.lint ?? null,
                testCommand: scripts.test ?? null,
            };
        }))).filter(Boolean); // Filter out any failed reads
        res.json({
            root: ROOT,
            count: projectInfos.length,
            workspace: projectInfos,
        });
    }
    catch (err) {
        res.status(500).json({ error: err.message });
    }
});
exports.default = route;
|
|
@@ -0,0 +1,203 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
3
|
+
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
4
|
+
};
|
|
5
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
6
|
+
const express_1 = require("express");
|
|
7
|
+
const runcmddev_1 = require("./runcmddev");
|
|
8
|
+
const child_process_1 = require("child_process");
|
|
9
|
+
const util_1 = require("util");
|
|
10
|
+
const chalk_1 = __importDefault(require("chalk"));
|
|
11
|
+
const router = (0, express_1.Router)();
|
|
12
|
+
router.post("/", async (req, res) => {
|
|
13
|
+
res.header("Access-Control-Allow-Origin", "*");
|
|
14
|
+
try {
|
|
15
|
+
const body = req.body;
|
|
16
|
+
const workspace = body.workspace;
|
|
17
|
+
if (!workspace) {
|
|
18
|
+
return res.status(400).json({ error: "No workspace provided" });
|
|
19
|
+
}
|
|
20
|
+
const currentSocket = runcmddev_1.sockets.get(workspace.name);
|
|
21
|
+
const currentProcess = runcmddev_1.activeProcesses.get(workspace.name);
|
|
22
|
+
// 1. Kill the Active Process
|
|
23
|
+
if (currentProcess) {
|
|
24
|
+
currentSocket?.emit('log', chalk_1.default.yellow("Stopping active process..."));
|
|
25
|
+
if (currentProcess.pid) {
|
|
26
|
+
if (process.platform !== 'win32') {
|
|
27
|
+
await cleanupProcessPorts(currentProcess.pid, currentSocket);
|
|
28
|
+
}
|
|
29
|
+
}
|
|
30
|
+
await new Promise((resolve) => {
|
|
31
|
+
let resolved = false;
|
|
32
|
+
const safeResolve = () => {
|
|
33
|
+
if (!resolved) {
|
|
34
|
+
resolved = true;
|
|
35
|
+
resolve();
|
|
36
|
+
}
|
|
37
|
+
};
|
|
38
|
+
// Timeout safety: Proceed after 5s if process hangs
|
|
39
|
+
const timer = setTimeout(() => {
|
|
40
|
+
console.log(`Process stop timed out for ${workspace.name}`);
|
|
41
|
+
safeResolve();
|
|
42
|
+
}, 5000);
|
|
43
|
+
currentProcess.once('exit', () => {
|
|
44
|
+
clearTimeout(timer);
|
|
45
|
+
safeResolve();
|
|
46
|
+
});
|
|
47
|
+
if (currentProcess.pid) {
|
|
48
|
+
try {
|
|
49
|
+
if (process.platform !== 'win32') {
|
|
50
|
+
process.kill(-currentProcess.pid, 'SIGINT');
|
|
51
|
+
}
|
|
52
|
+
else {
|
|
53
|
+
currentProcess.kill();
|
|
54
|
+
}
|
|
55
|
+
}
|
|
56
|
+
catch (error) {
|
|
57
|
+
if (error.code === 'ESRCH') {
|
|
58
|
+
// Process already dead
|
|
59
|
+
clearTimeout(timer);
|
|
60
|
+
safeResolve();
|
|
61
|
+
}
|
|
62
|
+
else {
|
|
63
|
+
console.error(`Failed to kill process: ${error.message}`);
|
|
64
|
+
// Don't resolve here, wait for timeout or natural exit if signal worked partially
|
|
65
|
+
}
|
|
66
|
+
}
|
|
67
|
+
}
|
|
68
|
+
else {
|
|
69
|
+
safeResolve();
|
|
70
|
+
}
|
|
71
|
+
});
|
|
72
|
+
runcmddev_1.activeProcesses.delete(workspace.name);
|
|
73
|
+
}
|
|
74
|
+
else {
|
|
75
|
+
currentSocket?.emit('log', chalk_1.default.yellow("No active process found to stop."));
|
|
76
|
+
}
|
|
77
|
+
// 3. Execute Stop Command (if any)
|
|
78
|
+
const commandToRun = workspace.stopCommand;
|
|
79
|
+
if (commandToRun) {
|
|
80
|
+
currentSocket?.emit('log', chalk_1.default.green(`Running stop command: ${commandToRun}`));
|
|
81
|
+
const baseCMD = commandToRun.split(" ")[0];
|
|
82
|
+
const args = commandToRun.split(" ").slice(1);
|
|
83
|
+
const child = (0, child_process_1.spawn)(baseCMD, args, {
|
|
84
|
+
cwd: workspace.path,
|
|
85
|
+
env: {
|
|
86
|
+
...process.env,
|
|
87
|
+
TERM: 'dumb',
|
|
88
|
+
FORCE_COLOR: '1',
|
|
89
|
+
},
|
|
90
|
+
stdio: ['ignore', 'pipe', 'pipe'],
|
|
91
|
+
shell: true,
|
|
92
|
+
detached: process.platform !== 'win32'
|
|
93
|
+
});
|
|
94
|
+
child.stdout.on('data', (data) => {
|
|
95
|
+
currentSocket?.emit('log', data.toString());
|
|
96
|
+
});
|
|
97
|
+
child.stderr.on('data', (data) => {
|
|
98
|
+
currentSocket?.emit('error', data.toString());
|
|
99
|
+
});
|
|
100
|
+
child.on('close', (code) => {
|
|
101
|
+
currentSocket?.emit('log', chalk_1.default.green(`Stop command finished with code ${code}`));
|
|
102
|
+
currentSocket?.emit('exit', 'Process stopped');
|
|
103
|
+
// We keep the socket open or let client disconnect
|
|
104
|
+
});
|
|
105
|
+
}
|
|
106
|
+
else {
|
|
107
|
+
currentSocket?.emit('log', "Process stopped (no stop command defined).");
|
|
108
|
+
currentSocket?.emit('exit', 'Process stopped');
|
|
109
|
+
}
|
|
110
|
+
await new Promise((resolve) => setTimeout(resolve, 1000));
|
|
111
|
+
res.end();
|
|
112
|
+
}
|
|
113
|
+
catch (e) {
|
|
114
|
+
console.error("Error in stopcmd:", e);
|
|
115
|
+
res.status(500).json({ error: e.message });
|
|
116
|
+
}
|
|
117
|
+
});
|
|
118
|
+
exports.default = router;
|
|
119
|
+
const execAsync = (0, util_1.promisify)(child_process_1.exec);
/**
 * Returns `rootPid` together with every descendant PID, discovered by
 * parsing `ps -e -o pid,ppid` into a parent -> children map and walking
 * it breadth-first. On any failure (e.g. `ps` unavailable) it degrades
 * to just [rootPid].
 */
async function getProcessTreePids(rootPid) {
    try {
        // Every process on the system as "PID PPID" rows.
        const { stdout } = await execAsync('ps -e -o pid,ppid --no-headers');
        const childrenByParent = new Map();
        for (const row of stdout.trim().split('\n')) {
            const [pidText, ppidText] = row.trim().split(/\s+/);
            if (pidText === undefined || ppidText === undefined)
                continue; // malformed row
            const pid = parseInt(pidText, 10);
            const ppid = parseInt(ppidText, 10);
            const siblings = childrenByParent.get(ppid);
            if (siblings) {
                siblings.push(pid);
            }
            else {
                childrenByParent.set(ppid, [pid]);
            }
        }
        // BFS from the root, collecting every transitive child.
        const collected = new Set([rootPid]);
        const queue = [rootPid];
        while (queue.length > 0) {
            const current = queue.shift();
            for (const child of childrenByParent.get(current) ?? []) {
                collected.add(child);
                queue.push(child);
            }
        }
        return Array.from(collected);
    }
    catch (e) {
        console.error("Error building process tree:", e);
        return [rootPid];
    }
}
|
|
156
|
+
/**
 * Frees any TCP ports held open by `rootPid` or its descendants.
 * Lists listening sockets via `lsof`, maps them to the process tree's
 * PIDs, and kills each matching port with `npx kill-port`, reporting
 * progress over `socket` (if provided). POSIX-only: relies on ps/lsof.
 */
async function cleanupProcessPorts(rootPid, socket) {
    try {
        const pids = await getProcessTreePids(rootPid);
        // lsof output format "-F pn":
        // p1234
        // n*:3000
        const { stdout } = await execAsync('lsof -P -n -iTCP -sTCP:LISTEN -F pn');
        const lines = stdout.trim().split('\n');
        // State machine over lsof field lines: a 'p' line sets the current
        // PID; subsequent 'n' lines list that PID's listening addresses.
        let currentPid = -1;
        const pidPorts = new Map();
        for (const line of lines) {
            const type = line[0];
            const content = line.substring(1);
            if (type === 'p') {
                currentPid = parseInt(content, 10);
            }
            else if (type === 'n' && currentPid !== -1) {
                // Extract the trailing ":PORT" from e.g. "*:3000" or "127.0.0.1:8080".
                const match = content.match(/:(\d+)$/);
                if (match) {
                    const port = match[1];
                    // Only check if this port belongs to one of our tree PIDs
                    if (!pidPorts.has(currentPid))
                        pidPorts.set(currentPid, []);
                    pidPorts.get(currentPid)?.push(port);
                }
            }
        }
        // Kill every port owned by a PID in the process tree.
        for (const pid of pids) {
            if (pidPorts.has(pid)) {
                const ports = pidPorts.get(pid);
                if (ports) {
                    for (const port of ports) {
                        socket?.emit('log', chalk_1.default.yellow(`Detected active port ${port} on PID ${pid}. Killing port...`));
                        try {
                            await execAsync(`npx -y kill-port ${port}`);
                        }
                        catch (err) {
                            socket?.emit('log', chalk_1.default.red(`Failed to kill port ${port}: ${err.message}`));
                        }
                    }
                }
            }
        }
    }
    catch (e) {
        // If lsof fails (e.g. no permissions or no ports open), just ignore
    }
}
|
|
@@ -0,0 +1,31 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
3
|
+
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
4
|
+
};
|
|
5
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
6
|
+
const express_1 = require("express");
|
|
7
|
+
const fs_1 = __importDefault(require("fs"));
|
|
8
|
+
const path_1 = __importDefault(require("path"));
|
|
9
|
+
const rootPath_1 = require("./rootPath");
|
|
10
|
+
const router = (0, express_1.Router)();
|
|
11
|
+
router.get("/", async (req, res) => {
|
|
12
|
+
try {
|
|
13
|
+
let isExist = true;
|
|
14
|
+
const turboJsonPath = path_1.default.join(rootPath_1.ROOT, 'turbo.json');
|
|
15
|
+
const turboExists = fs_1.default.existsSync(turboJsonPath);
|
|
16
|
+
if (!turboExists) {
|
|
17
|
+
isExist = false;
|
|
18
|
+
}
|
|
19
|
+
const monorepoJsonPath = path_1.default.join(rootPath_1.ROOT, 'monorepotime.json');
|
|
20
|
+
const monorepoExists = fs_1.default.existsSync(monorepoJsonPath);
|
|
21
|
+
if (!monorepoExists) {
|
|
22
|
+
isExist = false;
|
|
23
|
+
}
|
|
24
|
+
res.json({ exists: isExist });
|
|
25
|
+
}
|
|
26
|
+
catch (error) {
|
|
27
|
+
console.error("Error checking turbo.json:", error);
|
|
28
|
+
res.status(500).json({ error: "Internal server error", exists: false });
|
|
29
|
+
}
|
|
30
|
+
});
|
|
31
|
+
exports.default = router;
|
|
@@ -0,0 +1,50 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
3
|
+
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
4
|
+
};
|
|
5
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
6
|
+
const express_1 = require("express");
|
|
7
|
+
const fs_extra_1 = __importDefault(require("fs-extra"));
|
|
8
|
+
const path_1 = __importDefault(require("path"));
|
|
9
|
+
const router = (0, express_1.Router)();
|
|
10
|
+
router.post("/", async (req, res) => {
|
|
11
|
+
res.header("Access-Control-Allow-Origin", "*");
|
|
12
|
+
try {
|
|
13
|
+
const workspace = req.body;
|
|
14
|
+
if (!workspace || !workspace.path) {
|
|
15
|
+
res.status(400).send({ error: "Invalid workspace data" });
|
|
16
|
+
return;
|
|
17
|
+
}
|
|
18
|
+
const packageJsonPath = path_1.default.join(workspace.path, 'package.json');
|
|
19
|
+
if (!fs_extra_1.default.existsSync(packageJsonPath)) {
|
|
20
|
+
res.status(404).send({ error: "package.json not found in workspace path" });
|
|
21
|
+
return;
|
|
22
|
+
}
|
|
23
|
+
const packageJson = await fs_extra_1.default.readJson(packageJsonPath);
|
|
24
|
+
if (workspace.name)
|
|
25
|
+
packageJson.name = workspace.name;
|
|
26
|
+
if (workspace.description != typeof null)
|
|
27
|
+
packageJson.description = workspace.description;
|
|
28
|
+
if (workspace.devCommand != typeof null)
|
|
29
|
+
packageJson.scripts.dev = workspace.devCommand;
|
|
30
|
+
if (workspace.startCommand != typeof null)
|
|
31
|
+
packageJson.scripts.start = workspace.startCommand;
|
|
32
|
+
if (workspace.buildCommand != typeof null)
|
|
33
|
+
packageJson.scripts.build = workspace.buildCommand;
|
|
34
|
+
if (workspace.testCommand != typeof null)
|
|
35
|
+
packageJson.scripts.test = workspace.testCommand;
|
|
36
|
+
if (workspace.lintCommand != typeof null)
|
|
37
|
+
packageJson.scripts.lint = workspace.lintCommand;
|
|
38
|
+
if (workspace.stopCommand != typeof null)
|
|
39
|
+
packageJson.scripts.stop = workspace.stopCommand;
|
|
40
|
+
if (workspace.cleanCommand != typeof null)
|
|
41
|
+
packageJson.scripts.clean = workspace.cleanCommand;
|
|
42
|
+
await fs_extra_1.default.writeJson(packageJsonPath, packageJson, { spaces: 2 });
|
|
43
|
+
res.send({ success: true, message: "Workspace updated successfully" });
|
|
44
|
+
}
|
|
45
|
+
catch (error) {
|
|
46
|
+
console.error("Update workspace error:", error);
|
|
47
|
+
res.status(500).send({ error: error.message });
|
|
48
|
+
}
|
|
49
|
+
});
|
|
50
|
+
exports.default = router;
|
|
@@ -0,0 +1,129 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
3
|
+
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
4
|
+
};
|
|
5
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
6
|
+
const express_1 = require("express");
|
|
7
|
+
const fs_extra_1 = __importDefault(require("fs-extra"));
|
|
8
|
+
const path_1 = __importDefault(require("path"));
|
|
9
|
+
const router = (0, express_1.Router)();
|
|
10
|
+
const EXCLUDE_PATTERNS = {
|
|
11
|
+
"**/node_modules": true,
|
|
12
|
+
"**/.git": true,
|
|
13
|
+
"**/.gitignore": true,
|
|
14
|
+
"**/.turbo": true,
|
|
15
|
+
"**/dist": true,
|
|
16
|
+
"**/_tool": true,
|
|
17
|
+
"**/package-lock.json": true,
|
|
18
|
+
"**/Dockerfile": true,
|
|
19
|
+
"**/docker-compose.yml": true,
|
|
20
|
+
"**/.dockerignore": true,
|
|
21
|
+
"**/turbo.json": true,
|
|
22
|
+
"**/nodemon.json": true,
|
|
23
|
+
"**/temp.md": true,
|
|
24
|
+
"**/*postcss*": true,
|
|
25
|
+
"**/*tailwind*": true,
|
|
26
|
+
"**/*tsconfig*": true,
|
|
27
|
+
"**/*eslint*": true,
|
|
28
|
+
"**/*prettier*": true,
|
|
29
|
+
"**/*vite*": true,
|
|
30
|
+
"_temp": true,
|
|
31
|
+
".gitignore": true,
|
|
32
|
+
".vscode": true,
|
|
33
|
+
"package.json": true,
|
|
34
|
+
"README.md": true,
|
|
35
|
+
".github": true,
|
|
36
|
+
".buildkite": true,
|
|
37
|
+
".circleci": true,
|
|
38
|
+
".gitlab-ci.yml": true,
|
|
39
|
+
".travis.yml": true,
|
|
40
|
+
"out": true
|
|
41
|
+
};
|
|
42
|
+
const EXCLUDE_PATTERNS_DEFAULT = {
|
|
43
|
+
"**/.git": true,
|
|
44
|
+
".vscode": true,
|
|
45
|
+
".turbo": true,
|
|
46
|
+
};
|
|
47
|
+
const START_DIR = process.cwd();
/**
 * Walks upward from `startDir` to locate the workspace root. A directory
 * wins if its package.json declares "workspaces", or — failing that — if
 * it contains a .vscode folder (a strong indicator of the editor root
 * for this module's purposes). Falls back to `startDir` when neither is
 * found before reaching the filesystem root.
 */
function findMonorepoRoot(startDir) {
    let current = startDir;
    while (true) {
        const parent = path_1.default.dirname(current);
        if (parent === current) {
            break; // reached the filesystem root
        }
        const manifestPath = path_1.default.join(current, "package.json");
        if (fs_extra_1.default.existsSync(manifestPath)) {
            try {
                const manifest = fs_extra_1.default.readJsonSync(manifestPath);
                if (manifest.workspaces) {
                    return current;
                }
            }
            catch (e) {
                // Unreadable or invalid package.json — keep climbing.
            }
        }
        // A .vscode folder also marks the "root" in this context.
        if (fs_extra_1.default.existsSync(path_1.default.join(current, ".vscode"))) {
            return current;
        }
        current = parent;
    }
    return startDir;
}
|
|
77
|
+
const ROOT = findMonorepoRoot(START_DIR);
/** Absolute path of the workspace's .vscode/settings.json. */
const getSettingsPath = () => path_1.default.join(ROOT, '.vscode/settings.json');
/**
 * Guarantees .vscode/settings.json exists, creating the directory and a
 * minimal `{ "files.exclude": {} }` file (4-space indent) when missing.
 */
const ensureSettingsFile = async () => {
    const settingsPath = getSettingsPath();
    await fs_extra_1.default.ensureDir(path_1.default.dirname(settingsPath));
    const alreadyThere = await fs_extra_1.default.pathExists(settingsPath);
    if (!alreadyThere) {
        await fs_extra_1.default.writeJson(settingsPath, { "files.exclude": {} }, { spaces: 4 });
    }
};
|
|
89
|
+
/**
 * POST /
 * Body: { hide: boolean, pathInclude?: string[] }
 * Rewrites "files.exclude" in .vscode/settings.json. With hide=true the
 * standard noise patterns plus any pathInclude entries (normalized
 * relative to ROOT) are hidden; with hide=false only the baseline
 * defaults remain, effectively showing all files again.
 */
router.post("/", async (req, res) => {
    try {
        const { hide, pathInclude } = req.body;
        await ensureSettingsFile();
        const settingsPath = getSettingsPath();
        const settings = await fs_extra_1.default.readJson(settingsPath);
        // Rebuild files.exclude from scratch, starting from the defaults.
        const excludes = Object.assign({}, EXCLUDE_PATTERNS_DEFAULT);
        if (hide) {
            // Hide the standard noise patterns...
            Object.assign(excludes, EXCLUDE_PATTERNS);
            // ...plus any caller-supplied paths, made relative to ROOT.
            for (const candidate of Array.isArray(pathInclude) ? pathInclude : []) {
                const relativePath = path_1.default.relative(ROOT, candidate);
                // Only accept paths that actually live inside the workspace.
                const insideRoot = relativePath !== ''
                    && !relativePath.startsWith('..')
                    && !path_1.default.isAbsolute(relativePath);
                if (insideRoot) {
                    excludes[relativePath] = true;
                }
            }
        }
        // hide === false: nothing beyond the defaults — "show all files".
        settings['files.exclude'] = excludes;
        await fs_extra_1.default.writeJson(settingsPath, settings, { spaces: 4 });
        res.json({ success: true, isHidden: hide });
    }
    catch (error) {
        console.error("Error updating VSCode settings:", error);
        res.status(500).json({ error: 'Failed to update VSCode settings' });
    }
});
exports.default = router;
|
package/package.json
ADDED
|
@@ -0,0 +1,47 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "monorepotime",
|
|
3
|
+
"version": "1.0.0",
|
|
4
|
+
"description": "",
|
|
5
|
+
"main": "dist/index.js",
|
|
6
|
+
"scripts": {
|
|
7
|
+
"dev": "nodemon --watch 'src/**/*.ts' --exec 'ts-node' src/index.ts",
|
|
8
|
+
"build": "tsc",
|
|
9
|
+
"start": "node dist/index.js",
|
|
10
|
+
"test": "echo \"Error: no test specified\" && exit 1",
|
|
11
|
+
"stop": "npx -y kill-port 3000",
|
|
12
|
+
"copy-frontend": "rm -rf public && mkdir -p public && cp -r ../web/dist/* public/"
|
|
13
|
+
},
|
|
14
|
+
"dependencies": {
|
|
15
|
+
"apiroute": "*",
|
|
16
|
+
"chalk": "^4.1.2",
|
|
17
|
+
"config": "*",
|
|
18
|
+
"cors": "^2.8.5",
|
|
19
|
+
"express": "^4.19.2",
|
|
20
|
+
"fast-glob": "^3.3.3",
|
|
21
|
+
"fs-extra": "^11.3.3",
|
|
22
|
+
"open": "^11.0.0",
|
|
23
|
+
"socket.io": "^4.8.3",
|
|
24
|
+
"types": "*"
|
|
25
|
+
},
|
|
26
|
+
"keywords": [],
|
|
27
|
+
"author": "Anghelo Amir",
|
|
28
|
+
"license": "ISC",
|
|
29
|
+
"type": "commonjs",
|
|
30
|
+
"devDependencies": {
|
|
31
|
+
"@types/cors": "^2.8.19",
|
|
32
|
+
"@types/express": "^5.0.6",
|
|
33
|
+
"@types/fs-extra": "^11.0.4",
|
|
34
|
+
"@types/node": "^24.10.4",
|
|
35
|
+
"nodemon": "^3.1.11",
|
|
36
|
+
"ts-node": "^10.9.2",
|
|
37
|
+
"typescript": "^5.9.3"
|
|
38
|
+
},
|
|
39
|
+
|
|
40
|
+
"bin": {
|
|
41
|
+
"monorepotime": "./dist/index.js"
|
|
42
|
+
},
|
|
43
|
+
"files": [
|
|
44
|
+
"dist",
|
|
45
|
+
"public"
|
|
46
|
+
]
|
|
47
|
+
}
|