monoverse 0.0.11 → 0.0.12
- package/dist/cli.js +656 -440
- package/package.json +39 -91
- package/LICENCE +0 -1
- package/README.md +0 -18
- package/dist/assets/index-DIUWvkwu.css +0 -1
- package/dist/assets/index-UQPMV9jG.js +0 -161
- package/dist/assets/index.lazy-B2wDxrXt.js +0 -21
- package/dist/index.html +0 -14
- package/dist/vite.svg +0 -1
package/dist/cli.js
CHANGED
@@ -1,483 +1,699 @@
-
-
-
-import
-import
-import
-import
-
-
-
-
-
-
-
-
-
-type: z.enum([
-"dependency",
-"devDependency",
-"peerDependency",
-"optionalDependency"
-])
+import { Command, Args, Options, CliConfig } from "@effect/cli";
+import { NodeContext, NodeRuntime } from "@effect/platform-node";
+import { Data, Effect, Schema, Either, Console, Layer } from "effect";
+import yaml from "js-yaml";
+import * as fs from "node:fs/promises";
+import * as nodePath from "node:path";
+import { glob as glob$1 } from "tinyglobby";
+import sortPackageJson from "sort-package-json";
+import * as semver from "semver";
+class FsError extends Data.TaggedError("FsError") {
+}
+Effect.sync(() => process.cwd());
+const getParentDirectory = (dirPath) => Effect.sync(() => nodePath.dirname(dirPath));
+const readFile = (filePath) => Effect.tryPromise({
+  try: () => fs.readFile(filePath, "utf-8"),
+  catch: (cause) => new FsError({ path: filePath, cause })
 });
-
-
-
-dependencies: z.array(dependencySchema)
+const writeFile = (filePath, content) => Effect.tryPromise({
+  try: () => fs.writeFile(filePath, content, "utf-8"),
+  catch: (cause) => new FsError({ path: filePath, cause })
 });
-
-
+const fileExists = (filePath) => Effect.tryPromise({
+  try: async () => {
+    await fs.access(filePath);
+    return true;
+  },
+  catch: () => false
+}).pipe(Effect.catchAll(() => Effect.succeed(false)));
+const joinPath = (...paths) => nodePath.join(...paths);
+const basename = (path) => nodePath.basename(path);
+const dirname = (path) => nodePath.dirname(path);
+const isRootPath = (dirPath) => nodePath.dirname(dirPath) === dirPath;
+const glob = (patterns, options) => Effect.tryPromise({
+  try: () => glob$1(patterns, {
+    cwd: options.cwd,
+    ignore: options.ignore ?? [],
+    absolute: options.absolute ?? true
+  }),
+  catch: (cause) => new FsError({ path: options.cwd, cause })
 });
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+class NotAMonorepoError extends Data.TaggedError("NotAMonorepoError") {
+}
+const PackageJsonSchema = Schema.Struct({
+  workspaces: Schema.optional(
+    Schema.Union(
+      Schema.mutable(Schema.Array(Schema.String)),
+      Schema.Struct({
+        packages: Schema.optional(Schema.mutable(Schema.Array(Schema.String)))
+      })
+    )
+  )
+});
+const PnpmWorkspaceSchema = Schema.Struct({
+  packages: Schema.optional(Schema.mutable(Schema.Array(Schema.String)))
+});
+const LOCK_FILES = [
+  { file: "pnpm-lock.yaml", pm: "pnpm" },
+  { file: "yarn.lock", pm: "yarn" },
+  { file: "package-lock.json", pm: "npm" },
+  { file: "bun.lockb", pm: "bun" }
+];
+const detectPackageManager = (dirPath) => Effect.gen(function* () {
+  for (const { file, pm } of LOCK_FILES) {
+    const exists = yield* fileExists(joinPath(dirPath, file));
+    if (exists) return pm;
   }
-return
-};
-
-
-
-
-
-
-
-
-
-
-const
-
-
-
+  return "unknown";
+});
+const parseYaml = (content) => Effect.try(() => yaml.load(content)).pipe(
+  Effect.flatMap(Schema.decodeUnknown(PnpmWorkspaceSchema))
+);
+const parsePackageJson$1 = (content) => Effect.try(() => JSON.parse(content)).pipe(
+  Effect.flatMap(Schema.decodeUnknown(PackageJsonSchema))
+);
+const getPnpmWorkspacePatterns = (dirPath) => Effect.gen(function* () {
+  const wsPath = joinPath(dirPath, "pnpm-workspace.yaml");
+  const exists = yield* fileExists(wsPath);
+  if (!exists) return null;
+  const content = yield* readFile(wsPath).pipe(
+    Effect.catchAll(() => Effect.succeed(""))
+  );
+  if (!content) return null;
+  const result = yield* parseYaml(content).pipe(
+    Effect.catchAll(() => Effect.succeed(null))
+  );
+  return result?.packages ?? null;
+});
+const getPackageJsonWorkspacePatterns = (dirPath) => Effect.gen(function* () {
+  const pkgPath = joinPath(dirPath, "package.json");
+  const exists = yield* fileExists(pkgPath);
+  if (!exists) return null;
+  const content = yield* readFile(pkgPath).pipe(
+    Effect.catchAll(() => Effect.succeed(""))
+  );
+  if (!content) return null;
+  const pkg = yield* parsePackageJson$1(content).pipe(
+    Effect.catchAll(() => Effect.succeed(null))
+  );
+  if (!pkg) return null;
+  if (Array.isArray(pkg.workspaces)) return pkg.workspaces;
+  if (Array.isArray(pkg.workspaces?.packages)) return pkg.workspaces.packages;
+  return null;
+});
+const hasPackageJson = (dirPath) => fileExists(joinPath(dirPath, "package.json"));
+const toPackageJsonGlob = (patterns) => patterns.map((p) => joinPath(p, "package.json"));
+const findMonorepoRoot = (startPath, options = {}) => Effect.gen(function* () {
+  let currentPath = startPath;
+  let singleRepoCandidate = null;
+  const stopAt = options.stopAt;
+  while (true) {
+    const pnpmPatterns = yield* getPnpmWorkspacePatterns(currentPath);
+    if (pnpmPatterns) {
       return {
-
-
-
+        root: currentPath,
+        packageManager: "pnpm",
+        patterns: toPackageJsonGlob(pnpmPatterns)
       };
-}
-
-
-
-
-
-
-
-
-
-
-
+    }
+    const pkgPatterns = yield* getPackageJsonWorkspacePatterns(currentPath);
+    if (pkgPatterns) {
+      const pm = yield* detectPackageManager(currentPath);
+      return {
+        root: currentPath,
+        packageManager: pm,
+        patterns: toPackageJsonGlob(pkgPatterns)
+      };
+    }
+    if (!singleRepoCandidate) {
+      const hasPkg = yield* hasPackageJson(currentPath);
+      if (hasPkg) {
+        singleRepoCandidate = currentPath;
+      }
+    }
+    if (isRootPath(currentPath)) break;
+    if (stopAt && currentPath === stopAt) break;
+    currentPath = yield* getParentDirectory(currentPath);
+  }
+  if (singleRepoCandidate) {
+    const pm = yield* detectPackageManager(singleRepoCandidate);
+    return {
+      root: singleRepoCandidate,
+      packageManager: pm,
+      patterns: ["./package.json"]
+    };
+  }
+  return yield* Effect.fail(
+    new NotAMonorepoError({
+      startPath,
+      message: "No package.json found in directory tree"
     })
   );
+});
+const parseDependencySource = (versionRange) => {
+  if (versionRange.startsWith("file:") || versionRange.startsWith("./") || versionRange.startsWith("../") || versionRange.startsWith("/")) {
+    return "file";
+  }
+  if (versionRange.startsWith("git+") || versionRange.startsWith("git://") || versionRange.startsWith("github:") || versionRange.startsWith("gitlab:") || versionRange.startsWith("bitbucket:")) {
+    return "git";
+  }
+  if (versionRange.startsWith("http://") || versionRange.startsWith("https://")) {
+    return "url";
+  }
+  return "npm";
 };
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-})
+const parseDependencies = (deps, dependencyType, workspaceNames) => {
+  if (!deps) return [];
+  return Object.entries(deps).map(([name, versionRange]) => ({
+    name,
+    versionRange,
+    dependencyType,
+    source: workspaceNames.has(name) ? "workspace" : parseDependencySource(versionRange)
+  }));
+};
+const RawPackageJsonSchema = Schema.Struct({
+  name: Schema.optional(Schema.String),
+  version: Schema.optional(Schema.String),
+  private: Schema.optional(Schema.Boolean),
+  dependencies: Schema.optional(
+    Schema.Record({ key: Schema.String, value: Schema.String })
+  ),
+  devDependencies: Schema.optional(
+    Schema.Record({ key: Schema.String, value: Schema.String })
   ),
-
-
-z2.object({ approximateSize: z2.number(), name: z2.string() })
+  peerDependencies: Schema.optional(
+    Schema.Record({ key: Schema.String, value: Schema.String })
   ),
-
-
-
+  optionalDependencies: Schema.optional(
+    Schema.Record({ key: Schema.String, value: Schema.String })
+  )
 });
-
-
-
-
-
-
-
-
-
-
-
-
+const parseWorkspace = (pkgPath, raw, workspaceNames) => ({
+  name: raw.name ?? basename(dirname(pkgPath)),
+  version: raw.version ?? "0.0.0",
+  path: dirname(pkgPath),
+  private: raw.private ?? false,
+  dependencies: [
+    ...parseDependencies(raw.dependencies, "dependency", workspaceNames),
+    ...parseDependencies(raw.devDependencies, "devDependency", workspaceNames),
+    ...parseDependencies(
+      raw.peerDependencies,
+      "peerDependency",
+      workspaceNames
+    ),
+    ...parseDependencies(
+      raw.optionalDependencies,
+      "optionalDependency",
+      workspaceNames
+    )
+  ]
 });
-
-
-
-
-
-
-
-
-
-
-
+const parsePackageJson = (content) => Effect.try(() => JSON.parse(content)).pipe(
+  Effect.flatMap(Schema.decodeUnknown(RawPackageJsonSchema))
+);
+const discoverWorkspaces = (root, patterns) => Effect.gen(function* () {
+  const workspaces = [];
+  const errors = [];
+  const workspacePaths = yield* glob(patterns, {
+    cwd: root,
+    ignore: ["**/node_modules/**"],
+    absolute: true
+  }).pipe(Effect.catchAll(() => Effect.succeed([])));
+  const rawPackages = [];
+  const workspaceNames = /* @__PURE__ */ new Set();
+  for (const workspacePath of workspacePaths) {
+    const result = yield* readFile(workspacePath).pipe(
+      Effect.flatMap(parsePackageJson),
+      Effect.either
+    );
+    if (Either.isRight(result)) {
+      const rawPackageJson = result.right;
+      const name = rawPackageJson.name ?? basename(dirname(workspacePath));
+      workspaceNames.add(name);
+      rawPackages.push({ path: workspacePath, raw: rawPackageJson });
+    } else {
+      errors.push({
+        path: workspacePath,
+        message: "Failed to parse package.json",
+        cause: result.left
+      });
+    }
+  }
+  for (const { path: pkgPath, raw } of rawPackages) {
+    workspaces.push(parseWorkspace(pkgPath, raw, workspaceNames));
+  }
+  return { workspaces, errors };
 });
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+const getPackageJsonStr = (workspace) => readFile(joinPath(workspace.path, "package.json"));
+const analyzeMonorepo = (startPath) => Effect.gen(function* () {
+  const { root, packageManager, patterns } = yield* findMonorepoRoot(startPath);
+  const { workspaces, errors } = yield* discoverWorkspaces(root, patterns);
+  return {
+    root,
+    packageManager,
+    workspaces,
+    errors
+  };
+});
+class ModifyError extends Data.TaggedError("ModifyError") {
+}
+class DependencyNotFoundError extends Data.TaggedError(
+  "DependencyNotFoundError"
+) {
+}
+class PackageJsonParseError extends Data.TaggedError(
+  "PackageJsonParseError"
+) {
+}
+class PackageJsonWriteError extends Data.TaggedError(
+  "PackageJsonWriteError"
+) {
+}
+const ALL_DEPENDENCY_KEYS = [
+  "dependencies",
+  "devDependencies",
+  "peerDependencies",
+  "optionalDependencies"
+];
+const readPackageJson = (workspace) => Effect.gen(function* () {
+  const filePath = joinPath(workspace.path, "package.json");
+  const raw = yield* readFile(filePath).pipe(
+    Effect.mapError(
+      (e) => new PackageJsonParseError({ workspace: workspace.name, cause: e })
+    )
+  );
+  return yield* Effect.try({
+    try: () => JSON.parse(raw),
+    catch: (cause) => new PackageJsonParseError({ workspace: workspace.name, cause })
+  });
+});
+const writePackageJson = (workspace, content) => Effect.gen(function* () {
+  const filePath = joinPath(workspace.path, "package.json");
+  const formatted = sortPackageJson(content);
+  const current = yield* readFile(filePath).pipe(
+    Effect.mapError(
+      (e) => new PackageJsonWriteError({ workspace: workspace.name, cause: e })
+    )
+  );
+  if (current === formatted) {
+    return;
   }
-
+  yield* writeFile(filePath, formatted).pipe(
+    Effect.mapError(
+      (e) => new PackageJsonWriteError({ workspace: workspace.name, cause: e })
+    )
+  );
+});
+const DEPENDENCY_TYPE_TO_KEY = {
+  dependency: "dependencies",
+  devDependency: "devDependencies",
+  peerDependency: "peerDependencies",
+  optionalDependency: "optionalDependencies"
 };
-
-
-
-
-
-
-
-
-
-
+const upsertDependency = (options) => Effect.gen(function* () {
+  const { workspace, dependencyName, versionRange, dependencyType } = options;
+  const content = yield* readPackageJson(workspace).pipe(
+    Effect.mapError(
+      (e) => new ModifyError({
+        workspace: workspace.name,
+        message: "Failed to read package.json",
+        cause: e
+      })
+    )
+  );
+  const targetKey = DEPENDENCY_TYPE_TO_KEY[dependencyType];
+  for (const key of ALL_DEPENDENCY_KEYS) {
+    if (key === targetKey) continue;
+    const deps = content[key];
+    if (deps && dependencyName in deps) {
+      delete deps[dependencyName];
+      if (Object.keys(deps).length === 0) {
+        delete content[key];
+      }
+    }
   }
-
-
-const workspaces = getWorkspaces(dir);
-if (!workspaces || workspaces.length === 0)
-return void 0;
-return workspaces.map((workspace) => {
-const { packageJSON, location } = workspace;
-const parsedPackageJson = packageJsonSchema.parse(packageJSON);
-return {
-workspace: transformPackageJsonToWorkspace(packageJSON),
-location,
-packageJSON: parsedPackageJson
-};
-});
-}
-function detectWorkspaceDir(dirPath) {
-while (true) {
-const workspace = getWorkspaceAtDir(dirPath);
-if (workspace)
-return dirPath;
-const parentDir = path.dirname(dirPath);
-if (parentDir === dirPath)
-return void 0;
-dirPath = parentDir;
+  if (!content[targetKey]) {
+    content[targetKey] = {};
   }
-
-
-
-
-
-
+  content[targetKey][dependencyName] = versionRange;
+  yield* writePackageJson(workspace, JSON.stringify(content)).pipe(
+    Effect.mapError(
+      (e) => new ModifyError({
+        workspace: workspace.name,
+        message: "Failed to write package.json",
+        cause: e
+      })
+    )
+  );
+});
+const formatPackageJson = (workspace) => Effect.gen(function* () {
+  const filePath = joinPath(workspace.path, "package.json");
+  const content = yield* readFile(filePath).pipe(
+    Effect.mapError(
+      (cause) => new ModifyError({
+        workspace: workspace.name,
+        message: "Failed to read package.json",
+        cause
+      })
+    )
+  );
+  yield* writePackageJson(workspace, content).pipe(
+    Effect.mapError(
+      (cause) => new ModifyError({
+        workspace: workspace.name,
+        message: "Failed to write package.json",
+        cause
+      })
+    )
+  );
+});
+const removeDependency = (options) => Effect.gen(function* () {
+  const { workspace, dependencyName } = options;
+  const content = yield* readPackageJson(workspace).pipe(
+    Effect.mapError(
+      (e) => new ModifyError({
+        workspace: workspace.name,
+        message: "Failed to read package.json",
+        cause: e
+      })
+    )
+  );
+  let removed = false;
+  for (const key of ALL_DEPENDENCY_KEYS) {
+    const deps = content[key];
+    if (deps && dependencyName in deps) {
+      delete deps[dependencyName];
+      removed = true;
+      if (Object.keys(deps).length === 0) {
+        delete content[key];
+      }
+    }
+  }
+  if (!removed) {
+    return yield* Effect.fail(
+      new DependencyNotFoundError({
+        workspace: workspace.name,
+        dependencyName
+      })
     );
-const parsedPackageJson = packageJsonSchema.parse(packageJson);
-return {
-location: dir,
-packageJSON: parsedPackageJson,
-workspace: transformPackageJsonToWorkspace(packageJson)
-};
-} catch {
-return void 0;
   }
+  yield* writePackageJson(workspace, JSON.stringify(content)).pipe(
+    Effect.mapError(
+      (e) => new ModifyError({
+        workspace: workspace.name,
+        message: "Failed to write package.json",
+        cause: e
+      })
+    )
+  );
+});
+function groupDependenciesByPackage(analysis, filterBySources = ["npm"]) {
+  const sourceSet = new Set(filterBySources);
+  const grouped = /* @__PURE__ */ new Map();
+  for (const workspace of analysis.workspaces) {
+    for (const dep of workspace.dependencies) {
+      if (!sourceSet.has(dep.source)) {
+        continue;
+      }
+      const instance = {
+        workspace: workspace.name,
+        versionRange: dep.versionRange,
+        type: dep.dependencyType
+      };
+      const existing = grouped.get(dep.name);
+      if (existing) {
+        existing.push(instance);
+      } else {
+        grouped.set(dep.name, [instance]);
+      }
+    }
+  }
+  return Array.from(grouped.entries()).map(([name, instances]) => ({ name, instances })).sort((a, b) => a.name.localeCompare(b.name));
 }
-
-
-
-
-
-
-
-const
-
-
-
-
-
-
-
-
-
-
-
-
-
+class InvalidSemverRangeError extends Data.TaggedError(
+  "InvalidSemverRangeError"
+) {
+}
+const isPinnedVersion = (versionRange) => semver.valid(versionRange.trim()) !== null;
+function detectUnpinnedVersions(analysis) {
+  return Effect.sync(() => {
+    const violations = [];
+    const dependenciesByPackage = groupDependenciesByPackage(analysis, ["npm"]);
+    for (const dep of dependenciesByPackage) {
+      for (const instance of dep.instances) {
+        if (instance.type === "peerDependency") continue;
+        if (!isPinnedVersion(instance.versionRange)) {
+          violations.push({
+            _tag: "ViolationUnpinnedVersion",
+            package: dep.name,
+            workspace: instance.workspace,
+            message: `Version range "${instance.versionRange}" is not pinned`,
+            versionRange: instance.versionRange,
+            dependencyType: instance.type
+          });
         }
       }
     }
-
-
-
-
-
-
-
-
-
-
+    return violations;
+  });
+}
+function detectVersionMismatches(analysis) {
+  return Effect.sync(() => {
+    const violations = [];
+    const dependenciesByPackage = groupDependenciesByPackage(analysis, ["npm"]);
+    for (const dep of dependenciesByPackage) {
+      const instances = dep.instances.filter((i) => i.type !== "peerDependency");
+      if (instances.length < 2) continue;
+      const versions = new Set(instances.map((i) => i.versionRange));
+      if (versions.size === 1) continue;
+      const versionList = Array.from(versions).join(", ");
+      const allVersions = Array.from(versions);
+      for (const instance of instances) {
+        violations.push({
+          _tag: "ViolationVersionMismatch",
+          package: dep.name,
+          workspace: instance.workspace,
+          message: `Multiple versions found: ${versionList}`,
+          versionRange: instance.versionRange,
+          dependencyType: instance.type,
+          allVersions
+        });
+      }
+    }
+    return violations;
+  });
+}
+function detectFormatPackageJson(analysis) {
+  return Effect.gen(function* () {
+    const violations = [];
+    for (const workspace of analysis.workspaces) {
+      const content = yield* getPackageJsonStr(workspace).pipe(
+        Effect.catchAll(() => Effect.succeed(null))
+      );
+      if (content === null) continue;
+      const sorted = sortPackageJson(content);
+      if (content !== sorted) {
+        violations.push({
+          _tag: "ViolationFormatPackageJson",
+          package: workspace.name,
+          workspace: workspace.name,
+          message: "package.json is not sorted"
+        });
       }
     }
-
-
+    return violations;
+  });
+}
+class Monoverse extends Effect.Service()("Monoverse", {
+  succeed: {
+    analyze: (startPath) => analyzeMonorepo(startPath),
+    validate: (analysis) => Effect.gen(function* () {
+      const mismatches = yield* detectVersionMismatches(analysis);
+      const unpinned = yield* detectUnpinnedVersions(analysis);
+      const formatting = yield* detectFormatPackageJson(analysis);
+      return [...mismatches, ...unpinned, ...formatting];
+    }),
+    addPackage: (options) => upsertDependency({
+      workspace: options.workspace,
+      dependencyName: options.packageName,
+      versionRange: options.versionRange,
+      dependencyType: options.dependencyType
+    }),
+    removePackage: (options) => removeDependency({
+      workspace: options.workspace,
+      dependencyName: options.packageName
+    }),
+    formatWorkspace: (workspace) => formatPackageJson(workspace),
+    formatAllWorkspaces: (analysis) => Effect.forEach(analysis.workspaces, formatPackageJson, {
+      discard: true
+    })
   }
-
-}
-
-
-
-
-
-
-
-
-
-
-const tempDir = await mkdtemp(path3.join(os.tmpdir(), "monoverse-"));
-const outputPath = path3.join(tempDir, "github-zip.zip");
-const outputDir = path3.join(tempDir, "github-extract");
-await rm(outputDir, { recursive: true, force: true });
-await rm(outputPath, { force: true });
-const { ok } = await downloadZip(url, outputPath);
-if (!ok) {
-throw new Error("Failed to download the zip file");
+}) {
+}
+const tui = Command.make("tui", {}, () => Effect.void);
+const cwd = process.cwd();
+const findCurrentWorkspace = Effect.gen(function* () {
+  const monoverse2 = yield* Monoverse;
+  const analysis = yield* monoverse2.analyze(cwd);
+  const workspace = analysis.workspaces.find((ws) => cwd.startsWith(ws.path));
+  if (!workspace) {
+    return yield* Effect.fail(
+      new Error("Not inside a workspace. Run from within a workspace directory.")
+    );
   }
-
-
-
-
-
-
-
+  return { analysis, workspace };
+});
+const toDependencyType = (type) => {
+  const map = {
+    dependency: "dependency",
+    dev: "devDependency",
+    peer: "peerDependency",
+    optional: "optionalDependency"
   };
+  return map[type];
 };
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+const packageArg$1 = Args.text({ name: "package" });
+const typeOption = Options.choice("type", [
+  "dependency",
+  "dev",
+  "peer",
+  "optional"
+]).pipe(Options.withAlias("t"), Options.withDefault("dependency"));
+const versionOption = Options.text("version").pipe(
+  Options.withAlias("v"),
+  Options.withDefault("latest")
+);
+const handler$1 = ({
+  package: pkg,
+  type,
+  version
+}) => Effect.gen(function* () {
+  const monoverse2 = yield* Monoverse;
+  const { workspace } = yield* findCurrentWorkspace;
+  const dependencyType = toDependencyType(type);
+  yield* monoverse2.addPackage({
+    packageName: pkg,
+    versionRange: version,
+    dependencyType,
+    workspace
   });
+  yield* Console.log(`Added ${pkg}@${version} to ${workspace.name}`);
+});
+const add = Command.make(
+  "add",
+  { package: packageArg$1, type: typeOption, version: versionOption },
+  handler$1
+);
+const packageArg = Args.text({ name: "package" });
+const handler = ({ package: pkg }) => Effect.gen(function* () {
+  const monoverse2 = yield* Monoverse;
+  const { workspace } = yield* findCurrentWorkspace;
+  yield* monoverse2.removePackage({
+    packageName: pkg,
+    workspace
+  });
+  yield* Console.log(`Removed ${pkg} from ${workspace.name}`);
+});
+const remove = Command.make("remove", { package: packageArg }, handler);
+const rm = Command.make("rm", { package: packageArg }, handler);
+const deleteCmd = Command.make("delete", { package: packageArg }, handler);
+const format = Command.make(
+  "format",
+  {},
+  () => Effect.gen(function* () {
+    const monoverse2 = yield* Monoverse;
+    const analysis = yield* monoverse2.analyze(cwd);
+    yield* monoverse2.formatAllWorkspaces(analysis);
+    yield* Console.log(`Formatted ${analysis.workspaces.length} workspaces`);
+  })
+);
+const c = {
+  reset: "\x1B[0m",
+  red: "\x1B[38;2;238;136;136m",
+  green: "\x1B[38;2;136;238;136m",
+  gray: "\x1B[38;2;136;136;136m",
+  dim: "\x1B[38;2;102;102;102m",
+  white: "\x1B[38;2;255;255;255m"
 };
-
-
-
-
-
-
-}
-const workspacesMap = monorepoInfo.workspaces.reduce(
-(acc, { workspace }) => {
-acc[workspace.name] = workspace;
-return acc;
-},
-{}
+const formatViolations = (violations, workspaces, root) => {
+  const pathByName = new Map(
+    workspaces.map((w) => [
+      w.name,
+      w.path === root ? "." : w.path.replace(root + "/", "")
+    ])
   );
-const
-
-
-
-internalDependencies
-};
-});
-const getDependencyType = (workspace) => {
-const hasDecendants = dependencyLinks.find((v) => v.workspaceName === workspace).internalDependencies.length > 0;
-const hasAncestors = dependencyLinks.filter((v) => {
-if (v.workspaceName === workspace)
-return false;
-if (v.internalDependencies.includes(workspace))
-return true;
-return false;
-}).length > 0;
-if (hasDecendants && hasAncestors) {
-return "internal";
+  const grouped = /* @__PURE__ */ new Map();
+  for (const v of violations) {
+    if (!grouped.has(v.workspace)) {
+      grouped.set(v.workspace, /* @__PURE__ */ new Map());
     }
-
-
+    const pkgMap = grouped.get(v.workspace);
+    if (!pkgMap.has(v.package)) {
+      pkgMap.set(v.package, []);
     }
-
-};
-return dependencyLinks.map((link) => {
-return {
-...link,
-dependenciesCount: workspacesMap[link.workspaceName].dependencies.length,
-type: getDependencyType(link.workspaceName)
-};
-});
-};
-
-// trpc/functionality/sync-updates.ts
-var getSyncUpdates = (dirPath) => {
-const monorepoInfo = getMonorepoInfo(dirPath);
-if (!monorepoInfo) {
-return null;
+    pkgMap.get(v.package).push(v);
   }
-const
-(
-
-return
-}
-{
-
-
-return
-
-
-
-
-
-
-
-
-
-};
-
-// trpc/functionality/sync-versions.ts
-var syncVersions = (dirPath, updates) => {
-const monorepoInfo = getMonorepoInfo(dirPath);
-if (!monorepoInfo) {
-return null;
+  const formatDetail = (v) => {
+    const tag = v._tag.replace("Violation", "");
+    if (v._tag === "ViolationVersionMismatch" && v.allVersions) {
+      return `${tag} (${v.allVersions.join(", ")})`;
+    }
+    if (v._tag === "ViolationUnpinnedVersion" && v.versionRange) {
+      return `${tag} (${v.versionRange})`;
+    }
+    return tag;
+  };
+  const lines = [];
+  for (const [workspace, packages] of grouped) {
+    const path = pathByName.get(workspace) ?? "";
+    lines.push(`${c.white}${workspace}${c.dim} (${path})${c.reset}`);
+    for (const [pkg, vList] of packages) {
+      const details = vList.map(formatDetail).join(", ");
+      lines.push(`${c.gray}  ${pkg.padEnd(28)}${c.red}${details}${c.reset}`);
+    }
   }
-return
+  return lines.join("\n");
 };
-
-
-
-
-
-
-
-
-
-
-return "Hello, World!";
-}),
-getOverview: publicProcedure.input(
-z5.object({
-type: z5.union([z5.literal("filepath"), z5.literal("url")]),
-value: z5.string()
-})
-).query(async ({ input }) => {
-const { type, value } = input;
-if (type === "filepath") {
-const dirPath = value;
-const result = getOverview(dirPath);
-return {
-success: true,
-result
-};
-} else {
-const { projectDir, cleanupDir } = await downloadGitRepo(value);
-const result = getOverview(projectDir);
-await cleanupDir();
-return {
-success: true,
-result
-};
-}
-}),
-getSyncUpdates: publicProcedure.input(
-z5.object({
-type: z5.union([z5.literal("filepath"), z5.literal("url")]),
-value: z5.string()
-})
-).query(async ({ input }) => {
-const { type, value } = input;
-if (type === "filepath") {
-const dirPath = value;
-const result = getSyncUpdates(dirPath);
-return {
-success: true,
-result
-};
-} else {
-const { projectDir, cleanupDir } = await downloadGitRepo(value);
-const result = getSyncUpdates(projectDir);
-await cleanupDir();
-return {
-success: true,
-result
-};
+const lint = Command.make(
+  "lint",
+  {},
+  () => Effect.gen(function* () {
+    const monoverse2 = yield* Monoverse;
+    const analysis = yield* monoverse2.analyze(cwd);
+    const violations = yield* monoverse2.validate(analysis);
+    if (violations.length === 0) {
+      yield* Console.log(`${c.green}No issues found${c.reset}`);
+      return;
     }
-
-
-
-
-
-
-
-
-})
-)
-})
-).mutation(async ({ input }) => {
-const result = syncVersions(input.dirPath, input.updates);
-return result;
+    yield* Console.error(
+      `${c.red}Found ${violations.length} issues${c.reset}
+`
+    );
+    yield* Console.error(
+      formatViolations(violations, analysis.workspaces, analysis.root)
+    );
+    yield* Effect.sync(() => process.exit(1));
   })
+);
+const monoverse = Command.make(
+  "monoverse",
+  {},
+  () => Console.log("Use --help to see available commands")
+);
+const command = monoverse.pipe(
+  Command.withSubcommands([tui, add, remove, rm, deleteCmd, format, lint])
+);
+const cli = Command.run(command, {
+  name: "monoverse",
+  version: "v0.0.12"
 });
-
-
-
-
-
-
-
-router: appRouter
+const MainLayer = Layer.mergeAll(
+  NodeContext.layer,
+  Monoverse.Default,
+  CliConfig.layer({
+    isCaseSensitive: true,
+    showBuiltIns: false,
+    showTypes: false
   })
 );
-
-app.get("*", (_, res) => {
-res.sendFile(path4.join(__DIRNAME, "dist", "index.html"));
-});
-var port = 21212;
-app.listen(port, () => {
-console.log(`Server listening... http://localhost:${port}/`);
-});
+cli(process.argv).pipe(Effect.provide(MainLayer), NodeRuntime.runMain);