@lpm-registry/cli 0.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +36 -0
- package/LICENSE +15 -0
- package/README.md +406 -0
- package/bin/lpm.js +334 -0
- package/index.d.ts +131 -0
- package/index.js +31 -0
- package/lib/api.js +324 -0
- package/lib/commands/add.js +1217 -0
- package/lib/commands/audit.js +283 -0
- package/lib/commands/cache.js +209 -0
- package/lib/commands/check-name.js +112 -0
- package/lib/commands/config.js +174 -0
- package/lib/commands/doctor.js +142 -0
- package/lib/commands/info.js +215 -0
- package/lib/commands/init.js +146 -0
- package/lib/commands/install.js +217 -0
- package/lib/commands/login.js +547 -0
- package/lib/commands/logout.js +94 -0
- package/lib/commands/marketplace-compare.js +164 -0
- package/lib/commands/marketplace-earnings.js +89 -0
- package/lib/commands/mcp-setup.js +363 -0
- package/lib/commands/open.js +82 -0
- package/lib/commands/outdated.js +291 -0
- package/lib/commands/pool-stats.js +100 -0
- package/lib/commands/publish.js +707 -0
- package/lib/commands/quality.js +211 -0
- package/lib/commands/remove.js +82 -0
- package/lib/commands/run.js +14 -0
- package/lib/commands/search.js +143 -0
- package/lib/commands/setup.js +92 -0
- package/lib/commands/skills.js +863 -0
- package/lib/commands/token-rotate.js +25 -0
- package/lib/commands/whoami.js +129 -0
- package/lib/config.js +240 -0
- package/lib/constants.js +190 -0
- package/lib/ecosystem.js +501 -0
- package/lib/editors.js +215 -0
- package/lib/import-rewriter.js +364 -0
- package/lib/install-targets/mcp-server.js +245 -0
- package/lib/install-targets/vscode-extension.js +178 -0
- package/lib/install-targets.js +82 -0
- package/lib/integrity.js +179 -0
- package/lib/lpm-config-prompts.js +102 -0
- package/lib/lpm-config.js +408 -0
- package/lib/project-utils.js +152 -0
- package/lib/quality/checks.js +654 -0
- package/lib/quality/display.js +139 -0
- package/lib/quality/score.js +115 -0
- package/lib/quality/swift-checks.js +447 -0
- package/lib/safe-path.js +180 -0
- package/lib/secure-store.js +288 -0
- package/lib/swift-project.js +637 -0
- package/lib/ui.js +40 -0
- package/package.json +74 -0
package/lib/ecosystem.js
ADDED
|
@@ -0,0 +1,501 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Ecosystem Detection & Non-JS Packaging Utilities
|
|
3
|
+
*
|
|
4
|
+
* Detects project ecosystem (JS, Swift, Rust, Python, Ruby) from manifest files,
|
|
5
|
+
* reads ecosystem-specific manifests, and creates tarballs without npm.
|
|
6
|
+
*
|
|
7
|
+
* @module cli/lib/ecosystem
|
|
8
|
+
*/
|
|
9
|
+
|
|
10
|
+
import { exec } from "node:child_process"
|
|
11
|
+
import fs from "node:fs"
|
|
12
|
+
import path from "node:path"
|
|
13
|
+
import { promisify } from "node:util"
|
|
14
|
+
import * as tar from "tar"
|
|
15
|
+
|
|
16
|
+
const execAsync = promisify(exec)
|
|
17
|
+
|
|
18
|
+
/**
 * Ecosystem manifest files in detection priority order.
 * First match wins, so ecosystem-specific manifests come before
 * package.json (which many polyglot repos also contain as a fallback).
 */
const ECOSYSTEM_MANIFESTS = [
  { file: "Package.swift", ecosystem: "swift" },
  { file: "Cargo.toml", ecosystem: "rust" },
  { file: "pyproject.toml", ecosystem: "python" },
  // Ruby: the module doc advertises Ruby detection and both
  // ECOSYSTEM_SKIP_PATTERNS and _ECOSYSTEM_REQUIRED_FILES define "ruby"
  // entries, so probe for a Gemfile here as well.
  { file: "Gemfile", ecosystem: "ruby" },
  { file: "package.json", ecosystem: "js" },
]
|
|
28
|
+
|
|
29
|
+
/**
 * Files/directories to always skip when creating tarballs.
 * Keyed by ecosystem.
 *
 * Pattern forms (as interpreted by collectFilesRecursive below):
 *   "name/" — directory name at any depth, "*.ext" — file extension,
 *   "name"  — exact entry name.
 */
const ECOSYSTEM_SKIP_PATTERNS = {
  // Swift: build products and Xcode-generated state
  swift: [
    ".build/",
    "DerivedData/",
    "Pods/",
    ".swiftpm/",
    "*.xcworkspace/",
    "xcuserdata/",
    ".DS_Store",
  ],
  // Rust: cargo build output
  rust: ["target/", ".DS_Store"],
  // Python: bytecode, virtualenvs, and build artifacts
  python: [
    "__pycache__/",
    "*.pyc",
    ".venv/",
    "venv/",
    "dist/",
    "*.egg-info/",
    ".DS_Store",
  ],
  // Ruby: bundler state and built gems
  ruby: [".bundle/", "vendor/bundle/", "*.gem", ".DS_Store"],
}
|
|
55
|
+
|
|
56
|
+
/**
 * Files to always include in tarball for each ecosystem.
 * Underscore-prefixed and not referenced elsewhere in this module —
 * presumably reserved for future packing logic (TODO confirm before removal).
 */
const _ECOSYSTEM_REQUIRED_FILES = {
  swift: ["Package.swift", "lpm.config.json"],
  rust: ["Cargo.toml", "lpm.config.json"],
  python: ["pyproject.toml", "lpm.config.json"],
  ruby: ["Gemfile", "lpm.config.json"],
}
|
|
65
|
+
|
|
66
|
+
/**
 * Detect the ecosystem of the current project by probing for known
 * manifest files in priority order (see ECOSYSTEM_MANIFESTS).
 *
 * @param {string} [cwd] - Working directory (defaults to process.cwd())
 * @returns {{ ecosystem: string|null, manifestFile: string|null }} Both
 *   fields are null when no known manifest exists in cwd.
 */
export function detectEcosystem(cwd = process.cwd()) {
  const hit = ECOSYSTEM_MANIFESTS.find(({ file }) =>
    fs.existsSync(path.resolve(cwd, file)),
  )
  if (hit) {
    return { ecosystem: hit.ecosystem, manifestFile: hit.file }
  }
  // No manifest found — caller should handle this
  return { ecosystem: null, manifestFile: null }
}
|
|
83
|
+
|
|
84
|
+
/**
 * Read and parse the Swift Package.swift manifest using `swift package dump-package`.
 * Returns structured data about the Swift package.
 *
 * @param {string} [cwd] - Working directory
 * @returns {Promise<object>} Parsed Swift manifest JSON
 * @throws {Error} If swift CLI is not available or manifest is invalid
 */
export async function readSwiftManifest(cwd = process.cwd()) {
  try {
    // 30s timeout: dump-package can resolve the manifest slowly on cold caches
    const result = await execAsync("swift package dump-package", {
      cwd,
      timeout: 30_000,
    })
    return JSON.parse(result.stdout)
  } catch (err) {
    // ENOENT / "not found" means the swift binary itself is missing
    const toolchainMissing =
      err.code === "ENOENT" || err.message.includes("not found")
    if (toolchainMissing) {
      throw new Error(
        "Swift toolchain not found. Install Xcode or Swift from swift.org.",
      )
    }
    throw new Error(`Failed to read Package.swift: ${err.message}`)
  }
}
|
|
109
|
+
|
|
110
|
+
/**
 * Extract structured metadata from a Swift manifest for storage in versionMeta.
 *
 * @param {object} manifest - Parsed output from `swift package dump-package`
 * @returns {object} Structured Swift metadata (toolsVersion, platforms,
 *   products, targets, dependencies — arrays default to empty)
 */
export function extractSwiftMetadata(manifest) {
  // Target-level dependency entries arrive as single-key wrappers
  const mapTargetDependency = d => {
    if (d.byName) return { type: "byName", name: d.byName[0] }
    if (d.product) return { type: "product", name: d.product[0] }
    return d
  }

  // Package-level dependencies: remote (sourceControl) or local (fileSystem)
  const mapPackageDependency = dep => {
    if (dep.sourceControl) {
      const sc = dep.sourceControl[0]
      return {
        type: "sourceControl",
        identity: sc.identity,
        location: sc.location?.remote?.[0] || null,
        requirement: sc.requirement || null,
      }
    }
    if (dep.fileSystem) {
      const local = dep.fileSystem[0]
      return {
        type: "fileSystem",
        identity: local.identity,
        path: local.path,
      }
    }
    return dep
  }

  return {
    toolsVersion: manifest.toolsVersion?._version || null,
    // Platform declarations (e.g. ios 15.0)
    platforms: (manifest.platforms || []).map(p => ({
      name: p.platformName,
      version: p.version,
    })),
    // Products (libraries, executables); type is the single key of p.type
    products: (manifest.products || []).map(p => ({
      name: p.name,
      type: p.type ? Object.keys(p.type)[0] : "library",
      targets: p.targets || [],
    })),
    targets: (manifest.targets || []).map(t => ({
      name: t.name,
      type: t.type || "regular",
      dependencies: (t.dependencies || []).map(mapTargetDependency),
    })),
    // External package dependencies
    dependencies: (manifest.dependencies || []).map(mapPackageDependency),
  }
}
|
|
181
|
+
|
|
182
|
+
/**
 * Map Swift dependencies to LPM dependency tree format.
 *
 * @param {object} swiftMetadata - Output from extractSwiftMetadata
 * @returns {{ lpm: Array, external: Array }} LPM-hosted vs. external deps
 */
export function mapSwiftDependencies(swiftMetadata) {
  const buckets = { lpm: [], external: [] }

  for (const dep of swiftMetadata.dependencies) {
    // fileSystem (local-only) dependencies are intentionally skipped
    if (dep.type !== "sourceControl") continue

    // Packages hosted on lpm.dev are tracked separately — for future use
    const target = dep.location?.includes("lpm.dev")
      ? buckets.lpm
      : buckets.external
    target.push({
      name: dep.identity,
      location: dep.location,
      requirement: dep.requirement,
    })
  }

  return buckets
}
|
|
216
|
+
|
|
217
|
+
/**
|
|
218
|
+
* Parse an XCFramework Info.plist to extract platform slices.
|
|
219
|
+
* Handles the XML plist format without external dependencies.
|
|
220
|
+
*
|
|
221
|
+
* @param {string} plistPath - Path to Info.plist
|
|
222
|
+
* @returns {{ slices: Array<{ identifier: string, platform: string, variant: string|null, architectures: string[] }>, formatVersion: string|null }}
|
|
223
|
+
*/
|
|
224
|
+
/**
 * Extract the content of an outer <array>...</array> after a given <key>,
 * correctly handling nested <array> tags inside (e.g. SupportedArchitectures).
 *
 * @param {string} xml - Full plist XML string
 * @param {string} keyName - The plist key name (e.g. "AvailableLibraries")
 * @returns {string|null} The inner content of the matched array, or null
 *   when the key/array is absent or the tags are unbalanced
 */
function extractOuterArray(xml, keyName) {
  const marker = `<key>${keyName}</key>`
  const markerAt = xml.indexOf(marker)
  if (markerAt === -1) return null

  const tail = xml.substring(markerAt + marker.length)
  const openAt = tail.indexOf("<array>")
  if (openAt === -1) return null

  const contentStart = openAt + "<array>".length
  let depth = 1

  // Scan forward, tracking nesting depth until the matching close tag
  for (let pos = contentStart; pos < tail.length; ) {
    if (tail.startsWith("<array>", pos)) {
      depth += 1
      pos += 7
    } else if (tail.startsWith("</array>", pos)) {
      depth -= 1
      if (depth === 0) return tail.substring(contentStart, pos)
      pos += 8
    } else {
      pos += 1
    }
  }

  // Ran off the end without closing the outer array
  return null
}
|
|
260
|
+
|
|
261
|
+
/**
 * Parse an XCFramework Info.plist to extract platform slices.
 * Handles the XML plist format without external dependencies.
 *
 * @param {string} plistPath - Path to Info.plist
 * @returns {{ slices: Array<{ identifier: string, platform: string, variant: string|null, architectures: string[] }>, formatVersion: string|null }}
 */
export function parseXCFrameworkPlist(plistPath) {
  const xml = fs.readFileSync(plistPath, "utf-8")

  // XCFrameworkFormatVersion is a plain <string> value
  const fvMatch = xml.match(
    /<key>XCFrameworkFormatVersion<\/key>\s*<string>([^<]+)<\/string>/,
  )
  const formatVersion = fvMatch ? fvMatch[1] : null

  // AvailableLibraries array (handles nested <array> for architectures)
  const libsContent = extractOuterArray(xml, "AvailableLibraries")
  if (!libsContent) return { slices: [], formatVersion }

  const slices = []
  // Each library slice is one <dict> entry
  for (const block of libsContent.match(/<dict>[\s\S]*?<\/dict>/g) || []) {
    const id = block.match(
      /<key>LibraryIdentifier<\/key>\s*<string>([^<]+)<\/string>/,
    )
    const platform = block.match(
      /<key>SupportedPlatform<\/key>\s*<string>([^<]+)<\/string>/,
    )
    // A slice without identifier + platform is unusable — skip it
    if (!id || !platform) continue

    const variant = block.match(
      /<key>SupportedPlatformVariant<\/key>\s*<string>([^<]+)<\/string>/,
    )
    const archMatch = block.match(
      /<key>SupportedArchitectures<\/key>\s*<array>([\s\S]*?)<\/array>/,
    )
    const architectures = archMatch
      ? (archMatch[1].match(/<string>([^<]+)<\/string>/g) || []).map(s =>
          s.replace(/<\/?string>/g, ""),
        )
      : []

    slices.push({
      identifier: id[1],
      platform: platform[1],
      variant: variant ? variant[1] : null,
      architectures,
    })
  }

  return { slices, formatVersion }
}
|
|
312
|
+
|
|
313
|
+
/**
 * Detect XCFramework directories in the project.
 * If found and Info.plist exists, parses platform slices.
 *
 * @param {string} [cwd] - Working directory
 * @returns {{ found: boolean, name: string|null, path: string|null, hasInfoPlist: boolean, slices: Array, formatVersion: string|null }}
 */
export function detectXCFramework(cwd = process.cwd()) {
  // First *.xcframework directory wins
  const entry = fs
    .readdirSync(cwd, { withFileTypes: true })
    .find(e => e.isDirectory() && e.name.endsWith(".xcframework"))

  if (!entry) {
    return {
      found: false,
      name: null,
      path: null,
      hasInfoPlist: false,
      slices: [],
      formatVersion: null,
    }
  }

  const xcfPath = path.join(cwd, entry.name)
  const infoPlistPath = path.join(xcfPath, "Info.plist")
  const result = {
    found: true,
    name: entry.name,
    path: xcfPath,
    hasInfoPlist: false,
    slices: [],
    formatVersion: null,
  }

  if (!fs.existsSync(infoPlistPath)) return result

  // Info.plist present — enrich the result with parsed slice data
  const { slices, formatVersion } = parseXCFrameworkPlist(infoPlistPath)
  return { ...result, hasInfoPlist: true, slices, formatVersion }
}
|
|
361
|
+
|
|
362
|
+
/**
 * Recursively collect files under `dir`, skipping anything matched by
 * `skipPatterns`. Three pattern forms are understood:
 *   "name/" — skip a directory by name (at any depth),
 *   "*.ext" — skip files by extension,
 *   "name"  — skip an exact entry name.
 * Unreadable directories and unstat-able files are silently skipped.
 *
 * @param {string} dir - Directory to scan
 * @param {string} baseDir - Base directory for relative paths
 * @param {string[]} skipPatterns - Patterns to skip
 * @returns {Array<{ path: string, size: number }>}
 */
function collectFilesRecursive(dir, baseDir, skipPatterns) {
  let dirents
  try {
    dirents = fs.readdirSync(dir, { withFileTypes: true })
  } catch {
    // Unreadable directory — treat as empty
    return []
  }

  const isSkipped = (dirent, rel) =>
    skipPatterns.some(pattern => {
      if (pattern.endsWith("/")) {
        // Directory pattern: match by name or as a relative-path prefix
        const dirName = pattern.slice(0, -1)
        return dirent.name === dirName || rel.startsWith(`${dirName}/`)
      }
      if (pattern.startsWith("*.")) {
        // Extension pattern
        return dirent.name.endsWith(pattern.slice(1))
      }
      // Exact-name pattern
      return dirent.name === pattern
    })

  const collected = []
  for (const dirent of dirents) {
    const absolute = path.join(dir, dirent.name)
    const relative = path.relative(baseDir, absolute)

    if (isSkipped(dirent, relative)) continue

    if (dirent.isDirectory()) {
      collected.push(...collectFilesRecursive(absolute, baseDir, skipPatterns))
    } else if (dirent.isFile()) {
      try {
        collected.push({ path: relative, size: fs.statSync(absolute).size })
      } catch {
        // Skip files we can't stat
      }
    }
  }

  return collected
}
|
|
416
|
+
|
|
417
|
+
/**
 * Collect all files that should be included in the tarball for a non-JS ecosystem.
 * Combines universal skips, ecosystem-specific skips, and .gitignore entries.
 *
 * NOTE(review): .gitignore lines are treated as plain name/extension/dir
 * patterns — negation ("!") and anchored patterns are not given full
 * gitignore semantics.
 *
 * @param {string} ecosystem - The ecosystem identifier
 * @param {string} [cwd] - Working directory
 * @returns {Array<{ path: string, size: number }>}
 */
export function collectPackageFiles(ecosystem, cwd = process.cwd()) {
  // Universal skips plus ecosystem-specific ones
  const skipPatterns = [
    ".git/",
    "node_modules/",
    ".DS_Store",
    ...(ECOSYSTEM_SKIP_PATTERNS[ecosystem] || []),
  ]

  // Fold in .gitignore entries when present (comments/blanks dropped)
  try {
    const gitignore = fs.readFileSync(path.join(cwd, ".gitignore"), "utf8")
    for (const raw of gitignore.split("\n")) {
      const line = raw.trim()
      if (line && !line.startsWith("#")) skipPatterns.push(line)
    }
  } catch {
    // Missing or unreadable .gitignore — nothing extra to skip
  }

  return collectFilesRecursive(cwd, cwd, skipPatterns)
}
|
|
451
|
+
|
|
452
|
+
/**
 * Create a tarball (.tgz) from the project's package files using node-tar.
 * Used for non-JS ecosystems where npm pack is not available.
 *
 * @param {string} ecosystem - Ecosystem identifier
 * @param {string} name - Package name (for tarball filename)
 * @param {string} version - Package version
 * @param {string} [cwd] - Working directory
 * @returns {Promise<{ tarballPath: string, files: Array<{ path: string, size: number }>, unpackedSize: number, fileCount: number }>}
 * @throws {Error} When no files are found to package
 */
export async function createEcosystemTarball(
  ecosystem,
  name,
  version,
  cwd = process.cwd(),
) {
  const files = collectPackageFiles(ecosystem, cwd)
  if (!files.length) {
    throw new Error("No files found to package. Check your project directory.")
  }

  // Total size of package contents before compression
  let unpackedSize = 0
  for (const { size } of files) unpackedSize += size

  // Sanitize scoped names (e.g. "@scope/pkg" -> "scope-pkg") for the filename
  const safeName = name.replace(/[/@]/g, "-").replace(/^-/, "")
  const tarballPath = path.resolve(cwd, `${safeName}-${version}.tgz`)

  // Write the gzipped tarball under a package/ prefix — this matches the
  // npm pack convention so `lpm add` can extract with strip:1.
  await tar.create(
    {
      gzip: true,
      file: tarballPath,
      cwd,
      prefix: "package",
    },
    files.map(f => f.path),
  )

  return {
    tarballPath,
    files,
    unpackedSize,
    fileCount: files.length,
  }
}
|
package/lib/editors.js
ADDED
|
@@ -0,0 +1,215 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Shared editor detection and MCP config helpers.
|
|
3
|
+
*
|
|
4
|
+
* Used by both `lpm mcp setup` (for the LPM registry MCP server)
|
|
5
|
+
* and `lpm add` (for installing third-party MCP server packages).
|
|
6
|
+
*
|
|
7
|
+
* @module cli/lib/editors
|
|
8
|
+
*/
|
|
9
|
+
|
|
10
|
+
import fs from "node:fs"
|
|
11
|
+
import os from "node:os"
|
|
12
|
+
import path from "node:path"
|
|
13
|
+
|
|
14
|
+
const HOME = os.homedir()
|
|
15
|
+
|
|
16
|
+
// ============================================================================
|
|
17
|
+
// Editor definitions
|
|
18
|
+
// ============================================================================
|
|
19
|
+
|
|
20
|
+
/**
 * Known MCP-capable editors. Each entry describes where the editor keeps
 * its MCP server config (global and, when supported, per-project), the
 * top-level JSON key holding server entries, and a detect() probe that
 * checks for the editor's config directory/file on disk.
 */
export const EDITORS = [
  {
    id: "claude-code",
    name: "Claude Code",
    // Global MCP config lives directly in ~/.claude.json
    globalPath: path.join(HOME, ".claude.json"),
    // Project-scoped config at the repo root
    projectPath: ".mcp.json",
    serverKey: "mcpServers",
    // Present if either ~/.claude/ or ~/.claude.json exists
    detect: () =>
      fs.existsSync(path.join(HOME, ".claude")) ||
      fs.existsSync(path.join(HOME, ".claude.json")),
  },
  {
    id: "cursor",
    name: "Cursor",
    globalPath: path.join(HOME, ".cursor", "mcp.json"),
    projectPath: path.join(".cursor", "mcp.json"),
    serverKey: "mcpServers",
    detect: () => fs.existsSync(path.join(HOME, ".cursor")),
  },
  {
    id: "vscode",
    name: "VS Code",
    // Per-user config location varies by platform (macOS / Windows / Linux)
    globalPath:
      process.platform === "darwin"
        ? path.join(
            HOME,
            "Library",
            "Application Support",
            "Code",
            "User",
            "mcp.json",
          )
        : process.platform === "win32"
          ? path.join(process.env.APPDATA || "", "Code", "User", "mcp.json")
          : path.join(HOME, ".config", "Code", "User", "mcp.json"),
    projectPath: path.join(".vscode", "mcp.json"),
    // VS Code uses "servers" as its top-level key, unlike the others
    serverKey: "servers",
    detect: () => {
      if (process.platform === "darwin") {
        return fs.existsSync(
          path.join(HOME, "Library", "Application Support", "Code"),
        )
      }
      if (process.platform === "win32") {
        return fs.existsSync(path.join(process.env.APPDATA || "", "Code"))
      }
      return fs.existsSync(path.join(HOME, ".config", "Code"))
    },
  },
  {
    id: "claude-desktop",
    name: "Claude Desktop",
    // Per-user config location varies by platform (macOS / Windows / Linux)
    globalPath:
      process.platform === "darwin"
        ? path.join(
            HOME,
            "Library",
            "Application Support",
            "Claude",
            "claude_desktop_config.json",
          )
        : process.platform === "win32"
          ? path.join(
              process.env.APPDATA || "",
              "Claude",
              "claude_desktop_config.json",
            )
          : path.join(HOME, ".config", "Claude", "claude_desktop_config.json"),
    // No project-level config supported
    projectPath: null,
    serverKey: "mcpServers",
    detect: () => {
      if (process.platform === "darwin") {
        return fs.existsSync(
          path.join(HOME, "Library", "Application Support", "Claude"),
        )
      }
      if (process.platform === "win32") {
        return fs.existsSync(path.join(process.env.APPDATA || "", "Claude"))
      }
      return fs.existsSync(path.join(HOME, ".config", "Claude"))
    },
  },
  {
    id: "windsurf",
    name: "Windsurf",
    globalPath: path.join(HOME, ".codeium", "windsurf", "mcp_config.json"),
    // No project-level config supported
    projectPath: null,
    serverKey: "mcpServers",
    detect: () => fs.existsSync(path.join(HOME, ".codeium", "windsurf")),
  },
]
|
|
111
|
+
|
|
112
|
+
// ============================================================================
|
|
113
|
+
// JSON file helpers
|
|
114
|
+
// ============================================================================
|
|
115
|
+
|
|
116
|
+
/**
 * Read and parse a JSON file, returning an empty object on any failure
 * (missing file, unreadable file, or invalid JSON).
 *
 * @param {string} filePath - Path to the JSON file
 * @returns {object}
 */
export function readJsonSafe(filePath) {
  let parsed = {}
  try {
    parsed = JSON.parse(fs.readFileSync(filePath, "utf-8"))
  } catch {
    // Fall through with the empty default
  }
  return parsed
}
|
|
123
|
+
|
|
124
|
+
/**
 * Serialize `data` as pretty-printed JSON (2-space indent, trailing
 * newline) to `filePath`, creating parent directories as needed.
 *
 * @param {string} filePath - Destination file path
 * @param {object} data - JSON-serializable value
 */
export function writeJson(filePath, data) {
  // mkdirSync with recursive:true is a no-op when the directory already
  // exists, so no existsSync pre-check is needed (also avoids a TOCTOU race).
  fs.mkdirSync(path.dirname(filePath), { recursive: true })
  fs.writeFileSync(filePath, `${JSON.stringify(data, null, 2)}\n`)
}
|
|
131
|
+
|
|
132
|
+
// ============================================================================
|
|
133
|
+
// MCP config helpers (generic — work with any server name)
|
|
134
|
+
// ============================================================================
|
|
135
|
+
|
|
136
|
+
/**
 * Add an MCP server entry to an editor config file.
 * Creates the file (and the top-level key) when absent; overwrites any
 * existing entry with the same name.
 *
 * @param {string} filePath - Path to the editor's MCP config JSON
 * @param {string} serverKey - Top-level key ('mcpServers' or 'servers')
 * @param {string} serverName - Name for the server entry
 * @param {object} serverConfig - Server config (command, args, env)
 */
export function addMcpServer(filePath, serverKey, serverName, serverConfig) {
  const config = readJsonSafe(filePath)
  config[serverKey] = config[serverKey] || {}
  config[serverKey][serverName] = serverConfig
  writeJson(filePath, config)
}
|
|
152
|
+
|
|
153
|
+
/**
 * Remove an MCP server entry from an editor config file.
 * Rewrites the file only when the entry was actually present.
 *
 * @param {string} filePath - Path to the editor's MCP config JSON
 * @param {string} serverKey - Top-level key ('mcpServers' or 'servers')
 * @param {string} serverName - Name of the server entry to remove
 * @returns {boolean} Whether the server was found and removed
 */
export function removeMcpServerEntry(filePath, serverKey, serverName) {
  if (!fs.existsSync(filePath)) return false

  const config = readJsonSafe(filePath)
  const exists = Boolean(config[serverKey]?.[serverName])
  if (!exists) return false

  delete config[serverKey][serverName]
  writeJson(filePath, config)
  return true
}
|
|
171
|
+
|
|
172
|
+
/**
 * Check if an MCP server entry exists in an editor config file.
 *
 * @param {string} filePath - Path to the editor's MCP config JSON
 * @param {string} serverKey - Top-level key ('mcpServers' or 'servers')
 * @param {string} serverName - Name of the server entry
 * @returns {boolean}
 */
export function hasMcpServer(filePath, serverKey, serverName) {
  return fs.existsSync(filePath)
    ? Boolean(readJsonSafe(filePath)[serverKey]?.[serverName])
    : false
}
|
|
185
|
+
|
|
186
|
+
/**
 * Read MCP server config entry from an editor config file.
 *
 * @param {string} filePath - Path to the editor's MCP config JSON
 * @param {string} serverKey - Top-level key ('mcpServers' or 'servers')
 * @param {string} serverName - Name of the server entry
 * @returns {object|null} The entry, or null when the file or entry is missing
 */
export function getMcpServerConfig(filePath, serverKey, serverName) {
  if (!fs.existsSync(filePath)) return null
  const entry = readJsonSafe(filePath)[serverKey]?.[serverName]
  return entry || null
}
|
|
199
|
+
|
|
200
|
+
/**
 * Detect installed editors by running each definition's detect() probe.
 * @returns {Array} Array of editor definitions that are installed
 */
export function detectEditors() {
  const installed = []
  for (const editor of EDITORS) {
    if (editor.detect()) installed.push(editor)
  }
  return installed
}
|
|
207
|
+
|
|
208
|
+
/**
 * Shorten a path for display by replacing the home-directory prefix with ~.
 * Only a true prefix is replaced; previously `String.replace` swapped the
 * first occurrence of the home dir anywhere in the string, which could
 * mangle paths that merely contain it (e.g. "/backup/home/user/x").
 *
 * @param {string} fullPath - Path to shorten
 * @returns {string} Path with the home directory abbreviated to ~
 */
export function shortPath(fullPath) {
  const home = os.homedir()
  return fullPath.startsWith(home)
    ? `~${fullPath.slice(home.length)}`
    : fullPath
}
|