@visulima/vis 0.0.1 → 1.0.0-alpha.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +29 -0
- package/LICENSE.md +21 -0
- package/README.md +123 -29
- package/dist/ai-analysis.d.ts +40 -0
- package/dist/ai-cache.d.ts +21 -0
- package/dist/bin.d.ts +1 -0
- package/dist/bin.js +147 -0
- package/dist/catalog.d.ts +110 -0
- package/dist/commands/affected.d.ts +3 -0
- package/dist/commands/ai.d.ts +3 -0
- package/dist/commands/analyze.d.ts +3 -0
- package/dist/commands/check.d.ts +3 -0
- package/dist/commands/graph.d.ts +3 -0
- package/dist/commands/hook/constants.d.ts +8 -0
- package/dist/commands/hook/index.d.ts +3 -0
- package/dist/commands/hook/install.d.ts +7 -0
- package/dist/commands/hook/migrate.d.ts +27 -0
- package/dist/commands/hook/uninstall.d.ts +3 -0
- package/dist/commands/migrate/constants.d.ts +12 -0
- package/dist/commands/migrate/deps.d.ts +32 -0
- package/dist/commands/migrate/index.d.ts +3 -0
- package/dist/commands/migrate/json.d.ts +20 -0
- package/dist/commands/migrate/lint-staged.d.ts +62 -0
- package/dist/commands/migrate/types.d.ts +20 -0
- package/dist/commands/run.d.ts +3 -0
- package/dist/commands/staged.d.ts +3 -0
- package/dist/commands/update.d.ts +3 -0
- package/dist/config.d.ts +40 -0
- package/dist/config.js +1 -0
- package/dist/package-manager.d.ts +23 -0
- package/dist/workspace.d.ts +58 -0
- package/package.json +81 -7
package/CHANGELOG.md
ADDED
|
@@ -0,0 +1,29 @@
|
|
|
1
|
+
## @visulima/vis [1.0.0-alpha.2](https://github.com/visulima/visulima/compare/@visulima/vis@1.0.0-alpha.1...@visulima/vis@1.0.0-alpha.2) (2026-03-26)
|
|
2
|
+
|
|
3
|
+
|
|
4
|
+
### Dependencies
|
|
5
|
+
|
|
6
|
+
* **@visulima/task-runner:** upgraded to 1.0.0-alpha.2
|
|
7
|
+
|
|
8
|
+
## @visulima/vis 1.0.0-alpha.1 (2026-03-26)
|
|
9
|
+
|
|
10
|
+
### Features
|
|
11
|
+
|
|
12
|
+
* Add @visulima/task-runner , vis and find-ai-runner ([#594](https://github.com/visulima/visulima/issues/594)) ([034b5db](https://github.com/visulima/visulima/commit/034b5db8aadcc02e23abe007208c5196859c7755))
|
|
13
|
+
|
|
14
|
+
### Bug Fixes
|
|
15
|
+
|
|
16
|
+
* **vis:** fall back to package.json deps when pnpm/bun have no catalogs ([8da8e19](https://github.com/visulima/visulima/commit/8da8e190a40abc22e18e3af740a594edc8cc382d))
|
|
17
|
+
* **vis:** isolate loadNpmrc test from host ~/.npmrc ([a7016d6](https://github.com/visulima/visulima/commit/a7016d6ce8770c1d462ebfb9b2dab530fcedac5d))
|
|
18
|
+
|
|
19
|
+
|
|
20
|
+
### Dependencies
|
|
21
|
+
|
|
22
|
+
* **@visulima/boxen:** upgraded to 3.0.0-alpha.7
|
|
23
|
+
* **@visulima/cerebro:** upgraded to 3.0.0-alpha.8
|
|
24
|
+
* **@visulima/find-ai-runner:** upgraded to 1.0.0-alpha.1
|
|
25
|
+
* **@visulima/fs:** upgraded to 5.0.0-alpha.5
|
|
26
|
+
* **@visulima/package:** upgraded to 5.0.0-alpha.5
|
|
27
|
+
* **@visulima/path:** upgraded to 3.0.0-alpha.6
|
|
28
|
+
* **@visulima/tabular:** upgraded to 4.0.0-alpha.7
|
|
29
|
+
* **@visulima/task-runner:** upgraded to 1.0.0-alpha.1
|
package/LICENSE.md
ADDED
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
MIT License
|
|
2
|
+
|
|
3
|
+
Copyright (c) 2026 visulima
|
|
4
|
+
|
|
5
|
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
6
|
+
of this software and associated documentation files (the "Software"), to deal
|
|
7
|
+
in the Software without restriction, including without limitation the rights
|
|
8
|
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
9
|
+
copies of the Software, and to permit persons to whom the Software is
|
|
10
|
+
furnished to do so, subject to the following conditions:
|
|
11
|
+
|
|
12
|
+
The above copyright notice and this permission notice shall be included in all
|
|
13
|
+
copies or substantial portions of the Software.
|
|
14
|
+
|
|
15
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
16
|
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
17
|
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
18
|
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
19
|
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
20
|
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
21
|
+
SOFTWARE.
|
package/README.md
CHANGED
|
@@ -1,45 +1,139 @@
|
|
|
1
|
-
|
|
1
|
+
<!-- START_PACKAGE_OG_IMAGE_PLACEHOLDER -->
|
|
2
2
|
|
|
3
|
-
|
|
3
|
+
<a href="https://www.anolilab.com/open-source" align="center">
|
|
4
4
|
|
|
5
|
-
|
|
5
|
+
<img src="__assets__/package-og.svg" alt="vis" />
|
|
6
6
|
|
|
7
|
-
|
|
7
|
+
</a>
|
|
8
8
|
|
|
9
|
-
|
|
9
|
+
<h3 align="center">A CLI task runner for monorepo workspaces</h3>
|
|
10
10
|
|
|
11
|
-
|
|
12
|
-
1. Configure OIDC trusted publishing for the package name `@visulima/vis`
|
|
13
|
-
2. Enable secure, token-less publishing from CI/CD workflows
|
|
14
|
-
3. Establish provenance for packages published under this name
|
|
11
|
+
<!-- END_PACKAGE_OG_IMAGE_PLACEHOLDER -->
|
|
15
12
|
|
|
16
|
-
|
|
13
|
+
<br />
|
|
17
14
|
|
|
18
|
-
|
|
15
|
+
<div align="center">
|
|
19
16
|
|
|
20
|
-
|
|
17
|
+
[![typescript-image][typescript-badge]][typescript-url]
|
|
18
|
+
[![mit licence][license-badge]][license]
|
|
19
|
+
[![npm downloads][npm-downloads-badge]][npm-downloads]
|
|
20
|
+
[![Chat][chat-badge]][chat]
|
|
21
|
+
[![PRs Welcome][prs-welcome-badge]][prs-welcome]
|
|
21
22
|
|
|
22
|
-
|
|
23
|
+
</div>
|
|
23
24
|
|
|
24
|
-
|
|
25
|
-
|
|
26
|
-
|
|
27
|
-
|
|
25
|
+
---
|
|
26
|
+
|
|
27
|
+
<div align="center">
|
|
28
|
+
<p>
|
|
29
|
+
<sup>
|
|
30
|
+
Daniel Bannert's open source work is supported by the community on <a href="https://github.com/sponsors/prisis">GitHub Sponsors</a>
|
|
31
|
+
</sup>
|
|
32
|
+
</p>
|
|
33
|
+
</div>
|
|
28
34
|
|
|
29
|
-
|
|
35
|
+
---
|
|
30
36
|
|
|
31
|
-
|
|
32
|
-
- Contains no executable code
|
|
33
|
-
- Provides no functionality
|
|
34
|
-
- Should not be installed as a dependency
|
|
35
|
-
- Exists only for administrative purposes
|
|
37
|
+
## Features
|
|
36
38
|
|
|
37
|
-
|
|
39
|
+
- **Workspace-aware**: Automatically discovers projects from `pnpm-workspace.yaml` or `package.json` workspaces
|
|
40
|
+
- **Task caching**: Powered by `@visulima/task-runner` with local and remote caching support
|
|
41
|
+
- **Dependency-aware scheduling**: Runs tasks in topological order with configurable parallelism
|
|
42
|
+
- **Affected detection**: Only runs tasks for projects changed since a given git ref
|
|
43
|
+
- **Catalog management**: Check and update dependencies in pnpm/bun workspace catalogs
|
|
44
|
+
- **Security scanning**: Check for known vulnerabilities via OSV.dev
|
|
45
|
+
- **Graph visualization**: View your project dependency graph in ASCII, DOT, JSON, or HTML
|
|
46
|
+
- **Git hooks**: Install, manage, and migrate git hooks (husky migration supported)
|
|
47
|
+
- **Configurable**: `vis.json` for target defaults, cache settings, and task runner options
|
|
48
|
+
- **Built on Cerebro**: Uses `@visulima/cerebro` for a robust CLI experience with built-in help, version, and completion
|
|
38
49
|
|
|
39
|
-
|
|
40
|
-
- [npm Trusted Publishing Documentation](https://docs.npmjs.com/generating-provenance-statements)
|
|
41
|
-
- [GitHub Actions OIDC Documentation](https://docs.github.com/en/actions/deployment/security-hardening-your-deployments/about-security-hardening-with-openid-connect)
|
|
50
|
+
## Install
|
|
42
51
|
|
|
43
|
-
|
|
52
|
+
```sh
|
|
53
|
+
npm install @visulima/vis
|
|
54
|
+
```
|
|
55
|
+
|
|
56
|
+
```sh
|
|
57
|
+
yarn add @visulima/vis
|
|
58
|
+
```
|
|
59
|
+
|
|
60
|
+
```sh
|
|
61
|
+
pnpm add @visulima/vis
|
|
62
|
+
```
|
|
63
|
+
|
|
64
|
+
## Quick Start
|
|
65
|
+
|
|
66
|
+
```bash
|
|
67
|
+
# Run a target across all workspace projects
|
|
68
|
+
vis run build
|
|
69
|
+
|
|
70
|
+
# Run tests only on affected projects
|
|
71
|
+
vis affected test --base=main
|
|
72
|
+
|
|
73
|
+
# Visualize the project dependency graph
|
|
74
|
+
vis graph
|
|
75
|
+
|
|
76
|
+
# Check for outdated catalog dependencies
|
|
77
|
+
vis check
|
|
78
|
+
|
|
79
|
+
# Check with security vulnerability scanning
|
|
80
|
+
vis check --security
|
|
81
|
+
|
|
82
|
+
# Update catalog dependencies interactively
|
|
83
|
+
vis update --interactive
|
|
84
|
+
|
|
85
|
+
# Install git hooks
|
|
86
|
+
vis hook install
|
|
87
|
+
```
|
|
88
|
+
|
|
89
|
+
## Commands
|
|
90
|
+
|
|
91
|
+
| Command | Alias | Description |
|
|
92
|
+
| ----------------------- | ----- | ----------------------------------------------------- |
|
|
93
|
+
| `vis run <target>` | | Run a target across workspace projects with caching |
|
|
94
|
+
| `vis affected <target>` | | Run tasks only on projects affected by git changes |
|
|
95
|
+
| `vis graph` | | Visualize the project dependency graph |
|
|
96
|
+
| `vis check [packages]` | `c` | Check for outdated dependencies in workspace catalogs |
|
|
97
|
+
| `vis update [packages]` | `up` | Update packages to their latest versions |
|
|
98
|
+
| `vis hook <action>` | | Manage git hooks (install, uninstall, migrate) |
|
|
99
|
+
|
|
100
|
+
## Documentation
|
|
101
|
+
|
|
102
|
+
For full documentation including command reference, configuration options, best practices, and CI/CD integration guides, see the [docs](./docs) folder.
|
|
103
|
+
|
|
104
|
+
## Supported Node.js Versions
|
|
105
|
+
|
|
106
|
+
Libraries in this ecosystem make the best effort to track [Node.js' release schedule](https://github.com/nodejs/release#release-schedule).
|
|
107
|
+
Here's [a post on why we think this is important](https://medium.com/the-node-js-collection/maintainers-should-consider-following-node-js-release-schedule-ab08ed4de71a).
|
|
108
|
+
|
|
109
|
+
## Contributing
|
|
110
|
+
|
|
111
|
+
If you would like to help take a look at the [list of issues](https://github.com/visulima/visulima/issues) and check our [Contributing](.github/CONTRIBUTING.md) guidelines.
|
|
112
|
+
|
|
113
|
+
> **Note:** please note that this project is released with a Contributor Code of Conduct. By participating in this project you agree to abide by its terms.
|
|
114
|
+
|
|
115
|
+
## Credits
|
|
116
|
+
|
|
117
|
+
- [Daniel Bannert](https://github.com/prisis)
|
|
118
|
+
- [All Contributors](https://github.com/visulima/visulima/graphs/contributors)
|
|
119
|
+
|
|
120
|
+
## Made with ❤️ at Anolilab
|
|
121
|
+
|
|
122
|
+
This is an open source project and will always remain free to use. If you think it's cool, please star it 🌟. [Anolilab](https://www.anolilab.com/open-source) is a Development and AI Studio. Contact us at [hello@anolilab.com](mailto:hello@anolilab.com) if you need any help with these technologies or just want to say hi!
|
|
123
|
+
|
|
124
|
+
## License
|
|
125
|
+
|
|
126
|
+
The visulima vis is open-sourced software licensed under the [MIT][license]
|
|
127
|
+
|
|
128
|
+
<!-- badges -->
|
|
44
129
|
|
|
45
|
-
|
|
130
|
+
[license-badge]: https://img.shields.io/npm/l/@visulima/vis?style=for-the-badge
|
|
131
|
+
[license]: https://github.com/visulima/visulima/blob/main/LICENSE
|
|
132
|
+
[npm-downloads-badge]: https://img.shields.io/npm/dm/@visulima/vis?style=for-the-badge
|
|
133
|
+
[npm-downloads]: https://www.npmjs.com/package/@visulima/vis
|
|
134
|
+
[prs-welcome-badge]: https://img.shields.io/badge/PRs-welcome-brightgreen.svg?style=for-the-badge
|
|
135
|
+
[prs-welcome]: https://github.com/visulima/visulima/blob/main/.github/CONTRIBUTING.md
|
|
136
|
+
[chat-badge]: https://img.shields.io/discord/932323359193186354.svg?style=for-the-badge
|
|
137
|
+
[chat]: https://discord.gg/TtFJY8xkFK
|
|
138
|
+
[typescript-badge]: https://img.shields.io/badge/Typescript-294E80.svg?style=for-the-badge&logo=typescript
|
|
139
|
+
[typescript-url]: https://www.typescriptlang.org/
|
|
@@ -0,0 +1,40 @@
|
|
|
1
|
+
import type { AiProviderInfo } from "@visulima/find-ai-runner";
|
|
2
|
+
import type { OutdatedEntry } from "./catalog.d.ts";
|
|
3
|
+
interface AiConfig {
|
|
4
|
+
/** Cache TTL in milliseconds. Overrides default (1h / 30min for security). */
|
|
5
|
+
cacheTtl?: number;
|
|
6
|
+
/** Override default provider priority. Higher = preferred. */
|
|
7
|
+
priority?: Record<string, number>;
|
|
8
|
+
/** Use a specific provider, skip auto-detection. */
|
|
9
|
+
provider?: string;
|
|
10
|
+
}
|
|
11
|
+
type AnalysisType = "compatibility" | "impact" | "recommend" | "security";
|
|
12
|
+
declare const DEFAULT_PRIORITY: Record<string, number>;
|
|
13
|
+
/** Resolve which AI provider to use based on config and availability. */
|
|
14
|
+
declare const resolveProvider: (config?: AiConfig) => AiProviderInfo | undefined;
|
|
15
|
+
interface AiRecommendation {
|
|
16
|
+
action: "defer" | "review" | "skip" | "update";
|
|
17
|
+
breakingChanges: string[];
|
|
18
|
+
effort: "high" | "low" | "medium";
|
|
19
|
+
package: string;
|
|
20
|
+
reason: string;
|
|
21
|
+
riskLevel: "critical" | "high" | "low" | "medium";
|
|
22
|
+
}
|
|
23
|
+
interface AiAnalysisResult {
|
|
24
|
+
analysisType: AnalysisType;
|
|
25
|
+
provider: string;
|
|
26
|
+
recommendations: AiRecommendation[];
|
|
27
|
+
summary: string;
|
|
28
|
+
warnings: string[];
|
|
29
|
+
}
|
|
30
|
+
declare const validateAnalysisType: (type: string) => AnalysisType;
|
|
31
|
+
declare const buildAnalysisPrompt: (outdated: OutdatedEntry[], analysisType?: AnalysisType) => string;
|
|
32
|
+
declare const extractJson: (text: string) => unknown | undefined;
|
|
33
|
+
declare const normalizeRecommendation: (raw: Record<string, unknown>) => AiRecommendation;
|
|
34
|
+
declare const parseAiResponse: (text: string, provider: string, analysisType: AnalysisType) => AiAnalysisResult;
|
|
35
|
+
declare const ruleBasedAnalysis: (outdated: OutdatedEntry[], analysisType: AnalysisType) => AiAnalysisResult;
|
|
36
|
+
declare const formatAiAnalysis: (result: AiAnalysisResult) => string;
|
|
37
|
+
declare const formatAiAnalysisJson: (result: AiAnalysisResult) => string;
|
|
38
|
+
declare const runAiAnalysis: (outdated: OutdatedEntry[], logger: Console, config?: AiConfig, analysisType?: AnalysisType) => Promise<AiAnalysisResult>;
|
|
39
|
+
export type { AiAnalysisResult, AiConfig, AiRecommendation, AnalysisType };
|
|
40
|
+
export { buildAnalysisPrompt, DEFAULT_PRIORITY, extractJson, formatAiAnalysis, formatAiAnalysisJson, normalizeRecommendation, parseAiResponse, resolveProvider, ruleBasedAnalysis, runAiAnalysis, validateAnalysisType, };
|
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
import type { AiAnalysisResult, AnalysisType } from "./ai-analysis.d.ts";
|
|
2
|
+
import type { OutdatedEntry } from "./catalog.d.ts";
|
|
3
|
+
interface CacheEntry {
|
|
4
|
+
createdAt: number;
|
|
5
|
+
result: AiAnalysisResult;
|
|
6
|
+
ttlMs: number;
|
|
7
|
+
}
|
|
8
|
+
interface CacheStats {
|
|
9
|
+
entries: number;
|
|
10
|
+
newestEntry: number | undefined;
|
|
11
|
+
oldestEntry: number | undefined;
|
|
12
|
+
totalSizeBytes: number;
|
|
13
|
+
}
|
|
14
|
+
declare const buildCacheKey: (provider: string, analysisType: string, outdated: Pick<OutdatedEntry, "currentRange" | "packageName" | "targetVersion">[]) => string;
|
|
15
|
+
declare const getCachedAnalysis: (cacheKey: string) => AiAnalysisResult | undefined;
|
|
16
|
+
declare const setCachedAnalysis: (cacheKey: string, result: AiAnalysisResult, ttlMs: number) => void;
|
|
17
|
+
declare const getTtlForAnalysisType: (analysisType: AnalysisType | string, configTtl?: number) => number;
|
|
18
|
+
declare const getCacheStats: () => CacheStats;
|
|
19
|
+
declare const clearCache: () => number;
|
|
20
|
+
export type { CacheEntry, CacheStats };
|
|
21
|
+
export { buildCacheKey, clearCache, getCachedAnalysis, getCacheStats, getTtlForAnalysisType, setCachedAnalysis };
|
package/dist/bin.d.ts
ADDED
|
@@ -0,0 +1 @@
|
|
|
1
|
+
export {};
|
package/dist/bin.js
ADDED
|
@@ -0,0 +1,147 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
var ra=Object.defineProperty;var y=(e,t)=>ra(e,"name",{value:t,configurable:!0});import{createRequire as oa}from"node:module";import{createCerebro as ia}from"@visulima/cerebro";import{findPackageManagerSync as xe,getPackageManagerVersion as ca,findMonorepoRootSync as la}from"@visulima/package";import{getAffectedProjects as pa,projectGraphToDot as ua,createTaskGraph as da,ConsoleLifeCycle as ga,defaultTaskRunner as fa,generateRunSummary as ma,writeRunSummary as ha}from"@visulima/task-runner";import{readJsonSync as W,walkSync as Ee,isAccessibleSync as b,readFileSync as A,writeFileSync as te,writeJsonSync as st,removeSync as ya,ensureDirSync as Ge}from"@visulima/fs";import{join as u,resolve as Re,dirname as va}from"@visulima/path";import{runProvider as it,PROVIDER_NAMES as ka,detectProvider as $a,detectAvailableProviders as wa,detectAllProviders as ba}from"@visulima/find-ai-runner";import{createTable as Ne}from"@visulima/tabular";import{boxen as ct}from"@visulima/boxen";import{findVisConfigFile as pt,loadVisConfig as xa}from"./config.js";import __cjs_mod__ from "node:module"; // -- packem CommonJS require shim --
|
|
3
|
+
const require = __cjs_mod__.createRequire(import.meta.url);
|
|
4
|
+
const sa=oa(import.meta.url),K=typeof globalThis<"u"&&typeof globalThis.process<"u"?globalThis.process:process,he=y(e=>{if(typeof K<"u"&&K.versions&&K.versions.node){const[t,a]=K.versions.node.split(".").map(Number);if(t>22||t===22&&a>=3||t===20&&a>=16)return K.getBuiltinModule(e)}return sa(e)},"__cjs_getBuiltinModule"),{existsSync:k,readFileSync:B,rmSync:ee,writeFileSync:E,mkdirSync:Oe,readdirSync:Be,statSync:Ie,unlinkSync:Sa}=he("node:fs"),{homedir:ja}=he("node:os"),{cwd:lt}=K,{createInterface:Ca}=he("node:readline"),{spawnSync:F,execSync:Pe}=he("node:child_process");var Ea="1.0.0-alpha.1";const Ra={version:Ea};var Na=Object.defineProperty,L=y((e,t)=>Na(e,"name",{value:t,configurable:!0}),"a$4");const Oa=/\/+$/,Ba=/\/\*\*$/,Ia=/\/\*\/\*$/,Pa=/^['"]|['"]$/g,ut=/node_modules/,dt=/\.git/,ie=L(e=>{try{return W(e)}catch{return}},"readJsonFileSafe"),Aa=L((e,t,a)=>{for(const n of Ee(e,{includeFiles:!1,skip:[ut,dt]}))if(n.path!==e&&b(u(n.path,"package.json"))){const r=n.path.slice(e.length+1);a.push(`${t}/${r}`)}},"scanDirectoryRecursive"),La=L((e,t,a)=>{const n=t.slice(0,-2),r=Re(e,n);if(b(r))for(const o of Ee(r,{includeFiles:!1,maxDepth:1,skip:[ut,dt]}))o.path!==r&&b(u(o.path,"package.json"))&&a.push(u(n,o.name))},"resolveSimpleGlob"),Ta=L((e,t,a)=>{const n=t.replace(Ba,"").replace(Ia,""),r=Re(e,n);b(r)&&Aa(r,n,a)},"resolveDoubleGlob"),Ua=L((e,t,a)=>{const n=Re(e,t);b(n)&&b(u(n,"package.json"))&&a.push(t)},"resolveExactDirectory"),gt=L((e,t)=>{const a=[];for(const n of t){const r=n.replace(Oa,"");r.startsWith("!")||(r.endsWith("/*")?La(e,r,a):r.endsWith("/**")||r.endsWith("/*/*")?Ta(e,r,a):Ua(e,r,a))}return a},"resolveWorkspacePatterns"),Ma=L(e=>{const t=u(e,"pnpm-workspace.yaml");if(!b(t))return;const a=A(t),n=[];let r=!1;for(const o of a.split(`
|
|
5
|
+
`)){const s=o.trim();if(s==="packages:"){r=!0;continue}if(r){if(s.startsWith("- ")){const i=s.slice(2).replaceAll(Pa,"");n.push(i)}else if(s&&!s.startsWith("#"))break}}return n.length>0?n:void 0},"readPnpmWorkspacePatterns"),Va=L((e,t)=>{const a={};for(const[n,r]of Object.entries(e)){const o=t?.[n];a[n]={...o,command:r}}return a},"createTargetsFromScripts"),ye=L((e,t={})=>{const a={},n=Ma(e),r=ie(u(e,"package.json"));let o;if(n?o=n:r?.workspaces&&(o=Array.isArray(r.workspaces)?r.workspaces:r.workspaces.packages),!o)throw new Error("No workspace configuration found. Expected pnpm-workspace.yaml or package.json workspaces field.");const s=gt(e,o);for(const i of s){const c=u(e,i,"package.json"),l=ie(c);if(!l?.name)continue;const p=u(e,i,"project.json"),g=ie(p),f=l.scripts?Va(l.scripts,t.targetDefaults):{};a[l.name]={projectType:g?.projectType??"library",root:i,sourceRoot:g?.sourceRoot??`${i}/src`,tags:g?.tags,targets:f}}return{config:t,workspace:{projects:a}}},"discoverWorkspace"),Ae=L((e,t)=>{const a={},n={},r=new Set(Object.keys(t.projects));for(const[o,s]of Object.entries(t.projects)){a[o]={data:s,name:o,type:s.projectType??"library"},n[o]=[];const i=ie(u(e,s.root,"package.json"));if(!i)continue;const c={...i.dependencies,...i.devDependencies,...i.peerDependencies};for(const l of Object.keys(c))r.has(l)&&n[o]?.push({source:o,target:l,type:"static"})}return{dependencies:n,nodes:a}},"buildProjectGraph");var Fa=Object.defineProperty,Da=y((e,t)=>Fa(e,"name",{value:t,configurable:!0}),"f$6");const Ja={argument:{description:"The target to run (e.g., build, test, lint)",name:"target",type:String},description:"Run a target only on projects affected by recent changes",examples:[["vis affected build","Run build on affected projects"],["vis affected test --base=main","Run tests on projects changed since main"]],execute:Da(async({argument:e,logger:t,options:a,runtime:n,visConfig:r,workspaceRoot:o})=>{const s=e[0];if(!s)throw new Error("Missing target. 
Usage: vis affected <target>");if(!o)throw new Error("Could not determine workspace root. Run this command inside a monorepo.");const i=o,{workspace:c}=ye(i,r),l=Ae(i,c),p={base:a.base,head:a.head,projectGraph:l,projects:c.projects,workspaceRoot:i},g=await pa(p);if(g.changedFiles.length===0){t.info("No files changed. Nothing to run.");return}if(g.affectedProjects.length===0){t.info("No projects affected by the changes.");return}t.info(`Affected projects: ${g.affectedProjects.join(", ")}`);const f=[s,`--projects=${g.affectedProjects.join(",")}`];a.parallel!==void 0&&f.push(`--parallel=${String(a.parallel)}`),a.cache||f.push("--no-cache"),a.dryRun&&f.push("--dry-run"),await n.runCommand("run",{argv:f})},"execute"),name:"affected",options:[{defaultValue:"HEAD~1",description:"Git base ref for comparison",name:"base",type:String},{defaultValue:"HEAD",description:"Git head ref for comparison",name:"head",type:String},{defaultValue:3,description:"Maximum number of parallel tasks",name:"parallel",type:Number},{defaultValue:!0,description:"Enable caching (use --no-cache to disable)",name:"cache",type:Boolean},{defaultValue:!1,description:"Show what would run without executing",name:"dry-run",type:Boolean}]};var Wa=Object.defineProperty,h=y((e,t)=>Wa(e,"name",{value:t,configurable:!0}),"f$5");const 
Y=(1n<<128n)-1n,m=(1n<<64n)-1n,pe=(1n<<32n)-1n,ft=0x9E3779B1n,mt=0x85EBCA77n,_a=0xC2B2AE3Dn,J=0x9E3779B185EBCA87n,q=0xC2B2AE3D27D4EB4Fn,ht=0x165667B19E3779F9n,Le=0x85EBCA77C2B2AE63n,za=0x27D4EB2F165667C5n,Ha=0x165667919E3779F9n,Ga=0x9FB21C651E98DF25n,M=64,yt=M/8,qa=8,I=Buffer.from("b8fe6c3923a44bbe7c01812cf721ad1cded46de9839097db7240a4a4b7b3671fcb79e64eccc0e578825ad07dccff7221b8084674f743248ee03590e6813a264c3c2852bb91c300cb88d0658b1b532ea371644897a20df94e3819ef46a9deacd8a8fa763fe39c343ff9dcbbc7c70b4f1d8a51e04bcdb45931c89f7ec9d9787364eac5ac8334d3ebc3c581a0fffa1363eb170ddd51b7f0da49d316552629d4689e2b16be587d47a1fc8ff8b8d17ad031ce45cb3a8f95160428afd7fbcabb4b407e","hex"),v=h((e,t=0)=>Buffer.from(e.buffer,e.byteOffset+t,e.length-t),"getView"),Ka=h(e=>{const t=Buffer.allocUnsafe(8);return t.writeBigUInt64LE(e),t.readBigUInt64BE()},"bswap64"),Ya=h(e=>{let t=e;return t=(t&0x0000FFFFn)<<16n|(t&0xFFFF0000n)>>16n,t=(t&0x00FF00FFn)<<8n|(t&0xFF00FF00n)>>8n,t},"bswap32"),Xa=h((e,t)=>(e&pe)*(t&pe)&m,"multU32ToU64"),Za=h((e,t)=>(e<<t|e>>32n-t)&pe,"rotl32"),Se=h((e,t)=>e^e>>t,"xorshift64"),je=h(e=>~e+1n&m,"inv64"),vt=h((e,t)=>{const a=e*t&Y;return a&m^a>>64n},"mul128Fold64"),P=h(e=>{let t=e;return t^=t>>37n,t=t*Ha&m,t^=t>>32n,t},"avalanche"),ue=h(e=>{let t=e;return t^=t>>33n,t=t*q&m,t^=t>>29n,t=t*ht&m,t^=t>>32n,t},"avalanche64"),kt=h((e,t,a)=>{for(let n=0;n<yt;n++){const r=t.readBigUInt64LE(n*8),o=r^a.readBigUInt64LE(n*8);e[n^1]+=r,e[n]+=Xa(o,o>>32n)}return e},"accumulate512"),qe=h((e,t,a,n)=>{for(let r=0;r<n;r++)kt(e,v(t,r*M),v(a,r*8));return e},"accumulate"),Qa=h((e,t)=>{for(let a=0;a<yt;a++){const n=t.readBigUInt64LE(a*8);let r=e[a];r=Se(r,47n),r^=n,r*=ft,e[a]=r&m}return e},"scrambleAcc"),oe=h((e,t)=>vt(e[0]^t.readBigUInt64LE(0),e[1]^t.readBigUInt64LE(qa)),"mix2Accs"),Ke=h((e,t,a)=>{let n=a;return n+=oe(e.slice(0),v(t,0)),n+=oe(e.slice(2),v(t,16)),n+=oe(e.slice(4),v(t,32)),n+=oe(e.slice(6),v(t,48)),P(n&m)},"mergeAccs"),en=h((e,t,a)=>{const 
n=Math.floor((a.byteLength-M)/8),r=M*n,o=Math.floor((t.byteLength-1)/r);for(let i=0;i<o;i++)qe(e,v(t,i*r),a,n),Qa(e,v(a,a.byteLength-M));const s=Math.floor((t.byteLength-1-r*o)/M);return qe(e,v(t,o*r),a,s),kt(e,v(t,t.byteLength-M),v(a,a.byteLength-M-7)),e},"hashLong"),tn=h((e,t)=>{const a=new BigUint64Array([_a,J,q,ht,Le,mt,za,ft]);en(a,e,t);const n=Ke(a,v(t,11),BigInt(e.byteLength)*J&m);return Ke(a,v(t,t.byteLength-M-11),~(BigInt(e.byteLength)*q)&m)<<64n|n},"hashLong128b"),Ye=h((e,t,a)=>vt((e.readBigUInt64LE(0)^t.readBigUInt64LE(0)+a)&m,(e.readBigUInt64LE(8)^t.readBigUInt64LE(8)-a)&m),"mix16B"),ce=h((e,t,a,n,r)=>{let o=e&m,s=e>>64n&m;return o+=Ye(t,n,r),o^=a.readBigUInt64LE(0)+a.readBigUInt64LE(8),o&=m,s+=Ye(a,v(n,16),r),s^=t.readBigUInt64LE(0)+t.readBigUInt64LE(8),s&=m,s<<64n|o},"mix32B"),an=h((e,t,a)=>{const n=e.byteLength,r=BigInt(e.readUInt8(n-1))|BigInt(n<<8)|BigInt(e.readUInt8(0)<<16)|BigInt(e.readUInt8(n>>1)<<24),o=(BigInt(t.readUInt32LE(0))^BigInt(t.readUInt32LE(4)))+a,s=(r^o)&m,i=(BigInt(t.readUInt32LE(8))^BigInt(t.readUInt32LE(12)))-a,c=(Za(Ya(r),13n)^i)&m;return(ue(c)&m)<<64n|ue(s)},"len1to3_128b"),nn=h((e,t,a)=>{const n=e.byteLength,r=e.readUInt32LE(0),o=e.readUInt32LE(n-4),s=BigInt(r)|BigInt(o)<<32n,i=(t.readBigUInt64LE(16)^t.readBigUInt64LE(24))+a&m;let c=(s^i)*(J+(BigInt(n)<<2n))&Y;return c+=(c&m)<<65n,c&=Y,c^=c>>67n,Se(Se(c&m,35n)*Ga&m,28n)|P(c>>64n)<<64n},"len4to8_128b"),rn=h((e,t,a)=>{const n=e.byteLength,r=(t.readBigUInt64LE(32)^t.readBigUInt64LE(40))+a&m,o=(t.readBigUInt64LE(48)^t.readBigUInt64LE(56))-a&m,s=e.readBigUInt64LE();let i=e.readBigUInt64LE(n-8),c=(s^i^r)*J;const l=(c&m)+(BigInt(n-1)<<54n);c=c&(Y^m)|l,i^=o,c+=i+(i&pe)*(mt-1n)<<64n,c&=Y,c^=Ka(c>>64n);let p=(c&m)*q;return p+=(c>>64n)*q<<64n,p&=Y,P(p&m)|P(p>>64n)<<64n},"len9to16_128b"),on=h((e,t)=>{const a=e.byteLength;return 
a>8?rn(e,I,t):a>=4?nn(e,I,t):a>0?an(e,I,t):ue(t^I.readBigUInt64LE(64)^I.readBigUInt64LE(72))|ue(t^I.readBigUInt64LE(80)^I.readBigUInt64LE(88))<<64n},"len0to16_128b"),sn=h((e,t,a)=>{let n=BigInt(e.byteLength)*J&m,r=BigInt(e.byteLength-1)/32n;for(;r>=0n;){const i=Number(r);n=ce(n,v(e,16*i),v(e,e.byteLength-16*(i+1)),v(t,32*i),a),r--}let o=n+(n>>64n)&m;o=P(o);let s=(n&m)*J+(n>>64n)*Le+(BigInt(e.byteLength)-a&m)*q;return s&=m,s=je(P(s)),o|s<<64n},"len17to128_128b"),cn=h((e,t,a)=>{let n=BigInt(e.byteLength)*J&m;for(let s=32;s<160;s+=32)n=ce(n,v(e,s-32),v(e,s-16),v(t,s-32),a);n=P(n&m)|P(n>>64n)<<64n;for(let s=160;s<=e.byteLength;s+=32)n=ce(n,v(e,s-32),v(e,s-16),v(t,3+s-160),a);n=ce(n,v(e,e.byteLength-16),v(e,e.byteLength-32),v(t,103),je(a));let r=n+(n>>64n)&m;r=P(r);let o=(n&m)*J+(n>>64n)*Le+(BigInt(e.byteLength)-a&m)*q;return o&=m,o=je(P(o)),r|o<<64n},"len129to240_128b"),ln=h((e,t=0n)=>{const a=e.byteLength;return a<=16?on(e,t):a<=128?sn(e,I,t):a<=240?cn(e,I,t):tn(e,I)},"xxh3_128"),pn=h(e=>{const t=e>>64n&m,a=e&m;return t.toString(16).padStart(16,"0")+a.toString(16).padStart(16,"0")},"bigintToHex"),$t=h(e=>pn(ln(e)),"xxh3Hash");class un{static{y(this,"gn")}static{h(this,"Xxh3Hasher")}#e=[];update(t){return typeof t=="string"?this.#e.push(Buffer.from(t)):this.#e.push(t),this}digest(){return $t(Buffer.concat(this.#e))}}h(()=>new un,"createXxh3Hasher");var dn=Object.defineProperty,_=y((e,t)=>dn(e,"name",{value:t,configurable:!0}),"i$4");const ae=_(()=>u(ja(),".vis","cache","ai"),"getCacheDirectory"),gn=3600*1e3,fn=1800*1e3,mn=_(()=>{const e=ae();k(e)||Oe(e,{recursive:!0})},"ensureCacheDirectory"),hn=_((e,t,a)=>{const n=a.map(o=>({currentRange:o.currentRange,name:o.packageName,targetVersion:o.targetVersion})).toSorted((o,s)=>o.name.localeCompare(s.name)),r=JSON.stringify({analysisType:t,packages:n,provider:e});return $t(Buffer.from(r))},"buildCacheKey"),yn=_(e=>{const t=u(ae(),`${e}.json`);if(k(t))try{const 
a=B(t,"utf8"),n=JSON.parse(a);if(Date.now()-n.createdAt>n.ttlMs){ee(t,{force:!0});return}return n.result}catch{ee(t,{force:!0});return}},"getCachedAnalysis"),vn=_((e,t,a)=>{mn();const n=ae(),r={createdAt:Date.now(),result:t,ttlMs:a};E(u(n,`${e}.json`),JSON.stringify(r,void 0,2),"utf8")},"setCachedAnalysis"),kn=_((e,t)=>t!==void 0&&t>0?t:e==="security"?fn:gn,"getTtlForAnalysisType"),$n=_(()=>{const e=ae();if(!k(e))return{entries:0,newestEntry:void 0,oldestEntry:void 0,totalSizeBytes:0};const t=Be(e).filter(o=>o.endsWith(".json"));let a=0,n,r;for(const o of t){const s=u(e,o),i=Ie(s);a+=i.size;const{mtimeMs:c}=i;(n===void 0||c<n)&&(n=c),(r===void 0||c>r)&&(r=c)}return{entries:t.length,newestEntry:r,oldestEntry:n,totalSizeBytes:a}},"getCacheStats"),wn=_(()=>{const e=ae();if(!k(e))return 0;const t=Be(e).filter(a=>a.endsWith(".json"));for(const a of t)ee(u(e,a),{force:!0});return t.length},"clearCache");var bn=Object.defineProperty,S=y((e,t)=>bn(e,"name",{value:t,configurable:!0}),"a$2");const le={amp:30,claude:80,codex:60,copilot:50,crush:35,cursor:40,droid:20,gemini:100,kimi:25,opencode:35,qwen:30},Te=S(e=>{if(e?.provider){if(!ka.includes(e.provider))return;const n=$a(e.provider);return n.available?n:void 0}const t=wa();if(t.length===0)return;const a={...le,...e?.priority};return t.toSorted((n,r)=>(a[r.name]??0)-(a[n.name]??0))[0]},"resolveProvider"),Sn=new Set(["defer","review","skip","update"]),jn=new Set(["critical","high","low","medium"]),Cn=new Set(["high","low","medium"]),xn=50,$e=30,En=2,Rn=1e3,Nn=12e4,On=S(e=>e.map(t=>{const a=t.vulnerabilities&&t.vulnerabilities.length>0?` [VULNERABILITIES: ${t.vulnerabilities.map(n=>`${n.severity} ${n.id}`).join(", ")}]`:"";return`- ${t.packageName}: ${t.currentRange} → ${t.newRange} (${t.updateType})${a}`}).join(`
|
|
6
|
+
`),"buildPackageList"),se=`Respond ONLY with valid JSON in this exact structure:
|
|
7
|
+
{
|
|
8
|
+
"summary": "Brief overall summary",
|
|
9
|
+
"recommendations": [
|
|
10
|
+
{
|
|
11
|
+
"package": "package-name",
|
|
12
|
+
"action": "update|skip|review|defer",
|
|
13
|
+
"reason": "explanation",
|
|
14
|
+
"riskLevel": "low|medium|high|critical",
|
|
15
|
+
"breakingChanges": ["change1"],
|
|
16
|
+
"effort": "low|medium|high"
|
|
17
|
+
}
|
|
18
|
+
],
|
|
19
|
+
"warnings": ["warning1"]
|
|
20
|
+
}`,Bn={compatibility:S(e=>`Analyze the compatibility of these package updates:
|
|
21
|
+
|
|
22
|
+
${e}
|
|
23
|
+
|
|
24
|
+
For each package:
|
|
25
|
+
1. Check peer dependency compatibility
|
|
26
|
+
2. Identify potential conflicts with other packages in the list
|
|
27
|
+
3. Assess API compatibility between current and target versions
|
|
28
|
+
4. Check for deprecated features being removed
|
|
29
|
+
5. Evaluate Node.js version requirements
|
|
30
|
+
|
|
31
|
+
${se}`,"compatibility"),impact:S(e=>`Analyze the impact of updating these npm packages:
|
|
32
|
+
|
|
33
|
+
${e}
|
|
34
|
+
|
|
35
|
+
For each package, provide:
|
|
36
|
+
1. Risk level (low/medium/high/critical)
|
|
37
|
+
2. Recommended action (update/skip/review/defer)
|
|
38
|
+
3. Reason for recommendation
|
|
39
|
+
4. Known breaking changes (if any)
|
|
40
|
+
5. Estimated migration effort (low/medium/high)
|
|
41
|
+
|
|
42
|
+
${se}`,"impact"),recommend:S(e=>`Provide smart recommendations for updating these packages:
|
|
43
|
+
|
|
44
|
+
${e}
|
|
45
|
+
|
|
46
|
+
Consider:
|
|
47
|
+
1. Update priority based on security, features, and stability
|
|
48
|
+
2. Grouping related packages for atomic updates
|
|
49
|
+
3. Best practices for the specific package ecosystem
|
|
50
|
+
4. Risk vs. benefit analysis
|
|
51
|
+
5. Suggested update order
|
|
52
|
+
|
|
53
|
+
${se}`,"recommend"),security:S(e=>`Analyze the security implications of these package updates:
|
|
54
|
+
|
|
55
|
+
${e}
|
|
56
|
+
|
|
57
|
+
For each package:
|
|
58
|
+
1. Check if the update fixes known vulnerabilities (use the vulnerability data above)
|
|
59
|
+
2. Assess if the new version introduces security risks
|
|
60
|
+
3. Evaluate if this is a security-sensitive package (auth, crypto, session, etc.)
|
|
61
|
+
4. Recommend urgency of the update based on vulnerability severity
|
|
62
|
+
5. Flag any packages where skipping the update poses security risk
|
|
63
|
+
|
|
64
|
+
${se}`,"security")},In=new Set(["compatibility","impact","recommend","security"]),Ue=S(e=>In.has(e)?e:"impact","validateAnalysisType"),wt=S((e,t="impact")=>{const a=On(e);return Bn[t](a)},"buildAnalysisPrompt"),Pn=/```(?:json)?\s*([\s\S]*?)```/,An=/\{[\s\S]*\}/,Ln=S(e=>{try{return JSON.parse(e)}catch{}const t=Pn.exec(e);if(t?.[1])try{return JSON.parse(t[1])}catch{}const a=An.exec(e);if(a?.[0])try{return JSON.parse(a[0])}catch{}},"extractJson"),Tn=S(e=>({action:Sn.has(e.action)?e.action:"review",breakingChanges:Array.isArray(e.breakingChanges)?e.breakingChanges:[],effort:Cn.has(e.effort)?e.effort:"medium",package:typeof e.package=="string"?e.package:"",reason:typeof e.reason=="string"?e.reason:"",riskLevel:jn.has(e.riskLevel)?e.riskLevel:"medium"}),"normalizeRecommendation"),bt=S((e,t,a)=>{const n=Ln(e);if(!n||typeof n!="object")return{analysisType:a,provider:t,recommendations:[],summary:"Failed to parse AI response.",warnings:["AI response was not valid JSON."]};const r=n,o=Array.isArray(r.recommendations)?r.recommendations:[];return{analysisType:a,provider:t,recommendations:o.map(s=>Tn(s)),summary:typeof r.summary=="string"?r.summary:"",warnings:Array.isArray(r.warnings)?r.warnings:[]}},"parseAiResponse"),Un={eslint:["ESLint 9.0: Flat config required","ESLint 8.0+: New rule formats"],next:["Next.js 13+: App router changes","Next.js 14+: Server components default"],react:["React 17 to 18: Concurrent features","React 18+: Strict mode changes"],typescript:["TypeScript 5.0: New decorators","TypeScript 4.7+: ESM changes"],vite:["Vite 5: Node.js 18+ required"],vue:["Vue 3: Composition API","Vue 3: Breaking template changes"],webpack:["Webpack 5: Node.js polyfills removed"]},Mn=new Set(["bcrypt","cors","crypto-js","express-session","helmet","jose","jsonwebtoken","node-forge","oauth","passport"]),Xe=S((e,t)=>{const a=e.map(n=>{const r=n.vulnerabilities&&n.vulnerabilities.length>0,o=Mn.has(n.packageName),s=Un[n.packageName]??[];let i="low",c="update",l="low",p="Patch/minor 
update, safe to apply.";return n.updateType==="major"?(i="high",c=s.length>0?"review":"update",l="medium",p=s.length>0?`Major update with known breaking changes: ${s[0]}`:"Major version update, review changelog before applying."):n.updateType==="minor"&&(i="medium",p="Minor update, generally safe."),r&&(i="high",c="update",p="Security update — current version has known vulnerabilities."),o&&n.updateType==="major"&&(c="review",p="Security-sensitive package with major update, careful review needed.",l="high"),{action:c,breakingChanges:s,effort:l,package:n.packageName,reason:p,riskLevel:i}});return{analysisType:t,provider:"rule-engine",recommendations:a,summary:`Rule-based ${t} analysis for ${String(e.length)} packages.`,warnings:["No AI provider available — using built-in rule engine."]}},"ruleBasedAnalysis"),Vn=S(e=>new Promise(t=>{setTimeout(t,e)}),"sleep"),St=S(async(e,t,a=En)=>{let n;for(let r=0;r<=a;r+=1)try{return(await it(e,t,{timeoutMs:Nn})).stdout}catch(o){if(n=o instanceof Error?o:new Error(String(o)),n.message.includes("timed out"))throw n;if(r<a){const s=Rn*2**r;await Vn(s)}}throw n??new Error("AI analysis failed after retries")},"runWithRetry"),Fn=S(async(e,t,a)=>{const n=wt(t,a),r=await St(e,n);return bt(r,e.name,a)},"analyzeChunk"),Dn=S((e,t,a)=>{const n=[],r=[],o=[];for(const s of e)n.push(...s.recommendations),r.push(...s.warnings),s.summary&&o.push(s.summary);return{analysisType:a,provider:t,recommendations:n,summary:o.length===1?o[0]??"":`Analyzed ${String(n.length)} packages in ${String(e.length)} batches.`,warnings:[...new Set(r)]}},"mergeResults"),jt={compatibility:"Compatibility",impact:"Impact",recommend:"Recommendations",security:"Security"},de=S(e=>{const t=Ne();t.setHeaders(["Package","Risk","Action","Effort","Reason"]);for(const r of e.recommendations)t.addRow([r.package,r.riskLevel,r.action,r.effort,r.reason]),r.breakingChanges.length>0&&t.addRow(["",{colSpan:4,content:`Breaking: ${r.breakingChanges.join("; ")}`}]);const 
a=`${jt[e.analysisType]??e.analysisType} Analysis (${e.provider})`,n=[t.toString()];return e.warnings.length>0&&n.push(e.warnings.map(r=>` ${r}`).join(`
|
|
65
|
+
`)),ct(`${e.summary}
|
|
66
|
+
|
|
67
|
+
${n.join(`
|
|
68
|
+
`)}`,{headerText:a,padding:{left:1,right:1}})},"formatAiAnalysis");S(e=>JSON.stringify(e,void 0,2),"formatAiAnalysisJson");const Me=S(async(e,t,a,n="impact")=>{const r=Te(a);if(!r)return t.info(`No AI CLI tool found, using rule-based analysis.
|
|
69
|
+
`),Xe(e,n);const o=hn(r.name,n,e),s=yn(o);if(s)return t.info(`Using cached ${n} analysis from ${s.provider}.
|
|
70
|
+
`),s;const i=jt[n]??n;t.info(`Running ${i.toLowerCase()} analysis with ${r.name}...
|
|
71
|
+
`);try{let c;if(e.length>xn){t.info(`Splitting ${String(e.length)} packages into batches of ${String($e)}...
|
|
72
|
+
`);const l=[];for(let g=0;g<e.length;g+=$e)l.push(e.slice(g,g+$e));const p=[];for(let g=0;g<l.length;g+=1){t.info(` Batch ${String(g+1)}/${String(l.length)}...`);const f=l[g];f&&p.push(await Fn(r,f,n))}c=Dn(p,r.name,n)}else{const l=await St(r,wt(e,n));c=bt(l,r.name,n)}return vn(o,c,kn(n,a?.cacheTtl)),c}catch(c){const l=c instanceof Error?c.message:String(c);return t.warn(`AI analysis failed (${l}), falling back to rule engine.
|
|
73
|
+
`),Xe(e,n)}},"runAiAnalysis");var Jn=Object.defineProperty,ve=y((e,t)=>Jn(e,"name",{value:t,configurable:!0}),"r$4");const Wn=ve((e,t)=>{const a=$n();if(e==="json"){process.stdout.write(`${JSON.stringify(a,void 0,2)}
|
|
74
|
+
`);return}t.info("AI Cache Statistics:"),t.info(` Entries: ${String(a.entries)}`),t.info(` Total size: ${String(Math.round(a.totalSizeBytes/1024))} KB`),t.info(` Oldest: ${a.oldestEntry?new Date(a.oldestEntry).toISOString():"N/A"}`),t.info(` Newest: ${a.newestEntry?new Date(a.newestEntry).toISOString():"N/A"}`)},"handleCacheStats"),_n=ve(async(e,t)=>{const a=Te(t);if(!a){e.error("No AI provider available to test."),process.exitCode=1;return}e.info(`Testing ${a.name}...`);try{const n=await it(a,"Reply with exactly: OK",{timeoutMs:3e4});e.info(`Provider ${a.name} responded: ${n.stdout.trim().slice(0,200)}`)}catch(n){const r=n instanceof Error?n.message:String(n);e.error(`Provider ${a.name} failed: ${r}`),process.exitCode=1}},"handleTest"),zn=ve((e,t,a)=>{const n=ba(),r=Te(a);if(e==="json"){const s=n.map(i=>({available:i.available,method:i.detectionMethod,name:i.name,path:i.path,priority:le[i.name]??0,selected:i.name===r?.name,version:i.version}));process.stdout.write(`${JSON.stringify(s,void 0,2)}
|
|
75
|
+
`);return}const o=Ne();o.setHeaders(["Provider","Status","Version","Method","Path","Priority","Selected"]);for(const s of n)o.addRow([s.name,s.available?"available":"not found",s.version??"-",s.detectionMethod??"-",s.path??"-",String(le[s.name]??0),s.name===r?.name?">>>":""]);t.info(o.toString()),r?t.info(`
|
|
76
|
+
Selected provider: ${r.name} (priority ${String(le[r.name]??0)})`):t.info(`
|
|
77
|
+
No AI provider available. Install one of the supported AI CLI tools.`)},"handleProviderStatus"),Hn={alias:"a",description:"Show AI provider status, test connectivity, and manage cache",examples:[["vis ai","Show all AI providers and their status"],["vis ai --test","Test the best available provider"],["vis ai --cache-stats","Show AI response cache statistics"],["vis ai --clear-cache","Clear the AI response cache"],["vis ai --format json","Output as JSON"]],execute:ve(async({logger:e,options:t,visConfig:a})=>{const n=t.format??"table";if(t["cache-stats"]){Wn(n,e);return}if(t["clear-cache"]){const r=wn();e.info(`Cleared ${String(r)} cached AI response${r===1?"":"s"}.`);return}if(t.test){await _n(e,a?.ai);return}zn(n,e,a?.ai)},"execute"),name:"ai",options:[{defaultValue:!1,description:"Test the best available AI provider with a quick prompt",name:"test",type:Boolean},{defaultValue:!1,description:"Show AI response cache statistics",name:"cache-stats",type:Boolean},{defaultValue:!1,description:"Clear the AI response cache",name:"clear-cache",type:Boolean},{description:"Output format: table or json (default: table)",name:"format",type:String}]};var Gn=Object.defineProperty,d=y((e,t)=>Gn(e,"name",{value:t,configurable:!0}),"c$4");const qn=/(\d+)\.(\d+)\.(\d+)(?:-([a-z0-9.]+))?/i,Kn=/^([\^~]|>=|<=|[><=])/,Yn=/^(?:'([^']+)'|"([^"]+)"|([^:\s]+)):\s*(?:'([^']+)'|"([^"]+)"|(\S+))/,Xn=/^catalog:/m,Zn=/^catalogs:/m,Qn=/^(@[^:]+):registry$/,er=/^\/\/(.+)\/:_authToken$/,tr=/\*+/g,ar=/[.+^${}()|[\]\\]/g,Ze=/[.*+?^${}()|[\]\\]/g,Ct=/^['"]|['"]$/g,nr=/^https?:\/\//,xt=/\/$/,rr=/\n(\s+)/,or=new Set(["dependencies","devDependencies","optionalDependencies","peerDependencies"]),Ve=".vis-backup",D=d(e=>{const 
t=qn.exec(e);if(t)return{major:Number(t[1]),minor:Number(t[2]),patch:Number(t[3]),prerelease:t[4]??""}},"parseVersion"),Et=d(e=>Kn.exec(e)?.[1]??"","extractPrefix"),Rt=d((e,t)=>e.major!==t.major?"major":e.minor!==t.minor?"minor":e.patch!==t.patch?"patch":"none","getUpdateType"),Qe=d((e,t)=>t.major!==e.major?t.major>e.major:t.minor!==e.minor?t.minor>e.minor:t.patch!==e.patch?t.patch>e.patch:e.prerelease&&!t.prerelease?!0:e.prerelease&&t.prerelease?t.prerelease>e.prerelease:!1,"isNewer"),et=d((e,t)=>{const a=t.replaceAll(tr,"*").replaceAll(ar,String.raw`\$&`);return new RegExp(`^${a.replaceAll("*",".*").replaceAll("?",".")}$`).test(e)},"matchesPattern"),sr=d((e,t,a)=>a.some(n=>et(e,n))?!1:t.length>0?t.some(n=>et(e,n)):!0,"matchesFilters"),Nt=d(e=>{const t=Yn.exec(e);if(!t)return;const a=t[1]??t[2]??t[3],n=t[4]??t[5]??t[6];if(!(!a||!n))return[a,n]},"parseYamlEntry"),Ot=d((e,t,a,n)=>{e.has(t)||e.set(t,new Map);const r=e.get(t);r&&r.set(a,n)},"setCatalogEntry"),ir=d((e,t,a)=>{if(a<2)return;const n=Nt(t);n&&Ot(e,"default",n[0],n[1])},"parseCatalogSection"),cr=d((e,t,a,n)=>{if(a===2&&t.endsWith(":"))return t.slice(0,-1).trim().replaceAll(Ct,"");if(a>=4&&n){const r=Nt(t);r&&Ot(e,n,r[0],r[1])}return n},"parseCatalogsSection"),Bt=d(e=>e==="catalog:"||e.startsWith("catalog:")?"catalog":e==="catalogs:"||e.startsWith("catalogs:")?"catalogs":"none","detectTopLevelSection"),lr=d(e=>{const t=new Map;let a="none",n="";for(const r of e.split(`
|
|
78
|
+
`)){const o=r.trimStart(),s=r.length-o.length;if(s===0&&o.length>0&&!o.startsWith("#")){a=Bt(o),a==="catalogs"&&(n="");continue}o.length===0||o.startsWith("#")||(a==="catalog"&&ir(t,o,s),a==="catalogs"&&(n=cr(t,o,s,n)))}return t},"parseCatalogsFromYaml"),pr=d(e=>{const t=u(e,"pnpm-workspace.yaml");if(!b(t))return!1;const a=A(t);return Xn.test(a)||Zn.test(a)},"hasPnpmCatalogs"),ur=d(e=>{const t=u(e,"pnpm-workspace.yaml");if(!b(t))return new Map;const a=A(t);return lr(a)},"readPnpmCatalogs"),It=d(e=>{if(b(e))try{return W(e)}catch{return}},"readPackageJsonSafe"),dr=d(e=>{const t=It(u(e,"package.json"));return!!(t?.workspaces?.catalog||t?.workspaces?.catalogs)},"hasBunCatalogs"),gr=d(e=>{const t=new Map;if(e.workspaces?.catalog&&typeof e.workspaces.catalog=="object"&&t.set("default",new Map(Object.entries(e.workspaces.catalog))),e.workspaces?.catalogs&&typeof e.workspaces.catalogs=="object")for(const[a,n]of Object.entries(e.workspaces.catalogs))typeof n=="object"&&n!==void 0&&t.set(a,new Map(Object.entries(n)));return t},"parseBunCatalogs"),fr=d(e=>{const t=It(u(e,"package.json"));return t?gr(t):new Map},"readBunCatalogs"),Fe=d(e=>{const t=e.lastIndexOf(":");if(t===-1)return;const a=e.slice(t+1);if(or.has(a))return{depType:a,relativePath:e.slice(0,t)}},"parseCompositeCatalogName"),mr=d(e=>e?.dev?["devDependencies"]:e?.prod?["dependencies"]:["dependencies","devDependencies","peerDependencies","optionalDependencies"],"getDepTypesToInclude"),hr=d((e,t,a)=>{const n=new Set;t&&n.add(t);for(const r of a){const o=u(e,r,"package.json");if(b(o))try{const s=W(o);s.name&&n.add(s.name)}catch{}}return n},"collectInternalPackageNames"),yr=d((e,t)=>{const a=new Map;for(const[n,r]of Object.entries(e))t.has(n)||r.startsWith("workspace:")||r.startsWith("file:")||r.startsWith("link:")||a.set(n,r);return a},"filterExternalDeps"),vr=d((e,t,a,n,r,o)=>{const s=t==="."?a:u(e,t,"package.json");if(!b(s))return;let i;try{i=W(s)}catch{return}for(const c of n){const l=i[c];if(!l||typeof 
l!="object")continue;const p=yr(l,r);p.size>0&&o.set(`${t}:${c}`,p)}},"scanDirectoryDeps"),we=d((e,t)=>{const a=new Map,n=u(e,"package.json");if(!b(n))return a;const r=W(n);let o=[];const s=r.workspaces;if(s){const p=Array.isArray(s)?s:s.packages;o=gt(e,p)}const i=hr(e,r.name,o),c=mr(t),l=[".",...o];for(const p of l)vr(e,p,n,c,i,a);return a},"readPackageJsonDeps"),be=d(e=>{const t=u(e,"package.json");if(!b(t))return!1;try{const a=W(t);return!!(a.dependencies||a.devDependencies||a.peerDependencies||a.optionalDependencies)}catch{return!1}},"hasPackageJsonDeps"),kr=d((e,t)=>{const a=new Map;for(const n of t){const r=Fe(n.catalogName);if(!r)continue;const o=r.relativePath==="."?u(e,"package.json"):u(e,r.relativePath,"package.json");a.has(o)||a.set(o,[]);const s=a.get(o);s&&s.push({depType:r.depType,newRange:n.newRange,packageName:n.packageName})}for(const[n,r]of a){const o=W(n);for(const{depType:s,newRange:i,packageName:c}of r)o[s]&&(o[s][c]=i);st(n,o,{detectIndent:!0,overwrite:!0})}},"applyPackageJsonUpdates"),$r=d((e,t)=>t==="bun"?dr(e)||be(e):t==="npm"||t==="yarn"?be(e):pr(e)||be(e),"hasCatalogs"),De=d((e,t,a)=>{if(t==="bun"){const r=fr(e);return r.size>0?r:we(e,a)}if(t==="npm"||t==="yarn")return we(e,a);const n=ur(e);return n.size>0?n:we(e,a)},"readCatalogs"),tt=d(e=>{const t=new Map,a=new Map;let n="https://registry.npmjs.org";for(const r of e.split(`
|
|
79
|
+
`)){const o=r.trim();if(!o||o.startsWith("#")||o.startsWith(";"))continue;const s=o.indexOf("=");if(s===-1)continue;const i=o.slice(0,s).trim(),c=o.slice(s+1).trim(),l=Qn.exec(i);if(l?.[1]){t.set(l[1],c);continue}if(i==="registry"){n=c;continue}const p=er.exec(i);p?.[1]&&a.set(p[1],c)}return{authTokens:a,defaultRegistry:n,registries:t}},"parseNpmrc"),wr=d((e,t)=>({authTokens:new Map([...e.authTokens,...t.authTokens]),defaultRegistry:t.defaultRegistry==="https://registry.npmjs.org"?e.defaultRegistry:t.defaultRegistry,registries:new Map([...e.registries,...t.registries])}),"mergeNpmrcConfigs"),Pt=d(e=>{const t={authTokens:new Map,defaultRegistry:"https://registry.npmjs.org",registries:new Map},a=process.env.HOME??process.env.USERPROFILE??"",n=u(a,".npmrc");let r=a&&b(n)?tt(A(n)):t;const o=u(e,".npmrc");return b(o)&&(r=wr(r,tt(A(o)))),r},"loadNpmrc"),br=d((e,t)=>{let a=t.defaultRegistry;if(e.startsWith("@")){const r=e.split("/")[0];if(r&&t.registries.has(r)){const o=t.registries.get(r);o&&(a=o)}}const n=a.replace(nr,"").replace(xt,"");return{token:t.authTokens.get(n),url:a}},"getRegistryForPackage"),Sr=15e3,At=d(async(e,t,a=Sr)=>{const n=`${(t?.url??"https://registry.npmjs.org").replace(xt,"")}/${e}`,r={Accept:"application/vnd.npm.install-v1+json"};t?.authToken&&(r.Authorization=`Bearer ${t.authToken}`);const o=new AbortController,s=setTimeout(()=>o.abort(),a);try{const i=await fetch(n,{headers:r,signal:o.signal});if(!i.ok)throw new Error(`Failed to fetch ${e}: ${String(i.status)} ${i.statusText}`);const c=await i.json();return{latest:c["dist-tags"]?.latest??"",versions:Object.keys(c.versions??{})}}finally{clearTimeout(s)}},"fetchPackageVersions"),jr=d(e=>{const t=e.database_specific?.severity?.toUpperCase();if(t==="CRITICAL"||t==="HIGH"||t==="MODERATE"||t==="LOW")return t;const a=e.severity?.find(n=>n.type==="CVSS_V3")?.score;if(a){const n=Number.parseFloat(a);return n>=9?"CRITICAL":n>=7?"HIGH":n>=4?"MODERATE":"LOW"}return"UNKNOWN"},"mapOsvSeverity"),Cr=d(e=>{const 
t=e.severity?.find(a=>a.type==="CVSS_V3")?.score;return t?Number.parseFloat(t):void 0},"mapOsvCvss"),xr=d(e=>{const t=[];for(const a of e.affected??[])for(const n of a.ranges??[])for(const r of n.events??[])r.fixed&&t.push(r.fixed);return t},"mapOsvFixedVersions"),Er=d(e=>({cvssScore:Cr(e),fixedVersions:xr(e),id:e.id,severity:jr(e),summary:e.summary??""}),"mapOsvVuln"),Lt=d(async(e,t=1e4)=>{if(e.length===0)return new Map;const a=e.map(o=>({package:{ecosystem:"npm",name:o.name},version:o.version})),n=new AbortController,r=setTimeout(()=>n.abort(),t);try{const o=await fetch("https://api.osv.dev/v1/querybatch",{body:JSON.stringify({queries:a}),headers:{"Content-Type":"application/json"},method:"POST",signal:n.signal});if(!o.ok)return new Map;const s=await o.json(),i=new Map;for(const[c,l]of e.entries()){const p=s.results[c]?.vulns;p&&p.length>0&&i.set(l.name,p.map(g=>Er(g)))}return i}catch{return new Map}finally{clearTimeout(r)}},"fetchVulnerabilities"),Rr=d((e,t)=>e.parsed.major!==t.parsed.major?t.parsed.major-e.parsed.major:e.parsed.minor!==t.parsed.minor?t.parsed.minor-e.parsed.minor:t.parsed.patch-e.parsed.patch,"sortVersionCandidates"),Nr=d((e,t,a,n,r)=>{const o=D(a);if(o){if(n==="latest"){const s=D(t);return!s||!r&&s.prerelease||!Qe(o,s)?void 0:t}return e.map(s=>({parsed:D(s),raw:s})).filter(s=>!s.parsed||!r&&s.parsed.prerelease||!Qe(o,s.parsed)?!1:n==="patch"?s.parsed.major===o.major&&s.parsed.minor===o.minor:s.parsed.major===o.major).toSorted(Rr)[0]?.raw}},"findTargetVersion"),Or=d((e,t)=>{const a=[];for(const[n,r]of e)for(const[o,s]of r)s.startsWith("workspace:")||s.startsWith("file:")||s.startsWith("link:")||s==="*"||sr(o,t.include,t.exclude)&&a.push({catalogName:n,packageName:o,range:s});return a},"collectEntries"),Br=d(async(e,t,a)=>{const n=new Map,r=[],o=8;let s=0;for(let i=0;i<e.length;i+=o){const c=e.slice(i,i+o),l=await Promise.allSettled(c.map(async p=>{const g=t?br(p,t):void 0,f=await At(p,g?{authToken:g.token,url:g.url}:void 0);return 
n.set(p,f),p}));for(const[p,g]of l.entries())if(s+=1,g.status==="rejected"){const f=c[p];f&&r.push(f)}a&&a(s,e.length)}return{failed:r,versionCache:n}},"fetchVersionsBatched"),Ir=d((e,t,a)=>{const n=[];for(const r of e){const o=t.get(r.packageName);if(!o)continue;const s=Nr(o.versions,o.latest,r.range,a.target,a.includePrerelease);if(!s)continue;const i=D(r.range),c=D(s);if(!i||!c)continue;const l=Rt(i,c);if(l==="none")continue;const p=Et(r.range);n.push({catalogName:r.catalogName,currentRange:r.range,newRange:`${p}${s}`,packageName:r.packageName,targetVersion:s,updateType:l})}return n},"buildOutdatedEntries"),Pr=d(async(e,t)=>{const a=[...new Map(t.map(r=>{const o=D(r.range);return[r.packageName,{name:r.packageName,version:o?`${String(o.major)}.${String(o.minor)}.${String(o.patch)}`:""}]})).values()].filter(r=>r.version),n=await Lt(a);for(const r of e){const o=n.get(r.packageName);o&&o.length>0&&(r.vulnerabilities=o)}},"enrichWithSecurity"),Tt=d(async(e,t,a,n)=>{const r=Or(e,t),o=[...new Set(r.map(l=>l.packageName))],{failed:s,versionCache:i}=await Br(o,a,n),c=Ir(r,i,t);return t.security&&c.length>0&&await Pr(c,r),{failed:s,outdated:c}},"checkOutdated"),Je=d((e,t)=>t==="bun"?u(e,"package.json"):u(e,"pnpm-workspace.yaml"),"getCatalogFilePath"),Ar=d((e,t)=>{const a=u(e,Ve),n=new Set;for(const r of t){const o=Fe(r.catalogName);o&&n.add(o.relativePath==="."?"package.json":u(o.relativePath,"package.json"))}if(n.size!==0){Ge(a);for(const r of n){const o=u(e,r);if(b(o)){const s=u(a,r),i=va(s);Ge(i),te(s,A(o))}}return a}},"createPackageJsonBackup"),Lr=d((e,t,a)=>{if((t==="npm"||t==="yarn")&&a)return Ar(e,a);const n=Je(e,t);if(!b(n))return;const r=`${n}.bak`,o=A(n);return te(r,o),r},"createBackup"),Tr=d(e=>{const t=u(e,Ve);if(!b(t))return!1;for(const a of Ee(t,{includeDirs:!1})){const n=a.path.slice(t.length+1),r=u(e,n);te(r,A(a.path))}return ya(t),!0},"restorePackageJsonBackup"),Ur=d((e,t)=>{if(t==="npm"||t==="yarn")return Tr(e);const 
a=Je(e,t),n=`${a}.bak`;if(!b(n))return!1;const r=A(n);return te(a,r),!0},"restoreFromBackup"),Mr=d((e,t)=>{if(t==="npm"||t==="yarn")return b(u(e,Ve));const a=Je(e,t);return b(`${a}.bak`)},"hasBackup"),Vr=d(e=>JSON.stringify(e,void 0,2),"formatOutdatedJson"),Ut=d(e=>e.map(t=>`${t.packageName} ${t.currentRange} → ${t.newRange}`).join(`
|
|
80
|
+
`),"formatOutdatedMinimal"),V=d(e=>e?Array.isArray(e)?e:[e]:[],"toFilterArray"),Fr=d(e=>{const t=new Map;for(const a of e){t.has(a.catalogName)||t.set(a.catalogName,[]);const n=t.get(a.catalogName);n&&n.push(a)}return t},"groupByCatalog"),Dr=d(e=>{const t=Fe(e);return t?`${t.relativePath==="."?"root":t.relativePath} (${t.depType})`:`Catalog: ${e}`},"formatCatalogDisplayName"),Mt=d((e,t)=>{const a=Fr(e);for(const[n,r]of a){const o=Ne();o.setHeaders(["Package","Current","Target","Type"]);for(const i of r){const c=i.vulnerabilities&&i.vulnerabilities.length>0?`[SEC] ${i.packageName}`:i.packageName;if(o.addRow([c,i.currentRange,i.newRange,i.updateType]),i.vulnerabilities)for(const l of i.vulnerabilities)o.addRow([` ${l.severity} ${l.id}`,{colSpan:3,content:l.summary}])}const s=Dr(n);t.info(`${s}
|
|
81
|
+
${o.toString()}
|
|
82
|
+
`)}},"formatOutdatedTable"),Vt=d(e=>{const t=e.filter(i=>i.updateType==="major").length,a=e.filter(i=>i.updateType==="minor").length,n=e.filter(i=>i.updateType==="patch").length,r=e.filter(i=>i.vulnerabilities&&i.vulnerabilities.length>0).length,o=[];t&&o.push(`${String(t)} major`),a&&o.push(`${String(a)} minor`),n&&o.push(`${String(n)} patch`),r&&o.push(`${String(r)} with vulnerabilities`);const s=`Found ${String(e.length)} outdated (${o.join(", ")})`;return ct(s,{headerText:"Summary",padding:{left:1,right:1}})},"formatSummary"),Jr=d((e,t)=>{const a=e.replaceAll(Ze,String.raw`\$&`),n=t.replaceAll(Ze,String.raw`\$&`);return new RegExp(String.raw`^(?:'${a}'|"${a}"|${a}):\s*['"]?${n}['"]?`)},"buildLineMatchRegex"),Wr=d((e,t,a)=>Jr(t,a).test(e),"lineMatchesPackage"),_r=d(e=>{const t=new Map;for(const a of e){t.has(a.catalogName)||t.set(a.catalogName,new Map);const n=t.get(a.catalogName);n&&n.set(a.packageName,{newRange:a.newRange,oldRange:a.currentRange})}return t},"buildUpdateMap"),at=d((e,t,a)=>{if(!a)return e;for(const[n,{newRange:r,oldRange:o}]of a)if(Wr(t,n,o))return e.replace(o,r);return e},"applyLineUpdate"),zr=d((e,t,a,n)=>{const r=e.trimStart(),o=e.length-r.length;return r.length===0||r.startsWith("#")?e:t==="catalog"&&o>=2?at(e,r,n.get("default")):t==="catalogs"&&o>=4&&a?at(e,r,n.get(a)):e},"processYamlLineForUpdate"),Hr=d((e,t)=>{const a=u(e,"pnpm-workspace.yaml"),n=A(a).split(`
|
|
83
|
+
`),r=_r(t);let o="none",s="";const i=[];for(const c of n){const l=c.trimStart(),p=c.length-l.length;p===0&&l.length>0&&!l.startsWith("#")&&(o=Bt(l),o==="catalogs"&&(s="")),o==="catalogs"&&p===2&&l.endsWith(":")&&(s=l.slice(0,-1).trim().replaceAll(Ct,"")),i.push(zr(c,o,s,r))}te(a,i.join(`
|
|
84
|
+
`))},"applyPnpmCatalogUpdates");d(e=>{const t=rr.exec(e);if(!t)return 2;const a=t[1];return a?a.includes(" ")?a:a.length:2},"detectJsonIndent");const Gr=d((e,t)=>{const a=u(e,"package.json"),n=W(a);for(const r of t)if(r.catalogName==="default")n.workspaces?.catalog&&(n.workspaces.catalog[r.packageName]=r.newRange);else{const o=n.workspaces?.catalogs?.[r.catalogName];o&&(o[r.packageName]=r.newRange)}st(a,n,{detectIndent:!0,overwrite:!0})},"applyBunCatalogUpdates"),qr=d((e,t,a,n=!0)=>{let r;return n&&(r=Lr(e,a,t)),a==="npm"||a==="yarn"?kr(e,t):a==="bun"?Gr(e,t):Hr(e,t),r},"applyCatalogUpdates"),Kr=d(async e=>{const{createInterface:t}=await import("node:readline"),a=t({input:process.stdin,output:process.stdout}),n=d(o=>new Promise(s=>{a.question(o,i=>s(i.trim()))}),"ask");process.stdout.write(`
|
|
85
|
+
Outdated catalog dependencies:
|
|
86
|
+
`);for(const[o,s]of e.entries())s&&process.stdout.write(` ${String(o+1)}. ${s.packageName}: ${s.currentRange} → ${s.newRange} (${s.updateType})
|
|
87
|
+
`);process.stdout.write(`
|
|
88
|
+
`);const r=await n("Apply updates? [a]ll / [n]one / [s]elect: ");if(r.toLowerCase()==="a"||r.toLowerCase()==="all")return a.close(),e;if(r.toLowerCase()==="n"||r.toLowerCase()==="none")return a.close(),[];if(r.toLowerCase()==="s"||r.toLowerCase()==="select"){const o=await n("Enter numbers to apply (comma-separated): ");return a.close(),o.split(",").map(s=>Number.parseInt(s.trim(),10)-1).filter(s=>s>=0&&s<e.length).map(s=>e[s]).filter(s=>s!==void 0)}return a.close(),[]},"promptPackageSelection"),Yr=/github\.com[/:]([\w.-]+)\/([\w.-]+?)(?:\.git|\/|$)/,Xr=d(async(e,t=1e4)=>{const a=[],n=new AbortController,r=setTimeout(()=>n.abort(),t);try{const o=e.map(async s=>{const i=`https://www.npmjs.com/package/${s.packageName}`;try{const c=await fetch(`https://registry.npmjs.org/${s.packageName}`,{headers:{Accept:"application/json"},signal:n.signal});if(!c.ok)return{npmUrl:i,packageName:s.packageName};const l=(await c.json()).repository?.url;if(!l)return{npmUrl:i,packageName:s.packageName};const p=Yr.exec(l);if(!p)return{npmUrl:i,packageName:s.packageName,repoUrl:l};const g=p[1],f=p[2],w=`https://github.com/${g}/${f}/releases/tag/v${s.targetVersion}`;return{npmUrl:i,packageName:s.packageName,releaseUrl:w,repoUrl:`https://github.com/${g}/${f}`}}catch{return{npmUrl:i,packageName:s.packageName}}});a.push(...await Promise.all(o))}finally{clearTimeout(r)}return a},"fetchChangelogInfo");var Zr=Object.defineProperty,Qr=y((e,t)=>Zr(e,"name",{value:t,configurable:!0}),"w$3");const eo=/^[\^~>=<]+/,to={argument:{description:"Package name to analyze (e.g., react)",name:"package",required:!0,type:String},description:"Analyze a single package update with AI",examples:[["vis analyze react","Analyze updating react to latest"],["vis analyze react 19.0.0","Analyze updating react to specific version"],["vis analyze react --ai-type security","Run security-focused analysis"],["vis analyze react --format json","Output as 
JSON"]],execute:Qr(async({argument:e,logger:t,options:a,visConfig:n,workspaceRoot:r})=>{if(!r)throw new Error("Could not determine workspace root. Run this command inside a monorepo.");const o=e,s=o[0];if(!s)throw new Error("Package name is required. Usage: vis analyze <package> [version]");const i=o[1],{packageManager:c}=xe(r);let l,p="default";const g=De(r,c);for(const[O,R]of g){const U=R.get(s);if(U){l=U,p=O;break}}if(!l)throw new Error(`Package "${s}" not found in any catalog or package.json. Make sure it exists in your workspace dependencies.`);let f;if(i)f=i;else{t.info(`Fetching latest version for ${s}...
|
|
89
|
+
`);const O=await At(s);if(!O.latest)throw new Error(`Could not determine latest version for "${s}".`);f=O.latest}const w=D(l),$=D(f);if(!w||!$)throw new Error(`Could not parse versions: current="${l}", target="${f}".`);const x=Rt(w,$);if(x==="none"){t.info(`${s} is already at ${f}. Nothing to analyze.`);return}const G=Et(l),j={catalogName:p,currentRange:l,newRange:`${G}${f}`,packageName:s,targetVersion:f,updateType:x},T=Ue(a["ai-type"]??"impact");if(T==="security"||a.security){t.info(`Checking for known vulnerabilities...
|
|
90
|
+
`);const O=(await Lt([{name:s,version:l.replace(eo,"")}])).get(s);O&&O.length>0&&(j.vulnerabilities=O)}const Q=await Me([j],t,n?.ai,T);(a.format??"table")==="json"?process.stdout.write(`${JSON.stringify(Q,void 0,2)}
|
|
91
|
+
`):t.info(de(Q))},"execute"),name:"analyze",options:[{description:"AI analysis type: impact, security, compatibility, or recommend (default: impact)",name:"ai-type",type:String},{defaultValue:!1,description:"Check for known security vulnerabilities",name:"security",type:Boolean},{description:"Output format: table or json (default: table)",name:"format",type:String}]};var ao=Object.defineProperty,no=y((e,t)=>ao(e,"name",{value:t,configurable:!0}),"g$3");const ro={alias:"c",argument:{description:"Specific packages to check (checks all if omitted)",name:"packages",type:String},description:"Check for outdated dependencies in workspace packages",examples:[["vis check","Check all catalog dependencies"],["vis check react","Check specific packages"],["vis check --target minor","Only show minor/patch updates"],["vis check --exclude '@types/*'","Exclude packages by pattern"]],execute:no(async({argument:e,logger:t,options:a,visConfig:n,workspaceRoot:r})=>{if(!r)throw new Error("Could not determine workspace root. Run this command inside a monorepo.");const o=r,{packageManager:s}=xe(o),i=Pt(o),c=n?.update??{},l=De(o,s,{dev:a.dev,prod:a.prod});if(l.size===0){t.info("No catalogs found.");return}const p=a.target??c.target??"latest";if(!["latest","minor","patch"].includes(p))throw new Error(`Invalid target "${p}". Use: latest, minor, or patch.`);const g={exclude:[...V(a.exclude),...V(c.exclude)],include:[...V(a.include),...V(c.include),...e],includePrerelease:a.prerelease||c.prerelease||!1,security:a.security||a.ai||c.security||!1,target:p};let f=0;for(const T of l.values())f+=T.size;t.info(`Checking ${String(f)} catalog dependencies against npm registry...
|
|
92
|
+
`);const{failed:w,outdated:$}=await Tt(l,g,i);if(w.length>0&&t.warn(`Failed to fetch: ${w.join(", ")}`),$.length===0){t.info("All catalog dependencies are up to date.");return}const x=a.format??c.format??"table",G=Ue(a["ai-type"]??"impact"),j=a.ai?await Me($,t,n?.ai,G):void 0;if(x==="json"){const T={failed:w,outdated:$};j&&(T.aiAnalysis=j),process.stdout.write(`${JSON.stringify(T,void 0,2)}
|
|
93
|
+
`)}else x==="minimal"?process.stdout.write(`${Ut($)}
|
|
94
|
+
`):(Mt($,t),t.info(Vt($)),j&&(t.info(""),t.info(de(j))));a["exit-code"]&&$.length>0&&(process.exitCode=1)},"execute"),name:"check",options:[{alias:"t",description:"Update target: latest, minor, or patch (default: latest)",name:"target",type:String},{description:"Glob pattern to include packages (repeatable)",lazyMultiple:!0,name:"include",type:String},{description:"Glob pattern to exclude packages (repeatable)",lazyMultiple:!0,name:"exclude",type:String},{defaultValue:!1,description:"Include prerelease versions",name:"prerelease",type:Boolean},{defaultValue:!1,description:"Check for known security vulnerabilities (via OSV.dev)",name:"security",type:Boolean},{description:"Output format: table, json, or minimal (default: table)",name:"format",type:String},{defaultValue:!1,description:"Exit with code 1 if outdated dependencies found (for CI)",name:"exit-code",type:Boolean},{defaultValue:!1,description:"Run AI analysis on outdated packages",name:"ai",type:Boolean},{description:"AI analysis type: impact, security, compatibility, or recommend (default: impact)",name:"ai-type",type:String},{alias:"D",defaultValue:!1,description:"Check only devDependencies (npm/yarn mode)",name:"dev",type:Boolean},{alias:"P",defaultValue:!1,description:"Check only dependencies (npm/yarn mode)",name:"prod",type:Boolean}]};var oo=Object.defineProperty,We=y((e,t)=>oo(e,"name",{value:t,configurable:!0}),"p$2");const so=We(e=>{const t=["Project Dependency Graph","=======================",""];for(const[a,n]of Object.entries(e.nodes)){const r=e.dependencies[a]??[],o=n.type==="application"?" (app)":"";if(r.length===0)t.push(` ${a}${o}`);else{t.push(` ${a}${o}`);for(const s of r)t.push(` └── ${s.target}`)}t.push("")}return t.join(`
|
|
95
|
+
`)},"projectGraphToAscii"),io=We(e=>{const t=Object.values(e.nodes).map(a=>({name:a.name,type:a.type}));return{edges:Object.values(e.dependencies).flat(),nodes:t}},"projectGraphToJson"),co={description:"Visualize the project dependency graph",examples:[["vis graph","Show ASCII dependency graph"],["vis graph --format=dot","Output in Graphviz DOT format"],["vis graph --format=json --output=graph.json","Save JSON graph to file"]],execute:We(async({logger:e,options:t,visConfig:a,workspaceRoot:n})=>{if(!n)throw new Error("Could not determine workspace root. Run this command inside a monorepo.");const r=n,{workspace:o}=ye(r,a),s=Ae(r,o),i=t.format??"ascii",c=t.output;let l;switch(i){case"dot":{l=ua(s);break}case"json":{l=JSON.stringify(io(s),void 0,2);break}default:l=so(s)}c?(E(c,l,"utf8"),e.info(`Graph written to ${c}`)):e.info(l)},"execute"),name:"graph",options:[{alias:"f",defaultValue:"ascii",description:"Output format: ascii, dot, json",name:"format",type:String},{alias:"o",description:"Write output to file instead of stdout",name:"output",type:String}]},Ft=["pre-commit","pre-merge-commit","prepare-commit-msg","commit-msg","post-commit","applypatch-msg","pre-applypatch","post-applypatch","pre-rebase","post-rewrite","post-checkout","post-merge","pre-push","pre-auto-gc"],ge=".vis-hooks";var lo=Object.defineProperty,fe=y((e,t)=>lo(e,"name",{value:t,configurable:!0}),"o$3");const po=/\/$/,uo=fe(e=>{let t='"$0"';for(let a=0;a<e;a+=1)t=`"$(dirname ${t})"`;return t},"nestedDirname"),go=fe(e=>{const t=e.split("/").filter(a=>a!==""&&a!==".").length+2;return`#!/usr/bin/env sh
|
|
96
|
+
{ [ "$VIS_GIT_HOOKS" = "2" ]; } && set -x
|
|
97
|
+
n=$(basename "$0")
|
|
98
|
+
s=$(dirname "$(dirname "$0")")/$n
|
|
99
|
+
|
|
100
|
+
[ ! -f "$s" ] && exit 0
|
|
101
|
+
|
|
102
|
+
{ [ "\${VIS_GIT_HOOKS-}" = "0" ]; } && exit 0
|
|
103
|
+
|
|
104
|
+
d=${uo(t)}
|
|
105
|
+
export PATH="$d/node_modules/.bin:$PATH"
|
|
106
|
+
sh -e "$s" "$@"
|
|
107
|
+
c=$?
|
|
108
|
+
|
|
109
|
+
[ $c != 0 ] && echo "vis - $n script failed (code $c)"
|
|
110
|
+
[ $c = 127 ] && echo "vis - command not found in PATH=$PATH"
|
|
111
|
+
exit $c`},"hookScript"),Dt=fe((e=ge)=>{if(process.env.VIS_GIT_HOOKS==="0")return{isError:!1,message:"skip install (git hooks disabled via VIS_GIT_HOOKS=0)"};if(e.includes(".."))return{isError:!0,message:'".." is not allowed in hooks directory path'};const t=F("git",["rev-parse","--show-prefix"]);if(t.status===void 0||t.status===null)return{isError:!0,message:"git command not found"};if(t.status!==0)return{isError:!1,message:".git directory not found (not a git repository)"};const a=fe((l="")=>u(e,"_",l),"internal"),n=t.stdout.toString().trim().replace(po,""),r=n?`${n}/${e}/_`:`${e}/_`,o=F("git",["config","--local","core.hooksPath"]),s=o.status===0?o.stdout?.toString().trim():"";if(s&&s!==r)return{isError:!1,message:`core.hooksPath is already set to "${s}", skipping`};const{status:i,stderr:c}=F("git",["config","core.hooksPath",r]);if(i==null)return{isError:!0,message:"git command not found"};if(i)return{isError:!0,message:String(c)};Oe(a(),{recursive:!0}),E(a(".gitignore"),"*"),E(a("h"),go(e),{mode:493});for(const l of Ft)E(a(l),`#!/usr/bin/env sh
|
|
112
|
+
. "$(dirname "$0")/h"`,{mode:493});return{isError:!1,message:""}},"installHooks");var fo=Object.defineProperty,mo=y((e,t)=>fo(e,"name",{value:t,configurable:!0}),"t$1");const Jt=[".lintstagedrc.json",".lintstagedrc"],Wt=[".lintstagedrc.yaml",".lintstagedrc.yml",".lintstagedrc.mjs","lint-staged.config.mjs",".lintstagedrc.cjs","lint-staged.config.cjs",".lintstagedrc.js","lint-staged.config.js",".lintstagedrc.ts","lint-staged.config.ts",".lintstagedrc.mts","lint-staged.config.mts",".lintstagedrc.cts","lint-staged.config.cts"],_e=[...Jt,...Wt],ho=[/^((?:[A-Z_][A-Z0-9_]*(?:=\S*)?\s+)*)(pnpm|pnpm exec|npx|yarn|yarn run|npm exec|npm run|bunx|bun run|bun x)\s+lint-staged\b/,/^((?:[A-Z_][A-Z0-9_]*(?:=\S*)?\s+)*)lint-staged\b/],yo=["husky","lint-staged"],vo=/\(is-ci \|\| husky \|\| exit 0\)\s*&&\s*/g,ko=/\bhusky(?:\s+install)?\s*&&\s*/g,$o=/\s*&&\s*husky(?:\s+install)?/g,wo=/\s*\|\|\s*husky(?:\s+install)?/g,bo=[vo,ko,$o,wo],_t=mo(e=>{if(e==="husky"||e==="husky install")return;let t=e;for(const a of bo)t=t.replace(a,"");return t=t.trim(),t===e?e:t||void 0},"cleanHuskyFromScript");var So=Object.defineProperty,z=y((e,t)=>So(e,"name",{value:t,configurable:!0}),"a$1");const jo=[".husky",".config/husky"],Co=/^\. 
"\$\(dirname "\$0"\)\/common\.sh"\s*/m,zt=z(e=>{for(const t of jo)if(k(u(e,t))&&Ie(u(e,t)).isDirectory())return t},"detectHuskyDirectory"),xo=z((e,t)=>{const a=new Map,n=u(e,t),r=new Set(Ft);for(const o of Be(n)){if(o==="_"||o===".gitignore"||o.startsWith("."))continue;const s=u(n,o);Ie(s).isFile()&&(!r.has(o)&&o!=="common.sh"||a.set(o,B(s,"utf8")))}return a},"readHuskyHooks"),Eo=z(e=>e.replace(Co,""),"transformHookScript"),Ht=z(e=>k(u(e,"pnpm-lock.yaml"))||k(u(e,"pnpm-workspace.yaml"))?"pnpm":k(u(e,"yarn.lock"))?"yarn":k(u(e,"bun.lockb"))||k(u(e,"bun.lock"))?"bun":"npm","detectPackageManager"),Ro=z((e,t)=>{const a=Ht(e),n={bun:["bun","remove","husky"],npm:["npm","uninstall","husky"],pnpm:["pnpm","remove","husky"],yarn:["yarn","remove","husky"]},[r,...o]=n[a];t.info(`Removing husky package via ${a}...`);const s=F(r,o,{cwd:e,encoding:"utf8",stdio:"pipe"});return s.status!==0?(t.info(`Warning: failed to remove husky via ${a} (${s.stderr?.trim()??"unknown error"})`),!1):!0},"uninstallHuskyPackage"),No=z((e,t,a)=>{const n=_t(a);if(n!==a)return n?(e[t]=n,`updated "${t}" script`):(delete e[t],`removed "${t}" script (was: "${a}")`)},"processScript"),Oo=z(e=>{const t=u(e,"package.json");if(!k(t))return{modified:!1,removedScriptReferences:[]};const a=B(t,"utf8"),n=JSON.parse(a),r=[],o=n.scripts;if(o)for(const[s,i]of Object.entries(o)){if(typeof i!="string")continue;const c=No(o,s,i);c&&r.push(c)}return r.length>0&&E(t,`${JSON.stringify(n,void 0,4)}
|
|
113
|
+
`,"utf8"),{modified:r.length>0,removedScriptReferences:r}},"cleanPackageJsonScripts"),Gt=z((e,t,a)=>{const n=zt(e);if(!n)return{isError:!0,message:"No husky installation found (.husky/ or .config/husky/)"};a.info(`Found husky at ${n}/`);const r=xo(e,n);r.size===0&&a.info("No user-defined hooks found in husky directory.");const o=F("git",["config","--local","core.hooksPath"]),s=o.status===0?o.stdout?.toString().trim():"";s&&(s===".husky/_"||s.startsWith(".husky"))&&F("git",["config","--local","--unset","core.hooksPath"]);const i=Dt(t);if(i.isError)return i;i.message&&a.info(i.message);const c=u(e,t);Oe(c,{recursive:!0});let l=0;for(const[f,w]of r){if(f==="common.sh"){E(u(c,f),w,{mode:493}),a.info(" Copied common.sh");continue}const $=Eo(w);E(u(c,f),$,{mode:493}),l+=1,a.info(` Migrated ${f}`)}Ro(e,a);const p=Oo(e);if(p.modified){a.info("Updated package.json scripts:");for(const f of p.removedScriptReferences)a.info(` ${f}`)}const g=u(e,n);return ee(g,{force:!0,recursive:!0}),a.info(`Removed ${n}/`),{isError:!1,message:`Migration complete: ${l} hook${l===1?"":"s"} migrated from ${n}/ to ${t}/`}},"migrateFromHusky");var Bo=Object.defineProperty,Io=y((e,t)=>Bo(e,"name",{value:t,configurable:!0}),"e$1");const Po=Io((e=ge)=>{if(F("git",["config","--local","core.hooksPath"]).status!==0)return{isError:!1,message:"No custom hooks path configured"};const{status:t,stderr:a}=F("git",["config","--local","--unset","core.hooksPath"]);if(t==null)return{isError:!0,message:"git command not found"};if(t&&t!==5)return{isError:!0,message:String(a)};const n=u(e,"_");return k(n)&&ee(n,{force:!0,recursive:!0}),{isError:!1,message:""}},"uninstallHooks");var Ao=Object.defineProperty,ne=y((e,t)=>Ao(e,"name",{value:t,configurable:!0}),"n");const Lo=ne(e=>new Promise(t=>{const a=Ca({input:process.stdin,output:process.stdout});a.question(`${e} (y/N) `,n=>{a.close();const r=n.trim().toLowerCase();t(r==="y"||r==="yes")})}),"confirmPrompt"),To=ne(async(e,t)=>{const 
a=lt(),n=zt(a);if(n){if(t.info(`Existing husky installation found at ${n}/`),await Lo("Would you like to migrate your husky hooks to vis?")){const o=Gt(a,e,t);if(o.isError)throw new Error(o.message);o.message&&t.info(o.message);return}t.info("Aborting install. Remove husky first or run 'vis hook migrate' to migrate.");return}t.info(`Installing git hooks in ${e}/...`);const r=Dt(e);if(r.message){if(r.isError)throw new Error(r.message);t.info(r.message);return}k(u(a,e,"pre-commit"))||E(u(a,e,"pre-commit"),`#!/usr/bin/env sh
|
|
114
|
+
`,{mode:493}),t.info("Git hooks installed successfully.")},"executeInstall"),Uo=ne((e,t)=>{const a=lt(),n=Gt(a,e,t);if(n.isError)throw new Error(n.message);n.message&&t.info(n.message)},"executeMigrate"),Mo=ne((e,t)=>{t.info("Removing git hooks...");const a=Po(e);if(a.message){if(a.isError)throw new Error(a.message);t.info(a.message);return}t.info("Git hooks removed successfully.")},"executeUninstall"),Vo={argument:{description:"Action to perform: install, uninstall, or migrate",name:"action",type:String},description:"Manage git hooks for the workspace",env:[{defaultValue:void 0,description:"Set to 0 to disable git hooks, set to 2 for debug output",name:"VIS_GIT_HOOKS",type:String}],examples:[["vis hook install","Install git hooks in .vis-hooks/"],["vis hook uninstall","Remove git hooks and reset core.hooksPath"],["vis hook migrate","Migrate from husky to vis hooks"],["vis hook install --hooks-dir=.githooks","Install hooks in a custom directory"]],execute:ne(async({argument:e,logger:t,options:a})=>{const n=e[0]??"install",r=a["hooks-dir"]??ge;switch(n){case"install":{await To(r,t);break}case"migrate":{Uo(r,t);break}case"uninstall":{Mo(r,t);break}default:throw new Error(`Unknown action "${n}". Use "install", "uninstall", or "migrate".`)}},"execute"),name:"hook",options:[{defaultValue:ge,description:"Custom hooks directory",name:"hooks-dir",type:String}]};var Fo=Object.defineProperty,ke=y((e,t)=>Fo(e,"name",{value:t,configurable:!0}),"e");const ze=ke(e=>{try{const t=B(e,"utf8");return JSON.parse(t)}catch{return}},"readJsonFile"),qt=ke(e=>{if(!k(e))return!1;try{return JSON.parse(B(e,"utf8")),!0}catch{return!1}},"isJsonFile"),Do=/\n([ \t]+)"/,Kt=ke(e=>Do.exec(e)?.[1]?.length??4,"detectJsonIndent"),Jo=ke((e,t)=>{if(!k(e))return!1;const a=B(e,"utf8");let n;try{n=JSON.parse(a)}catch{return!1}const r=t(n);if(r===void 0)return!1;const o=Kt(a);return E(e,`${JSON.stringify(r,void 0,o)}
|
|
115
|
+
`,"utf8"),!0},"editJsonFile");var Wo=Object.defineProperty,H=y((e,t)=>Wo(e,"name",{value:t,configurable:!0}),"d$3");const _o=/\blint-staged\b/g,zo=H((e,t)=>{let a=!1;const n={...e};for(const[r,o]of Object.entries(n)){if(typeof o!="string")continue;let s=_t(o);s&&(s=s.replaceAll(_o,"vis staged").trim()||void 0),s!==o&&(s?n[r]=s:delete n[r],a=!0,t.rewrittenScriptCount+=1)}return{modified:a,scripts:n}},"rewriteScripts"),Yt=H((e,t,a,n)=>{const r=u(e,"package.json");k(r)&&Jo(r,o=>{let s=!1;for(const c of yo){const l=o.dependencies,p=o.devDependencies;l?.[c]&&(delete l[c],s=!0,n.removedPackageCount+=1),p?.[c]&&(delete p[c],s=!0,n.removedPackageCount+=1)}if(Object.keys(a).length>0)switch(t){case"bun":case"npm":{const c=o.overrides??{};o.overrides={...c,...a},s=!0;break}case"pnpm":{const c=o.pnpm??{},l=c.overrides??{};c.overrides={...l,...a},o.pnpm=c,s=!0;break}case"yarn":{const c=o.resolutions??{};o.resolutions={...c,...a},s=!0;break}}const i=o.scripts;if(i){const c=zo(i,n);c.modified&&(o.scripts=c.scripts,s=!0)}return s?o:void 0})},"rewritePackageJson"),Ho=H((e,t,a,n)=>{try{const{workspace:r}=ye(e);for(const o of Object.values(r.projects)){const s=u(e,o.root);Yt(s,t,a,n)}}catch{}},"migrateMonorepoPackages"),Go=H(e=>e.startsWith("- ")||e!==""&&!e.includes(":")&&!e.startsWith("#"),"isCatalogSectionEnd"),qo=H(e=>{let t=!1,a="";const n=new Set;for(const r of e){const o=r.trim();if(o==="catalog:"){t=!0;continue}if(!t)continue;if(Go(o))break;const s=o.includes(":")?o.split(":")[0]?.trim():void 0;s&&(n.add(s),a=a||r.slice(0,r.indexOf(o)))}return{entries:n,indent:a||" "}},"parseCatalogEntries"),Ko=H((e,t,a)=>{const n=[];let r=!1,o=!1;for(let s=0;s<e.length;s+=1){const i=e[s],c=i.trim();if(n.push(i),c==="catalog:"){o=!0;continue}if(o&&!r){const l=e[s+1]?.trim()??"";(!l.includes(":")||l.startsWith("- ")||!l||l==="catalog:")&&(n.push(...t),r=!0,o=!1)}}return r||(a.includes("catalog:")||n.push("catalog:"),n.push(...t)),n},"insertCatalogEntries"),Yo=H((e,t)=>{const 
a=u(e,"pnpm-workspace.yaml");if(!k(a)||Object.keys(t).length===0)return;const n=B(a,"utf8"),r=n.split(`
|
|
116
|
+
`),{entries:o,indent:s}=qo(r),i=[];for(const[l,p]of Object.entries(t))o.has(l)||i.push(`${s}${l}: "${p}"`);if(i.length===0)return;const c=Ko(r,i,n);E(a,c.join(`
|
|
117
|
+
`),"utf8")},"updatePnpmWorkspaceCatalog"),Xo=H((e,t,a,n,r,o)=>{const s=a.overrides??{};if(n.dryRun){r.info("[dry-run] Would rewrite package.json files (remove husky/lint-staged, rewrite scripts)"),Object.keys(s).length>0&&r.info(`[dry-run] Would add overrides: ${JSON.stringify(s)}`);return}Yt(e,t,s,o),r.info("Rewritten root package.json"),Ho(e,t,s,o),t==="pnpm"&&Yo(e,s)},"migrateDeps");var Zo=Object.defineProperty,He=y((e,t)=>Zo(e,"name",{value:t,configurable:!0}),"i$1");const Qo=He(()=>({gitHooksConfigured:!1,inlinedLintStagedConfigCount:0,manualSteps:[],mergedStagedConfigCount:0,removedConfigCount:0,removedPackageCount:0,rewrittenScriptCount:0,warnings:[]}),"createMigrationReport"),me=He((e,t)=>{!e||e.warnings.includes(t)||e.warnings.push(t)},"addMigrationWarning"),Xt=He((e,t)=>{!e||e.manualSteps.includes(t)||e.manualSteps.push(t)},"addManualStep");var es=Object.defineProperty,C=y((e,t)=>es(e,"name",{value:t,configurable:!0}),"r$1");const ts=/\bstaged\s*:/,nt=/(defineConfig\(\{)/,rt=/(export\s+default\s+\{)/;C(e=>_e.some(t=>k(u(e,t))),"hasStandaloneLintStagedConfig");const as=C(e=>{for(const a of Wt)if(k(u(e,a)))return!0;const t=u(e,".lintstagedrc");return k(t)&&!qt(t)},"hasUnsupportedLintStagedConfig"),ns=C(e=>{const t=pt(e);if(!t)return!1;const a=B(t,"utf8");return ts.test(a)},"hasStagedConfigInVisConfig"),rs=C(e=>{const t=u(e,"package.json");if(k(t)&&ze(t)?.["lint-staged"])return"package.json";for(const a of _e)if(k(u(e,a)))return a},"detectLintStagedConfig"),os=C(e=>ze(u(e,"package.json"))?.["lint-staged"],"extractLintStagedFromPackageJson"),ss=C(e=>ze(e),"parseLintStagedJsonFile"),Ce=C(e=>` staged: {
|
|
118
|
+
${Object.entries(e).map(([t,a])=>{const n=Array.isArray(a)?`[${a.map(r=>JSON.stringify(r)).join(", ")}]`:JSON.stringify(a);return` ${JSON.stringify(t)}: ${n}`}).join(`,
|
|
119
|
+
`)},
|
|
120
|
+
}`,"generateStagedConfigSnippet"),is=C((e,t,a)=>{const n=pt(e);if(n){const s=B(n,"utf8"),i=Ce(t);let c;return nt.test(s)?c=s.replace(nt,`$1
|
|
121
|
+
${i},`):rt.test(s)&&(c=s.replace(rt,`$1
|
|
122
|
+
${i},`)),c?(E(n,c,"utf8"),a.info(`Merged staged config into ${n}`),!0):(a.warn(`Could not auto-insert staged config into ${n} — please add manually`),!1)}const r=u(e,"vis.config.ts"),o=`import { defineConfig } from "@visulima/vis/config";
|
|
123
|
+
|
|
124
|
+
export default defineConfig({
|
|
125
|
+
${Ce(t)},
|
|
126
|
+
});
|
|
127
|
+
`;return E(r,o,"utf8"),a.info(`Created ${r} with staged config`),!0},"insertStagedIntoVisConfig"),cs=C(e=>{const t=u(e,"package.json"),a={configRemoved:!1,dependencyRemoved:!1};if(!k(t))return a;const n=B(t,"utf8"),r=JSON.parse(n);let o=!1;r["lint-staged"]&&(delete r["lint-staged"],o=!0,a.configRemoved=!0);const s=r.devDependencies,i=r.dependencies;if(s?.["lint-staged"]&&(delete s["lint-staged"],o=!0,a.dependencyRemoved=!0),i?.["lint-staged"]&&(delete i["lint-staged"],o=!0,a.dependencyRemoved=!0),o){const c=Kt(n);E(t,`${JSON.stringify(r,void 0,c)}
|
|
128
|
+
`,"utf8")}return a},"removeLintStagedFromPackageJson"),ls=C((e,t)=>{for(const a of _e){const n=u(e,a);k(n)&&(Sa(n),t.removedConfigCount+=1)}},"removeLintStagedConfigFiles"),ps=C((e,t)=>{const a=u(e,t,"pre-commit");if(!k(a))return!1;const n=B(a,"utf8");if(n.includes("vis staged"))return!1;const r=n.split(`
|
|
129
|
+
`);let o=!1;const s=[];for(const i of r){const c=i.trim();if(!o){let l=!1;for(const p of ho){const g=p.exec(c);if(g){const f=i.slice(0,i.length-i.trimStart().length),w=g[1]?.trim()??"",$=c.slice(g[0].length).trim(),x=[w,"vis staged",$].filter(Boolean);s.push(`${f}${x.join(" ")}`),o=!0,l=!0;break}}if(l)continue}s.push(i)}return o?(E(a,s.join(`
|
|
130
|
+
`)),!0):!1},"rewritePreCommitHook"),us=C((e,t,a)=>{if(t==="package.json")return os(e);const n=u(e,t);if(!Jt.includes(t)){me(a,`${t} cannot be auto-migrated — please add "staged" config to vis.config.ts manually`),Xt(a,`Manually convert ${t} to staged config in vis.config.ts`);return}if(t===".lintstagedrc"&&!qt(n)){me(a,".lintstagedrc is not JSON format — please migrate manually");return}return ss(n)},"extractConfig"),Zt=C((e,t)=>{const{configRemoved:a,dependencyRemoved:n}=cs(e);a&&(t.inlinedLintStagedConfigCount+=1),n&&(t.removedPackageCount+=1),ls(e,t)},"cleanupLintStagedArtifacts"),ds=C((e,t,a,n)=>{const r=[".vis-hooks",".husky"];for(const o of r)k(u(e,o))&&ps(e,o)&&(n.gitHooksConfigured=!0,t.silent||a.info(`Rewrote pre-commit hook in ${o}/ to use "vis staged"`))},"rewriteHooks"),gs=C((e,t,a,n,r)=>{is(e,t,n)&&(r.mergedStagedConfigCount+=1),Zt(e,r),ds(e,a,n,r)},"applyMigration"),fs=C((e,t,a,n)=>{const r=rs(e);if(!r)return t.silent||a.info("No lint-staged configuration found — nothing to migrate."),!1;if(as(e)&&(me(n,'Non-JSON lint-staged config found — please migrate to "staged" in vis.config.ts manually'),Xt(n,"Convert your lint-staged config file to JSON format or add staged config to vis.config.ts manually")),ns(e))return me(n,'vis.config.ts already has a "staged" config — skipping lint-staged merge'),t.silent||a.warn('vis.config.ts already has a "staged" config — skipping'),t.dryRun||Zt(e,n),!0;const o=us(e,r,n);return!o||Object.keys(o).length===0?(t.silent||a.warn("lint-staged config is empty — skipping"),!1):t.dryRun?(t.silent||(a.info("[dry-run] Would insert staged config into vis.config.ts:"),a.info(Ce(o))),!0):(gs(e,o,t,a,n),!0)},"migrateLintStaged");var ms=Object.defineProperty,Qt=y((e,t)=>ms(e,"name",{value:t,configurable:!0}),"g");const hs=Qt((e,t)=>{t.info("── Migration Summary ──");const a=[["Staged configs merged into vis.config.ts",e.mergedStagedConfigCount],["Lint-staged configs inlined",e.inlinedLintStagedConfigCount],["Config files 
removed",e.removedConfigCount],["Packages removed",e.removedPackageCount],["Scripts rewritten",e.rewrittenScriptCount]];for(const[n,r]of a)r>0&&t.info(` ${n}: ${String(r)}`);if(e.gitHooksConfigured&&t.info(" Pre-commit hook updated to use vis staged"),e.warnings.length>0){t.info(""),t.warn("Warnings:");for(const n of e.warnings)t.warn(` - ${n}`)}if(e.manualSteps.length>0){t.info(""),t.info("Manual steps required:");for(const n of e.manualSteps)t.info(` - ${n}`)}},"printSummary"),ys={argument:{description:"Migration type: all, deps, lint-staged",name:"type",type:String},description:"Migrate from other tools (husky, lint-staged) to vis",examples:[["vis migrate","Run all migrations"],["vis migrate deps","Migrate package dependencies and scripts"],["vis migrate lint-staged","Migrate lint-staged config to vis.config.ts"],["vis migrate --dry-run","Preview changes without applying"]],execute:Qt(async({argument:e,logger:t,options:a,visConfig:n,workspaceRoot:r})=>{const o=e[0]??"all",s=!!a["dry-run"],i=r??process.cwd(),c=n??{},l=Ht(i),p=Qo();if(!["all","deps","lint-staged"].includes(o))throw new Error(`Unknown migration type "${o}". Use "all", "deps", or "lint-staged".`);s&&t.info(`Running in dry-run mode — no changes will be made.
|
|
131
|
+
`),(o==="all"||o==="deps")&&(t.info("── Migrating dependencies and scripts ──"),Xo(i,l,c,{dryRun:s},t,p),t.info("")),(o==="all"||o==="lint-staged")&&(t.info("── Migrating lint-staged ──"),fs(i,{dryRun:s},t,p),t.info("")),hs(p,t)},"execute"),name:"migrate",options:[{defaultValue:!1,description:"Preview changes without applying",name:"dry-run",type:Boolean}]};var vs=Object.defineProperty,ea=y((e,t)=>vs(e,"name",{value:t,configurable:!0}),"f$1");const ks=ea(e=>async(t,a)=>{const n=a.cwd??t.projectRoot??e,r=n.startsWith("/")?n:`${e}/${n}`,o=t.overrides.command;if(!o)return{code:0,terminalOutput:`No command configured for ${t.target.project}:${t.target.target}`};try{return{code:0,terminalOutput:Pe(o,{cwd:r,encoding:"utf8",env:{...process.env,...a.env},stdio:"pipe"})}}catch(s){const i=s;return{code:i.status??1,terminalOutput:(i.stdout??"")+(i.stderr??"")}}},"createShellExecutor"),$s={argument:{description:"The target to run (e.g., build, test, lint)",name:"target",type:String},description:"Run a target across workspace projects",examples:[["vis run build","Run build on all projects"],["vis run test --projects=pkg-a,pkg-b","Run test on specific projects"],["vis run build --parallel=5","Run build with 5 parallel tasks"],["vis run build --no-cache","Run build without caching"]],execute:ea(async({argument:e,logger:t,options:a,visConfig:n,workspaceRoot:r})=>{const o=e[0];if(!o)throw new Error("Missing target. Usage: vis run <target>");if(!r)throw new Error("Could not determine workspace root. 
Run this command inside a monorepo.");const s=r,{config:i,workspace:c}=ye(s,n),l=Ae(s,c);let p=Object.keys(c.projects);if(a.projects){const R=new Set(a.projects.split(",").map(U=>U.trim()));if(p=p.filter(U=>R.has(U)),p.length===0)throw new Error(`No matching projects found for: ${String(a.projects)}`)}const g=p.filter(R=>c.projects[R]?.targets?.[o]!==void 0);if(g.length===0){t.info(`No projects have the "${o}" target.`);return}const f=g.map(R=>{const U=c.projects[R],re=U?.targets?.[o],aa={project:R,target:o},na=`${R}:${o}`;return{cache:re?.cache??i.targetDefaults?.[o]?.cache,id:na,outputs:re?.outputs??i.targetDefaults?.[o]?.outputs??[],overrides:{command:re?.command},parallelism:re?.parallelism??i.targetDefaults?.[o]?.parallelism,projectRoot:U?.root,target:aa}}),w=da(f,{projectGraph:l,targetDefaults:i.targetDefaults,workspace:c}),$=Object.keys(w.tasks).length,x=Date.now();t.info(`vis run ${o} (${$} task${$===1?"":"s"})`);const G={cacheDirectory:a.cacheDir,dryRun:a.dryRun,parallel:a.parallel??3,skipNxCache:!a.cache,summarize:a.summarize,...i.taskRunnerOptions},j=new ga,T=ks(s),Q=await fa(f,G,{lifeCycle:j,projectGraph:l,taskExecutor:T,taskGraph:w,workspaceRoot:s});if(a.summarize){const R=ma(Q,w,x);await ha(R,s)}let O=!1;for(const[,R]of Q)R.status==="failure"&&(O=!0);if(O)throw new Error("Some tasks failed.");t.info("All tasks completed successfully.")},"execute"),name:"run",options:[{alias:"p",description:"Comma-separated list of projects to run",name:"projects",type:String},{defaultValue:3,description:"Maximum number of parallel tasks",name:"parallel",type:Number},{defaultValue:!0,description:"Enable caching (use --no-cache to disable)",name:"cache",type:Boolean},{description:"Custom cache directory",name:"cache-dir",type:String},{defaultValue:!1,description:"Show what would run without executing",name:"dry-run",type:Boolean},{defaultValue:!1,description:"Generate a run summary after execution",name:"summarize",type:Boolean}]};var 
ws=Object.defineProperty,ta=y((e,t)=>ws(e,"name",{value:t,configurable:!0}),"s$1");const bs=ta((e,t)=>{const a=[["allow-empty","allowEmpty"],["continue-on-error","continueOnError"],["cwd","cwd"],["debug","debug"],["diff","diff"],["diff-filter","diffFilter"],["fail-on-changes","failOnChanges"],["hide-partially-staged","hidePartiallyStaged"],["hide-unstaged","hideUnstaged"],["quiet","quiet"],["relative","relative"],["revert","revert"],["stash","stash"],["verbose","verbose"]];for(const[n,r]of a)e[n]!==void 0&&(t[r]=e[n]);if(e.concurrent!==void 0){const n=e.concurrent;if(n==="true")t.concurrent=!0;else if(n==="false")t.concurrent=!1;else{const r=Number(n);t.concurrent=Number.isNaN(r)||n===""?!0:r}}},"mapOptions"),Ss={description:"Run linters on staged files using config from vis.config.ts",examples:[["vis staged","Run staged linters"],["vis staged --verbose","Run with verbose output"],["vis staged --no-stash","Run without backup stash"],["vis staged --diff HEAD~1","Run against a specific diff"]],execute:ta(async({options:e,visConfig:t})=>{const a=(t??{}).staged;if(!a)throw new Error(`No "staged" config found in vis.config.ts. Please add a staged config:
|
|
132
|
+
|
|
133
|
+
// vis.config.ts
|
|
134
|
+
import { defineConfig } from "@visulima/vis/config";
|
|
135
|
+
|
|
136
|
+
export default defineConfig({
|
|
137
|
+
staged: { '*': 'vis check --fix' },
|
|
138
|
+
});`);let n;try{n=(await import("lint-staged")).default}catch{throw new Error("lint-staged is required but not installed. Run: pnpm add -D lint-staged")}const r={config:a};bs(e,r);const o=await n(r);process.exit(o?0:1)},"execute"),name:"staged",options:[{defaultValue:!1,description:"Allow empty commits when tasks revert all staged changes",name:"allow-empty",type:Boolean},{description:"Number of concurrent tasks or false for serial",name:"concurrent",type:String},{defaultValue:!1,description:"Run all tasks to completion even if one fails",name:"continue-on-error",type:Boolean},{description:"Working directory to run all tasks in",name:"cwd",type:String},{defaultValue:!1,description:"Enable debug output",name:"debug",type:Boolean},{description:"Override the default --staged flag of git diff",name:"diff",type:String},{description:"Override the default diff-filter",name:"diff-filter",type:String},{defaultValue:!1,description:"Fail with exit code 1 when tasks modify tracked files",name:"fail-on-changes",type:Boolean},{defaultValue:!1,description:"Hide unstaged changes from partially staged files",name:"hide-partially-staged",type:Boolean},{defaultValue:!1,description:"Hide all unstaged changes before running tasks",name:"hide-unstaged",type:Boolean},{defaultValue:!1,description:"Suppress console output",name:"quiet",type:Boolean},{defaultValue:!1,description:"Pass filepaths relative to cwd to tasks",name:"relative",type:Boolean},{defaultValue:!1,description:"Revert to original state in case of errors",name:"revert",type:Boolean},{defaultValue:!0,description:"Enable backup stash",name:"stash",type:Boolean},{defaultValue:!1,description:"Show task output even when tasks succeed",name:"verbose",type:Boolean}]};var js=Object.defineProperty,X=y((e,t)=>js(e,"name",{value:t,configurable:!0}),"s");const Cs=X(e=>{const t=[];for(const a of e.filters)t.push("--filter",a);return 
e.workspaceRoot&&t.push("--filter","."),t.push("update"),e.latest&&t.push("--latest"),e.recursive&&t.push("--recursive"),e.interactive&&t.push("--interactive"),e.dev&&t.push("--dev"),e.prod&&t.push("--prod"),e.noOptional&&t.push("--no-optional"),e.noSave&&t.push("--no-save"),t.push(...e.packages),{args:t,bin:"pnpm"}},"resolvePnpm"),xs=X(e=>{const t=[];return e.filters.length>0&&t.push("workspace",e.filters[0]),t.push("upgrade"),e.latest&&t.push("--latest"),t.push(...e.packages),{args:t,bin:"yarn"}},"resolveYarnV1"),Es=X(e=>{const t=[];if(e.filters.length>0||e.recursive){t.push("workspaces","foreach","--all");for(const a of e.filters)t.push("--include",a)}return t.push("up"),e.interactive&&t.push("--interactive"),t.push(...e.packages),{args:t,bin:"yarn"}},"resolveYarnBerry"),Rs=X((e,t)=>{const a=["update"];e.latest&&t.push("npm does not support --latest flag. Packages will be updated within their semver range."),e.interactive&&t.push("npm does not support --interactive mode.");for(const n of e.filters)a.push("--workspace",n);return e.recursive&&a.push("--workspaces"),e.workspaceRoot&&a.push("--include-workspace-root"),e.dev&&a.push("--dev"),e.prod&&a.push("--production"),e.noOptional&&a.push("--no-optional"),e.noSave&&a.push("--no-save"),a.push(...e.packages),{args:a,bin:"npm"}},"resolveNpm"),Ns=X(e=>{const t=["update"];e.latest&&t.push("--latest");for(const a of e.filters)t.push("--filter",a);return t.push(...e.packages),{args:t,bin:"bun"}},"resolveBun"),Os=X((e,t,a)=>{const n=[];if(a.global)return{command:{args:["update","--global",...a.packages],bin:"npm"},warnings:n};let r;switch(e){case"bun":{r=Ns(a);break}case"npm":{r=Rs(a,n);break}case"pnpm":{r=Cs(a);break}case"yarn":{r=t.startsWith("1.")?xs(a):Es(a);break}default:{const o=e;throw new Error(`Unsupported package manager: ${String(o)}`)}}return{command:r,warnings:n}},"resolveUpdateCommand");var Bs=Object.defineProperty,Z=y((e,t)=>Bs(e,"name",{value:t,configurable:!0}),"m");const Is=Z((e,t,a)=>{const 
n=e.target??t.target??"latest";if(!["latest","minor","patch"].includes(n))throw new Error(`Invalid target "${n}". Use: latest, minor, or patch.`);return{exclude:[...V(e.exclude),...V(t.exclude)],include:[...V(e.include),...V(t.include),...a],includePrerelease:e.prerelease||t.prerelease||!1,security:e.security||e.ai||t.security||!1,target:n}},"buildCatalogCheckOptions"),ot=Z((e,t,a,n)=>{a==="json"?process.stdout.write(`${Vr({failed:t,outdated:e})}
|
|
139
|
+
`):a==="minimal"?process.stdout.write(`${Ut(e)}
|
|
140
|
+
`):(Mt(e,n),n.info(Vt(e)))},"writeFormattedOutput"),Ps=Z(async(e,t,a,n,r)=>{const o=qr(e,a,t),s=t==="pnpm"?"pnpm-workspace.yaml":"package.json";if(r.info(`
|
|
141
|
+
Updated ${s}`),o&&r.info(`Backup saved to ${o}`),n.changelog){r.info(`
|
|
142
|
+
Fetching changelogs...`);const i=await Xr(a);for(const c of i){const l=c.releaseUrl??c.repoUrl??c.npmUrl;r.info(` ${c.packageName}: ${l}`)}}if(n.install??!0){const i={bun:"bun install",npm:"npm install",pnpm:"pnpm install",yarn:"yarn install"}[t]??`${t} install`;r.info(`Running ${i}...
|
|
143
|
+
`);try{Pe(i,{cwd:e,env:process.env,stdio:"inherit"})}catch{r.warn(`${i} failed. You may need to run it manually.`)}}},"applyCatalogAndInstall"),As=Z(async(e,t,a,n,r,o)=>{const s=a.update??{},i=Pt(e),c=De(e,t,{dev:n.dev,prod:n.prod});if(c.size===0){o.info("No catalogs found.");return}const l=Is(n,s,r);let p=0;for(const j of c.values())p+=j.size;o.info(`Checking ${String(p)} catalog dependencies...
|
|
144
|
+
`);const{failed:g,outdated:f}=await Tt(c,l,i);if(g.length>0&&o.warn(`Failed to fetch: ${g.join(", ")}`),f.length===0){o.info("All catalog dependencies are up to date.");return}const w=n.format??s.format??"table";let $;if(n.ai){const j=Ue(n["ai-type"]??"impact");$=await Me(f,o,a.ai,j)}if(n["dry-run"]){if(w==="json"){const j={failed:g,outdated:f};$&&(j.aiAnalysis=$),process.stdout.write(`${JSON.stringify(j,void 0,2)}
|
|
145
|
+
`)}else o.info(`Would update ${String(f.length)} dependencies:
|
|
146
|
+
`),ot(f,g,w,o),$&&(o.info(""),o.info(de($)));return}$&&w!=="json"&&(o.info(de($)),o.info(""));let x=f;if(n.interactive&&(x=await Kr(f),x.length===0)){o.info("No updates selected.");return}o.info(`Updating ${String(x.length)} catalog dependencies...
|
|
147
|
+
`),ot(x,[],w,o);const G={...n,install:n.install??s.install};await Ps(e,t,x,G,o)},"executeCatalogUpdate"),Ls=Z((e,t,a,n,r,o)=>{const s={dev:n.dev,filters:V(n.filter),global:n.global,interactive:n.interactive,latest:n.latest||n.target==="latest",noOptional:n["no-optional"],noSave:n["no-save"],packages:r,prod:n.prod,recursive:n.recursive,workspaceRoot:n["workspace-root"]},{command:i,warnings:c}=Os(t,a,s);for(const p of c)o.warn(p);const l=`${i.bin} ${i.args.join(" ")}`.trim();if(n["dry-run"]){o.info(`Would run: ${l}`);return}o.info(`Running: ${l}`);try{Pe(l,{cwd:e,env:process.env,stdio:"inherit"})}catch(p){const g=p.status??1;throw new Error(`Update command failed with exit code ${String(g)}`,{cause:p})}},"executePmWrapper"),Ts={alias:"up",argument:{description:"Packages to update (updates all if omitted)",name:"packages",type:String},description:"Update packages to their latest versions",examples:[["vis update react","Update react within semver range"],["vis up react -L","Update react to latest"],["vis update -i","Interactive mode"],["vis update --filter app","Update in specific workspace"],["vis update -r","Update in all workspaces"],["vis update --target minor","Only apply minor/patch updates (catalog mode)"],["vis update --dry-run","Preview changes without applying"],["vis update --exclude '@types/*'","Exclude packages by pattern"],["vis update --changelog","Show changelog links after updating"],["vis update --rollback","Restore catalog from last backup"],["vis update --ai","Run AI analysis before applying updates"]],execute:Z(async({argument:e,logger:t,options:a,visConfig:n,workspaceRoot:r})=>{if(!r)throw new Error("Could not determine workspace root. Run this command inside a monorepo.");const o=r,{packageManager:s}=xe(o);if(a.rollback){if(!Mr(o,s)){t.info("No backup found. 
Run 'vis update' first to create a backup.");return}if(Ur(o,s))t.info("Restored from backup.");else throw new Error("Failed to restore from backup.");return}if(!a["no-catalog"]&&$r(o,s))await As(o,s,n??{},a,e,t);else{const i=ca(s);Ls(o,s,i,a,e,t)}},"execute"),name:"update",options:[{alias:"L",defaultValue:!1,description:"Update to latest version (ignore semver range)",name:"latest",type:Boolean},{alias:"t",description:"Update target: latest, minor, or patch (default: latest, catalog mode)",name:"target",type:String},{alias:"d",defaultValue:!1,description:"Preview changes without applying",name:"dry-run",type:Boolean},{alias:"g",defaultValue:!1,description:"Update global packages",name:"global",type:Boolean},{alias:"r",defaultValue:!1,description:"Update recursively in all workspace packages",name:"recursive",type:Boolean},{description:"Filter packages in monorepo",name:"filter",type:String},{alias:"w",defaultValue:!1,description:"Include workspace root",name:"workspace-root",type:Boolean},{alias:"D",defaultValue:!1,description:"Update only devDependencies",name:"dev",type:Boolean},{alias:"P",defaultValue:!1,description:"Update only dependencies",name:"prod",type:Boolean},{alias:"i",defaultValue:!1,description:"Interactive mode",name:"interactive",type:Boolean},{defaultValue:!1,description:"Don't update optionalDependencies",name:"no-optional",type:Boolean},{defaultValue:!1,description:"Update lockfile only",name:"no-save",type:Boolean},{description:"Glob pattern to include packages (repeatable, catalog mode)",lazyMultiple:!0,name:"include",type:String},{description:"Glob pattern to exclude packages (repeatable, catalog mode)",lazyMultiple:!0,name:"exclude",type:String},{defaultValue:!1,description:"Include prerelease versions (catalog mode)",name:"prerelease",type:Boolean},{defaultValue:!1,description:"Check for known security vulnerabilities (via OSV.dev)",name:"security",type:Boolean},{defaultValue:!1,description:"Skip catalog mode, use package manager 
directly",name:"no-catalog",type:Boolean},{description:"Output format: table, json, or minimal (default: table)",name:"format",type:String},{defaultValue:!1,description:"Show changelog URLs for updated packages",name:"changelog",type:Boolean},{description:"Run install after catalog update, --no-install to skip (default: true)",name:"install",type:Boolean},{defaultValue:!1,description:"Restore catalog file from the last backup",name:"rollback",type:Boolean},{defaultValue:!1,description:"Run AI analysis on outdated packages before updating (catalog mode)",name:"ai",type:Boolean},{description:"AI analysis type: impact, security, compatibility, or recommend (default: impact)",name:"ai-type",type:String}]};var Us=Object.defineProperty,Ms=y((e,t)=>Us(e,"name",{value:t,configurable:!0}),"r");try{require("node:module")?.enableCompileCache?.()||require("v8-compile-cache")}catch{}const N=ia("vis",{packageName:"vis",packageVersion:Ra.version});N.addPlugin({beforeCommand:Ms(async e=>{try{const t=la(process.cwd()).path;e.workspaceRoot=t,e.visConfig=await xa(t)}catch(t){t instanceof Error&&!t.message.includes("monorepo root")&&e.logger.warn(`Failed to load vis config: ${t.message}`),e.visConfig={}}},"beforeCommand"),name:"config-loader"});N.addCommand($s);N.addCommand(co);N.addCommand(Ja);N.addCommand(Vo);N.addCommand(Ts);N.addCommand(ro);N.addCommand(Hn);N.addCommand(to);N.addCommand(ys);N.addCommand(Ss);await N.run();
|
|
@@ -0,0 +1,110 @@
|
|
|
1
|
+
type UpdateTarget = "latest" | "minor" | "patch";
|
|
2
|
+
interface ParsedVersion {
|
|
3
|
+
major: number;
|
|
4
|
+
minor: number;
|
|
5
|
+
patch: number;
|
|
6
|
+
prerelease: string;
|
|
7
|
+
}
|
|
8
|
+
interface SecurityVulnerability {
|
|
9
|
+
cvssScore?: number;
|
|
10
|
+
fixedVersions: string[];
|
|
11
|
+
id: string;
|
|
12
|
+
severity: "CRITICAL" | "HIGH" | "LOW" | "MODERATE" | "UNKNOWN";
|
|
13
|
+
summary: string;
|
|
14
|
+
}
|
|
15
|
+
interface OutdatedEntry {
|
|
16
|
+
catalogName: string;
|
|
17
|
+
currentRange: string;
|
|
18
|
+
newRange: string;
|
|
19
|
+
packageName: string;
|
|
20
|
+
targetVersion: string;
|
|
21
|
+
updateType: "major" | "minor" | "patch";
|
|
22
|
+
vulnerabilities?: SecurityVulnerability[];
|
|
23
|
+
}
|
|
24
|
+
interface CatalogCheckOptions {
|
|
25
|
+
exclude: string[];
|
|
26
|
+
include: string[];
|
|
27
|
+
includePrerelease: boolean;
|
|
28
|
+
security?: boolean;
|
|
29
|
+
target: UpdateTarget;
|
|
30
|
+
}
|
|
31
|
+
interface ReadCatalogOptions {
|
|
32
|
+
dev?: boolean;
|
|
33
|
+
prod?: boolean;
|
|
34
|
+
}
|
|
35
|
+
declare const parseVersion: (input: string) => ParsedVersion | undefined;
|
|
36
|
+
declare const extractPrefix: (range: string) => string;
|
|
37
|
+
declare const getUpdateType: (current: ParsedVersion, target: ParsedVersion) => "major" | "minor" | "none" | "patch";
|
|
38
|
+
declare const isNewer: (current: ParsedVersion, target: ParsedVersion) => boolean;
|
|
39
|
+
declare const matchesPattern: (name: string, pattern: string) => boolean;
|
|
40
|
+
declare const matchesFilters: (name: string, include: string[], exclude: string[]) => boolean;
|
|
41
|
+
declare const parseCatalogsFromYaml: (content: string) => Map<string, Map<string, string>>;
|
|
42
|
+
interface BunPackageJson {
|
|
43
|
+
workspaces?: {
|
|
44
|
+
catalog?: Record<string, string>;
|
|
45
|
+
catalogs?: Record<string, Record<string, string>>;
|
|
46
|
+
packages?: string[];
|
|
47
|
+
};
|
|
48
|
+
}
|
|
49
|
+
declare const parseBunCatalogs: (pkg: BunPackageJson) => Map<string, Map<string, string>>;
|
|
50
|
+
declare const parseCompositeCatalogName: (name: string) => {
|
|
51
|
+
depType: string;
|
|
52
|
+
relativePath: string;
|
|
53
|
+
} | undefined;
|
|
54
|
+
declare const readPackageJsonDeps: (workspaceRoot: string, options?: ReadCatalogOptions) => Map<string, Map<string, string>>;
|
|
55
|
+
declare const hasPackageJsonDeps: (workspaceRoot: string) => boolean;
|
|
56
|
+
declare const applyPackageJsonUpdates: (workspaceRoot: string, updates: OutdatedEntry[]) => void;
|
|
57
|
+
type CatalogProvider = "bun" | "pnpm";
|
|
58
|
+
declare const hasCatalogs: (workspaceRoot: string, packageManager?: string) => boolean;
|
|
59
|
+
declare const readCatalogs: (workspaceRoot: string, packageManager?: string, options?: ReadCatalogOptions) => Map<string, Map<string, string>>;
|
|
60
|
+
interface NpmrcConfig {
|
|
61
|
+
authTokens: Map<string, string>;
|
|
62
|
+
defaultRegistry: string;
|
|
63
|
+
registries: Map<string, string>;
|
|
64
|
+
}
|
|
65
|
+
declare const parseNpmrc: (content: string) => NpmrcConfig;
|
|
66
|
+
declare const loadNpmrc: (workspaceRoot: string) => NpmrcConfig;
|
|
67
|
+
declare const getRegistryForPackage: (packageName: string, config: NpmrcConfig) => {
|
|
68
|
+
token?: string;
|
|
69
|
+
url: string;
|
|
70
|
+
};
|
|
71
|
+
interface RegistryVersionInfo {
|
|
72
|
+
latest: string;
|
|
73
|
+
versions: string[];
|
|
74
|
+
}
|
|
75
|
+
declare const fetchPackageVersions: (packageName: string, registryConfig?: {
|
|
76
|
+
authToken?: string;
|
|
77
|
+
url: string;
|
|
78
|
+
}, timeoutMs?: number) => Promise<RegistryVersionInfo>;
|
|
79
|
+
declare const fetchVulnerabilities: (packages: {
|
|
80
|
+
name: string;
|
|
81
|
+
version: string;
|
|
82
|
+
}[], timeoutMs?: number) => Promise<Map<string, SecurityVulnerability[]>>;
|
|
83
|
+
declare const findTargetVersion: (versions: string[], latest: string, currentRange: string, target: UpdateTarget, includePrerelease: boolean) => string | undefined;
|
|
84
|
+
interface CheckOutdatedResult {
|
|
85
|
+
failed: string[];
|
|
86
|
+
outdated: OutdatedEntry[];
|
|
87
|
+
}
|
|
88
|
+
declare const checkOutdated: (catalogs: Map<string, Map<string, string>>, options: CatalogCheckOptions, npmrcConfig?: NpmrcConfig, onProgress?: (current: number, total: number) => void) => Promise<CheckOutdatedResult>;
|
|
89
|
+
declare const createBackup: (workspaceRoot: string, packageManager?: string, updates?: OutdatedEntry[]) => string | undefined;
|
|
90
|
+
declare const restoreFromBackup: (workspaceRoot: string, packageManager?: string) => boolean;
|
|
91
|
+
declare const hasBackup: (workspaceRoot: string, packageManager?: string) => boolean;
|
|
92
|
+
type OutputFormat = "json" | "minimal" | "table";
|
|
93
|
+
declare const formatOutdatedJson: (result: CheckOutdatedResult) => string;
|
|
94
|
+
declare const formatOutdatedMinimal: (outdated: OutdatedEntry[]) => string;
|
|
95
|
+
declare const toFilterArray: (value: string | string[] | undefined) => string[];
|
|
96
|
+
declare const groupByCatalog: (entries: OutdatedEntry[]) => Map<string, OutdatedEntry[]>;
|
|
97
|
+
declare const formatOutdatedTable: (outdated: OutdatedEntry[], logger: Console) => void;
|
|
98
|
+
declare const formatSummary: (outdated: OutdatedEntry[]) => string;
|
|
99
|
+
declare const detectJsonIndent: (content: string) => number | string;
|
|
100
|
+
declare const applyCatalogUpdates: (workspaceRoot: string, updates: OutdatedEntry[], packageManager?: string, backup?: boolean) => string | undefined;
|
|
101
|
+
declare const promptPackageSelection: (outdated: OutdatedEntry[]) => Promise<OutdatedEntry[]>;
|
|
102
|
+
interface ChangelogInfo {
|
|
103
|
+
npmUrl: string;
|
|
104
|
+
packageName: string;
|
|
105
|
+
releaseUrl?: string;
|
|
106
|
+
repoUrl?: string;
|
|
107
|
+
}
|
|
108
|
+
declare const fetchChangelogInfo: (packages: OutdatedEntry[], timeoutMs?: number) => Promise<ChangelogInfo[]>;
|
|
109
|
+
export type { CatalogCheckOptions, CatalogProvider, ChangelogInfo, CheckOutdatedResult, NpmrcConfig, OutdatedEntry, OutputFormat, ParsedVersion, ReadCatalogOptions, SecurityVulnerability, UpdateTarget, };
|
|
110
|
+
export { applyCatalogUpdates, applyPackageJsonUpdates, checkOutdated, createBackup, detectJsonIndent, extractPrefix, fetchChangelogInfo, fetchPackageVersions, fetchVulnerabilities, findTargetVersion, formatOutdatedJson, formatOutdatedMinimal, formatOutdatedTable, formatSummary, getRegistryForPackage, getUpdateType, groupByCatalog, hasBackup, hasCatalogs, hasPackageJsonDeps, isNewer, loadNpmrc, matchesFilters, matchesPattern, parseBunCatalogs, parseCatalogsFromYaml, parseCompositeCatalogName, parseNpmrc, parseVersion, promptPackageSelection, readCatalogs, readPackageJsonDeps, restoreFromBackup, toFilterArray, };
|
|
@@ -0,0 +1,8 @@
|
|
|
1
|
+
declare const HOOKS: readonly ["pre-commit", "pre-merge-commit", "prepare-commit-msg", "commit-msg", "post-commit", "applypatch-msg", "pre-applypatch", "post-applypatch", "pre-rebase", "post-rewrite", "post-checkout", "post-merge", "pre-push", "pre-auto-gc"];
|
|
2
|
+
declare const DEFAULT_HOOKS_DIRECTORY = ".vis-hooks";
|
|
3
|
+
interface InstallResult {
|
|
4
|
+
isError: boolean;
|
|
5
|
+
message: string;
|
|
6
|
+
}
|
|
7
|
+
export type { InstallResult };
|
|
8
|
+
export { DEFAULT_HOOKS_DIRECTORY, HOOKS };
|
|
@@ -0,0 +1,7 @@
|
|
|
1
|
+
import type { InstallResult } from "./constants.d.ts";
|
|
2
|
+
/**
|
|
3
|
+
* Generates the shell script that dispatches to user-defined hooks.
|
|
4
|
+
*/
|
|
5
|
+
declare const hookScript: (directory: string) => string;
|
|
6
|
+
declare const installHooks: (directory?: string) => InstallResult;
|
|
7
|
+
export { hookScript, installHooks };
|
|
@@ -0,0 +1,27 @@
|
|
|
1
|
+
import type { PackageManagerType } from "../migrate/types.d.ts";
|
|
2
|
+
import type { InstallResult } from "./constants.d.ts";
|
|
3
|
+
/**
|
|
4
|
+
* Detects which husky directory is in use, if any.
|
|
5
|
+
*/
|
|
6
|
+
declare const detectHuskyDirectory: (root: string) => string | undefined;
|
|
7
|
+
/**
|
|
8
|
+
* Transforms a husky hook script for vis.
|
|
9
|
+
* Removes the common.sh sourcing line since vis handles PATH setup in its dispatcher.
|
|
10
|
+
*/
|
|
11
|
+
declare const transformHookScript: (content: string) => string;
|
|
12
|
+
/**
|
|
13
|
+
* Detects the package manager used in the project.
|
|
14
|
+
*/
|
|
15
|
+
declare const detectPackageManager: (root: string) => PackageManagerType;
|
|
16
|
+
/**
|
|
17
|
+
* Cleans husky references from package.json scripts.
|
|
18
|
+
*/
|
|
19
|
+
declare const cleanPackageJsonScripts: (root: string) => {
|
|
20
|
+
modified: boolean;
|
|
21
|
+
removedScriptReferences: string[];
|
|
22
|
+
};
|
|
23
|
+
/**
|
|
24
|
+
* Migrates from husky to vis hooks.
|
|
25
|
+
*/
|
|
26
|
+
declare const migrateFromHusky: (root: string, hooksDirectory: string, logger: Console) => InstallResult;
|
|
27
|
+
export { cleanPackageJsonScripts, detectHuskyDirectory, detectPackageManager, migrateFromHusky, transformHookScript };
|
|
@@ -0,0 +1,12 @@
|
|
|
1
|
+
declare const LINT_STAGED_JSON_CONFIG_FILES: readonly [".lintstagedrc.json", ".lintstagedrc"];
|
|
2
|
+
declare const LINT_STAGED_OTHER_CONFIG_FILES: readonly [".lintstagedrc.yaml", ".lintstagedrc.yml", ".lintstagedrc.mjs", "lint-staged.config.mjs", ".lintstagedrc.cjs", "lint-staged.config.cjs", ".lintstagedrc.js", "lint-staged.config.js", ".lintstagedrc.ts", "lint-staged.config.ts", ".lintstagedrc.mts", "lint-staged.config.mts", ".lintstagedrc.cts", "lint-staged.config.cts"];
|
|
3
|
+
declare const LINT_STAGED_ALL_CONFIG_FILES: ReadonlyArray<string>;
|
|
4
|
+
declare const STALE_LINT_STAGED_PATTERNS: ReadonlyArray<RegExp>;
|
|
5
|
+
declare const REPLACED_PACKAGES: readonly ["husky", "lint-staged"];
|
|
6
|
+
declare const HUSKY_SCRIPT_PATTERNS: ReadonlyArray<RegExp>;
|
|
7
|
+
/**
|
|
8
|
+
* Remove husky references from a single script value.
|
|
9
|
+
* Returns the cleaned script, or undefined if the entire script should be removed.
|
|
10
|
+
*/
|
|
11
|
+
declare const cleanHuskyFromScript: (scriptValue: string) => string | undefined;
|
|
12
|
+
export { cleanHuskyFromScript, HUSKY_SCRIPT_PATTERNS, LINT_STAGED_ALL_CONFIG_FILES, LINT_STAGED_JSON_CONFIG_FILES, LINT_STAGED_OTHER_CONFIG_FILES, REPLACED_PACKAGES, STALE_LINT_STAGED_PATTERNS, };
|
|
@@ -0,0 +1,32 @@
|
|
|
1
|
+
import type { VisConfig } from "../../workspace.d.ts";
|
|
2
|
+
import type { MigrationReport, PackageManagerType } from "./types.d.ts";
|
|
3
|
+
interface MigrateLogger {
|
|
4
|
+
info: (message: string) => void;
|
|
5
|
+
warn: (message: string) => void;
|
|
6
|
+
}
|
|
7
|
+
/**
|
|
8
|
+
* Rewrite scripts in package.json to replace husky/lint-staged references.
|
|
9
|
+
*/
|
|
10
|
+
declare const rewriteScripts: (scripts: Record<string, string>, report: MigrationReport) => {
|
|
11
|
+
modified: boolean;
|
|
12
|
+
scripts: Record<string, string>;
|
|
13
|
+
};
|
|
14
|
+
/**
|
|
15
|
+
* Rewrite a single package.json: remove replaced packages, add overrides, rewrite scripts.
|
|
16
|
+
*/
|
|
17
|
+
declare const rewritePackageJson: (root: string, packageManager: PackageManagerType, overrides: Record<string, string>, report: MigrationReport) => void;
|
|
18
|
+
/**
|
|
19
|
+
* Iterate over all workspace packages and rewrite their package.json files.
|
|
20
|
+
*/
|
|
21
|
+
declare const migrateMonorepoPackages: (root: string, packageManager: PackageManagerType, overrides: Record<string, string>, report: MigrationReport) => void;
|
|
22
|
+
/**
|
|
23
|
+
* Update pnpm-workspace.yaml catalog with override entries.
|
|
24
|
+
*/
|
|
25
|
+
declare const updatePnpmWorkspaceCatalog: (root: string, overrides: Record<string, string>) => void;
|
|
26
|
+
/**
|
|
27
|
+
* Top-level orchestrator for dependency migration.
|
|
28
|
+
*/
|
|
29
|
+
declare const migrateDeps: (root: string, packageManager: PackageManagerType, visConfig: VisConfig, options: {
|
|
30
|
+
dryRun: boolean;
|
|
31
|
+
}, logger: MigrateLogger, report: MigrationReport) => void;
|
|
32
|
+
export { migrateDeps, migrateMonorepoPackages, rewritePackageJson, rewriteScripts, updatePnpmWorkspaceCatalog };
|
|
@@ -0,0 +1,20 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Reads and parses a JSON file. Returns undefined if the file doesn't exist or isn't valid JSON.
|
|
3
|
+
*/
|
|
4
|
+
declare const readJsonFile: <T>(filePath: string) => T | undefined;
|
|
5
|
+
/**
|
|
6
|
+
* Checks if a file exists and contains valid JSON.
|
|
7
|
+
*/
|
|
8
|
+
declare const isJsonFile: (filePath: string) => boolean;
|
|
9
|
+
/**
|
|
10
|
+
* Detect the indentation used in a JSON file by looking at the first indented line.
|
|
11
|
+
*/
|
|
12
|
+
declare const detectJsonIndent: (content: string) => number;
|
|
13
|
+
/**
|
|
14
|
+
* Edits a JSON file in place using a mutator function.
|
|
15
|
+
* The mutator receives the parsed data and should return the modified data,
|
|
16
|
+
* or undefined to skip writing. Returns true if the file was modified.
|
|
17
|
+
* Preserves the original indentation style.
|
|
18
|
+
*/
|
|
19
|
+
declare const editJsonFile: <T>(filePath: string, mutator: (data: T) => T | undefined) => boolean;
|
|
20
|
+
export { detectJsonIndent, editJsonFile, isJsonFile, readJsonFile };
|
|
@@ -0,0 +1,62 @@
|
|
|
1
|
+
import type { MigrationReport } from "./types.d.ts";
|
|
2
|
+
interface MigrateLogger {
|
|
3
|
+
info: (message: string) => void;
|
|
4
|
+
warn: (message: string) => void;
|
|
5
|
+
}
|
|
6
|
+
/**
|
|
7
|
+
* Check if a standalone lint-staged config file exists.
|
|
8
|
+
*/
|
|
9
|
+
declare const hasStandaloneLintStagedConfig: (root: string) => boolean;
|
|
10
|
+
/**
|
|
11
|
+
* Check if a standalone lint-staged config exists in a format that can't be
|
|
12
|
+
* auto-migrated (non-JSON files like .yaml, .mjs, .cjs, .js, or a non-JSON .lintstagedrc).
|
|
13
|
+
*/
|
|
14
|
+
declare const hasUnsupportedLintStagedConfig: (root: string) => boolean;
|
|
15
|
+
/**
|
|
16
|
+
* Check if vis.config.ts already has a `staged` config key.
|
|
17
|
+
*/
|
|
18
|
+
declare const hasStagedConfigInVisConfig: (root: string) => boolean;
|
|
19
|
+
/**
|
|
20
|
+
* Detect lint-staged config: returns the source ("package.json", a filename, or undefined).
|
|
21
|
+
*/
|
|
22
|
+
declare const detectLintStagedConfig: (root: string) => string | undefined;
|
|
23
|
+
/**
|
|
24
|
+
* Extract lint-staged config from package.json.
|
|
25
|
+
*/
|
|
26
|
+
declare const extractLintStagedFromPackageJson: (root: string) => Record<string, string | string[]> | undefined;
|
|
27
|
+
/**
|
|
28
|
+
* Parse a JSON lint-staged config file.
|
|
29
|
+
*/
|
|
30
|
+
declare const parseLintStagedJsonFile: (filePath: string) => Record<string, string | string[]> | undefined;
|
|
31
|
+
/**
|
|
32
|
+
* Generate a TypeScript snippet for the staged block.
|
|
33
|
+
*/
|
|
34
|
+
declare const generateStagedConfigSnippet: (config: Record<string, string | string[]>) => string;
|
|
35
|
+
/**
|
|
36
|
+
* Insert staged config into vis.config.ts. Creates the file if it doesn't exist.
|
|
37
|
+
*/
|
|
38
|
+
declare const insertStagedIntoVisConfig: (root: string, config: Record<string, string | string[]>, logger: MigrateLogger) => boolean;
|
|
39
|
+
/**
|
|
40
|
+
* Remove `lint-staged` key, config, and dependency from package.json in a single read/write.
|
|
41
|
+
* Returns which removals were performed.
|
|
42
|
+
*/
|
|
43
|
+
declare const removeLintStagedFromPackageJson: (root: string) => {
|
|
44
|
+
configRemoved: boolean;
|
|
45
|
+
dependencyRemoved: boolean;
|
|
46
|
+
};
|
|
47
|
+
/**
|
|
48
|
+
* Remove standalone lint-staged config files.
|
|
49
|
+
*/
|
|
50
|
+
declare const removeLintStagedConfigFiles: (root: string, report: MigrationReport) => void;
|
|
51
|
+
/**
|
|
52
|
+
* Rewrite pre-commit hook to replace lint-staged invocations with `vis staged`.
|
|
53
|
+
*/
|
|
54
|
+
declare const rewritePreCommitHook: (root: string, hooksDirectory: string) => boolean;
|
|
55
|
+
/**
|
|
56
|
+
* Migrates lint-staged configuration to vis.config.ts staged block.
|
|
57
|
+
*/
|
|
58
|
+
declare const migrateLintStaged: (root: string, options: {
|
|
59
|
+
dryRun: boolean;
|
|
60
|
+
silent?: boolean;
|
|
61
|
+
}, logger: MigrateLogger, report: MigrationReport) => boolean;
|
|
62
|
+
export { detectLintStagedConfig, extractLintStagedFromPackageJson, generateStagedConfigSnippet, hasStagedConfigInVisConfig, hasStandaloneLintStagedConfig, hasUnsupportedLintStagedConfig, insertStagedIntoVisConfig, migrateLintStaged, parseLintStagedJsonFile, removeLintStagedConfigFiles, removeLintStagedFromPackageJson, rewritePreCommitHook, };
|
|
@@ -0,0 +1,20 @@
|
|
|
1
|
+
interface MigrateLogger {
|
|
2
|
+
info: (message: string) => void;
|
|
3
|
+
warn: (message: string) => void;
|
|
4
|
+
}
|
|
5
|
+
interface MigrationReport {
|
|
6
|
+
gitHooksConfigured: boolean;
|
|
7
|
+
inlinedLintStagedConfigCount: number;
|
|
8
|
+
manualSteps: string[];
|
|
9
|
+
mergedStagedConfigCount: number;
|
|
10
|
+
removedConfigCount: number;
|
|
11
|
+
removedPackageCount: number;
|
|
12
|
+
rewrittenScriptCount: number;
|
|
13
|
+
warnings: string[];
|
|
14
|
+
}
|
|
15
|
+
declare const createMigrationReport: () => MigrationReport;
|
|
16
|
+
declare const addMigrationWarning: (report: MigrationReport | undefined, warning: string) => void;
|
|
17
|
+
declare const addManualStep: (report: MigrationReport | undefined, step: string) => void;
|
|
18
|
+
type PackageManagerType = "bun" | "npm" | "pnpm" | "yarn";
|
|
19
|
+
export type { MigrateLogger, MigrationReport, PackageManagerType };
|
|
20
|
+
export { addManualStep, addMigrationWarning, createMigrationReport };
|
package/dist/config.d.ts
ADDED
|
@@ -0,0 +1,40 @@
|
|
|
1
|
+
import type { VisConfig } from "./workspace.d.ts";
|
|
2
|
+
/** Supported config file names, checked in order. */
|
|
3
|
+
declare const CONFIG_FILES: string[];
|
|
4
|
+
/**
|
|
5
|
+
* Find the vis config file in a directory.
|
|
6
|
+
* @param directory The directory to search in.
|
|
7
|
+
* @returns The absolute path to the config file, or `undefined` if not found.
|
|
8
|
+
*/
|
|
9
|
+
declare const findVisConfigFile: (directory: string) => string | undefined;
|
|
10
|
+
/**
|
|
11
|
+
* Load the vis configuration from a `vis.config.ts` (or `.js`, `.mjs`, `.cjs`, `.mts`, `.cts`) file.
|
|
12
|
+
*
|
|
13
|
+
* Uses jiti for runtime TypeScript support — no build step needed for config files.
|
|
14
|
+
* Falls back to an empty config if no config file is found.
|
|
15
|
+
* @param workspaceRoot The workspace root directory to search for the config file.
|
|
16
|
+
* @returns The loaded and resolved configuration.
|
|
17
|
+
*/
|
|
18
|
+
declare const loadVisConfig: (workspaceRoot: string) => Promise<VisConfig>;
|
|
19
|
+
/**
|
|
20
|
+
* Type-safe helper for defining vis configuration.
|
|
21
|
+
* Provides full TypeScript autocomplete when used in `vis.config.ts`.
|
|
22
|
+
* @example
|
|
23
|
+
* ```typescript
|
|
24
|
+
* // vis.config.ts
|
|
25
|
+
* import { defineConfig } from "@visulima/vis/config";
|
|
26
|
+
*
|
|
27
|
+
* export default defineConfig({
|
|
28
|
+
* update: {
|
|
29
|
+
* target: "minor",
|
|
30
|
+
* exclude: ["@types/*"],
|
|
31
|
+
* security: true,
|
|
32
|
+
* },
|
|
33
|
+
* ai: {
|
|
34
|
+
* provider: "claude",
|
|
35
|
+
* },
|
|
36
|
+
* });
|
|
37
|
+
* ```
|
|
38
|
+
*/
|
|
39
|
+
declare const defineConfig: (config: VisConfig) => VisConfig;
|
|
40
|
+
export { CONFIG_FILES, defineConfig, findVisConfigFile, loadVisConfig };
|
package/dist/config.js
ADDED
|
@@ -0,0 +1 @@
|
|
|
1
|
+
// dist/config.js — runtime for the `@visulima/vis/config` entry point.
// Unminified, behavior-identical restyle of the bundled output: finds the
// vis config file, loads it through jiti (runtime TS support), and exposes
// the `defineConfig` identity helper.

import { createRequire } from "node:module";

import { join } from "@visulima/path";
import { createJiti } from "jiti";

// Bundler "__name" shim: pins a function's `name` property so stack traces
// keep the original identifier. Kept because `.name` is observable behavior.
const defineName = (fn, name) => Object.defineProperty(fn, "name", { value: name, configurable: true });

// CJS require bound to this module, used as the fallback builtin loader.
const cjsRequire = createRequire(import.meta.url);

// Resolve a `process` reference that also works when only `globalThis.process` exists.
const proc = typeof globalThis !== "undefined" && typeof globalThis.process !== "undefined" ? globalThis.process : process;

// Load a Node builtin via `process.getBuiltinModule` on runtimes that have it
// (Node >22, >=22.3, or >=20.16 per the original version gate), otherwise via
// the createRequire fallback.
const getBuiltin = defineName((id) => {
    if (typeof proc !== "undefined" && proc.versions && proc.versions.node) {
        const [major, minor] = proc.versions.node.split(".").map(Number);

        if (major > 22 || (major === 22 && minor >= 3) || (major === 20 && minor >= 16)) {
            return proc.getBuiltinModule(id);
        }
    }

    return cjsRequire(id);
}, "__cjs_getBuiltinModule");

const { existsSync } = getBuiltin("node:fs");

/** Supported config file names, checked in order. */
const CONFIG_FILES = ["vis.config.ts", "vis.config.mts", "vis.config.cts", "vis.config.js", "vis.config.mjs", "vis.config.cjs"];

/**
 * Find the vis config file in a directory.
 * Returns the joined path of the first existing candidate, or `undefined`.
 */
const findVisConfigFile = defineName((directory) => {
    for (const fileName of CONFIG_FILES) {
        const candidate = join(directory, fileName);

        if (existsSync(candidate)) {
            return candidate;
        }
    }
}, "findVisConfigFile");

/**
 * Load the vis configuration from the workspace root.
 * Uses jiti so `.ts` configs work without a build step; returns `{}` when no
 * config file is found. A function export is invoked (and awaited) to obtain
 * the config object.
 */
const loadVisConfig = defineName(async (workspaceRoot) => {
    const configPath = findVisConfigFile(workspaceRoot);

    if (!configPath) {
        return {};
    }

    const loaded = (await createJiti(workspaceRoot).import(configPath, { default: true, try: true })) ?? {};

    return typeof loaded === "function" ? await loaded() : loaded;
}, "loadVisConfig");

/** Type-safe identity helper for authoring `vis.config.ts`. */
const defineConfig = defineName((config) => config, "defineConfig");

export { CONFIG_FILES, defineConfig, findVisConfigFile, loadVisConfig };
|
|
@@ -0,0 +1,23 @@
|
|
|
1
|
+
interface UpdateCommandOptions {
|
|
2
|
+
dev: boolean;
|
|
3
|
+
filters: string[];
|
|
4
|
+
global: boolean;
|
|
5
|
+
interactive: boolean;
|
|
6
|
+
latest: boolean;
|
|
7
|
+
noOptional: boolean;
|
|
8
|
+
noSave: boolean;
|
|
9
|
+
packages: string[];
|
|
10
|
+
prod: boolean;
|
|
11
|
+
recursive: boolean;
|
|
12
|
+
workspaceRoot: boolean;
|
|
13
|
+
}
|
|
14
|
+
interface ResolvedCommand {
|
|
15
|
+
args: string[];
|
|
16
|
+
bin: string;
|
|
17
|
+
}
|
|
18
|
+
declare const resolveUpdateCommand: (packageManager: "bun" | "npm" | "pnpm" | "yarn", version: string, options: UpdateCommandOptions) => {
|
|
19
|
+
command: ResolvedCommand;
|
|
20
|
+
warnings: string[];
|
|
21
|
+
};
|
|
22
|
+
export type { ResolvedCommand, UpdateCommandOptions };
|
|
23
|
+
export { resolveUpdateCommand };
|
|
@@ -0,0 +1,58 @@
|
|
|
1
|
+
import type { ProjectGraph, TargetConfiguration, WorkspaceConfiguration } from "@visulima/task-runner";
|
|
2
|
+
interface PackageJson {
|
|
3
|
+
dependencies?: Record<string, string>;
|
|
4
|
+
devDependencies?: Record<string, string>;
|
|
5
|
+
name?: string;
|
|
6
|
+
peerDependencies?: Record<string, string>;
|
|
7
|
+
scripts?: Record<string, string>;
|
|
8
|
+
workspaces?: string[] | {
|
|
9
|
+
packages: string[];
|
|
10
|
+
};
|
|
11
|
+
}
|
|
12
|
+
interface VisConfig {
|
|
13
|
+
/** AI analysis configuration */
|
|
14
|
+
ai?: {
|
|
15
|
+
/** Cache TTL in milliseconds. Overrides default (1h / 30min for security). */
|
|
16
|
+
cacheTtl?: number;
|
|
17
|
+
/** Override default provider priority. Higher number = preferred. */
|
|
18
|
+
priority?: Record<string, number>;
|
|
19
|
+
/** Use a specific provider instead of auto-detecting (e.g., `"claude"`, `"gemini"`). */
|
|
20
|
+
provider?: string;
|
|
21
|
+
};
|
|
22
|
+
/** Package override mappings applied during migration (e.g., `{ "lodash": "lodash-es" }`) */
|
|
23
|
+
overrides?: Record<string, string>;
|
|
24
|
+
/** Staged file patterns and commands (replaces lint-staged) */
|
|
25
|
+
staged?: Record<string, string | string[]>;
|
|
26
|
+
/** Target default configurations */
|
|
27
|
+
targetDefaults?: Record<string, Partial<TargetConfiguration>>;
|
|
28
|
+
/** Task runner options */
|
|
29
|
+
taskRunnerOptions?: Record<string, unknown>;
|
|
30
|
+
/** Update command defaults */
|
|
31
|
+
update?: {
|
|
32
|
+
exclude?: string[];
|
|
33
|
+
format?: "json" | "minimal" | "table";
|
|
34
|
+
include?: string[];
|
|
35
|
+
install?: boolean;
|
|
36
|
+
prerelease?: boolean;
|
|
37
|
+
security?: boolean;
|
|
38
|
+
target?: "latest" | "minor" | "patch";
|
|
39
|
+
};
|
|
40
|
+
}
|
|
41
|
+
/**
|
|
42
|
+
* Resolves glob-like workspace patterns to actual directories.
|
|
43
|
+
* Supports simple patterns like "packages/*" and "packages/**".
|
|
44
|
+
*/
|
|
45
|
+
declare const resolveWorkspacePatterns: (workspaceRoot: string, patterns: string[]) => string[];
|
|
46
|
+
/**
|
|
47
|
+
* Discovers all projects in the workspace and builds a WorkspaceConfiguration.
|
|
48
|
+
*/
|
|
49
|
+
declare const discoverWorkspace: (workspaceRoot: string, config?: VisConfig) => {
|
|
50
|
+
config: VisConfig;
|
|
51
|
+
workspace: WorkspaceConfiguration;
|
|
52
|
+
};
|
|
53
|
+
/**
|
|
54
|
+
* Builds the project dependency graph from package.json dependencies.
|
|
55
|
+
*/
|
|
56
|
+
declare const buildProjectGraph: (workspaceRoot: string, workspace: WorkspaceConfiguration) => ProjectGraph;
|
|
57
|
+
export type { PackageJson, VisConfig };
|
|
58
|
+
export { buildProjectGraph, discoverWorkspace, resolveWorkspacePatterns };
|
package/package.json
CHANGED
|
@@ -1,10 +1,84 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@visulima/vis",
|
|
3
|
-
"version": "0.0.
|
|
4
|
-
"description": "
|
|
3
|
+
"version": "1.0.0-alpha.2",
|
|
4
|
+
"description": "A CLI task runner for monorepo workspaces, powered by @visulima/task-runner",
|
|
5
5
|
"keywords": [
|
|
6
|
-
"
|
|
7
|
-
"
|
|
8
|
-
"
|
|
9
|
-
|
|
10
|
-
|
|
6
|
+
"visulima",
|
|
7
|
+
"vis",
|
|
8
|
+
"cli",
|
|
9
|
+
"task-runner",
|
|
10
|
+
"monorepo",
|
|
11
|
+
"workspace",
|
|
12
|
+
"build",
|
|
13
|
+
"cache"
|
|
14
|
+
],
|
|
15
|
+
"homepage": "https://visulima.com/packages/vis",
|
|
16
|
+
"bugs": {
|
|
17
|
+
"url": "https://github.com/visulima/visulima/issues"
|
|
18
|
+
},
|
|
19
|
+
"repository": {
|
|
20
|
+
"type": "git",
|
|
21
|
+
"url": "git+https://github.com/visulima/visulima.git",
|
|
22
|
+
"directory": "packages/tooling/vis"
|
|
23
|
+
},
|
|
24
|
+
"funding": [
|
|
25
|
+
{
|
|
26
|
+
"type": "github",
|
|
27
|
+
"url": "https://github.com/sponsors/prisis"
|
|
28
|
+
},
|
|
29
|
+
{
|
|
30
|
+
"type": "consulting",
|
|
31
|
+
"url": "https://anolilab.com/support"
|
|
32
|
+
}
|
|
33
|
+
],
|
|
34
|
+
"license": "MIT",
|
|
35
|
+
"author": {
|
|
36
|
+
"name": "Daniel Bannert",
|
|
37
|
+
"email": "d.bannert@anolilab.de"
|
|
38
|
+
},
|
|
39
|
+
"sideEffects": false,
|
|
40
|
+
"type": "module",
|
|
41
|
+
"exports": {
|
|
42
|
+
"./config": {
|
|
43
|
+
"types": "./dist/config.d.ts",
|
|
44
|
+
"default": "./dist/config.js"
|
|
45
|
+
},
|
|
46
|
+
"./package.json": "./package.json"
|
|
47
|
+
},
|
|
48
|
+
"bin": {
|
|
49
|
+
"v": "./dist/bin.js",
|
|
50
|
+
"vis": "./dist/bin.js"
|
|
51
|
+
},
|
|
52
|
+
"files": [
|
|
53
|
+
"dist/**",
|
|
54
|
+
"README.md",
|
|
55
|
+
"CHANGELOG.md",
|
|
56
|
+
"LICENSE.md"
|
|
57
|
+
],
|
|
58
|
+
"dependencies": {
|
|
59
|
+
"@visulima/boxen": "3.0.0-alpha.7",
|
|
60
|
+
"@visulima/cerebro": "3.0.0-alpha.8",
|
|
61
|
+
"@visulima/find-ai-runner": "1.0.0-alpha.1",
|
|
62
|
+
"@visulima/fs": "5.0.0-alpha.5",
|
|
63
|
+
"@visulima/package": "5.0.0-alpha.5",
|
|
64
|
+
"@visulima/path": "3.0.0-alpha.6",
|
|
65
|
+
"@visulima/tabular": "4.0.0-alpha.7",
|
|
66
|
+
"@visulima/task-runner": "1.0.0-alpha.2",
|
|
67
|
+
"jiti": "^2.6.1"
|
|
68
|
+
},
|
|
69
|
+
"peerDependencies": {
|
|
70
|
+
"lint-staged": ">=15.0.0"
|
|
71
|
+
},
|
|
72
|
+
"peerDependenciesMeta": {
|
|
73
|
+
"lint-staged": {
|
|
74
|
+
"optional": true
|
|
75
|
+
}
|
|
76
|
+
},
|
|
77
|
+
"engines": {
|
|
78
|
+
"node": "^20.19.0 || >=22.12.0"
|
|
79
|
+
},
|
|
80
|
+
"publishConfig": {
|
|
81
|
+
"access": "public",
|
|
82
|
+
"provenance": true
|
|
83
|
+
}
|
|
84
|
+
}
|