projectify-cli 1.0.0 → 2.0.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +18 -24
- package/dist/index.js +73 -7
- package/package.json +7 -1
package/README.md
CHANGED
@@ -4,8 +4,8 @@
 
 Projectify is a powerful, standalone Node.js tool designed to analyze codebase architectures, calculate dependency impacts ("Blast Radius"), and generate stunning, interactive visualizations. Built with TypeScript and powered by AI.
 
-
+
 
 ## Features
 
@@ -33,36 +33,30 @@ npm run build
 
 ## Usage
 
-###
-
-Run directly without installing:
+### 1. Interactive Mode (Recommended)
+Simply run the CLI without arguments to launch the interactive setup wizard. It will guide you through provider selection (OpenAI, Gemini, Ollama) and API key entry.
 
 ```bash
-
+# Using npx
+npx projectify-cli
+
+# Or if installed globally
+projectify
 ```
 
-###
+### 2. Manual / CI Mode
+Run non-interactively by passing arguments:
 
 ```bash
-
-
-# Run anywhere
-projectify [path] [options]
-```
-
-### Local Development
+# Basic Analysis (No AI)
+projectify . --no-ai
 
-
+# With auto-detected environment variables
+export GEMINI_API_KEY=...
+projectify . --provider gemini
 
-
-
-npm start -- /path/to/target/project
-
-# Analysis with AI Summary (Requires API Key)
-export OPENAI_API_KEY=sk-your-key-here
-export GEMINI_API_KEY=sk-your-key-here
-export OLLAMA_API_KEY=sk-your-key-here
-npm start -- /path/to/target/project --summary
+# Force specific provider
+projectify . --provider ollama --model llama3
 ```
 
 ### CLI Options
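The two usage modes documented above are implemented by the commander/inquirer wiring in dist/index.js below: flags satisfy CI runs, and anything left unset triggers a prompt. A minimal sketch of that pattern, assuming commander and inquirer ^8 as declared in package.json (this program is illustrative, not the published CLI source):

```js
// Sketch: flags first, prompts as fallback. Not the published projectify-cli source.
const { Command } = require("commander");
const inquirer = require("inquirer");

const program = new Command();
program
    .argument("[path]", "Project path to analyze", ".")
    .option("--no-ai", "Skip AI analysis")
    .option("--provider <type>", "AI Provider (openai, gemini, ollama)")
    .option("--model <name>", "Model name (optional)")
    .action(async (projectPath, options) => {
        if (!options.ai) {
            // CI mode: --no-ai skips every prompt.
            console.log(`Analyzing ${projectPath} without AI`);
            return;
        }
        // Interactive mode: ask only for what the flags did not supply.
        let provider = options.provider;
        if (!provider) {
            ({ provider } = await inquirer.prompt([
                {
                    type: "list",
                    name: "provider",
                    message: "Select AI Provider for analysis:",
                    choices: ["openai", "gemini", "ollama"]
                }
            ]));
        }
        console.log(`Analyzing ${projectPath} with ${provider}`);
    });

program.parseAsync(process.argv);
```

Run it with a `--provider` flag and no prompt appears; run it bare and the list prompt takes over, which is the behaviour the README's two sections describe.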
package/dist/index.js
CHANGED
@@ -5,6 +5,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
 };
 Object.defineProperty(exports, "__esModule", { value: true });
 const commander_1 = require("commander");
+const inquirer = require("inquirer");
 const chalk_1 = __importDefault(require("chalk"));
 const path_1 = __importDefault(require("path"));
 const fs_extra_1 = __importDefault(require("fs-extra"));
@@ -17,12 +18,12 @@ const program = new commander_1.Command();
 program
     .name('projectify')
     .description('Projectify - Autonomous Code Analysis & Visualization')
-    .version('2.0.
+    .version('2.0.2');
 program
     .argument('[path]', 'Project path to analyze', '.')
     .option('--no-ai', 'Skip AI analysis')
     .option('--summary', 'Generate full project summary')
-    .option('--provider <type>', 'AI Provider (openai, gemini, ollama)'
+    .option('--provider <type>', 'AI Provider (openai, gemini, ollama)')
     .option('--model <name>', 'Model name (optional)')
     .action(async (projectPath, options) => {
     try {
@@ -57,17 +58,82 @@ program
         let projectSummary = '';
         let gitInsight = '';
         if (options.ai) {
-
+            let providerType = options.provider;
+            // Interactive Provider Selection
+            if (!providerType) {
+                const answer = await inquirer.prompt([
+                    {
+                        type: 'list',
+                        name: 'provider',
+                        message: 'Select AI Provider for analysis:',
+                        choices: ['openai', 'gemini', 'ollama']
+                    }
+                ]);
+                providerType = answer.provider;
+            }
+            // Interactive Model Selection
+            if (!options.model) {
+                let modelChoices = [];
+                if (providerType === 'openai') {
+                    modelChoices = ['gpt-4o', 'gpt-5.2', 'gpt-4.1'];
+                }
+                else if (providerType === 'gemini') {
+                    modelChoices = ['gemini-3-pro-preview', 'gemini-3-flash-preview'];
+                }
+                if (modelChoices.length > 0) {
+                    const answer = await inquirer.prompt([
+                        {
+                            type: 'list',
+                            name: 'model',
+                            message: `Select ${providerType} Model:`,
+                            choices: modelChoices
+                        }
+                    ]);
+                    options.model = answer.model;
+                }
+                else if (providerType === 'ollama') {
+                    const answer = await inquirer.prompt([
+                        {
+                            type: 'input',
+                            name: 'model',
+                            message: 'Enter Ollama Model Name (e.g., llama3):',
+                            default: 'llama3'
+                        }
+                    ]);
+                    options.model = answer.model;
+                }
+            }
             let apiKey = '';
             if (providerType === 'openai') {
                 apiKey = process.env.OPENAI_API_KEY || '';
-                if (!apiKey)
-
+                if (!apiKey) {
+                    const answer = await inquirer.prompt([
+                        {
+                            type: 'password',
+                            name: 'apiKey',
+                            message: 'Enter OpenAI API Key:',
+                            mask: '*'
+                        }
+                    ]);
+                    apiKey = answer.apiKey;
+                }
             }
             else if (providerType === 'gemini') {
                 apiKey = process.env.GEMINI_API_KEY || '';
-                if (!apiKey)
-
+                if (!apiKey) {
+                    const answer = await inquirer.prompt([
+                        {
+                            type: 'password',
+                            name: 'apiKey',
+                            message: 'Enter Gemini API Key:',
+                            mask: '*'
+                        }
+                    ]);
+                    apiKey = answer.apiKey;
+                }
+            }
+            if (!apiKey && providerType !== 'ollama') {
+                console.log(chalk_1.default.red('\n⚠️ API Key is required for this provider.'));
             }
             if ((providerType === 'ollama') || apiKey) {
                 try {
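The key handling added above follows one resolution order for both hosted providers: the provider-specific environment variable wins, a masked inquirer prompt is the fallback, and Ollama needs no key at all. A standalone sketch of that order (the `resolveApiKey` helper is illustrative; the package exposes no such export):

```js
// Sketch of the key-resolution order shown in the diff above.
// resolveApiKey is a hypothetical helper, not an export of projectify-cli.
const inquirer = require("inquirer");

async function resolveApiKey(providerType) {
    // Ollama talks to a local server, so no key is required.
    if (providerType === "ollama") return "";

    // 1. Environment variable first (what the README calls auto-detection).
    const envVar = providerType === "openai" ? "OPENAI_API_KEY" : "GEMINI_API_KEY";
    const fromEnv = process.env[envVar] || "";
    if (fromEnv) return fromEnv;

    // 2. Otherwise ask interactively, masking the input.
    const answer = await inquirer.prompt([
        {
            type: "password",
            name: "apiKey",
            message: `Enter ${providerType} API Key:`,
            mask: "*"
        }
    ]);
    return answer.apiKey || "";
}

// Usage: an empty result for a hosted provider means analysis cannot proceed.
resolveApiKey("gemini").then((key) => {
    if (!key) console.error("API Key is required for this provider.");
});
```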
package/package.json
CHANGED
@@ -1,11 +1,15 @@
 {
   "name": "projectify-cli",
-  "version": "
+  "version": "2.0.2",
   "description": "Project Analyzer using LangChain",
   "main": "dist/index.js",
   "bin": {
     "projectify": "dist/index.js"
   },
+  "repository": {
+    "type": "git",
+    "url": "git+https://github.com/GreenHacker420/project-analyzer.git"
+  },
   "files": [
     "dist",
     "package.json",
@@ -35,11 +39,13 @@
     "@langchain/langgraph": "^1.0.7",
     "@langchain/ollama": "^1.1.0",
     "@langchain/openai": "^0.0.28",
+    "@types/inquirer": "^8.2.12",
     "chalk": "^4.1.2",
     "commander": "^11.1.0",
     "fast-glob": "^3.3.2",
     "fs-extra": "^11.2.0",
     "graphology": "^0.25.4",
+    "inquirer": "^8.2.7",
     "langchain": "^0.1.36",
     "ora": "^5.4.1",
     "simple-git": "^3.30.0",
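A note on the new dependencies: inquirer is added at ^8.2.7 rather than the current major. inquirer 9 and later are published as ES modules only, so the `require("inquirer")` call added to dist/index.js works against the 8.x line but would fail on newer majors under Node versions without `require()` support for ESM. A quick local check of that assumption:

```js
// With inquirer@8.x installed this require succeeds and prompt is a function;
// against inquirer@9+ (ESM-only) the same require throws ERR_REQUIRE_ESM.
const inquirer = require("inquirer");
console.log(typeof inquirer.prompt); // "function"
```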