agdi 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md ADDED
@@ -0,0 +1,100 @@
+ # agdi 🚀
+
+ **The AI-powered app generator** - Build full-stack applications from natural language in your terminal.
+
+ [![npm version](https://img.shields.io/npm/v/agdi.svg)](https://www.npmjs.com/package/agdi)
+ [![License: MIT](https://img.shields.io/badge/License-MIT-yellow.svg)](https://opensource.org/licenses/MIT)
+
+ ## Installation
+
+ ```bash
+ npm install -g agdi
+ ```
+
+ Or run directly with npx:
+ ```bash
+ npx agdi init
+ ```
+
+ ## Quick Start
+
+ ```bash
+ # Interactive mode (recommended)
+ agdi init
+
+ # Generate from prompt
+ agdi generate "Create a todo app with dark mode"
+
+ # Configure API keys
+ agdi auth
+ ```
+
+ ## Features
+
+ - 🆓 **FREE AI Access** - No API key required (powered by Puter.com)
+ - 🤖 **400+ AI Models** - GPT-5, Claude 4.5, Gemini 3, Llama 4, and more
+ - ⚡ **Instant Generation** - Full React/Vite apps in seconds
+ - 💬 **Interactive Mode** - Chat-based coding assistant
+ - 🔑 **Bring Your Own Key** - Use your own OpenAI, Anthropic, or Gemini keys
+
+ ## Commands
+
+ | Command | Description |
+ |---------|-------------|
+ | `agdi init` | Interactive project creation wizard |
+ | `agdi generate <prompt>` | Generate app from prompt |
+ | `agdi chat` | Start interactive coding session |
+ | `agdi auth` | Configure API keys |
+ | `agdi models` | List available AI models |
+ | `agdi --help` | Show all commands |
+
+ ## Examples
+
+ ```bash
+ # Create a dashboard
+ agdi generate "Build an analytics dashboard with charts"
+
+ # Create an e-commerce store
+ agdi generate "Create a product catalog with shopping cart" -m claude-sonnet-4-5
+
+ # Use specific model
+ agdi generate "Build a blog" -p puter -m gpt-5
+ ```
+
+ ## Supported Models
+
+ ### Free (No API Key)
+ - GPT-5, GPT-5 Mini
+ - Claude 4.5 Sonnet, Claude 4.5 Opus
+ - Gemini 3 Pro, Gemini 2.5 Flash
+ - Llama 4, DeepSeek R1, Grok 3
+
+ ### Bring Your Own Key
+ - OpenAI (GPT-5, o3-mini)
+ - Anthropic (Claude 4.5)
+ - Google (Gemini 3)
+ - DeepSeek, Mistral, and more
+
+ ## Configuration
+
+ API keys are stored in `~/.agdi/config.json`:
+
+ ```bash
+ agdi auth          # Interactive setup
+ agdi auth --status # Show current config
+ ```
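+
+ `agdi auth` writes a flat JSON file. A typical `~/.agdi/config.json` might look roughly like this (field names are taken from the CLI source; which keys appear depends on the providers you configure, and the values below are placeholders):
+
+ ```json
+ {
+   "defaultProvider": "gemini",
+   "defaultModel": "gemini-2.5-flash",
+   "geminiApiKey": "YOUR_GEMINI_API_KEY",
+   "ollamaUrl": "http://localhost:11434"
+ }
+ ```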
+
+ ## Requirements
+
+ - Node.js 18 or higher
+ - npm or yarn
+
+ ## Links
+
+ - 🌐 [Website](https://agdi.dev)
+ - 📖 [Documentation](https://docs.agdi.dev)
+ - 🐙 [GitHub](https://github.com/agdi-dev/agdi)
+
+ ## License
+
+ MIT © [Agdi Systems Inc.](https://agdi.dev)
package/bin/agdi.js ADDED
@@ -0,0 +1,2 @@
+ #!/usr/bin/env node
+ import '../dist/index.js';
package/dist/index.js ADDED
@@ -0,0 +1,665 @@
+ #!/usr/bin/env node
+
+ // src/index.ts
+ import { Command } from "commander";
+ import chalk3 from "chalk";
+ import ora2 from "ora";
+ import { input as input3, select as select2, confirm as confirm2 } from "@inquirer/prompts";
+
+ // src/core/llm/index.ts
+ var PuterProvider = class {
+ model;
+ constructor(config) {
+ this.model = config.model || "gpt-4.1-nano";
+ }
+ async generate(prompt, systemPrompt) {
+ const response = await fetch("https://api.puter.com/ai/chat", {
+ method: "POST",
+ headers: {
+ "Content-Type": "application/json"
+ },
+ body: JSON.stringify({
+ model: this.model,
+ messages: [
+ ...systemPrompt ? [{ role: "system", content: systemPrompt }] : [],
+ { role: "user", content: prompt }
+ ]
+ })
+ });
+ if (!response.ok) {
+ throw new Error(`Puter API error: ${response.status} ${response.statusText}`);
+ }
+ const data = await response.json();
+ let text = "";
+ if (typeof data === "string") {
+ text = data;
+ } else if (data.message?.content) {
+ if (Array.isArray(data.message.content)) {
+ text = data.message.content.map((c) => c.text || "").join("");
+ } else {
+ text = data.message.content;
+ }
+ } else if (data.choices?.[0]?.message?.content) {
+ text = data.choices[0].message.content;
+ }
+ return {
+ text,
+ usage: typeof data !== "string" ? data.usage : void 0
+ };
+ }
+ };
+ var GeminiProvider = class {
+ config;
+ constructor(config) {
+ this.config = config;
+ }
+ async generate(prompt, systemPrompt) {
+ const { GoogleGenAI } = await import("@google/genai");
+ const ai = new GoogleGenAI({ apiKey: this.config.apiKey });
+ const contents = [];
+ if (systemPrompt) {
+ contents.push({ role: "user", parts: [{ text: systemPrompt }] });
+ contents.push({ role: "model", parts: [{ text: "Understood. I will follow these instructions." }] });
+ }
+ contents.push({ role: "user", parts: [{ text: prompt }] });
+ const response = await ai.models.generateContent({
+ model: this.config.model || "gemini-2.5-flash",
+ contents
+ });
+ const text = response.candidates?.[0]?.content?.parts?.[0]?.text || "";
+ return {
+ text,
+ usage: void 0
+ };
+ }
+ };
+ var PUTER_MODELS = {
+ // OpenAI Models
+ "gpt-5": "GPT-5 (Aug 2025)",
+ "gpt-5-mini": "GPT-5 Mini",
+ "gpt-4o": "GPT-4o",
+ "o3-mini": "o3 Mini (Jan 2025)",
+ "o1": "o1 (Reasoning)",
+ // Claude Models
+ "claude-opus-4-5": "Claude 4.5 Opus (Nov 2025)",
+ "claude-sonnet-4-5": "Claude 4.5 Sonnet (Sep 2025)",
+ "claude-sonnet-4": "Claude Sonnet 4",
+ "claude-3-5-sonnet": "Claude 3.5 Sonnet",
+ // Google Models
+ "gemini-3-pro-preview": "Gemini 3 Pro (Preview)",
+ "google/gemini-2.5-flash": "Gemini 2.5 Flash",
+ "google/gemini-2.5-pro": "Gemini 2.5 Pro",
+ // Meta Llama
+ "meta/llama-4-maverick": "Llama 4 Maverick (Apr 2025)",
+ "meta/llama-4-scout": "Llama 4 Scout",
+ "meta/llama-3.3-70b": "Llama 3.3 70B",
+ // DeepSeek
+ "deepseek/deepseek-v3.2": "DeepSeek V3.2 (Dec 2025)",
+ "deepseek/deepseek-reasoner": "DeepSeek R1",
+ // xAI
+ "x-ai/grok-3": "Grok 3",
+ // Mistral
+ "mistral/mistral-large": "Mistral Large"
+ };
+ function createLLMProvider(provider, config) {
+ switch (provider) {
+ case "puter":
+ return new PuterProvider(config);
+ case "gemini":
+ return new GeminiProvider(config);
+ default:
+ throw new Error(`Unsupported LLM provider: ${provider}`);
+ }
+ }
+
+ // src/core/project/index.ts
+ var ProjectManager = class {
+ project = null;
+ /**
+ * Create a new project
+ */
+ create(name, prompt) {
+ this.project = {
+ id: crypto.randomUUID(),
+ name,
+ description: "",
+ prompt,
+ files: [],
+ dependencies: [],
+ createdAt: /* @__PURE__ */ new Date(),
+ updatedAt: /* @__PURE__ */ new Date()
+ };
+ return this.project;
+ }
+ /**
+ * Get current project
+ */
+ get() {
+ return this.project;
+ }
+ /**
+ * Load an existing project
+ */
+ load(project) {
+ this.project = project;
+ }
+ /**
+ * Update project files
+ */
+ updateFiles(files) {
+ if (this.project) {
+ this.project.files = files;
+ this.project.updatedAt = /* @__PURE__ */ new Date();
+ }
+ }
+ /**
+ * Update project dependencies
+ */
+ updateDependencies(dependencies) {
+ if (this.project) {
+ this.project.dependencies = dependencies;
+ this.project.updatedAt = /* @__PURE__ */ new Date();
+ }
+ }
+ /**
+ * Add a file to the project
+ */
+ addFile(file) {
+ if (this.project) {
+ const existingIndex = this.project.files.findIndex((f) => f.path === file.path);
+ if (existingIndex >= 0) {
+ this.project.files[existingIndex] = file;
+ } else {
+ this.project.files.push(file);
+ }
+ this.project.updatedAt = /* @__PURE__ */ new Date();
+ }
+ }
+ /**
+ * Clear current project
+ */
+ clear() {
+ this.project = null;
+ }
+ /**
+ * Check if project exists
+ */
+ hasProject() {
+ return this.project !== null;
+ }
+ };
+
+ // src/core/io/index.ts
+ import JSZip from "jszip";
+
+ // src/core/architect/index.ts
+ var SYSTEM_PROMPT = `You are Agdi Architect, an expert software architect AI.
+ Your job is to generate complete, production-ready React applications.
+ Always use TypeScript, Tailwind CSS, and Vite.
+ Generate all necessary files including package.json, tsconfig.json, vite.config.ts.
+ Make the UI beautiful with modern design patterns.`;
+ async function generatePlan(prompt, llm) {
+ const planPrompt = `Create a detailed plan for: ${prompt}
+
+ Return a JSON object with:
+ {
+ "name": "app-name",
+ "description": "Brief description",
+ "files": [{"path": "src/App.tsx", "description": "Main component"}],
+ "dependencies": ["react", "tailwindcss"],
+ "architecture": "Component architecture description"
+ }
+
+ Return ONLY valid JSON, no markdown.`;
+ const response = await llm.generate(planPrompt, SYSTEM_PROMPT);
+ try {
+ const jsonMatch = response.text.match(/\{[\s\S]*\}/);
+ if (jsonMatch) {
+ return JSON.parse(jsonMatch[0]);
+ }
+ } catch (e) {
+ }
+ return {
+ name: "my-app",
+ description: prompt,
+ files: [
+ { path: "src/App.tsx", description: "Main App component" },
+ { path: "src/main.tsx", description: "Entry point" },
+ { path: "src/index.css", description: "Global styles" }
+ ],
+ dependencies: ["react", "react-dom", "tailwindcss"],
+ architecture: "Simple React SPA"
+ };
+ }
+ async function generateFile(filePath, fileDescription, plan, llm) {
+ const prompt = `Generate the complete code for: ${filePath}
+
+ Context:
+ - App: ${plan.name}
+ - Description: ${plan.description}
+ - This file: ${fileDescription}
+ - Architecture: ${plan.architecture}
+
+ Return ONLY the file content, no markdown code blocks.`;
+ const response = await llm.generate(prompt, SYSTEM_PROMPT);
+ let content = response.text;
+ if (content.startsWith("```")) {
+ content = content.replace(/^```[\w]*\n/, "").replace(/\n```$/, "");
+ }
+ return {
+ path: filePath,
+ content: content.trim()
+ };
+ }
+ async function generateApp(prompt, llm, onProgress) {
+ onProgress?.("Planning app architecture...");
+ const plan = await generatePlan(prompt, llm);
+ onProgress?.("Generating files...");
+ const files = [];
+ for (const fileSpec of plan.files) {
+ onProgress?.(`Creating ${fileSpec.path}...`, fileSpec.path);
+ const file = await generateFile(fileSpec.path, fileSpec.description, plan, llm);
+ files.push(file);
+ }
+ onProgress?.("Creating package.json...", "package.json");
+ files.push({
+ path: "package.json",
+ content: JSON.stringify({
+ name: plan.name,
+ version: "0.1.0",
+ type: "module",
+ scripts: {
+ dev: "vite",
+ build: "vite build",
+ preview: "vite preview"
+ },
+ dependencies: plan.dependencies.reduce((acc, dep) => {
+ acc[dep] = "latest";
+ return acc;
+ }, {})
+ }, null, 2)
+ });
+ return { plan, files };
+ }
+
+ // src/utils/fs.ts
+ import fs from "fs-extra";
+ import path from "path";
+ async function writeProject(project, outputDir) {
+ await fs.ensureDir(outputDir);
+ for (const file of project.files) {
+ const filePath = path.join(outputDir, file.path);
+ await fs.ensureDir(path.dirname(filePath));
+ await fs.writeFile(filePath, file.content, "utf-8");
+ }
+ }
+
+ // src/utils/config.ts
+ import fs2 from "fs-extra";
+ import path2 from "path";
+ import os from "os";
+ var CONFIG_DIR = path2.join(os.homedir(), ".agdi");
+ var CONFIG_FILE = path2.join(CONFIG_DIR, "config.json");
+ function loadConfig() {
+ try {
+ if (fs2.existsSync(CONFIG_FILE)) {
+ return fs2.readJsonSync(CONFIG_FILE);
+ }
+ } catch {
+ }
+ return {};
+ }
+ function saveConfig(config) {
+ try {
+ fs2.ensureDirSync(CONFIG_DIR);
+ fs2.writeJsonSync(CONFIG_FILE, config, { spaces: 2 });
+ } catch (error) {
+ console.error("Failed to save config:", error);
+ }
+ }
+
+ // src/commands/auth.ts
+ import { input, select, password } from "@inquirer/prompts";
+ import chalk from "chalk";
+ async function login() {
+ console.log(chalk.cyan.bold("\n\u{1F510} Agdi Authentication\n"));
+ const config = loadConfig();
+ const provider = await select({
+ message: "How would you like to authenticate?",
+ choices: [
+ { name: "\u{1F193} Agdi Cloud (FREE) - No API key needed!", value: "puter" },
+ { name: "\u{1F511} Bring Your Own Key", value: "byok" },
+ { name: "\u{1F3E0} Local LLM (Ollama)", value: "ollama" }
+ ]
+ });
+ if (provider === "puter") {
+ config.defaultProvider = "puter";
+ saveConfig(config);
+ console.log(chalk.green("\n\u2705 Using FREE Agdi Cloud (Puter.com)"));
+ console.log(chalk.gray("No API key needed - access to 400+ models!\n"));
+ return;
+ }
+ if (provider === "ollama") {
+ const ollamaUrl = await input({
+ message: "Ollama server URL:",
+ default: "http://localhost:11434"
+ });
+ config.ollamaUrl = ollamaUrl;
+ config.defaultProvider = "ollama";
+ saveConfig(config);
+ console.log(chalk.green("\n\u2705 Ollama configured"));
+ console.log(chalk.gray(`Server: ${ollamaUrl}
+ `));
+ return;
+ }
+ console.log(chalk.cyan("\n\u{1F511} Configure API Keys\n"));
+ const keyType = await select({
+ message: "Which provider?",
+ choices: [
+ { name: "Google Gemini", value: "gemini" },
+ { name: "OpenAI (GPT-4)", value: "openai" },
+ { name: "Anthropic (Claude)", value: "anthropic" },
+ { name: "DeepSeek", value: "deepseek" },
+ { name: "OpenRouter (Multi-model)", value: "openrouter" }
+ ]
+ });
+ const apiKey = await password({
+ message: `Enter your ${keyType} API key:`,
+ mask: "*"
+ });
+ switch (keyType) {
+ case "gemini":
+ config.geminiApiKey = apiKey;
+ break;
+ case "openai":
+ config.openaiApiKey = apiKey;
+ break;
+ case "anthropic":
+ config.anthropicApiKey = apiKey;
+ break;
+ case "deepseek":
+ config.deepseekApiKey = apiKey;
+ break;
+ case "openrouter":
+ config.openrouterApiKey = apiKey;
+ break;
+ }
+ config.defaultProvider = keyType;
+ saveConfig(config);
+ console.log(chalk.green(`
+ \u2705 ${keyType} API key saved securely`));
+ console.log(chalk.gray("Keys stored in ~/.agdi/config.json\n"));
+ }
+ async function showStatus() {
+ const config = loadConfig();
+ console.log(chalk.cyan.bold("\n\u{1F4CA} Authentication Status\n"));
+ const providers = [
+ { name: "Gemini", key: config.geminiApiKey },
+ { name: "OpenAI", key: config.openaiApiKey },
+ { name: "Anthropic", key: config.anthropicApiKey },
+ { name: "DeepSeek", key: config.deepseekApiKey },
+ { name: "OpenRouter", key: config.openrouterApiKey }
+ ];
+ for (const p of providers) {
+ const status = p.key ? chalk.green("\u2713 Configured") : chalk.gray("\u2717 Not set");
+ console.log(` ${p.name.padEnd(12)} ${status}`);
+ }
+ console.log(chalk.cyan(`
+ Default: ${config.defaultProvider || "puter (FREE)"}
+ `));
+ console.log(chalk.gray('\u{1F4A1} Tip: Use "agdi auth" to reconfigure\n'));
+ }
+
+ // src/commands/chat.ts
+ import { input as input2 } from "@inquirer/prompts";
+ import chalk2 from "chalk";
+ import ora from "ora";
+ var SYSTEM_PROMPT2 = `You are Agdi, an expert AI software architect.
+ You help users build applications by generating code.
+ When asked to create something, output complete, working code files.
+ Be concise but thorough. Use TypeScript and modern best practices.`;
+ async function startChat() {
+ console.log(chalk2.cyan.bold("\n\u{1F4AC} Agdi Interactive Mode\n"));
+ console.log(chalk2.gray('Type your coding requests. Type "exit" to quit.\n'));
+ const config = loadConfig();
+ const provider = config.defaultProvider || "puter";
+ let apiKey = "";
+ switch (provider) {
+ case "gemini":
+ apiKey = config.geminiApiKey || "";
+ break;
+ case "puter":
+ apiKey = "";
+ break;
+ default:
+ apiKey = config.geminiApiKey || "";
+ }
+ if (provider !== "puter" && !apiKey) {
+ console.log(chalk2.yellow("\u26A0\uFE0F No API key configured for " + provider));
+ console.log(chalk2.gray('Run "agdi auth" to configure, or use FREE mode.\n'));
+ return;
+ }
+ console.log(chalk2.gray(`Using provider: ${chalk2.cyan(provider)}`));
+ console.log(chalk2.gray("\u2500".repeat(50) + "\n"));
+ const pm = new ProjectManager();
+ while (true) {
+ const userInput = await input2({
+ message: chalk2.cyan("You:")
+ });
+ if (userInput.toLowerCase() === "exit" || userInput.toLowerCase() === "quit") {
+ console.log(chalk2.gray("\n\u{1F44B} Goodbye!\n"));
+ break;
+ }
+ if (!userInput.trim()) {
+ continue;
+ }
+ const spinner = ora("Thinking...").start();
+ try {
+ const llm = createLLMProvider(provider, { apiKey, model: config.defaultModel });
+ if (userInput.toLowerCase().includes("create") || userInput.toLowerCase().includes("build") || userInput.toLowerCase().includes("make")) {
+ spinner.text = "Generating application...";
+ pm.create("my-app", userInput);
+ const { plan, files } = await generateApp(userInput, llm, (step) => {
+ spinner.text = step;
+ });
+ pm.updateFiles(files);
+ spinner.succeed("Application generated!");
+ console.log(chalk2.green("\n\u{1F4C1} Files created:"));
+ for (const file of files) {
+ console.log(chalk2.gray(` - ${file.path}`));
+ }
+ const shouldWrite = await input2({
+ message: "Write files to disk? (y/n):",
+ default: "y"
+ });
+ if (shouldWrite.toLowerCase() === "y") {
+ const dir = await input2({
+ message: "Output directory:",
+ default: "./generated-app"
+ });
+ await writeProject(pm.get(), dir);
+ console.log(chalk2.green(`
+ \u2705 Files written to ${dir}
+ `));
+ }
+ } else {
+ const response = await llm.generate(userInput, SYSTEM_PROMPT2);
+ spinner.stop();
+ console.log(chalk2.cyan("\nAgdi: ") + response.text + "\n");
+ }
+ } catch (error) {
+ spinner.fail("Error");
+ console.error(chalk2.red(error instanceof Error ? error.message : String(error)));
+ console.log("");
+ }
+ }
+ }
+
+ // src/index.ts
+ var BANNER = `
+ ${chalk3.cyan(` ___ __ _ `)}
+ ${chalk3.cyan(` / | ____ _____/ /(_) `)}
+ ${chalk3.cyan(` / /| | / __ \`/ __ // / `)}
+ ${chalk3.cyan(` / ___ |/ /_/ / /_/ // / `)}
+ ${chalk3.cyan(`/_/ |_|\\__, /\\__,_//_/ `)}
+ ${chalk3.cyan(` /____/ `)}
+ `;
+ var program = new Command();
+ console.log(BANNER);
+ console.log(chalk3.gray(" The Open Source AI Architect"));
+ console.log(chalk3.gray(" \u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\u2500\n"));
+ program.name("agdi").description(chalk3.cyan("\u{1F680} AI-powered app generator - build apps from natural language")).version("1.0.0");
+ program.command("auth").description("Login or configure API keys").option("--status", "Show authentication status").action(async (options) => {
+ if (options.status) {
+ await showStatus();
+ } else {
+ await login();
+ }
+ });
+ program.command("chat").description("Start an interactive coding session").action(startChat);
+ program.command("init").description("Create a new project interactively").action(async () => {
+ const config = loadConfig();
+ const provider = await select2({
+ message: "Select AI provider:",
+ choices: [
+ { name: "\u{1F193} Agdi Cloud (FREE - No API key!)", value: "puter" },
+ { name: "\u{1F511} Gemini (Requires API key)", value: "gemini" }
+ ]
+ });
+ if (provider === "gemini" && !config.geminiApiKey) {
+ console.log(chalk3.yellow("\n\u26A0\uFE0F No Gemini API key configured\n"));
+ const apiKey = await input3({
+ message: "Enter your Gemini API key:",
+ validate: (v) => v.trim().length > 0 || "API key is required"
+ });
+ config.geminiApiKey = apiKey.trim();
+ saveConfig(config);
+ console.log(chalk3.green("\u2705 API key saved\n"));
+ }
+ const prompt = await input3({
+ message: "What would you like to build?",
+ default: "A modern todo app with categories, dark mode, and local storage"
+ });
+ let model;
+ if (provider === "puter") {
+ model = await select2({
+ message: "Select AI model (400+ available):",
+ choices: [
+ { name: "\u26A1 GPT-4.1 Nano (Fast)", value: "gpt-4.1-nano" },
+ { name: "\u{1F680} Claude Sonnet 4 (Best)", value: "claude-sonnet-4" },
+ { name: "\u{1F9E0} Claude 3.5 Sonnet", value: "claude-3-5-sonnet" },
+ { name: "\u{1F48E} GPT-4o", value: "gpt-4o" },
+ { name: "\u{1F525} Gemini 2.5 Flash", value: "google/gemini-2.5-flash" },
+ { name: "\u{1F999} Llama 3.3 70B", value: "meta/llama-3.3-70b" },
+ { name: "\u{1F30A} DeepSeek R1", value: "deepseek/deepseek-r1" },
+ { name: "\u{1F916} Mistral Large", value: "mistral/mistral-large" },
+ { name: "\u{1F47D} Grok 2", value: "x-ai/grok-2" }
+ ]
+ });
+ } else {
+ model = await select2({
+ message: "Select AI model:",
+ choices: [
+ { name: "Gemini 2.5 Flash (Fast)", value: "gemini-2.5-flash" },
+ { name: "Gemini 2.5 Pro (Best quality)", value: "gemini-2.5-pro" }
+ ]
+ });
+ }
+ const defaultDir = `./${prompt.split(" ").slice(0, 3).join("-").toLowerCase().replace(/[^a-z0-9-]/g, "")}`;
+ const outputDir = await input3({
+ message: "Output directory:",
+ default: defaultDir
+ });
+ const shouldProceed = await confirm2({
+ message: `Generate app in ${chalk3.cyan(outputDir)} using ${chalk3.yellow(model)}?`,
+ default: true
+ });
+ if (!shouldProceed) {
+ console.log(chalk3.yellow("\nCancelled."));
+ return;
+ }
+ console.log("");
+ const spinner = ora2("Initializing AI architect...").start();
+ try {
+ const llm = createLLMProvider(provider, {
+ apiKey: provider === "gemini" ? config.geminiApiKey : "",
+ model
+ });
+ const pm = new ProjectManager();
+ pm.create(outputDir.replace("./", ""), prompt);
+ const { plan, files } = await generateApp(prompt, llm, (step, file) => {
+ spinner.text = file ? `${step} ${chalk3.gray(file)}` : step;
+ });
+ pm.updateFiles(files);
+ pm.updateDependencies(plan.dependencies);
+ spinner.text = "Writing files to disk...";
+ await writeProject(pm.get(), outputDir);
+ spinner.succeed(chalk3.green("App generated successfully!"));
+ console.log(chalk3.gray("\n\u{1F4C1} Project created at:"), chalk3.cyan(outputDir));
+ console.log(chalk3.gray(`\u{1F4C4} ${files.length} files generated
+ `));
+ console.log(chalk3.white("Next steps:\n"));
+ console.log(chalk3.gray(` cd ${outputDir}`));
+ console.log(chalk3.gray(" npm install"));
+ console.log(chalk3.gray(" npm run dev\n"));
+ } catch (error) {
+ spinner.fail(chalk3.red("Generation failed"));
+ console.error(chalk3.red("\n" + (error instanceof Error ? error.message : String(error))));
+ process.exit(1);
+ }
+ });
+ program.command("generate <prompt>").alias("g").description("Generate an app from a prompt").option("-p, --provider <provider>", "AI provider (puter or gemini)", "puter").option("-m, --model <model>", "AI model to use", "claude-sonnet-4").option("-o, --output <dir>", "Output directory", "./generated-app").action(async (prompt, options) => {
+ const config = loadConfig();
+ const provider = options.provider;
+ if (provider === "gemini" && !config.geminiApiKey) {
+ console.error(chalk3.red("\u274C No Gemini API key configured. Run: agdi auth"));
+ console.error(chalk3.yellow("\u{1F4A1} Tip: Use --provider puter for FREE access without API keys!"));
+ process.exit(1);
+ }
+ const spinner = ora2(`Generating app with ${chalk3.cyan(options.model)}...`).start();
+ try {
+ const llm = createLLMProvider(provider, {
+ apiKey: provider === "gemini" ? config.geminiApiKey : "",
+ model: options.model
+ });
+ const pm = new ProjectManager();
+ pm.create(options.output.replace("./", ""), prompt);
+ const { plan, files } = await generateApp(prompt, llm, (step) => {
+ spinner.text = step;
+ });
+ pm.updateFiles(files);
+ pm.updateDependencies(plan.dependencies);
+ await writeProject(pm.get(), options.output);
+ spinner.succeed(`App generated in ${chalk3.cyan(options.output)}`);
+ } catch (error) {
+ spinner.fail("Generation failed");
+ console.error(chalk3.red(error instanceof Error ? error.message : String(error)));
+ process.exit(1);
+ }
+ });
+ program.command("config").description("Show or modify configuration").option("--show", "Show current configuration").action(async (options) => {
+ await showStatus();
+ });
+ program.command("models").description("List available AI models").action(() => {
+ console.log(chalk3.cyan.bold("\n\u{1F916} Available Puter.com Models (FREE - No API key!)\n"));
+ const categories = {
+ "OpenAI": ["gpt-4.1-nano", "gpt-4.1-mini", "gpt-4o", "gpt-4-turbo", "o1", "o1-mini"],
+ "Anthropic": ["claude-sonnet-4", "claude-3-5-sonnet", "claude-3-opus"],
+ "Google": ["google/gemini-2.5-flash", "google/gemini-2.5-pro"],
+ "Open Source": ["meta/llama-3.3-70b", "mistral/mistral-large", "deepseek/deepseek-r1", "x-ai/grok-2"]
+ };
+ for (const [category, models] of Object.entries(categories)) {
+ console.log(chalk3.yellow(`
+ ${category}:`));
+ for (const model of models) {
+ const name = PUTER_MODELS[model] || model;
+ console.log(chalk3.gray(` - ${model}`) + chalk3.white(` (${name})`));
+ }
+ }
+ console.log(chalk3.gray("\n ... and 400+ more models available!\n"));
+ console.log(chalk3.white('Usage: agdi generate "prompt" -p puter -m claude-sonnet-4\n'));
+ });
+ program.action(() => {
+ program.help();
+ });
+ program.parse();
package/package.json ADDED
@@ -0,0 +1,63 @@
+ {
+ "name": "agdi",
+ "version": "1.0.0",
+ "description": "AI-powered app generator - build full-stack apps from natural language in your terminal",
+ "type": "module",
+ "bin": {
+ "agdi": "./bin/agdi.js"
+ },
+ "main": "./dist/index.js",
+ "files": [
+ "bin",
+ "dist",
+ "README.md"
+ ],
+ "scripts": {
+ "build": "tsup src/index.ts --format esm --clean",
+ "dev": "tsx src/index.ts",
+ "start": "node bin/agdi.js",
+ "prepublishOnly": "npm run build"
+ },
+ "keywords": [
+ "cli",
+ "ai",
+ "code-generation",
+ "app-generator",
+ "llm",
+ "gpt",
+ "claude",
+ "gemini",
+ "react",
+ "vite",
+ "full-stack"
+ ],
+ "author": "Agdi Systems Inc.",
+ "license": "MIT",
+ "repository": {
+ "type": "git",
+ "url": "https://github.com/agdi-dev/agdi"
+ },
+ "homepage": "https://agdi.dev",
+ "bugs": {
+ "url": "https://github.com/agdi-dev/agdi/issues"
+ },
+ "engines": {
+ "node": ">=18.0.0"
+ },
+ "devDependencies": {
+ "tsup": "^8.0.0",
+ "tsx": "^4.7.0",
+ "typescript": "^5.4.0",
+ "@types/fs-extra": "^11.0.0",
+ "@types/node": "^20.0.0"
+ },
+ "dependencies": {
+ "@google/genai": "^1.0.0",
+ "@inquirer/prompts": "^5.0.0",
+ "chalk": "^5.3.0",
+ "commander": "^12.0.0",
+ "fs-extra": "^11.2.0",
+ "jszip": "^3.10.0",
+ "ora": "^8.0.0"
+ }
+ }