overai 1.4.15 → 1.4.16

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2)
  1. package/dist/cli/create.js +72 -18
  2. package/package.json +1 -1
package/dist/cli/create.js CHANGED
@@ -41,8 +41,30 @@ const child_process_1 = require("child_process");
  const util_1 = require("util");
  const readline = __importStar(require("readline"));
  const execAsync = (0, util_1.promisify)(child_process_1.exec);
+ const rl = readline.createInterface({
+     input: process.stdin,
+     output: process.stdout
+ });
  // 1. Define Tools for the Agent
  const fileTools = [
+     {
+         name: "ask_user",
+         description: "Ask the user for input or confirmation. Use this when you need clarification or want to confirm an action.",
+         parameters: {
+             type: "object",
+             properties: {
+                 question: { type: "string", description: "The question to ask the user" }
+             },
+             required: ["question"]
+         },
+         execute: async ({ question: q }) => {
+             return new Promise((resolve) => {
+                 rl.question(`\n❓ ${q}\n> `, (answer) => {
+                     resolve(answer);
+                 });
+             });
+         }
+     },
      {
          name: "write_file",
          description: "Write content to a file. Use this to create code files.",
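Note: the new 'ask_user' tool works by wrapping Node's callback-based 'rl.question' in a Promise so the agent loop can simply await the user's answer. A minimal, self-contained sketch of that pattern, using only the built-in readline module ('promptUser' is a hypothetical name for illustration, not part of the package):

```js
// Sketch only: the promise-wrapped readline prompt that 'ask_user' relies on.
const readline = require("readline");

const rl = readline.createInterface({ input: process.stdin, output: process.stdout });

function promptUser(question) {
    // rl.question is callback-based; wrapping it lets callers await the answer.
    return new Promise((resolve) => {
        rl.question(`\n❓ ${question}\n> `, (answer) => resolve(answer));
    });
}

(async () => {
    const answer = await promptUser("Tailwind or plain CSS?");
    console.log(`You chose: ${answer}`);
    rl.close(); // release stdin so the process can exit
})();
```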
@@ -112,11 +134,12 @@ const fileTools = [
          }
      }
  ];
- const rl = readline.createInterface({
-     input: process.stdin,
-     output: process.stdout
- });
  async function main() {
+     // 3. Get User Input
+     const question = (query) => {
+         return new Promise((resolve) => rl.question(query, resolve));
+     };
+     // 3. Get User Input already defined above
      // Model Selection Logic
      let model = "gpt-4o";
      const args = process.argv.slice(2);
@@ -127,24 +150,53 @@ async function main() {
          args.splice(modelArgIndex, 2); // Remove flag from args so it doesn't become part of the prompt
      }
      else {
-         // Auto-detect best available model based on env vars
+         // Interactive Selection
+         const availableModels = [];
          if (process.env.OPENAI_API_KEY) {
-             model = "gpt-4o";
+             availableModels.push({ name: "OpenAI: GPT-4o", value: "gpt-4o" });
+             availableModels.push({ name: "OpenAI: GPT-4o-mini", value: "gpt-4o-mini" });
          }
-         else if (process.env.GOOGLE_API_KEY) {
-             model = "google/gemini-2.5-flash";
+         if (process.env.GOOGLE_API_KEY) {
+             availableModels.push({ name: "Google: Gemini 2.0 Flash", value: "google/gemini-2.0-flash-exp" });
+             availableModels.push({ name: "Google: Gemini 1.5 Pro", value: "google/gemini-1.5-pro" });
          }
-         else if (process.env.ANTHROPIC_API_KEY) {
-             model = "anthropic/claude-3-5-sonnet-20240620";
+         if (process.env.ANTHROPIC_API_KEY) {
+             availableModels.push({ name: "Anthropic: Claude 3.5 Sonnet", value: "anthropic/claude-3-5-sonnet-20240620" });
+             availableModels.push({ name: "Anthropic: Claude 3 Haiku", value: "anthropic/claude-3-haiku-20240307" });
          }
-         else {
+         if (process.env.DEEPSEEK_API_KEY) {
+             availableModels.push({ name: "DeepSeek: V3", value: "deepseek/deepseek-chat" });
+         }
+         if (availableModels.length === 0) {
              console.error("❌ Error: No API Key found.");
-             console.error("Please set ONE of the following environment variables:");
+             console.error("Please set at least ONE of the following environment variables:");
              console.error(" export OPENAI_API_KEY=sk-...");
              console.error(" export GOOGLE_API_KEY=AIza...");
              console.error(" export ANTHROPIC_API_KEY=sk-ant-...");
              process.exit(1);
          }
+         // If we have models, ask the user
+         console.log("\n🤖 **Select AI Model**:");
+         availableModels.forEach((m, index) => {
+             console.log(` ${index + 1}. ${m.name}`);
+         });
+         console.log(` ${availableModels.length + 1}. Custom (enter model name manually)`);
+         const choice = await question(`\n👉 Choose a model (1-${availableModels.length + 1}) [default: 1]: `);
+         const choiceIndex = parseInt(choice.trim()) - 1;
+         if (!choice.trim()) {
+             model = availableModels[0].value;
+         }
+         else if (choiceIndex >= 0 && choiceIndex < availableModels.length) {
+             model = availableModels[choiceIndex].value;
+         }
+         else if (choiceIndex === availableModels.length) {
+             model = await question("👉 Enter custom model name (e.g., 'ollama/llama3'): ");
+         }
+         else {
+             // Fallback to first available if invalid input
+             if (availableModels.length > 0)
+                 model = availableModels[0].value;
+         }
      }
      console.log(`🧠 Using AI Model: \x1b[36m${model}\x1b[0m`);
      console.log(`📂 Working in: ${process.cwd()}`);
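Note: the menu handling above turns the raw reply into a zero-based index with parseInt, falls back to the first model on empty or invalid input, and reserves the last slot for a custom model name. A hedged sketch of that branching as a standalone function ('resolveChoice' and the "CUSTOM" sentinel are hypothetical, for illustration only):

```js
// Hypothetical helper mirroring the choice-handling branches in the diff above.
// Returns a model value, or the sentinel "CUSTOM" when the extra menu slot is picked.
function resolveChoice(choice, availableModels) {
    const trimmed = choice.trim();
    if (!trimmed) {
        return availableModels[0].value;               // empty reply -> default (option 1)
    }
    const index = parseInt(trimmed, 10) - 1;           // 1-based menu number -> array index
    if (index >= 0 && index < availableModels.length) {
        return availableModels[index].value;           // a listed model was chosen
    }
    if (index === availableModels.length) {
        return "CUSTOM";                               // last slot: caller prompts for a name
    }
    return availableModels[0].value;                   // NaN or out of range: fall back to option 1
}

// Example: with two models, "" and "junk" both yield the first value, "2" the second, "3" -> "CUSTOM".
const models = [{ value: "gpt-4o" }, { value: "gpt-4o-mini" }];
console.log(resolveChoice("", models), resolveChoice("2", models), resolveChoice("3", models));
```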
@@ -161,7 +213,9 @@ async function main() {
  
  PROCESS:
  1. ANALYZE: Understand the user's request and technology stack (Node, Python, React, etc.).
+ - If the request is vague or you need preferences (e.g., "Tailwind or CSS?"), use 'ask_user' tool.
  2. PLAN: Decide on the directory structure and necessary files.
+ - You CAN ask the user for confirmation of your plan if it involves many files.
  3. SCAFFOLD: Create the project directory (e.g. "mkdir my-project").
  4. IMPLEMENT: Create the core files with working code.
  IMPORTANT: When using 'write_file', you MUST use the project directory prefix (e.g. "my-project/index.js").
@@ -175,14 +229,12 @@ async function main() {
  - Do not rely on 'cd' in 'run_command' to change the directory for 'write_file' calls.
  - Be comprehensive: Include README.md, .gitignore, and config files.
  - If something fails, try to fix it or report the specific error.
- - Do not ask for permission for each step, proceed autonomously until completion.`,
+ - Do not ask for permission for each step, proceed autonomously until completion.
+ - INTERACTIVITY: Use 'ask_user' when you need key decisions from the user. Don't be too chatty, but ask when it matters.`,
          llm: model,
          tools: fileTools
      });
-     // 3. Get User Input
-     const question = (query) => {
-         return new Promise((resolve) => rl.question(query, resolve));
-     };
+     // 3. Get User Input (Defined at top of main)
      console.log("\n🚀 **OverAI Auto-Coder** initialized.");
      console.log("I can build any project for you (Node.js, Python, simple websites, scripts...).");
      // If argument provided via CLI, use it. Otherwise ask.
@@ -193,7 +245,6 @@ async function main() {
      else {
          console.log(`\n🛠️ Request received: "${userRequest}"`);
      }
-     rl.close();
      if (!userRequest.trim()) {
          console.log("❌ No request provided. Exiting.");
          return;
@@ -220,5 +271,8 @@ async function main() {
      catch (error) {
          console.error("❌ Mission Failed:", error);
      }
+     finally {
+         rl.close();
+     }
  }
  main();
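Note: with 'rl' now created at module scope and 'rl.close()' moved into a 'finally' block, the readline interface is released whether the agent run succeeds or throws, so an open stdin no longer keeps the process alive. A minimal sketch of that shape ('runAgent' is a hypothetical stand-in, not part of the package):

```js
// Sketch of the try/catch/finally shape used in main(); 'runAgent' stands in for the agent call.
const readline = require("readline");
const rl = readline.createInterface({ input: process.stdin, output: process.stdout });

const runAgent = async () => { /* agent work would happen here */ };

async function main() {
    try {
        await runAgent();
    }
    catch (error) {
        console.error("❌ Mission Failed:", error);
    }
    finally {
        rl.close(); // always release stdin so the process can exit cleanly
    }
}
main();
```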
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
      "name": "overai",
-     "version": "1.4.15",
+     "version": "1.4.16",
      "description": "OverAI TypeScript AI Agents Framework - Build, Deploy, and Monetize AI Agents in Minutes",
      "main": "dist/index.js",
      "types": "dist/index.d.ts",