awel 0.2.0 → 0.2.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +22 -5
- package/README.zh-CN.md +22 -5
- package/dist/cli/onboarding.js +46 -21
- package/dist/cli/providers/vercel.js +7 -1
- package/package.json +1 -1
package/README.md
CHANGED
@@ -8,16 +8,24 @@ AI-powered development overlay for Next.js. Awel runs a proxy in front of your d
 
 ## Quick Start
 
-```bash
-# Skip if you're already in a Next.js app
-npx create-next-app@latest my-app && cd my-app
+### Create a new project
 
+```bash
 # Set up at least one AI provider (pick one):
 export ANTHROPIC_API_KEY="sk-ant-..."        # Anthropic API
 export OPENAI_API_KEY="sk-..."               # OpenAI
 export GOOGLE_GENERATIVE_AI_API_KEY="..."    # Google AI
 # Or install the Claude CLI: https://docs.anthropic.com/en/docs/claude-code
 
+npx awel create
+```
+
+This scaffolds a new Next.js project and marks it for creation mode. Follow the instructions to `cd` into the project and run `npx awel dev`. You'll see a full-page creation UI where you describe what you want to build — the AI agent generates the entire app for you, then transitions to the normal Awel overlay.
+
+### Use with an existing project
+
+```bash
+cd my-existing-next-app
 npx awel dev
 ```
 
@@ -25,10 +33,11 @@ Awel needs at least one configured provider to function. See [Supported Models](
 
 This starts Awel on port 3001 and proxies your Next.js dev server on port 3000. Open `http://localhost:3001` to see your app with the Awel overlay.
 
-###
+### Commands
 
 ```
-awel
+awel create           Create a new Next.js project with Awel
+awel dev [options]    Start the development server with Awel overlay
 
   -p, --port <port>   Target app port (default: 3000)
   -v, --verbose       Print LLM stream events to stderr
@@ -65,6 +74,13 @@ Awel uses the [Vercel AI SDK](https://sdk.vercel.ai) and supports multiple provi
 
 Switch models at any time from the dropdown in the dashboard header.
 
+### Additional Environment Variables
+
+| Variable | Description |
+|----------|-------------|
+| `OPENAI_BASE_URL` | Custom base URL for the OpenAI provider (e.g. a proxy or compatible API). Defaults to `https://api.openai.com/v1`. |
+| `AWEL_MAX_OUTPUT_TOKENS` | Maximum number of tokens the model can generate per response. Applies to all providers. |
+
 ## Agent Tools
 
 The AI agent has access to:
@@ -94,6 +110,7 @@ The AI agent has access to:
 - **Diff review** — review a summary of all file changes before accepting
 - **Dark mode** — follows your system preference
 - **i18n** — English and Chinese
+- **Creation mode** — `awel create` scaffolds a new project and launches a full-page AI chat where you describe your app and the agent builds it from scratch
 
 ## Development
 
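Both variables documented in the new table are plain environment variables read at startup. A minimal usage sketch, set before launching the dev server (the base URL and token cap below are illustrative values, not package defaults):

```bash
# Illustrative values: any OpenAI-compatible endpoint and a reasonable output cap
export OPENAI_BASE_URL="https://your-proxy.example.com/v1"
export AWEL_MAX_OUTPUT_TOKENS=4096
npx awel dev
```

Leaving either variable unset keeps the previous behavior: the official OpenAI endpoint and the provider's own output-token default.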
package/README.zh-CN.md
CHANGED
@@ -8,16 +8,24 @@
 
 ## 快速开始
 
-```bash
-# 如果你已经在一个 Next.js 项目中,可以跳过这一步
-npx create-next-app@latest my-app && cd my-app
+### 创建新项目
 
+```bash
 # 至少配置一个 AI 服务商(任选其一):
 export ANTHROPIC_API_KEY="sk-ant-..."        # Anthropic API
 export OPENAI_API_KEY="sk-..."               # OpenAI
 export GOOGLE_GENERATIVE_AI_API_KEY="..."    # Google AI
 # 或安装 Claude CLI:https://docs.anthropic.com/en/docs/claude-code
 
+npx awel create
+```
+
+此命令会创建一个新的 Next.js 项目并标记为创建模式。按照提示 `cd` 进入项目目录并运行 `npx awel dev`。你会看到一个全屏创建界面,描述你想构建的应用——AI 智能体会为你生成整个应用,完成后自动切换到正常的 Awel 浮层模式。
+
+### 在已有项目中使用
+
+```bash
+cd my-existing-next-app
 npx awel dev
 ```
 
@@ -25,10 +33,11 @@ Awel 需要至少一个已配置的服务商才能运行。完整列表见[支
 
 Awel 会在端口 3001 启动,并代理运行在端口 3000 的 Next.js 开发服务器。打开 `http://localhost:3001` 即可看到带有 Awel 浮层的应用。
 
-###
+### 命令
 
 ```
-awel
+awel create           创建一个带有 Awel 的新 Next.js 项目
+awel dev [options]    启动带有 Awel 浮层的开发服务器
 
   -p, --port <port>   目标应用端口(默认:3000)
   -v, --verbose       将 LLM 流式事件输出到 stderr
@@ -65,6 +74,13 @@ Awel 使用 [Vercel AI SDK](https://sdk.vercel.ai),支持多个服务商。设
 
 可随时在面板顶部的下拉菜单中切换模型。
 
+### 额外环境变量
+
+| 变量 | 说明 |
+|------|------|
+| `OPENAI_BASE_URL` | OpenAI 服务商的自定义 Base URL(例如代理或兼容 API)。默认为 `https://api.openai.com/v1`。 |
+| `AWEL_MAX_OUTPUT_TOKENS` | 模型单次响应的最大生成 token 数。对所有服务商生效。 |
+
 ## 智能体工具
 
 AI 智能体可使用以下工具:
@@ -94,6 +110,7 @@ AI 智能体可使用以下工具:
 - **Diff 审查** — 在接受变更前查看所有文件修改的摘要
 - **深色模式** — 跟随系统偏好
 - **国际化** — 支持英文和中文
+- **创建模式** — `awel create` 创建新项目并启动全屏 AI 对话界面,描述你的应用,智能体从零开始为你构建
 
 ## 开发
 
package/dist/cli/onboarding.js
CHANGED
@@ -1,25 +1,50 @@
 import { readAwelConfig, writeAwelConfig } from './awel-config.js';
 import { getAvailableProviders, PROVIDER_ENV_KEYS, PROVIDER_LABELS } from './providers/registry.js';
 import { awel } from './logger.js';
-function
-
-
-
-
-
-
-
+async function promptProviderSetup() {
+  const p = await import('@clack/prompts');
+  p.log.warn('No LLM providers are configured.');
+  p.log.message('Awel needs at least one AI provider to function.\n');
+  const provider = await p.select({
+    message: 'Which provider would you like to set up?',
+    options: [
+      { value: 'claude-code', label: 'Claude Code', hint: 'Uses Claude CLI binary' },
+      { value: 'anthropic', label: 'Anthropic API', hint: 'ANTHROPIC_API_KEY' },
+      { value: 'openai', label: 'OpenAI', hint: 'OPENAI_API_KEY' },
+      { value: 'google-ai', label: 'Google AI', hint: 'GOOGLE_GENERATIVE_AI_API_KEY' },
+      { value: 'vercel-gateway', label: 'Vercel AI Gateway', hint: 'AI_GATEWAY_API_KEY' },
+      { value: 'qwen', label: 'Qwen', hint: 'DASHSCOPE_API_KEY' },
+      { value: 'minimax', label: 'MiniMax', hint: 'MINIMAX_API_KEY' },
+    ],
+  });
+  if (p.isCancel(provider)) {
+    p.cancel('Cancelled');
+    process.exit(0);
+  }
+  if (provider === 'claude-code') {
+    p.note('Install the Claude CLI:\n\n' +
+      '  npm install -g @anthropic-ai/claude-code\n\n' +
+      'Then authenticate:\n\n' +
+      '  claude login', 'Claude Code Setup');
+  }
+  else if (provider === 'openai') {
+    const envKey = PROVIDER_ENV_KEYS[provider];
+    p.note(`Export your API key:\n\n` +
+      `  export ${envKey}="your-api-key"\n\n` +
+      `Or add it to your .env file:\n\n` +
+      `  ${envKey}=your-api-key\n\n` +
+      `To use a custom base URL (e.g. a proxy or compatible API):\n\n` +
+      `  export OPENAI_BASE_URL="https://your-proxy.com/v1"`, 'OpenAI Setup');
+  }
+  else {
+    const envKey = PROVIDER_ENV_KEYS[provider];
     const label = PROVIDER_LABELS[provider] ?? provider;
-
-
-
+    p.note(`Export your API key:\n\n` +
+      `  export ${envKey}="your-api-key"\n\n` +
+      `Or add it to your .env file:\n\n` +
+      `  ${envKey}=your-api-key`, `${label} Setup`);
   }
-
-  const label = PROVIDER_LABELS['claude-code'] ?? 'Claude Code';
-  awel.log(`  ${label}`);
-  awel.log('  Install the Claude CLI: https://docs.anthropic.com/en/docs/claude-code');
-  awel.log('');
-  awel.log('Then run `awel dev` again.');
+  p.log.message('Then run `awel dev` again.');
 }
 export async function ensureProvider(projectCwd) {
   const config = readAwelConfig(projectCwd);
@@ -41,17 +66,17 @@ export async function ensureProvider(projectCwd) {
     return;
   }
   if (isFirstRun && available.length === 0) {
-    // First run with NO providers — show welcome +
+    // First run with NO providers — show welcome + interactive setup, exit
     awel.log('');
     awel.log('Welcome to Awel!');
     awel.log('AI-powered development overlay for Next.js');
     awel.log('');
-
+    await promptProviderSetup();
     process.exit(1);
   }
   if (!isFirstRun && available.length === 0) {
-    // Subsequent run with NO providers —
-
+    // Subsequent run with NO providers — interactive setup, exit
+    await promptProviderSetup();
    process.exit(1);
  }
  // Subsequent run with providers available — silent pass-through
package/dist/cli/providers/vercel.js
CHANGED

@@ -149,7 +149,9 @@ function createModel(modelId, providerType, cwd) {
     return anthropic(modelId);
   }
   else if (providerType === 'openai') {
-    const openai = createOpenAI({
+    const openai = createOpenAI({
+      baseURL: process.env.OPENAI_BASE_URL,
+    });
     return openai(modelId);
   }
   else if (providerType === 'google-ai') {
@@ -230,12 +232,16 @@ export function createVercelProvider(modelId, providerType) {
   const systemPrompt = isSelfContained
     ? undefined
     : `${basePrompt}\n\nThe user's project directory is: ${config.projectCwd}`;
+  const maxOutputTokens = process.env.AWEL_MAX_OUTPUT_TOKENS
+    ? parseInt(process.env.AWEL_MAX_OUTPUT_TOKENS, 10)
+    : undefined;
   const streamTextArgs = {
     model,
     ...(systemPrompt && { system: systemPrompt }),
     messages,
     tools,
     ...(!isSelfContained && { stopWhen: stepCountIs(25) }),
+    ...(maxOutputTokens && { maxOutputTokens }),
     abortSignal: abortController.signal,
   };
   logEvent('stream:start', `model=${modelId} provider=${providerType} messages=${messages.length}`);
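The conditional spread added in `createVercelProvider` means a malformed `AWEL_MAX_OUTPUT_TOKENS` degrades gracefully instead of erroring. A standalone sketch of that behavior (the same pattern in isolation, not code from the package):

```js
// Same pattern as the new code in vercel.js, isolated for illustration.
// parseInt on a non-numeric value yields NaN; NaN (and 0) are falsy, so the
// conditional spread contributes nothing and the provider's default limit applies.
function buildArgs(raw) {
  const maxOutputTokens = raw ? parseInt(raw, 10) : undefined;
  return {
    ...(maxOutputTokens && { maxOutputTokens }),
  };
}

console.log(buildArgs('4096'));    // { maxOutputTokens: 4096 }
console.log(buildArgs('abc'));     // {}  (NaN is falsy, the spread adds nothing)
console.log(buildArgs(undefined)); // {}  (variable unset)
```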