@louloulinx/metagpt 0.1.3 → 0.1.5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README-CN.md CHANGED
@@ -14,23 +14,23 @@
14
14
  ### 快速开始
15
15
  ```bash
16
16
  # 使用 bun 安装
17
- bun install @louloulin/metagpt
17
+ bun install @louloulinx/metagpt
18
18
 
19
19
  # 或使用 npm
20
- npm install @louloulin/metagpt
20
+ npm install @louloulinx/metagpt
21
21
  ```
22
22
 
23
23
  ### 基础配置
24
24
  ```typescript
25
25
  // 配置LLM提供商
26
- import { config } from " @louloulin/metagpt/config";
26
+ import { config } from "@louloulinx/metagpt";
27
27
 
28
28
  config.OPENAI_API_KEY = "sk-..."; // 你的API密钥
29
29
  ```
30
30
 
31
31
  ### 简单示例
32
32
  ```typescript
33
- import { Team, ProductManager, Architect, Engineer } from " @louloulin/metagpt/roles";
33
+ import { Team, ProductManager, Architect, Engineer } from "@louloulinx/metagpt";
34
34
 
35
35
  // 创建团队
36
36
  const team = new Team();
@@ -76,7 +76,6 @@ await team.runProject("实现一个简单的待办事项管理应用");
76
76
  - AI团队模拟与优化
77
77
  - 企业级应用智能化改造
78
78
 
79
- [![Open in GitHub Codespaces](https://github.com/codespaces/badge.svg)](https://codespaces.new/geekan/MetaGPT)
80
79
 
81
80
 
82
81
  ### 核心功能特性
@@ -150,7 +149,7 @@ PROJECT_ROOT=./workspace
150
149
 
151
150
  ```typescript
152
151
  // 配置LLM提供商(以OpenAI为例)
153
- import { config } from "metagpt/config";
152
+ import { config } from "@louloulinx/metagpt";
154
153
 
155
154
  config.OPENAI_API_KEY = "sk-..."; // 你的API密钥
156
155
  config.OPENAI_API_MODEL = "gpt-4-1106-preview"; // 模型版本
@@ -158,8 +157,8 @@ config.OPENAI_API_MODEL = "gpt-4-1106-preview"; // 模型版本
158
157
 
159
158
  ### 基础使用示例(待实现)
160
159
  ```typescript
161
- import { Team, ProductManager, Architect, Engineer } from "@louloulin/metagpt/roles";
162
- import { Message } from "@louloulin/metagpt/types";
160
+ import { Team, ProductManager, Architect, Engineer } from "@louloulinx/metagpt";
161
+ import { Message } from "@louloulinx/metagpt";
163
162
 
164
163
  async function startup(idea: string) {
165
164
  // 初始化团队
@@ -223,7 +222,7 @@ PROJECT_ROOT=./workspace
223
222
 
224
223
  ```typescript
225
224
  // 配置LLM提供商(以OpenAI为例)
226
- import { config } from "metagpt/config";
225
+ import { config } from "@louloulinx/metagpt";
227
226
 
228
227
  config.OPENAI_API_KEY = "sk-..."; // 你的API密钥
229
228
  config.OPENAI_API_MODEL = "gpt-4-1106-preview"; // 模型版本
@@ -231,8 +230,9 @@ config.OPENAI_API_MODEL = "gpt-4-1106-preview"; // 模型版本
231
230
 
232
231
  ### 基础使用示例(待实现)
233
232
  ```typescript
234
- import { Team, ProductManager, Architect, Engineer } from "@louloulin/metagpt/roles";
235
- import { Message } from "@louloulin/metagpt/types";
233
+ import { Team, ProductManager, Architect, Engineer } from "@louloulinx/metagpt";
234
+ import { Message } from "@louloulinx/metagpt";
235
+
236
236
 
237
237
  async function startup(idea: string) {
238
238
  // 初始化团队
package/README.md CHANGED
@@ -31,7 +31,7 @@ npm install @louloulinx/metagpt
31
31
 
32
32
  ```typescript
33
33
  // Configure LLM provider
34
- import { config } from "@louloulinx/metagpt/config";
34
+ import { config } from "@louloulinx/metagpt";
35
35
 
36
36
  config.OPENAI_API_KEY = "sk-..."; // Your API key
37
37
  ```
@@ -39,7 +39,7 @@ config.OPENAI_API_KEY = "sk-..."; // Your API key
39
39
  ### Simple Example
40
40
 
41
41
  ```typescript
42
- import { Team, ProductManager, Architect, Engineer } from "@louloulinx/metagpt/roles";
42
+ import { Team, ProductManager, Architect, Engineer } from "@louloulinx/metagpt";
43
43
 
44
44
  // Create a team
45
45
  const team = new Team();
@@ -135,17 +135,122 @@ PROJECT_ROOT=./workspace
135
135
 
136
136
  ```typescript
137
137
  // Configure LLM provider (OpenAI example)
138
- import { config } from "@louloulinx/metagpt/config";
138
+ import { config } from "@louloulinx/metagpt";
139
139
 
140
140
  config.OPENAI_API_KEY = "sk-..."; // Your API key
141
141
  config.OPENAI_API_MODEL = "gpt-4-1106-preview"; // Model version
142
142
  ```
143
143
 
144
+ ## Tutorial Assistant Example
145
+
146
+ The Tutorial Assistant is a specialized role that can generate comprehensive tutorial documents in Markdown format.
147
+
148
+ ```typescript
149
+ import { VercelLLMProvider } from '@louloulinx/metagpt';
150
+ import { TutorialAssistant } from '@louloulinx/metagpt';
151
+ import { v4 as uuidv4 } from 'uuid';
152
+
153
+ /**
154
+ * 教程助手示例
155
+ *
156
+ * 该示例演示如何使用教程助手生成Markdown格式的教程文档
157
+ */
158
+ async function main() {
159
+ console.log(`🚀 开始执行教程生成 [${new Date().toISOString()}]`);
160
+
161
+ try {
162
+ // 从环境变量获取API密钥
163
+ const apiKey = process.env.DASHSCOPE_API_KEY;
164
+ console.log('✓ 检查环境变量');
165
+
166
+ if (!apiKey) {
167
+ console.error('❌ 错误: 请设置环境变量: DASHSCOPE_API_KEY');
168
+ process.exit(1);
169
+ }
170
+ console.log('✓ 环境变量已设置');
171
+
172
+ // 初始化Vercel LLM提供商 - 使用百炼大模型(qwen)
173
+ console.log('⚙️ 配置百炼大模型...');
174
+ const llmProvider = new VercelLLMProvider({
175
+ providerType: 'qwen',
176
+ apiKey,
177
+ model: 'qwen-plus-2025-01-25',
178
+ baseURL: 'https://dashscope.aliyuncs.com/compatible-mode/v1', // 自定义API端点
179
+ extraConfig: {
180
+ qwenOptions: {
181
+ debug: true, // 启用调试日志
182
+ },
183
+ generateOptions: {
184
+ system: '你是一位专业的教程编写专家,擅长生成高质量、结构清晰的教程文档。'
185
+ }
186
+ }
187
+ });
188
+ console.log(`✓ 模型配置完成: ${llmProvider.config?.providerType} - ${llmProvider.config?.model}`);
189
+
190
+ console.log('⚙️ 初始化教程助手...');
191
+ console.time('教程助手初始化时间');
192
+
193
+ // 创建教程助手
194
+ const tutorialAssistant = new TutorialAssistant({
195
+ llm: llmProvider,
196
+ language: 'Chinese', // 可选: 'English'
197
+ outputDir: './output/tutorials', // 可选,默认为 './tutorials'
198
+ });
199
+
200
+ console.timeEnd('教程助手初始化时间');
201
+ console.log('✓ 教程助手初始化完成');
202
+
203
+ // 设置要生成的教程主题
204
+ const topic = 'rust语言教程';
205
+ console.log(`📝 生成主题: "${topic}"`);
206
+
207
+ // 生成教程
208
+ console.log('🔄 开始生成教程...');
209
+ console.log('👉 步骤 1: 生成目录结构');
210
+ console.time('教程生成总时间');
211
+
212
+ const result = await tutorialAssistant.react({
213
+ id: uuidv4(),
214
+ role: 'user',
215
+ content: topic,
216
+ sentFrom: 'user',
217
+ sendTo: new Set(['*']),
218
+ instructContent: null,
219
+ });
220
+
221
+ console.timeEnd('教程生成总时间');
222
+ console.log('✅ 教程生成完成!');
223
+
224
+ // 提取文件路径(假设结果消息中包含文件路径信息)
225
+ const filePath = result.content.includes('saved to')
226
+ ? result.content.split('saved to ')[1].trim()
227
+ : '未找到文件路径';
228
+
229
+ console.log(`📄 生成结果: ${result.content}`);
230
+ console.log(`📂 输出文件: ${filePath}`);
231
+ console.log(`🏁 教程生成完成 [${new Date().toISOString()}]`);
232
+ } catch (error) {
233
+ console.error('❌ 生成教程时出错:', error);
234
+ if (error instanceof Error) {
235
+ console.error(`错误类型: ${error.name}`);
236
+ console.error(`错误信息: ${error.message}`);
237
+ console.error(`错误堆栈: ${error.stack}`);
238
+ }
239
+ }
240
+ }
241
+
242
+ // 运行示例
243
+ console.log('📌 教程助手示例');
244
+ main();
245
+ ```
246
+
247
+
248
+
144
249
  ## Basic Usage Example (Coming Soon)
145
250
 
146
251
  ```typescript
147
252
  import { Team, ProductManager, Architect, Engineer } from "@louloulinx/metagpt/roles";
148
- import { Message } from "@louloulinx/metagpt/types";
253
+ import { Message } from "@louloulinx/metagpt";
149
254
 
150
255
  async function startup(idea: string) {
151
256
  // Initialize team
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@louloulinx/metagpt",
3
- "version": "0.1.3",
3
+ "version": "0.1.5",
4
4
  "description": "A TypeScript implementation of MetaGPT",
5
5
  "module": "src/index.ts",
6
6
  "type": "module",