@aigne/example-mcp-github 1.9.2 → 1.10.0

Files changed (4)
  1. package/README.md +36 -4
  2. package/index.ts +38 -43
  3. package/package.json +6 -6
  4. package/usages.ts +1 -1
package/README.md CHANGED
@@ -1,6 +1,6 @@
  # GitHub MCP Server Integration

- This is a demonstration of using [AIGNE Framework](https://github.com/AIGNE-io/aigne-framework) and [GitHub MCP Server](https://github.com/modelcontextprotocol/servers/tree/main/src/github) to interact with GitHub repositories.
+ This is a demonstration of using [AIGNE Framework](https://github.com/AIGNE-io/aigne-framework) and [GitHub MCP Server](https://github.com/modelcontextprotocol/servers/tree/main/src/github) to interact with GitHub repositories. The example now supports both one-shot and interactive chat modes, along with customizable model settings and pipeline input/output.

  ```mermaid
  flowchart LR
@@ -108,9 +108,41 @@ GITHUB_TOKEN="" # Set your GitHub Personal Access Token here
  ### Run the Example

  ```bash
- pnpm start # Interactive chat interface
- # or
- pnpm example # Run predefined examples
+ pnpm start # Run in one-shot mode (default)
+
+ # Run in interactive chat mode
+ pnpm start -- --chat
+
+ # Use pipeline input
+ echo "Search for repositories related to 'modelcontextprotocol'" | pnpm start
+ ```
+
+ ### Run Options
+
+ The example supports the following command-line parameters:
+
+ | Parameter | Description | Default |
+ |-----------|-------------|---------|
+ | `--chat` | Run in interactive chat mode | Disabled (one-shot mode) |
+ | `--model <provider[:model]>` | AI model to use in format 'provider[:model]' where model is optional. Examples: 'openai' or 'openai:gpt-4o-mini' | openai |
+ | `--temperature <value>` | Temperature for model generation | Provider default |
+ | `--top-p <value>` | Top-p sampling value | Provider default |
+ | `--presence-penalty <value>` | Presence penalty value | Provider default |
+ | `--frequency-penalty <value>` | Frequency penalty value | Provider default |
+ | `--log-level <level>` | Set logging level (ERROR, WARN, INFO, DEBUG, TRACE) | INFO |
+ | `--input`, `-i <input>` | Specify input directly | None |
+
+ #### Examples
+
+ ```bash
+ # Run in chat mode (interactive)
+ pnpm start -- --chat
+
+ # Set logging level
+ pnpm start -- --log-level DEBUG
+
+ # Use pipeline input
+ echo "Search for repositories related to 'modelcontextprotocol'" | pnpm start
  ```

  ## Example
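
The new Run Options table above documents a `--model` flag that takes a `provider[:model]` value. As a purely illustrative sketch (not the actual `@aigne/cli` parser; `parseModelSpec` is a hypothetical helper), splitting such a value could look like this:

```typescript
// Hypothetical helper for illustration only — not the @aigne/cli implementation.
function parseModelSpec(spec: string): { provider: string; model?: string } {
  const separator = spec.indexOf(":");
  if (separator === -1) return { provider: spec };
  return {
    provider: spec.slice(0, separator),
    model: spec.slice(separator + 1) || undefined,
  };
}

// parseModelSpec("openai")             -> { provider: "openai" }
// parseModelSpec("openai:gpt-4o-mini") -> { provider: "openai", model: "gpt-4o-mini" }
```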
package/index.ts CHANGED
@@ -1,54 +1,49 @@
  #!/usr/bin/env bunwrapper

  import assert from "node:assert";
- import { runChatLoopInTerminal } from "@aigne/cli/utils/run-chat-loop.js";
- import { AIAgent, AIGNE, MCPAgent } from "@aigne/core";
- import { loadModel } from "@aigne/core/loader/index.js";
- import { logger } from "@aigne/core/utils/logger.js";
+ import { runWithAIGNE } from "@aigne/cli/utils/run-with-aigne.js";
+ import { AIAgent, MCPAgent } from "@aigne/core";

  const { GITHUB_TOKEN } = process.env;

  assert(GITHUB_TOKEN, "Please set the GITHUB_TOKEN environment variable");

- logger.enable(`aigne:mcp,${process.env.DEBUG}`);
-
- const model = await loadModel();
-
- const github = await MCPAgent.from({
-   command: "npx",
-   args: ["-y", "@modelcontextprotocol/server-github"],
-   env: {
-     GITHUB_TOKEN,
+ await runWithAIGNE(
+   async () => {
+     const github = await MCPAgent.from({
+       command: "npx",
+       args: ["-y", "@modelcontextprotocol/server-github"],
+       env: {
+         GITHUB_TOKEN,
+       },
+     });
+
+     const agent = AIAgent.from({
+       instructions: `\
+ ## GitHub Interaction Assistant
+ You are an assistant that helps users interact with GitHub repositories.
+ You can perform various GitHub operations like:
+ 1. Searching repositories
+ 2. Getting file contents
+ 3. Creating or updating files
+ 4. Creating issues and pull requests
+ 5. And many more GitHub operations
+
+ Always provide clear, concise responses with relevant information from GitHub.
+ `,
+       skills: [github],
+       memory: true,
+     });
+
+     return agent;
+   },
+   {
+     chatLoopOptions: {
+       welcome:
+         "Hello! I'm a chatbot that can help you interact with GitHub. Try asking me a question about GitHub repositories!",
+       defaultQuestion: "Search for repositories related to 'aigne-framework'",
+     },
    },
- });
-
- const aigne = new AIGNE({
-   model,
-   skills: [github],
- });
-
- const agent = AIAgent.from({
-   instructions: `\
- ## GitHub Interaction Assistant
- You are an assistant that helps users interact with GitHub repositories.
- You can perform various GitHub operations like:
- 1. Searching repositories
- 2. Getting file contents
- 3. Creating or updating files
- 4. Creating issues and pull requests
- 5. And many more GitHub operations
-
- Always provide clear, concise responses with relevant information from GitHub.
- `,
-   memory: true,
- });
-
- const userAgent = aigne.invoke(agent);
-
- await runChatLoopInTerminal(userAgent, {
-   welcome:
-     "Hello! I'm a chatbot that can help you interact with GitHub. Try asking me a question about GitHub repositories!",
-   defaultQuestion: "Search for repositories related to 'aigne-framework'",
- });
+ );

  process.exit(0);
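
The index.ts change above replaces the manual `loadModel`/`AIGNE`/`runChatLoopInTerminal` wiring with a single `runWithAIGNE` call, which now owns model loading, CLI flag handling, and the chat loop. For comparison, a rough sketch of the equivalent manual one-shot wiring follows, based on the removed code and the new `@aigne/openai` dependency; the string-message form of `aigne.invoke` and the `OpenAIChatModel` constructor options are assumptions not confirmed by this diff.

```typescript
import assert from "node:assert";
import { AIAgent, AIGNE, MCPAgent } from "@aigne/core";
import { OpenAIChatModel } from "@aigne/openai";

const { OPENAI_API_KEY, GITHUB_TOKEN } = process.env;
assert(OPENAI_API_KEY, "Please set the OPENAI_API_KEY environment variable");
assert(GITHUB_TOKEN, "Please set the GITHUB_TOKEN environment variable");

// Spawn the GitHub MCP server and expose it as a skill, as index.ts does.
const github = await MCPAgent.from({
  command: "npx",
  args: ["-y", "@modelcontextprotocol/server-github"],
  env: { GITHUB_TOKEN },
});

// Manual wiring of what runWithAIGNE now sets up internally.
const aigne = new AIGNE({
  model: new OpenAIChatModel({ apiKey: OPENAI_API_KEY }), // option name assumed
  skills: [github],
});

const agent = AIAgent.from({
  instructions: "You are an assistant that helps users interact with GitHub repositories.",
  memory: true,
});

// One-shot invocation instead of the interactive chat loop.
const result = await aigne.invoke(
  agent,
  "Search for repositories related to 'aigne-framework'",
);
console.log(result);

process.exit(0);
```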
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
    "name": "@aigne/example-mcp-github",
-   "version": "1.9.2",
+   "version": "1.10.0",
    "description": "A demonstration of using AIGNE Framework and GitHub MCP Server to interact with GitHub repositories",
    "author": "Arcblock <blocklet@arcblock.io> https://github.com/blocklet",
    "homepage": "https://github.com/AIGNE-io/aigne-framework/tree/main/examples/mcp-github",
@@ -16,13 +16,13 @@
      "README.md"
    ],
    "dependencies": {
-     "openai": "^4.97.0",
-     "zod": "^3.24.4",
-     "@aigne/cli": "^1.9.0",
-     "@aigne/core": "^1.14.0"
+     "@aigne/core": "^1.16.0",
+     "@aigne/cli": "^1.10.0",
+     "@aigne/openai": "^0.1.0"
    },
    "devDependencies": {
-     "@aigne/test-utils": "^0.2.0"
+     "@types/bun": "^1.2.9",
+     "@aigne/test-utils": "^0.3.0"
    },
    "scripts": {
      "start": "bun run index.ts",
package/usages.ts CHANGED
@@ -1,6 +1,6 @@
  import assert from "node:assert";
  import { AIAgent, AIGNE, MCPAgent } from "@aigne/core";
- import { OpenAIChatModel } from "@aigne/core/models/openai-chat-model.js";
+ import { OpenAIChatModel } from "@aigne/openai";

  const { OPENAI_API_KEY, GITHUB_TOKEN } = process.env;
  assert(OPENAI_API_KEY, "Please set the OPENAI_API_KEY environment variable");
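
Only the import path changes in usages.ts: `OpenAIChatModel` now comes from the separate `@aigne/openai` package introduced in package.json above, rather than from a deep path inside `@aigne/core`. A minimal sketch of constructing the model with the new import; the constructor options shown are assumptions, not part of this diff.

```typescript
import assert from "node:assert";
import { OpenAIChatModel } from "@aigne/openai";

const { OPENAI_API_KEY } = process.env;
assert(OPENAI_API_KEY, "Please set the OPENAI_API_KEY environment variable");

// apiKey/model options are assumed from common AIGNE usage and the README's
// 'openai:gpt-4o-mini' example; pass the result to `new AIGNE({ model, ... })`.
const model = new OpenAIChatModel({
  apiKey: OPENAI_API_KEY,
  model: "gpt-4o-mini",
});
```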