@aigne/cli 1.22.8 → 1.24.0

This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between those versions as they appear in their public registries.
package/CHANGELOG.md CHANGED
@@ -1,5 +1,80 @@
  # Changelog
 
+ ## [1.24.0](https://github.com/AIGNE-io/aigne-framework/compare/cli-v1.23.1...cli-v1.24.0) (2025-07-17)
+
+
+ ### Features
+
+ * **core:** support define hooks for agent in yaml ([#260](https://github.com/AIGNE-io/aigne-framework/issues/260)) ([c388e82](https://github.com/AIGNE-io/aigne-framework/commit/c388e8216134271af4d9c7def70862ea3c354c7f))
+
+
+ ### Dependencies
+
+ * The following workspace dependencies were updated
+   * dependencies
+     * @aigne/agent-library bumped to 1.21.2
+     * @aigne/agentic-memory bumped to 1.0.2
+     * @aigne/anthropic bumped to 0.9.2
+     * @aigne/bedrock bumped to 0.8.2
+     * @aigne/core bumped to 1.36.0
+     * @aigne/deepseek bumped to 0.7.2
+     * @aigne/default-memory bumped to 1.0.2
+     * @aigne/gemini bumped to 0.8.2
+     * @aigne/ollama bumped to 0.7.2
+     * @aigne/open-router bumped to 0.7.2
+     * @aigne/openai bumped to 0.10.2
+     * @aigne/xai bumped to 0.7.2
+     * @aigne/aigne-hub bumped to 0.1.2
+
+ ## [1.23.1](https://github.com/AIGNE-io/aigne-framework/compare/cli-v1.23.0...cli-v1.23.1) (2025-07-17)
+
+
+ ### Dependencies
+
+ * The following workspace dependencies were updated
+   * dependencies
+     * @aigne/agent-library bumped to 1.21.1
+     * @aigne/agentic-memory bumped to 1.0.1
+     * @aigne/anthropic bumped to 0.9.1
+     * @aigne/bedrock bumped to 0.8.1
+     * @aigne/core bumped to 1.35.0
+     * @aigne/deepseek bumped to 0.7.1
+     * @aigne/default-memory bumped to 1.0.1
+     * @aigne/gemini bumped to 0.8.1
+     * @aigne/ollama bumped to 0.7.1
+     * @aigne/open-router bumped to 0.7.1
+     * @aigne/openai bumped to 0.10.1
+     * @aigne/xai bumped to 0.7.1
+     * @aigne/aigne-hub bumped to 0.1.1
+
+ ## [1.23.0](https://github.com/AIGNE-io/aigne-framework/compare/cli-v1.22.8...cli-v1.23.0) (2025-07-15)
+
+
+ ### Features
+
+ * **memory:** support did space memory adapter ([#229](https://github.com/AIGNE-io/aigne-framework/issues/229)) ([6f69b64](https://github.com/AIGNE-io/aigne-framework/commit/6f69b64e98b963db9d6ab5357306b445385eaa68))
+ * **model:** support aigne-hub model adapter ([#253](https://github.com/AIGNE-io/aigne-framework/issues/253)) ([4b33f8d](https://github.com/AIGNE-io/aigne-framework/commit/4b33f8d1a819f52357db81d502c56b55eaa0669f))
+
+
+ ### Dependencies
+
+ * The following workspace dependencies were updated
+   * dependencies
+     * @aigne/agent-library bumped to 1.21.0
+     * @aigne/agentic-memory bumped to 1.0.0
+     * @aigne/anthropic bumped to 0.9.0
+     * @aigne/bedrock bumped to 0.8.0
+     * @aigne/core bumped to 1.34.0
+     * @aigne/deepseek bumped to 0.7.0
+     * @aigne/default-memory bumped to 1.0.0
+     * @aigne/gemini bumped to 0.8.0
+     * @aigne/observability-api bumped to 0.8.0
+     * @aigne/ollama bumped to 0.7.0
+     * @aigne/open-router bumped to 0.7.0
+     * @aigne/openai bumped to 0.10.0
+     * @aigne/xai bumped to 0.7.0
+     * @aigne/aigne-hub bumped to 0.1.0
+
  ## [1.22.8](https://github.com/AIGNE-io/aigne-framework/compare/cli-v1.22.7...cli-v1.22.8) (2025-07-14)
 
 
package/README.md CHANGED
@@ -91,10 +91,10 @@ Launch a chat loop with the specified agent.
  aigne run
 
  # Run the agent at the specified path
- aigne run path/to/agents
+ aigne run --path path/to/agents
 
  # Run the agent from a remote URL
- aigne run https://example.com/aigne-project
+ aigne run --url https://example.com/aigne-project
 
  # Run a specific agent
  aigne run --entry-agent myAgent
package/dist/constants.d.ts CHANGED
@@ -1,5 +1,5 @@
- import { DefaultMemory } from "@aigne/agent-library/default-memory/index.js";
  import type { LoadableModel } from "@aigne/core/loader/index.js";
+ import { DefaultMemory } from "@aigne/default-memory";
  export declare const AIGNE_CLI_VERSION: any;
  export declare function availableModels(): LoadableModel[];
  export declare const availableMemories: (typeof DefaultMemory)[];
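
The declaration change above, together with the `dist/constants.js` diff below, moves `DefaultMemory` off the `@aigne/agent-library/default-memory/index.js` deep import and onto the new standalone `@aigne/default-memory` package, with `AgenticMemory` coming from `@aigne/agentic-memory`. A minimal sketch of what that import migration looks like in downstream code, assuming the classes are imported directly:

```ts
// Before (1.22.x): DefaultMemory was re-exported from a deep path inside @aigne/agent-library.
// import { DefaultMemory } from "@aigne/agent-library/default-memory/index.js";

// After (1.24.0): the memory adapters ship as standalone workspace packages.
import { AgenticMemory } from "@aigne/agentic-memory";
import { DefaultMemory } from "@aigne/default-memory";

// Mirrors the CLI's updated list in dist/constants.js:
//   export const availableMemories = [DefaultMemory, AgenticMemory];
export const memories = [DefaultMemory, AgenticMemory];
```
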
package/dist/constants.js CHANGED
@@ -1,8 +1,10 @@
  import { createRequire } from "node:module";
- import { DefaultMemory } from "@aigne/agent-library/default-memory/index.js";
+ import { AgenticMemory } from "@aigne/agentic-memory";
+ import { AIGNEHubChatModel } from "@aigne/aigne-hub";
  import { AnthropicChatModel } from "@aigne/anthropic";
  import { BedrockChatModel } from "@aigne/bedrock";
  import { DeepSeekChatModel } from "@aigne/deepseek";
+ import { DefaultMemory } from "@aigne/default-memory";
  import { GeminiChatModel } from "@aigne/gemini";
  import { OllamaChatModel } from "@aigne/ollama";
  import { OpenRouterChatModel } from "@aigne/open-router";
@@ -17,7 +19,12 @@ export function availableModels() {
  .map((i) => process.env[i])
  .filter(Boolean)[0];
  const httpAgent = proxy ? new HttpsProxyAgent(proxy) : undefined;
- const clientOptions = { fetchOptions: { agent: httpAgent } };
+ const clientOptions = {
+ fetchOptions: {
+ // @ts-ignore
+ agent: httpAgent,
+ },
+ };
  return [
  {
  name: OpenAIChatModel.name,
@@ -32,10 +39,7 @@ export function availableModels() {
  create: (params) => new BedrockChatModel({
  ...params,
  clientOptions: {
- requestHandler: NodeHttpHandler.create({
- httpAgent,
- httpsAgent: httpAgent,
- }),
+ requestHandler: NodeHttpHandler.create({ httpAgent, httpsAgent: httpAgent }),
  streamCollector,
  },
  }),
@@ -60,6 +64,10 @@ export function availableModels() {
  name: XAIChatModel.name,
  create: (params) => new XAIChatModel({ ...params, clientOptions }),
  },
+ {
+ name: AIGNEHubChatModel.name,
+ create: (params) => new AIGNEHubChatModel({ ...params, clientOptions }),
+ },
  ];
  }
- export const availableMemories = [DefaultMemory];
+ export const availableMemories = [DefaultMemory, AgenticMemory];
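
`availableModels()` returns a list of `{ name, create }` factories (typed as `LoadableModel[]` in `constants.d.ts`), and 1.24.0 appends an entry for the new `AIGNEHubChatModel`. A minimal consumer-side sketch; the import path into the published `dist` output is hypothetical, and the empty `create()` params stand in for model-specific options that are not shown in this diff:

```ts
// Hypothetical import path into the CLI's published dist output; not shown in this diff.
import { availableModels } from "@aigne/cli/dist/constants.js";

// Each entry pairs the chat-model class name with a create(params) factory that
// spreads `params` into the constructor together with the proxy-aware clientOptions.
function pickModel(name: string) {
  return availableModels().find((m) => m.name === name);
}

// 1.24.0 adds an AIGNE Hub entry next to OpenAI, Anthropic, Bedrock, XAI, etc.
const hubEntry = pickModel("AIGNEHubChatModel");

// The concrete params (model id, credentials, ...) are model-specific and not part
// of this diff, so an empty object stands in here.
const model = hubEntry?.create({});
```
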
@@ -1,5 +1,5 @@
  import { type InspectOptions } from "node:util";
- import { type Agent, type Context, type ContextUsage, type Message } from "@aigne/core";
+ import { type Agent, type Context, type ContextUsage, type InvokeOptions, type Message } from "@aigne/core";
  import { promiseWithResolvers } from "@aigne/core/utils/promise.js";
  import { type Listr } from "@aigne/listr2";
  import { type AIGNEListrTaskWrapper } from "../utils/listr.js";
@@ -12,7 +12,7 @@ export declare class TerminalTracer {
  readonly options: TerminalTracerOptions;
  constructor(context: Context, options?: TerminalTracerOptions);
  private tasks;
- run(agent: Agent, input: Message): Promise<{
+ run(agent: Agent, input: Message, options?: InvokeOptions): Promise<{
  result: Message;
  context: Context<import("@aigne/core").UserContext>;
  }>;
@@ -18,7 +18,7 @@ export class TerminalTracer {
  this.options = options;
  }
  tasks = {};
- async run(agent, input) {
+ async run(agent, input, options) {
  await this.context.observer?.serve();
  const context = this.context.newContext({ reset: true });
  const listr = new AIGNEListr({
@@ -89,7 +89,7 @@ export class TerminalTracer {
  context.on("agentSucceed", onAgentSucceed);
  context.on("agentFailed", onAgentFailed);
  try {
- const result = await listr.run(() => context.invoke(agent, input, { streaming: true, newContext: false }));
+ const result = await listr.run(() => context.invoke(agent, input, { ...options, streaming: true, newContext: false }));
  return { result, context };
  }
  finally {
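
`TerminalTracer.run` now accepts an optional third `InvokeOptions` argument and forwards it to `context.invoke` as `{ ...options, streaming: true, newContext: false }`, so callers can thread invoke options through while the tracer keeps control of streaming and context reuse. A minimal sketch of the new call shape; the `TerminalTracer` import path is hypothetical, since this diff only shows the compiled module:

```ts
import type { Agent, InvokeOptions, Message } from "@aigne/core";
// Hypothetical export path for illustration; the real module lives under the CLI's dist output.
import type { TerminalTracer } from "@aigne/cli";

// run() spreads `options` into context.invoke while still forcing
// streaming: true and newContext: false from inside the tracer.
async function traceOnce(
  tracer: TerminalTracer,
  agent: Agent,
  input: Message,
  options?: InvokeOptions,
): Promise<Message> {
  const { result } = await tracer.run(agent, input, options);
  return result;
}
```
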
@@ -1,4 +1,3 @@
- import assert from "node:assert";
  import { fstat } from "node:fs";
  import { mkdir, readFile, stat, writeFile } from "node:fs/promises";
  import { dirname, isAbsolute, join } from "node:path";
@@ -177,8 +176,7 @@ export async function runAgentWithAIGNE(aigne, agent, { outputKey, chatLoopOptio
  printRequest: logger.enabled(LogLevel.INFO),
  outputKey,
  });
- assert(options.input);
- const { result } = await tracer.run(agent, options.input);
+ const { result } = await tracer.run(agent, options.input ?? {});
  if (options.output) {
  const message = result[outputKey || DEFAULT_OUTPUT_KEY];
  const content = typeof message === "string" ? message : JSON.stringify(result, null, 2);
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@aigne/cli",
- "version": "1.22.8",
+ "version": "1.24.0",
  "description": "cli for AIGNE framework",
  "publishConfig": {
  "access": "public"
@@ -54,17 +54,20 @@
  "wrap-ansi": "^9.0.0",
  "yaml": "^2.8.0",
  "zod": "^3.25.67",
- "@aigne/anthropic": "^0.8.2",
- "@aigne/agent-library": "^1.20.5",
- "@aigne/bedrock": "^0.7.5",
- "@aigne/deepseek": "^0.6.5",
- "@aigne/core": "^1.33.2",
- "@aigne/gemini": "^0.7.2",
- "@aigne/observability-api": "^0.7.2",
- "@aigne/open-router": "^0.6.5",
- "@aigne/openai": "^0.9.2",
- "@aigne/xai": "^0.6.6",
- "@aigne/ollama": "^0.6.5"
+ "@aigne/bedrock": "^0.8.2",
+ "@aigne/core": "^1.36.0",
+ "@aigne/anthropic": "^0.9.2",
+ "@aigne/deepseek": "^0.7.2",
+ "@aigne/gemini": "^0.8.2",
+ "@aigne/default-memory": "^1.0.2",
+ "@aigne/observability-api": "^0.8.0",
+ "@aigne/ollama": "^0.7.2",
+ "@aigne/open-router": "^0.7.2",
+ "@aigne/openai": "^0.10.2",
+ "@aigne/agentic-memory": "^1.0.2",
+ "@aigne/xai": "^0.7.2",
+ "@aigne/aigne-hub": "^0.1.2",
+ "@aigne/agent-library": "^1.21.2"
  },
  "devDependencies": {
  "@types/archiver": "^6.0.3",