@agentica/core 0.10.1 → 0.10.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2)
  1. package/README.md +48 -6
  2. package/package.json +1 -1
package/README.md CHANGED
@@ -52,19 +52,17 @@ By the way, as `typia` is a transformer library analyzing TypeScript source code
  ### Chat with Backend Server
  ```typescript
  import { IHttpLlmApplication } from "@samchon/openapi";
- import { Agentica, createHttpApplication } from "@agentica/core";
+ import { Agentica, createHttpLlmApplication } from "@agentica/core";
  import OpenAI from "openai";
  import { IValidation } from "typia";

  const main = async (): Promise<void> => {
    // LOAD SWAGGER DOCUMENT, AND CONVERT TO LLM APPLICATION SCHEMA
    const application: IValidation<IHttpLlmApplication<"chatgpt">> =
-     createHttpApplication({
+     createHttpLlmApplication({
        model: "chatgpt",
-       document: OpenApi.convert(
-         await fetch("https://shopping-be.wrtn.ai/editor/swagger.json").then(
-           (r) => r.json()
-         )
+       document: await fetch("https://shopping-be.wrtn.ai/editor/swagger.json").then(
+         (r) => r.json()
        ),
      });
    if (application.success === false) {
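
For readers skimming this hunk, here is the post-change call consolidated into one self-contained snippet. It is a minimal sketch rather than the README's full example: the `createHttpLlmApplication` name, its `{ model, document }` arguments, the swagger URL, and the `IValidation` return type are taken from the diff above, while the error-handling and logging lines after the `success` check are illustrative additions.

```typescript
import { IHttpLlmApplication } from "@samchon/openapi";
import { createHttpLlmApplication } from "@agentica/core";
import { IValidation } from "typia";

const main = async (): Promise<void> => {
  // Per this README change, the fetched swagger document is passed directly;
  // the OpenApi.convert() wrapper used in the 0.10.1 README is gone.
  const application: IValidation<IHttpLlmApplication<"chatgpt">> =
    createHttpLlmApplication({
      model: "chatgpt",
      document: await fetch(
        "https://shopping-be.wrtn.ai/editor/swagger.json",
      ).then((r) => r.json()),
    });

  // typia's IValidation is a discriminated union: check `success`
  // before touching `data`.
  if (application.success === false) {
    console.error(application.errors);
    return;
  }
  console.log("HTTP LLM application composed:", application.data);
};

main().catch(console.error);
```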
@@ -259,6 +257,50 @@ In the `@agentica/core`, you can implement multi-agent orchestration super easil
  Just develop a TypeScript class which contains agent features like a Vector Store, and deliver the TypeScript class type to `@agentica/core` like above. `@agentica/core` will centralize and realize the multi-agent orchestration by applying its LLM function calling strategy to the TypeScript class.


+ ### If you want to drastically improve function selection speed
+
+ Use the [@agentica/pg-selector](../pg-selector/README.md) adapter.
+
+ Just initialize the selector and set it in the config.
+ When using this adapter, you should run the [connector-hive](https://github.com/wrtnlabs/connector-hive) server.
+
+ ```typescript
+ import { Agentica } from "@agentica/core";
+ import { AgenticaPgVectorSelector } from "@agentica/pg-selector";
+
+ import OpenAI from "openai";
+ import typia from "typia";
+
+ // Initialize with connector-hive server
+ const selectorExecute = AgenticaPgVectorSelector.boot<"chatgpt">(
+   "https://your-connector-hive-server.com",
+ );
+
+
+ const agent = new Agentica({
+   model: "chatgpt",
+   vendor: {
+     model: "gpt-4o-mini",
+     api: new OpenAI({
+       apiKey: process.env.CHATGPT_API_KEY,
+     }),
+   },
+   controllers: [
+     await fetch(
+       "https://shopping-be.wrtn.ai/editor/swagger.json",
+     ).then(r => r.json()),
+     typia.llm.application<ShoppingCounselor>(),
+     typia.llm.application<ShoppingPolicy>(),
+     typia.llm.application<ShoppingSearchRag>(),
+   ],
+   config: {
+     executor: {
+       select: selectorExecute,
+     }
+   }
+ });
+ await agent.conversate("I wanna buy MacBook Pro");
+ ```


  ## Principles
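
The context paragraph in the hunk above (developing a TypeScript class and handing its type to `@agentica/core`) refers to an example that sits outside this diff. As a rough, hypothetical illustration only, not the package's actual code: a controller class is an ordinary TypeScript class whose public methods become LLM function-calling candidates once the class type is passed to `typia.llm.application<...>()`, as in the controllers array added above. The `ShoppingCounselor` name comes from that controllers array; the method and body below are invented.

```typescript
// Hypothetical sketch of an agent-feature class of the kind the README paragraph describes.
// Only its TYPE is handed to typia.llm.application<...>(); each public method becomes
// a function the LLM can select and call, with the JSDoc guiding the model.
export class ShoppingCounselor {
  /**
   * Recommend products matching the customer's stated needs.
   *
   * @param props.description Natural-language description of what the customer wants
   * @returns Short recommendation messages to show the customer
   */
  public async recommend(props: { description: string }): Promise<string[]> {
    // A real implementation would query a vector store or search backend here.
    return [`Here are items related to: ${props.description}`];
  }
}
```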
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
    "name": "@agentica/core",
-   "version": "0.10.1",
+   "version": "0.10.3",
    "main": "lib/index.js",
    "description": "Agentic AI Library specialized in LLM Function Calling",
    "scripts": {