typia 9.3.0-dev.20250511 → 9.3.1-dev.20250520

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -23,10 +23,14 @@ export namespace json {
23
23
  export function assertStringify<T>(input: T): string; // safe and faster
24
24
  }
25
25
 
26
- // LLM FUNCTION CALLING SCHEMA
26
+ // AI FUNCTION CALLING SCHEMA
27
27
  export namespace llm {
28
- // application schema from a class or interface type
29
- export function application<App, Model>(): ILlmApplication<Model>;
28
+ // collection of function calling schemas
29
+ export function application<Class, Model>(): ILlmApplication<Class>;
30
+ export function controller<Class, Model>(
31
+ name: string,
32
+ execute: Class,
33
+ ): ILlmController<Model>; // +executor
30
34
  // structured output
31
35
  export function parameters<P, Model>(): ILlmSchema.IParameters<Model>;
32
36
  export function schema<T, Model>(): ILlmSchema<Model>; // type schema
@@ -35,14 +35,17 @@ var PluginConfigurator;
35
35
  })();
36
36
  const strict = compilerOptions.strict;
37
37
  const strictNullChecks = compilerOptions.strictNullChecks;
38
+ const skipLibCheck = compilerOptions.skipLibCheck;
38
39
  const oldbie = plugins.find((p) => typeof p === "object" &&
39
40
  p !== null &&
40
41
  p.transform === "typia/lib/transform");
41
42
  if (strictNullChecks !== false &&
42
43
  (strict === true || strictNullChecks === true) &&
43
- oldbie !== undefined)
44
+ oldbie !== undefined &&
45
+ skipLibCheck === true)
44
46
  return;
45
47
  // DO CONFIGURE
48
+ compilerOptions.skipLibCheck = true;
46
49
  compilerOptions.strictNullChecks = true;
47
50
  if (strict === undefined && strictNullChecks === undefined)
48
51
  compilerOptions.strict = true;
@@ -1 +1 @@
1
- {"version":3,"file":"PluginConfigurator.js","sourceRoot":"","sources":["../../../src/executable/setup/PluginConfigurator.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;AAAA,gEAAoC;AACpC,4CAAoB;AAIpB,IAAiB,kBAAkB,CA+DlC;AA/DD,WAAiB,kBAAkB;IACjC,SAAsB,SAAS,CAC7B,IAAiC;;YAEjC,uBAAuB;YACvB,MAAM,MAAM,GAA2B,sBAAQ,CAAC,KAAK,CACnD,MAAM,YAAE,CAAC,QAAQ,CAAC,QAAQ,CAAC,IAAI,CAAC,OAAQ,EAAE,MAAM,CAAC,CACxB,CAAC;YAC5B,MAAM,eAAe,GAAG,MAAM,CAAC,eAElB,CAAC;YACd,IAAI,eAAe,KAAK,SAAS;gBAC/B,MAAM,IAAI,cAAc,CACtB,GAAG,IAAI,CAAC,OAAO,iDAAiD,CACjE,CAAC;YAEJ,kBAAkB;YAClB,MAAM,OAAO,GAAkD,CAAC,GAAG,EAAE;gBACnE,MAAM,OAAO,GAAG,eAAe,CAAC,OAEnB,CAAC;gBACd,IAAI,OAAO,KAAK,SAAS;oBAAE,OAAO,CAAC,eAAe,CAAC,OAAO,GAAG,EAAS,CAAC,CAAC;qBACnE,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,OAAO,CAAC;oBAC9B,MAAM,IAAI,SAAS,CACjB,yBAAyB,IAAI,CAAC,OAAO,sBAAsB,CAC5D,CAAC;gBACJ,OAAO,OAAO,CAAC;YACjB,CAAC,CAAC,EAAE,CAAC;YAEL,MAAM,MAAM,GAAwB,eAAe,CAAC,MAEvC,CAAC;YACd,MAAM,gBAAgB,GACpB,eAAe,CAAC,gBAAuC,CAAC;YAC1D,MAAM,MAAM,GAAuC,OAAO,CAAC,IAAI,CAC7D,CAAC,CAAC,EAAE,EAAE,CACJ,OAAO,CAAC,KAAK,QAAQ;gBACrB,CAAC,KAAK,IAAI;gBACV,CAAC,CAAC,SAAS,KAAK,qBAAqB,CACxC,CAAC;YACF,IACE,gBAAgB,KAAK,KAAK;gBAC1B,CAAC,MAAM,KAAK,IAAI,IAAI,gBAAgB,KAAK,IAAI,CAAC;gBAC9C,MAAM,KAAK,SAAS;gBAEpB,OAAO;YAET,eAAe;YACf,eAAe,CAAC,gBAAgB,GAAG,IAAI,CAAC;YACxC,IAAI,MAAM,KAAK,SAAS,IAAI,gBAAgB,KAAK,SAAS;gBACxD,eAAe,CAAC,MAAM,GAAG,IAAI,CAAC;YAChC,IAAI,MAAM,KAAK,SAAS;gBACtB,OAAO,CAAC,IAAI,CACV,sBAAQ,CAAC,KAAK,CAAC;;;0BAGG,CAA2B,CAC9C,CAAC;YACJ,MAAM,YAAE,CAAC,QAAQ,CAAC,SAAS,CACzB,IAAI,CAAC,OAAQ,EACb,sBAAQ,CAAC,SAAS,CAAC,MAAM,EAAE,IAAI,EAAE,CAAC,CAAC,CACpC,CAAC;QACJ,CAAC;KAAA;IA7DqB,4BAAS,YA6D9B,CAAA;AACH,CAAC,EA/DgB,kBAAkB,kCAAlB,kBAAkB,QA+DlC"}
1
+ {"version":3,"file":"PluginConfigurator.js","sourceRoot":"","sources":["../../../src/executable/setup/PluginConfigurator.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;AAAA,gEAAoC;AACpC,4CAAoB;AAIpB,IAAiB,kBAAkB,CAoElC;AApED,WAAiB,kBAAkB;IACjC,SAAsB,SAAS,CAC7B,IAAiC;;YAEjC,uBAAuB;YACvB,MAAM,MAAM,GAA2B,sBAAQ,CAAC,KAAK,CACnD,MAAM,YAAE,CAAC,QAAQ,CAAC,QAAQ,CAAC,IAAI,CAAC,OAAQ,EAAE,MAAM,CAAC,CACxB,CAAC;YAC5B,MAAM,eAAe,GAAG,MAAM,CAAC,eAElB,CAAC;YACd,IAAI,eAAe,KAAK,SAAS;gBAC/B,MAAM,IAAI,cAAc,CACtB,GAAG,IAAI,CAAC,OAAO,iDAAiD,CACjE,CAAC;YAEJ,kBAAkB;YAClB,MAAM,OAAO,GAAkD,CAAC,GAAG,EAAE;gBACnE,MAAM,OAAO,GAAG,eAAe,CAAC,OAEnB,CAAC;gBACd,IAAI,OAAO,KAAK,SAAS;oBAAE,OAAO,CAAC,eAAe,CAAC,OAAO,GAAG,EAAS,CAAC,CAAC;qBACnE,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,OAAO,CAAC;oBAC9B,MAAM,IAAI,SAAS,CACjB,yBAAyB,IAAI,CAAC,OAAO,sBAAsB,CAC5D,CAAC;gBACJ,OAAO,OAAO,CAAC;YACjB,CAAC,CAAC,EAAE,CAAC;YAEL,MAAM,MAAM,GAAwB,eAAe,CAAC,MAEvC,CAAC;YACd,MAAM,gBAAgB,GACpB,eAAe,CAAC,gBAAuC,CAAC;YAC1D,MAAM,YAAY,GAAwB,eAAe,CAAC,YAE7C,CAAC;YACd,MAAM,MAAM,GAAuC,OAAO,CAAC,IAAI,CAC7D,CAAC,CAAC,EAAE,EAAE,CACJ,OAAO,CAAC,KAAK,QAAQ;gBACrB,CAAC,KAAK,IAAI;gBACV,CAAC,CAAC,SAAS,KAAK,qBAAqB,CACxC,CAAC;YACF,IACE,gBAAgB,KAAK,KAAK;gBAC1B,CAAC,MAAM,KAAK,IAAI,IAAI,gBAAgB,KAAK,IAAI,CAAC;gBAC9C,MAAM,KAAK,SAAS;gBACpB,YAAY,KAAK,IAAI;gBAErB,OAAO;YAET,eAAe;YACf,eAAe,CAAC,YAAY,GAAG,IAAI,CAAC;YACpC,eAAe,CAAC,gBAAgB,GAAG,IAAI,CAAC;YACxC,IAAI,MAAM,KAAK,SAAS,IAAI,gBAAgB,KAAK,SAAS;gBACxD,eAAe,CAAC,MAAM,GAAG,IAAI,CAAC;YAChC,IAAI,MAAM,KAAK,SAAS;gBACtB,OAAO,CAAC,IAAI,CACV,sBAAQ,CAAC,KAAK,CAAC;;;0BAGG,CAA2B,CAC9C,CAAC;YACJ,MAAM,YAAE,CAAC,QAAQ,CAAC,SAAS,CACzB,IAAI,CAAC,OAAQ,EACb,sBAAQ,CAAC,SAAS,CAAC,MAAM,EAAE,IAAI,EAAE,CAAC,CAAC,CACpC,CAAC;QACJ,CAAC;KAAA;IAlEqB,4BAAS,YAkE9B,CAAA;AACH,CAAC,EApEgB,kBAAkB,kCAAlB,kBAAkB,QAoElC"}
@@ -20,14 +20,17 @@ var PluginConfigurator;
20
20
  })();
21
21
  const strict = compilerOptions.strict;
22
22
  const strictNullChecks = compilerOptions.strictNullChecks;
23
+ const skipLibCheck = compilerOptions.skipLibCheck;
23
24
  const oldbie = plugins.find((p) => typeof p === "object" &&
24
25
  p !== null &&
25
26
  p.transform === "typia/lib/transform");
26
27
  if (strictNullChecks !== false &&
27
28
  (strict === true || strictNullChecks === true) &&
28
- oldbie !== undefined)
29
+ oldbie !== undefined &&
30
+ skipLibCheck === true)
29
31
  return;
30
32
  // DO CONFIGURE
33
+ compilerOptions.skipLibCheck = true;
31
34
  compilerOptions.strictNullChecks = true;
32
35
  if (strict === undefined && strictNullChecks === undefined)
33
36
  compilerOptions.strict = true;
package/lib/llm.d.mts CHANGED
@@ -1,8 +1,150 @@
1
1
  import { ILlmApplication, ILlmController, ILlmSchema } from "@samchon/openapi";
2
+ /**
3
+ * > You must configure the generic argument `Class`.
4
+ *
5
+ * TypeScript functions to LLM function calling controller.
6
+ *
7
+ * Creates a controller of LLM (Large Language Model) function calling
8
+ * from a TypeScript class or interface type containing the target functions to be
9
+ * called by the LLM function calling feature. The returned controller contains
10
+ * not only the {@link application} of {@link ILlmFunction function calling schemas},
11
+ * but also the {@link ILlmController.execute executor} of the functions.
12
+ *
13
+ * If you put the returned {@link ILlmController} to the LLM provider like
14
+ * [OpenAI (ChatGPT)](https://openai.com/), the LLM will automatically select the
15
+ * proper function and fill its arguments from the conversation (maybe chatting text)
16
+ * with user (human). And you can actually call the function by using
17
+ * {@link ILlmController.execute} property. This is the concept of the LLM function
18
+ * calling.
19
+ *
20
+ * Here is an example of using `typia.llm.controller()` function for AI agent
21
+ * development of performing such AI function calling to mobile API classes
22
+ * through this `typia` and external `@agentica` libraries.
23
+ *
24
+ * ```typescript
25
+ * import { Agentica } from "@agentica/core";
26
+ * import typia from "typia";
27
+ *
28
+ * const agentica = new Agentica({
29
+ * model: "chatgpt",
30
+ * vendor: {
31
+ * api: new OpenAI({ apiKey: "********" }),
32
+ * model: "gpt-4o-mini",
33
+ * },
34
+ * controllers: [
35
+ * typia.llm.controller<ReactNativeFileSystem, "chatgpt">(
36
+ * "filesystem",
37
+ * new ReactNativeFileSystem(),
38
+ * ),
39
+ * typia.llm.controller<ReactNativeGallery, "chatgpt">(
40
+ * "gallery",
41
+ * new ReactNativeGallery(),
42
+ * ),
43
+ * ],
44
+ * });
45
+ * await agentica.conversate(
46
+ * "Organize photo collection and sort them into appropriate folders.",
47
+ * );
48
+ * ```
49
+ *
50
+ * Here is the list of available `Model` types with their corresponding LLM schema.
51
+ * Reading the following list, and determine the `Model` type considering the
52
+ * characteristics of the target LLM provider.
53
+ *
54
+ * - LLM provider schemas
55
+ * - `chatgpt`: [`IChatGptSchema`](https://samchon.github.io/openapi/api/types/IChatGptSchema-1.html)
56
+ * - `claude`: [`IClaudeSchema`](https://samchon.github.io/openapi/api/types/IClaudeSchema-1.html)
57
+ * - `deepseek`: [`IDeepSeekSchema`](https://samchon.github.io/openapi/api/types/IDeepSeekSchema-1.html)
58
+ * - `gemini`: [`IGeminiSchema`](https://samchon.github.io/openapi/api/types/IGeminiSchema-1.html)
59
+ * - `llama`: [`ILlamaSchema`](https://samchon.github.io/openapi/api/types/ILlamaSchema-1.html)
60
+ * - Middle layer schemas
61
+ * - `3.0`: [`ILlmSchemaV3`](https://samchon.github.io/openapi/api/types/ILlmSchemaV3-1.html)
62
+ * - `3.1`: [`ILlmSchemaV3_1`](https://samchon.github.io/openapi/api/types/ILlmSchemaV3_1-1.html)
63
+ *
64
+ * @template Class Target class or interface type collecting the functions to call
65
+ * @template Model LLM schema model
66
+ * @template Config Configuration of LLM schema composition
67
+ * @param name Identifier name of the controller
68
+ * @param execute Executor instance
69
+ * @param options Options for the LLM application construction
70
+ * @returns Controller of LLM function calling
71
+ * @reference https://wrtnlabs.io/agentica/docs/core/controller/typescript/
72
+ * @author Jeongho Nam - https://github.com/samchon
73
+ */
2
74
  export declare function controller(name: string, execute: object, options?: Partial<Pick<ILlmApplication.IOptions<any>, "separate">>): never;
75
+ /**
76
+ * TypeScript functions to LLM function calling controller.
77
+ *
78
+ * Creates a controller of LLM (Large Language Model) function calling
79
+ * from a TypeScript class or interface type containing the target functions to be
80
+ * called by the LLM function calling feature. The returned controller contains
81
+ * not only the {@link application} of {@link ILlmFunction function calling schemas},
82
+ * but also the {@link ILlmController.execute executor} of the functions.
83
+ *
84
+ * If you put the returned {@link ILlmController} to the LLM provider like
85
+ * [OpenAI (ChatGPT)](https://openai.com/), the LLM will automatically select the
86
+ * proper function and fill its arguments from the conversation (maybe chatting text)
87
+ * with user (human). And you can actually call the function by using
88
+ * {@link ILlmController.execute} property. This is the concept of the LLM function
89
+ * calling.
90
+ *
91
+ * Here is an example of using `typia.llm.controller()` function for AI agent
92
+ * development of performing such AI function calling to mobile API classes
93
+ * through this `typia` and external `@agentica` libraries.
94
+ *
95
+ * ```typescript
96
+ * import { Agentica } from "@agentica/core";
97
+ * import typia from "typia";
98
+ *
99
+ * const agentica = new Agentica({
100
+ * model: "chatgpt",
101
+ * vendor: {
102
+ * api: new OpenAI({ apiKey: "********" }),
103
+ * model: "gpt-4o-mini",
104
+ * },
105
+ * controllers: [
106
+ * typia.llm.controller<ReactNativeFileSystem, "chatgpt">(
107
+ * "filesystem",
108
+ * new ReactNativeFileSystem(),
109
+ * ),
110
+ * typia.llm.controller<ReactNativeGallery, "chatgpt">(
111
+ * "gallery",
112
+ * new ReactNativeGallery(),
113
+ * ),
114
+ * ],
115
+ * });
116
+ * await agentica.conversate(
117
+ * "Organize photo collection and sort them into appropriate folders.",
118
+ * );
119
+ * ```
120
+ *
121
+ * Here is the list of available `Model` types with their corresponding LLM schema.
122
+ * Reading the following list, and determine the `Model` type considering the
123
+ * characteristics of the target LLM provider.
124
+ *
125
+ * - LLM provider schemas
126
+ * - `chatgpt`: [`IChatGptSchema`](https://samchon.github.io/openapi/api/types/IChatGptSchema-1.html)
127
+ * - `claude`: [`IClaudeSchema`](https://samchon.github.io/openapi/api/types/IClaudeSchema-1.html)
128
+ * - `deepseek`: [`IDeepSeekSchema`](https://samchon.github.io/openapi/api/types/IDeepSeekSchema-1.html)
129
+ * - `gemini`: [`IGeminiSchema`](https://samchon.github.io/openapi/api/types/IGeminiSchema-1.html)
130
+ * - `llama`: [`ILlamaSchema`](https://samchon.github.io/openapi/api/types/ILlamaSchema-1.html)
131
+ * - Middle layer schemas
132
+ * - `3.0`: [`ILlmSchemaV3`](https://samchon.github.io/openapi/api/types/ILlmSchemaV3-1.html)
133
+ * - `3.1`: [`ILlmSchemaV3_1`](https://samchon.github.io/openapi/api/types/ILlmSchemaV3_1-1.html)
134
+ *
135
+ * @template Class Target class or interface type collecting the functions to call
136
+ * @template Model LLM schema model
137
+ * @template Config Configuration of LLM schema composition
138
+ * @param name Identifier name of the controller
139
+ * @param execute Executor instance
140
+ * @param options Options for the LLM application construction
141
+ * @returns Controller of LLM function calling
142
+ * @reference https://wrtnlabs.io/agentica/docs/core/controller/typescript/
143
+ * @author Jeongho Nam - https://github.com/samchon
144
+ */
3
145
  export declare function controller<Class extends Record<string, any>, Model extends ILlmSchema.Model, Config extends Partial<ILlmSchema.ModelConfig[Model]> = {}>(name: string, execute: Class, options?: Partial<Pick<ILlmApplication.IOptions<Model>, "separate">>): ILlmController<Model>;
4
146
  /**
5
- * > You must configure the generic argument `App`.
147
+ * > You must configure the generic argument `Class`.
6
148
  *
7
149
  * TypeScript functions to LLM function calling application.
8
150
  *
@@ -39,12 +181,13 @@ export declare function controller<Class extends Record<string, any>, Model exte
39
181
  *
40
182
  * - LLM provider schemas
41
183
  * - `chatgpt`: [`IChatGptSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IChatGptSchema.ts)
42
- * - `claude`: [`IClaudeSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IClaudeSchema.ts)
43
- * - `gemini`: [`IGeminiSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IGeminiSchema.ts)
44
- * - `llama`: [`ILlamaSchema`](https://github.com/samchon/openapi/blob/master/src/structures/ILlamaSchema.ts)
184
+ * - `claude`: [`IClaudeSchema`](https://samchon.github.io/openapi/api/types/IClaudeSchema-1.html)
185
+ * - `deepseek`: [`IDeepSeekSchema`](https://samchon.github.io/openapi/api/types/IDeepSeekSchema-1.html)
186
+ * - `gemini`: [`IGeminiSchema`](https://samchon.github.io/openapi/api/types/IGeminiSchema-1.html)
187
+ * - `llama`: [`ILlamaSchema`](https://samchon.github.io/openapi/api/types/ILlamaSchema-1.html)
45
188
  * - Middle layer schemas
46
- * - `3.0`: [`ILlmSchemaV3`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3.ts)
47
- * - `3.1`: [`ILlmSchemaV3_1`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3_1.ts)
189
+ * - `3.0`: [`ILlmSchemaV3`](https://samchon.github.io/openapi/api/types/ILlmSchemaV3-1.html)
190
+ * - `3.1`: [`ILlmSchemaV3_1`](https://samchon.github.io/openapi/api/types/ILlmSchemaV3_1-1.html)
48
191
  *
49
192
  * @template Class Target class or interface type collecting the functions to call
50
193
  * @template Model LLM schema model
@@ -91,12 +234,13 @@ export declare function application(options?: Partial<Pick<ILlmApplication.IOpti
91
234
  *
92
235
  * - LLM provider schemas
93
236
  * - `chatgpt`: [`IChatGptSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IChatGptSchema.ts)
94
- * - `claude`: [`IClaudeSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IClaudeSchema.ts)
95
- * - `gemini`: [`IGeminiSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IGeminiSchema.ts)
96
- * - `llama`: [`ILlamaSchema`](https://github.com/samchon/openapi/blob/master/src/structures/ILlamaSchema.ts)
237
+ * - `claude`: [`IClaudeSchema`](https://samchon.github.io/openapi/api/types/IClaudeSchema-1.html)
238
+ * - `deepseek`: [`IDeepSeekSchema`](https://samchon.github.io/openapi/api/types/IDeepSeekSchema-1.html)
239
+ * - `gemini`: [`IGeminiSchema`](https://samchon.github.io/openapi/api/types/IGeminiSchema-1.html)
240
+ * - `llama`: [`ILlamaSchema`](https://samchon.github.io/openapi/api/types/ILlamaSchema-1.html)
97
241
  * - Middle layer schemas
98
- * - `3.0`: [`ILlmSchemaV3`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3.ts)
99
- * - `3.1`: [`ILlmSchemaV3_1`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3_1.ts)
242
+ * - `3.0`: [`ILlmSchemaV3`](https://samchon.github.io/openapi/api/types/ILlmSchemaV3-1.html)
243
+ * - `3.1`: [`ILlmSchemaV3_1`](https://samchon.github.io/openapi/api/types/ILlmSchemaV3_1-1.html)
100
244
  *
101
245
  * @template Class Target class or interface type collecting the functions to call
102
246
  * @template Model LLM schema model
@@ -133,12 +277,13 @@ export declare function application<Class extends Record<string, any>, Model ext
133
277
  *
134
278
  * - LLM provider schemas
135
279
  * - `chatgpt`: [`IChatGptSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IChatGptSchema.ts)
136
- * - `claude`: [`IClaudeSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IClaudeSchema.ts)
137
- * - `gemini`: [`IGeminiSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IGeminiSchema.ts)
138
- * - `llama`: [`ILlamaSchema`](https://github.com/samchon/openapi/blob/master/src/structures/ILlamaSchema.ts)
280
+ * - `claude`: [`IClaudeSchema`](https://samchon.github.io/openapi/api/types/IClaudeSchema-1.html)
281
+ * - `deepseek`: [`IDeepSeekSchema`](https://samchon.github.io/openapi/api/types/IDeepSeekSchema-1.html)
282
+ * - `gemini`: [`IGeminiSchema`](https://samchon.github.io/openapi/api/types/IGeminiSchema-1.html)
283
+ * - `llama`: [`ILlamaSchema`](https://samchon.github.io/openapi/api/types/ILlamaSchema-1.html)
139
284
  * - Middle layer schemas
140
- * - `3.0`: [`ILlmSchemaV3`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3.ts)
141
- * - `3.1`: [`ILlmSchemaV3_1`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3_1.ts)
285
+ * - `3.0`: [`ILlmSchemaV3`](https://samchon.github.io/openapi/api/types/ILlmSchemaV3-1.html)
286
+ * - `3.1`: [`ILlmSchemaV3_1`](https://samchon.github.io/openapi/api/types/ILlmSchemaV3_1-1.html)
142
287
  *
143
288
  * @template Parameters Target parameters type
144
289
  * @template Model LLM schema model
@@ -172,12 +317,13 @@ export declare function parameters(): never;
172
317
  *
173
318
  * - LLM provider schemas
174
319
  * - `chatgpt`: [`IChatGptSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IChatGptSchema.ts)
175
- * - `claude`: [`IClaudeSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IClaudeSchema.ts)
176
- * - `gemini`: [`IGeminiSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IGeminiSchema.ts)
177
- * - `llama`: [`ILlamaSchema`](https://github.com/samchon/openapi/blob/master/src/structures/ILlamaSchema.ts)
320
+ * - `claude`: [`IClaudeSchema`](https://samchon.github.io/openapi/api/types/IClaudeSchema-1.html)
321
+ * - `deepseek`: [`IDeepSeekSchema`](https://samchon.github.io/openapi/api/types/IDeepSeekSchema-1.html)
322
+ * - `gemini`: [`IGeminiSchema`](https://samchon.github.io/openapi/api/types/IGeminiSchema-1.html)
323
+ * - `llama`: [`ILlamaSchema`](https://samchon.github.io/openapi/api/types/ILlamaSchema-1.html)
178
324
  * - Middle layer schemas
179
- * - `3.0`: [`ILlmSchemaV3`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3.ts)
180
- * - `3.1`: [`ILlmSchemaV3_1`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3_1.ts)
325
+ * - `3.0`: [`ILlmSchemaV3`](https://samchon.github.io/openapi/api/types/ILlmSchemaV3-1.html)
326
+ * - `3.1`: [`ILlmSchemaV3_1`](https://samchon.github.io/openapi/api/types/ILlmSchemaV3_1-1.html)
181
327
  *
182
328
  * @template Parameters Target parameters type
183
329
  * @template Model LLM schema model
@@ -203,12 +349,13 @@ export declare function parameters<Parameters extends Record<string, any>, Model
203
349
  *
204
350
  * - LLM provider schemas
205
351
  * - `chatgpt`: [`IChatGptSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IChatGptSchema.ts)
206
- * - `claude`: [`IClaudeSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IClaudeSchema.ts)
207
- * - `gemini`: [`IGeminiSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IGeminiSchema.ts)
208
- * - `llama`: [`ILlamaSchema`](https://github.com/samchon/openapi/blob/master/src/structures/ILlamaSchema.ts)
352
+ * - `claude`: [`IClaudeSchema`](https://samchon.github.io/openapi/api/types/IClaudeSchema-1.html)
353
+ * - `deepseek`: [`IDeepSeekSchema`](https://samchon.github.io/openapi/api/types/IDeepSeekSchema-1.html)
354
+ * - `gemini`: [`IGeminiSchema`](https://samchon.github.io/openapi/api/types/IGeminiSchema-1.html)
355
+ * - `llama`: [`ILlamaSchema`](https://samchon.github.io/openapi/api/types/ILlamaSchema-1.html)
209
356
  * - Middle layer schemas
210
- * - `3.0`: [`ILlmSchemaV3`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3.ts)
211
- * - `3.1`: [`ILlmSchemaV3_1`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3_1.ts)
357
+ * - `3.0`: [`ILlmSchemaV3`](https://samchon.github.io/openapi/api/types/ILlmSchemaV3-1.html)
358
+ * - `3.1`: [`ILlmSchemaV3_1`](https://samchon.github.io/openapi/api/types/ILlmSchemaV3_1-1.html)
212
359
  *
213
360
  * If you actually want to perform the LLM function calling with TypeScript functions,
214
361
  * you can do it with the {@link application} function. Otherwise you hope to perform the
@@ -249,12 +396,13 @@ export declare function schema(): never;
249
396
  *
250
397
  * - LLM provider schemas
251
398
  * - `chatgpt`: [`IChatGptSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IChatGptSchema.ts)
252
- * - `claude`: [`IClaudeSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IClaudeSchema.ts)
253
- * - `gemini`: [`IGeminiSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IGeminiSchema.ts)
254
- * - `llama`: [`ILlamaSchema`](https://github.com/samchon/openapi/blob/master/src/structures/ILlamaSchema.ts)
399
+ * - `claude`: [`IClaudeSchema`](https://samchon.github.io/openapi/api/types/IClaudeSchema-1.html)
400
+ * - `deepseek`: [`IDeepSeekSchema`](https://samchon.github.io/openapi/api/types/IDeepSeekSchema-1.html)
401
+ * - `gemini`: [`IGeminiSchema`](https://samchon.github.io/openapi/api/types/IGeminiSchema-1.html)
402
+ * - `llama`: [`ILlamaSchema`](https://samchon.github.io/openapi/api/types/ILlamaSchema-1.html)
255
403
  * - Middle layer schemas
256
- * - `3.0`: [`ILlmSchemaV3`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3.ts)
257
- * - `3.1`: [`ILlmSchemaV3_1`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3_1.ts)
404
+ * - `3.0`: [`ILlmSchemaV3`](https://samchon.github.io/openapi/api/types/ILlmSchemaV3-1.html)
405
+ * - `3.1`: [`ILlmSchemaV3_1`](https://samchon.github.io/openapi/api/types/ILlmSchemaV3_1-1.html)
258
406
  *
259
407
  * If you actually want to perform the LLM function calling with TypeScript functions,
260
408
  * you can do it with the {@link application} function. Otherwise you hope to perform the
package/lib/llm.d.ts CHANGED
@@ -1,8 +1,150 @@
1
1
  import { ILlmApplication, ILlmController, ILlmSchema } from "@samchon/openapi";
2
+ /**
3
+ * > You must configure the generic argument `Class`.
4
+ *
5
+ * TypeScript functions to LLM function calling controller.
6
+ *
7
+ * Creates a controller of LLM (Large Language Model) function calling
8
+ * from a TypeScript class or interface type containing the target functions to be
9
+ * called by the LLM function calling feature. The returned controller contains
10
+ * not only the {@link application} of {@link ILlmFunction function calling schemas},
11
+ * but also the {@link ILlmController.execute executor} of the functions.
12
+ *
13
+ * If you put the returned {@link ILlmController} to the LLM provider like
14
+ * [OpenAI (ChatGPT)](https://openai.com/), the LLM will automatically select the
15
+ * proper function and fill its arguments from the conversation (maybe chatting text)
16
+ * with user (human). And you can actually call the function by using
17
+ * {@link ILlmController.execute} property. This is the concept of the LLM function
18
+ * calling.
19
+ *
20
+ * Here is an example of using `typia.llm.controller()` function for AI agent
21
+ * development of performing such AI function calling to mobile API classes
22
+ * through this `typia` and external `@agentica` libraries.
23
+ *
24
+ * ```typescript
25
+ * import { Agentica } from "@agentica/core";
26
+ * import typia from "typia";
27
+ *
28
+ * const agentica = new Agentica({
29
+ * model: "chatgpt",
30
+ * vendor: {
31
+ * api: new OpenAI({ apiKey: "********" }),
32
+ * model: "gpt-4o-mini",
33
+ * },
34
+ * controllers: [
35
+ * typia.llm.controller<ReactNativeFileSystem, "chatgpt">(
36
+ * "filesystem",
37
+ * new ReactNativeFileSystem(),
38
+ * ),
39
+ * typia.llm.controller<ReactNativeGallery, "chatgpt">(
40
+ * "gallery",
41
+ * new ReactNativeGallery(),
42
+ * ),
43
+ * ],
44
+ * });
45
+ * await agentica.conversate(
46
+ * "Organize photo collection and sort them into appropriate folders.",
47
+ * );
48
+ * ```
49
+ *
50
+ * Here is the list of available `Model` types with their corresponding LLM schema.
51
+ * Reading the following list, and determine the `Model` type considering the
52
+ * characteristics of the target LLM provider.
53
+ *
54
+ * - LLM provider schemas
55
+ * - `chatgpt`: [`IChatGptSchema`](https://samchon.github.io/openapi/api/types/IChatGptSchema-1.html)
56
+ * - `claude`: [`IClaudeSchema`](https://samchon.github.io/openapi/api/types/IClaudeSchema-1.html)
57
+ * - `deepseek`: [`IDeepSeekSchema`](https://samchon.github.io/openapi/api/types/IDeepSeekSchema-1.html)
58
+ * - `gemini`: [`IGeminiSchema`](https://samchon.github.io/openapi/api/types/IGeminiSchema-1.html)
59
+ * - `llama`: [`ILlamaSchema`](https://samchon.github.io/openapi/api/types/ILlamaSchema-1.html)
60
+ * - Middle layer schemas
61
+ * - `3.0`: [`ILlmSchemaV3`](https://samchon.github.io/openapi/api/types/ILlmSchemaV3-1.html)
62
+ * - `3.1`: [`ILlmSchemaV3_1`](https://samchon.github.io/openapi/api/types/ILlmSchemaV3_1-1.html)
63
+ *
64
+ * @template Class Target class or interface type collecting the functions to call
65
+ * @template Model LLM schema model
66
+ * @template Config Configuration of LLM schema composition
67
+ * @param name Identifier name of the controller
68
+ * @param execute Executor instance
69
+ * @param options Options for the LLM application construction
70
+ * @returns Controller of LLM function calling
71
+ * @reference https://wrtnlabs.io/agentica/docs/core/controller/typescript/
72
+ * @author Jeongho Nam - https://github.com/samchon
73
+ */
2
74
  export declare function controller(name: string, execute: object, options?: Partial<Pick<ILlmApplication.IOptions<any>, "separate">>): never;
75
+ /**
76
+ * TypeScript functions to LLM function calling controller.
77
+ *
78
+ * Creates a controller of LLM (Large Language Model) function calling
79
+ * from a TypeScript class or interface type containing the target functions to be
80
+ * called by the LLM function calling feature. The returned controller contains
81
+ * not only the {@link application} of {@link ILlmFunction function calling schemas},
82
+ * but also the {@link ILlmController.execute executor} of the functions.
83
+ *
84
+ * If you put the returned {@link ILlmController} to the LLM provider like
85
+ * [OpenAI (ChatGPT)](https://openai.com/), the LLM will automatically select the
86
+ * proper function and fill its arguments from the conversation (maybe chatting text)
87
+ * with user (human). And you can actually call the function by using
88
+ * {@link ILlmController.execute} property. This is the concept of the LLM function
89
+ * calling.
90
+ *
91
+ * Here is an example of using `typia.llm.controller()` function for AI agent
92
+ * development of performing such AI function calling to mobile API classes
93
+ * through this `typia` and external `@agentica` libraries.
94
+ *
95
+ * ```typescript
96
+ * import { Agentica } from "@agentica/core";
97
+ * import typia from "typia";
98
+ *
99
+ * const agentica = new Agentica({
100
+ * model: "chatgpt",
101
+ * vendor: {
102
+ * api: new OpenAI({ apiKey: "********" }),
103
+ * model: "gpt-4o-mini",
104
+ * },
105
+ * controllers: [
106
+ * typia.llm.controller<ReactNativeFileSystem, "chatgpt">(
107
+ * "filesystem",
108
+ * new ReactNativeFileSystem(),
109
+ * ),
110
+ * typia.llm.controller<ReactNativeGallery, "chatgpt">(
111
+ * "gallery",
112
+ * new ReactNativeGallery(),
113
+ * ),
114
+ * ],
115
+ * });
116
+ * await agentica.conversate(
117
+ * "Organize photo collection and sort them into appropriate folders.",
118
+ * );
119
+ * ```
120
+ *
121
+ * Here is the list of available `Model` types with their corresponding LLM schema.
122
+ * Reading the following list, and determine the `Model` type considering the
123
+ * characteristics of the target LLM provider.
124
+ *
125
+ * - LLM provider schemas
126
+ * - `chatgpt`: [`IChatGptSchema`](https://samchon.github.io/openapi/api/types/IChatGptSchema-1.html)
127
+ * - `claude`: [`IClaudeSchema`](https://samchon.github.io/openapi/api/types/IClaudeSchema-1.html)
128
+ * - `deepseek`: [`IDeepSeekSchema`](https://samchon.github.io/openapi/api/types/IDeepSeekSchema-1.html)
129
+ * - `gemini`: [`IGeminiSchema`](https://samchon.github.io/openapi/api/types/IGeminiSchema-1.html)
130
+ * - `llama`: [`ILlamaSchema`](https://samchon.github.io/openapi/api/types/ILlamaSchema-1.html)
131
+ * - Middle layer schemas
132
+ * - `3.0`: [`ILlmSchemaV3`](https://samchon.github.io/openapi/api/types/ILlmSchemaV3-1.html)
133
+ * - `3.1`: [`ILlmSchemaV3_1`](https://samchon.github.io/openapi/api/types/ILlmSchemaV3_1-1.html)
134
+ *
135
+ * @template Class Target class or interface type collecting the functions to call
136
+ * @template Model LLM schema model
137
+ * @template Config Configuration of LLM schema composition
138
+ * @param name Identifier name of the controller
139
+ * @param execute Executor instance
140
+ * @param options Options for the LLM application construction
141
+ * @returns Controller of LLM function calling
142
+ * @reference https://wrtnlabs.io/agentica/docs/core/controller/typescript/
143
+ * @author Jeongho Nam - https://github.com/samchon
144
+ */
3
145
  export declare function controller<Class extends Record<string, any>, Model extends ILlmSchema.Model, Config extends Partial<ILlmSchema.ModelConfig[Model]> = {}>(name: string, execute: Class, options?: Partial<Pick<ILlmApplication.IOptions<Model>, "separate">>): ILlmController<Model>;
4
146
  /**
5
- * > You must configure the generic argument `App`.
147
+ * > You must configure the generic argument `Class`.
6
148
  *
7
149
  * TypeScript functions to LLM function calling application.
8
150
  *
@@ -39,12 +181,13 @@ export declare function controller<Class extends Record<string, any>, Model exte
39
181
  *
40
182
  * - LLM provider schemas
41
183
  * - `chatgpt`: [`IChatGptSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IChatGptSchema.ts)
42
- * - `claude`: [`IClaudeSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IClaudeSchema.ts)
43
- * - `gemini`: [`IGeminiSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IGeminiSchema.ts)
44
- * - `llama`: [`ILlamaSchema`](https://github.com/samchon/openapi/blob/master/src/structures/ILlamaSchema.ts)
184
+ * - `claude`: [`IClaudeSchema`](https://samchon.github.io/openapi/api/types/IClaudeSchema-1.html)
185
+ *   - `deepseek`: [`IDeepSeekSchema`](https://samchon.github.io/openapi/api/types/IDeepSeekSchema-1.html)
186
+ * - `gemini`: [`IGeminiSchema`](https://samchon.github.io/openapi/api/types/IGeminiSchema-1.html)
187
+ * - `llama`: [`ILlamaSchema`](https://samchon.github.io/openapi/api/types/ILlamaSchema-1.html)
45
188
  * - Middle layer schemas
46
- * - `3.0`: [`ILlmSchemaV3`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3.ts)
47
- * - `3.1`: [`ILlmSchemaV3_1`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3_1.ts)
189
+ * - `3.0`: [`ILlmSchemaV3`](https://samchon.github.io/openapi/api/types/ILlmSchemaV3-1.html)
190
+ * - `3.1`: [`ILlmSchemaV3_1`](https://samchon.github.io/openapi/api/types/ILlmSchemaV3_1-1.html)
48
191
  *
49
192
  * @template Class Target class or interface type collecting the functions to call
50
193
  * @template Model LLM schema model
@@ -91,12 +234,13 @@ export declare function application(options?: Partial<Pick<ILlmApplication.IOpti
91
234
  *
92
235
  * - LLM provider schemas
93
236
  * - `chatgpt`: [`IChatGptSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IChatGptSchema.ts)
94
- * - `claude`: [`IClaudeSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IClaudeSchema.ts)
95
- * - `gemini`: [`IGeminiSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IGeminiSchema.ts)
96
- * - `llama`: [`ILlamaSchema`](https://github.com/samchon/openapi/blob/master/src/structures/ILlamaSchema.ts)
237
+ * - `claude`: [`IClaudeSchema`](https://samchon.github.io/openapi/api/types/IClaudeSchema-1.html)
238
+ *   - `deepseek`: [`IDeepSeekSchema`](https://samchon.github.io/openapi/api/types/IDeepSeekSchema-1.html)
239
+ * - `gemini`: [`IGeminiSchema`](https://samchon.github.io/openapi/api/types/IGeminiSchema-1.html)
240
+ * - `llama`: [`ILlamaSchema`](https://samchon.github.io/openapi/api/types/ILlamaSchema-1.html)
97
241
  * - Middle layer schemas
98
- * - `3.0`: [`ILlmSchemaV3`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3.ts)
99
- * - `3.1`: [`ILlmSchemaV3_1`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3_1.ts)
242
+ * - `3.0`: [`ILlmSchemaV3`](https://samchon.github.io/openapi/api/types/ILlmSchemaV3-1.html)
243
+ * - `3.1`: [`ILlmSchemaV3_1`](https://samchon.github.io/openapi/api/types/ILlmSchemaV3_1-1.html)
100
244
  *
101
245
  * @template Class Target class or interface type collecting the functions to call
102
246
  * @template Model LLM schema model
@@ -133,12 +277,13 @@ export declare function application<Class extends Record<string, any>, Model ext
133
277
  *
134
278
  * - LLM provider schemas
135
279
  * - `chatgpt`: [`IChatGptSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IChatGptSchema.ts)
136
- * - `claude`: [`IClaudeSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IClaudeSchema.ts)
137
- * - `gemini`: [`IGeminiSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IGeminiSchema.ts)
138
- * - `llama`: [`ILlamaSchema`](https://github.com/samchon/openapi/blob/master/src/structures/ILlamaSchema.ts)
280
+ * - `claude`: [`IClaudeSchema`](https://samchon.github.io/openapi/api/types/IClaudeSchema-1.html)
281
+ *   - `deepseek`: [`IDeepSeekSchema`](https://samchon.github.io/openapi/api/types/IDeepSeekSchema-1.html)
282
+ * - `gemini`: [`IGeminiSchema`](https://samchon.github.io/openapi/api/types/IGeminiSchema-1.html)
283
+ * - `llama`: [`ILlamaSchema`](https://samchon.github.io/openapi/api/types/ILlamaSchema-1.html)
139
284
  * - Middle layer schemas
140
- * - `3.0`: [`ILlmSchemaV3`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3.ts)
141
- * - `3.1`: [`ILlmSchemaV3_1`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3_1.ts)
285
+ * - `3.0`: [`ILlmSchemaV3`](https://samchon.github.io/openapi/api/types/ILlmSchemaV3-1.html)
286
+ * - `3.1`: [`ILlmSchemaV3_1`](https://samchon.github.io/openapi/api/types/ILlmSchemaV3_1-1.html)
142
287
  *
143
288
  * @template Parameters Target parameters type
144
289
  * @template Model LLM schema model
@@ -172,12 +317,13 @@ export declare function parameters(): never;
172
317
  *
173
318
  * - LLM provider schemas
174
319
  * - `chatgpt`: [`IChatGptSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IChatGptSchema.ts)
175
- * - `claude`: [`IClaudeSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IClaudeSchema.ts)
176
- * - `gemini`: [`IGeminiSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IGeminiSchema.ts)
177
- * - `llama`: [`ILlamaSchema`](https://github.com/samchon/openapi/blob/master/src/structures/ILlamaSchema.ts)
320
+ * - `claude`: [`IClaudeSchema`](https://samchon.github.io/openapi/api/types/IClaudeSchema-1.html)
321
+ *   - `deepseek`: [`IDeepSeekSchema`](https://samchon.github.io/openapi/api/types/IDeepSeekSchema-1.html)
322
+ * - `gemini`: [`IGeminiSchema`](https://samchon.github.io/openapi/api/types/IGeminiSchema-1.html)
323
+ * - `llama`: [`ILlamaSchema`](https://samchon.github.io/openapi/api/types/ILlamaSchema-1.html)
178
324
  * - Middle layer schemas
179
- * - `3.0`: [`ILlmSchemaV3`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3.ts)
180
- * - `3.1`: [`ILlmSchemaV3_1`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3_1.ts)
325
+ * - `3.0`: [`ILlmSchemaV3`](https://samchon.github.io/openapi/api/types/ILlmSchemaV3-1.html)
326
+ * - `3.1`: [`ILlmSchemaV3_1`](https://samchon.github.io/openapi/api/types/ILlmSchemaV3_1-1.html)
181
327
  *
182
328
  * @template Parameters Target parameters type
183
329
  * @template Model LLM schema model
@@ -203,12 +349,13 @@ export declare function parameters<Parameters extends Record<string, any>, Model
203
349
  *
204
350
  * - LLM provider schemas
205
351
  * - `chatgpt`: [`IChatGptSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IChatGptSchema.ts)
206
- * - `claude`: [`IClaudeSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IClaudeSchema.ts)
207
- * - `gemini`: [`IGeminiSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IGeminiSchema.ts)
208
- * - `llama`: [`ILlamaSchema`](https://github.com/samchon/openapi/blob/master/src/structures/ILlamaSchema.ts)
352
+ * - `claude`: [`IClaudeSchema`](https://samchon.github.io/openapi/api/types/IClaudeSchema-1.html)
353
+ *   - `deepseek`: [`IDeepSeekSchema`](https://samchon.github.io/openapi/api/types/IDeepSeekSchema-1.html)
354
+ * - `gemini`: [`IGeminiSchema`](https://samchon.github.io/openapi/api/types/IGeminiSchema-1.html)
355
+ * - `llama`: [`ILlamaSchema`](https://samchon.github.io/openapi/api/types/ILlamaSchema-1.html)
209
356
  * - Middle layer schemas
210
- * - `3.0`: [`ILlmSchemaV3`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3.ts)
211
- * - `3.1`: [`ILlmSchemaV3_1`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3_1.ts)
357
+ * - `3.0`: [`ILlmSchemaV3`](https://samchon.github.io/openapi/api/types/ILlmSchemaV3-1.html)
358
+ * - `3.1`: [`ILlmSchemaV3_1`](https://samchon.github.io/openapi/api/types/ILlmSchemaV3_1-1.html)
212
359
  *
213
360
  * If you actually want to perform the LLM function calling with TypeScript functions,
214
361
  * you can do it with the {@link application} function. Otherwise you hope to perform the
@@ -249,12 +396,13 @@ export declare function schema(): never;
249
396
  *
250
397
  * - LLM provider schemas
251
398
  * - `chatgpt`: [`IChatGptSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IChatGptSchema.ts)
252
- * - `claude`: [`IClaudeSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IClaudeSchema.ts)
253
- * - `gemini`: [`IGeminiSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IGeminiSchema.ts)
254
- * - `llama`: [`ILlamaSchema`](https://github.com/samchon/openapi/blob/master/src/structures/ILlamaSchema.ts)
399
+ * - `claude`: [`IClaudeSchema`](https://samchon.github.io/openapi/api/types/IClaudeSchema-1.html)
400
+ *   - `deepseek`: [`IDeepSeekSchema`](https://samchon.github.io/openapi/api/types/IDeepSeekSchema-1.html)
401
+ * - `gemini`: [`IGeminiSchema`](https://samchon.github.io/openapi/api/types/IGeminiSchema-1.html)
402
+ * - `llama`: [`ILlamaSchema`](https://samchon.github.io/openapi/api/types/ILlamaSchema-1.html)
255
403
  * - Middle layer schemas
256
- * - `3.0`: [`ILlmSchemaV3`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3.ts)
257
- * - `3.1`: [`ILlmSchemaV3_1`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3_1.ts)
404
+ * - `3.0`: [`ILlmSchemaV3`](https://samchon.github.io/openapi/api/types/ILlmSchemaV3-1.html)
405
+ * - `3.1`: [`ILlmSchemaV3_1`](https://samchon.github.io/openapi/api/types/ILlmSchemaV3_1-1.html)
258
406
  *
259
407
  * If you actually want to perform the LLM function calling with TypeScript functions,
260
408
  * you can do it with the {@link application} function. Otherwise you hope to perform the
package/lib/llm.js.map CHANGED
@@ -1 +1 @@
1
- {"version":3,"file":"llm.js","sourceRoot":"","sources":["../src/llm.ts"],"names":[],"mappings":";;AAuBA,gCAEC;AAyHD,kCAEC;AA2FD,gCAEC;AAkHD,wBAEC;AAnWD,gGAA6F;AAkB7F;;GAEG;AACH,SAAgB,UAAU,CAAC,GAAG,KAAY;IACxC,IAAA,6DAA6B,EAAC,gBAAgB,CAAC,CAAC;AAClD,CAAC;AAsHD;;GAEG;AACH,SAAgB,WAAW;IACzB,IAAA,6DAA6B,EAAC,iBAAiB,CAAC,CAAC;AACnD,CAAC;AAwFD;;GAEG;AACH,SAAgB,UAAU;IACxB,IAAA,6DAA6B,EAAC,gBAAgB,CAAC,CAAC;AAClD,CAAC;AA+GD;;GAEG;AACH,SAAgB,MAAM;IACpB,IAAA,6DAA6B,EAAC,YAAY,CAAC,CAAC;AAC9C,CAAC"}
1
+ {"version":3,"file":"llm.js","sourceRoot":"","sources":["../src/llm.ts"],"names":[],"mappings":";;AAqKA,gCAEC;AA2HD,kCAEC;AA6FD,gCAEC;AAoHD,wBAEC;AAvfD,gGAA6F;AAgK7F;;GAEG;AACH,SAAgB,UAAU,CAAC,GAAG,KAAY;IACxC,IAAA,6DAA6B,EAAC,gBAAgB,CAAC,CAAC;AAClD,CAAC;AAwHD;;GAEG;AACH,SAAgB,WAAW;IACzB,IAAA,6DAA6B,EAAC,iBAAiB,CAAC,CAAC;AACnD,CAAC;AA0FD;;GAEG;AACH,SAAgB,UAAU;IACxB,IAAA,6DAA6B,EAAC,gBAAgB,CAAC,CAAC;AAClD,CAAC;AAiHD;;GAEG;AACH,SAAgB,MAAM;IACpB,IAAA,6DAA6B,EAAC,YAAY,CAAC,CAAC;AAC9C,CAAC"}
@@ -9,4 +9,8 @@ import { IJsonSchemaCollection } from "./IJsonSchemaCollection";
9
9
  * @template Types Original TypeScript types used in the JSON schemas.
10
10
  * @author Jeongho Nam - https://github.com/samchon
11
11
  */
12
- export import IJsonApplication = IJsonSchemaCollection;
12
+ export type IJsonApplication = IJsonSchemaCollection;
13
+ export declare namespace IJsonApplication {
14
+ type IV3_0 = IJsonSchemaCollection.IV3_0;
15
+ type IV3_1 = IJsonSchemaCollection.IV3_1;
16
+ }
@@ -9,4 +9,8 @@ import { IJsonSchemaCollection } from "./IJsonSchemaCollection";
9
9
  * @template Types Original TypeScript types used in the JSON schemas.
10
10
  * @author Jeongho Nam - https://github.com/samchon
11
11
  */
12
- export import IJsonApplication = IJsonSchemaCollection;
12
+ export type IJsonApplication = IJsonSchemaCollection;
13
+ export declare namespace IJsonApplication {
14
+ type IV3_0 = IJsonSchemaCollection.IV3_0;
15
+ type IV3_1 = IJsonSchemaCollection.IV3_1;
16
+ }
@@ -1 +1 @@
1
- {"version":3,"file":"IJsonApplication.js","sourceRoot":"","sources":["../../../src/schemas/json/IJsonApplication.ts"],"names":[],"mappings":";;AAcA,qCAAqC;AACrC,2CAA2C;AAC3C,8BAA8B;AAC9B,MAAM;AACN,sBAAsB;AACtB,gFAAgF;AAChF,+EAA+E;AAC/E,qCAAqC;AACrC,IAAI;AACJ,sCAAsC;AACtC,8EAA8E;AAC9E,4BAA4B;AAC5B,+BAA+B;AAE/B,kCAAkC;AAClC,qBAAqB;AACrB,8BAA8B;AAC9B,uDAAuD;AACvD,QAAQ;AACR,wCAAwC;AACxC,MAAM;AAEN,gCAAgC;AAChC,qBAAqB;AACrB,8BAA8B;AAC9B,uDAAuD;AACvD,QAAQ;AACR,sBAAsB;AACtB,oBAAoB;AACpB,wCAAwC;AACxC,2CAA2C;AAC3C,oCAAoC;AACpC,wCAAwC;AACxC,4BAA4B;AAC5B,uBAAuB;AACvB,MAAM;AAEN,iCAAiC;AACjC,qBAAqB;AACrB,8BAA8B;AAC9B,uDAAuD;AACvD,QAAQ;AACR,oBAAoB;AACpB,yBAAyB;AACzB,sBAAsB;AACtB,kCAAkC;AAClC,wCAAwC;AACxC,MAAM;AAEN,8BAA8B;AAC9B,qBAAqB;AACrB,8BAA8B;AAC9B,uDAAuD;AACvD,QAAQ;AACR,sBAAsB;AACtB,yBAAyB;AACzB,wCAAwC;AACxC,MAAM;AACN,IAAI"}
1
+ {"version":3,"file":"IJsonApplication.js","sourceRoot":"","sources":["../../../src/schemas/json/IJsonApplication.ts"],"names":[],"mappings":";;AAkBA,qCAAqC;AACrC,2CAA2C;AAC3C,8BAA8B;AAC9B,MAAM;AACN,sBAAsB;AACtB,gFAAgF;AAChF,+EAA+E;AAC/E,qCAAqC;AACrC,IAAI;AACJ,sCAAsC;AACtC,8EAA8E;AAC9E,4BAA4B;AAC5B,+BAA+B;AAE/B,kCAAkC;AAClC,qBAAqB;AACrB,8BAA8B;AAC9B,uDAAuD;AACvD,QAAQ;AACR,wCAAwC;AACxC,MAAM;AAEN,gCAAgC;AAChC,qBAAqB;AACrB,8BAA8B;AAC9B,uDAAuD;AACvD,QAAQ;AACR,sBAAsB;AACtB,oBAAoB;AACpB,wCAAwC;AACxC,2CAA2C;AAC3C,oCAAoC;AACpC,wCAAwC;AACxC,4BAA4B;AAC5B,uBAAuB;AACvB,MAAM;AAEN,iCAAiC;AACjC,qBAAqB;AACrB,8BAA8B;AAC9B,uDAAuD;AACvD,QAAQ;AACR,oBAAoB;AACpB,yBAAyB;AACzB,sBAAsB;AACtB,kCAAkC;AAClC,wCAAwC;AACxC,MAAM;AAEN,8BAA8B;AAC9B,qBAAqB;AACrB,8BAA8B;AAC9B,uDAAuD;AACvD,QAAQ;AACR,sBAAsB;AACtB,yBAAyB;AACzB,wCAAwC;AACxC,MAAM;AACN,IAAI"}
@@ -17,12 +17,12 @@ var LlmControllerTransformer;
17
17
  const dec = LlmApplicationTransformer_1.LlmApplicationTransformer.decompose("application", props);
18
18
  if (dec === null)
19
19
  return props.expression;
20
- else if (props.expression.arguments.length === 0)
20
+ else if (props.expression.arguments[0] === undefined)
21
21
  throw new TransformerError_1.TransformerError({
22
22
  code: `typia.llm.controller`,
23
23
  message: `no identifier name.`,
24
24
  });
25
- else if (props.expression.arguments.length === 1)
25
+ else if (props.expression.arguments[1] === undefined)
26
26
  throw new TransformerError_1.TransformerError({
27
27
  code: `typia.llm.controller`,
28
28
  message: `no executor.`,
@@ -45,7 +45,7 @@ var LlmControllerTransformer;
45
45
  name: "application",
46
46
  value: property,
47
47
  }),
48
- ...(!!((_a = props.expression.arguments) === null || _a === void 0 ? void 0 : _a[2])
48
+ ...(((_a = props.expression.arguments) === null || _a === void 0 ? void 0 : _a[2]) !== undefined
49
49
  ? [
50
50
  typescript_1.default.factory.createExpressionStatement(typescript_1.default.factory.createCallExpression(props.context.importer.internal("llmApplicationFinalize"), undefined, [
51
51
  typescript_1.default.factory.createIdentifier("application"),
@@ -1 +1 @@
1
- {"version":3,"file":"LlmControllerTransformer.js","sourceRoot":"","sources":["../../../../src/transformers/features/llm/LlmControllerTransformer.ts"],"names":[],"mappings":";;;;;;AAAA,4DAA4B;AAE5B,4EAAyE;AACzE,sEAAmE;AACnE,0EAAuE;AAGvE,6DAA0D;AAC1D,2EAAwE;AAExE,IAAiB,wBAAwB,CAkFxC;AAlFD,WAAiB,wBAAwB;IAC1B,kCAAS,GAAG,CAAC,KAAsB,EAAiB,EAAE;;QACjE,MAAM,GAAG,GAAG,qDAAyB,CAAC,SAAS,CAAC,aAAa,EAAE,KAAK,CAAC,CAAC;QACtE,IAAI,GAAG,KAAK,IAAI;YAAE,OAAO,KAAK,CAAC,UAAU,CAAC;aACrC,IAAI,KAAK,CAAC,UAAU,CAAC,SAAS,CAAC,MAAM,KAAK,CAAC;YAC9C,MAAM,IAAI,mCAAgB,CAAC;gBACzB,IAAI,EAAE,sBAAsB;gBAC5B,OAAO,EAAE,qBAAqB;aAC/B,CAAC,CAAC;aACA,IAAI,KAAK,CAAC,UAAU,CAAC,SAAS,CAAC,MAAM,KAAK,CAAC;YAC9C,MAAM,IAAI,mCAAgB,CAAC;gBACzB,IAAI,EAAE,sBAAsB;gBAC5B,OAAO,EAAE,cAAc;aACxB,CAAC,CAAC;QAEL,MAAM,QAAQ,GAAkB,oBAAE,CAAC,OAAO,CAAC,kBAAkB,CAC3D,+BAAc,CAAC,KAAK,CAAC,GAAG,CAAC,WAAW,CAAC,EACrC,KAAK,CAAC,OAAO,CAAC,QAAQ,CAAC,IAAI,CAAC;YAC1B,IAAI,EAAE,kBAAkB;YACxB,IAAI,EAAE,iBAAiB;YACvB,SAAS,EAAE;gBACT,oBAAE,CAAC,OAAO,CAAC,qBAAqB,CAC9B,oBAAE,CAAC,OAAO,CAAC,mBAAmB,CAAC,GAAG,CAAC,WAAW,CAAC,KAAK,CAAC,CACtD;aACF;SACF,CAAC,CACH,CAAC;QACF,MAAM,KAAK,GAAkB,oBAAE,CAAC,OAAO,CAAC,6BAA6B,CACnE;YACE,oBAAE,CAAC,OAAO,CAAC,wBAAwB,CACjC,UAAU,EACV,oBAAE,CAAC,OAAO,CAAC,mBAAmB,CAAC,OAAO,CAAC,CACxC;YACD,oBAAE,CAAC,OAAO,CAAC,wBAAwB,CACjC,MAAM,EACN,KAAK,CAAC,UAAU,CAAC,SAAU,CAAC,CAAC,CAAE,CAChC;YACD,oBAAE,CAAC,OAAO,CAAC,wBAAwB,CACjC,SAAS,EACT,KAAK,CAAC,UAAU,CAAC,SAAU,CAAC,CAAC,CAAE,CAChC;YACD,oBAAE,CAAC,OAAO,CAAC,iCAAiC,CAAC,aAAa,CAAC;SAC5D,EACD,IAAI,CACL,CAAC;QACF,OAAO,qCAAiB,CAAC,QAAQ,CAC/B,oBAAE,CAAC,OAAO,CAAC,WAAW,CACpB;YACE,mCAAgB,CAAC,QAAQ,CAAC;gBACxB,IAAI,EAAE,aAAa;gBACnB,KAAK,EAAE,QAAQ;aAChB,CAAC;YACF,GAAG,CAAC,CAAC,CAAC,CAAA,MAAA,KAAK,CAAC,UAAU,CAAC,SAAS,0CAAG,CAAC,CAAC,CAAA;gBACnC,CAAC,CAAC;oBACE,oBAAE,CAAC,OAAO,CAAC,yBAAyB,CAClC,oBAAE,CAAC,OAAO,CAAC,oBAAoB,CAC7B,KAAK,CAAC,OAAO,CAAC,QAAQ,CAAC,QAAQ,CAAC,wBAAwB,CAAC,EACzD,SAAS,EACT;wBACE,oBAAE,CAAC,OAAO,CAAC,gBAAgB,CAAC,aAAa,CAAC;wBAC1C,KAAK,CAAC,UAAU,CAAC,SAAS,CAAC,CAAC,CAAC;qB
AC9B,CACF,CACF;iBACF;gBACH,CAAC,CAAC,EAAE,CAAC;YACP,oBAAE,CAAC,OAAO,CAAC,qBAAqB,CAAC,KAAK,CAAC;SACxC,EACD,IAAI,CACL,EACD,KAAK,CAAC,OAAO,CAAC,QAAQ,CAAC,IAAI,CAAC;YAC1B,IAAI,EAAE,kBAAkB;YACxB,IAAI,EAAE,gBAAgB;YACtB,SAAS,EAAE;gBACT,oBAAE,CAAC,OAAO,CAAC,qBAAqB,CAC9B,oBAAE,CAAC,OAAO,CAAC,mBAAmB,CAAC,GAAG,CAAC,WAAW,CAAC,KAAK,CAAC,CACtD;gBACD,GAAG,CAAC,IAAI;aACT;SACF,CAAC,CACH,CAAC;IACJ,CAAC,CAAC;AACJ,CAAC,EAlFgB,wBAAwB,wCAAxB,wBAAwB,QAkFxC"}
1
+ {"version":3,"file":"LlmControllerTransformer.js","sourceRoot":"","sources":["../../../../src/transformers/features/llm/LlmControllerTransformer.ts"],"names":[],"mappings":";;;;;;AAAA,4DAA4B;AAE5B,4EAAyE;AACzE,sEAAmE;AACnE,0EAAuE;AAGvE,6DAA0D;AAC1D,2EAAwE;AAExE,IAAiB,wBAAwB,CAkFxC;AAlFD,WAAiB,wBAAwB;IAC1B,kCAAS,GAAG,CAAC,KAAsB,EAAiB,EAAE;;QACjE,MAAM,GAAG,GAAG,qDAAyB,CAAC,SAAS,CAAC,aAAa,EAAE,KAAK,CAAC,CAAC;QACtE,IAAI,GAAG,KAAK,IAAI;YAAE,OAAO,KAAK,CAAC,UAAU,CAAC;aACrC,IAAI,KAAK,CAAC,UAAU,CAAC,SAAS,CAAC,CAAC,CAAC,KAAK,SAAS;YAClD,MAAM,IAAI,mCAAgB,CAAC;gBACzB,IAAI,EAAE,sBAAsB;gBAC5B,OAAO,EAAE,qBAAqB;aAC/B,CAAC,CAAC;aACA,IAAI,KAAK,CAAC,UAAU,CAAC,SAAS,CAAC,CAAC,CAAC,KAAK,SAAS;YAClD,MAAM,IAAI,mCAAgB,CAAC;gBACzB,IAAI,EAAE,sBAAsB;gBAC5B,OAAO,EAAE,cAAc;aACxB,CAAC,CAAC;QAEL,MAAM,QAAQ,GAAkB,oBAAE,CAAC,OAAO,CAAC,kBAAkB,CAC3D,+BAAc,CAAC,KAAK,CAAC,GAAG,CAAC,WAAW,CAAC,EACrC,KAAK,CAAC,OAAO,CAAC,QAAQ,CAAC,IAAI,CAAC;YAC1B,IAAI,EAAE,kBAAkB;YACxB,IAAI,EAAE,iBAAiB;YACvB,SAAS,EAAE;gBACT,oBAAE,CAAC,OAAO,CAAC,qBAAqB,CAC9B,oBAAE,CAAC,OAAO,CAAC,mBAAmB,CAAC,GAAG,CAAC,WAAW,CAAC,KAAK,CAAC,CACtD;aACF;SACF,CAAC,CACH,CAAC;QACF,MAAM,KAAK,GAAkB,oBAAE,CAAC,OAAO,CAAC,6BAA6B,CACnE;YACE,oBAAE,CAAC,OAAO,CAAC,wBAAwB,CACjC,UAAU,EACV,oBAAE,CAAC,OAAO,CAAC,mBAAmB,CAAC,OAAO,CAAC,CACxC;YACD,oBAAE,CAAC,OAAO,CAAC,wBAAwB,CACjC,MAAM,EACN,KAAK,CAAC,UAAU,CAAC,SAAS,CAAC,CAAC,CAAC,CAC9B;YACD,oBAAE,CAAC,OAAO,CAAC,wBAAwB,CACjC,SAAS,EACT,KAAK,CAAC,UAAU,CAAC,SAAS,CAAC,CAAC,CAAC,CAC9B;YACD,oBAAE,CAAC,OAAO,CAAC,iCAAiC,CAAC,aAAa,CAAC;SAC5D,EACD,IAAI,CACL,CAAC;QACF,OAAO,qCAAiB,CAAC,QAAQ,CAC/B,oBAAE,CAAC,OAAO,CAAC,WAAW,CACpB;YACE,mCAAgB,CAAC,QAAQ,CAAC;gBACxB,IAAI,EAAE,aAAa;gBACnB,KAAK,EAAE,QAAQ;aAChB,CAAC;YACF,GAAG,CAAC,CAAA,MAAA,KAAK,CAAC,UAAU,CAAC,SAAS,0CAAG,CAAC,CAAC,MAAK,SAAS;gBAC/C,CAAC,CAAC;oBACE,oBAAE,CAAC,OAAO,CAAC,yBAAyB,CAClC,oBAAE,CAAC,OAAO,CAAC,oBAAoB,CAC7B,KAAK,CAAC,OAAO,CAAC,QAAQ,CAAC,QAAQ,CAAC,wBAAwB,CAAC,EACzD,SAAS,EACT;wBACE,oBAAE,CAAC,OAAO,CAAC,gBAAgB,CAAC,aAAa,CAAC;wBAC1C,KAAK,CAAC,UAAU,CAAC,SAAS,CAAC,CAAC,CA
AC;qBAC9B,CACF,CACF;iBACF;gBACH,CAAC,CAAC,EAAE,CAAC;YACP,oBAAE,CAAC,OAAO,CAAC,qBAAqB,CAAC,KAAK,CAAC;SACxC,EACD,IAAI,CACL,EACD,KAAK,CAAC,OAAO,CAAC,QAAQ,CAAC,IAAI,CAAC;YAC1B,IAAI,EAAE,kBAAkB;YACxB,IAAI,EAAE,gBAAgB;YACtB,SAAS,EAAE;gBACT,oBAAE,CAAC,OAAO,CAAC,qBAAqB,CAC9B,oBAAE,CAAC,OAAO,CAAC,mBAAmB,CAAC,GAAG,CAAC,WAAW,CAAC,KAAK,CAAC,CACtD;gBACD,GAAG,CAAC,IAAI;aACT;SACF,CAAC,CACH,CAAC;IACJ,CAAC,CAAC;AACJ,CAAC,EAlFgB,wBAAwB,wCAAxB,wBAAwB,QAkFxC"}
@@ -11,12 +11,12 @@ var LlmControllerTransformer;
11
11
  const dec = LlmApplicationTransformer.decompose("application", props);
12
12
  if (dec === null)
13
13
  return props.expression;
14
- else if (props.expression.arguments.length === 0)
14
+ else if (props.expression.arguments[0] === undefined)
15
15
  throw new TransformerError({
16
16
  code: `typia.llm.controller`,
17
17
  message: `no identifier name.`,
18
18
  });
19
- else if (props.expression.arguments.length === 1)
19
+ else if (props.expression.arguments[1] === undefined)
20
20
  throw new TransformerError({
21
21
  code: `typia.llm.controller`,
22
22
  message: `no executor.`,
@@ -39,7 +39,7 @@ var LlmControllerTransformer;
39
39
  name: "application",
40
40
  value: property,
41
41
  }),
42
- ...(!!props.expression.arguments?.[2]
42
+ ...(props.expression.arguments?.[2] !== undefined
43
43
  ? [
44
44
  ts.factory.createExpressionStatement(ts.factory.createCallExpression(props.context.importer.internal("llmApplicationFinalize"), undefined, [
45
45
  ts.factory.createIdentifier("application"),
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "typia",
3
- "version": "9.3.0-dev.20250511",
3
+ "version": "9.3.1-dev.20250520",
4
4
  "description": "Superfast runtime validators with only one line",
5
5
  "main": "lib/index.js",
6
6
  "typings": "lib/index.d.ts",
@@ -41,7 +41,7 @@
41
41
  },
42
42
  "homepage": "https://typia.io",
43
43
  "dependencies": {
44
- "@samchon/openapi": "4.3.0-dev.20250511",
44
+ "@samchon/openapi": "^4.3.1",
45
45
  "@standard-schema/spec": "^1.0.0",
46
46
  "commander": "^10.0.0",
47
47
  "comment-json": "^4.2.3",
@@ -50,7 +50,7 @@
50
50
  "randexp": "^0.5.3"
51
51
  },
52
52
  "peerDependencies": {
53
- "@samchon/openapi": ">=4.2.0 <5.0.0",
53
+ "@samchon/openapi": ">=4.3.1 <5.0.0",
54
54
  "typescript": ">=4.8.0 <5.9.0"
55
55
  },
56
56
  "devDependencies": {
@@ -37,6 +37,9 @@ export namespace PluginConfigurator {
37
37
  | undefined;
38
38
  const strictNullChecks: boolean | undefined =
39
39
  compilerOptions.strictNullChecks as boolean | undefined;
40
+ const skipLibCheck: boolean | undefined = compilerOptions.skipLibCheck as
41
+ | boolean
42
+ | undefined;
40
43
  const oldbie: comments.CommentObject | undefined = plugins.find(
41
44
  (p) =>
42
45
  typeof p === "object" &&
@@ -46,11 +49,13 @@ export namespace PluginConfigurator {
46
49
  if (
47
50
  strictNullChecks !== false &&
48
51
  (strict === true || strictNullChecks === true) &&
49
- oldbie !== undefined
52
+ oldbie !== undefined &&
53
+ skipLibCheck === true
50
54
  )
51
55
  return;
52
56
 
53
57
  // DO CONFIGURE
58
+ compilerOptions.skipLibCheck = true;
54
59
  compilerOptions.strictNullChecks = true;
55
60
  if (strict === undefined && strictNullChecks === undefined)
56
61
  compilerOptions.strict = true;
package/src/llm.ts CHANGED
@@ -2,12 +2,154 @@ import { ILlmApplication, ILlmController, ILlmSchema } from "@samchon/openapi";
2
2
 
3
3
  import { NoTransformConfigurationError } from "./transformers/NoTransformConfigurationError";
4
4
 
5
+ /**
6
+ * > You must configure the generic argument `Class`.
7
+ *
8
+ * TypeScript functions to LLM function calling controller.
9
+ *
10
+ * Creates a controller of LLM (Large Language Model) function calling
11
+ * from a TypeScript class or interface type containing the target functions to be
12
+ * called by the LLM function calling feature. The returned controller contains
13
+ * not only the {@link application} of {@link ILlmFunction function calling schemas},
14
+ * but also the {@link ILlmController.execute executor} of the functions.
15
+ *
16
+ * If you put the returned {@link ILlmController} to the LLM provider like
17
+ * [OpenAI (ChatGPT)](https://openai.com/), the LLM will automatically select the
18
+ * proper function and fill its arguments from the conversation (maybe chatting text)
19
+ * with user (human). And you can actually call the function by using
20
+ * {@link ILlmController.execute} property. This is the concept of the LLM function
21
+ * calling.
22
+ *
23
+ * Here is an example of using `typia.llm.controller()` function for AI agent
24
+ * development of performing such AI function calling to mobile API classes
25
+ * through this `typia` and external `@agentica` libraries.
26
+ *
27
+ * ```typescript
28
+ * import { Agentica } from "@agentica/core";
29
+ * import typia from "typia";
30
+ *
31
+ * const agentica = new Agentica({
32
+ * model: "chatgpt",
33
+ * vendor: {
34
+ * api: new OpenAI({ apiKey: "********" }),
35
+ * model: "gpt-4o-mini",
36
+ * },
37
+ * controllers: [
38
+ * typia.llm.controller<ReactNativeFileSystem, "chatgpt">(
39
+ * "filesystem",
40
+ * new ReactNativeFileSystem(),
41
+ * ),
42
+ * typia.llm.controller<ReactNativeGallery, "chatgpt">(
43
+ * "gallery",
44
+ * new ReactNativeGallery(),
45
+ * ),
46
+ * ],
47
+ * });
48
+ * await agentica.conversate(
49
+ * "Organize photo collection and sort them into appropriate folders.",
50
+ * );
51
+ * ```
52
+ *
53
+ * Here is the list of available `Model` types with their corresponding LLM schema.
54
+ * Read the following list and determine the `Model` type considering the
55
+ * characteristics of the target LLM provider.
56
+ *
57
+ * - LLM provider schemas
58
+ * - `chatgpt`: [`IChatGptSchema`](https://samchon.github.io/openapi/api/types/IChatGptSchema-1.html)
59
+ * - `claude`: [`IClaudeSchema`](https://samchon.github.io/openapi/api/types/IClaudeSchema-1.html)
60
+ *   - `deepseek`: [`IDeepSeekSchema`](https://samchon.github.io/openapi/api/types/IDeepSeekSchema-1.html)
61
+ * - `gemini`: [`IGeminiSchema`](https://samchon.github.io/openapi/api/types/IGeminiSchema-1.html)
62
+ * - `llama`: [`ILlamaSchema`](https://samchon.github.io/openapi/api/types/ILlamaSchema-1.html)
63
+ * - Middle layer schemas
64
+ * - `3.0`: [`ILlmSchemaV3`](https://samchon.github.io/openapi/api/types/ILlmSchemaV3-1.html)
65
+ * - `3.1`: [`ILlmSchemaV3_1`](https://samchon.github.io/openapi/api/types/ILlmSchemaV3_1-1.html)
66
+ *
67
+ * @template Class Target class or interface type collecting the functions to call
68
+ * @template Model LLM schema model
69
+ * @template Config Configuration of LLM schema composition
70
+ * @param name Identifier name of the controller
71
+ * @param execute Executor instance
72
+ * @param options Options for the LLM application construction
73
+ * @returns Controller of LLM function calling
74
+ * @reference https://wrtnlabs.io/agentica/docs/core/controller/typescript/
75
+ * @author Jeongho Nam - https://github.com/samchon
76
+ */
5
77
  export function controller(
6
78
  name: string,
7
79
  execute: object,
8
80
  options?: Partial<Pick<ILlmApplication.IOptions<any>, "separate">>,
9
81
  ): never;
10
82
 
83
+ /**
84
+ * TypeScript functions to LLM function calling controller.
85
+ *
86
+ * Creates a controller of LLM (Large Language Model) function calling
87
+ * from a TypeScript class or interface type containing the target functions to be
88
+ * called by the LLM function calling feature. The returned controller contains
89
+ * not only the {@link application} of {@link ILlmFunction function calling schemas},
90
+ * but also the {@link ILlmController.execute executor} of the functions.
91
+ *
92
+ * If you put the returned {@link ILlmController} to the LLM provider like
93
+ * [OpenAI (ChatGPT)](https://openai.com/), the LLM will automatically select the
94
+ * proper function and fill its arguments from the conversation (maybe chatting text)
95
+ * with user (human). And you can actually call the function by using
96
+ * {@link ILlmController.execute} property. This is the concept of the LLM function
97
+ * calling.
98
+ *
99
+ * Here is an example of using `typia.llm.controller()` function for AI agent
100
+ * development of performing such AI function calling to mobile API classes
101
+ * through this `typia` and external `@agentica` libraries.
102
+ *
103
+ * ```typescript
104
+ * import { Agentica } from "@agentica/core";
105
+ * import typia from "typia";
106
+ *
107
+ * const agentica = new Agentica({
108
+ * model: "chatgpt",
109
+ * vendor: {
110
+ * api: new OpenAI({ apiKey: "********" }),
111
+ * model: "gpt-4o-mini",
112
+ * },
113
+ * controllers: [
114
+ * typia.llm.controller<ReactNativeFileSystem, "chatgpt">(
115
+ * "filesystem",
116
+ * new ReactNativeFileSystem(),
117
+ * ),
118
+ * typia.llm.controller<ReactNativeGallery, "chatgpt">(
119
+ * "gallery",
120
+ * new ReactNativeGallery(),
121
+ * ),
122
+ * ],
123
+ * });
124
+ * await agentica.conversate(
125
+ * "Organize photo collection and sort them into appropriate folders.",
126
+ * );
127
+ * ```
128
+ *
129
+ * Here is the list of available `Model` types with their corresponding LLM schema.
130
+ * Read the following list and determine the `Model` type considering the
131
+ * characteristics of the target LLM provider.
132
+ *
133
+ * - LLM provider schemas
134
+ * - `chatgpt`: [`IChatGptSchema`](https://samchon.github.io/openapi/api/types/IChatGptSchema-1.html)
135
+ * - `claude`: [`IClaudeSchema`](https://samchon.github.io/openapi/api/types/IClaudeSchema-1.html)
136
+ *   - `deepseek`: [`IDeepSeekSchema`](https://samchon.github.io/openapi/api/types/IDeepSeekSchema-1.html)
137
+ * - `gemini`: [`IGeminiSchema`](https://samchon.github.io/openapi/api/types/IGeminiSchema-1.html)
138
+ * - `llama`: [`ILlamaSchema`](https://samchon.github.io/openapi/api/types/ILlamaSchema-1.html)
139
+ * - Middle layer schemas
140
+ * - `3.0`: [`ILlmSchemaV3`](https://samchon.github.io/openapi/api/types/ILlmSchemaV3-1.html)
141
+ * - `3.1`: [`ILlmSchemaV3_1`](https://samchon.github.io/openapi/api/types/ILlmSchemaV3_1-1.html)
142
+ *
143
+ * @template Class Target class or interface type collecting the functions to call
144
+ * @template Model LLM schema model
145
+ * @template Config Configuration of LLM schema composition
146
+ * @param name Identifier name of the controller
147
+ * @param execute Executor instance
148
+ * @param options Options for the LLM application construction
149
+ * @returns Controller of LLM function calling
150
+ * @reference https://wrtnlabs.io/agentica/docs/core/controller/typescript/
151
+ * @author Jeongho Nam - https://github.com/samchon
152
+ */
11
153
  export function controller<
12
154
  Class extends Record<string, any>,
13
155
  Model extends ILlmSchema.Model,
@@ -26,7 +168,7 @@ export function controller(..._args: any[]): never {
26
168
  }
27
169
 
28
170
  /**
29
- * > You must configure the generic argument `App`.
171
+ * > You must configure the generic argument `Class`.
30
172
  *
31
173
  * TypeScript functions to LLM function calling application.
32
174
  *
@@ -63,12 +205,13 @@ export function controller(..._args: any[]): never {
63
205
  *
64
206
  * - LLM provider schemas
65
207
  * - `chatgpt`: [`IChatGptSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IChatGptSchema.ts)
66
- * - `claude`: [`IClaudeSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IClaudeSchema.ts)
67
- * - `gemini`: [`IGeminiSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IGeminiSchema.ts)
68
- * - `llama`: [`ILlamaSchema`](https://github.com/samchon/openapi/blob/master/src/structures/ILlamaSchema.ts)
208
+ * - `claude`: [`IClaudeSchema`](https://samchon.github.io/openapi/api/types/IClaudeSchema-1.html)
209
+ *   - `deepseek`: [`IDeepSeekSchema`](https://samchon.github.io/openapi/api/types/IDeepSeekSchema-1.html)
210
+ * - `gemini`: [`IGeminiSchema`](https://samchon.github.io/openapi/api/types/IGeminiSchema-1.html)
211
+ * - `llama`: [`ILlamaSchema`](https://samchon.github.io/openapi/api/types/ILlamaSchema-1.html)
69
212
  * - Middle layer schemas
70
- * - `3.0`: [`ILlmSchemaV3`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3.ts)
71
- * - `3.1`: [`ILlmSchemaV3_1`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3_1.ts)
213
+ * - `3.0`: [`ILlmSchemaV3`](https://samchon.github.io/openapi/api/types/ILlmSchemaV3-1.html)
214
+ * - `3.1`: [`ILlmSchemaV3_1`](https://samchon.github.io/openapi/api/types/ILlmSchemaV3_1-1.html)
72
215
  *
73
216
  * @template Class Target class or interface type collecting the functions to call
74
217
  * @template Model LLM schema model
@@ -118,12 +261,13 @@ export function application(
118
261
  *
119
262
  * - LLM provider schemas
120
263
  * - `chatgpt`: [`IChatGptSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IChatGptSchema.ts)
121
- * - `claude`: [`IClaudeSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IClaudeSchema.ts)
122
- * - `gemini`: [`IGeminiSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IGeminiSchema.ts)
123
- * - `llama`: [`ILlamaSchema`](https://github.com/samchon/openapi/blob/master/src/structures/ILlamaSchema.ts)
264
+ * - `claude`: [`IClaudeSchema`](https://samchon.github.io/openapi/api/types/IClaudeSchema-1.html)
265
+ * - `deepseek`: [`IDeepSeekSchema`](https://samchon.github.io/openapi/api/types/IDeepSeekSchema-1.html)
266
+ * - `gemini`: [`IGeminiSchema`](https://samchon.github.io/openapi/api/types/IGeminiSchema-1.html)
267
+ * - `llama`: [`ILlamaSchema`](https://samchon.github.io/openapi/api/types/ILlamaSchema-1.html)
124
268
  * - Middle layer schemas
125
- * - `3.0`: [`ILlmSchemaV3`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3.ts)
126
- * - `3.1`: [`ILlmSchemaV3_1`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3_1.ts)
269
+ * - `3.0`: [`ILlmSchemaV3`](https://samchon.github.io/openapi/api/types/ILlmSchemaV3-1.html)
270
+ * - `3.1`: [`ILlmSchemaV3_1`](https://samchon.github.io/openapi/api/types/ILlmSchemaV3_1-1.html)
127
271
  *
128
272
  * @template Class Target class or interface type collecting the functions to call
129
273
  * @template Model LLM schema model
@@ -174,12 +318,13 @@ export function application(): never {
174
318
  *
175
319
  * - LLM provider schemas
176
320
  * - `chatgpt`: [`IChatGptSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IChatGptSchema.ts)
177
- * - `claude`: [`IClaudeSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IClaudeSchema.ts)
178
- * - `gemini`: [`IGeminiSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IGeminiSchema.ts)
179
- * - `llama`: [`ILlamaSchema`](https://github.com/samchon/openapi/blob/master/src/structures/ILlamaSchema.ts)
321
+ * - `claude`: [`IClaudeSchema`](https://samchon.github.io/openapi/api/types/IClaudeSchema-1.html)
322
+ * - `deepseek`: [`IDeepSeekSchema`](https://samchon.github.io/openapi/api/types/IDeepSeekSchema-1.html)
323
+ * - `gemini`: [`IGeminiSchema`](https://samchon.github.io/openapi/api/types/IGeminiSchema-1.html)
324
+ * - `llama`: [`ILlamaSchema`](https://samchon.github.io/openapi/api/types/ILlamaSchema-1.html)
180
325
  * - Middle layer schemas
181
- * - `3.0`: [`ILlmSchemaV3`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3.ts)
182
- * - `3.1`: [`ILlmSchemaV3_1`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3_1.ts)
326
+ * - `3.0`: [`ILlmSchemaV3`](https://samchon.github.io/openapi/api/types/ILlmSchemaV3-1.html)
327
+ * - `3.1`: [`ILlmSchemaV3_1`](https://samchon.github.io/openapi/api/types/ILlmSchemaV3_1-1.html)
183
328
  *
184
329
  * @template Parameters Target parameters type
185
330
  * @template Model LLM schema model
@@ -214,12 +359,13 @@ export function parameters(): never;
214
359
  *
215
360
  * - LLM provider schemas
216
361
  * - `chatgpt`: [`IChatGptSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IChatGptSchema.ts)
217
- * - `claude`: [`IClaudeSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IClaudeSchema.ts)
218
- * - `gemini`: [`IGeminiSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IGeminiSchema.ts)
219
- * - `llama`: [`ILlamaSchema`](https://github.com/samchon/openapi/blob/master/src/structures/ILlamaSchema.ts)
362
+ * - `claude`: [`IClaudeSchema`](https://samchon.github.io/openapi/api/types/IClaudeSchema-1.html)
363
+ * - `deepseek`: [`IDeepSeekSchema`](https://samchon.github.io/openapi/api/types/IDeepSeekSchema-1.html)
364
+ * - `gemini`: [`IGeminiSchema`](https://samchon.github.io/openapi/api/types/IGeminiSchema-1.html)
365
+ * - `llama`: [`ILlamaSchema`](https://samchon.github.io/openapi/api/types/ILlamaSchema-1.html)
220
366
  * - Middle layer schemas
221
- * - `3.0`: [`ILlmSchemaV3`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3.ts)
222
- * - `3.1`: [`ILlmSchemaV3_1`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3_1.ts)
367
+ * - `3.0`: [`ILlmSchemaV3`](https://samchon.github.io/openapi/api/types/ILlmSchemaV3-1.html)
368
+ * - `3.1`: [`ILlmSchemaV3_1`](https://samchon.github.io/openapi/api/types/ILlmSchemaV3_1-1.html)
223
369
  *
224
370
  * @template Parameters Target parameters type
225
371
  * @template Model LLM schema model
@@ -257,12 +403,13 @@ export function parameters(): never {
257
403
  *
258
404
  * - LLM provider schemas
259
405
  * - `chatgpt`: [`IChatGptSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IChatGptSchema.ts)
260
- * - `claude`: [`IClaudeSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IClaudeSchema.ts)
261
- * - `gemini`: [`IGeminiSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IGeminiSchema.ts)
262
- * - `llama`: [`ILlamaSchema`](https://github.com/samchon/openapi/blob/master/src/structures/ILlamaSchema.ts)
406
+ * - `claude`: [`IClaudeSchema`](https://samchon.github.io/openapi/api/types/IClaudeSchema-1.html)
407
+ * - `deepseek`: [`IDeepSeekSchema`](https://samchon.github.io/openapi/api/types/IDeepSeekSchema-1.html)
408
+ * - `gemini`: [`IGeminiSchema`](https://samchon.github.io/openapi/api/types/IGeminiSchema-1.html)
409
+ * - `llama`: [`ILlamaSchema`](https://samchon.github.io/openapi/api/types/ILlamaSchema-1.html)
263
410
  * - Middle layer schemas
264
- * - `3.0`: [`ILlmSchemaV3`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3.ts)
265
- * - `3.1`: [`ILlmSchemaV3_1`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3_1.ts)
411
+ * - `3.0`: [`ILlmSchemaV3`](https://samchon.github.io/openapi/api/types/ILlmSchemaV3-1.html)
412
+ * - `3.1`: [`ILlmSchemaV3_1`](https://samchon.github.io/openapi/api/types/ILlmSchemaV3_1-1.html)
266
413
  *
267
414
  * If you actually want to perform the LLM function calling with TypeScript functions,
268
415
  * you can do it with the {@link application} function. Otherwise you hope to perform the
@@ -304,12 +451,13 @@ export function schema(): never;
304
451
  *
305
452
  * - LLM provider schemas
306
453
  * - `chatgpt`: [`IChatGptSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IChatGptSchema.ts)
307
- * - `claude`: [`IClaudeSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IClaudeSchema.ts)
308
- * - `gemini`: [`IGeminiSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IGeminiSchema.ts)
309
- * - `llama`: [`ILlamaSchema`](https://github.com/samchon/openapi/blob/master/src/structures/ILlamaSchema.ts)
454
+ * - `claude`: [`IClaudeSchema`](https://samchon.github.io/openapi/api/types/IClaudeSchema-1.html)
455
+ * - `deepseek`: [`IDeepSeekSchema`](https://samchon.github.io/openapi/api/types/IDeepSeekSchema-1.html)
456
+ * - `gemini`: [`IGeminiSchema`](https://samchon.github.io/openapi/api/types/IGeminiSchema-1.html)
457
+ * - `llama`: [`ILlamaSchema`](https://samchon.github.io/openapi/api/types/ILlamaSchema-1.html)
310
458
  * - Middle layer schemas
311
- * - `3.0`: [`ILlmSchemaV3`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3.ts)
312
- * - `3.1`: [`ILlmSchemaV3_1`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3_1.ts)
459
+ * - `3.0`: [`ILlmSchemaV3`](https://samchon.github.io/openapi/api/types/ILlmSchemaV3-1.html)
460
+ * - `3.1`: [`ILlmSchemaV3_1`](https://samchon.github.io/openapi/api/types/ILlmSchemaV3_1-1.html)
313
461
  *
314
462
  * If you actually want to perform the LLM function calling with TypeScript functions,
315
463
  * you can do it with the {@link application} function. Otherwise you hope to perform the
@@ -10,7 +10,11 @@ import { IJsonSchemaCollection } from "./IJsonSchemaCollection";
10
10
  * @template Types Original TypeScript types used in the JSON schemas.
11
11
  * @author Jeongho Nam - https://github.com/samchon
12
12
  */
13
- export import IJsonApplication = IJsonSchemaCollection;
13
+ export type IJsonApplication = IJsonSchemaCollection;
14
+ export namespace IJsonApplication {
15
+ export type IV3_0 = IJsonSchemaCollection.IV3_0;
16
+ export type IV3_1 = IJsonSchemaCollection.IV3_1;
17
+ }
14
18
 
15
19
  // export interface IJsonApplication<
16
20
  // Version extends "3.0" | "3.1" = "3.1",
@@ -12,12 +12,12 @@ export namespace LlmControllerTransformer {
12
12
  export const transform = (props: ITransformProps): ts.Expression => {
13
13
  const dec = LlmApplicationTransformer.decompose("application", props);
14
14
  if (dec === null) return props.expression;
15
- else if (props.expression.arguments.length === 0)
15
+ else if (props.expression.arguments[0] === undefined)
16
16
  throw new TransformerError({
17
17
  code: `typia.llm.controller`,
18
18
  message: `no identifier name.`,
19
19
  });
20
- else if (props.expression.arguments.length === 1)
20
+ else if (props.expression.arguments[1] === undefined)
21
21
  throw new TransformerError({
22
22
  code: `typia.llm.controller`,
23
23
  message: `no executor.`,
@@ -43,11 +43,11 @@ export namespace LlmControllerTransformer {
43
43
  ),
44
44
  ts.factory.createPropertyAssignment(
45
45
  "name",
46
- props.expression.arguments![0]!,
46
+ props.expression.arguments[0],
47
47
  ),
48
48
  ts.factory.createPropertyAssignment(
49
49
  "execute",
50
- props.expression.arguments![1]!,
50
+ props.expression.arguments[1],
51
51
  ),
52
52
  ts.factory.createShorthandPropertyAssignment("application"),
53
53
  ],
@@ -60,7 +60,7 @@ export namespace LlmControllerTransformer {
60
60
  name: "application",
61
61
  value: property,
62
62
  }),
63
- ...(!!props.expression.arguments?.[2]
63
+ ...(props.expression.arguments?.[2] !== undefined
64
64
  ? [
65
65
  ts.factory.createExpressionStatement(
66
66
  ts.factory.createCallExpression(