typia 10.1.0 → 11.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (64) hide show
  1. package/README.md +14 -10
  2. package/lib/internal/_createStandardSchema.d.mts +1 -1
  3. package/lib/internal/_createStandardSchema.d.ts +1 -1
  4. package/lib/internal/_createStandardSchema.js.map +1 -1
  5. package/lib/internal/_llmApplicationFinalize.d.mts +2 -2
  6. package/lib/internal/_llmApplicationFinalize.d.ts +2 -2
  7. package/lib/internal/_llmApplicationFinalize.js +9 -10
  8. package/lib/internal/_llmApplicationFinalize.js.map +1 -1
  9. package/lib/internal/_llmApplicationFinalize.mjs +12 -13
  10. package/lib/llm.d.mts +20 -199
  11. package/lib/llm.d.ts +20 -199
  12. package/lib/llm.js.map +1 -1
  13. package/lib/module.d.mts +2 -2
  14. package/lib/module.d.ts +2 -2
  15. package/lib/programmers/llm/LlmApplicationProgrammer.d.mts +8 -8
  16. package/lib/programmers/llm/LlmApplicationProgrammer.d.ts +8 -8
  17. package/lib/programmers/llm/LlmApplicationProgrammer.js +58 -64
  18. package/lib/programmers/llm/LlmApplicationProgrammer.js.map +1 -1
  19. package/lib/programmers/llm/LlmApplicationProgrammer.mjs +63 -68
  20. package/lib/programmers/llm/{LlmModelPredicator.d.mts → LlmMetadataFactory.d.mts} +2 -8
  21. package/lib/programmers/llm/{LlmModelPredicator.d.ts → LlmMetadataFactory.d.ts} +2 -8
  22. package/lib/programmers/llm/{LlmModelPredicator.js → LlmMetadataFactory.js} +6 -37
  23. package/lib/programmers/llm/LlmMetadataFactory.js.map +1 -0
  24. package/lib/programmers/llm/{LlmModelPredicator.mjs → LlmMetadataFactory.mjs} +6 -34
  25. package/lib/programmers/llm/LlmMetadataFactory.mjs.map +1 -0
  26. package/lib/programmers/llm/LlmParametersProgrammer.d.mts +8 -8
  27. package/lib/programmers/llm/LlmParametersProgrammer.d.ts +8 -8
  28. package/lib/programmers/llm/LlmParametersProgrammer.js +10 -10
  29. package/lib/programmers/llm/LlmParametersProgrammer.js.map +1 -1
  30. package/lib/programmers/llm/LlmParametersProgrammer.mjs +10 -13
  31. package/lib/programmers/llm/LlmSchemaProgrammer.d.mts +10 -12
  32. package/lib/programmers/llm/LlmSchemaProgrammer.d.ts +10 -12
  33. package/lib/programmers/llm/LlmSchemaProgrammer.js +21 -33
  34. package/lib/programmers/llm/LlmSchemaProgrammer.js.map +1 -1
  35. package/lib/programmers/llm/LlmSchemaProgrammer.mjs +21 -37
  36. package/lib/transformers/features/llm/LlmApplicationTransformer.d.mts +0 -2
  37. package/lib/transformers/features/llm/LlmApplicationTransformer.d.ts +0 -2
  38. package/lib/transformers/features/llm/LlmApplicationTransformer.js +9 -20
  39. package/lib/transformers/features/llm/LlmApplicationTransformer.js.map +1 -1
  40. package/lib/transformers/features/llm/LlmApplicationTransformer.mjs +9 -20
  41. package/lib/transformers/features/llm/LlmControllerTransformer.js +1 -8
  42. package/lib/transformers/features/llm/LlmControllerTransformer.js.map +1 -1
  43. package/lib/transformers/features/llm/LlmControllerTransformer.mjs +1 -8
  44. package/lib/transformers/features/llm/LlmParametersTransformer.js +6 -15
  45. package/lib/transformers/features/llm/LlmParametersTransformer.js.map +1 -1
  46. package/lib/transformers/features/llm/LlmParametersTransformer.mjs +6 -15
  47. package/lib/transformers/features/llm/LlmSchemaTransformer.js +11 -26
  48. package/lib/transformers/features/llm/LlmSchemaTransformer.js.map +1 -1
  49. package/lib/transformers/features/llm/LlmSchemaTransformer.mjs +11 -26
  50. package/package.json +2 -2
  51. package/src/internal/_createStandardSchema.ts +2 -2
  52. package/src/internal/_llmApplicationFinalize.ts +18 -25
  53. package/src/llm.ts +28 -221
  54. package/src/module.ts +2 -2
  55. package/src/programmers/llm/LlmApplicationProgrammer.ts +139 -151
  56. package/src/programmers/llm/{LlmModelPredicator.ts → LlmMetadataFactory.ts} +4 -42
  57. package/src/programmers/llm/LlmParametersProgrammer.ts +34 -41
  58. package/src/programmers/llm/LlmSchemaProgrammer.ts +59 -98
  59. package/src/transformers/features/llm/LlmApplicationTransformer.ts +14 -29
  60. package/src/transformers/features/llm/LlmControllerTransformer.ts +1 -12
  61. package/src/transformers/features/llm/LlmParametersTransformer.ts +10 -21
  62. package/src/transformers/features/llm/LlmSchemaTransformer.ts +29 -65
  63. package/lib/programmers/llm/LlmModelPredicator.js.map +0 -1
  64. package/lib/programmers/llm/LlmModelPredicator.mjs.map +0 -1
package/README.md CHANGED
@@ -26,16 +26,16 @@ export namespace json {
26
26
  // AI FUNCTION CALLING SCHEMA
27
27
  export namespace llm {
28
28
  // collection of function calling schemas
29
- export function application<Class, Model>(): ILlmApplication<Class>;
30
- export function controller<Class, Model>(
29
+ export function application<Class>(): ILlmApplication<Class>;
30
+ export function controller<Class>(
31
31
  name: string,
32
32
  execute: Class,
33
- ): ILlmController<Model>; // +executor
33
+ ): ILlmController; // +executor
34
34
  // structured output
35
- export function parameters<P, Model>(): ILlmSchema.IParameters<Model>;
36
- export function schema<T, Model>(
37
- $defs: Record<string, ILlmSchema<Model>>,
38
- ): ILlmSchema<Model>; // type schema
35
+ export function parameters<P>(): ILlmSchema.IParameters;
36
+ export function schema<T>(
37
+ $defs: Record<string, ILlmSchema>,
38
+ ): ILlmSchema; // type schema
39
39
  }
40
40
 
41
41
  // PROTOCOL BUFFER
@@ -63,9 +63,6 @@ export function random<T>(g?: Partial<IRandomGenerator>): T;
63
63
  > - Runtime validator is **20,000x faster** than `class-validator`
64
64
  > - JSON serialization is **200x faster** than `class-transformer`
65
65
 
66
-
67
-
68
-
69
66
  ## Transformation
70
67
  If you call `typia` function, it would be compiled like below.
71
68
 
@@ -153,3 +150,10 @@ Check out the document in the [website](https://typia.io/docs/):
153
150
  - [tRPC](https://typia.io/docs/utilization/trpc/)
154
151
  - [⇲ Benchmark Result](https://github.com/samchon/typia/tree/master/benchmark/results/11th%20Gen%20Intel(R)%20Core(TM)%20i5-1135G7%20%40%202.40GHz)
155
152
  - [⇲ `dev.to` Articles](https://dev.to/samchon/series/22474)
153
+
154
+
155
+
156
+
157
+ ## References
158
+ - inspired by [`typescript-is`](https://github.com/woutervh-/typescript-is)
159
+ - inspired by [`ts-patch`](https://github.com/nonara/ts-patch)
@@ -1,3 +1,3 @@
1
1
  import { StandardSchemaV1 } from "@standard-schema/spec";
2
2
  import { IValidation } from "../IValidation";
3
- export declare const _createStandardSchema: <T>(fn: (input: unknown) => IValidation<T>) => ((input: unknown) => IValidation<T>) & StandardSchemaV1<unknown, T>;
3
+ export declare const _createStandardSchema: <T>(fn: (input: unknown) => IValidation<T>) => ((input: unknown) => IValidation<T>) & StandardSchemaV1<T, T>;
@@ -1,3 +1,3 @@
1
1
  import { StandardSchemaV1 } from "@standard-schema/spec";
2
2
  import { IValidation } from "../IValidation";
3
- export declare const _createStandardSchema: <T>(fn: (input: unknown) => IValidation<T>) => ((input: unknown) => IValidation<T>) & StandardSchemaV1<unknown, T>;
3
+ export declare const _createStandardSchema: <T>(fn: (input: unknown) => IValidation<T>) => ((input: unknown) => IValidation<T>) & StandardSchemaV1<T, T>;
@@ -1 +1 @@
1
- {"version":3,"file":"_createStandardSchema.js","sourceRoot":"","sources":["../../src/internal/_createStandardSchema.ts"],"names":[],"mappings":";;;AAIO,MAAM,qBAAqB,GAAG,CACnC,EAAsC,EAC+B,EAAE,CACvE,MAAM,CAAC,MAAM,CAAC,EAAE,EAAE;IAChB,WAAW,EAAE;QACX,OAAO,EAAE,CAAC;QACV,MAAM,EAAE,OAAO;QACf,QAAQ,EAAE,CAAC,KAAc,EAA8B,EAAE;YACvD,MAAM,MAAM,GAAG,EAAE,CAAC,KAAK,CAAC,CAAC;YACzB,IAAI,MAAM,CAAC,OAAO,EAAE,CAAC;gBACnB,OAAO;oBACL,KAAK,EAAE,MAAM,CAAC,IAAI;iBACyB,CAAC;YAChD,CAAC;iBAAM,CAAC;gBACN,OAAO;oBACL,MAAM,EAAE,MAAM,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,KAAK,EAAE,EAAE,CAAC,CAAC;wBACpC,OAAO,EAAE,YAAY,KAAK,CAAC,QAAQ,SAAS,KAAK,CAAC,KAAK,EAAE;wBACzD,IAAI,EAAE,6BAA6B,CAAC,KAAK,CAAC,IAAI,CAAC;qBAChD,CAAC,CAAC;iBACqC,CAAC;YAC7C,CAAC;QACH,CAAC;KACF;CACqC,CAAC,CAAC;AAvB/B,QAAA,qBAAqB,yBAuBU;AAE5C,IAAK,eAWJ;AAXD,WAAK,eAAe;IAClB,yBAAyB;IACzB,oCAAoC;IACpC,mEAAmE;IACnE,uDAAK,CAAA;IACL,mCAAmC;IACnC,6DAAQ,CAAA;IACR,oCAAoC;IACpC,+DAAS,CAAA;IACT,gCAAgC;IAChC,+DAAS,CAAA;AACX,CAAC,EAXI,eAAe,KAAf,eAAe,QAWnB;AAED,MAAM,6BAA6B,GAAG,CACpC,IAAY,EACiC,EAAE;IAC/C,IAAI,CAAC,IAAI,CAAC,UAAU,CAAC,QAAQ,CAAC,EAAE,CAAC;QAC/B,MAAM,IAAI,KAAK,CAAC,iBAAiB,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC;IAC3D,CAAC;IAED,MAAM,QAAQ,GAAmC,EAAE,CAAC;IACpD,IAAI,cAAc,GAAG,EAAE,CAAC;IACxB,IAAI,KAAK,GAAoB,eAAe,CAAC,KAAK,CAAC;IACnD,IAAI,KAAK,GAAG,QAAQ,CAAC,MAAM,GAAG,CAAC,CAAC;IAChC,OAAO,KAAK,GAAG,IAAI,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;QAC/B,KAAK,EAAE,CAAC;QACR,MAAM,IAAI,GAAG,IAAI,CAAC,KAAK,CAAC,CAAC;QAEzB,IAAI,KAAK,KAAK,eAAe,CAAC,QAAQ,EAAE,CAAC;YACvC,IAAI,IAAI,KAAK,GAAG,IAAI,IAAI,KAAK,GAAG,EAAE,CAAC;gBACjC,kBAAkB;gBAClB,QAAQ,CAAC,IAAI,CAAC;oBACZ,GAAG,EAAE,cAAc;iBACpB,CAAC,CAAC;gBACH,KAAK,GAAG,eAAe,CAAC,KAAK,CAAC;YAChC,CAAC;iBAAM,IAAI,KAAK,KAAK,IAAI,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;gBACrC,cAAc;gBACd,cAAc,IAAI,IAAI,CAAC;gBACvB,QAAQ,CAAC,IAAI,CAAC;oBACZ,GAAG,EAAE,cAAc;iBACpB,CAAC,CAAC;gBACH,KAAK,EAAE,CAAC;gBACR,KAAK,GAAG,eAAe,CAAC,KAAK,CAAC;YAChC,CAAC;iBAAM,CAAC;gBACN,cAAc,IAAI,IAAI,CAAC;YACzB,CAAC;QACH,CAAC;aAAM,IAAI,KAAK,KAAK,eAAe,CAAC,SAAS,
EAAE,CAAC;YAC/C,IAAI,IAAI,KAAK,GAAG,EAAE,CAAC;gBACjB,oBAAoB;gBACpB,QAAQ,CAAC,IAAI,CAAC;oBACZ,GAAG,EAAE,IAAI,CAAC,KAAK,CAAC,cAAc,GAAG,IAAI,CAAC;iBACvC,CAAC,CAAC;gBACH,mBAAmB;gBACnB,KAAK,IAAI,CAAC,CAAC;gBACX,KAAK,GAAG,eAAe,CAAC,KAAK,CAAC;YAChC,CAAC;iBAAM,IAAI,IAAI,KAAK,IAAI,EAAE,CAAC;gBACzB,uCAAuC;gBACvC,cAAc,IAAI,IAAI,CAAC,KAAK,CAAC,CAAC;gBAC9B,KAAK,EAAE,CAAC;gBACR,cAAc,IAAI,IAAI,CAAC,KAAK,CAAC,CAAC;YAChC,CAAC;iBAAM,CAAC;gBACN,cAAc,IAAI,IAAI,CAAC;YACzB,CAAC;QACH,CAAC;aAAM,IAAI,KAAK,KAAK,eAAe,CAAC,SAAS,EAAE,CAAC;YAC/C,IAAI,IAAI,KAAK,GAAG,EAAE,CAAC;gBACjB,oBAAoB;gBACpB,QAAQ,CAAC,IAAI,CAAC;oBACZ,GAAG,EAAE,MAAM,CAAC,QAAQ,CAAC,cAAc,CAAC;iBACrC,CAAC,CAAC;gBACH,KAAK,EAAE,CAAC;gBACR,KAAK,GAAG,eAAe,CAAC,KAAK,CAAC;YAChC,CAAC;iBAAM,CAAC;gBACN,cAAc,IAAI,IAAI,CAAC;YACzB,CAAC;QACH,CAAC;QAED,IAAI,KAAK,KAAK,eAAe,CAAC,KAAK,IAAI,KAAK,GAAG,IAAI,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;YAC/D,MAAM,OAAO,GAAG,IAAI,CAAC,KAAK,CAAC,CAAC;YAC5B,cAAc,GAAG,EAAE,CAAC;YACpB,IAAI,OAAO,KAAK,GAAG,EAAE,CAAC;gBACpB,IAAI,IAAI,CAAC,KAAK,GAAG,CAAC,CAAC,KAAK,GAAG,EAAE,CAAC;oBAC5B,sBAAsB;oBACtB,oGAAoG;oBACpG,KAAK,GAAG,eAAe,CAAC,SAAS,CAAC;oBAClC,KAAK,EAAE,CAAC;oBACR,cAAc,GAAG,GAAG,CAAC;gBACvB,CAAC;qBAAM,CAAC;oBACN,sBAAsB;oBACtB,KAAK,GAAG,eAAe,CAAC,SAAS,CAAC;gBACpC,CAAC;YACH,CAAC;iBAAM,IAAI,OAAO,KAAK,GAAG,EAAE,CAAC;gBAC3B,oBAAoB;gBACpB,KAAK,GAAG,eAAe,CAAC,QAAQ,CAAC;YACnC,CAAC;iBAAM,CAAC;gBACN,MAAM,IAAI,KAAK,CAAC,+CAA+C,CAAC,CAAC;YACnE,CAAC;QACH,CAAC;IACH,CAAC;IAED,IAAI,KAAK,KAAK,eAAe,CAAC,KAAK,EAAE,CAAC;QACpC,MAAM,IAAI,KAAK,CAAC,yBAAyB,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC;IACnE,CAAC;IAED,OAAO,QAAQ,CAAC;AAClB,CAAC,CAAC"}
1
+ {"version":3,"file":"_createStandardSchema.js","sourceRoot":"","sources":["../../src/internal/_createStandardSchema.ts"],"names":[],"mappings":";;;AAIO,MAAM,qBAAqB,GAAG,CACnC,EAAsC,EACyB,EAAE,CACjE,MAAM,CAAC,MAAM,CAAC,EAAE,EAAE;IAChB,WAAW,EAAE;QACX,OAAO,EAAE,CAAC;QACV,MAAM,EAAE,OAAO;QACf,QAAQ,EAAE,CAAC,KAAc,EAA8B,EAAE;YACvD,MAAM,MAAM,GAAG,EAAE,CAAC,KAAK,CAAC,CAAC;YACzB,IAAI,MAAM,CAAC,OAAO,EAAE,CAAC;gBACnB,OAAO;oBACL,KAAK,EAAE,MAAM,CAAC,IAAI;iBACyB,CAAC;YAChD,CAAC;iBAAM,CAAC;gBACN,OAAO;oBACL,MAAM,EAAE,MAAM,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,KAAK,EAAE,EAAE,CAAC,CAAC;wBACpC,OAAO,EAAE,YAAY,KAAK,CAAC,QAAQ,SAAS,KAAK,CAAC,KAAK,EAAE;wBACzD,IAAI,EAAE,6BAA6B,CAAC,KAAK,CAAC,IAAI,CAAC;qBAChD,CAAC,CAAC;iBACqC,CAAC;YAC7C,CAAC;QACH,CAAC;KACF;CAC+B,CAAC,CAAC;AAvBzB,QAAA,qBAAqB,yBAuBI;AAEtC,IAAK,eAWJ;AAXD,WAAK,eAAe;IAClB,yBAAyB;IACzB,oCAAoC;IACpC,mEAAmE;IACnE,uDAAK,CAAA;IACL,mCAAmC;IACnC,6DAAQ,CAAA;IACR,oCAAoC;IACpC,+DAAS,CAAA;IACT,gCAAgC;IAChC,+DAAS,CAAA;AACX,CAAC,EAXI,eAAe,KAAf,eAAe,QAWnB;AAED,MAAM,6BAA6B,GAAG,CACpC,IAAY,EACiC,EAAE;IAC/C,IAAI,CAAC,IAAI,CAAC,UAAU,CAAC,QAAQ,CAAC,EAAE,CAAC;QAC/B,MAAM,IAAI,KAAK,CAAC,iBAAiB,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC;IAC3D,CAAC;IAED,MAAM,QAAQ,GAAmC,EAAE,CAAC;IACpD,IAAI,cAAc,GAAG,EAAE,CAAC;IACxB,IAAI,KAAK,GAAoB,eAAe,CAAC,KAAK,CAAC;IACnD,IAAI,KAAK,GAAG,QAAQ,CAAC,MAAM,GAAG,CAAC,CAAC;IAChC,OAAO,KAAK,GAAG,IAAI,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;QAC/B,KAAK,EAAE,CAAC;QACR,MAAM,IAAI,GAAG,IAAI,CAAC,KAAK,CAAC,CAAC;QAEzB,IAAI,KAAK,KAAK,eAAe,CAAC,QAAQ,EAAE,CAAC;YACvC,IAAI,IAAI,KAAK,GAAG,IAAI,IAAI,KAAK,GAAG,EAAE,CAAC;gBACjC,kBAAkB;gBAClB,QAAQ,CAAC,IAAI,CAAC;oBACZ,GAAG,EAAE,cAAc;iBACpB,CAAC,CAAC;gBACH,KAAK,GAAG,eAAe,CAAC,KAAK,CAAC;YAChC,CAAC;iBAAM,IAAI,KAAK,KAAK,IAAI,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;gBACrC,cAAc;gBACd,cAAc,IAAI,IAAI,CAAC;gBACvB,QAAQ,CAAC,IAAI,CAAC;oBACZ,GAAG,EAAE,cAAc;iBACpB,CAAC,CAAC;gBACH,KAAK,EAAE,CAAC;gBACR,KAAK,GAAG,eAAe,CAAC,KAAK,CAAC;YAChC,CAAC;iBAAM,CAAC;gBACN,cAAc,IAAI,IAAI,CAAC;YACzB,CAAC;QACH,CAAC;aAAM,IAAI,KAAK,KAAK,eAAe,CAAC,SAAS,
EAAE,CAAC;YAC/C,IAAI,IAAI,KAAK,GAAG,EAAE,CAAC;gBACjB,oBAAoB;gBACpB,QAAQ,CAAC,IAAI,CAAC;oBACZ,GAAG,EAAE,IAAI,CAAC,KAAK,CAAC,cAAc,GAAG,IAAI,CAAC;iBACvC,CAAC,CAAC;gBACH,mBAAmB;gBACnB,KAAK,IAAI,CAAC,CAAC;gBACX,KAAK,GAAG,eAAe,CAAC,KAAK,CAAC;YAChC,CAAC;iBAAM,IAAI,IAAI,KAAK,IAAI,EAAE,CAAC;gBACzB,uCAAuC;gBACvC,cAAc,IAAI,IAAI,CAAC,KAAK,CAAC,CAAC;gBAC9B,KAAK,EAAE,CAAC;gBACR,cAAc,IAAI,IAAI,CAAC,KAAK,CAAC,CAAC;YAChC,CAAC;iBAAM,CAAC;gBACN,cAAc,IAAI,IAAI,CAAC;YACzB,CAAC;QACH,CAAC;aAAM,IAAI,KAAK,KAAK,eAAe,CAAC,SAAS,EAAE,CAAC;YAC/C,IAAI,IAAI,KAAK,GAAG,EAAE,CAAC;gBACjB,oBAAoB;gBACpB,QAAQ,CAAC,IAAI,CAAC;oBACZ,GAAG,EAAE,MAAM,CAAC,QAAQ,CAAC,cAAc,CAAC;iBACrC,CAAC,CAAC;gBACH,KAAK,EAAE,CAAC;gBACR,KAAK,GAAG,eAAe,CAAC,KAAK,CAAC;YAChC,CAAC;iBAAM,CAAC;gBACN,cAAc,IAAI,IAAI,CAAC;YACzB,CAAC;QACH,CAAC;QAED,IAAI,KAAK,KAAK,eAAe,CAAC,KAAK,IAAI,KAAK,GAAG,IAAI,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;YAC/D,MAAM,OAAO,GAAG,IAAI,CAAC,KAAK,CAAC,CAAC;YAC5B,cAAc,GAAG,EAAE,CAAC;YACpB,IAAI,OAAO,KAAK,GAAG,EAAE,CAAC;gBACpB,IAAI,IAAI,CAAC,KAAK,GAAG,CAAC,CAAC,KAAK,GAAG,EAAE,CAAC;oBAC5B,sBAAsB;oBACtB,oGAAoG;oBACpG,KAAK,GAAG,eAAe,CAAC,SAAS,CAAC;oBAClC,KAAK,EAAE,CAAC;oBACR,cAAc,GAAG,GAAG,CAAC;gBACvB,CAAC;qBAAM,CAAC;oBACN,sBAAsB;oBACtB,KAAK,GAAG,eAAe,CAAC,SAAS,CAAC;gBACpC,CAAC;YACH,CAAC;iBAAM,IAAI,OAAO,KAAK,GAAG,EAAE,CAAC;gBAC3B,oBAAoB;gBACpB,KAAK,GAAG,eAAe,CAAC,QAAQ,CAAC;YACnC,CAAC;iBAAM,CAAC;gBACN,MAAM,IAAI,KAAK,CAAC,+CAA+C,CAAC,CAAC;YACnE,CAAC;QACH,CAAC;IACH,CAAC;IAED,IAAI,KAAK,KAAK,eAAe,CAAC,KAAK,EAAE,CAAC;QACpC,MAAM,IAAI,KAAK,CAAC,yBAAyB,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC;IACnE,CAAC;IAED,OAAO,QAAQ,CAAC;AAClB,CAAC,CAAC"}
@@ -1,4 +1,4 @@
1
- import { ILlmApplication, ILlmSchema } from "@samchon/openapi";
2
- export declare const _llmApplicationFinalize: <Model extends ILlmSchema.Model>(app: ILlmApplication<Model>, options?: Partial<Pick<ILlmApplication.IOptions<Model>, "separate" | "validate"> & {
1
+ import { ILlmApplication } from "@samchon/openapi";
2
+ export declare const _llmApplicationFinalize: (app: ILlmApplication, config?: Partial<Pick<ILlmApplication.IConfig, "separate" | "validate"> & {
3
3
  equals?: boolean;
4
4
  }>) => void;
@@ -1,4 +1,4 @@
1
- import { ILlmApplication, ILlmSchema } from "@samchon/openapi";
2
- export declare const _llmApplicationFinalize: <Model extends ILlmSchema.Model>(app: ILlmApplication<Model>, options?: Partial<Pick<ILlmApplication.IOptions<Model>, "separate" | "validate"> & {
1
+ import { ILlmApplication } from "@samchon/openapi";
2
+ export declare const _llmApplicationFinalize: (app: ILlmApplication, config?: Partial<Pick<ILlmApplication.IConfig, "separate" | "validate"> & {
3
3
  equals?: boolean;
4
4
  }>) => void;
@@ -2,21 +2,20 @@
2
2
  Object.defineProperty(exports, "__esModule", { value: true });
3
3
  exports._llmApplicationFinalize = void 0;
4
4
  const LlmSchemaComposer_1 = require("@samchon/openapi/lib/composers/LlmSchemaComposer");
5
- const _llmApplicationFinalize = (app, options) => {
5
+ const _llmApplicationFinalize = (app, config) => {
6
6
  var _a, _b, _c, _d;
7
- app.options = Object.assign(Object.assign({}, LlmSchemaComposer_1.LlmSchemaComposer.defaultConfig(app.model)), { separate: (_a = options === null || options === void 0 ? void 0 : options.separate) !== null && _a !== void 0 ? _a : null, validate: (_b = options === null || options === void 0 ? void 0 : options.validate) !== null && _b !== void 0 ? _b : null });
8
- if (app.options.separate !== null)
7
+ app.config = Object.assign(Object.assign({}, LlmSchemaComposer_1.LlmSchemaComposer.getConfig()), { separate: (_a = config === null || config === void 0 ? void 0 : config.separate) !== null && _a !== void 0 ? _a : null, validate: (_b = config === null || config === void 0 ? void 0 : config.validate) !== null && _b !== void 0 ? _b : null });
8
+ if (app.config.separate !== null)
9
9
  for (const func of app.functions)
10
- func.separated = LlmSchemaComposer_1.LlmSchemaComposer.separateParameters(app.model)({
10
+ func.separated = LlmSchemaComposer_1.LlmSchemaComposer.separate({
11
11
  parameters: func.parameters,
12
- predicate: app.options
13
- .separate,
14
- equals: (_c = options === null || options === void 0 ? void 0 : options.equals) !== null && _c !== void 0 ? _c : false,
12
+ predicate: app.config.separate,
13
+ equals: (_c = config === null || config === void 0 ? void 0 : config.equals) !== null && _c !== void 0 ? _c : false,
15
14
  });
16
- if (app.options.validate !== null)
15
+ if (app.config.validate !== null)
17
16
  for (const func of app.functions)
18
- if (typeof ((_d = app.options.validate) === null || _d === void 0 ? void 0 : _d[func.name]) === "function")
19
- func.validate = app.options.validate[func.name];
17
+ if (typeof ((_d = app.config.validate) === null || _d === void 0 ? void 0 : _d[func.name]) === "function")
18
+ func.validate = app.config.validate[func.name];
20
19
  };
21
20
  exports._llmApplicationFinalize = _llmApplicationFinalize;
22
21
  //# sourceMappingURL=_llmApplicationFinalize.js.map
@@ -1 +1 @@
1
- {"version":3,"file":"_llmApplicationFinalize.js","sourceRoot":"","sources":["../../src/internal/_llmApplicationFinalize.ts"],"names":[],"mappings":";;;AAMA,wFAAqF;AAE9E,MAAM,uBAAuB,GAAG,CACrC,GAA2B,EAC3B,OAIC,EACK,EAAE;;IACR,GAAG,CAAC,OAAO,mCACN,qCAAiB,CAAC,aAAa,CAAC,GAAG,CAAC,KAAK,CAAC,KAC7C,QAAQ,EAAE,MAAA,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,QAAQ,mCAAI,IAAI,EACnC,QAAQ,EAAE,MAAA,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,QAAQ,mCAAI,IAAI,GACpC,CAAC;IACF,IAAI,GAAG,CAAC,OAAO,CAAC,QAAQ,KAAK,IAAI;QAC/B,KAAK,MAAM,IAAI,IAAI,GAAG,CAAC,SAAS;YAC9B,IAAI,CAAC,SAAS,GAAG,qCAAiB,CAAC,kBAAkB,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC;gBAC/D,UAAU,EACR,IAAI,CAAC,UAAyD;gBAChE,SAAS,EAAE,GAAG,CAAC,OAAO;qBACnB,QAA8D;gBACjE,MAAM,EAAE,MAAA,OAAO,aAAP,OAAO,uBAAP,OAAO,CAAE,MAAM,mCAAI,KAAK;aACjC,CAAmC,CAAC;IACzC,IAAI,GAAG,CAAC,OAAO,CAAC,QAAQ,KAAK,IAAI;QAC/B,KAAK,MAAM,IAAI,IAAI,GAAG,CAAC,SAAS;YAC9B,IAAI,OAAO,CAAA,MAAA,GAAG,CAAC,OAAO,CAAC,QAAQ,0CAAG,IAAI,CAAC,IAAI,CAAC,CAAA,KAAK,UAAU;gBACzD,IAAI,CAAC,QAAQ,GAAG,GAAG,CAAC,OAAO,CAAC,QAAQ,CAClC,IAAI,CAAC,IAAI,CACwB,CAAC;AAC5C,CAAC,CAAC;AA5BW,QAAA,uBAAuB,2BA4BlC"}
1
+ {"version":3,"file":"_llmApplicationFinalize.js","sourceRoot":"","sources":["../../src/internal/_llmApplicationFinalize.ts"],"names":[],"mappings":";;;AACA,wFAAqF;AAE9E,MAAM,uBAAuB,GAAG,CACrC,GAAoB,EACpB,MAIC,EACK,EAAE;;IACR,GAAG,CAAC,MAAM,mCACL,qCAAiB,CAAC,SAAS,EAAE,KAChC,QAAQ,EAAE,MAAA,MAAM,aAAN,MAAM,uBAAN,MAAM,CAAE,QAAQ,mCAAI,IAAI,EAClC,QAAQ,EAAE,MAAA,MAAM,aAAN,MAAM,uBAAN,MAAM,CAAE,QAAQ,mCAAI,IAAI,GACnC,CAAC;IACF,IAAI,GAAG,CAAC,MAAM,CAAC,QAAQ,KAAK,IAAI;QAC9B,KAAK,MAAM,IAAI,IAAI,GAAG,CAAC,SAAS;YAC9B,IAAI,CAAC,SAAS,GAAG,qCAAiB,CAAC,QAAQ,CAAC;gBAC1C,UAAU,EAAE,IAAI,CAAC,UAAU;gBAC3B,SAAS,EAAE,GAAG,CAAC,MAAM,CAAC,QAAQ;gBAC9B,MAAM,EAAE,MAAA,MAAM,aAAN,MAAM,uBAAN,MAAM,CAAE,MAAM,mCAAI,KAAK;aAChC,CAAC,CAAC;IACP,IAAI,GAAG,CAAC,MAAM,CAAC,QAAQ,KAAK,IAAI;QAC9B,KAAK,MAAM,IAAI,IAAI,GAAG,CAAC,SAAS;YAC9B,IAAI,OAAO,CAAA,MAAA,GAAG,CAAC,MAAM,CAAC,QAAQ,0CAAG,IAAI,CAAC,IAAI,CAAC,CAAA,KAAK,UAAU;gBACxD,IAAI,CAAC,QAAQ,GAAG,GAAG,CAAC,MAAM,CAAC,QAAQ,CACjC,IAAI,CAAC,IAAI,CACwB,CAAC;AAC5C,CAAC,CAAC;AA1BW,QAAA,uBAAuB,2BA0BlC"}
@@ -1,23 +1,22 @@
1
1
  import { LlmSchemaComposer } from '@samchon/openapi/lib/composers/LlmSchemaComposer.mjs';
2
2
 
3
- const _llmApplicationFinalize = (app, options) => {
4
- app.options = {
5
- ...LlmSchemaComposer.defaultConfig(app.model),
6
- separate: options?.separate ?? null,
7
- validate: options?.validate ?? null,
3
+ const _llmApplicationFinalize = (app, config) => {
4
+ app.config = {
5
+ ...LlmSchemaComposer.getConfig(),
6
+ separate: config?.separate ?? null,
7
+ validate: config?.validate ?? null,
8
8
  };
9
- if (app.options.separate !== null)
9
+ if (app.config.separate !== null)
10
10
  for (const func of app.functions)
11
- func.separated = LlmSchemaComposer.separateParameters(app.model)({
11
+ func.separated = LlmSchemaComposer.separate({
12
12
  parameters: func.parameters,
13
- predicate: app.options
14
- .separate,
15
- equals: options?.equals ?? false,
13
+ predicate: app.config.separate,
14
+ equals: config?.equals ?? false,
16
15
  });
17
- if (app.options.validate !== null)
16
+ if (app.config.validate !== null)
18
17
  for (const func of app.functions)
19
- if (typeof app.options.validate?.[func.name] === "function")
20
- func.validate = app.options.validate[func.name];
18
+ if (typeof app.config.validate?.[func.name] === "function")
19
+ func.validate = app.config.validate[func.name];
21
20
  };
22
21
 
23
22
  export { _llmApplicationFinalize };
package/lib/llm.d.mts CHANGED
@@ -33,11 +33,11 @@ import { ILlmApplication, ILlmController, ILlmSchema } from "@samchon/openapi";
33
33
  * model: "gpt-4o-mini",
34
34
  * },
35
35
  * controllers: [
36
- * typia.llm.controller<ReactNativeFileSystem, "chatgpt">(
36
+ * typia.llm.controller<ReactNativeFileSystem>(
37
37
  * "filesystem",
38
38
  * new ReactNativeFileSystem(),
39
39
  * ),
40
- * typia.llm.controller<ReactNativeGallery, "chatgpt">(
40
+ * typia.llm.controller<ReactNativeGallery>(
41
41
  * "gallery",
42
42
  * new ReactNativeGallery(),
43
43
  * ),
@@ -48,39 +48,17 @@ import { ILlmApplication, ILlmController, ILlmSchema } from "@samchon/openapi";
48
48
  * );
49
49
  * ```
50
50
  *
51
- * Here is the list of available `Model` types with their corresponding LLM
52
- * schema. Reading the following list, and determine the `Model` type
53
- * considering the characteristics of the target LLM provider.
54
- *
55
- * - LLM provider schemas
56
- *
57
- * - `chatgpt`:
58
- * [`IChatGptSchema`](https://samchon.github.io/openapi/api/types/IChatGptSchema-1.html)
59
- * - `claude`:
60
- * [`IClaudeSchema`](https://samchon.github.io/openapi/api/types/IClaudeSchema-1.html)
61
- * - `deepseek`:
62
- * [`IDeepSeekSchema`](https://samchon.github.io/openapi/api/types/IClaudeSchema-1.html)
63
- * - `gemini`:
64
- * [`IGeminiSchema`](https://samchon.github.io/openapi/api/types/IGeminiSchema-1.html)
65
- * - `llama`:
66
- * [`ILlamaSchema`](https://samchon.github.io/openapi/api/types/ILlamaSchema-1.html)
67
- * - Middle layer schemas
68
- *
69
- * - `3.0`: [`ILlmSchemaV3`](https://samchon.github.io/openapi/api/types/ILlmSchemaV3-1.html)
70
- * - `3.1`: [`ILlmSchemaV3_1`](https://samchon.github.io/openapi/api/types/ILlmSchemaV3_1-1.html)
71
- *
72
51
  * @author Jeongho Nam - https://github.com/samchon
73
52
  * @template Class Target class or interface type collecting the functions to
74
53
  * call
75
- * @template Model LLM schema model
76
54
  * @template Config Configuration of LLM schema composition
77
55
  * @param name Identifier name of the controller
78
56
  * @param execute Executor instance
79
- * @param options Options for the LLM application construction
57
+ * @param config Options for the LLM application construction
80
58
  * @returns Controller of LLM function calling
81
59
  * @reference https://wrtnlabs.io/agentica/docs/core/controller/typescript/
82
60
  */
83
- export declare function controller(name: string, execute: object, options?: Partial<Pick<ILlmApplication.IOptions<any, any>, "separate" | "validate">>): never;
61
+ export declare function controller(name: string, execute: object, config?: Partial<Pick<ILlmApplication.IConfig<any>, "separate" | "validate">>): never;
84
62
  /**
85
63
  * TypeScript functions to LLM function calling controller.
86
64
  *
@@ -113,11 +91,11 @@ export declare function controller(name: string, execute: object, options?: Part
113
91
  * model: "gpt-4o-mini",
114
92
  * },
115
93
  * controllers: [
116
- * typia.llm.controller<ReactNativeFileSystem, "chatgpt">(
94
+ * typia.llm.controller<ReactNativeFileSystem>(
117
95
  * "filesystem",
118
96
  * new ReactNativeFileSystem(),
119
97
  * ),
120
- * typia.llm.controller<ReactNativeGallery, "chatgpt">(
98
+ * typia.llm.controller<ReactNativeGallery>(
121
99
  * "gallery",
122
100
  * new ReactNativeGallery(),
123
101
  * ),
@@ -128,39 +106,17 @@ export declare function controller(name: string, execute: object, options?: Part
128
106
  * );
129
107
  * ```
130
108
  *
131
- * Here is the list of available `Model` types with their corresponding LLM
132
- * schema. Reading the following list, and determine the `Model` type
133
- * considering the characteristics of the target LLM provider.
134
- *
135
- * - LLM provider schemas
136
- *
137
- * - `chatgpt`:
138
- * [`IChatGptSchema`](https://samchon.github.io/openapi/api/types/IChatGptSchema-1.html)
139
- * - `claude`:
140
- * [`IClaudeSchema`](https://samchon.github.io/openapi/api/types/IClaudeSchema-1.html)
141
- * - `deepseek`:
142
- * [`IDeepSeekSchema`](https://samchon.github.io/openapi/api/types/IClaudeSchema-1.html)
143
- * - `gemini`:
144
- * [`IGeminiSchema`](https://samchon.github.io/openapi/api/types/IGeminiSchema-1.html)
145
- * - `llama`:
146
- * [`ILlamaSchema`](https://samchon.github.io/openapi/api/types/ILlamaSchema-1.html)
147
- * - Middle layer schemas
148
- *
149
- * - `3.0`: [`ILlmSchemaV3`](https://samchon.github.io/openapi/api/types/ILlmSchemaV3-1.html)
150
- * - `3.1`: [`ILlmSchemaV3_1`](https://samchon.github.io/openapi/api/types/ILlmSchemaV3_1-1.html)
151
- *
152
109
  * @author Jeongho Nam - https://github.com/samchon
153
110
  * @template Class Target class or interface type collecting the functions to
154
111
  * call
155
- * @template Model LLM schema model
156
112
  * @template Config Configuration of LLM schema composition
157
113
  * @param name Identifier name of the controller
158
114
  * @param execute Executor instance
159
- * @param options Options for the LLM application construction
115
+ * @param config Options for the LLM application construction
160
116
  * @returns Controller of LLM function calling
161
117
  * @reference https://wrtnlabs.io/agentica/docs/core/controller/typescript/
162
118
  */
163
- export declare function controller<Class extends Record<string, any>, Model extends ILlmSchema.Model, Config extends Partial<ILlmSchema.ModelConfig[Model] & {
119
+ export declare function controller<Class extends Record<string, any>, Config extends Partial<ILlmSchema.IConfig & {
164
120
  /**
165
121
  * Whether to disallow superfluous properties or not.
166
122
  *
@@ -171,7 +127,7 @@ export declare function controller<Class extends Record<string, any>, Model exte
171
127
  * @default false
172
128
  */
173
129
  equals: boolean;
174
- }> = {}>(name: string, execute: Class, options?: Partial<Pick<ILlmApplication.IOptions<Model, Class>, "separate" | "validate">>): ILlmController<Model>;
130
+ }> = {}>(name: string, execute: Class, config?: Partial<Pick<ILlmApplication.IConfig<Class>, "separate" | "validate">>): ILlmController<Class>;
175
131
  /**
176
132
  * > You must configure the generic argument `Class`.
177
133
  *
@@ -191,7 +147,7 @@ export declare function controller<Class extends Record<string, any>, Model exte
191
147
  * must be composed by human, not by LLM. File uploading feature or some
192
148
  * sensitive information like security keys (password) are the examples. In that
193
149
  * case, you can separate the function parameters to both LLM and human sides by
194
- * configuring the {@link ILlmApplication.IOptions.separate} property. The
150
+ * configuring the {@link ILlmApplication.IConfig.separate} property. The
195
151
  * separated parameters are assigned to the {@link ILlmFunction.separated}
196
152
  * property.
197
153
  *
@@ -201,43 +157,21 @@ export declare function controller<Class extends Record<string, any>, Model exte
201
157
  * return value to the LLM by system prompt. The LLM will continue the next
202
158
  * conversation based on the return value.
203
159
  *
204
- * Additionally, if you've configured {@link ILlmApplication.IOptions.separate},
160
+ * Additionally, if you've configured {@link ILlmApplication.IConfig.separate},
205
161
  * so that the parameters are separated to human and LLM sides, you can merge
206
162
  * these human and LLM sides' parameters into one through
207
163
  * {@link HttpLlm.mergeParameters} before the actual LLM function call
208
164
  * execution.
209
165
  *
210
- * Here is the list of available `Model` types with their corresponding LLM
211
- * schema. Reading the following list, and determine the `Model` type
212
- * considering the characteristics of the target LLM provider.
213
- *
214
- * - LLM provider schemas
215
- *
216
- * - `chatgpt`:
217
- * [`IChatGptSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IChatGptSchema.ts)
218
- * - `claude`:
219
- * [`IClaudeSchema`](https://samchon.github.io/openapi/api/types/IClaudeSchema-1.html)
220
- * - `deepseek`:
221
- * [`IDeepSeekSchema`](https://samchon.github.io/openapi/api/types/IClaudeSchema-1.html)
222
- * - `gemini`:
223
- * [`IGeminiSchema`](https://samchon.github.io/openapi/api/types/IGeminiSchema-1.html)
224
- * - `llama`:
225
- * [`ILlamaSchema`](https://samchon.github.io/openapi/api/types/ILlamaSchema-1.html)
226
- * - Middle layer schemas
227
- *
228
- * - `3.0`: [`ILlmSchemaV3`](https://samchon.github.io/openapi/api/types/ILlmSchemaV3-1.html)
229
- * - `3.1`: [`ILlmSchemaV3_1`](https://samchon.github.io/openapi/api/types/ILlmSchemaV3_1-1.html)
230
- *
231
166
  * @author Jeongho Nam - https://github.com/samchon
232
167
  * @template Class Target class or interface type collecting the functions to
233
168
  * call
234
- * @template Model LLM schema model
235
169
  * @template Config Configuration of LLM schema composition
236
- * @param options Options for the LLM application construction
170
+ * @param config Options for the LLM application construction
237
171
  * @returns Application of LLM function calling schemas
238
172
  * @reference https://platform.openai.com/docs/guides/function-calling
239
173
  */
240
- export declare function application(options?: Partial<Pick<ILlmApplication.IOptions<any, any>, "separate" | "validate">>): never;
174
+ export declare function application(config?: Partial<Pick<ILlmApplication.IConfig<any>, "separate" | "validate">>): never;
241
175
  /**
242
176
  * TypeScript functions to LLM function calling application.
243
177
  *
@@ -255,7 +189,7 @@ export declare function application(options?: Partial<Pick<ILlmApplication.IOpti
255
189
  * must be composed by human, not by LLM. File uploading feature or some
256
190
  * sensitive information like security keys (password) are the examples. In that
257
191
  * case, you can separate the function parameters to both LLM and human sides by
258
- * configuring the {@link ILlmApplication.IOptions.separate} property. The
192
+ * configuring the {@link ILlmApplication.IConfig.separate} property. The
259
193
  * separated parameters are assigned to the {@link ILlmFunction.separated}
260
194
  * property.
261
195
  *
@@ -265,43 +199,21 @@ export declare function application(options?: Partial<Pick<ILlmApplication.IOpti
265
199
  * return value to the LLM by system prompt. The LLM will continue the next
266
200
  * conversation based on the return value.
267
201
  *
268
- * Additionally, if you've configured {@link ILlmApplication.IOptions.separate},
202
+ * Additionally, if you've configured {@link ILlmApplication.IConfig.separate},
269
203
  * so that the parameters are separated to human and LLM sides, you can merge
270
204
  * these human and LLM sides' parameters into one through
271
205
  * {@link HttpLlm.mergeParameters} before the actual LLM function call
272
206
  * execution.
273
207
  *
274
- * Here is the list of available `Model` types with their corresponding LLM
275
- * schema. Reading the following list, and determine the `Model` type
276
- * considering the characteristics of the target LLM provider.
277
- *
278
- * - LLM provider schemas
279
- *
280
- * - `chatgpt`:
281
- * [`IChatGptSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IChatGptSchema.ts)
282
- * - `claude`:
283
- * [`IClaudeSchema`](https://samchon.github.io/openapi/api/types/IClaudeSchema-1.html)
284
- * - `deepseek`:
285
- * [`IDeepSeekSchema`](https://samchon.github.io/openapi/api/types/IClaudeSchema-1.html)
286
- * - `gemini`:
287
- * [`IGeminiSchema`](https://samchon.github.io/openapi/api/types/IGeminiSchema-1.html)
288
- * - `llama`:
289
- * [`ILlamaSchema`](https://samchon.github.io/openapi/api/types/ILlamaSchema-1.html)
290
- * - Middle layer schemas
291
- *
292
- * - `3.0`: [`ILlmSchemaV3`](https://samchon.github.io/openapi/api/types/ILlmSchemaV3-1.html)
293
- * - `3.1`: [`ILlmSchemaV3_1`](https://samchon.github.io/openapi/api/types/ILlmSchemaV3_1-1.html)
294
- *
295
208
  * @author Jeongho Nam - https://github.com/samchon
296
209
  * @template Class Target class or interface type collecting the functions to
297
210
  * call
298
- * @template Model LLM schema model
299
211
  * @template Config Configuration of LLM schema composition
300
- * @param options Options for the LLM application construction
212
+ * @param config Configuration for the LLM application construction
301
213
  * @returns Application of LLM function calling schemas
302
214
  * @reference https://platform.openai.com/docs/guides/function-calling
303
215
  */
304
- export declare function application<Class extends Record<string, any>, Model extends ILlmSchema.Model, Config extends Partial<{
216
+ export declare function application<Class extends Record<string, any>, Config extends Partial<ILlmSchema.IConfig & {
305
217
  /**
306
218
  * Whether to disallow superfluous properties or not.
307
219
  *
@@ -312,7 +224,7 @@ export declare function application<Class extends Record<string, any>, Model ext
312
224
  * @default false
313
225
  */
314
226
  equals: boolean;
315
- } & ILlmSchema.ModelConfig[Model]> = {}>(options?: Partial<Pick<ILlmApplication.IOptions<Model, Class>, "separate" | "validate">>): ILlmApplication<Model, Class>;
227
+ }> = {}>(config?: Partial<Pick<ILlmApplication.IConfig<Class>, "separate" | "validate">>): ILlmApplication<Class>;
316
228
  /**
317
229
  * > You must configure the generic argument `Parameters`.
318
230
  *
@@ -336,29 +248,7 @@ export declare function application<Class extends Record<string, any>, Model ext
336
248
  * output, not only a plain text, by filling the parameters from the
337
249
  * conversation (maybe chatting text) with user (human).
338
250
  *
339
- * Here is the list of available `Model` types with their corresponding LLM
340
- * schema. Reading the following list, and determine the `Model` type
341
- * considering the characteristics of the target LLM provider.
342
- *
343
- * - LLM provider schemas
344
- *
345
- * - `chatgpt`:
346
- * [`IChatGptSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IChatGptSchema.ts)
347
- * - `claude`:
348
- * [`IClaudeSchema`](https://samchon.github.io/openapi/api/types/IClaudeSchema-1.html)
349
- * - `deepseek`:
350
- * [`IDeepSeekSchema`](https://samchon.github.io/openapi/api/types/IClaudeSchema-1.html)
351
- * - `gemini`:
352
- * [`IGeminiSchema`](https://samchon.github.io/openapi/api/types/IGeminiSchema-1.html)
353
- * - `llama`:
354
- * [`ILlamaSchema`](https://samchon.github.io/openapi/api/types/ILlamaSchema-1.html)
355
- * - Middle layer schemas
356
- *
357
- * - `3.0`: [`ILlmSchemaV3`](https://samchon.github.io/openapi/api/types/ILlmSchemaV3-1.html)
358
- * - `3.1`: [`ILlmSchemaV3_1`](https://samchon.github.io/openapi/api/types/ILlmSchemaV3_1-1.html)
359
- *
360
251
  * @template Parameters Target parameters type
361
- * @template Model LLM schema model
362
252
  * @template Config Configuration of LLM schema composition
363
253
  * @returns LLM parameters schema
364
254
  * @reference https://platform.openai.com/docs/guides/function-calling
@@ -386,35 +276,13 @@ export declare function parameters(): never;
386
276
  * output, not only a plain text, by filling the parameters from the
387
277
  * conversation (maybe chatting text) with user (human).
388
278
  *
389
- * Here is the list of available `Model` types with their corresponding LLM
390
- * schema. Reading the following list, and determine the `Model` type
391
- * considering the characteristics of the target LLM provider.
392
- *
393
- * - LLM provider schemas
394
- *
395
- * - `chatgpt`:
396
- * [`IChatGptSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IChatGptSchema.ts)
397
- * - `claude`:
398
- * [`IClaudeSchema`](https://samchon.github.io/openapi/api/types/IClaudeSchema-1.html)
399
- * - `deepseek`:
400
- * [`IDeepSeekSchema`](https://samchon.github.io/openapi/api/types/IClaudeSchema-1.html)
401
- * - `gemini`:
402
- * [`IGeminiSchema`](https://samchon.github.io/openapi/api/types/IGeminiSchema-1.html)
403
- * - `llama`:
404
- * [`ILlamaSchema`](https://samchon.github.io/openapi/api/types/ILlamaSchema-1.html)
405
- * - Middle layer schemas
406
- *
407
- * - `3.0`: [`ILlmSchemaV3`](https://samchon.github.io/openapi/api/types/ILlmSchemaV3-1.html)
408
- * - `3.1`: [`ILlmSchemaV3_1`](https://samchon.github.io/openapi/api/types/ILlmSchemaV3_1-1.html)
409
- *
410
279
  * @template Parameters Target parameters type
411
- * @template Model LLM schema model
412
280
  * @template Config Configuration of LLM schema composition
413
281
  * @returns LLM parameters schema
414
282
  * @reference https://platform.openai.com/docs/guides/function-calling
415
283
  * @reference https://platform.openai.com/docs/guides/structured-outputs
416
284
  */
417
- export declare function parameters<Parameters extends Record<string, any>, Model extends ILlmSchema.Model, Config extends Partial<ILlmSchema.ModelConfig[Model]> = {}>(): ILlmSchema.ModelParameters[Model];
285
+ export declare function parameters<Parameters extends Record<string, any>, Config extends Partial<ILlmSchema.IConfig> = {}>(): ILlmSchema.IParameters;
418
286
  /**
419
287
  * > You must configure the generic argument `T`.
420
288
  *
@@ -425,28 +293,6 @@ export declare function parameters<Parameters extends Record<string, any>, Model
425
293
  * https://platform.openai.com/docs/guides/function-calling), from a TypeScript
426
294
  * type.
427
295
  *
428
- * The returned {@link ILlmSchema} type will be specified by the `Model`
429
- * argument, and here is the list of available `Model` types with their
430
- * corresponding LLM schema. Reading the following list, and determine the
431
- * `Model` type considering the characteristics of the target LLM provider.
432
- *
433
- * - LLM provider schemas
434
- *
435
- * - `chatgpt`:
436
- * [`IChatGptSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IChatGptSchema.ts)
437
- * - `claude`:
438
- * [`IClaudeSchema`](https://samchon.github.io/openapi/api/types/IClaudeSchema-1.html)
439
- * - `deepseek`:
440
- * [`IDeepSeekSchema`](https://samchon.github.io/openapi/api/types/IClaudeSchema-1.html)
441
- * - `gemini`:
442
- * [`IGeminiSchema`](https://samchon.github.io/openapi/api/types/IGeminiSchema-1.html)
443
- * - `llama`:
444
- * [`ILlamaSchema`](https://samchon.github.io/openapi/api/types/ILlamaSchema-1.html)
445
- * - Middle layer schemas
446
- *
447
- * - `3.0`: [`ILlmSchemaV3`](https://samchon.github.io/openapi/api/types/ILlmSchemaV3-1.html)
448
- * - `3.1`: [`ILlmSchemaV3_1`](https://samchon.github.io/openapi/api/types/ILlmSchemaV3_1-1.html)
449
- *
450
296
  * If you actually want to perform the LLM function calling with TypeScript
451
297
  * functions, you can do it with the {@link application} function. Otherwise you
452
298
  * hope to perform the structured output, {@link parameters} function is better.
@@ -470,7 +316,6 @@ export declare function parameters<Parameters extends Record<string, any>, Model
470
316
  *
471
317
  * @author Jeongho Nam - https://github.com/samchon
472
318
  * @template T Target type
473
- * @template Model LLM schema model
474
319
  * @template Config Configuration of LLM schema composition
475
320
  * @returns LLM schema
476
321
  * @reference https://platform.openai.com/docs/guides/function-calling
@@ -485,27 +330,6 @@ export declare function schema(): never;
485
330
  * https://platform.openai.com/docs/guides/function-calling), from a TypeScript
486
331
  * type.
487
332
  *
488
- * The returned {@link ILlmSchema} type will be specified by the `Model`
489
- * argument, and here is the list of available `Model` types with their
490
- * corresponding LLM schema:
491
- *
492
- * - LLM provider schemas
493
- *
494
- * - `chatgpt`:
495
- * [`IChatGptSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IChatGptSchema.ts)
496
- * - `claude`:
497
- * [`IClaudeSchema`](https://samchon.github.io/openapi/api/types/IClaudeSchema-1.html)
498
- * - `deepseek`:
499
- * [`IDeepSeekSchema`](https://samchon.github.io/openapi/api/types/IClaudeSchema-1.html)
500
- * - `gemini`:
501
- * [`IGeminiSchema`](https://samchon.github.io/openapi/api/types/IGeminiSchema-1.html)
502
- * - `llama`:
503
- * [`ILlamaSchema`](https://samchon.github.io/openapi/api/types/ILlamaSchema-1.html)
504
- * - Middle layer schemas
505
- *
506
- * - `3.0`: [`ILlmSchemaV3`](https://samchon.github.io/openapi/api/types/ILlmSchemaV3-1.html)
507
- * - `3.1`: [`ILlmSchemaV3_1`](https://samchon.github.io/openapi/api/types/ILlmSchemaV3_1-1.html)
508
- *
509
333
  * If you actually want to perform the LLM function calling with TypeScript
510
334
  * functions, you can do it with the {@link application} function. Otherwise you
511
335
  * hope to perform the structured output, {@link parameters} function is better.
@@ -529,12 +353,9 @@ export declare function schema(): never;
529
353
  *
530
354
  * @author Jeongho Nam - https://github.com/samchon
531
355
  * @template T Target type
532
- * @template Model LLM schema model
533
356
  * @template Config Configuration of LLM schema composition
534
357
  * @returns LLM schema
535
358
  * @reference https://platform.openai.com/docs/guides/function-calling
536
359
  * @reference https://platform.openai.com/docs/guides/structured-outputs
537
360
  */
538
- export declare function schema<T, Model extends ILlmSchema.Model, Config extends Partial<ILlmSchema.ModelConfig[Model]> = {}>(...$defs: Extract<ILlmSchema.ModelSchema[Model], {
539
- $ref: string;
540
- }> extends never ? [] : [Record<string, ILlmSchema.ModelSchema[Model]>]): ILlmSchema.ModelSchema[Model];
361
+ export declare function schema<T, Config extends Partial<ILlmSchema.IConfig> = {}>($defs: Record<string, ILlmSchema>): ILlmSchema;