typia 7.4.2 → 7.5.0-dev.20241218

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (445) hide show
  1. package/LICENSE +21 -21
  2. package/README.md +148 -148
  3. package/lib/executable/typia.js +0 -0
  4. package/package.json +3 -3
  5. package/src/IRandomGenerator.ts +49 -49
  6. package/src/IReadableURLSearchParams.ts +9 -9
  7. package/src/IValidation.ts +21 -21
  8. package/src/executable/TypiaGenerateWizard.ts +83 -83
  9. package/src/executable/TypiaPatchWizard.ts +45 -45
  10. package/src/executable/TypiaSetupWizard.ts +179 -179
  11. package/src/executable/setup/ArgumentParser.ts +42 -42
  12. package/src/executable/setup/FileRetriever.ts +19 -19
  13. package/src/executable/setup/PackageManager.ts +87 -87
  14. package/src/factories/ExpressionFactory.ts +216 -216
  15. package/src/factories/IdentifierFactory.ts +89 -89
  16. package/src/factories/JsonMetadataFactory.ts +83 -83
  17. package/src/factories/LiteralFactory.ts +52 -52
  18. package/src/factories/MetadataCollection.ts +278 -278
  19. package/src/factories/MetadataCommentTagFactory.ts +650 -650
  20. package/src/factories/MetadataFactory.ts +404 -404
  21. package/src/factories/MetadataTypeTagFactory.ts +411 -411
  22. package/src/factories/MetadataTypeTagSchemaFactory.ts +82 -82
  23. package/src/factories/NumericRangeFactory.ts +72 -72
  24. package/src/factories/ProtobufFactory.ts +875 -875
  25. package/src/factories/StatementFactory.ts +90 -90
  26. package/src/factories/TemplateFactory.ts +64 -64
  27. package/src/factories/TypeFactory.ts +140 -140
  28. package/src/factories/internal/metadata/IMetadataIteratorProps.ts +17 -17
  29. package/src/factories/internal/metadata/MetadataHelper.ts +21 -21
  30. package/src/factories/internal/metadata/emplace_metadata_alias.ts +33 -33
  31. package/src/factories/internal/metadata/emplace_metadata_array_type.ts +39 -39
  32. package/src/factories/internal/metadata/emplace_metadata_object.ts +208 -208
  33. package/src/factories/internal/metadata/emplace_metadata_tuple.ts +57 -57
  34. package/src/factories/internal/metadata/explore_metadata.ts +31 -31
  35. package/src/factories/internal/metadata/iterate_metadata.ts +54 -54
  36. package/src/factories/internal/metadata/iterate_metadata_alias.ts +33 -33
  37. package/src/factories/internal/metadata/iterate_metadata_array.ts +63 -63
  38. package/src/factories/internal/metadata/iterate_metadata_atomic.ts +62 -62
  39. package/src/factories/internal/metadata/iterate_metadata_coalesce.ts +28 -28
  40. package/src/factories/internal/metadata/iterate_metadata_collection.ts +146 -146
  41. package/src/factories/internal/metadata/iterate_metadata_comment_tags.ts +32 -32
  42. package/src/factories/internal/metadata/iterate_metadata_constant.ts +76 -76
  43. package/src/factories/internal/metadata/iterate_metadata_escape.ts +49 -49
  44. package/src/factories/internal/metadata/iterate_metadata_function.ts +91 -91
  45. package/src/factories/internal/metadata/iterate_metadata_intersection.ts +213 -213
  46. package/src/factories/internal/metadata/iterate_metadata_map.ts +57 -57
  47. package/src/factories/internal/metadata/iterate_metadata_native.ts +255 -255
  48. package/src/factories/internal/metadata/iterate_metadata_object.ts +35 -35
  49. package/src/factories/internal/metadata/iterate_metadata_set.ts +57 -57
  50. package/src/factories/internal/metadata/iterate_metadata_sort.ts +87 -87
  51. package/src/factories/internal/metadata/iterate_metadata_template.ts +41 -41
  52. package/src/factories/internal/metadata/iterate_metadata_tuple.ts +26 -26
  53. package/src/factories/internal/metadata/iterate_metadata_union.ts +19 -19
  54. package/src/functional.ts +750 -750
  55. package/src/http.ts +1047 -1047
  56. package/src/internal/_IProtobufWriter.ts +18 -18
  57. package/src/internal/_ProtobufReader.ts +194 -194
  58. package/src/internal/_ProtobufSizer.ts +145 -145
  59. package/src/internal/_ProtobufWriter.ts +145 -145
  60. package/src/internal/_accessExpressionAsString.ts +46 -46
  61. package/src/internal/_assertGuard.ts +13 -13
  62. package/src/internal/_functionalTypeGuardErrorFactory.ts +4 -4
  63. package/src/internal/_httpFormDataReadArray.ts +4 -4
  64. package/src/internal/_httpFormDataReadBigint.ts +18 -18
  65. package/src/internal/_httpFormDataReadBlob.ts +10 -10
  66. package/src/internal/_httpFormDataReadBoolean.ts +16 -16
  67. package/src/internal/_httpFormDataReadFile.ts +10 -10
  68. package/src/internal/_httpFormDataReadNumber.ts +15 -15
  69. package/src/internal/_httpFormDataReadString.ts +10 -10
  70. package/src/internal/_httpHeaderReadBigint.ts +10 -10
  71. package/src/internal/_httpHeaderReadBoolean.ts +8 -8
  72. package/src/internal/_httpHeaderReadNumber.ts +7 -7
  73. package/src/internal/_httpParameterReadBigint.ts +10 -10
  74. package/src/internal/_httpParameterReadBoolean.ts +8 -8
  75. package/src/internal/_httpParameterReadNumber.ts +7 -7
  76. package/src/internal/_httpParameterReadString.ts +2 -2
  77. package/src/internal/_httpQueryParseURLSearchParams.ts +12 -12
  78. package/src/internal/_httpQueryReadArray.ts +4 -4
  79. package/src/internal/_httpQueryReadBigint.ts +12 -12
  80. package/src/internal/_httpQueryReadBoolean.ts +14 -14
  81. package/src/internal/_httpQueryReadNumber.ts +9 -9
  82. package/src/internal/_httpQueryReadString.ts +4 -4
  83. package/src/internal/_isBetween.ts +2 -2
  84. package/src/internal/_isBigintString.ts +8 -8
  85. package/src/internal/_isFormatByte.ts +7 -7
  86. package/src/internal/_isFormatDate.ts +3 -3
  87. package/src/internal/_isFormatDateTime.ts +4 -4
  88. package/src/internal/_isFormatDuration.ts +4 -4
  89. package/src/internal/_isFormatEmail.ts +4 -4
  90. package/src/internal/_isFormatHostname.ts +4 -4
  91. package/src/internal/_isFormatIdnEmail.ts +4 -4
  92. package/src/internal/_isFormatIdnHostname.ts +4 -4
  93. package/src/internal/_isFormatIpv4.ts +4 -4
  94. package/src/internal/_isFormatIpv6.ts +4 -4
  95. package/src/internal/_isFormatIri.ts +3 -3
  96. package/src/internal/_isFormatIriReference.ts +4 -4
  97. package/src/internal/_isFormatJsonPointer.ts +3 -3
  98. package/src/internal/_isFormatPassword.ts +1 -1
  99. package/src/internal/_isFormatRegex.ts +8 -8
  100. package/src/internal/_isFormatRelativeJsonPointer.ts +4 -4
  101. package/src/internal/_isFormatTime.ts +4 -4
  102. package/src/internal/_isFormatUri.ts +6 -6
  103. package/src/internal/_isFormatUriReference.ts +5 -5
  104. package/src/internal/_isFormatUriTemplate.ts +4 -4
  105. package/src/internal/_isFormatUrl.ts +4 -4
  106. package/src/internal/_isFormatUuid.ts +3 -3
  107. package/src/internal/_isTypeFloat.ts +5 -5
  108. package/src/internal/_isTypeInt32.ts +5 -5
  109. package/src/internal/_isTypeInt64.ts +5 -5
  110. package/src/internal/_isTypeUint32.ts +5 -5
  111. package/src/internal/_isTypeUint64.ts +5 -5
  112. package/src/internal/_isUniqueItems.ts +159 -159
  113. package/src/internal/_jsonStringifyNumber.ts +12 -12
  114. package/src/internal/_jsonStringifyRest.ts +3 -3
  115. package/src/internal/_jsonStringifyString.ts +42 -42
  116. package/src/internal/_jsonStringifyTail.ts +2 -2
  117. package/src/internal/_llmApplicationFinalize.ts +20 -20
  118. package/src/internal/_miscCloneAny.ts +46 -46
  119. package/src/internal/_notationAny.ts +37 -37
  120. package/src/internal/_notationCamel.ts +13 -13
  121. package/src/internal/_notationPascal.ts +8 -8
  122. package/src/internal/_notationSnake.ts +43 -43
  123. package/src/internal/_randomArray.ts +21 -21
  124. package/src/internal/_randomBigint.ts +6 -6
  125. package/src/internal/_randomBoolean.ts +1 -1
  126. package/src/internal/_randomFormatByte.ts +3 -3
  127. package/src/internal/_randomFormatDate.ts +18 -18
  128. package/src/internal/_randomFormatDatetime.ts +16 -16
  129. package/src/internal/_randomFormatDuration.ts +27 -27
  130. package/src/internal/_randomFormatEmail.ts +11 -11
  131. package/src/internal/_randomFormatHostname.ts +6 -6
  132. package/src/internal/_randomFormatIdnEmail.ts +3 -3
  133. package/src/internal/_randomFormatIdnHostname.ts +3 -3
  134. package/src/internal/_randomFormatIpv4.ts +11 -11
  135. package/src/internal/_randomFormatIpv6.ts +11 -11
  136. package/src/internal/_randomFormatIri.ts +3 -3
  137. package/src/internal/_randomFormatIriReference.ts +3 -3
  138. package/src/internal/_randomFormatJsonPointer.ts +7 -7
  139. package/src/internal/_randomFormatPassword.ts +8 -8
  140. package/src/internal/_randomFormatRegex.ts +4 -4
  141. package/src/internal/_randomFormatRelativeJsonPointer.ts +8 -8
  142. package/src/internal/_randomFormatTime.ts +14 -14
  143. package/src/internal/_randomFormatUri.ts +3 -3
  144. package/src/internal/_randomFormatUriReference.ts +3 -3
  145. package/src/internal/_randomFormatUriTemplate.ts +3 -3
  146. package/src/internal/_randomFormatUrl.ts +11 -11
  147. package/src/internal/_randomFormatUuid.ts +6 -6
  148. package/src/internal/_randomInteger.ts +47 -47
  149. package/src/internal/_randomNumber.ts +74 -74
  150. package/src/internal/_randomPattern.ts +10 -10
  151. package/src/internal/_randomPick.ts +9 -9
  152. package/src/internal/_randomString.ts +24 -24
  153. package/src/internal/_throwTypeGuardError.ts +5 -5
  154. package/src/internal/_validateReport.ts +13 -13
  155. package/src/internal/private/__notationCapitalize.ts +2 -2
  156. package/src/internal/private/__notationUnsnake.ts +24 -24
  157. package/src/json.ts +752 -752
  158. package/src/llm.ts +481 -481
  159. package/src/misc.ts +658 -658
  160. package/src/module.ts +937 -937
  161. package/src/notations.ts +827 -827
  162. package/src/programmers/AssertProgrammer.ts +454 -454
  163. package/src/programmers/CheckerProgrammer.ts +1617 -1617
  164. package/src/programmers/FeatureProgrammer.ts +622 -622
  165. package/src/programmers/ImportProgrammer.ts +185 -185
  166. package/src/programmers/IsProgrammer.ts +273 -273
  167. package/src/programmers/RandomProgrammer.ts +1190 -1190
  168. package/src/programmers/TypiaProgrammer.ts +174 -174
  169. package/src/programmers/ValidateProgrammer.ts +439 -439
  170. package/src/programmers/functional/FunctionalAssertFunctionProgrammer.ts +153 -153
  171. package/src/programmers/functional/FunctionalAssertParametersProgrammer.ts +125 -125
  172. package/src/programmers/functional/FunctionalAssertReturnProgrammer.ts +115 -115
  173. package/src/programmers/functional/FunctionalIsFunctionProgrammer.ts +72 -72
  174. package/src/programmers/functional/FunctionalIsParametersProgrammer.ts +113 -113
  175. package/src/programmers/functional/FunctionalIsReturnProgrammer.ts +116 -116
  176. package/src/programmers/functional/FunctionalValidateFunctionProgrammer.ts +119 -119
  177. package/src/programmers/functional/FunctionalValidateParametersProgrammer.ts +274 -274
  178. package/src/programmers/functional/FunctionalValidateReturnProgrammer.ts +135 -135
  179. package/src/programmers/functional/internal/FunctionalGeneralProgrammer.ts +34 -34
  180. package/src/programmers/helpers/AtomicPredicator.ts +35 -35
  181. package/src/programmers/helpers/CloneJoiner.ts +143 -143
  182. package/src/programmers/helpers/FunctionProgrammer.ts +67 -67
  183. package/src/programmers/helpers/HttpMetadataUtil.ts +21 -21
  184. package/src/programmers/helpers/NotationJoiner.ts +144 -144
  185. package/src/programmers/helpers/OptionPredicator.ts +15 -15
  186. package/src/programmers/helpers/ProtobufUtil.ts +228 -228
  187. package/src/programmers/helpers/PruneJoiner.ts +148 -148
  188. package/src/programmers/helpers/RandomJoiner.ts +168 -168
  189. package/src/programmers/helpers/StringifyJoinder.ts +115 -115
  190. package/src/programmers/helpers/StringifyPredicator.ts +13 -13
  191. package/src/programmers/helpers/UnionExplorer.ts +372 -372
  192. package/src/programmers/helpers/UnionPredicator.ts +79 -79
  193. package/src/programmers/helpers/disable_function_programmer_declare.ts +32 -32
  194. package/src/programmers/http/HttpAssertFormDataProgrammer.ts +99 -99
  195. package/src/programmers/http/HttpAssertHeadersProgrammer.ts +99 -99
  196. package/src/programmers/http/HttpAssertQueryProgrammer.ts +105 -105
  197. package/src/programmers/http/HttpFormDataProgrammer.ts +308 -308
  198. package/src/programmers/http/HttpHeadersProgrammer.ts +400 -400
  199. package/src/programmers/http/HttpIsFormDataProgrammer.ts +108 -108
  200. package/src/programmers/http/HttpIsHeadersProgrammer.ts +108 -108
  201. package/src/programmers/http/HttpIsQueryProgrammer.ts +114 -114
  202. package/src/programmers/http/HttpParameterProgrammer.ts +115 -115
  203. package/src/programmers/http/HttpQueryProgrammer.ts +336 -336
  204. package/src/programmers/http/HttpValidateFormDataProgrammer.ts +92 -92
  205. package/src/programmers/http/HttpValidateHeadersProgrammer.ts +92 -92
  206. package/src/programmers/http/HttpValidateQueryProgrammer.ts +98 -98
  207. package/src/programmers/internal/check_array_length.ts +47 -47
  208. package/src/programmers/internal/check_bigint.ts +50 -50
  209. package/src/programmers/internal/check_dynamic_key.ts +201 -201
  210. package/src/programmers/internal/check_dynamic_properties.ts +208 -208
  211. package/src/programmers/internal/check_everything.ts +23 -23
  212. package/src/programmers/internal/check_native.ts +27 -27
  213. package/src/programmers/internal/check_number.ts +112 -112
  214. package/src/programmers/internal/check_object.ts +75 -75
  215. package/src/programmers/internal/check_string.ts +50 -50
  216. package/src/programmers/internal/check_template.ts +48 -48
  217. package/src/programmers/internal/check_union_array_like.ts +335 -335
  218. package/src/programmers/internal/decode_union_object.ts +116 -116
  219. package/src/programmers/internal/feature_object_entries.ts +61 -61
  220. package/src/programmers/internal/json_schema_alias.ts +47 -47
  221. package/src/programmers/internal/json_schema_array.ts +45 -45
  222. package/src/programmers/internal/json_schema_bigint.ts +15 -15
  223. package/src/programmers/internal/json_schema_boolean.ts +15 -15
  224. package/src/programmers/internal/json_schema_constant.ts +26 -26
  225. package/src/programmers/internal/json_schema_description.ts +12 -12
  226. package/src/programmers/internal/json_schema_discriminator.ts +35 -35
  227. package/src/programmers/internal/json_schema_escaped.ts +82 -82
  228. package/src/programmers/internal/json_schema_native.ts +33 -33
  229. package/src/programmers/internal/json_schema_number.ts +15 -15
  230. package/src/programmers/internal/json_schema_object.ts +158 -158
  231. package/src/programmers/internal/json_schema_plugin.ts +18 -18
  232. package/src/programmers/internal/json_schema_station.ts +182 -182
  233. package/src/programmers/internal/json_schema_string.ts +15 -15
  234. package/src/programmers/internal/json_schema_template.ts +55 -55
  235. package/src/programmers/internal/json_schema_title.ts +20 -20
  236. package/src/programmers/internal/json_schema_tuple.ts +35 -35
  237. package/src/programmers/internal/metadata_to_pattern.ts +42 -42
  238. package/src/programmers/internal/postfix_of_tuple.ts +5 -5
  239. package/src/programmers/internal/prune_object_properties.ts +71 -71
  240. package/src/programmers/internal/stringify_dynamic_properties.ts +162 -162
  241. package/src/programmers/internal/stringify_regular_properties.ts +81 -81
  242. package/src/programmers/internal/template_to_pattern.ts +23 -23
  243. package/src/programmers/internal/wrap_metadata_rest_tuple.ts +23 -23
  244. package/src/programmers/json/JsonApplicationProgrammer.ts +279 -279
  245. package/src/programmers/json/JsonAssertParseProgrammer.ts +113 -113
  246. package/src/programmers/json/JsonAssertStringifyProgrammer.ts +115 -115
  247. package/src/programmers/json/JsonIsParseProgrammer.ts +114 -114
  248. package/src/programmers/json/JsonIsStringifyProgrammer.ts +108 -108
  249. package/src/programmers/json/JsonSchemasProgrammer.ts +91 -91
  250. package/src/programmers/json/JsonStringifyProgrammer.ts +1124 -1124
  251. package/src/programmers/json/JsonValidateParseProgrammer.ts +105 -105
  252. package/src/programmers/json/JsonValidateStringifyProgrammer.ts +124 -124
  253. package/src/programmers/llm/LlmApplicationOfValidateProgrammer.ts +90 -90
  254. package/src/programmers/llm/LlmApplicationProgrammer.ts +280 -280
  255. package/src/programmers/llm/LlmModelPredicator.ts +127 -127
  256. package/src/programmers/llm/LlmParametersProgrammer.ts +93 -93
  257. package/src/programmers/llm/LlmSchemaProgrammer.ts +192 -192
  258. package/src/programmers/misc/MiscAssertCloneProgrammer.ts +95 -95
  259. package/src/programmers/misc/MiscAssertPruneProgrammer.ts +116 -116
  260. package/src/programmers/misc/MiscCloneProgrammer.ts +1032 -1032
  261. package/src/programmers/misc/MiscIsCloneProgrammer.ts +99 -99
  262. package/src/programmers/misc/MiscIsPruneProgrammer.ts +97 -97
  263. package/src/programmers/misc/MiscLiteralsProgrammer.ts +80 -80
  264. package/src/programmers/misc/MiscPruneProgrammer.ts +728 -728
  265. package/src/programmers/misc/MiscValidateCloneProgrammer.ts +111 -111
  266. package/src/programmers/misc/MiscValidatePruneProgrammer.ts +113 -113
  267. package/src/programmers/notations/NotationAssertGeneralProgrammer.ts +101 -101
  268. package/src/programmers/notations/NotationGeneralProgrammer.ts +984 -984
  269. package/src/programmers/notations/NotationIsGeneralProgrammer.ts +105 -105
  270. package/src/programmers/notations/NotationValidateGeneralProgrammer.ts +119 -119
  271. package/src/programmers/protobuf/ProtobufAssertDecodeProgrammer.ts +98 -98
  272. package/src/programmers/protobuf/ProtobufAssertEncodeProgrammer.ts +102 -102
  273. package/src/programmers/protobuf/ProtobufDecodeProgrammer.ts +654 -654
  274. package/src/programmers/protobuf/ProtobufEncodeProgrammer.ts +945 -945
  275. package/src/programmers/protobuf/ProtobufIsDecodeProgrammer.ts +109 -109
  276. package/src/programmers/protobuf/ProtobufIsEncodeProgrammer.ts +98 -98
  277. package/src/programmers/protobuf/ProtobufMessageProgrammer.ts +179 -179
  278. package/src/programmers/protobuf/ProtobufValidateDecodeProgrammer.ts +92 -92
  279. package/src/programmers/protobuf/ProtobufValidateEncodeProgrammer.ts +119 -119
  280. package/src/protobuf.ts +868 -868
  281. package/src/reflect.ts +57 -57
  282. package/src/schemas/json/IJsonApplication.ts +73 -73
  283. package/src/schemas/json/IJsonSchemaCollection.ts +29 -29
  284. package/src/schemas/json/__IJsonApplication.ts +63 -63
  285. package/src/schemas/llm/ILlmApplicationOfValidate.ts +55 -55
  286. package/src/schemas/llm/ILlmFunctionOfValidate.ts +39 -39
  287. package/src/schemas/metadata/IMetadata.ts +35 -35
  288. package/src/schemas/metadata/IMetadataAlias.ts +6 -6
  289. package/src/schemas/metadata/IMetadataAliasType.ts +12 -12
  290. package/src/schemas/metadata/IMetadataApplication.ts +7 -7
  291. package/src/schemas/metadata/IMetadataArray.ts +6 -6
  292. package/src/schemas/metadata/IMetadataComponents.ts +11 -11
  293. package/src/schemas/metadata/IMetadataConstantValue.ts +11 -11
  294. package/src/schemas/metadata/IMetadataDictionary.ts +11 -11
  295. package/src/schemas/metadata/IMetadataMap.ts +8 -8
  296. package/src/schemas/metadata/IMetadataNative.ts +6 -6
  297. package/src/schemas/metadata/IMetadataObject.ts +6 -6
  298. package/src/schemas/metadata/IMetadataObjectType.ts +13 -13
  299. package/src/schemas/metadata/IMetadataSet.ts +7 -7
  300. package/src/schemas/metadata/IMetadataTemplate.ts +7 -7
  301. package/src/schemas/metadata/IMetadataTuple.ts +6 -6
  302. package/src/schemas/metadata/IMetadataTypeTag.ts +16 -16
  303. package/src/schemas/metadata/Metadata.ts +669 -669
  304. package/src/schemas/metadata/MetadataAlias.ts +46 -46
  305. package/src/schemas/metadata/MetadataAliasType.ts +63 -63
  306. package/src/schemas/metadata/MetadataApplication.ts +44 -44
  307. package/src/schemas/metadata/MetadataArray.ts +49 -49
  308. package/src/schemas/metadata/MetadataAtomic.ts +87 -87
  309. package/src/schemas/metadata/MetadataComponents.ts +98 -98
  310. package/src/schemas/metadata/MetadataConstantValue.ts +62 -62
  311. package/src/schemas/metadata/MetadataMap.ts +48 -48
  312. package/src/schemas/metadata/MetadataNative.ts +44 -44
  313. package/src/schemas/metadata/MetadataObject.ts +48 -48
  314. package/src/schemas/metadata/MetadataObjectType.ts +149 -149
  315. package/src/schemas/metadata/MetadataParameter.ts +54 -54
  316. package/src/schemas/metadata/MetadataProperty.ts +59 -59
  317. package/src/schemas/metadata/MetadataSet.ts +45 -45
  318. package/src/schemas/metadata/MetadataTemplate.ts +80 -80
  319. package/src/schemas/metadata/MetadataTuple.ts +32 -32
  320. package/src/schemas/protobuf/IProtobufProperty.ts +6 -6
  321. package/src/schemas/protobuf/IProtobufPropertyType.ts +37 -37
  322. package/src/schemas/protobuf/IProtobufSchema.ts +50 -50
  323. package/src/tags/Example.ts +24 -24
  324. package/src/tags/Examples.ts +16 -16
  325. package/src/tags/Format.ts +50 -50
  326. package/src/tags/JsonSchemaPlugin.ts +8 -8
  327. package/src/tags/Sequence.ts +10 -10
  328. package/src/tags/TagBase.ts +82 -82
  329. package/src/tags/Type.ts +32 -32
  330. package/src/tags/UniqueItems.ts +14 -14
  331. package/src/tags/index.ts +21 -21
  332. package/src/transform.ts +35 -35
  333. package/src/transformers/CallExpressionTransformer.ts +547 -547
  334. package/src/transformers/FileTransformer.ts +136 -136
  335. package/src/transformers/IProgrammerProps.ts +11 -11
  336. package/src/transformers/ITransformOptions.ts +62 -62
  337. package/src/transformers/ITransformProps.ts +9 -9
  338. package/src/transformers/ITypiaContext.ts +18 -18
  339. package/src/transformers/ImportTransformer.ts +81 -81
  340. package/src/transformers/NodeTransformer.ts +17 -17
  341. package/src/transformers/TransformerError.ts +60 -60
  342. package/src/transformers/features/AssertTransformer.ts +24 -24
  343. package/src/transformers/features/CreateAssertTransformer.ts +24 -24
  344. package/src/transformers/features/CreateIsTransformer.ts +18 -18
  345. package/src/transformers/features/CreateRandomTransformer.ts +43 -43
  346. package/src/transformers/features/CreateValidateTransformer.ts +18 -18
  347. package/src/transformers/features/IsTransformer.ts +18 -18
  348. package/src/transformers/features/RandomTransformer.ts +41 -41
  349. package/src/transformers/features/ValidateTransformer.ts +18 -18
  350. package/src/transformers/features/functional/FunctionalGenericTransformer.ts +57 -57
  351. package/src/transformers/features/http/CreateHttpAssertFormDataTransformer.ts +13 -13
  352. package/src/transformers/features/http/CreateHttpAssertHeadersTransformer.ts +13 -13
  353. package/src/transformers/features/http/CreateHttpAssertQueryTransformer.ts +13 -13
  354. package/src/transformers/features/http/CreateHttpFormDataTransformer.ts +13 -13
  355. package/src/transformers/features/http/CreateHttpHeadersTransformer.ts +13 -13
  356. package/src/transformers/features/http/CreateHttpIsFormDataTransformer.ts +13 -13
  357. package/src/transformers/features/http/CreateHttpIsHeadersTransformer.ts +13 -13
  358. package/src/transformers/features/http/CreateHttpIsQueryTransformer.ts +13 -13
  359. package/src/transformers/features/http/CreateHttpParameterTransformer.ts +13 -13
  360. package/src/transformers/features/http/CreateHttpQueryTransformer.ts +13 -13
  361. package/src/transformers/features/http/CreateHttpValidateFormDataTransformer.ts +13 -13
  362. package/src/transformers/features/http/CreateHttpValidateHeadersTransformer.ts +13 -13
  363. package/src/transformers/features/http/CreateHttpValidateQueryTransformer.ts +13 -13
  364. package/src/transformers/features/http/HttpAssertFormDataTransformer.ts +13 -13
  365. package/src/transformers/features/http/HttpAssertHeadersTransformer.ts +13 -13
  366. package/src/transformers/features/http/HttpAssertQueryTransformer.ts +13 -13
  367. package/src/transformers/features/http/HttpFormDataTransformer.ts +13 -13
  368. package/src/transformers/features/http/HttpHeadersTransformer.ts +13 -13
  369. package/src/transformers/features/http/HttpIsFormDataTransformer.ts +13 -13
  370. package/src/transformers/features/http/HttpIsHeadersTransformer.ts +13 -13
  371. package/src/transformers/features/http/HttpIsQueryTransformer.ts +13 -13
  372. package/src/transformers/features/http/HttpParameterTransformer.ts +13 -13
  373. package/src/transformers/features/http/HttpQueryTransformer.ts +13 -13
  374. package/src/transformers/features/http/HttpValidateFormDataTransformer.ts +13 -13
  375. package/src/transformers/features/http/HttpValidateHeadersTransformer.ts +13 -13
  376. package/src/transformers/features/http/HttpValidateQueryTransformer.ts +13 -13
  377. package/src/transformers/features/json/JsonApplicationTransformer.ts +105 -105
  378. package/src/transformers/features/json/JsonAssertParseTransformer.ts +13 -13
  379. package/src/transformers/features/json/JsonAssertStringifyTransformer.ts +13 -13
  380. package/src/transformers/features/json/JsonCreateAssertParseTransformer.ts +13 -13
  381. package/src/transformers/features/json/JsonCreateAssertStringifyTransformer.ts +13 -13
  382. package/src/transformers/features/json/JsonCreateIsParseTransformer.ts +13 -13
  383. package/src/transformers/features/json/JsonCreateIsStringifyTransformer.ts +13 -13
  384. package/src/transformers/features/json/JsonCreateStringifyTransformer.ts +13 -13
  385. package/src/transformers/features/json/JsonCreateValidateParseTransformer.ts +13 -13
  386. package/src/transformers/features/json/JsonCreateValidateStringifyProgrammer.ts +13 -13
  387. package/src/transformers/features/json/JsonIsParseTransformer.ts +13 -13
  388. package/src/transformers/features/json/JsonIsStringifyTransformer.ts +13 -13
  389. package/src/transformers/features/json/JsonSchemasTransformer.ts +143 -143
  390. package/src/transformers/features/json/JsonStringifyTransformer.ts +13 -13
  391. package/src/transformers/features/json/JsonValidateParseTransformer.ts +13 -13
  392. package/src/transformers/features/json/JsonValidateStringifyTransformer.ts +13 -13
  393. package/src/transformers/features/llm/LlmApplicationOfValidateTransformer.ts +121 -121
  394. package/src/transformers/features/llm/LlmApplicationTransformer.ts +118 -118
  395. package/src/transformers/features/llm/LlmParametersTransformer.ts +94 -94
  396. package/src/transformers/features/llm/LlmSchemaTransformer.ts +135 -135
  397. package/src/transformers/features/misc/MiscAssertCloneTransformer.ts +13 -13
  398. package/src/transformers/features/misc/MiscAssertPruneTransformer.ts +13 -13
  399. package/src/transformers/features/misc/MiscCloneTransformer.ts +13 -13
  400. package/src/transformers/features/misc/MiscCreateAssertCloneTransformer.ts +13 -13
  401. package/src/transformers/features/misc/MiscCreateAssertPruneTransformer.ts +13 -13
  402. package/src/transformers/features/misc/MiscCreateCloneTransformer.ts +13 -13
  403. package/src/transformers/features/misc/MiscCreateIsCloneTransformer.ts +13 -13
  404. package/src/transformers/features/misc/MiscCreateIsPruneTransformer.ts +13 -13
  405. package/src/transformers/features/misc/MiscCreatePruneTransformer.ts +13 -13
  406. package/src/transformers/features/misc/MiscCreateValidateCloneTransformer.ts +13 -13
  407. package/src/transformers/features/misc/MiscCreateValidatePruneTransformer.ts +13 -13
  408. package/src/transformers/features/misc/MiscIsCloneTransformer.ts +13 -13
  409. package/src/transformers/features/misc/MiscIsPruneTransformer.ts +13 -13
  410. package/src/transformers/features/misc/MiscLiteralsTransformer.ts +35 -35
  411. package/src/transformers/features/misc/MiscPruneTransformer.ts +13 -13
  412. package/src/transformers/features/misc/MiscValidateCloneTransformer.ts +13 -13
  413. package/src/transformers/features/misc/MiscValidatePruneTransformer.ts +13 -13
  414. package/src/transformers/features/notations/NotationAssertGeneralTransformer.ts +20 -20
  415. package/src/transformers/features/notations/NotationCreateAssertGeneralTransformer.ts +20 -20
  416. package/src/transformers/features/notations/NotationCreateGeneralTransformer.ts +20 -20
  417. package/src/transformers/features/notations/NotationCreateIsGeneralTransformer.ts +20 -20
  418. package/src/transformers/features/notations/NotationCreateValidateGeneralTransformer.ts +20 -20
  419. package/src/transformers/features/notations/NotationGeneralTransformer.ts +18 -18
  420. package/src/transformers/features/notations/NotationIsGeneralTransformer.ts +20 -20
  421. package/src/transformers/features/notations/NotationValidateGeneralTransformer.ts +20 -20
  422. package/src/transformers/features/protobuf/ProtobufAssertDecodeTransformer.ts +13 -13
  423. package/src/transformers/features/protobuf/ProtobufAssertEncodeTransformer.ts +13 -13
  424. package/src/transformers/features/protobuf/ProtobufCreateAssertDecodeTransformer.ts +13 -13
  425. package/src/transformers/features/protobuf/ProtobufCreateAssertEncodeTransformer.ts +13 -13
  426. package/src/transformers/features/protobuf/ProtobufCreateDecodeTransformer.ts +13 -13
  427. package/src/transformers/features/protobuf/ProtobufCreateEncodeTransformer.ts +13 -13
  428. package/src/transformers/features/protobuf/ProtobufCreateIsDecodeTransformer.ts +13 -13
  429. package/src/transformers/features/protobuf/ProtobufCreateIsEncodeTransformer.ts +13 -13
  430. package/src/transformers/features/protobuf/ProtobufCreateValidateDecodeTransformer.ts +13 -13
  431. package/src/transformers/features/protobuf/ProtobufCreateValidateEncodeTransformer.ts +13 -13
  432. package/src/transformers/features/protobuf/ProtobufDecodeTransformer.ts +13 -13
  433. package/src/transformers/features/protobuf/ProtobufEncodeTransformer.ts +13 -13
  434. package/src/transformers/features/protobuf/ProtobufIsDecodeTransformer.ts +13 -13
  435. package/src/transformers/features/protobuf/ProtobufIsEncodeTransformer.ts +13 -13
  436. package/src/transformers/features/protobuf/ProtobufMessageTransformer.ts +35 -35
  437. package/src/transformers/features/protobuf/ProtobufValidateDecodeTransformer.ts +13 -13
  438. package/src/transformers/features/protobuf/ProtobufValidateEncodeTransformer.ts +13 -13
  439. package/src/transformers/features/reflect/ReflectMetadataTransformer.ts +69 -69
  440. package/src/transformers/features/reflect/ReflectNameTransformer.ts +82 -82
  441. package/src/transformers/internal/GenericTransformer.ts +101 -101
  442. package/src/utils/MapUtil.ts +14 -14
  443. package/src/utils/NamingConvention.ts +94 -94
  444. package/src/utils/ProtobufNameEncoder.ts +32 -32
  445. package/src/utils/StringUtil.ts +16 -16
package/src/llm.ts CHANGED
@@ -1,481 +1,481 @@
1
- import { ILlmApplication, ILlmSchema } from "@samchon/openapi";
2
-
3
- import { ILlmApplicationOfValidate } from "./module";
4
-
5
- /**
6
- * > You must configure the generic argument `App`.
7
- *
8
- * TypeScript functions to LLM function calling application with validators.
9
- *
10
- * Creates an application of LLM (Large Language Model) function calling application
11
- * from a TypeScript class or interface type containing the target functions to be
12
- * called by the LLM function calling feature.
13
- *
14
- * If you put the returned {@link ILlmApplicationOfValidate.functions} objects to the
15
- * LLM provider like [OpenAI (ChatGPT)](https://openai.com/), the LLM will automatically
16
- * select the proper function and fill its arguments from the conversation
17
- * (maybe chatting text) with user (human). This is the concept of the LLM function calling.
18
- *
19
- * Additionally, the LLM function calling sometimes makes a mistake by composing wrongly typed
20
- * {@link ILlmFunctionOfValidate.parameters}. In that case, deliver return value of the
21
- * {@link ILlmFunctionOfValidate.validate} function, then LLM provider will correct the
22
- * parameters at the next conversation. The {@link ILlmFunctionOfValidate.validate} function
23
- * is a validator function reporting the detailed information about the wrong typed parameters.
24
- *
25
- * By the way, there can be some parameters (or their nested properties) which must be
26
- * composed by human, not by LLM. File uploading feature or some sensitive information
27
- * like secret key (password) are the examples. In that case, you can separate the
28
- * function parameters to both LLM and human sides by configuring the
29
- * {@link ILlmApplicationOfValidate.IOptions.separate} property. The separated parameters
30
- * are assigned to the {@link ILlmFunctionOfValidate.separated} property.
31
- *
32
- * For reference, the actual function call execution is not by LLM, but by you.
33
- * When the LLM selects the proper function and fills the arguments, you just call
34
- * the function with the LLM prepared arguments. And then informs the return value to
35
- * the LLM by system prompt. The LLM will continue the next conversation based on
36
- * the return value.
37
- *
38
- * Additionally, if you've configured {@link ILlmApplicationOfValidate.IOptions.separate},
39
- * so that the parameters are separated to human and LLM sides, you can merge these
40
- * human and LLM sides' parameters into one through {@link HttpLlm.mergeParameters}
41
- * before the actual LLM function call execution.
42
- *
43
- * Here is the list of available `Model` types with their corresponding LLM schema.
44
- * Reading the following list, and determine the `Model` type considering the
45
- * characteristics of the target LLM provider.
46
- *
47
- * - LLM provider schemas
48
- * - `chatgpt`: [`IChatGptSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IChatGptSchema.ts)
49
- * - `claude`: [`IClaudeSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IClaudeSchema.ts)
50
- * - `gemini`: [`IGeminiSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IGeminiSchema.ts)
51
- * - `llama`: [`ILlamaSchema`](https://github.com/samchon/openapi/blob/master/src/structures/ILlamaSchema.ts)
52
- * - Middle layer schemas
53
- * - `3.0`: [`ILlmSchemaV3`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3.ts)
54
- * - `3.1`: [`ILlmSchemaV3_1`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3_1.ts)
55
- *
56
- * @template App Target class or interface type collecting the functions to call
57
- * @template Model LLM schema model
58
- * @template Config Configuration of LLM schema composition
59
- * @param options Options for the LLM application construction
60
- * @returns Application of LLM function calling schemas
61
- * @reference https://platform.openai.com/docs/guides/function-calling
62
- * @author Jeongho Nam - https://github.com/samchon
63
- */
64
- export function applicationOfValidate(
65
- options?: Partial<Pick<ILlmApplicationOfValidate.IOptions<any>, "separate">>,
66
- ): never;
67
-
68
- /**
69
- * TypeScript functions to LLM function calling application with validators.
70
- *
71
- * Creates an application of LLM (Large Language Model) function calling application
72
- * from a TypeScript class or interface type containing the target functions to be
73
- * called by the LLM function calling feature.
74
- *
75
- * If you put the returned {@link ILlmApplicationOfValidate.functions} objects to the
76
- * LLM provider like [OpenAI (ChatGPT)](https://openai.com/), the LLM will automatically
77
- * select the proper function and fill its arguments from the conversation
78
- * (maybe chatting text) with user (human). This is the concept of the LLM function calling.
79
- *
80
- * Additionally, the LLM function calling sometimes makes a mistake by composing wrongly typed
81
- * {@link ILlmFunctionOfValidate.parameters}. In that case, deliver return value of the
82
- * {@link ILlmFunctionOfValidate.validate} function, then LLM provider will correct the
83
- * parameters at the next conversation. The {@link ILlmFunctionOfValidate.validate} function
84
- * is a validator function reporting the detailed information about the wrong typed parameters.
85
- *
86
- * By the way, there can be some parameters (or their nested properties) which must be
87
- * composed by human, not by LLM. File uploading feature or some sensitive information
88
- * like secret key (password) are the examples. In that case, you can separate the
89
- * function parameters to both LLM and human sides by configuring the
90
- * {@link ILlmApplicationOfValidate.IOptions.separate} property. The separated parameters
91
- * are assigned to the {@link ILlmFunctionOfValidate.separated} property.
92
- *
93
- * For reference, the actual function call execution is not by LLM, but by you.
94
- * When the LLM selects the proper function and fills the arguments, you just call
95
- * the function with the LLM prepared arguments. And then informs the return value to
96
- * the LLM by system prompt. The LLM will continue the next conversation based on
97
- * the return value.
98
- *
99
- * Additionally, if you've configured {@link ILlmApplicationOfValidate.IOptions.separate},
100
- * so that the parameters are separated to human and LLM sides, you can merge these
101
- * human and LLM sides' parameters into one through {@link HttpLlm.mergeParameters}
102
- * before the actual LLM function call execution.
103
- *
104
- * Here is the list of available `Model` types with their corresponding LLM schema.
105
- * Reading the following list, and determine the `Model` type considering the
106
- * characteristics of the target LLM provider.
107
- *
108
- * - LLM provider schemas
109
- * - `chatgpt`: [`IChatGptSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IChatGptSchema.ts)
110
- * - `claude`: [`IClaudeSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IClaudeSchema.ts)
111
- * - `gemini`: [`IGeminiSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IGeminiSchema.ts)
112
- * - `llama`: [`ILlamaSchema`](https://github.com/samchon/openapi/blob/master/src/structures/ILlamaSchema.ts)
113
- * - Middle layer schemas
114
- * - `3.0`: [`ILlmSchemaV3`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3.ts)
115
- * - `3.1`: [`ILlmSchemaV3_1`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3_1.ts)
116
- *
117
- * @template App Target class or interface type collecting the functions to call
118
- * @template Model LLM schema model
119
- * @template Config Configuration of LLM schema composition
120
- * @param options Options for the LLM application construction
121
- * @returns Application of LLM function calling schemas
122
- * @reference https://platform.openai.com/docs/guides/function-calling
123
- * @author Jeongho Nam - https://github.com/samchon
124
- */
125
- export function applicationOfValidate<
126
- App extends Record<string, any>,
127
- Model extends ILlmSchema.Model,
128
- Config extends Partial<ILlmSchema.ModelConfig[Model]> = {},
129
- >(
130
- options?: Partial<
131
- Pick<ILlmApplicationOfValidate.IOptions<Model>, "separate">
132
- >,
133
- ): ILlmApplicationOfValidate<Model>;
134
-
135
- /**
136
- * @internal
137
- */
138
- export function applicationOfValidate(): never {
139
- halt("applicationOfValidate");
140
- }
141
-
142
- /**
143
- * > You must configure the generic argument `App`.
144
- *
145
- * TypeScript functions to LLM function calling application.
146
- *
147
- * Creates an application of LLM (Large Language Model) function calling application
148
- * from a TypeScript class or interface type containing the target functions to be
149
- * called by the LLM function calling feature.
150
- *
151
- * If you put the returned {@link ILlmApplication.functions} objects to the LLM provider
152
- * like [OpenAI (ChatGPT)](https://openai.com/), the LLM will automatically select the
153
- * proper function and fill its arguments from the conversation (maybe chatting text)
154
- * with user (human). This is the concept of the LLM function calling.
155
- *
156
- * By the way, there can be some parameters (or their nested properties) which must be
157
- * composed by human, not by LLM. File uploading feature or some sensitive information
158
- * like secret key (password) are the examples. In that case, you can separate the
159
- * function parameters to both LLM and human sides by configuring the
160
- * {@link ILlmApplication.IOptions.separate} property. The separated parameters are
161
- * assigned to the {@link ILlmFunction.separated} property.
162
- *
163
- * For reference, the actual function call execution is not by LLM, but by you.
164
- * When the LLM selects the proper function and fills the arguments, you just call
165
- * the function with the LLM prepared arguments. And then informs the return value to
166
- * the LLM by system prompt. The LLM will continue the next conversation based on
167
- * the return value.
168
- *
169
- * Additionally, if you've configured {@link ILlmApplication.IOptions.separate},
170
- * so that the parameters are separated to human and LLM sides, you can merge these
171
- * human and LLM sides' parameters into one through {@link HttpLlm.mergeParameters}
172
- * before the actual LLM function call execution.
173
- *
174
- * Here is the list of available `Model` types with their corresponding LLM schema.
175
- * Reading the following list, and determine the `Model` type considering the
176
- * characteristics of the target LLM provider.
177
- *
178
- * - LLM provider schemas
179
- * - `chatgpt`: [`IChatGptSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IChatGptSchema.ts)
180
- * - `claude`: [`IClaudeSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IClaudeSchema.ts)
181
- * - `gemini`: [`IGeminiSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IGeminiSchema.ts)
182
- * - `llama`: [`ILlamaSchema`](https://github.com/samchon/openapi/blob/master/src/structures/ILlamaSchema.ts)
183
- * - Middle layer schemas
184
- * - `3.0`: [`ILlmSchemaV3`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3.ts)
185
- * - `3.1`: [`ILlmSchemaV3_1`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3_1.ts)
186
- *
187
- * @template App Target class or interface type collecting the functions to call
188
- * @template Model LLM schema model
189
- * @template Config Configuration of LLM schema composition
190
- * @param options Options for the LLM application construction
191
- * @returns Application of LLM function calling schemas
192
- * @reference https://platform.openai.com/docs/guides/function-calling
193
- * @author Jeongho Nam - https://github.com/samchon
194
- */
195
- export function application(
196
- options?: Partial<Pick<ILlmApplication.IOptions<any>, "separate">>,
197
- ): never;
198
-
199
- /**
200
- * TypeScript functions to LLM function calling application.
201
- *
202
- * Creates an application of LLM (Large Language Model) function calling application
203
- * from a TypeScript class or interface type containing the target functions to be
204
- * called by the LLM function calling feature.
205
- *
206
- * If you put the returned {@link ILlmApplication.functions} objects to the LLM provider
207
- * like [OpenAI (ChatGPT)](https://openai.com/), the LLM will automatically select the
208
- * proper function and fill its arguments from the conversation (maybe chatting text)
209
- * with user (human). This is the concept of the LLM function calling.
210
- *
211
- * By the way, there can be some parameters (or their nested properties) which must be
212
- * composed by human, not by LLM. File uploading feature or some sensitive information
213
- * like secret key (password) are the examples. In that case, you can separate the
214
- * function parameters to both LLM and human sides by configuring the
215
- * {@link ILlmApplication.IOptions.separate} property. The separated parameters are
216
- * assigned to the {@link ILlmFunction.separated} property.
217
- *
218
- * For reference, the actual function call execution is not by LLM, but by you.
219
- * When the LLM selects the proper function and fills the arguments, you just call
220
- * the function with the LLM prepared arguments. And then informs the return value to
221
- * the LLM by system prompt. The LLM will continue the next conversation based on
222
- * the return value.
223
- *
224
- * Additionally, if you've configured {@link ILlmApplication.IOptions.separate},
225
- * so that the parameters are separated to human and LLM sides, you can merge these
226
- * human and LLM sides' parameters into one through {@link HttpLlm.mergeParameters}
227
- * before the actual LLM function call execution.
228
- *
229
- * Here is the list of available `Model` types with their corresponding LLM schema.
230
- * Reading the following list, and determine the `Model` type considering the
231
- * characteristics of the target LLM provider.
232
- *
233
- * - LLM provider schemas
234
- * - `chatgpt`: [`IChatGptSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IChatGptSchema.ts)
235
- * - `claude`: [`IClaudeSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IClaudeSchema.ts)
236
- * - `gemini`: [`IGeminiSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IGeminiSchema.ts)
237
- * - `llama`: [`ILlamaSchema`](https://github.com/samchon/openapi/blob/master/src/structures/ILlamaSchema.ts)
238
- * - Middle layer schemas
239
- * - `3.0`: [`ILlmSchemaV3`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3.ts)
240
- * - `3.1`: [`ILlmSchemaV3_1`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3_1.ts)
241
- *
242
- * @template App Target class or interface type collecting the functions to call
243
- * @template Model LLM schema model
244
- * @template Config Configuration of LLM schema composition
245
- * @param options Options for the LLM application construction
246
- * @returns Application of LLM function calling schemas
247
- * @reference https://platform.openai.com/docs/guides/function-calling
248
- * @author Jeongho Nam - https://github.com/samchon
249
- */
250
- export function application<
251
- App extends Record<string, any>,
252
- Model extends ILlmSchema.Model,
253
- Config extends Partial<ILlmSchema.ModelConfig[Model]> = {},
254
- >(
255
- options?: Partial<Pick<ILlmApplication.IOptions<Model>, "separate">>,
256
- ): ILlmApplication<Model>;
257
-
258
- /**
259
- * @internal
260
- */
261
- export function application(): never {
262
- halt("application");
263
- }
264
-
265
- /**
266
- * > You must configure the generic argument `Parameters`.
267
- *
268
- * TypeScript parameters to LLM parameters schema.
269
- *
270
- * Creates an LLM (Large Language Model) parameters schema, a type metadata that is used in the
271
- * [LLM function calling](https://platform.openai.com/docs/guides/function-calling)
272
- * and [LLM structured outputs](https://platform.openai.com/docs/guides/structured-outputs),
273
- * from a TypeScript parameters type.
274
- *
275
- * For references, LLM identifies only keyworded arguments, not positional arguments.
276
- * Therefore, the TypeScript parameters type must be an object type, and its properties
277
- * must be static. If there are dynamic properties, it would be a compilation error.
278
- *
279
- * Also, such parameters type can be utilized not only for the LLM function calling,
280
- * but also for the LLM structured outputs. The LLM structured outputs is a feature
281
- * that LLM (Large Language Model) can generate a structured output, not only a plain
282
- * text, by filling the parameters from the conversation (maybe chatting text) with user
283
- * (human).
284
- *
285
- * Here is the list of available `Model` types with their corresponding LLM schema.
286
- * Reading the following list, and determine the `Model` type considering the
287
- * characteristics of the target LLM provider.
288
- *
289
- * - LLM provider schemas
290
- * - `chatgpt`: [`IChatGptSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IChatGptSchema.ts)
291
- * - `claude`: [`IClaudeSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IClaudeSchema.ts)
292
- * - `gemini`: [`IGeminiSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IGeminiSchema.ts)
293
- * - `llama`: [`ILlamaSchema`](https://github.com/samchon/openapi/blob/master/src/structures/ILlamaSchema.ts)
294
- * - Middle layer schemas
295
- * - `3.0`: [`ILlmSchemaV3`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3.ts)
296
- * - `3.1`: [`ILlmSchemaV3_1`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3_1.ts)
297
- *
298
- * @template Parameters Target parameters type
299
- * @template Model LLM schema model
300
- * @template Config Configuration of LLM schema composition
301
- * @returns LLM parameters schema
302
- * @reference https://platform.openai.com/docs/guides/function-calling
303
- * @reference https://platform.openai.com/docs/guides/structured-outputs
304
- */
305
- export function parameters(): never;
306
-
307
- /**
308
- * TypeScript parameters to LLM parameters schema.
309
- *
310
- * Creates an LLM (Large Language Model) parameters schema, a type metadata that is used in the
311
- * [LLM function calling](https://platform.openai.com/docs/guides/function-calling)
312
- * and [LLM structured outputs](https://platform.openai.com/docs/guides/structured-outputs),
313
- * from a TypeScript parameters type.
314
- *
315
- * For references, LLM identifies only keyworded arguments, not positional arguments.
316
- * Therefore, the TypeScript parameters type must be an object type, and its properties
317
- * must be static. If there are dynamic properties, it would be a compilation error.
318
- *
319
- * Also, such parameters type can be utilized not only for the LLM function calling,
320
- * but also for the LLM structured outputs. The LLM structured outputs is a feature
321
- * that LLM (Large Language Model) can generate a structured output, not only a plain
322
- * text, by filling the parameters from the conversation (maybe chatting text) with user
323
- * (human).
324
- *
325
- * Here is the list of available `Model` types with their corresponding LLM schema.
326
- * Reading the following list, and determine the `Model` type considering the
327
- * characteristics of the target LLM provider.
328
- *
329
- * - LLM provider schemas
330
- * - `chatgpt`: [`IChatGptSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IChatGptSchema.ts)
331
- * - `claude`: [`IClaudeSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IClaudeSchema.ts)
332
- * - `gemini`: [`IGeminiSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IGeminiSchema.ts)
333
- * - `llama`: [`ILlamaSchema`](https://github.com/samchon/openapi/blob/master/src/structures/ILlamaSchema.ts)
334
- * - Midldle layer schemas
335
- * - `3.0`: [`ILlmSchemaV3`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3.ts)
336
- * - `3.1`: [`ILlmSchemaV3_1`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3_1.ts)
337
- *
338
- * @template Parameters Target parameters type
339
- * @template Model LLM schema model
340
- * @template Config Configuration of LLM schema composition
341
- * @returns LLM parameters schema
342
- * @reference https://platform.openai.com/docs/guides/function-calling
343
- * @reference https://platform.openai.com/docs/guides/structured-outputs
344
- */
345
- export function parameters<
346
- Parameters extends Record<string, any>,
347
- Model extends ILlmSchema.Model,
348
- Config extends Partial<ILlmSchema.ModelConfig[Model]> = {},
349
- >(): ILlmSchema.ModelParameters[Model];
350
-
351
- /**
352
- * @internal
353
- */
354
- export function parameters(): never {
355
- halt("parameters");
356
- }
357
-
358
- /**
359
- * > You must configure the generic argument `T`.
360
- *
361
- * TypeScript type to LLM type schema.
362
- *
363
- * Creates an LLM (Large Language Model) type schema, a type metadata that is used in the
364
- * [LLM function calling](@reference https://platform.openai.com/docs/guides/function-calling),
365
- * from a TypeScript type.
366
- *
367
- * The returned {@link ILlmSchema} type would be specified by the `Model` argument,
368
- * and here is the list of available `Model` types with their corresponding LLM schema.
369
- * Reading the following list, and determine the `Model` type considering the
370
- * characteristics of the target LLM provider.
371
- *
372
- * - LLM provider schemas
373
- * - `chatgpt`: [`IChatGptSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IChatGptSchema.ts)
374
- * - `claude`: [`IClaudeSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IClaudeSchema.ts)
375
- * - `gemini`: [`IGeminiSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IGeminiSchema.ts)
376
- * - `llama`: [`ILlamaSchema`](https://github.com/samchon/openapi/blob/master/src/structures/ILlamaSchema.ts)
377
- * - Middle layer schemas
378
- * - `3.0`: [`ILlmSchemaV3`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3.ts)
379
- * - `3.1`: [`ILlmSchemaV3_1`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3_1.ts)
380
- *
381
- * If you actually want to perform the LLM function calling with TypeScript functions,
382
- * you can do it with the {@link application} function. Otherwise you hope to perform the
383
- * structured output, {@link parameters} function is better. Let's enjoy the LLM function calling
384
- * and structured output with the native TypeScript functions and types.
385
- *
386
- * > **What LLM function calling is?**
387
- * >
388
- * > LLM (Large Language Model) selects the proper function and fills the arguments,
389
- * > but the actual function call execution is not by LLM, but by you.
390
- * >
391
- * > In nowadays, most LLM (Large Language Model) like OpenAI are supporting
392
- * > "function calling" feature. The "function calling" means that LLM automatically selects
393
- * > a proper function and compose parameter values from the user's chatting text.
394
- * >
395
- * > When LLM selects the proper function and its arguments, you just call the function
396
- * > with the arguments. And then informs the return value to the LLM by system prompt,
397
- * > LLM will continue the next conversation based on the return value.
398
- *
399
- * @template T Target type
400
- * @template Model LLM schema model
401
- * @template Config Configuration of LLM schema composition
402
- * @returns LLM schema
403
- * @reference https://platform.openai.com/docs/guides/function-calling
404
- * @reference https://platform.openai.com/docs/guides/structured-outputs
405
- * @author Jeongho Nam - https://github.com/samchon
406
- */
407
- export function schema(): never;
408
-
409
- /**
410
- * TypeScript type to LLM type schema.
411
- *
412
- * Creates an LLM (Large Language Model) type schema, a type metadata that is used in the
413
- * [LLM function calling](@reference https://platform.openai.com/docs/guides/function-calling),
414
- * from a TypeScript type.
415
- *
416
- * The returned {@link ILlmSchema} type would be specified by the `Model` argument,
417
- * and here is the list of available `Model` types with their corresponding LLM schema:
418
- *
419
- * - LLM provider schemas
420
- * - `chatgpt`: [`IChatGptSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IChatGptSchema.ts)
421
- * - `claude`: [`IClaudeSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IClaudeSchema.ts)
422
- * - `gemini`: [`IGeminiSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IGeminiSchema.ts)
423
- * - `llama`: [`ILlamaSchema`](https://github.com/samchon/openapi/blob/master/src/structures/ILlamaSchema.ts)
424
- * - Middle layer schemas
425
- * - `3.0`: [`ILlmSchemaV3`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3.ts)
426
- * - `3.1`: [`ILlmSchemaV3_1`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3_1.ts)
427
- *
428
- * If you actually want to perform the LLM function calling with TypeScript functions,
429
- * you can do it with the {@link application} function. Otherwise you hope to perform the
430
- * structured output, {@link parameters} function is better. Let's enjoy the LLM function calling
431
- * and structured output with the native TypeScript functions and types.
432
- *
433
- * > **What LLM function calling is?**
434
- * >
435
- * > LLM (Large Language Model) selects the proper function and fills the arguments,
436
- * > but the actual function call execution is not by LLM, but by you.
437
- * >
438
- * > In nowadays, most LLM (Large Language Model) like OpenAI are supporting
439
- * > "function calling" feature. The "function calling" means that LLM automatically selects
440
- * > a proper function and compose parameter values from the user's chatting text.
441
- * >
442
- * > When LLM selects the proper function and its arguments, you just call the function
443
- * > with the arguments. And then informs the return value to the LLM by system prompt,
444
- * > LLM will continue the next conversation based on the return value.
445
- *
446
- * @template T Target type
447
- * @template Model LLM schema model
448
- * @template Config Configuration of LLM schema composition
449
- * @returns LLM schema
450
- * @reference https://platform.openai.com/docs/guides/function-calling
451
- * @reference https://platform.openai.com/docs/guides/structured-outputs
452
- * @author Jeongho Nam - https://github.com/samchon
453
- */
454
- export function schema<
455
- T,
456
- Model extends ILlmSchema.Model,
457
- Config extends Partial<ILlmSchema.ModelConfig[Model]> = {},
458
- >(
459
- ...$defs: Extract<
460
- ILlmSchema.ModelSchema[Model],
461
- { $ref: string }
462
- > extends never
463
- ? []
464
- : [Record<string, ILlmSchema.ModelSchema[Model]>]
465
- ): ILlmSchema.ModelSchema[Model];
466
-
467
- /**
468
- * @internal
469
- */
470
- export function schema(): never {
471
- halt("schema");
472
- }
473
-
474
- /**
475
- * @internal
476
- */
477
- function halt(name: string): never {
478
- throw new Error(
479
- `Error on typia.llm.${name}(): no transform has been configured. Read and follow https://typia.io/docs/setup please.`,
480
- );
481
- }
1
+ import { ILlmApplication, ILlmSchema } from "@samchon/openapi";
2
+
3
+ import { ILlmApplicationOfValidate } from "./module";
4
+
5
+ /**
6
+ * > You must configure the generic argument `App`.
7
+ *
8
+ * TypeScript functions to LLM function calling application with validators.
9
+ *
10
+ * Creates an application of LLM (Large Language Model) function calling application
11
+ * from a TypeScript class or interface type containing the target functions to be
12
+ * called by the LLM function calling feature.
13
+ *
14
+ * If you put the returned {@link ILlmApplicationOfValidate.functions} objects to the
15
+ * LLM provider like [OpenAI (ChatGPT)](https://openai.com/), the LLM will automatically
16
+ * select the proper function and fill its arguments from the conversation
17
+ * (maybe chatting text) with user (human). This is the concept of the LLM function calling.
18
+ *
19
+ * Additionally, the LLM function calling sometimes makes a mistake by composing wrongly typed
20
+ * {@link ILlmFunctionOfValidate.parameters}. In that case, deliver return value of the
21
+ * {@link ILlmFunctionOfValidate.validate} function, then LLM provider will correct the
22
+ * parameters at the next conversation. The {@link ILlmFunctionOfValidate.validate} function
23
+ * is a validator function reporting the detailed information about the wrong typed parameters.
24
+ *
25
+ * By the way, there can be some parameters (or their nested properties) which must be
26
+ * composed by human, not by LLM. File uploading feature or some sensitive information
27
+ * like secret key (password) are the examples. In that case, you can separate the
28
+ * function parameters to both LLM and human sides by configuring the
29
+ * {@link ILlmApplicationOfValidate.IOptions.separate} property. The separated parameters
30
+ * are assigned to the {@link ILlmFunctionOfValidate.separated} property.
31
+ *
32
+ * For reference, the actual function call execution is not by LLM, but by you.
33
+ * When the LLM selects the proper function and fills the arguments, you just call
34
+ * the function with the LLM prepared arguments. And then informs the return value to
35
+ * the LLM by system prompt. The LLM will continue the next conversation based on
36
+ * the return value.
37
+ *
38
+ * Additionally, if you've configured {@link ILlmApplicationOfValidate.IOptions.separate},
39
+ * so that the parameters are separated to human and LLM sides, you can merge these
40
+ * human and LLM sides' parameters into one through {@link HttpLlm.mergeParameters}
41
+ * before the actual LLM function call execution.
42
+ *
43
+ * Here is the list of available `Model` types with their corresponding LLM schema.
44
+ * Reading the following list, and determine the `Model` type considering the
45
+ * characteristics of the target LLM provider.
46
+ *
47
+ * - LLM provider schemas
48
+ * - `chatgpt`: [`IChatGptSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IChatGptSchema.ts)
49
+ * - `claude`: [`IClaudeSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IClaudeSchema.ts)
50
+ * - `gemini`: [`IGeminiSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IGeminiSchema.ts)
51
+ * - `llama`: [`ILlamaSchema`](https://github.com/samchon/openapi/blob/master/src/structures/ILlamaSchema.ts)
52
+ * - Middle layer schemas
53
+ * - `3.0`: [`ILlmSchemaV3`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3.ts)
54
+ * - `3.1`: [`ILlmSchemaV3_1`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3_1.ts)
55
+ *
56
+ * @template App Target class or interface type collecting the functions to call
57
+ * @template Model LLM schema model
58
+ * @template Config Configuration of LLM schema composition
59
+ * @param options Options for the LLM application construction
60
+ * @returns Application of LLM function calling schemas
61
+ * @reference https://platform.openai.com/docs/guides/function-calling
62
+ * @author Jeongho Nam - https://github.com/samchon
63
+ */
64
+ export function applicationOfValidate(
65
+ options?: Partial<Pick<ILlmApplicationOfValidate.IOptions<any>, "separate">>,
66
+ ): never;
67
+
68
+ /**
69
+ * TypeScript functions to LLM function calling application with validators.
70
+ *
71
+ * Creates an application of LLM (Large Language Model) function calling application
72
+ * from a TypeScript class or interface type containing the target functions to be
73
+ * called by the LLM function calling feature.
74
+ *
75
+ * If you put the returned {@link ILlmApplicationOfValidate.functions} objects to the
76
+ * LLM provider like [OpenAI (ChatGPT)](https://openai.com/), the LLM will automatically
77
+ * select the proper function and fill its arguments from the conversation
78
+ * (maybe chatting text) with user (human). This is the concept of the LLM function calling.
79
+ *
80
+ * Additionally, the LLM function calling sometimes makes a mistake by composing wrongly typed
81
+ * {@link ILlmFunctionOfValidate.parameters}. In that case, deliver return value of the
82
+ * {@link ILlmFunctionOfValidate.validate} function, then LLM provider will correct the
83
+ * parameters at the next conversation. The {@link ILlmFunctionOfValidate.validate} function
84
+ * is a validator function reporting the detailed information about the wrong typed parameters.
85
+ *
86
+ * By the way, there can be some parameters (or their nested properties) which must be
87
+ * composed by human, not by LLM. File uploading feature or some sensitive information
88
+ * like secret key (password) are the examples. In that case, you can separate the
89
+ * function parameters to both LLM and human sides by configuring the
90
+ * {@link ILlmApplicationOfValidate.IOptions.separate} property. The separated parameters
91
+ * are assigned to the {@link ILlmFunctionOfValidate.separated} property.
92
+ *
93
+ * For reference, the actual function call execution is not by LLM, but by you.
94
+ * When the LLM selects the proper function and fills the arguments, you just call
95
+ * the function with the LLM prepared arguments. And then informs the return value to
96
+ * the LLM by system prompt. The LLM will continue the next conversation based on
97
+ * the return value.
98
+ *
99
+ * Additionally, if you've configured {@link ILlmApplicationOfValidate.IOptions.separate},
100
+ * so that the parameters are separated to human and LLM sides, you can merge these
101
+ * human and LLM sides' parameters into one through {@link HttpLlm.mergeParameters}
102
+ * before the actual LLM function call execution.
103
+ *
104
+ * Here is the list of available `Model` types with their corresponding LLM schema.
105
+ * Read the following list, and determine the `Model` type considering the
106
+ * characteristics of the target LLM provider.
107
+ *
108
+ * - LLM provider schemas
109
+ * - `chatgpt`: [`IChatGptSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IChatGptSchema.ts)
110
+ * - `claude`: [`IClaudeSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IClaudeSchema.ts)
111
+ * - `gemini`: [`IGeminiSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IGeminiSchema.ts)
112
+ * - `llama`: [`ILlamaSchema`](https://github.com/samchon/openapi/blob/master/src/structures/ILlamaSchema.ts)
113
+ * - Middle layer schemas
114
+ * - `3.0`: [`ILlmSchemaV3`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3.ts)
115
+ * - `3.1`: [`ILlmSchemaV3_1`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3_1.ts)
116
+ *
117
+ * @template App Target class or interface type collecting the functions to call
118
+ * @template Model LLM schema model
119
+ * @template Config Configuration of LLM schema composition
120
+ * @param options Options for the LLM application construction
121
+ * @returns Application of LLM function calling schemas
122
+ * @reference https://platform.openai.com/docs/guides/function-calling
123
+ * @author Jeongho Nam - https://github.com/samchon
124
+ */
125
+ export function applicationOfValidate<
126
+ App extends Record<string, any>,
127
+ Model extends ILlmSchema.Model,
128
+ Config extends Partial<ILlmSchema.ModelConfig[Model]> = {},
129
+ >(
130
+ options?: Partial<
131
+ Pick<ILlmApplicationOfValidate.IOptions<Model>, "separate">
132
+ >,
133
+ ): ILlmApplicationOfValidate<Model>;
134
+
135
+ /**
136
+ * @internal
137
+ */
138
+ export function applicationOfValidate(): never {
139
+ halt("applicationOfValidate");
140
+ }
141
+
142
+ /**
143
+ * > You must configure the generic argument `App`.
144
+ *
145
+ * TypeScript functions to LLM function calling application.
146
+ *
147
+ * Creates an application of LLM (Large Language Model) function calling application
148
+ * from a TypeScript class or interface type containing the target functions to be
149
+ * called by the LLM function calling feature.
150
+ *
151
+ * If you put the returned {@link ILlmApplication.functions} objects to the LLM provider
152
+ * like [OpenAI (ChatGPT)](https://openai.com/), the LLM will automatically select the
153
+ * proper function and fill its arguments from the conversation (maybe chatting text)
154
+ * with user (human). This is the concept of the LLM function calling.
155
+ *
156
+ * By the way, there can be some parameters (or their nested properties) which must be
157
+ * composed by human, not by LLM. File uploading feature or some sensitive information
158
+ * like secret key (password) are the examples. In that case, you can separate the
159
+ * function parameters to both LLM and human sides by configuring the
160
+ * {@link ILlmApplication.IOptions.separate} property. The separated parameters are
161
+ * assigned to the {@link ILlmFunction.separated} property.
162
+ *
163
+ * For reference, the actual function call execution is not by LLM, but by you.
164
+ * When the LLM selects the proper function and fills the arguments, you just call
165
+ * the function with the LLM prepared arguments. And then informs the return value to
166
+ * the LLM by system prompt. The LLM will continue the next conversation based on
167
+ * the return value.
168
+ *
169
+ * Additionally, if you've configured {@link ILlmApplication.IOptions.separate},
170
+ * so that the parameters are separated to human and LLM sides, you can merge these
171
+ * human and LLM sides' parameters into one through {@link HttpLlm.mergeParameters}
172
+ * before the actual LLM function call execution.
173
+ *
174
+ * Here is the list of available `Model` types with their corresponding LLM schema.
175
+ * Read the following list, and determine the `Model` type considering the
176
+ * characteristics of the target LLM provider.
177
+ *
178
+ * - LLM provider schemas
179
+ * - `chatgpt`: [`IChatGptSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IChatGptSchema.ts)
180
+ * - `claude`: [`IClaudeSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IClaudeSchema.ts)
181
+ * - `gemini`: [`IGeminiSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IGeminiSchema.ts)
182
+ * - `llama`: [`ILlamaSchema`](https://github.com/samchon/openapi/blob/master/src/structures/ILlamaSchema.ts)
183
+ * - Middle layer schemas
184
+ * - `3.0`: [`ILlmSchemaV3`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3.ts)
185
+ * - `3.1`: [`ILlmSchemaV3_1`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3_1.ts)
186
+ *
187
+ * @template App Target class or interface type collecting the functions to call
188
+ * @template Model LLM schema model
189
+ * @template Config Configuration of LLM schema composition
190
+ * @param options Options for the LLM application construction
191
+ * @returns Application of LLM function calling schemas
192
+ * @reference https://platform.openai.com/docs/guides/function-calling
193
+ * @author Jeongho Nam - https://github.com/samchon
194
+ */
195
+ export function application(
196
+ options?: Partial<Pick<ILlmApplication.IOptions<any>, "separate">>,
197
+ ): never;
198
+
199
+ /**
200
+ * TypeScript functions to LLM function calling application.
201
+ *
202
+ * Creates an application of LLM (Large Language Model) function calling application
203
+ * from a TypeScript class or interface type containing the target functions to be
204
+ * called by the LLM function calling feature.
205
+ *
206
+ * If you put the returned {@link ILlmApplication.functions} objects to the LLM provider
207
+ * like [OpenAI (ChatGPT)](https://openai.com/), the LLM will automatically select the
208
+ * proper function and fill its arguments from the conversation (maybe chatting text)
209
+ * with user (human). This is the concept of the LLM function calling.
210
+ *
211
+ * By the way, there can be some parameters (or their nested properties) which must be
212
+ * composed by human, not by LLM. File uploading feature or some sensitive information
213
+ * like secret key (password) are the examples. In that case, you can separate the
214
+ * function parameters to both LLM and human sides by configuring the
215
+ * {@link ILlmApplication.IOptions.separate} property. The separated parameters are
216
+ * assigned to the {@link ILlmFunction.separated} property.
217
+ *
218
+ * For reference, the actual function call execution is not by LLM, but by you.
219
+ * When the LLM selects the proper function and fills the arguments, you just call
220
+ * the function with the LLM prepared arguments. And then informs the return value to
221
+ * the LLM by system prompt. The LLM will continue the next conversation based on
222
+ * the return value.
223
+ *
224
+ * Additionally, if you've configured {@link ILlmApplication.IOptions.separate},
225
+ * so that the parameters are separated to human and LLM sides, you can merge these
226
+ * human and LLM sides' parameters into one through {@link HttpLlm.mergeParameters}
227
+ * before the actual LLM function call execution.
228
+ *
229
+ * Here is the list of available `Model` types with their corresponding LLM schema.
230
+ * Read the following list, and determine the `Model` type considering the
231
+ * characteristics of the target LLM provider.
232
+ *
233
+ * - LLM provider schemas
234
+ * - `chatgpt`: [`IChatGptSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IChatGptSchema.ts)
235
+ * - `claude`: [`IClaudeSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IClaudeSchema.ts)
236
+ * - `gemini`: [`IGeminiSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IGeminiSchema.ts)
237
+ * - `llama`: [`ILlamaSchema`](https://github.com/samchon/openapi/blob/master/src/structures/ILlamaSchema.ts)
238
+ * - Middle layer schemas
239
+ * - `3.0`: [`ILlmSchemaV3`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3.ts)
240
+ * - `3.1`: [`ILlmSchemaV3_1`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3_1.ts)
241
+ *
242
+ * @template App Target class or interface type collecting the functions to call
243
+ * @template Model LLM schema model
244
+ * @template Config Configuration of LLM schema composition
245
+ * @param options Options for the LLM application construction
246
+ * @returns Application of LLM function calling schemas
247
+ * @reference https://platform.openai.com/docs/guides/function-calling
248
+ * @author Jeongho Nam - https://github.com/samchon
249
+ */
250
+ export function application<
251
+ App extends Record<string, any>,
252
+ Model extends ILlmSchema.Model,
253
+ Config extends Partial<ILlmSchema.ModelConfig[Model]> = {},
254
+ >(
255
+ options?: Partial<Pick<ILlmApplication.IOptions<Model>, "separate">>,
256
+ ): ILlmApplication<Model>;
257
+
258
+ /**
259
+ * @internal
260
+ */
261
+ export function application(): never {
262
+ halt("application");
263
+ }
264
+
265
+ /**
266
+ * > You must configure the generic argument `Parameters`.
267
+ *
268
+ * TypeScript parameters to LLM parameters schema.
269
+ *
270
+ * Creates an LLM (Large Language Model) parameters schema, a type metadata that is used in the
271
+ * [LLM function calling](https://platform.openai.com/docs/guides/function-calling)
272
+ * and [LLM structured outputs](https://platform.openai.com/docs/guides/structured-outputs),
273
+ * from a TypeScript parameters type.
274
+ *
275
+ * For references, LLM identifies only keyworded arguments, not positional arguments.
276
+ * Therefore, the TypeScript parameters type must be an object type, and its properties
277
+ * must be static. If dynamic properties exist, it would be a compilation error.
278
+ *
279
+ * Also, such parameters type can be utilized not only for the LLM function calling,
280
+ * but also for the LLM structured outputs. The LLM structured outputs is a feature
281
+ * that LLM (Large Language Model) can generate a structured output, not only a plain
282
+ * text, by filling the parameters from the conversation (maybe chatting text) with user
283
+ * (human).
284
+ *
285
+ * Here is the list of available `Model` types with their corresponding LLM schema.
286
+ * Read the following list, and determine the `Model` type considering the
287
+ * characteristics of the target LLM provider.
288
+ *
289
+ * - LLM provider schemas
290
+ * - `chatgpt`: [`IChatGptSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IChatGptSchema.ts)
291
+ * - `claude`: [`IClaudeSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IClaudeSchema.ts)
292
+ * - `gemini`: [`IGeminiSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IGeminiSchema.ts)
293
+ * - `llama`: [`ILlamaSchema`](https://github.com/samchon/openapi/blob/master/src/structures/ILlamaSchema.ts)
294
+ * - Middle layer schemas
295
+ * - `3.0`: [`ILlmSchemaV3`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3.ts)
296
+ * - `3.1`: [`ILlmSchemaV3_1`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3_1.ts)
297
+ *
298
+ * @template Parameters Target parameters type
299
+ * @template Model LLM schema model
300
+ * @template Config Configuration of LLM schema composition
301
+ * @returns LLM parameters schema
302
+ * @reference https://platform.openai.com/docs/guides/function-calling
303
+ * @reference https://platform.openai.com/docs/guides/structured-outputs
304
+ */
305
+ export function parameters(): never;
306
+
307
+ /**
308
+ * TypeScript parameters to LLM parameters schema.
309
+ *
310
+ * Creates an LLM (Large Language Model) parameters schema, a type metadata that is used in the
311
+ * [LLM function calling](https://platform.openai.com/docs/guides/function-calling)
312
+ * and [LLM structured outputs](https://platform.openai.com/docs/guides/structured-outputs),
313
+ * from a TypeScript parameters type.
314
+ *
315
+ * For references, LLM identifies only keyworded arguments, not positional arguments.
316
+ * Therefore, the TypeScript parameters type must be an object type, and its properties
317
+ * must be static. If dynamic properties exist, it would be a compilation error.
318
+ *
319
+ * Also, such parameters type can be utilized not only for the LLM function calling,
320
+ * but also for the LLM structured outputs. The LLM structured outputs is a feature
321
+ * that LLM (Large Language Model) can generate a structured output, not only a plain
322
+ * text, by filling the parameters from the conversation (maybe chatting text) with user
323
+ * (human).
324
+ *
325
+ * Here is the list of available `Model` types with their corresponding LLM schema.
326
+ * Read the following list, and determine the `Model` type considering the
327
+ * characteristics of the target LLM provider.
328
+ *
329
+ * - LLM provider schemas
330
+ * - `chatgpt`: [`IChatGptSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IChatGptSchema.ts)
331
+ * - `claude`: [`IClaudeSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IClaudeSchema.ts)
332
+ * - `gemini`: [`IGeminiSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IGeminiSchema.ts)
333
+ * - `llama`: [`ILlamaSchema`](https://github.com/samchon/openapi/blob/master/src/structures/ILlamaSchema.ts)
334
+ * - Middle layer schemas
335
+ * - `3.0`: [`ILlmSchemaV3`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3.ts)
336
+ * - `3.1`: [`ILlmSchemaV3_1`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3_1.ts)
337
+ *
338
+ * @template Parameters Target parameters type
339
+ * @template Model LLM schema model
340
+ * @template Config Configuration of LLM schema composition
341
+ * @returns LLM parameters schema
342
+ * @reference https://platform.openai.com/docs/guides/function-calling
343
+ * @reference https://platform.openai.com/docs/guides/structured-outputs
344
+ */
345
+ export function parameters<
346
+ Parameters extends Record<string, any>,
347
+ Model extends ILlmSchema.Model,
348
+ Config extends Partial<ILlmSchema.ModelConfig[Model]> = {},
349
+ >(): ILlmSchema.ModelParameters[Model];
350
+
351
+ /**
352
+ * @internal
353
+ */
354
+ export function parameters(): never {
355
+ halt("parameters");
356
+ }
357
+
358
+ /**
359
+ * > You must configure the generic argument `T`.
360
+ *
361
+ * TypeScript type to LLM type schema.
362
+ *
363
+ * Creates an LLM (Large Language Model) type schema, a type metadata that is used in the
364
+ * [LLM function calling](https://platform.openai.com/docs/guides/function-calling),
365
+ * from a TypeScript type.
366
+ *
367
+ * The returned {@link ILlmSchema} type would be specified by the `Model` argument,
368
+ * and here is the list of available `Model` types with their corresponding LLM schema.
369
+ * Read the following list, and determine the `Model` type considering the
370
+ * characteristics of the target LLM provider.
371
+ *
372
+ * - LLM provider schemas
373
+ * - `chatgpt`: [`IChatGptSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IChatGptSchema.ts)
374
+ * - `claude`: [`IClaudeSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IClaudeSchema.ts)
375
+ * - `gemini`: [`IGeminiSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IGeminiSchema.ts)
376
+ * - `llama`: [`ILlamaSchema`](https://github.com/samchon/openapi/blob/master/src/structures/ILlamaSchema.ts)
377
+ * - Middle layer schemas
378
+ * - `3.0`: [`ILlmSchemaV3`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3.ts)
379
+ * - `3.1`: [`ILlmSchemaV3_1`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3_1.ts)
380
+ *
381
+ * If you actually want to perform the LLM function calling with TypeScript functions,
382
+ * you can do it with the {@link application} function. Otherwise you hope to perform the
383
+ * structured output, {@link parameters} function is better. Let's enjoy the LLM function calling
384
+ * and structured output with the native TypeScript functions and types.
385
+ *
386
+ * > **What is LLM function calling?**
387
+ * >
388
+ * > LLM (Large Language Model) selects the proper function and fills the arguments,
389
+ * > but actual function call execution is not by LLM, but by you.
390
+ * >
391
+ * > In nowadays, most LLM (Large Language Model) like OpenAI are supporting
392
+ * > "function calling" feature. The "function calling" means that LLM automatically selects
393
+ * > a proper function and compose parameter values from the user's chatting text.
394
+ * >
395
+ * > When LLM selects the proper function and its arguments, you just call the function
396
+ * > with the arguments. And then informs the return value to the LLM by system prompt,
397
+ * > LLM will continue the next conversation based on the return value.
398
+ *
399
+ * @template T Target type
400
+ * @template Model LLM schema model
401
+ * @template Config Configuration of LLM schema composition
402
+ * @returns LLM schema
403
+ * @reference https://platform.openai.com/docs/guides/function-calling
404
+ * @reference https://platform.openai.com/docs/guides/structured-outputs
405
+ * @author Jeongho Nam - https://github.com/samchon
406
+ */
407
+ export function schema(): never;
408
+
409
+ /**
410
+ * TypeScript type to LLM type schema.
411
+ *
412
+ * Creates an LLM (Large Language Model) type schema, a type metadata that is used in the
413
+ * [LLM function calling](https://platform.openai.com/docs/guides/function-calling),
414
+ * from a TypeScript type.
415
+ *
416
+ * The returned {@link ILlmSchema} type would be specified by the `Model` argument,
417
+ * and here is the list of available `Model` types with their corresponding LLM schema:
418
+ *
419
+ * - LLM provider schemas
420
+ * - `chatgpt`: [`IChatGptSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IChatGptSchema.ts)
421
+ * - `claude`: [`IClaudeSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IClaudeSchema.ts)
422
+ * - `gemini`: [`IGeminiSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IGeminiSchema.ts)
423
+ * - `llama`: [`ILlamaSchema`](https://github.com/samchon/openapi/blob/master/src/structures/ILlamaSchema.ts)
424
+ * - Middle layer schemas
425
+ * - `3.0`: [`ILlmSchemaV3`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3.ts)
426
+ * - `3.1`: [`ILlmSchemaV3_1`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3_1.ts)
427
+ *
428
+ * If you actually want to perform the LLM function calling with TypeScript functions,
429
+ * you can do it with the {@link application} function. Otherwise you hope to perform the
430
+ * structured output, {@link parameters} function is better. Let's enjoy the LLM function calling
431
+ * and structured output with the native TypeScript functions and types.
432
+ *
433
+ * > **What is LLM function calling?**
434
+ * >
435
+ * > LLM (Large Language Model) selects the proper function and fills the arguments,
436
+ * > but actual function call execution is not by LLM, but by you.
437
+ * >
438
+ * > In nowadays, most LLM (Large Language Model) like OpenAI are supporting
439
+ * > "function calling" feature. The "function calling" means that LLM automatically selects
440
+ * > a proper function and compose parameter values from the user's chatting text.
441
+ * >
442
+ * > When LLM selects the proper function and its arguments, you just call the function
443
+ * > with the arguments. And then informs the return value to the LLM by system prompt,
444
+ * > LLM will continue the next conversation based on the return value.
445
+ *
446
+ * @template T Target type
447
+ * @template Model LLM schema model
448
+ * @template Config Configuration of LLM schema composition
449
+ * @returns LLM schema
450
+ * @reference https://platform.openai.com/docs/guides/function-calling
451
+ * @reference https://platform.openai.com/docs/guides/structured-outputs
452
+ * @author Jeongho Nam - https://github.com/samchon
453
+ */
454
+ export function schema<
455
+ T,
456
+ Model extends ILlmSchema.Model,
457
+ Config extends Partial<ILlmSchema.ModelConfig[Model]> = {},
458
+ >(
459
+ ...$defs: Extract<
460
+ ILlmSchema.ModelSchema[Model],
461
+ { $ref: string }
462
+ > extends never
463
+ ? []
464
+ : [Record<string, ILlmSchema.ModelSchema[Model]>]
465
+ ): ILlmSchema.ModelSchema[Model];
466
+
467
+ /**
468
+ * @internal
469
+ */
470
+ export function schema(): never {
471
+ halt("schema");
472
+ }
473
+
474
+ /**
475
+ * @internal
476
+ */
477
+ function halt(name: string): never {
478
+ throw new Error(
479
+ `Error on typia.llm.${name}(): no transform has been configured. Read and follow https://typia.io/docs/setup please.`,
480
+ );
481
+ }