typia 7.2.1 → 7.3.0-dev.20241213

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (447) hide show
  1. package/LICENSE +21 -21
  2. package/README.md +148 -148
  3. package/lib/executable/typia.js +0 -0
  4. package/lib/programmers/llm/LlmApplicationProgrammer.js +0 -1
  5. package/lib/programmers/llm/LlmApplicationProgrammer.js.map +1 -1
  6. package/package.json +3 -3
  7. package/src/IRandomGenerator.ts +49 -49
  8. package/src/IReadableURLSearchParams.ts +9 -9
  9. package/src/IValidation.ts +21 -21
  10. package/src/executable/TypiaGenerateWizard.ts +83 -83
  11. package/src/executable/TypiaPatchWizard.ts +45 -45
  12. package/src/executable/TypiaSetupWizard.ts +179 -179
  13. package/src/executable/setup/ArgumentParser.ts +42 -42
  14. package/src/executable/setup/FileRetriever.ts +19 -19
  15. package/src/executable/setup/PackageManager.ts +87 -87
  16. package/src/factories/ExpressionFactory.ts +216 -216
  17. package/src/factories/IdentifierFactory.ts +89 -89
  18. package/src/factories/JsonMetadataFactory.ts +76 -76
  19. package/src/factories/LiteralFactory.ts +52 -52
  20. package/src/factories/MetadataCollection.ts +278 -278
  21. package/src/factories/MetadataCommentTagFactory.ts +650 -650
  22. package/src/factories/MetadataFactory.ts +404 -404
  23. package/src/factories/MetadataTypeTagFactory.ts +411 -411
  24. package/src/factories/MetadataTypeTagSchemaFactory.ts +82 -82
  25. package/src/factories/NumericRangeFactory.ts +72 -72
  26. package/src/factories/ProtobufFactory.ts +875 -875
  27. package/src/factories/StatementFactory.ts +90 -90
  28. package/src/factories/TemplateFactory.ts +64 -64
  29. package/src/factories/TypeFactory.ts +140 -140
  30. package/src/factories/internal/metadata/IMetadataIteratorProps.ts +17 -17
  31. package/src/factories/internal/metadata/MetadataHelper.ts +21 -21
  32. package/src/factories/internal/metadata/emplace_metadata_alias.ts +33 -33
  33. package/src/factories/internal/metadata/emplace_metadata_array_type.ts +39 -39
  34. package/src/factories/internal/metadata/emplace_metadata_object.ts +208 -208
  35. package/src/factories/internal/metadata/emplace_metadata_tuple.ts +57 -57
  36. package/src/factories/internal/metadata/explore_metadata.ts +31 -31
  37. package/src/factories/internal/metadata/iterate_metadata.ts +54 -54
  38. package/src/factories/internal/metadata/iterate_metadata_alias.ts +33 -33
  39. package/src/factories/internal/metadata/iterate_metadata_array.ts +63 -63
  40. package/src/factories/internal/metadata/iterate_metadata_atomic.ts +62 -62
  41. package/src/factories/internal/metadata/iterate_metadata_coalesce.ts +28 -28
  42. package/src/factories/internal/metadata/iterate_metadata_collection.ts +146 -146
  43. package/src/factories/internal/metadata/iterate_metadata_comment_tags.ts +32 -32
  44. package/src/factories/internal/metadata/iterate_metadata_constant.ts +76 -76
  45. package/src/factories/internal/metadata/iterate_metadata_escape.ts +49 -49
  46. package/src/factories/internal/metadata/iterate_metadata_function.ts +91 -91
  47. package/src/factories/internal/metadata/iterate_metadata_intersection.ts +213 -213
  48. package/src/factories/internal/metadata/iterate_metadata_map.ts +57 -57
  49. package/src/factories/internal/metadata/iterate_metadata_native.ts +255 -255
  50. package/src/factories/internal/metadata/iterate_metadata_object.ts +35 -35
  51. package/src/factories/internal/metadata/iterate_metadata_set.ts +57 -57
  52. package/src/factories/internal/metadata/iterate_metadata_sort.ts +87 -87
  53. package/src/factories/internal/metadata/iterate_metadata_template.ts +41 -41
  54. package/src/factories/internal/metadata/iterate_metadata_tuple.ts +26 -26
  55. package/src/factories/internal/metadata/iterate_metadata_union.ts +19 -19
  56. package/src/functional.ts +750 -750
  57. package/src/http.ts +1047 -1047
  58. package/src/internal/_IProtobufWriter.ts +18 -18
  59. package/src/internal/_ProtobufReader.ts +194 -194
  60. package/src/internal/_ProtobufSizer.ts +145 -145
  61. package/src/internal/_ProtobufWriter.ts +145 -145
  62. package/src/internal/_accessExpressionAsString.ts +46 -46
  63. package/src/internal/_assertGuard.ts +13 -13
  64. package/src/internal/_functionalTypeGuardErrorFactory.ts +4 -4
  65. package/src/internal/_httpFormDataReadArray.ts +4 -4
  66. package/src/internal/_httpFormDataReadBigint.ts +18 -18
  67. package/src/internal/_httpFormDataReadBlob.ts +10 -10
  68. package/src/internal/_httpFormDataReadBoolean.ts +16 -16
  69. package/src/internal/_httpFormDataReadFile.ts +10 -10
  70. package/src/internal/_httpFormDataReadNumber.ts +15 -15
  71. package/src/internal/_httpFormDataReadString.ts +10 -10
  72. package/src/internal/_httpHeaderReadBigint.ts +10 -10
  73. package/src/internal/_httpHeaderReadBoolean.ts +8 -8
  74. package/src/internal/_httpHeaderReadNumber.ts +7 -7
  75. package/src/internal/_httpParameterReadBigint.ts +10 -10
  76. package/src/internal/_httpParameterReadBoolean.ts +8 -8
  77. package/src/internal/_httpParameterReadNumber.ts +7 -7
  78. package/src/internal/_httpParameterReadString.ts +2 -2
  79. package/src/internal/_httpQueryParseURLSearchParams.ts +12 -12
  80. package/src/internal/_httpQueryReadArray.ts +4 -4
  81. package/src/internal/_httpQueryReadBigint.ts +12 -12
  82. package/src/internal/_httpQueryReadBoolean.ts +14 -14
  83. package/src/internal/_httpQueryReadNumber.ts +9 -9
  84. package/src/internal/_httpQueryReadString.ts +4 -4
  85. package/src/internal/_isBetween.ts +2 -2
  86. package/src/internal/_isBigintString.ts +8 -8
  87. package/src/internal/_isFormatByte.ts +7 -7
  88. package/src/internal/_isFormatDate.ts +3 -3
  89. package/src/internal/_isFormatDateTime.ts +4 -4
  90. package/src/internal/_isFormatDuration.ts +4 -4
  91. package/src/internal/_isFormatEmail.ts +4 -4
  92. package/src/internal/_isFormatHostname.ts +4 -4
  93. package/src/internal/_isFormatIdnEmail.ts +4 -4
  94. package/src/internal/_isFormatIdnHostname.ts +4 -4
  95. package/src/internal/_isFormatIpv4.ts +4 -4
  96. package/src/internal/_isFormatIpv6.ts +4 -4
  97. package/src/internal/_isFormatIri.ts +3 -3
  98. package/src/internal/_isFormatIriReference.ts +4 -4
  99. package/src/internal/_isFormatJsonPointer.ts +3 -3
  100. package/src/internal/_isFormatPassword.ts +1 -1
  101. package/src/internal/_isFormatRegex.ts +8 -8
  102. package/src/internal/_isFormatRelativeJsonPointer.ts +4 -4
  103. package/src/internal/_isFormatTime.ts +4 -4
  104. package/src/internal/_isFormatUri.ts +6 -6
  105. package/src/internal/_isFormatUriReference.ts +5 -5
  106. package/src/internal/_isFormatUriTemplate.ts +4 -4
  107. package/src/internal/_isFormatUrl.ts +4 -4
  108. package/src/internal/_isFormatUuid.ts +3 -3
  109. package/src/internal/_isTypeFloat.ts +5 -5
  110. package/src/internal/_isTypeInt32.ts +5 -5
  111. package/src/internal/_isTypeInt64.ts +5 -5
  112. package/src/internal/_isTypeUint32.ts +5 -5
  113. package/src/internal/_isTypeUint64.ts +5 -5
  114. package/src/internal/_isUniqueItems.ts +159 -159
  115. package/src/internal/_jsonStringifyNumber.ts +12 -12
  116. package/src/internal/_jsonStringifyRest.ts +3 -3
  117. package/src/internal/_jsonStringifyString.ts +42 -42
  118. package/src/internal/_jsonStringifyTail.ts +2 -2
  119. package/src/internal/_llmApplicationFinalize.ts +20 -20
  120. package/src/internal/_miscCloneAny.ts +46 -46
  121. package/src/internal/_notationAny.ts +37 -37
  122. package/src/internal/_notationCamel.ts +13 -13
  123. package/src/internal/_notationPascal.ts +8 -8
  124. package/src/internal/_notationSnake.ts +43 -43
  125. package/src/internal/_randomArray.ts +21 -21
  126. package/src/internal/_randomBigint.ts +6 -6
  127. package/src/internal/_randomBoolean.ts +1 -1
  128. package/src/internal/_randomFormatByte.ts +3 -3
  129. package/src/internal/_randomFormatDate.ts +18 -18
  130. package/src/internal/_randomFormatDatetime.ts +16 -16
  131. package/src/internal/_randomFormatDuration.ts +27 -27
  132. package/src/internal/_randomFormatEmail.ts +11 -11
  133. package/src/internal/_randomFormatHostname.ts +6 -6
  134. package/src/internal/_randomFormatIdnEmail.ts +3 -3
  135. package/src/internal/_randomFormatIdnHostname.ts +3 -3
  136. package/src/internal/_randomFormatIpv4.ts +11 -11
  137. package/src/internal/_randomFormatIpv6.ts +11 -11
  138. package/src/internal/_randomFormatIri.ts +3 -3
  139. package/src/internal/_randomFormatIriReference.ts +3 -3
  140. package/src/internal/_randomFormatJsonPointer.ts +7 -7
  141. package/src/internal/_randomFormatPassword.ts +8 -8
  142. package/src/internal/_randomFormatRegex.ts +4 -4
  143. package/src/internal/_randomFormatRelativeJsonPointer.ts +8 -8
  144. package/src/internal/_randomFormatTime.ts +14 -14
  145. package/src/internal/_randomFormatUri.ts +3 -3
  146. package/src/internal/_randomFormatUriReference.ts +3 -3
  147. package/src/internal/_randomFormatUriTemplate.ts +3 -3
  148. package/src/internal/_randomFormatUrl.ts +11 -11
  149. package/src/internal/_randomFormatUuid.ts +6 -6
  150. package/src/internal/_randomInteger.ts +47 -47
  151. package/src/internal/_randomNumber.ts +74 -74
  152. package/src/internal/_randomPattern.ts +10 -10
  153. package/src/internal/_randomPick.ts +9 -9
  154. package/src/internal/_randomString.ts +24 -24
  155. package/src/internal/_throwTypeGuardError.ts +5 -5
  156. package/src/internal/_validateReport.ts +13 -13
  157. package/src/internal/private/__notationCapitalize.ts +2 -2
  158. package/src/internal/private/__notationUnsnake.ts +24 -24
  159. package/src/json.ts +752 -752
  160. package/src/llm.ts +481 -481
  161. package/src/misc.ts +658 -658
  162. package/src/module.ts +937 -937
  163. package/src/notations.ts +827 -827
  164. package/src/programmers/AssertProgrammer.ts +454 -454
  165. package/src/programmers/CheckerProgrammer.ts +1617 -1617
  166. package/src/programmers/FeatureProgrammer.ts +622 -622
  167. package/src/programmers/ImportProgrammer.ts +185 -185
  168. package/src/programmers/IsProgrammer.ts +273 -273
  169. package/src/programmers/RandomProgrammer.ts +1190 -1190
  170. package/src/programmers/TypiaProgrammer.ts +174 -174
  171. package/src/programmers/ValidateProgrammer.ts +439 -439
  172. package/src/programmers/functional/FunctionalAssertFunctionProgrammer.ts +153 -153
  173. package/src/programmers/functional/FunctionalAssertParametersProgrammer.ts +125 -125
  174. package/src/programmers/functional/FunctionalAssertReturnProgrammer.ts +115 -115
  175. package/src/programmers/functional/FunctionalIsFunctionProgrammer.ts +72 -72
  176. package/src/programmers/functional/FunctionalIsParametersProgrammer.ts +113 -113
  177. package/src/programmers/functional/FunctionalIsReturnProgrammer.ts +116 -116
  178. package/src/programmers/functional/FunctionalValidateFunctionProgrammer.ts +119 -119
  179. package/src/programmers/functional/FunctionalValidateParametersProgrammer.ts +274 -274
  180. package/src/programmers/functional/FunctionalValidateReturnProgrammer.ts +135 -135
  181. package/src/programmers/functional/internal/FunctionalGeneralProgrammer.ts +34 -34
  182. package/src/programmers/helpers/AtomicPredicator.ts +35 -35
  183. package/src/programmers/helpers/CloneJoiner.ts +143 -143
  184. package/src/programmers/helpers/FunctionProgrammer.ts +67 -67
  185. package/src/programmers/helpers/HttpMetadataUtil.ts +21 -21
  186. package/src/programmers/helpers/NotationJoiner.ts +144 -144
  187. package/src/programmers/helpers/OptionPredicator.ts +15 -15
  188. package/src/programmers/helpers/ProtobufUtil.ts +228 -228
  189. package/src/programmers/helpers/PruneJoiner.ts +148 -148
  190. package/src/programmers/helpers/RandomJoiner.ts +168 -168
  191. package/src/programmers/helpers/StringifyJoinder.ts +115 -115
  192. package/src/programmers/helpers/StringifyPredicator.ts +13 -13
  193. package/src/programmers/helpers/UnionExplorer.ts +372 -372
  194. package/src/programmers/helpers/UnionPredicator.ts +79 -79
  195. package/src/programmers/helpers/disable_function_programmer_declare.ts +32 -32
  196. package/src/programmers/http/HttpAssertFormDataProgrammer.ts +99 -99
  197. package/src/programmers/http/HttpAssertHeadersProgrammer.ts +99 -99
  198. package/src/programmers/http/HttpAssertQueryProgrammer.ts +105 -105
  199. package/src/programmers/http/HttpFormDataProgrammer.ts +308 -308
  200. package/src/programmers/http/HttpHeadersProgrammer.ts +400 -400
  201. package/src/programmers/http/HttpIsFormDataProgrammer.ts +108 -108
  202. package/src/programmers/http/HttpIsHeadersProgrammer.ts +108 -108
  203. package/src/programmers/http/HttpIsQueryProgrammer.ts +114 -114
  204. package/src/programmers/http/HttpParameterProgrammer.ts +115 -115
  205. package/src/programmers/http/HttpQueryProgrammer.ts +336 -336
  206. package/src/programmers/http/HttpValidateFormDataProgrammer.ts +92 -92
  207. package/src/programmers/http/HttpValidateHeadersProgrammer.ts +92 -92
  208. package/src/programmers/http/HttpValidateQueryProgrammer.ts +98 -98
  209. package/src/programmers/internal/check_array_length.ts +47 -47
  210. package/src/programmers/internal/check_bigint.ts +50 -50
  211. package/src/programmers/internal/check_dynamic_key.ts +201 -201
  212. package/src/programmers/internal/check_dynamic_properties.ts +208 -208
  213. package/src/programmers/internal/check_everything.ts +23 -23
  214. package/src/programmers/internal/check_native.ts +27 -27
  215. package/src/programmers/internal/check_number.ts +112 -112
  216. package/src/programmers/internal/check_object.ts +75 -75
  217. package/src/programmers/internal/check_string.ts +50 -50
  218. package/src/programmers/internal/check_template.ts +48 -48
  219. package/src/programmers/internal/check_union_array_like.ts +335 -335
  220. package/src/programmers/internal/decode_union_object.ts +116 -116
  221. package/src/programmers/internal/feature_object_entries.ts +61 -61
  222. package/src/programmers/internal/json_schema_alias.ts +47 -47
  223. package/src/programmers/internal/json_schema_array.ts +45 -45
  224. package/src/programmers/internal/json_schema_bigint.ts +15 -15
  225. package/src/programmers/internal/json_schema_boolean.ts +15 -15
  226. package/src/programmers/internal/json_schema_constant.ts +26 -26
  227. package/src/programmers/internal/json_schema_description.ts +12 -12
  228. package/src/programmers/internal/json_schema_discriminator.ts +35 -35
  229. package/src/programmers/internal/json_schema_escaped.ts +82 -82
  230. package/src/programmers/internal/json_schema_native.ts +33 -33
  231. package/src/programmers/internal/json_schema_number.ts +15 -15
  232. package/src/programmers/internal/json_schema_object.ts +158 -158
  233. package/src/programmers/internal/json_schema_plugin.ts +18 -18
  234. package/src/programmers/internal/json_schema_station.ts +182 -182
  235. package/src/programmers/internal/json_schema_string.ts +15 -15
  236. package/src/programmers/internal/json_schema_template.ts +55 -55
  237. package/src/programmers/internal/json_schema_title.ts +20 -20
  238. package/src/programmers/internal/json_schema_tuple.ts +35 -35
  239. package/src/programmers/internal/metadata_to_pattern.ts +42 -42
  240. package/src/programmers/internal/postfix_of_tuple.ts +5 -5
  241. package/src/programmers/internal/prune_object_properties.ts +71 -71
  242. package/src/programmers/internal/stringify_dynamic_properties.ts +162 -162
  243. package/src/programmers/internal/stringify_regular_properties.ts +81 -81
  244. package/src/programmers/internal/template_to_pattern.ts +23 -23
  245. package/src/programmers/internal/wrap_metadata_rest_tuple.ts +23 -23
  246. package/src/programmers/json/JsonApplicationProgrammer.ts +279 -279
  247. package/src/programmers/json/JsonAssertParseProgrammer.ts +113 -113
  248. package/src/programmers/json/JsonAssertStringifyProgrammer.ts +115 -115
  249. package/src/programmers/json/JsonIsParseProgrammer.ts +114 -114
  250. package/src/programmers/json/JsonIsStringifyProgrammer.ts +108 -108
  251. package/src/programmers/json/JsonSchemasProgrammer.ts +91 -91
  252. package/src/programmers/json/JsonStringifyProgrammer.ts +1124 -1124
  253. package/src/programmers/json/JsonValidateParseProgrammer.ts +105 -105
  254. package/src/programmers/json/JsonValidateStringifyProgrammer.ts +124 -124
  255. package/src/programmers/llm/LlmApplicationOfValidateProgrammer.ts +81 -81
  256. package/src/programmers/llm/LlmApplicationProgrammer.ts +277 -278
  257. package/src/programmers/llm/LlmModelPredicator.ts +127 -127
  258. package/src/programmers/llm/LlmParametersProgrammer.ts +90 -90
  259. package/src/programmers/llm/LlmSchemaProgrammer.ts +143 -143
  260. package/src/programmers/misc/MiscAssertCloneProgrammer.ts +95 -95
  261. package/src/programmers/misc/MiscAssertPruneProgrammer.ts +116 -116
  262. package/src/programmers/misc/MiscCloneProgrammer.ts +1032 -1032
  263. package/src/programmers/misc/MiscIsCloneProgrammer.ts +99 -99
  264. package/src/programmers/misc/MiscIsPruneProgrammer.ts +97 -97
  265. package/src/programmers/misc/MiscLiteralsProgrammer.ts +80 -80
  266. package/src/programmers/misc/MiscPruneProgrammer.ts +728 -728
  267. package/src/programmers/misc/MiscValidateCloneProgrammer.ts +111 -111
  268. package/src/programmers/misc/MiscValidatePruneProgrammer.ts +113 -113
  269. package/src/programmers/notations/NotationAssertGeneralProgrammer.ts +101 -101
  270. package/src/programmers/notations/NotationGeneralProgrammer.ts +984 -984
  271. package/src/programmers/notations/NotationIsGeneralProgrammer.ts +105 -105
  272. package/src/programmers/notations/NotationValidateGeneralProgrammer.ts +119 -119
  273. package/src/programmers/protobuf/ProtobufAssertDecodeProgrammer.ts +98 -98
  274. package/src/programmers/protobuf/ProtobufAssertEncodeProgrammer.ts +102 -102
  275. package/src/programmers/protobuf/ProtobufDecodeProgrammer.ts +654 -654
  276. package/src/programmers/protobuf/ProtobufEncodeProgrammer.ts +945 -945
  277. package/src/programmers/protobuf/ProtobufIsDecodeProgrammer.ts +109 -109
  278. package/src/programmers/protobuf/ProtobufIsEncodeProgrammer.ts +98 -98
  279. package/src/programmers/protobuf/ProtobufMessageProgrammer.ts +179 -179
  280. package/src/programmers/protobuf/ProtobufValidateDecodeProgrammer.ts +92 -92
  281. package/src/programmers/protobuf/ProtobufValidateEncodeProgrammer.ts +119 -119
  282. package/src/protobuf.ts +868 -868
  283. package/src/reflect.ts +57 -57
  284. package/src/schemas/json/IJsonApplication.ts +73 -73
  285. package/src/schemas/json/IJsonSchemaCollection.ts +29 -29
  286. package/src/schemas/json/__IJsonApplication.ts +63 -63
  287. package/src/schemas/llm/ILlmApplicationOfValidate.ts +55 -55
  288. package/src/schemas/llm/ILlmFunctionOfValidate.ts +39 -39
  289. package/src/schemas/metadata/IMetadata.ts +35 -35
  290. package/src/schemas/metadata/IMetadataAlias.ts +6 -6
  291. package/src/schemas/metadata/IMetadataAliasType.ts +12 -12
  292. package/src/schemas/metadata/IMetadataApplication.ts +7 -7
  293. package/src/schemas/metadata/IMetadataArray.ts +6 -6
  294. package/src/schemas/metadata/IMetadataComponents.ts +11 -11
  295. package/src/schemas/metadata/IMetadataConstantValue.ts +11 -11
  296. package/src/schemas/metadata/IMetadataDictionary.ts +11 -11
  297. package/src/schemas/metadata/IMetadataMap.ts +8 -8
  298. package/src/schemas/metadata/IMetadataNative.ts +6 -6
  299. package/src/schemas/metadata/IMetadataObject.ts +6 -6
  300. package/src/schemas/metadata/IMetadataObjectType.ts +13 -13
  301. package/src/schemas/metadata/IMetadataSet.ts +7 -7
  302. package/src/schemas/metadata/IMetadataTemplate.ts +7 -7
  303. package/src/schemas/metadata/IMetadataTuple.ts +6 -6
  304. package/src/schemas/metadata/IMetadataTypeTag.ts +16 -16
  305. package/src/schemas/metadata/Metadata.ts +669 -669
  306. package/src/schemas/metadata/MetadataAlias.ts +46 -46
  307. package/src/schemas/metadata/MetadataAliasType.ts +63 -63
  308. package/src/schemas/metadata/MetadataApplication.ts +44 -44
  309. package/src/schemas/metadata/MetadataArray.ts +49 -49
  310. package/src/schemas/metadata/MetadataAtomic.ts +87 -87
  311. package/src/schemas/metadata/MetadataComponents.ts +98 -98
  312. package/src/schemas/metadata/MetadataConstantValue.ts +62 -62
  313. package/src/schemas/metadata/MetadataMap.ts +48 -48
  314. package/src/schemas/metadata/MetadataNative.ts +44 -44
  315. package/src/schemas/metadata/MetadataObject.ts +48 -48
  316. package/src/schemas/metadata/MetadataObjectType.ts +149 -149
  317. package/src/schemas/metadata/MetadataParameter.ts +54 -54
  318. package/src/schemas/metadata/MetadataProperty.ts +59 -59
  319. package/src/schemas/metadata/MetadataSet.ts +45 -45
  320. package/src/schemas/metadata/MetadataTemplate.ts +80 -80
  321. package/src/schemas/metadata/MetadataTuple.ts +32 -32
  322. package/src/schemas/protobuf/IProtobufProperty.ts +6 -6
  323. package/src/schemas/protobuf/IProtobufPropertyType.ts +37 -37
  324. package/src/schemas/protobuf/IProtobufSchema.ts +50 -50
  325. package/src/tags/Example.ts +24 -24
  326. package/src/tags/Examples.ts +16 -16
  327. package/src/tags/Format.ts +50 -50
  328. package/src/tags/JsonSchemaPlugin.ts +8 -8
  329. package/src/tags/Sequence.ts +10 -10
  330. package/src/tags/TagBase.ts +82 -82
  331. package/src/tags/Type.ts +32 -32
  332. package/src/tags/UniqueItems.ts +14 -14
  333. package/src/tags/index.ts +21 -21
  334. package/src/transform.ts +35 -35
  335. package/src/transformers/CallExpressionTransformer.ts +547 -547
  336. package/src/transformers/FileTransformer.ts +136 -136
  337. package/src/transformers/IProgrammerProps.ts +11 -11
  338. package/src/transformers/ITransformOptions.ts +62 -62
  339. package/src/transformers/ITransformProps.ts +9 -9
  340. package/src/transformers/ITypiaContext.ts +18 -18
  341. package/src/transformers/ImportTransformer.ts +81 -81
  342. package/src/transformers/NodeTransformer.ts +17 -17
  343. package/src/transformers/TransformerError.ts +60 -60
  344. package/src/transformers/features/AssertTransformer.ts +24 -24
  345. package/src/transformers/features/CreateAssertTransformer.ts +24 -24
  346. package/src/transformers/features/CreateIsTransformer.ts +18 -18
  347. package/src/transformers/features/CreateRandomTransformer.ts +43 -43
  348. package/src/transformers/features/CreateValidateTransformer.ts +18 -18
  349. package/src/transformers/features/IsTransformer.ts +18 -18
  350. package/src/transformers/features/RandomTransformer.ts +41 -41
  351. package/src/transformers/features/ValidateTransformer.ts +18 -18
  352. package/src/transformers/features/functional/FunctionalGenericTransformer.ts +57 -57
  353. package/src/transformers/features/http/CreateHttpAssertFormDataTransformer.ts +13 -13
  354. package/src/transformers/features/http/CreateHttpAssertHeadersTransformer.ts +13 -13
  355. package/src/transformers/features/http/CreateHttpAssertQueryTransformer.ts +13 -13
  356. package/src/transformers/features/http/CreateHttpFormDataTransformer.ts +13 -13
  357. package/src/transformers/features/http/CreateHttpHeadersTransformer.ts +13 -13
  358. package/src/transformers/features/http/CreateHttpIsFormDataTransformer.ts +13 -13
  359. package/src/transformers/features/http/CreateHttpIsHeadersTransformer.ts +13 -13
  360. package/src/transformers/features/http/CreateHttpIsQueryTransformer.ts +13 -13
  361. package/src/transformers/features/http/CreateHttpParameterTransformer.ts +13 -13
  362. package/src/transformers/features/http/CreateHttpQueryTransformer.ts +13 -13
  363. package/src/transformers/features/http/CreateHttpValidateFormDataTransformer.ts +13 -13
  364. package/src/transformers/features/http/CreateHttpValidateHeadersTransformer.ts +13 -13
  365. package/src/transformers/features/http/CreateHttpValidateQueryTransformer.ts +13 -13
  366. package/src/transformers/features/http/HttpAssertFormDataTransformer.ts +13 -13
  367. package/src/transformers/features/http/HttpAssertHeadersTransformer.ts +13 -13
  368. package/src/transformers/features/http/HttpAssertQueryTransformer.ts +13 -13
  369. package/src/transformers/features/http/HttpFormDataTransformer.ts +13 -13
  370. package/src/transformers/features/http/HttpHeadersTransformer.ts +13 -13
  371. package/src/transformers/features/http/HttpIsFormDataTransformer.ts +13 -13
  372. package/src/transformers/features/http/HttpIsHeadersTransformer.ts +13 -13
  373. package/src/transformers/features/http/HttpIsQueryTransformer.ts +13 -13
  374. package/src/transformers/features/http/HttpParameterTransformer.ts +13 -13
  375. package/src/transformers/features/http/HttpQueryTransformer.ts +13 -13
  376. package/src/transformers/features/http/HttpValidateFormDataTransformer.ts +13 -13
  377. package/src/transformers/features/http/HttpValidateHeadersTransformer.ts +13 -13
  378. package/src/transformers/features/http/HttpValidateQueryTransformer.ts +13 -13
  379. package/src/transformers/features/json/JsonApplicationTransformer.ts +105 -105
  380. package/src/transformers/features/json/JsonAssertParseTransformer.ts +13 -13
  381. package/src/transformers/features/json/JsonAssertStringifyTransformer.ts +13 -13
  382. package/src/transformers/features/json/JsonCreateAssertParseTransformer.ts +13 -13
  383. package/src/transformers/features/json/JsonCreateAssertStringifyTransformer.ts +13 -13
  384. package/src/transformers/features/json/JsonCreateIsParseTransformer.ts +13 -13
  385. package/src/transformers/features/json/JsonCreateIsStringifyTransformer.ts +13 -13
  386. package/src/transformers/features/json/JsonCreateStringifyTransformer.ts +13 -13
  387. package/src/transformers/features/json/JsonCreateValidateParseTransformer.ts +13 -13
  388. package/src/transformers/features/json/JsonCreateValidateStringifyProgrammer.ts +13 -13
  389. package/src/transformers/features/json/JsonIsParseTransformer.ts +13 -13
  390. package/src/transformers/features/json/JsonIsStringifyTransformer.ts +13 -13
  391. package/src/transformers/features/json/JsonSchemasTransformer.ts +143 -143
  392. package/src/transformers/features/json/JsonStringifyTransformer.ts +13 -13
  393. package/src/transformers/features/json/JsonValidateParseTransformer.ts +13 -13
  394. package/src/transformers/features/json/JsonValidateStringifyTransformer.ts +13 -13
  395. package/src/transformers/features/llm/LlmApplicationOfValidateTransformer.ts +115 -115
  396. package/src/transformers/features/llm/LlmApplicationTransformer.ts +113 -113
  397. package/src/transformers/features/llm/LlmParametersTransformer.ts +89 -89
  398. package/src/transformers/features/llm/LlmSchemaTransformer.ts +130 -130
  399. package/src/transformers/features/misc/MiscAssertCloneTransformer.ts +13 -13
  400. package/src/transformers/features/misc/MiscAssertPruneTransformer.ts +13 -13
  401. package/src/transformers/features/misc/MiscCloneTransformer.ts +13 -13
  402. package/src/transformers/features/misc/MiscCreateAssertCloneTransformer.ts +13 -13
  403. package/src/transformers/features/misc/MiscCreateAssertPruneTransformer.ts +13 -13
  404. package/src/transformers/features/misc/MiscCreateCloneTransformer.ts +13 -13
  405. package/src/transformers/features/misc/MiscCreateIsCloneTransformer.ts +13 -13
  406. package/src/transformers/features/misc/MiscCreateIsPruneTransformer.ts +13 -13
  407. package/src/transformers/features/misc/MiscCreatePruneTransformer.ts +13 -13
  408. package/src/transformers/features/misc/MiscCreateValidateCloneTransformer.ts +13 -13
  409. package/src/transformers/features/misc/MiscCreateValidatePruneTransformer.ts +13 -13
  410. package/src/transformers/features/misc/MiscIsCloneTransformer.ts +13 -13
  411. package/src/transformers/features/misc/MiscIsPruneTransformer.ts +13 -13
  412. package/src/transformers/features/misc/MiscLiteralsTransformer.ts +35 -35
  413. package/src/transformers/features/misc/MiscPruneTransformer.ts +13 -13
  414. package/src/transformers/features/misc/MiscValidateCloneTransformer.ts +13 -13
  415. package/src/transformers/features/misc/MiscValidatePruneTransformer.ts +13 -13
  416. package/src/transformers/features/notations/NotationAssertGeneralTransformer.ts +20 -20
  417. package/src/transformers/features/notations/NotationCreateAssertGeneralTransformer.ts +20 -20
  418. package/src/transformers/features/notations/NotationCreateGeneralTransformer.ts +20 -20
  419. package/src/transformers/features/notations/NotationCreateIsGeneralTransformer.ts +20 -20
  420. package/src/transformers/features/notations/NotationCreateValidateGeneralTransformer.ts +20 -20
  421. package/src/transformers/features/notations/NotationGeneralTransformer.ts +18 -18
  422. package/src/transformers/features/notations/NotationIsGeneralTransformer.ts +20 -20
  423. package/src/transformers/features/notations/NotationValidateGeneralTransformer.ts +20 -20
  424. package/src/transformers/features/protobuf/ProtobufAssertDecodeTransformer.ts +13 -13
  425. package/src/transformers/features/protobuf/ProtobufAssertEncodeTransformer.ts +13 -13
  426. package/src/transformers/features/protobuf/ProtobufCreateAssertDecodeTransformer.ts +13 -13
  427. package/src/transformers/features/protobuf/ProtobufCreateAssertEncodeTransformer.ts +13 -13
  428. package/src/transformers/features/protobuf/ProtobufCreateDecodeTransformer.ts +13 -13
  429. package/src/transformers/features/protobuf/ProtobufCreateEncodeTransformer.ts +13 -13
  430. package/src/transformers/features/protobuf/ProtobufCreateIsDecodeTransformer.ts +13 -13
  431. package/src/transformers/features/protobuf/ProtobufCreateIsEncodeTransformer.ts +13 -13
  432. package/src/transformers/features/protobuf/ProtobufCreateValidateDecodeTransformer.ts +13 -13
  433. package/src/transformers/features/protobuf/ProtobufCreateValidateEncodeTransformer.ts +13 -13
  434. package/src/transformers/features/protobuf/ProtobufDecodeTransformer.ts +13 -13
  435. package/src/transformers/features/protobuf/ProtobufEncodeTransformer.ts +13 -13
  436. package/src/transformers/features/protobuf/ProtobufIsDecodeTransformer.ts +13 -13
  437. package/src/transformers/features/protobuf/ProtobufIsEncodeTransformer.ts +13 -13
  438. package/src/transformers/features/protobuf/ProtobufMessageTransformer.ts +35 -35
  439. package/src/transformers/features/protobuf/ProtobufValidateDecodeTransformer.ts +13 -13
  440. package/src/transformers/features/protobuf/ProtobufValidateEncodeTransformer.ts +13 -13
  441. package/src/transformers/features/reflect/ReflectMetadataTransformer.ts +69 -69
  442. package/src/transformers/features/reflect/ReflectNameTransformer.ts +82 -82
  443. package/src/transformers/internal/GenericTransformer.ts +101 -101
  444. package/src/utils/MapUtil.ts +14 -14
  445. package/src/utils/NamingConvention.ts +94 -94
  446. package/src/utils/ProtobufNameEncoder.ts +32 -32
  447. package/src/utils/StringUtil.ts +16 -16
package/src/llm.ts CHANGED
@@ -1,481 +1,481 @@
1
- import { ILlmApplication, ILlmSchema } from "@samchon/openapi";
2
-
3
- import { ILlmApplicationOfValidate } from "./module";
4
-
5
- /**
6
- * > You must configure the generic argument `App`.
7
- *
8
- * TypeScript functions to LLM function calling application with validators.
9
- *
10
- * Creates an application of LLM (Large Language Model) function calling application
11
- * from a TypeScript class or interface type containing the target functions to be
12
- * called by the LLM function calling feature.
13
- *
14
- * If you put the returned {@link ILlmApplicationOfValidate.functions} objects to the
15
- * LLM provider like [OpenAI (ChatGPT)](https://openai.com/), the LLM will automatically
16
- * select the proper function and fill its arguments from the conversation
17
- * (maybe chatting text) with user (human). This is the concept of the LLM function calling.
18
- *
19
- * Additionally, the LLM function calling sometimes take a mistake that composing wrong typed
20
- * {@link ILlmFunctionOfValidate.parameters}. In that case, deliver return value of the
21
- * {@link ILlmFunctionOfValidate.validate} function, then LLM provider will correct the
22
- * parameters at the next conversation. The {@link ILlmFunctionOfValidate.validate} function
23
- * is a validator function reporting the detailed information about the wrong typed parameters.
24
- *
25
- * By the way, there can be some parameters (or their nested properties) which must be
26
- * composed by human, not by LLM. File uploading feature or some sensitive information
27
- * like secrety key (password) are the examples. In that case, you can separate the
28
- * function parameters to both LLM and human sides by configuring the
29
- * {@link ILlmApplicationOfValidate.IOptions.separate} property. The separated parameters
30
- * are assigned to the {@link ILlmFunctionOfValidate.separated} property.
31
- *
32
- * For reference, the actual function call execution is not by LLM, but by you.
33
- * When the LLM selects the proper function and fills the arguments, you just call
34
- * the function with the LLM prepared arguments. And then informs the return value to
35
- * the LLM by system prompt. The LLM will continue the next conversation based on
36
- * the return value.
37
- *
38
- * Additionally, if you've configured {@link ILlmApplicationOfValidate.IOptions.separate},
39
- * so that the parameters are separated to human and LLM sides, you can merge these
40
- * humand and LLM sides' parameters into one through {@link HttpLlm.mergeParameters}
41
- * before the actual LLM function call execution.
42
- *
43
- * Here is the list of available `Model` types with their corresponding LLM schema.
44
- * Reading the following list, and determine the `Model` type considering the
45
- * characteristics of the target LLM provider.
46
- *
47
- * - LLM provider schemas
48
- * - `chatgpt`: [`IChatGptSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IChatGptSchema.ts)
49
- * - `claude`: [`IClaudeSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IClaudeSchema.ts)
50
- * - `gemini`: [`IGeminiSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IGeminiSchema.ts)
51
- * - `llama`: [`ILlamaSchema`](https://github.com/samchon/openapi/blob/master/src/structures/ILlamaSchema.ts)
52
- * - Midldle layer schemas
53
- * - `3.0`: [`ILlmSchemaV3`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3.ts)
54
- * - `3.1`: [`ILlmSchemaV3_1`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3_1.ts)
55
- *
56
- * @template App Target class or interface type collecting the functions to call
57
- * @template Model LLM schema model
58
- * @template Config Configuration of LLM schema composition
59
- * @param options Options for the LLM application construction
60
- * @returns Application of LLM function calling schemas
61
- * @reference https://platform.openai.com/docs/guides/function-calling
62
- * @author Jeongho Nam - https://github.com/samchon
63
- */
64
- export function applicationOfValidate(
65
- options?: Partial<Pick<ILlmApplicationOfValidate.IOptions<any>, "separate">>,
66
- ): never;
67
-
68
- /**
69
- * TypeScript functions to LLM function calling application with validators.
70
- *
71
- * Creates an application of LLM (Large Language Model) function calling application
72
- * from a TypeScript class or interface type containing the target functions to be
73
- * called by the LLM function calling feature.
74
- *
75
- * If you put the returned {@link ILlmApplicationOfValidate.functions} objects to the
76
- * LLM provider like [OpenAI (ChatGPT)](https://openai.com/), the LLM will automatically
77
- * select the proper function and fill its arguments from the conversation
78
- * (maybe chatting text) with user (human). This is the concept of the LLM function calling.
79
- *
80
- * Additionally, the LLM function calling sometimes take a mistake that composing wrong typed
81
- * {@link ILlmFunctionOfValidate.parameters}. In that case, deliver return value of the
82
- * {@link ILlmFunctionOfValidate.validate} function, then LLM provider will correct the
83
- * parameters at the next conversation. The {@link ILlmFunctionOfValidate.validate} function
84
- * is a validator function reporting the detailed information about the wrong typed parameters.
85
- *
86
- * By the way, there can be some parameters (or their nested properties) which must be
87
- * composed by human, not by LLM. File uploading feature or some sensitive information
88
- * like secrety key (password) are the examples. In that case, you can separate the
89
- * function parameters to both LLM and human sides by configuring the
90
- * {@link ILlmApplicationOfValidate.IOptions.separate} property. The separated parameters
91
- * are assigned to the {@link ILlmFunctionOfValidate.separated} property.
92
- *
93
- * For reference, the actual function call execution is not by LLM, but by you.
94
- * When the LLM selects the proper function and fills the arguments, you just call
95
- * the function with the LLM prepared arguments. And then informs the return value to
96
- * the LLM by system prompt. The LLM will continue the next conversation based on
97
- * the return value.
98
- *
99
- * Additionally, if you've configured {@link ILlmApplicationOfValidate.IOptions.separate},
100
- * so that the parameters are separated to human and LLM sides, you can merge these
101
- * humand and LLM sides' parameters into one through {@link HttpLlm.mergeParameters}
102
- * before the actual LLM function call execution.
103
- *
104
- * Here is the list of available `Model` types with their corresponding LLM schema.
105
- * Reading the following list, and determine the `Model` type considering the
106
- * characteristics of the target LLM provider.
107
- *
108
- * - LLM provider schemas
109
- * - `chatgpt`: [`IChatGptSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IChatGptSchema.ts)
110
- * - `claude`: [`IClaudeSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IClaudeSchema.ts)
111
- * - `gemini`: [`IGeminiSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IGeminiSchema.ts)
112
- * - `llama`: [`ILlamaSchema`](https://github.com/samchon/openapi/blob/master/src/structures/ILlamaSchema.ts)
113
- * - Midldle layer schemas
114
- * - `3.0`: [`ILlmSchemaV3`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3.ts)
115
- * - `3.1`: [`ILlmSchemaV3_1`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3_1.ts)
116
- *
117
- * @template App Target class or interface type collecting the functions to call
118
- * @template Model LLM schema model
119
- * @template Config Configuration of LLM schema composition
120
- * @param options Options for the LLM application construction
121
- * @returns Application of LLM function calling schemas
122
- * @reference https://platform.openai.com/docs/guides/function-calling
123
- * @author Jeongho Nam - https://github.com/samchon
124
- */
125
- export function applicationOfValidate<
126
- App extends Record<string, any>,
127
- Model extends ILlmSchema.Model,
128
- Config extends Partial<ILlmSchema.ModelConfig[Model]> = {},
129
- >(
130
- options?: Partial<
131
- Pick<ILlmApplicationOfValidate.IOptions<Model>, "separate">
132
- >,
133
- ): ILlmApplicationOfValidate<Model>;
134
-
135
- /**
136
- * @internal
137
- */
138
- export function applicationOfValidate(): never {
139
- halt("applicationOfValidate");
140
- }
141
-
142
- /**
143
- * > You must configure the generic argument `App`.
144
- *
145
- * TypeScript functions to LLM function calling application.
146
- *
147
- * Creates an application of LLM (Large Language Model) function calling application
148
- * from a TypeScript class or interface type containing the target functions to be
149
- * called by the LLM function calling feature.
150
- *
151
- * If you put the returned {@link ILlmApplication.functions} objects to the LLM provider
152
- * like [OpenAI (ChatGPT)](https://openai.com/), the LLM will automatically select the
153
- * proper function and fill its arguments from the conversation (maybe chatting text)
154
- * with user (human). This is the concept of the LLM function calling.
155
- *
156
- * By the way, there can be some parameters (or their nested properties) which must be
157
- * composed by human, not by LLM. File uploading feature or some sensitive information
158
- * like secrety key (password) are the examples. In that case, you can separate the
159
- * function parameters to both LLM and human sides by configuring the
160
- * {@link ILlmApplication.IOptions.separate} property. The separated parameters are
161
- * assigned to the {@link ILlmFunction.separated} property.
162
- *
163
- * For reference, the actual function call execution is not by LLM, but by you.
164
- * When the LLM selects the proper function and fills the arguments, you just call
165
- * the function with the LLM prepared arguments. And then informs the return value to
166
- * the LLM by system prompt. The LLM will continue the next conversation based on
167
- * the return value.
168
- *
169
- * Additionally, if you've configured {@link ILlmApplication.IOptions.separate},
170
- * so that the parameters are separated to human and LLM sides, you can merge these
171
- * humand and LLM sides' parameters into one through {@link HttpLlm.mergeParameters}
172
- * before the actual LLM function call execution.
173
- *
174
- * Here is the list of available `Model` types with their corresponding LLM schema.
175
- * Reading the following list, and determine the `Model` type considering the
176
- * characteristics of the target LLM provider.
177
- *
178
- * - LLM provider schemas
179
- * - `chatgpt`: [`IChatGptSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IChatGptSchema.ts)
180
- * - `claude`: [`IClaudeSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IClaudeSchema.ts)
181
- * - `gemini`: [`IGeminiSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IGeminiSchema.ts)
182
- * - `llama`: [`ILlamaSchema`](https://github.com/samchon/openapi/blob/master/src/structures/ILlamaSchema.ts)
183
- * - Midldle layer schemas
184
- * - `3.0`: [`ILlmSchemaV3`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3.ts)
185
- * - `3.1`: [`ILlmSchemaV3_1`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3_1.ts)
186
- *
187
- * @template App Target class or interface type collecting the functions to call
188
- * @template Model LLM schema model
189
- * @template Config Configuration of LLM schema composition
190
- * @param options Options for the LLM application construction
191
- * @returns Application of LLM function calling schemas
192
- * @reference https://platform.openai.com/docs/guides/function-calling
193
- * @author Jeongho Nam - https://github.com/samchon
194
- */
195
- export function application(
196
- options?: Partial<Pick<ILlmApplication.IOptions<any>, "separate">>,
197
- ): never;
198
-
199
- /**
200
- * TypeScript functions to LLM function calling application.
201
- *
202
- * Creates an application of LLM (Large Language Model) function calling application
203
- * from a TypeScript class or interface type containing the target functions to be
204
- * called by the LLM function calling feature.
205
- *
206
- * If you put the returned {@link ILlmApplication.functions} objects to the LLM provider
207
- * like [OpenAI (ChatGPT)](https://openai.com/), the LLM will automatically select the
208
- * proper function and fill its arguments from the conversation (maybe chatting text)
209
- * with user (human). This is the concept of the LLM function calling.
210
- *
211
- * By the way, there can be some parameters (or their nested properties) which must be
212
- * composed by human, not by LLM. File uploading feature or some sensitive information
213
- * like secrety key (password) are the examples. In that case, you can separate the
214
- * function parameters to both LLM and human sides by configuring the
215
- * {@link ILlmApplication.IOptions.separate} property. The separated parameters are
216
- * assigned to the {@link ILlmFunction.separated} property.
217
- *
218
- * For reference, the actual function call execution is not by LLM, but by you.
219
- * When the LLM selects the proper function and fills the arguments, you just call
220
- * the function with the LLM prepared arguments. And then informs the return value to
221
- * the LLM by system prompt. The LLM will continue the next conversation based on
222
- * the return value.
223
- *
224
- * Additionally, if you've configured {@link ILlmApplication.IOptions.separate},
225
- * so that the parameters are separated to human and LLM sides, you can merge these
226
- * humand and LLM sides' parameters into one through {@link HttpLlm.mergeParameters}
227
- * before the actual LLM function call execution.
228
- *
229
- * Here is the list of available `Model` types with their corresponding LLM schema.
230
- * Reading the following list, and determine the `Model` type considering the
231
- * characteristics of the target LLM provider.
232
- *
233
- * - LLM provider schemas
234
- * - `chatgpt`: [`IChatGptSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IChatGptSchema.ts)
235
- * - `claude`: [`IClaudeSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IClaudeSchema.ts)
236
- * - `gemini`: [`IGeminiSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IGeminiSchema.ts)
237
- * - `llama`: [`ILlamaSchema`](https://github.com/samchon/openapi/blob/master/src/structures/ILlamaSchema.ts)
238
- * - Midldle layer schemas
239
- * - `3.0`: [`ILlmSchemaV3`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3.ts)
240
- * - `3.1`: [`ILlmSchemaV3_1`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3_1.ts)
241
- *
242
- * @template App Target class or interface type collecting the functions to call
243
- * @template Model LLM schema model
244
- * @template Config Configuration of LLM schema composition
245
- * @param options Options for the LLM application construction
246
- * @returns Application of LLM function calling schemas
247
- * @reference https://platform.openai.com/docs/guides/function-calling
248
- * @author Jeongho Nam - https://github.com/samchon
249
- */
250
- export function application<
251
- App extends Record<string, any>,
252
- Model extends ILlmSchema.Model,
253
- Config extends Partial<ILlmSchema.ModelConfig[Model]> = {},
254
- >(
255
- options?: Partial<Pick<ILlmApplication.IOptions<Model>, "separate">>,
256
- ): ILlmApplication<Model>;
257
-
258
- /**
259
- * @internal
260
- */
261
- export function application(): never {
262
- halt("application");
263
- }
264
-
265
- /**
266
- * > You must configure the generic argument `Parameters`.
267
- *
268
- * TypeScript parameters to LLM parameters schema.
269
- *
270
- * Creates an LLM (Large Language Model) parameters schema, a type metadata that is used in the
271
- * [LLM function calling](https://platform.openai.com/docs/guides/function-calling)
272
- * and [LLM structured outputs](https://platform.openai.com/docs/guides/structured-outputs),
273
- * from a TypeScript parameters type.
274
- *
275
- * For references, LLM identifies only keyworded arguments, not positional arguments.
276
- * Therefore, the TypeScript parameters type must be an object type, and its properties
277
- * must be static. If dynamic properties are, it would be compilation error.
278
- *
279
- * Also, such parameters type can be utilized not only for the LLM function calling,
280
- * but also for the LLM structured outputs. The LLM structured outputs is a feature
281
- * that LLM (Large Language Model) can generate a structured output, not only a plain
282
- * text, by filling the parameters from the conversation (maybe chatting text) with user
283
- * (human).
284
- *
285
- * Here is the list of available `Model` types with their corresponding LLM schema.
286
- * Reading the following list, and determine the `Model` type considering the
287
- * characteristics of the target LLM provider.
288
- *
289
- * - LLM provider schemas
290
- * - `chatgpt`: [`IChatGptSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IChatGptSchema.ts)
291
- * - `claude`: [`IClaudeSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IClaudeSchema.ts)
292
- * - `gemini`: [`IGeminiSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IGeminiSchema.ts)
293
- * - `llama`: [`ILlamaSchema`](https://github.com/samchon/openapi/blob/master/src/structures/ILlamaSchema.ts)
294
- * - Midldle layer schemas
295
- * - `3.0`: [`ILlmSchemaV3`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3.ts)
296
- * - `3.1`: [`ILlmSchemaV3_1`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3_1.ts)
297
- *
298
- * @template Parameters Target parameters type
299
- * @template Model LLM schema model
300
- * @template Config Configuration of LLM schema composition
301
- * @returns LLM parameters schema
302
- * @reference https://platform.openai.com/docs/guides/function-calling
303
- * @reference https://platform.openai.com/docs/guides/structured-outputs
304
- */
305
- export function parameters(): never;
306
-
307
- /**
308
- * TypeScript parameters to LLM parameters schema.
309
- *
310
- * Creates an LLM (Large Language Model) parameters schema, a type metadata that is used in the
311
- * [LLM function calling](https://platform.openai.com/docs/guides/function-calling)
312
- * and [LLM structured outputs](https://platform.openai.com/docs/guides/structured-outputs),
313
- * from a TypeScript parameters type.
314
- *
315
- * For references, LLM identifies only keyworded arguments, not positional arguments.
316
- * Therefore, the TypeScript parameters type must be an object type, and its properties
317
- * must be static. If dynamic properties are, it would be compilation error.
318
- *
319
- * Also, such parameters type can be utilized not only for the LLM function calling,
320
- * but also for the LLM structured outputs. The LLM structured outputs is a feature
321
- * that LLM (Large Language Model) can generate a structured output, not only a plain
322
- * text, by filling the parameters from the conversation (maybe chatting text) with user
323
- * (human).
324
- *
325
- * Here is the list of available `Model` types with their corresponding LLM schema.
326
- * Reading the following list, and determine the `Model` type considering the
327
- * characteristics of the target LLM provider.
328
- *
329
- * - LLM provider schemas
330
- * - `chatgpt`: [`IChatGptSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IChatGptSchema.ts)
331
- * - `claude`: [`IClaudeSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IClaudeSchema.ts)
332
- * - `gemini`: [`IGeminiSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IGeminiSchema.ts)
333
- * - `llama`: [`ILlamaSchema`](https://github.com/samchon/openapi/blob/master/src/structures/ILlamaSchema.ts)
334
- * - Midldle layer schemas
335
- * - `3.0`: [`ILlmSchemaV3`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3.ts)
336
- * - `3.1`: [`ILlmSchemaV3_1`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3_1.ts)
337
- *
338
- * @template Parameters Target parameters type
339
- * @template Model LLM schema model
340
- * @template Config Configuration of LLM schema composition
341
- * @returns LLM parameters schema
342
- * @reference https://platform.openai.com/docs/guides/function-calling
343
- * @reference https://platform.openai.com/docs/guides/structured-outputs
344
- */
345
- export function parameters<
346
- Parameters extends Record<string, any>,
347
- Model extends ILlmSchema.Model,
348
- Config extends Partial<ILlmSchema.ModelConfig[Model]> = {},
349
- >(): ILlmSchema.ModelParameters[Model];
350
-
351
- /**
352
- * @internal
353
- */
354
- export function parameters(): never {
355
- halt("parameters");
356
- }
357
-
358
- /**
359
- * > You must configure the generic argument `T`.
360
- *
361
- * TypeScript type to LLM type schema.
362
- *
363
- * Creates an LLM (Large Language Model) type schema, a type metadata that is used in the
364
- * [LLM function calling](@reference https://platform.openai.com/docs/guides/function-calling),
365
- * from a TypeScript type.
366
- *
367
- * The returned {@link ILlmSchema} type would be specified by the `Model` argument,
368
- * and here is the list of available `Model` types with their corresponding LLM schema.
369
- * Reading the following list, and determine the `Model` type considering the
370
- * characteristics of the target LLM provider.
371
- *
372
- * - LLM provider schemas
373
- * - `chatgpt`: [`IChatGptSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IChatGptSchema.ts)
374
- * - `claude`: [`IClaudeSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IClaudeSchema.ts)
375
- * - `gemini`: [`IGeminiSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IGeminiSchema.ts)
376
- * - `llama`: [`ILlamaSchema`](https://github.com/samchon/openapi/blob/master/src/structures/ILlamaSchema.ts)
377
- * - Midldle layer schemas
378
- * - `3.0`: [`ILlmSchemaV3`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3.ts)
379
- * - `3.1`: [`ILlmSchemaV3_1`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3_1.ts)
380
- *
381
- * If you actually want to perform the LLM function calling with TypeScript functions,
382
- * you can do it with the {@link application} function. Otherwise you hope to perform the
383
- * structured output, {@link parameters} function is better. Let's enjoy the LLM function calling
384
- * and structured output with the native TypeScript functions and types.
385
- *
386
- * > **What LLM function calling is?
387
- * >
388
- * > LLM (Large Language Model) selects propert function and fill the arguments,
389
- * > but actuall function call execution is not by LLM, but by you.
390
- * >
391
- * > In nowadays, most LLM (Large Language Model) like OpenAI are supporting
392
- * > "function calling" feature. The "function calling" means that LLM automatically selects
393
- * > a proper function and compose parameter values from the user's chatting text.
394
- * >
395
- * > When LLM selects the proper function and its arguments, you just call the function
396
- * > with the arguments. And then informs the return value to the LLM by system prompt,
397
- * > LLM will continue the next conversation based on the return value.
398
- *
399
- * @template T Target type
400
- * @template Model LLM schema model
401
- * @template Config Configuration of LLM schema composition
402
- * @returns LLM schema
403
- * @reference https://platform.openai.com/docs/guides/function-calling
404
- * @reference https://platform.openai.com/docs/guides/structured-outputs
405
- * @author Jeongho Nam - https://github.com/samchon
406
- */
407
- export function schema(): never;
408
-
409
- /**
410
- * TypeScript type to LLM type schema.
411
- *
412
- * Creates an LLM (Large Language Model) type schema, a type metadata that is used in the
413
- * [LLM function calling](@reference https://platform.openai.com/docs/guides/function-calling),
414
- * from a TypeScript type.
415
- *
416
- * The returned {@link ILlmSchema} type would be specified by the `Model` argument,
417
- * and here is the list of available `Model` types with their corresponding LLM schema:
418
- *
419
- * - LLM provider schemas
420
- * - `chatgpt`: [`IChatGptSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IChatGptSchema.ts)
421
- * - `claude`: [`IClaudeSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IClaudeSchema.ts)
422
- * - `gemini`: [`IGeminiSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IGeminiSchema.ts)
423
- * - `llama`: [`ILlamaSchema`](https://github.com/samchon/openapi/blob/master/src/structures/ILlamaSchema.ts)
424
- * - Midldle layer schemas
425
- * - `3.0`: [`ILlmSchemaV3`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3.ts)
426
- * - `3.1`: [`ILlmSchemaV3_1`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3_1.ts)
427
- *
428
- * If you actually want to perform the LLM function calling with TypeScript functions,
429
- * you can do it with the {@link application} function. Otherwise you hope to perform the
430
- * structured output, {@link parameters} function is better. Let's enjoy the LLM function calling
431
- * and structured output with the native TypeScript functions and types.
432
- *
433
- * > **What LLM function calling is?
434
- * >
435
- * > LLM (Large Language Model) selects propert function and fill the arguments,
436
- * > but actuall function call execution is not by LLM, but by you.
437
- * >
438
- * > In nowadays, most LLM (Large Language Model) like OpenAI are supporting
439
- * > "function calling" feature. The "function calling" means that LLM automatically selects
440
- * > a proper function and compose parameter values from the user's chatting text.
441
- * >
442
- * > When LLM selects the proper function and its arguments, you just call the function
443
- * > with the arguments. And then informs the return value to the LLM by system prompt,
444
- * > LLM will continue the next conversation based on the return value.
445
- *
446
- * @template T Target type
447
- * @template Model LLM schema model
448
- * @template Config Configuration of LLM schema composition
449
- * @returns LLM schema
450
- * @reference https://platform.openai.com/docs/guides/function-calling
451
- * @reference https://platform.openai.com/docs/guides/structured-outputs
452
- * @author Jeongho Nam - https://github.com/samchon
453
- */
454
- export function schema<
455
- T,
456
- Model extends ILlmSchema.Model,
457
- Config extends Partial<ILlmSchema.ModelConfig[Model]> = {},
458
- >(
459
- ...$defs: Extract<
460
- ILlmSchema.ModelSchema[Model],
461
- { $ref: string }
462
- > extends never
463
- ? []
464
- : [Record<string, ILlmSchema.ModelSchema[Model]>]
465
- ): ILlmSchema.ModelSchema[Model];
466
-
467
- /**
468
- * @internal
469
- */
470
- export function schema(): never {
471
- halt("schema");
472
- }
473
-
474
- /**
475
- * @internal
476
- */
477
- function halt(name: string): never {
478
- throw new Error(
479
- `Error on typia.llm.${name}(): no transform has been configured. Read and follow https://typia.io/docs/setup please.`,
480
- );
481
- }
1
+ import { ILlmApplication, ILlmSchema } from "@samchon/openapi";
2
+
3
+ import { ILlmApplicationOfValidate } from "./module";
4
+
5
+ /**
6
+ * > You must configure the generic argument `App`.
7
+ *
8
+ * TypeScript functions to LLM function calling application with validators.
9
+ *
10
+ * Creates an application of LLM (Large Language Model) function calling application
11
+ * from a TypeScript class or interface type containing the target functions to be
12
+ * called by the LLM function calling feature.
13
+ *
14
+ * If you put the returned {@link ILlmApplicationOfValidate.functions} objects to the
15
+ * LLM provider like [OpenAI (ChatGPT)](https://openai.com/), the LLM will automatically
16
+ * select the proper function and fill its arguments from the conversation
17
+ * (maybe chatting text) with user (human). This is the concept of the LLM function calling.
18
+ *
19
+ * Additionally, the LLM function calling sometimes take a mistake that composing wrong typed
20
+ * {@link ILlmFunctionOfValidate.parameters}. In that case, deliver return value of the
21
+ * {@link ILlmFunctionOfValidate.validate} function, then LLM provider will correct the
22
+ * parameters at the next conversation. The {@link ILlmFunctionOfValidate.validate} function
23
+ * is a validator function reporting the detailed information about the wrong typed parameters.
24
+ *
25
+ * By the way, there can be some parameters (or their nested properties) which must be
26
+ * composed by human, not by LLM. File uploading feature or some sensitive information
27
+ * like secrety key (password) are the examples. In that case, you can separate the
28
+ * function parameters to both LLM and human sides by configuring the
29
+ * {@link ILlmApplicationOfValidate.IOptions.separate} property. The separated parameters
30
+ * are assigned to the {@link ILlmFunctionOfValidate.separated} property.
31
+ *
32
+ * For reference, the actual function call execution is not by LLM, but by you.
33
+ * When the LLM selects the proper function and fills the arguments, you just call
34
+ * the function with the LLM prepared arguments. And then informs the return value to
35
 * the LLM by system prompt. The LLM will continue the next conversation based on
 * the return value.
 *
 * Additionally, if you've configured {@link ILlmApplicationOfValidate.IOptions.separate},
 * so that the parameters are separated to human and LLM sides, you can merge these
 * human and LLM sides' parameters into one through {@link HttpLlm.mergeParameters}
 * before the actual LLM function call execution.
 *
 * Here is the list of available `Model` types with their corresponding LLM schema.
 * Read the following list, and determine the `Model` type considering the
 * characteristics of the target LLM provider.
 *
 * - LLM provider schemas
 *   - `chatgpt`: [`IChatGptSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IChatGptSchema.ts)
 *   - `claude`: [`IClaudeSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IClaudeSchema.ts)
 *   - `gemini`: [`IGeminiSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IGeminiSchema.ts)
 *   - `llama`: [`ILlamaSchema`](https://github.com/samchon/openapi/blob/master/src/structures/ILlamaSchema.ts)
 * - Middle layer schemas
 *   - `3.0`: [`ILlmSchemaV3`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3.ts)
 *   - `3.1`: [`ILlmSchemaV3_1`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3_1.ts)
 *
 * @template App Target class or interface type collecting the functions to call
 * @template Model LLM schema model
 * @template Config Configuration of LLM schema composition
 * @param options Options for the LLM application construction
 * @returns Application of LLM function calling schemas
 * @reference https://platform.openai.com/docs/guides/function-calling
 * @author Jeongho Nam - https://github.com/samchon
 */
export function applicationOfValidate(
  options?: Partial<Pick<ILlmApplicationOfValidate.IOptions<any>, "separate">>,
): never;
67
+
68
/**
 * TypeScript functions to LLM function calling application with validators.
 *
 * Creates an application of LLM (Large Language Model) function calling application
 * from a TypeScript class or interface type containing the target functions to be
 * called by the LLM function calling feature.
 *
 * If you put the returned {@link ILlmApplicationOfValidate.functions} objects to the
 * LLM provider like [OpenAI (ChatGPT)](https://openai.com/), the LLM will automatically
 * select the proper function and fill its arguments from the conversation
 * (maybe chatting text) with user (human). This is the concept of the LLM function calling.
 *
 * Additionally, the LLM function calling sometimes makes a mistake of composing wrong typed
 * {@link ILlmFunctionOfValidate.parameters}. In that case, deliver the return value of the
 * {@link ILlmFunctionOfValidate.validate} function, then the LLM provider will correct the
 * parameters at the next conversation. The {@link ILlmFunctionOfValidate.validate} function
 * is a validator function reporting the detailed information about the wrong typed parameters.
 *
 * By the way, there can be some parameters (or their nested properties) which must be
 * composed by human, not by LLM. File uploading feature or some sensitive information
 * like secret key (password) are the examples. In that case, you can separate the
 * function parameters to both LLM and human sides by configuring the
 * {@link ILlmApplicationOfValidate.IOptions.separate} property. The separated parameters
 * are assigned to the {@link ILlmFunctionOfValidate.separated} property.
 *
 * For reference, the actual function call execution is not by LLM, but by you.
 * When the LLM selects the proper function and fills the arguments, you just call
 * the function with the LLM prepared arguments. And then informs the return value to
 * the LLM by system prompt. The LLM will continue the next conversation based on
 * the return value.
 *
 * Additionally, if you've configured {@link ILlmApplicationOfValidate.IOptions.separate},
 * so that the parameters are separated to human and LLM sides, you can merge these
 * human and LLM sides' parameters into one through {@link HttpLlm.mergeParameters}
 * before the actual LLM function call execution.
 *
 * Here is the list of available `Model` types with their corresponding LLM schema.
 * Read the following list, and determine the `Model` type considering the
 * characteristics of the target LLM provider.
 *
 * - LLM provider schemas
 *   - `chatgpt`: [`IChatGptSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IChatGptSchema.ts)
 *   - `claude`: [`IClaudeSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IClaudeSchema.ts)
 *   - `gemini`: [`IGeminiSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IGeminiSchema.ts)
 *   - `llama`: [`ILlamaSchema`](https://github.com/samchon/openapi/blob/master/src/structures/ILlamaSchema.ts)
 * - Middle layer schemas
 *   - `3.0`: [`ILlmSchemaV3`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3.ts)
 *   - `3.1`: [`ILlmSchemaV3_1`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3_1.ts)
 *
 * @template App Target class or interface type collecting the functions to call
 * @template Model LLM schema model
 * @template Config Configuration of LLM schema composition
 * @param options Options for the LLM application construction
 * @returns Application of LLM function calling schemas
 * @reference https://platform.openai.com/docs/guides/function-calling
 * @author Jeongho Nam - https://github.com/samchon
 */
export function applicationOfValidate<
  App extends Record<string, any>,
  Model extends ILlmSchema.Model,
  Config extends Partial<ILlmSchema.ModelConfig[Model]> = {},
>(
  options?: Partial<
    Pick<ILlmApplicationOfValidate.IOptions<Model>, "separate">
  >,
): ILlmApplicationOfValidate<Model>;
134
+
135
+ /**
136
+ * @internal
137
+ */
138
+ export function applicationOfValidate(): never {
139
+ halt("applicationOfValidate");
140
+ }
141
+
142
/**
 * > You must configure the generic argument `App`.
 *
 * TypeScript functions to LLM function calling application.
 *
 * Creates an application of LLM (Large Language Model) function calling application
 * from a TypeScript class or interface type containing the target functions to be
 * called by the LLM function calling feature.
 *
 * If you put the returned {@link ILlmApplication.functions} objects to the LLM provider
 * like [OpenAI (ChatGPT)](https://openai.com/), the LLM will automatically select the
 * proper function and fill its arguments from the conversation (maybe chatting text)
 * with user (human). This is the concept of the LLM function calling.
 *
 * By the way, there can be some parameters (or their nested properties) which must be
 * composed by human, not by LLM. File uploading feature or some sensitive information
 * like secret key (password) are the examples. In that case, you can separate the
 * function parameters to both LLM and human sides by configuring the
 * {@link ILlmApplication.IOptions.separate} property. The separated parameters are
 * assigned to the {@link ILlmFunction.separated} property.
 *
 * For reference, the actual function call execution is not by LLM, but by you.
 * When the LLM selects the proper function and fills the arguments, you just call
 * the function with the LLM prepared arguments. And then informs the return value to
 * the LLM by system prompt. The LLM will continue the next conversation based on
 * the return value.
 *
 * Additionally, if you've configured {@link ILlmApplication.IOptions.separate},
 * so that the parameters are separated to human and LLM sides, you can merge these
 * human and LLM sides' parameters into one through {@link HttpLlm.mergeParameters}
 * before the actual LLM function call execution.
 *
 * Here is the list of available `Model` types with their corresponding LLM schema.
 * Read the following list, and determine the `Model` type considering the
 * characteristics of the target LLM provider.
 *
 * - LLM provider schemas
 *   - `chatgpt`: [`IChatGptSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IChatGptSchema.ts)
 *   - `claude`: [`IClaudeSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IClaudeSchema.ts)
 *   - `gemini`: [`IGeminiSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IGeminiSchema.ts)
 *   - `llama`: [`ILlamaSchema`](https://github.com/samchon/openapi/blob/master/src/structures/ILlamaSchema.ts)
 * - Middle layer schemas
 *   - `3.0`: [`ILlmSchemaV3`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3.ts)
 *   - `3.1`: [`ILlmSchemaV3_1`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3_1.ts)
 *
 * @template App Target class or interface type collecting the functions to call
 * @template Model LLM schema model
 * @template Config Configuration of LLM schema composition
 * @param options Options for the LLM application construction
 * @returns Application of LLM function calling schemas
 * @reference https://platform.openai.com/docs/guides/function-calling
 * @author Jeongho Nam - https://github.com/samchon
 */
export function application(
  options?: Partial<Pick<ILlmApplication.IOptions<any>, "separate">>,
): never;
198
+
199
/**
 * TypeScript functions to LLM function calling application.
 *
 * Creates an application of LLM (Large Language Model) function calling application
 * from a TypeScript class or interface type containing the target functions to be
 * called by the LLM function calling feature.
 *
 * If you put the returned {@link ILlmApplication.functions} objects to the LLM provider
 * like [OpenAI (ChatGPT)](https://openai.com/), the LLM will automatically select the
 * proper function and fill its arguments from the conversation (maybe chatting text)
 * with user (human). This is the concept of the LLM function calling.
 *
 * By the way, there can be some parameters (or their nested properties) which must be
 * composed by human, not by LLM. File uploading feature or some sensitive information
 * like secret key (password) are the examples. In that case, you can separate the
 * function parameters to both LLM and human sides by configuring the
 * {@link ILlmApplication.IOptions.separate} property. The separated parameters are
 * assigned to the {@link ILlmFunction.separated} property.
 *
 * For reference, the actual function call execution is not by LLM, but by you.
 * When the LLM selects the proper function and fills the arguments, you just call
 * the function with the LLM prepared arguments. And then informs the return value to
 * the LLM by system prompt. The LLM will continue the next conversation based on
 * the return value.
 *
 * Additionally, if you've configured {@link ILlmApplication.IOptions.separate},
 * so that the parameters are separated to human and LLM sides, you can merge these
 * human and LLM sides' parameters into one through {@link HttpLlm.mergeParameters}
 * before the actual LLM function call execution.
 *
 * Here is the list of available `Model` types with their corresponding LLM schema.
 * Read the following list, and determine the `Model` type considering the
 * characteristics of the target LLM provider.
 *
 * - LLM provider schemas
 *   - `chatgpt`: [`IChatGptSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IChatGptSchema.ts)
 *   - `claude`: [`IClaudeSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IClaudeSchema.ts)
 *   - `gemini`: [`IGeminiSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IGeminiSchema.ts)
 *   - `llama`: [`ILlamaSchema`](https://github.com/samchon/openapi/blob/master/src/structures/ILlamaSchema.ts)
 * - Middle layer schemas
 *   - `3.0`: [`ILlmSchemaV3`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3.ts)
 *   - `3.1`: [`ILlmSchemaV3_1`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3_1.ts)
 *
 * @template App Target class or interface type collecting the functions to call
 * @template Model LLM schema model
 * @template Config Configuration of LLM schema composition
 * @param options Options for the LLM application construction
 * @returns Application of LLM function calling schemas
 * @reference https://platform.openai.com/docs/guides/function-calling
 * @author Jeongho Nam - https://github.com/samchon
 */
export function application<
  App extends Record<string, any>,
  Model extends ILlmSchema.Model,
  Config extends Partial<ILlmSchema.ModelConfig[Model]> = {},
>(
  options?: Partial<Pick<ILlmApplication.IOptions<Model>, "separate">>,
): ILlmApplication<Model>;
257
+
258
+ /**
259
+ * @internal
260
+ */
261
+ export function application(): never {
262
+ halt("application");
263
+ }
264
+
265
/**
 * > You must configure the generic argument `Parameters`.
 *
 * TypeScript parameters to LLM parameters schema.
 *
 * Creates an LLM (Large Language Model) parameters schema, a type metadata that is used in the
 * [LLM function calling](https://platform.openai.com/docs/guides/function-calling)
 * and [LLM structured outputs](https://platform.openai.com/docs/guides/structured-outputs),
 * from a TypeScript parameters type.
 *
 * For reference, LLM identifies only keyworded arguments, not positional arguments.
 * Therefore, the TypeScript parameters type must be an object type, and its properties
 * must be static. If there are dynamic properties, it would be a compilation error.
 *
 * Also, such parameters type can be utilized not only for the LLM function calling,
 * but also for the LLM structured outputs. The LLM structured outputs is a feature
 * that LLM (Large Language Model) can generate a structured output, not only a plain
 * text, by filling the parameters from the conversation (maybe chatting text) with user
 * (human).
 *
 * Here is the list of available `Model` types with their corresponding LLM schema.
 * Read the following list, and determine the `Model` type considering the
 * characteristics of the target LLM provider.
 *
 * - LLM provider schemas
 *   - `chatgpt`: [`IChatGptSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IChatGptSchema.ts)
 *   - `claude`: [`IClaudeSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IClaudeSchema.ts)
 *   - `gemini`: [`IGeminiSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IGeminiSchema.ts)
 *   - `llama`: [`ILlamaSchema`](https://github.com/samchon/openapi/blob/master/src/structures/ILlamaSchema.ts)
 * - Middle layer schemas
 *   - `3.0`: [`ILlmSchemaV3`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3.ts)
 *   - `3.1`: [`ILlmSchemaV3_1`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3_1.ts)
 *
 * @template Parameters Target parameters type
 * @template Model LLM schema model
 * @template Config Configuration of LLM schema composition
 * @returns LLM parameters schema
 * @reference https://platform.openai.com/docs/guides/function-calling
 * @reference https://platform.openai.com/docs/guides/structured-outputs
 */
export function parameters(): never;
306
+
307
/**
 * TypeScript parameters to LLM parameters schema.
 *
 * Creates an LLM (Large Language Model) parameters schema, a type metadata that is used in the
 * [LLM function calling](https://platform.openai.com/docs/guides/function-calling)
 * and [LLM structured outputs](https://platform.openai.com/docs/guides/structured-outputs),
 * from a TypeScript parameters type.
 *
 * For reference, LLM identifies only keyworded arguments, not positional arguments.
 * Therefore, the TypeScript parameters type must be an object type, and its properties
 * must be static. If there are dynamic properties, it would be a compilation error.
 *
 * Also, such parameters type can be utilized not only for the LLM function calling,
 * but also for the LLM structured outputs. The LLM structured outputs is a feature
 * that LLM (Large Language Model) can generate a structured output, not only a plain
 * text, by filling the parameters from the conversation (maybe chatting text) with user
 * (human).
 *
 * Here is the list of available `Model` types with their corresponding LLM schema.
 * Read the following list, and determine the `Model` type considering the
 * characteristics of the target LLM provider.
 *
 * - LLM provider schemas
 *   - `chatgpt`: [`IChatGptSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IChatGptSchema.ts)
 *   - `claude`: [`IClaudeSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IClaudeSchema.ts)
 *   - `gemini`: [`IGeminiSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IGeminiSchema.ts)
 *   - `llama`: [`ILlamaSchema`](https://github.com/samchon/openapi/blob/master/src/structures/ILlamaSchema.ts)
 * - Middle layer schemas
 *   - `3.0`: [`ILlmSchemaV3`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3.ts)
 *   - `3.1`: [`ILlmSchemaV3_1`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3_1.ts)
 *
 * @template Parameters Target parameters type
 * @template Model LLM schema model
 * @template Config Configuration of LLM schema composition
 * @returns LLM parameters schema
 * @reference https://platform.openai.com/docs/guides/function-calling
 * @reference https://platform.openai.com/docs/guides/structured-outputs
 */
export function parameters<
  Parameters extends Record<string, any>,
  Model extends ILlmSchema.Model,
  Config extends Partial<ILlmSchema.ModelConfig[Model]> = {},
>(): ILlmSchema.ModelParameters[Model];
350
+
351
+ /**
352
+ * @internal
353
+ */
354
+ export function parameters(): never {
355
+ halt("parameters");
356
+ }
357
+
358
/**
 * > You must configure the generic argument `T`.
 *
 * TypeScript type to LLM type schema.
 *
 * Creates an LLM (Large Language Model) type schema, a type metadata that is used in the
 * [LLM function calling](https://platform.openai.com/docs/guides/function-calling),
 * from a TypeScript type.
 *
 * The returned {@link ILlmSchema} type would be specified by the `Model` argument,
 * and here is the list of available `Model` types with their corresponding LLM schema.
 * Read the following list, and determine the `Model` type considering the
 * characteristics of the target LLM provider.
 *
 * - LLM provider schemas
 *   - `chatgpt`: [`IChatGptSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IChatGptSchema.ts)
 *   - `claude`: [`IClaudeSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IClaudeSchema.ts)
 *   - `gemini`: [`IGeminiSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IGeminiSchema.ts)
 *   - `llama`: [`ILlamaSchema`](https://github.com/samchon/openapi/blob/master/src/structures/ILlamaSchema.ts)
 * - Middle layer schemas
 *   - `3.0`: [`ILlmSchemaV3`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3.ts)
 *   - `3.1`: [`ILlmSchemaV3_1`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3_1.ts)
 *
 * If you actually want to perform the LLM function calling with TypeScript functions,
 * you can do it with the {@link application} function. Otherwise you hope to perform the
 * structured output, {@link parameters} function is better. Let's enjoy the LLM function calling
 * and structured output with the native TypeScript functions and types.
 *
 * > **What is LLM function calling?**
 * >
 * > LLM (Large Language Model) selects the proper function and fills the arguments,
 * > but the actual function call execution is not by LLM, but by you.
 * >
 * > In nowadays, most LLM (Large Language Model) like OpenAI are supporting
 * > "function calling" feature. The "function calling" means that LLM automatically selects
 * > a proper function and compose parameter values from the user's chatting text.
 * >
 * > When LLM selects the proper function and its arguments, you just call the function
 * > with the arguments. And then informs the return value to the LLM by system prompt,
 * > LLM will continue the next conversation based on the return value.
 *
 * @template T Target type
 * @template Model LLM schema model
 * @template Config Configuration of LLM schema composition
 * @returns LLM schema
 * @reference https://platform.openai.com/docs/guides/function-calling
 * @reference https://platform.openai.com/docs/guides/structured-outputs
 * @author Jeongho Nam - https://github.com/samchon
 */
export function schema(): never;
408
+
409
/**
 * TypeScript type to LLM type schema.
 *
 * Creates an LLM (Large Language Model) type schema, a type metadata that is used in the
 * [LLM function calling](https://platform.openai.com/docs/guides/function-calling),
 * from a TypeScript type.
 *
 * The returned {@link ILlmSchema} type would be specified by the `Model` argument,
 * and here is the list of available `Model` types with their corresponding LLM schema:
 *
 * - LLM provider schemas
 *   - `chatgpt`: [`IChatGptSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IChatGptSchema.ts)
 *   - `claude`: [`IClaudeSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IClaudeSchema.ts)
 *   - `gemini`: [`IGeminiSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IGeminiSchema.ts)
 *   - `llama`: [`ILlamaSchema`](https://github.com/samchon/openapi/blob/master/src/structures/ILlamaSchema.ts)
 * - Middle layer schemas
 *   - `3.0`: [`ILlmSchemaV3`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3.ts)
 *   - `3.1`: [`ILlmSchemaV3_1`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3_1.ts)
 *
 * If you actually want to perform the LLM function calling with TypeScript functions,
 * you can do it with the {@link application} function. Otherwise you hope to perform the
 * structured output, {@link parameters} function is better. Let's enjoy the LLM function calling
 * and structured output with the native TypeScript functions and types.
 *
 * > **What is LLM function calling?**
 * >
 * > LLM (Large Language Model) selects the proper function and fills the arguments,
 * > but the actual function call execution is not by LLM, but by you.
 * >
 * > In nowadays, most LLM (Large Language Model) like OpenAI are supporting
 * > "function calling" feature. The "function calling" means that LLM automatically selects
 * > a proper function and compose parameter values from the user's chatting text.
 * >
 * > When LLM selects the proper function and its arguments, you just call the function
 * > with the arguments. And then informs the return value to the LLM by system prompt,
 * > LLM will continue the next conversation based on the return value.
 *
 * @template T Target type
 * @template Model LLM schema model
 * @template Config Configuration of LLM schema composition
 * @returns LLM schema
 * @reference https://platform.openai.com/docs/guides/function-calling
 * @reference https://platform.openai.com/docs/guides/structured-outputs
 * @author Jeongho Nam - https://github.com/samchon
 */
export function schema<
  T,
  Model extends ILlmSchema.Model,
  Config extends Partial<ILlmSchema.ModelConfig[Model]> = {},
>(
  ...$defs: Extract<
    ILlmSchema.ModelSchema[Model],
    { $ref: string }
  > extends never
    ? []
    : [Record<string, ILlmSchema.ModelSchema[Model]>]
): ILlmSchema.ModelSchema[Model];
466
+
467
+ /**
468
+ * @internal
469
+ */
470
+ export function schema(): never {
471
+ halt("schema");
472
+ }
473
+
474
+ /**
475
+ * @internal
476
+ */
477
+ function halt(name: string): never {
478
+ throw new Error(
479
+ `Error on typia.llm.${name}(): no transform has been configured. Read and follow https://typia.io/docs/setup please.`,
480
+ );
481
+ }