typia 7.3.0 → 7.4.0-dev.20241215

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (465)
  1. package/LICENSE +21 -21
  2. package/README.md +148 -148
  3. package/lib/executable/typia.js +0 -0
  4. package/lib/programmers/llm/LlmApplicationOfValidateProgrammer.d.ts +4 -1
  5. package/lib/programmers/llm/LlmApplicationOfValidateProgrammer.js +1 -1
  6. package/lib/programmers/llm/LlmApplicationOfValidateProgrammer.js.map +1 -1
  7. package/lib/programmers/llm/LlmApplicationProgrammer.d.ts +4 -1
  8. package/lib/programmers/llm/LlmApplicationProgrammer.js +3 -3
  9. package/lib/programmers/llm/LlmApplicationProgrammer.js.map +1 -1
  10. package/lib/programmers/llm/LlmParametersProgrammer.d.ts +4 -1
  11. package/lib/programmers/llm/LlmParametersProgrammer.js +2 -2
  12. package/lib/programmers/llm/LlmParametersProgrammer.js.map +1 -1
  13. package/lib/programmers/llm/LlmSchemaProgrammer.d.ts +4 -1
  14. package/lib/programmers/llm/LlmSchemaProgrammer.js +17 -6
  15. package/lib/programmers/llm/LlmSchemaProgrammer.js.map +1 -1
  16. package/lib/transformers/features/llm/LlmApplicationOfValidateTransformer.js +11 -7
  17. package/lib/transformers/features/llm/LlmApplicationOfValidateTransformer.js.map +1 -1
  18. package/lib/transformers/features/llm/LlmApplicationTransformer.js +12 -8
  19. package/lib/transformers/features/llm/LlmApplicationTransformer.js.map +1 -1
  20. package/lib/transformers/features/llm/LlmParametersTransformer.js +11 -7
  21. package/lib/transformers/features/llm/LlmParametersTransformer.js.map +1 -1
  22. package/lib/transformers/features/llm/LlmSchemaTransformer.js +11 -7
  23. package/lib/transformers/features/llm/LlmSchemaTransformer.js.map +1 -1
  24. package/package.json +3 -3
  25. package/src/IRandomGenerator.ts +49 -49
  26. package/src/IReadableURLSearchParams.ts +9 -9
  27. package/src/IValidation.ts +21 -21
  28. package/src/executable/TypiaGenerateWizard.ts +83 -83
  29. package/src/executable/TypiaPatchWizard.ts +45 -45
  30. package/src/executable/TypiaSetupWizard.ts +179 -179
  31. package/src/executable/setup/ArgumentParser.ts +42 -42
  32. package/src/executable/setup/FileRetriever.ts +19 -19
  33. package/src/executable/setup/PackageManager.ts +87 -87
  34. package/src/factories/ExpressionFactory.ts +216 -216
  35. package/src/factories/IdentifierFactory.ts +89 -89
  36. package/src/factories/JsonMetadataFactory.ts +76 -76
  37. package/src/factories/LiteralFactory.ts +52 -52
  38. package/src/factories/MetadataCollection.ts +278 -278
  39. package/src/factories/MetadataCommentTagFactory.ts +650 -650
  40. package/src/factories/MetadataFactory.ts +404 -404
  41. package/src/factories/MetadataTypeTagFactory.ts +411 -411
  42. package/src/factories/MetadataTypeTagSchemaFactory.ts +82 -82
  43. package/src/factories/NumericRangeFactory.ts +72 -72
  44. package/src/factories/ProtobufFactory.ts +875 -875
  45. package/src/factories/StatementFactory.ts +90 -90
  46. package/src/factories/TemplateFactory.ts +64 -64
  47. package/src/factories/TypeFactory.ts +140 -140
  48. package/src/factories/internal/metadata/IMetadataIteratorProps.ts +17 -17
  49. package/src/factories/internal/metadata/MetadataHelper.ts +21 -21
  50. package/src/factories/internal/metadata/emplace_metadata_alias.ts +33 -33
  51. package/src/factories/internal/metadata/emplace_metadata_array_type.ts +39 -39
  52. package/src/factories/internal/metadata/emplace_metadata_object.ts +208 -208
  53. package/src/factories/internal/metadata/emplace_metadata_tuple.ts +57 -57
  54. package/src/factories/internal/metadata/explore_metadata.ts +31 -31
  55. package/src/factories/internal/metadata/iterate_metadata.ts +54 -54
  56. package/src/factories/internal/metadata/iterate_metadata_alias.ts +33 -33
  57. package/src/factories/internal/metadata/iterate_metadata_array.ts +63 -63
  58. package/src/factories/internal/metadata/iterate_metadata_atomic.ts +62 -62
  59. package/src/factories/internal/metadata/iterate_metadata_coalesce.ts +28 -28
  60. package/src/factories/internal/metadata/iterate_metadata_collection.ts +146 -146
  61. package/src/factories/internal/metadata/iterate_metadata_comment_tags.ts +32 -32
  62. package/src/factories/internal/metadata/iterate_metadata_constant.ts +76 -76
  63. package/src/factories/internal/metadata/iterate_metadata_escape.ts +49 -49
  64. package/src/factories/internal/metadata/iterate_metadata_function.ts +91 -91
  65. package/src/factories/internal/metadata/iterate_metadata_intersection.ts +213 -213
  66. package/src/factories/internal/metadata/iterate_metadata_map.ts +57 -57
  67. package/src/factories/internal/metadata/iterate_metadata_native.ts +255 -255
  68. package/src/factories/internal/metadata/iterate_metadata_object.ts +35 -35
  69. package/src/factories/internal/metadata/iterate_metadata_set.ts +57 -57
  70. package/src/factories/internal/metadata/iterate_metadata_sort.ts +87 -87
  71. package/src/factories/internal/metadata/iterate_metadata_template.ts +41 -41
  72. package/src/factories/internal/metadata/iterate_metadata_tuple.ts +26 -26
  73. package/src/factories/internal/metadata/iterate_metadata_union.ts +19 -19
  74. package/src/functional.ts +750 -750
  75. package/src/http.ts +1047 -1047
  76. package/src/internal/_IProtobufWriter.ts +18 -18
  77. package/src/internal/_ProtobufReader.ts +194 -194
  78. package/src/internal/_ProtobufSizer.ts +145 -145
  79. package/src/internal/_ProtobufWriter.ts +145 -145
  80. package/src/internal/_accessExpressionAsString.ts +46 -46
  81. package/src/internal/_assertGuard.ts +13 -13
  82. package/src/internal/_functionalTypeGuardErrorFactory.ts +4 -4
  83. package/src/internal/_httpFormDataReadArray.ts +4 -4
  84. package/src/internal/_httpFormDataReadBigint.ts +18 -18
  85. package/src/internal/_httpFormDataReadBlob.ts +10 -10
  86. package/src/internal/_httpFormDataReadBoolean.ts +16 -16
  87. package/src/internal/_httpFormDataReadFile.ts +10 -10
  88. package/src/internal/_httpFormDataReadNumber.ts +15 -15
  89. package/src/internal/_httpFormDataReadString.ts +10 -10
  90. package/src/internal/_httpHeaderReadBigint.ts +10 -10
  91. package/src/internal/_httpHeaderReadBoolean.ts +8 -8
  92. package/src/internal/_httpHeaderReadNumber.ts +7 -7
  93. package/src/internal/_httpParameterReadBigint.ts +10 -10
  94. package/src/internal/_httpParameterReadBoolean.ts +8 -8
  95. package/src/internal/_httpParameterReadNumber.ts +7 -7
  96. package/src/internal/_httpParameterReadString.ts +2 -2
  97. package/src/internal/_httpQueryParseURLSearchParams.ts +12 -12
  98. package/src/internal/_httpQueryReadArray.ts +4 -4
  99. package/src/internal/_httpQueryReadBigint.ts +12 -12
  100. package/src/internal/_httpQueryReadBoolean.ts +14 -14
  101. package/src/internal/_httpQueryReadNumber.ts +9 -9
  102. package/src/internal/_httpQueryReadString.ts +4 -4
  103. package/src/internal/_isBetween.ts +2 -2
  104. package/src/internal/_isBigintString.ts +8 -8
  105. package/src/internal/_isFormatByte.ts +7 -7
  106. package/src/internal/_isFormatDate.ts +3 -3
  107. package/src/internal/_isFormatDateTime.ts +4 -4
  108. package/src/internal/_isFormatDuration.ts +4 -4
  109. package/src/internal/_isFormatEmail.ts +4 -4
  110. package/src/internal/_isFormatHostname.ts +4 -4
  111. package/src/internal/_isFormatIdnEmail.ts +4 -4
  112. package/src/internal/_isFormatIdnHostname.ts +4 -4
  113. package/src/internal/_isFormatIpv4.ts +4 -4
  114. package/src/internal/_isFormatIpv6.ts +4 -4
  115. package/src/internal/_isFormatIri.ts +3 -3
  116. package/src/internal/_isFormatIriReference.ts +4 -4
  117. package/src/internal/_isFormatJsonPointer.ts +3 -3
  118. package/src/internal/_isFormatPassword.ts +1 -1
  119. package/src/internal/_isFormatRegex.ts +8 -8
  120. package/src/internal/_isFormatRelativeJsonPointer.ts +4 -4
  121. package/src/internal/_isFormatTime.ts +4 -4
  122. package/src/internal/_isFormatUri.ts +6 -6
  123. package/src/internal/_isFormatUriReference.ts +5 -5
  124. package/src/internal/_isFormatUriTemplate.ts +4 -4
  125. package/src/internal/_isFormatUrl.ts +4 -4
  126. package/src/internal/_isFormatUuid.ts +3 -3
  127. package/src/internal/_isTypeFloat.ts +5 -5
  128. package/src/internal/_isTypeInt32.ts +5 -5
  129. package/src/internal/_isTypeInt64.ts +5 -5
  130. package/src/internal/_isTypeUint32.ts +5 -5
  131. package/src/internal/_isTypeUint64.ts +5 -5
  132. package/src/internal/_isUniqueItems.ts +159 -159
  133. package/src/internal/_jsonStringifyNumber.ts +12 -12
  134. package/src/internal/_jsonStringifyRest.ts +3 -3
  135. package/src/internal/_jsonStringifyString.ts +42 -42
  136. package/src/internal/_jsonStringifyTail.ts +2 -2
  137. package/src/internal/_llmApplicationFinalize.ts +20 -20
  138. package/src/internal/_miscCloneAny.ts +46 -46
  139. package/src/internal/_notationAny.ts +37 -37
  140. package/src/internal/_notationCamel.ts +13 -13
  141. package/src/internal/_notationPascal.ts +8 -8
  142. package/src/internal/_notationSnake.ts +43 -43
  143. package/src/internal/_randomArray.ts +21 -21
  144. package/src/internal/_randomBigint.ts +6 -6
  145. package/src/internal/_randomBoolean.ts +1 -1
  146. package/src/internal/_randomFormatByte.ts +3 -3
  147. package/src/internal/_randomFormatDate.ts +18 -18
  148. package/src/internal/_randomFormatDatetime.ts +16 -16
  149. package/src/internal/_randomFormatDuration.ts +27 -27
  150. package/src/internal/_randomFormatEmail.ts +11 -11
  151. package/src/internal/_randomFormatHostname.ts +6 -6
  152. package/src/internal/_randomFormatIdnEmail.ts +3 -3
  153. package/src/internal/_randomFormatIdnHostname.ts +3 -3
  154. package/src/internal/_randomFormatIpv4.ts +11 -11
  155. package/src/internal/_randomFormatIpv6.ts +11 -11
  156. package/src/internal/_randomFormatIri.ts +3 -3
  157. package/src/internal/_randomFormatIriReference.ts +3 -3
  158. package/src/internal/_randomFormatJsonPointer.ts +7 -7
  159. package/src/internal/_randomFormatPassword.ts +8 -8
  160. package/src/internal/_randomFormatRegex.ts +4 -4
  161. package/src/internal/_randomFormatRelativeJsonPointer.ts +8 -8
  162. package/src/internal/_randomFormatTime.ts +14 -14
  163. package/src/internal/_randomFormatUri.ts +3 -3
  164. package/src/internal/_randomFormatUriReference.ts +3 -3
  165. package/src/internal/_randomFormatUriTemplate.ts +3 -3
  166. package/src/internal/_randomFormatUrl.ts +11 -11
  167. package/src/internal/_randomFormatUuid.ts +6 -6
  168. package/src/internal/_randomInteger.ts +47 -47
  169. package/src/internal/_randomNumber.ts +74 -74
  170. package/src/internal/_randomPattern.ts +10 -10
  171. package/src/internal/_randomPick.ts +9 -9
  172. package/src/internal/_randomString.ts +24 -24
  173. package/src/internal/_throwTypeGuardError.ts +5 -5
  174. package/src/internal/_validateReport.ts +13 -13
  175. package/src/internal/private/__notationCapitalize.ts +2 -2
  176. package/src/internal/private/__notationUnsnake.ts +24 -24
  177. package/src/json.ts +752 -752
  178. package/src/llm.ts +481 -481
  179. package/src/misc.ts +658 -658
  180. package/src/module.ts +937 -937
  181. package/src/notations.ts +827 -827
  182. package/src/programmers/AssertProgrammer.ts +454 -454
  183. package/src/programmers/CheckerProgrammer.ts +1617 -1617
  184. package/src/programmers/FeatureProgrammer.ts +622 -622
  185. package/src/programmers/ImportProgrammer.ts +185 -185
  186. package/src/programmers/IsProgrammer.ts +273 -273
  187. package/src/programmers/RandomProgrammer.ts +1190 -1190
  188. package/src/programmers/TypiaProgrammer.ts +174 -174
  189. package/src/programmers/ValidateProgrammer.ts +439 -439
  190. package/src/programmers/functional/FunctionalAssertFunctionProgrammer.ts +153 -153
  191. package/src/programmers/functional/FunctionalAssertParametersProgrammer.ts +125 -125
  192. package/src/programmers/functional/FunctionalAssertReturnProgrammer.ts +115 -115
  193. package/src/programmers/functional/FunctionalIsFunctionProgrammer.ts +72 -72
  194. package/src/programmers/functional/FunctionalIsParametersProgrammer.ts +113 -113
  195. package/src/programmers/functional/FunctionalIsReturnProgrammer.ts +116 -116
  196. package/src/programmers/functional/FunctionalValidateFunctionProgrammer.ts +119 -119
  197. package/src/programmers/functional/FunctionalValidateParametersProgrammer.ts +274 -274
  198. package/src/programmers/functional/FunctionalValidateReturnProgrammer.ts +135 -135
  199. package/src/programmers/functional/internal/FunctionalGeneralProgrammer.ts +34 -34
  200. package/src/programmers/helpers/AtomicPredicator.ts +35 -35
  201. package/src/programmers/helpers/CloneJoiner.ts +143 -143
  202. package/src/programmers/helpers/FunctionProgrammer.ts +67 -67
  203. package/src/programmers/helpers/HttpMetadataUtil.ts +21 -21
  204. package/src/programmers/helpers/NotationJoiner.ts +144 -144
  205. package/src/programmers/helpers/OptionPredicator.ts +15 -15
  206. package/src/programmers/helpers/ProtobufUtil.ts +228 -228
  207. package/src/programmers/helpers/PruneJoiner.ts +148 -148
  208. package/src/programmers/helpers/RandomJoiner.ts +168 -168
  209. package/src/programmers/helpers/StringifyJoinder.ts +115 -115
  210. package/src/programmers/helpers/StringifyPredicator.ts +13 -13
  211. package/src/programmers/helpers/UnionExplorer.ts +372 -372
  212. package/src/programmers/helpers/UnionPredicator.ts +79 -79
  213. package/src/programmers/helpers/disable_function_programmer_declare.ts +32 -32
  214. package/src/programmers/http/HttpAssertFormDataProgrammer.ts +99 -99
  215. package/src/programmers/http/HttpAssertHeadersProgrammer.ts +99 -99
  216. package/src/programmers/http/HttpAssertQueryProgrammer.ts +105 -105
  217. package/src/programmers/http/HttpFormDataProgrammer.ts +308 -308
  218. package/src/programmers/http/HttpHeadersProgrammer.ts +400 -400
  219. package/src/programmers/http/HttpIsFormDataProgrammer.ts +108 -108
  220. package/src/programmers/http/HttpIsHeadersProgrammer.ts +108 -108
  221. package/src/programmers/http/HttpIsQueryProgrammer.ts +114 -114
  222. package/src/programmers/http/HttpParameterProgrammer.ts +115 -115
  223. package/src/programmers/http/HttpQueryProgrammer.ts +336 -336
  224. package/src/programmers/http/HttpValidateFormDataProgrammer.ts +92 -92
  225. package/src/programmers/http/HttpValidateHeadersProgrammer.ts +92 -92
  226. package/src/programmers/http/HttpValidateQueryProgrammer.ts +98 -98
  227. package/src/programmers/internal/check_array_length.ts +47 -47
  228. package/src/programmers/internal/check_bigint.ts +50 -50
  229. package/src/programmers/internal/check_dynamic_key.ts +201 -201
  230. package/src/programmers/internal/check_dynamic_properties.ts +208 -208
  231. package/src/programmers/internal/check_everything.ts +23 -23
  232. package/src/programmers/internal/check_native.ts +27 -27
  233. package/src/programmers/internal/check_number.ts +112 -112
  234. package/src/programmers/internal/check_object.ts +75 -75
  235. package/src/programmers/internal/check_string.ts +50 -50
  236. package/src/programmers/internal/check_template.ts +48 -48
  237. package/src/programmers/internal/check_union_array_like.ts +335 -335
  238. package/src/programmers/internal/decode_union_object.ts +116 -116
  239. package/src/programmers/internal/feature_object_entries.ts +61 -61
  240. package/src/programmers/internal/json_schema_alias.ts +47 -47
  241. package/src/programmers/internal/json_schema_array.ts +45 -45
  242. package/src/programmers/internal/json_schema_bigint.ts +15 -15
  243. package/src/programmers/internal/json_schema_boolean.ts +15 -15
  244. package/src/programmers/internal/json_schema_constant.ts +26 -26
  245. package/src/programmers/internal/json_schema_description.ts +12 -12
  246. package/src/programmers/internal/json_schema_discriminator.ts +35 -35
  247. package/src/programmers/internal/json_schema_escaped.ts +82 -82
  248. package/src/programmers/internal/json_schema_native.ts +33 -33
  249. package/src/programmers/internal/json_schema_number.ts +15 -15
  250. package/src/programmers/internal/json_schema_object.ts +158 -158
  251. package/src/programmers/internal/json_schema_plugin.ts +18 -18
  252. package/src/programmers/internal/json_schema_station.ts +182 -182
  253. package/src/programmers/internal/json_schema_string.ts +15 -15
  254. package/src/programmers/internal/json_schema_template.ts +55 -55
  255. package/src/programmers/internal/json_schema_title.ts +20 -20
  256. package/src/programmers/internal/json_schema_tuple.ts +35 -35
  257. package/src/programmers/internal/metadata_to_pattern.ts +42 -42
  258. package/src/programmers/internal/postfix_of_tuple.ts +5 -5
  259. package/src/programmers/internal/prune_object_properties.ts +71 -71
  260. package/src/programmers/internal/stringify_dynamic_properties.ts +162 -162
  261. package/src/programmers/internal/stringify_regular_properties.ts +81 -81
  262. package/src/programmers/internal/template_to_pattern.ts +23 -23
  263. package/src/programmers/internal/wrap_metadata_rest_tuple.ts +23 -23
  264. package/src/programmers/json/JsonApplicationProgrammer.ts +279 -279
  265. package/src/programmers/json/JsonAssertParseProgrammer.ts +113 -113
  266. package/src/programmers/json/JsonAssertStringifyProgrammer.ts +115 -115
  267. package/src/programmers/json/JsonIsParseProgrammer.ts +114 -114
  268. package/src/programmers/json/JsonIsStringifyProgrammer.ts +108 -108
  269. package/src/programmers/json/JsonSchemasProgrammer.ts +91 -91
  270. package/src/programmers/json/JsonStringifyProgrammer.ts +1124 -1124
  271. package/src/programmers/json/JsonValidateParseProgrammer.ts +105 -105
  272. package/src/programmers/json/JsonValidateStringifyProgrammer.ts +124 -124
  273. package/src/programmers/llm/LlmApplicationOfValidateProgrammer.ts +83 -81
  274. package/src/programmers/llm/LlmApplicationProgrammer.ts +280 -277
  275. package/src/programmers/llm/LlmModelPredicator.ts +127 -127
  276. package/src/programmers/llm/LlmParametersProgrammer.ts +93 -90
  277. package/src/programmers/llm/LlmSchemaProgrammer.ts +173 -143
  278. package/src/programmers/misc/MiscAssertCloneProgrammer.ts +95 -95
  279. package/src/programmers/misc/MiscAssertPruneProgrammer.ts +116 -116
  280. package/src/programmers/misc/MiscCloneProgrammer.ts +1032 -1032
  281. package/src/programmers/misc/MiscIsCloneProgrammer.ts +99 -99
  282. package/src/programmers/misc/MiscIsPruneProgrammer.ts +97 -97
  283. package/src/programmers/misc/MiscLiteralsProgrammer.ts +80 -80
  284. package/src/programmers/misc/MiscPruneProgrammer.ts +728 -728
  285. package/src/programmers/misc/MiscValidateCloneProgrammer.ts +111 -111
  286. package/src/programmers/misc/MiscValidatePruneProgrammer.ts +113 -113
  287. package/src/programmers/notations/NotationAssertGeneralProgrammer.ts +101 -101
  288. package/src/programmers/notations/NotationGeneralProgrammer.ts +984 -984
  289. package/src/programmers/notations/NotationIsGeneralProgrammer.ts +105 -105
  290. package/src/programmers/notations/NotationValidateGeneralProgrammer.ts +119 -119
  291. package/src/programmers/protobuf/ProtobufAssertDecodeProgrammer.ts +98 -98
  292. package/src/programmers/protobuf/ProtobufAssertEncodeProgrammer.ts +102 -102
  293. package/src/programmers/protobuf/ProtobufDecodeProgrammer.ts +654 -654
  294. package/src/programmers/protobuf/ProtobufEncodeProgrammer.ts +945 -945
  295. package/src/programmers/protobuf/ProtobufIsDecodeProgrammer.ts +109 -109
  296. package/src/programmers/protobuf/ProtobufIsEncodeProgrammer.ts +98 -98
  297. package/src/programmers/protobuf/ProtobufMessageProgrammer.ts +179 -179
  298. package/src/programmers/protobuf/ProtobufValidateDecodeProgrammer.ts +92 -92
  299. package/src/programmers/protobuf/ProtobufValidateEncodeProgrammer.ts +119 -119
  300. package/src/protobuf.ts +868 -868
  301. package/src/reflect.ts +57 -57
  302. package/src/schemas/json/IJsonApplication.ts +73 -73
  303. package/src/schemas/json/IJsonSchemaCollection.ts +29 -29
  304. package/src/schemas/json/__IJsonApplication.ts +63 -63
  305. package/src/schemas/llm/ILlmApplicationOfValidate.ts +55 -55
  306. package/src/schemas/llm/ILlmFunctionOfValidate.ts +39 -39
  307. package/src/schemas/metadata/IMetadata.ts +35 -35
  308. package/src/schemas/metadata/IMetadataAlias.ts +6 -6
  309. package/src/schemas/metadata/IMetadataAliasType.ts +12 -12
  310. package/src/schemas/metadata/IMetadataApplication.ts +7 -7
  311. package/src/schemas/metadata/IMetadataArray.ts +6 -6
  312. package/src/schemas/metadata/IMetadataComponents.ts +11 -11
  313. package/src/schemas/metadata/IMetadataConstantValue.ts +11 -11
  314. package/src/schemas/metadata/IMetadataDictionary.ts +11 -11
  315. package/src/schemas/metadata/IMetadataMap.ts +8 -8
  316. package/src/schemas/metadata/IMetadataNative.ts +6 -6
  317. package/src/schemas/metadata/IMetadataObject.ts +6 -6
  318. package/src/schemas/metadata/IMetadataObjectType.ts +13 -13
  319. package/src/schemas/metadata/IMetadataSet.ts +7 -7
  320. package/src/schemas/metadata/IMetadataTemplate.ts +7 -7
  321. package/src/schemas/metadata/IMetadataTuple.ts +6 -6
  322. package/src/schemas/metadata/IMetadataTypeTag.ts +16 -16
  323. package/src/schemas/metadata/Metadata.ts +669 -669
  324. package/src/schemas/metadata/MetadataAlias.ts +46 -46
  325. package/src/schemas/metadata/MetadataAliasType.ts +63 -63
  326. package/src/schemas/metadata/MetadataApplication.ts +44 -44
  327. package/src/schemas/metadata/MetadataArray.ts +49 -49
  328. package/src/schemas/metadata/MetadataAtomic.ts +87 -87
  329. package/src/schemas/metadata/MetadataComponents.ts +98 -98
  330. package/src/schemas/metadata/MetadataConstantValue.ts +62 -62
  331. package/src/schemas/metadata/MetadataMap.ts +48 -48
  332. package/src/schemas/metadata/MetadataNative.ts +44 -44
  333. package/src/schemas/metadata/MetadataObject.ts +48 -48
  334. package/src/schemas/metadata/MetadataObjectType.ts +149 -149
  335. package/src/schemas/metadata/MetadataParameter.ts +54 -54
  336. package/src/schemas/metadata/MetadataProperty.ts +59 -59
  337. package/src/schemas/metadata/MetadataSet.ts +45 -45
  338. package/src/schemas/metadata/MetadataTemplate.ts +80 -80
  339. package/src/schemas/metadata/MetadataTuple.ts +32 -32
  340. package/src/schemas/protobuf/IProtobufProperty.ts +6 -6
  341. package/src/schemas/protobuf/IProtobufPropertyType.ts +37 -37
  342. package/src/schemas/protobuf/IProtobufSchema.ts +50 -50
  343. package/src/tags/Example.ts +24 -24
  344. package/src/tags/Examples.ts +16 -16
  345. package/src/tags/Format.ts +50 -50
  346. package/src/tags/JsonSchemaPlugin.ts +8 -8
  347. package/src/tags/Sequence.ts +10 -10
  348. package/src/tags/TagBase.ts +82 -82
  349. package/src/tags/Type.ts +32 -32
  350. package/src/tags/UniqueItems.ts +14 -14
  351. package/src/tags/index.ts +21 -21
  352. package/src/transform.ts +35 -35
  353. package/src/transformers/CallExpressionTransformer.ts +547 -547
  354. package/src/transformers/FileTransformer.ts +136 -136
  355. package/src/transformers/IProgrammerProps.ts +11 -11
  356. package/src/transformers/ITransformOptions.ts +62 -62
  357. package/src/transformers/ITransformProps.ts +9 -9
  358. package/src/transformers/ITypiaContext.ts +18 -18
  359. package/src/transformers/ImportTransformer.ts +81 -81
  360. package/src/transformers/NodeTransformer.ts +17 -17
  361. package/src/transformers/TransformerError.ts +60 -60
  362. package/src/transformers/features/AssertTransformer.ts +24 -24
  363. package/src/transformers/features/CreateAssertTransformer.ts +24 -24
  364. package/src/transformers/features/CreateIsTransformer.ts +18 -18
  365. package/src/transformers/features/CreateRandomTransformer.ts +43 -43
  366. package/src/transformers/features/CreateValidateTransformer.ts +18 -18
  367. package/src/transformers/features/IsTransformer.ts +18 -18
  368. package/src/transformers/features/RandomTransformer.ts +41 -41
  369. package/src/transformers/features/ValidateTransformer.ts +18 -18
  370. package/src/transformers/features/functional/FunctionalGenericTransformer.ts +57 -57
  371. package/src/transformers/features/http/CreateHttpAssertFormDataTransformer.ts +13 -13
  372. package/src/transformers/features/http/CreateHttpAssertHeadersTransformer.ts +13 -13
  373. package/src/transformers/features/http/CreateHttpAssertQueryTransformer.ts +13 -13
  374. package/src/transformers/features/http/CreateHttpFormDataTransformer.ts +13 -13
  375. package/src/transformers/features/http/CreateHttpHeadersTransformer.ts +13 -13
  376. package/src/transformers/features/http/CreateHttpIsFormDataTransformer.ts +13 -13
  377. package/src/transformers/features/http/CreateHttpIsHeadersTransformer.ts +13 -13
  378. package/src/transformers/features/http/CreateHttpIsQueryTransformer.ts +13 -13
  379. package/src/transformers/features/http/CreateHttpParameterTransformer.ts +13 -13
  380. package/src/transformers/features/http/CreateHttpQueryTransformer.ts +13 -13
  381. package/src/transformers/features/http/CreateHttpValidateFormDataTransformer.ts +13 -13
  382. package/src/transformers/features/http/CreateHttpValidateHeadersTransformer.ts +13 -13
  383. package/src/transformers/features/http/CreateHttpValidateQueryTransformer.ts +13 -13
  384. package/src/transformers/features/http/HttpAssertFormDataTransformer.ts +13 -13
  385. package/src/transformers/features/http/HttpAssertHeadersTransformer.ts +13 -13
  386. package/src/transformers/features/http/HttpAssertQueryTransformer.ts +13 -13
  387. package/src/transformers/features/http/HttpFormDataTransformer.ts +13 -13
  388. package/src/transformers/features/http/HttpHeadersTransformer.ts +13 -13
  389. package/src/transformers/features/http/HttpIsFormDataTransformer.ts +13 -13
  390. package/src/transformers/features/http/HttpIsHeadersTransformer.ts +13 -13
  391. package/src/transformers/features/http/HttpIsQueryTransformer.ts +13 -13
  392. package/src/transformers/features/http/HttpParameterTransformer.ts +13 -13
  393. package/src/transformers/features/http/HttpQueryTransformer.ts +13 -13
  394. package/src/transformers/features/http/HttpValidateFormDataTransformer.ts +13 -13
  395. package/src/transformers/features/http/HttpValidateHeadersTransformer.ts +13 -13
  396. package/src/transformers/features/http/HttpValidateQueryTransformer.ts +13 -13
  397. package/src/transformers/features/json/JsonApplicationTransformer.ts +105 -105
  398. package/src/transformers/features/json/JsonAssertParseTransformer.ts +13 -13
  399. package/src/transformers/features/json/JsonAssertStringifyTransformer.ts +13 -13
  400. package/src/transformers/features/json/JsonCreateAssertParseTransformer.ts +13 -13
  401. package/src/transformers/features/json/JsonCreateAssertStringifyTransformer.ts +13 -13
  402. package/src/transformers/features/json/JsonCreateIsParseTransformer.ts +13 -13
  403. package/src/transformers/features/json/JsonCreateIsStringifyTransformer.ts +13 -13
  404. package/src/transformers/features/json/JsonCreateStringifyTransformer.ts +13 -13
  405. package/src/transformers/features/json/JsonCreateValidateParseTransformer.ts +13 -13
  406. package/src/transformers/features/json/JsonCreateValidateStringifyProgrammer.ts +13 -13
  407. package/src/transformers/features/json/JsonIsParseTransformer.ts +13 -13
  408. package/src/transformers/features/json/JsonIsStringifyTransformer.ts +13 -13
  409. package/src/transformers/features/json/JsonSchemasTransformer.ts +143 -143
  410. package/src/transformers/features/json/JsonStringifyTransformer.ts +13 -13
  411. package/src/transformers/features/json/JsonValidateParseTransformer.ts +13 -13
  412. package/src/transformers/features/json/JsonValidateStringifyTransformer.ts +13 -13
  413. package/src/transformers/features/llm/LlmApplicationOfValidateTransformer.ts +120 -115
  414. package/src/transformers/features/llm/LlmApplicationTransformer.ts +118 -113
  415. package/src/transformers/features/llm/LlmParametersTransformer.ts +94 -89
  416. package/src/transformers/features/llm/LlmSchemaTransformer.ts +135 -130
  417. package/src/transformers/features/misc/MiscAssertCloneTransformer.ts +13 -13
  418. package/src/transformers/features/misc/MiscAssertPruneTransformer.ts +13 -13
  419. package/src/transformers/features/misc/MiscCloneTransformer.ts +13 -13
  420. package/src/transformers/features/misc/MiscCreateAssertCloneTransformer.ts +13 -13
  421. package/src/transformers/features/misc/MiscCreateAssertPruneTransformer.ts +13 -13
  422. package/src/transformers/features/misc/MiscCreateCloneTransformer.ts +13 -13
  423. package/src/transformers/features/misc/MiscCreateIsCloneTransformer.ts +13 -13
  424. package/src/transformers/features/misc/MiscCreateIsPruneTransformer.ts +13 -13
  425. package/src/transformers/features/misc/MiscCreatePruneTransformer.ts +13 -13
  426. package/src/transformers/features/misc/MiscCreateValidateCloneTransformer.ts +13 -13
  427. package/src/transformers/features/misc/MiscCreateValidatePruneTransformer.ts +13 -13
  428. package/src/transformers/features/misc/MiscIsCloneTransformer.ts +13 -13
  429. package/src/transformers/features/misc/MiscIsPruneTransformer.ts +13 -13
  430. package/src/transformers/features/misc/MiscLiteralsTransformer.ts +35 -35
  431. package/src/transformers/features/misc/MiscPruneTransformer.ts +13 -13
  432. package/src/transformers/features/misc/MiscValidateCloneTransformer.ts +13 -13
  433. package/src/transformers/features/misc/MiscValidatePruneTransformer.ts +13 -13
  434. package/src/transformers/features/notations/NotationAssertGeneralTransformer.ts +20 -20
  435. package/src/transformers/features/notations/NotationCreateAssertGeneralTransformer.ts +20 -20
  436. package/src/transformers/features/notations/NotationCreateGeneralTransformer.ts +20 -20
  437. package/src/transformers/features/notations/NotationCreateIsGeneralTransformer.ts +20 -20
  438. package/src/transformers/features/notations/NotationCreateValidateGeneralTransformer.ts +20 -20
  439. package/src/transformers/features/notations/NotationGeneralTransformer.ts +18 -18
  440. package/src/transformers/features/notations/NotationIsGeneralTransformer.ts +20 -20
  441. package/src/transformers/features/notations/NotationValidateGeneralTransformer.ts +20 -20
  442. package/src/transformers/features/protobuf/ProtobufAssertDecodeTransformer.ts +13 -13
  443. package/src/transformers/features/protobuf/ProtobufAssertEncodeTransformer.ts +13 -13
  444. package/src/transformers/features/protobuf/ProtobufCreateAssertDecodeTransformer.ts +13 -13
  445. package/src/transformers/features/protobuf/ProtobufCreateAssertEncodeTransformer.ts +13 -13
  446. package/src/transformers/features/protobuf/ProtobufCreateDecodeTransformer.ts +13 -13
  447. package/src/transformers/features/protobuf/ProtobufCreateEncodeTransformer.ts +13 -13
  448. package/src/transformers/features/protobuf/ProtobufCreateIsDecodeTransformer.ts +13 -13
  449. package/src/transformers/features/protobuf/ProtobufCreateIsEncodeTransformer.ts +13 -13
  450. package/src/transformers/features/protobuf/ProtobufCreateValidateDecodeTransformer.ts +13 -13
  451. package/src/transformers/features/protobuf/ProtobufCreateValidateEncodeTransformer.ts +13 -13
  452. package/src/transformers/features/protobuf/ProtobufDecodeTransformer.ts +13 -13
  453. package/src/transformers/features/protobuf/ProtobufEncodeTransformer.ts +13 -13
  454. package/src/transformers/features/protobuf/ProtobufIsDecodeTransformer.ts +13 -13
  455. package/src/transformers/features/protobuf/ProtobufIsEncodeTransformer.ts +13 -13
  456. package/src/transformers/features/protobuf/ProtobufMessageTransformer.ts +35 -35
  457. package/src/transformers/features/protobuf/ProtobufValidateDecodeTransformer.ts +13 -13
  458. package/src/transformers/features/protobuf/ProtobufValidateEncodeTransformer.ts +13 -13
  459. package/src/transformers/features/reflect/ReflectMetadataTransformer.ts +69 -69
  460. package/src/transformers/features/reflect/ReflectNameTransformer.ts +82 -82
  461. package/src/transformers/internal/GenericTransformer.ts +101 -101
  462. package/src/utils/MapUtil.ts +14 -14
  463. package/src/utils/NamingConvention.ts +94 -94
  464. package/src/utils/ProtobufNameEncoder.ts +32 -32
  465. package/src/utils/StringUtil.ts +16 -16
package/src/llm.ts CHANGED
@@ -1,481 +1,481 @@
1
- import { ILlmApplication, ILlmSchema } from "@samchon/openapi";
2
-
3
- import { ILlmApplicationOfValidate } from "./module";
4
-
5
- /**
6
- * > You must configure the generic argument `App`.
7
- *
8
- * TypeScript functions to LLM function calling application with validators.
9
- *
10
- * Creates an application of LLM (Large Language Model) function calling application
11
- * from a TypeScript class or interface type containing the target functions to be
12
- * called by the LLM function calling feature.
13
- *
14
- * If you put the returned {@link ILlmApplicationOfValidate.functions} objects to the
15
- * LLM provider like [OpenAI (ChatGPT)](https://openai.com/), the LLM will automatically
16
- * select the proper function and fill its arguments from the conversation
17
- * (maybe chatting text) with user (human). This is the concept of the LLM function calling.
18
- *
19
- * Additionally, the LLM function calling sometimes take a mistake that composing wrong typed
20
- * {@link ILlmFunctionOfValidate.parameters}. In that case, deliver return value of the
21
- * {@link ILlmFunctionOfValidate.validate} function, then LLM provider will correct the
22
- * parameters at the next conversation. The {@link ILlmFunctionOfValidate.validate} function
23
- * is a validator function reporting the detailed information about the wrong typed parameters.
24
- *
25
- * By the way, there can be some parameters (or their nested properties) which must be
26
- * composed by human, not by LLM. File uploading feature or some sensitive information
27
- * like secrety key (password) are the examples. In that case, you can separate the
28
- * function parameters to both LLM and human sides by configuring the
29
- * {@link ILlmApplicationOfValidate.IOptions.separate} property. The separated parameters
30
- * are assigned to the {@link ILlmFunctionOfValidate.separated} property.
31
- *
32
- * For reference, the actual function call execution is not by LLM, but by you.
33
- * When the LLM selects the proper function and fills the arguments, you just call
34
- * the function with the LLM prepared arguments. And then informs the return value to
35
- * the LLM by system prompt. The LLM will continue the next conversation based on
36
- * the return value.
37
- *
38
- * Additionally, if you've configured {@link ILlmApplicationOfValidate.IOptions.separate},
39
- * so that the parameters are separated to human and LLM sides, you can merge these
40
- * humand and LLM sides' parameters into one through {@link HttpLlm.mergeParameters}
41
- * before the actual LLM function call execution.
42
- *
43
- * Here is the list of available `Model` types with their corresponding LLM schema.
44
- * Reading the following list, and determine the `Model` type considering the
45
- * characteristics of the target LLM provider.
46
- *
47
- * - LLM provider schemas
48
- * - `chatgpt`: [`IChatGptSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IChatGptSchema.ts)
49
- * - `claude`: [`IClaudeSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IClaudeSchema.ts)
50
- * - `gemini`: [`IGeminiSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IGeminiSchema.ts)
51
- * - `llama`: [`ILlamaSchema`](https://github.com/samchon/openapi/blob/master/src/structures/ILlamaSchema.ts)
52
- * - Midldle layer schemas
53
- * - `3.0`: [`ILlmSchemaV3`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3.ts)
54
- * - `3.1`: [`ILlmSchemaV3_1`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3_1.ts)
55
- *
56
- * @template App Target class or interface type collecting the functions to call
57
- * @template Model LLM schema model
58
- * @template Config Configuration of LLM schema composition
59
- * @param options Options for the LLM application construction
60
- * @returns Application of LLM function calling schemas
61
- * @reference https://platform.openai.com/docs/guides/function-calling
62
- * @author Jeongho Nam - https://github.com/samchon
63
- */
64
- export function applicationOfValidate(
65
- options?: Partial<Pick<ILlmApplicationOfValidate.IOptions<any>, "separate">>,
66
- ): never;
67
-
68
- /**
69
- * TypeScript functions to LLM function calling application with validators.
70
- *
71
- * Creates an application of LLM (Large Language Model) function calling application
72
- * from a TypeScript class or interface type containing the target functions to be
73
- * called by the LLM function calling feature.
74
- *
75
- * If you put the returned {@link ILlmApplicationOfValidate.functions} objects to the
76
- * LLM provider like [OpenAI (ChatGPT)](https://openai.com/), the LLM will automatically
77
- * select the proper function and fill its arguments from the conversation
78
- * (maybe chatting text) with user (human). This is the concept of the LLM function calling.
79
- *
80
- * Additionally, the LLM function calling sometimes take a mistake that composing wrong typed
81
- * {@link ILlmFunctionOfValidate.parameters}. In that case, deliver return value of the
82
- * {@link ILlmFunctionOfValidate.validate} function, then LLM provider will correct the
83
- * parameters at the next conversation. The {@link ILlmFunctionOfValidate.validate} function
84
- * is a validator function reporting the detailed information about the wrong typed parameters.
85
- *
86
- * By the way, there can be some parameters (or their nested properties) which must be
87
- * composed by human, not by LLM. File uploading feature or some sensitive information
88
- * like secrety key (password) are the examples. In that case, you can separate the
89
- * function parameters to both LLM and human sides by configuring the
90
- * {@link ILlmApplicationOfValidate.IOptions.separate} property. The separated parameters
91
- * are assigned to the {@link ILlmFunctionOfValidate.separated} property.
92
- *
93
- * For reference, the actual function call execution is not by LLM, but by you.
94
- * When the LLM selects the proper function and fills the arguments, you just call
95
- * the function with the LLM prepared arguments. And then informs the return value to
96
- * the LLM by system prompt. The LLM will continue the next conversation based on
97
- * the return value.
98
- *
99
- * Additionally, if you've configured {@link ILlmApplicationOfValidate.IOptions.separate},
100
- * so that the parameters are separated to human and LLM sides, you can merge these
101
- * humand and LLM sides' parameters into one through {@link HttpLlm.mergeParameters}
102
- * before the actual LLM function call execution.
103
- *
104
- * Here is the list of available `Model` types with their corresponding LLM schema.
105
- * Reading the following list, and determine the `Model` type considering the
106
- * characteristics of the target LLM provider.
107
- *
108
- * - LLM provider schemas
109
- * - `chatgpt`: [`IChatGptSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IChatGptSchema.ts)
110
- * - `claude`: [`IClaudeSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IClaudeSchema.ts)
111
- * - `gemini`: [`IGeminiSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IGeminiSchema.ts)
112
- * - `llama`: [`ILlamaSchema`](https://github.com/samchon/openapi/blob/master/src/structures/ILlamaSchema.ts)
113
- * - Midldle layer schemas
114
- * - `3.0`: [`ILlmSchemaV3`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3.ts)
115
- * - `3.1`: [`ILlmSchemaV3_1`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3_1.ts)
116
- *
117
- * @template App Target class or interface type collecting the functions to call
118
- * @template Model LLM schema model
119
- * @template Config Configuration of LLM schema composition
120
- * @param options Options for the LLM application construction
121
- * @returns Application of LLM function calling schemas
122
- * @reference https://platform.openai.com/docs/guides/function-calling
123
- * @author Jeongho Nam - https://github.com/samchon
124
- */
125
- export function applicationOfValidate<
126
- App extends Record<string, any>,
127
- Model extends ILlmSchema.Model,
128
- Config extends Partial<ILlmSchema.ModelConfig[Model]> = {},
129
- >(
130
- options?: Partial<
131
- Pick<ILlmApplicationOfValidate.IOptions<Model>, "separate">
132
- >,
133
- ): ILlmApplicationOfValidate<Model>;
134
-
135
- /**
136
- * @internal
137
- */
138
- export function applicationOfValidate(): never {
139
- halt("applicationOfValidate");
140
- }
141
-
142
- /**
143
- * > You must configure the generic argument `App`.
144
- *
145
- * TypeScript functions to LLM function calling application.
146
- *
147
- * Creates an application of LLM (Large Language Model) function calling application
148
- * from a TypeScript class or interface type containing the target functions to be
149
- * called by the LLM function calling feature.
150
- *
151
- * If you put the returned {@link ILlmApplication.functions} objects to the LLM provider
152
- * like [OpenAI (ChatGPT)](https://openai.com/), the LLM will automatically select the
153
- * proper function and fill its arguments from the conversation (maybe chatting text)
154
- * with user (human). This is the concept of the LLM function calling.
155
- *
156
- * By the way, there can be some parameters (or their nested properties) which must be
157
- * composed by human, not by LLM. File uploading feature or some sensitive information
158
- * like secrety key (password) are the examples. In that case, you can separate the
159
- * function parameters to both LLM and human sides by configuring the
160
- * {@link ILlmApplication.IOptions.separate} property. The separated parameters are
161
- * assigned to the {@link ILlmFunction.separated} property.
162
- *
163
- * For reference, the actual function call execution is not by LLM, but by you.
164
- * When the LLM selects the proper function and fills the arguments, you just call
165
- * the function with the LLM prepared arguments. And then informs the return value to
166
- * the LLM by system prompt. The LLM will continue the next conversation based on
167
- * the return value.
168
- *
169
- * Additionally, if you've configured {@link ILlmApplication.IOptions.separate},
170
- * so that the parameters are separated to human and LLM sides, you can merge these
171
- * humand and LLM sides' parameters into one through {@link HttpLlm.mergeParameters}
172
- * before the actual LLM function call execution.
173
- *
174
- * Here is the list of available `Model` types with their corresponding LLM schema.
175
- * Reading the following list, and determine the `Model` type considering the
176
- * characteristics of the target LLM provider.
177
- *
178
- * - LLM provider schemas
179
- * - `chatgpt`: [`IChatGptSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IChatGptSchema.ts)
180
- * - `claude`: [`IClaudeSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IClaudeSchema.ts)
181
- * - `gemini`: [`IGeminiSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IGeminiSchema.ts)
182
- * - `llama`: [`ILlamaSchema`](https://github.com/samchon/openapi/blob/master/src/structures/ILlamaSchema.ts)
183
- * - Midldle layer schemas
184
- * - `3.0`: [`ILlmSchemaV3`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3.ts)
185
- * - `3.1`: [`ILlmSchemaV3_1`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3_1.ts)
186
- *
187
- * @template App Target class or interface type collecting the functions to call
188
- * @template Model LLM schema model
189
- * @template Config Configuration of LLM schema composition
190
- * @param options Options for the LLM application construction
191
- * @returns Application of LLM function calling schemas
192
- * @reference https://platform.openai.com/docs/guides/function-calling
193
- * @author Jeongho Nam - https://github.com/samchon
194
- */
195
- export function application(
196
- options?: Partial<Pick<ILlmApplication.IOptions<any>, "separate">>,
197
- ): never;
198
-
199
- /**
200
- * TypeScript functions to LLM function calling application.
201
- *
202
- * Creates an application of LLM (Large Language Model) function calling application
203
- * from a TypeScript class or interface type containing the target functions to be
204
- * called by the LLM function calling feature.
205
- *
206
- * If you put the returned {@link ILlmApplication.functions} objects to the LLM provider
207
- * like [OpenAI (ChatGPT)](https://openai.com/), the LLM will automatically select the
208
- * proper function and fill its arguments from the conversation (maybe chatting text)
209
- * with user (human). This is the concept of the LLM function calling.
210
- *
211
- * By the way, there can be some parameters (or their nested properties) which must be
212
- * composed by human, not by LLM. File uploading feature or some sensitive information
213
- * like secrety key (password) are the examples. In that case, you can separate the
214
- * function parameters to both LLM and human sides by configuring the
215
- * {@link ILlmApplication.IOptions.separate} property. The separated parameters are
216
- * assigned to the {@link ILlmFunction.separated} property.
217
- *
218
- * For reference, the actual function call execution is not by LLM, but by you.
219
- * When the LLM selects the proper function and fills the arguments, you just call
220
- * the function with the LLM prepared arguments. And then informs the return value to
221
- * the LLM by system prompt. The LLM will continue the next conversation based on
222
- * the return value.
223
- *
224
- * Additionally, if you've configured {@link ILlmApplication.IOptions.separate},
225
- * so that the parameters are separated to human and LLM sides, you can merge these
226
- * humand and LLM sides' parameters into one through {@link HttpLlm.mergeParameters}
227
- * before the actual LLM function call execution.
228
- *
229
- * Here is the list of available `Model` types with their corresponding LLM schema.
230
- * Reading the following list, and determine the `Model` type considering the
231
- * characteristics of the target LLM provider.
232
- *
233
- * - LLM provider schemas
234
- * - `chatgpt`: [`IChatGptSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IChatGptSchema.ts)
235
- * - `claude`: [`IClaudeSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IClaudeSchema.ts)
236
- * - `gemini`: [`IGeminiSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IGeminiSchema.ts)
237
- * - `llama`: [`ILlamaSchema`](https://github.com/samchon/openapi/blob/master/src/structures/ILlamaSchema.ts)
238
- * - Midldle layer schemas
239
- * - `3.0`: [`ILlmSchemaV3`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3.ts)
240
- * - `3.1`: [`ILlmSchemaV3_1`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3_1.ts)
241
- *
242
- * @template App Target class or interface type collecting the functions to call
243
- * @template Model LLM schema model
244
- * @template Config Configuration of LLM schema composition
245
- * @param options Options for the LLM application construction
246
- * @returns Application of LLM function calling schemas
247
- * @reference https://platform.openai.com/docs/guides/function-calling
248
- * @author Jeongho Nam - https://github.com/samchon
249
- */
250
- export function application<
251
- App extends Record<string, any>,
252
- Model extends ILlmSchema.Model,
253
- Config extends Partial<ILlmSchema.ModelConfig[Model]> = {},
254
- >(
255
- options?: Partial<Pick<ILlmApplication.IOptions<Model>, "separate">>,
256
- ): ILlmApplication<Model>;
257
-
258
- /**
259
- * @internal
260
- */
261
- export function application(): never {
262
- halt("application");
263
- }
264
-
265
- /**
266
- * > You must configure the generic argument `Parameters`.
267
- *
268
- * TypeScript parameters to LLM parameters schema.
269
- *
270
- * Creates an LLM (Large Language Model) parameters schema, a type metadata that is used in the
271
- * [LLM function calling](https://platform.openai.com/docs/guides/function-calling)
272
- * and [LLM structured outputs](https://platform.openai.com/docs/guides/structured-outputs),
273
- * from a TypeScript parameters type.
274
- *
275
- * For references, LLM identifies only keyworded arguments, not positional arguments.
276
- * Therefore, the TypeScript parameters type must be an object type, and its properties
277
- * must be static. If dynamic properties are, it would be compilation error.
278
- *
279
- * Also, such parameters type can be utilized not only for the LLM function calling,
280
- * but also for the LLM structured outputs. The LLM structured outputs is a feature
281
- * that LLM (Large Language Model) can generate a structured output, not only a plain
282
- * text, by filling the parameters from the conversation (maybe chatting text) with user
283
- * (human).
284
- *
285
- * Here is the list of available `Model` types with their corresponding LLM schema.
286
- * Reading the following list, and determine the `Model` type considering the
287
- * characteristics of the target LLM provider.
288
- *
289
- * - LLM provider schemas
290
- * - `chatgpt`: [`IChatGptSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IChatGptSchema.ts)
291
- * - `claude`: [`IClaudeSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IClaudeSchema.ts)
292
- * - `gemini`: [`IGeminiSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IGeminiSchema.ts)
293
- * - `llama`: [`ILlamaSchema`](https://github.com/samchon/openapi/blob/master/src/structures/ILlamaSchema.ts)
294
- * - Midldle layer schemas
295
- * - `3.0`: [`ILlmSchemaV3`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3.ts)
296
- * - `3.1`: [`ILlmSchemaV3_1`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3_1.ts)
297
- *
298
- * @template Parameters Target parameters type
299
- * @template Model LLM schema model
300
- * @template Config Configuration of LLM schema composition
301
- * @returns LLM parameters schema
302
- * @reference https://platform.openai.com/docs/guides/function-calling
303
- * @reference https://platform.openai.com/docs/guides/structured-outputs
304
- */
305
- export function parameters(): never;
306
-
307
- /**
308
- * TypeScript parameters to LLM parameters schema.
309
- *
310
- * Creates an LLM (Large Language Model) parameters schema, a type metadata that is used in the
311
- * [LLM function calling](https://platform.openai.com/docs/guides/function-calling)
312
- * and [LLM structured outputs](https://platform.openai.com/docs/guides/structured-outputs),
313
- * from a TypeScript parameters type.
314
- *
315
- * For references, LLM identifies only keyworded arguments, not positional arguments.
316
- * Therefore, the TypeScript parameters type must be an object type, and its properties
317
- * must be static. If dynamic properties are, it would be compilation error.
318
- *
319
- * Also, such parameters type can be utilized not only for the LLM function calling,
320
- * but also for the LLM structured outputs. The LLM structured outputs is a feature
321
- * that LLM (Large Language Model) can generate a structured output, not only a plain
322
- * text, by filling the parameters from the conversation (maybe chatting text) with user
323
- * (human).
324
- *
325
- * Here is the list of available `Model` types with their corresponding LLM schema.
326
- * Reading the following list, and determine the `Model` type considering the
327
- * characteristics of the target LLM provider.
328
- *
329
- * - LLM provider schemas
330
- * - `chatgpt`: [`IChatGptSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IChatGptSchema.ts)
331
- * - `claude`: [`IClaudeSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IClaudeSchema.ts)
332
- * - `gemini`: [`IGeminiSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IGeminiSchema.ts)
333
- * - `llama`: [`ILlamaSchema`](https://github.com/samchon/openapi/blob/master/src/structures/ILlamaSchema.ts)
334
- * - Midldle layer schemas
335
- * - `3.0`: [`ILlmSchemaV3`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3.ts)
336
- * - `3.1`: [`ILlmSchemaV3_1`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3_1.ts)
337
- *
338
- * @template Parameters Target parameters type
339
- * @template Model LLM schema model
340
- * @template Config Configuration of LLM schema composition
341
- * @returns LLM parameters schema
342
- * @reference https://platform.openai.com/docs/guides/function-calling
343
- * @reference https://platform.openai.com/docs/guides/structured-outputs
344
- */
345
- export function parameters<
346
- Parameters extends Record<string, any>,
347
- Model extends ILlmSchema.Model,
348
- Config extends Partial<ILlmSchema.ModelConfig[Model]> = {},
349
- >(): ILlmSchema.ModelParameters[Model];
350
-
351
- /**
352
- * @internal
353
- */
354
- export function parameters(): never {
355
- halt("parameters");
356
- }
357
-
358
- /**
359
- * > You must configure the generic argument `T`.
360
- *
361
- * TypeScript type to LLM type schema.
362
- *
363
- * Creates an LLM (Large Language Model) type schema, a type metadata that is used in the
364
- * [LLM function calling](@reference https://platform.openai.com/docs/guides/function-calling),
365
- * from a TypeScript type.
366
- *
367
- * The returned {@link ILlmSchema} type would be specified by the `Model` argument,
368
- * and here is the list of available `Model` types with their corresponding LLM schema.
369
- * Reading the following list, and determine the `Model` type considering the
370
- * characteristics of the target LLM provider.
371
- *
372
- * - LLM provider schemas
373
- * - `chatgpt`: [`IChatGptSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IChatGptSchema.ts)
374
- * - `claude`: [`IClaudeSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IClaudeSchema.ts)
375
- * - `gemini`: [`IGeminiSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IGeminiSchema.ts)
376
- * - `llama`: [`ILlamaSchema`](https://github.com/samchon/openapi/blob/master/src/structures/ILlamaSchema.ts)
377
- * - Midldle layer schemas
378
- * - `3.0`: [`ILlmSchemaV3`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3.ts)
379
- * - `3.1`: [`ILlmSchemaV3_1`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3_1.ts)
380
- *
381
- * If you actually want to perform the LLM function calling with TypeScript functions,
382
- * you can do it with the {@link application} function. Otherwise you hope to perform the
383
- * structured output, {@link parameters} function is better. Let's enjoy the LLM function calling
384
- * and structured output with the native TypeScript functions and types.
385
- *
386
- * > **What LLM function calling is?
387
- * >
388
- * > LLM (Large Language Model) selects propert function and fill the arguments,
389
- * > but actuall function call execution is not by LLM, but by you.
390
- * >
391
- * > In nowadays, most LLM (Large Language Model) like OpenAI are supporting
392
- * > "function calling" feature. The "function calling" means that LLM automatically selects
393
- * > a proper function and compose parameter values from the user's chatting text.
394
- * >
395
- * > When LLM selects the proper function and its arguments, you just call the function
396
- * > with the arguments. And then informs the return value to the LLM by system prompt,
397
- * > LLM will continue the next conversation based on the return value.
398
- *
399
- * @template T Target type
400
- * @template Model LLM schema model
401
- * @template Config Configuration of LLM schema composition
402
- * @returns LLM schema
403
- * @reference https://platform.openai.com/docs/guides/function-calling
404
- * @reference https://platform.openai.com/docs/guides/structured-outputs
405
- * @author Jeongho Nam - https://github.com/samchon
406
- */
407
- export function schema(): never;
408
-
409
- /**
410
- * TypeScript type to LLM type schema.
411
- *
412
- * Creates an LLM (Large Language Model) type schema, a type metadata that is used in the
413
- * [LLM function calling](@reference https://platform.openai.com/docs/guides/function-calling),
414
- * from a TypeScript type.
415
- *
416
- * The returned {@link ILlmSchema} type would be specified by the `Model` argument,
417
- * and here is the list of available `Model` types with their corresponding LLM schema:
418
- *
419
- * - LLM provider schemas
420
- * - `chatgpt`: [`IChatGptSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IChatGptSchema.ts)
421
- * - `claude`: [`IClaudeSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IClaudeSchema.ts)
422
- * - `gemini`: [`IGeminiSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IGeminiSchema.ts)
423
- * - `llama`: [`ILlamaSchema`](https://github.com/samchon/openapi/blob/master/src/structures/ILlamaSchema.ts)
424
- * - Midldle layer schemas
425
- * - `3.0`: [`ILlmSchemaV3`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3.ts)
426
- * - `3.1`: [`ILlmSchemaV3_1`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3_1.ts)
427
- *
428
- * If you actually want to perform the LLM function calling with TypeScript functions,
429
- * you can do it with the {@link application} function. Otherwise you hope to perform the
430
- * structured output, {@link parameters} function is better. Let's enjoy the LLM function calling
431
- * and structured output with the native TypeScript functions and types.
432
- *
433
- * > **What LLM function calling is?
434
- * >
435
- * > LLM (Large Language Model) selects propert function and fill the arguments,
436
- * > but actuall function call execution is not by LLM, but by you.
437
- * >
438
- * > In nowadays, most LLM (Large Language Model) like OpenAI are supporting
439
- * > "function calling" feature. The "function calling" means that LLM automatically selects
440
- * > a proper function and compose parameter values from the user's chatting text.
441
- * >
442
- * > When LLM selects the proper function and its arguments, you just call the function
443
- * > with the arguments. And then informs the return value to the LLM by system prompt,
444
- * > LLM will continue the next conversation based on the return value.
445
- *
446
- * @template T Target type
447
- * @template Model LLM schema model
448
- * @template Config Configuration of LLM schema composition
449
- * @returns LLM schema
450
- * @reference https://platform.openai.com/docs/guides/function-calling
451
- * @reference https://platform.openai.com/docs/guides/structured-outputs
452
- * @author Jeongho Nam - https://github.com/samchon
453
- */
454
- export function schema<
455
- T,
456
- Model extends ILlmSchema.Model,
457
- Config extends Partial<ILlmSchema.ModelConfig[Model]> = {},
458
- >(
459
- ...$defs: Extract<
460
- ILlmSchema.ModelSchema[Model],
461
- { $ref: string }
462
- > extends never
463
- ? []
464
- : [Record<string, ILlmSchema.ModelSchema[Model]>]
465
- ): ILlmSchema.ModelSchema[Model];
466
-
467
- /**
468
- * @internal
469
- */
470
- export function schema(): never {
471
- halt("schema");
472
- }
473
-
474
- /**
475
- * @internal
476
- */
477
- function halt(name: string): never {
478
- throw new Error(
479
- `Error on typia.llm.${name}(): no transform has been configured. Read and follow https://typia.io/docs/setup please.`,
480
- );
481
- }
1
+ import { ILlmApplication, ILlmSchema } from "@samchon/openapi";
2
+
3
+ import { ILlmApplicationOfValidate } from "./module";
4
+
5
+ /**
6
+ * > You must configure the generic argument `App`.
7
+ *
8
+ * TypeScript functions to LLM function calling application with validators.
9
+ *
10
+ * Creates an application of LLM (Large Language Model) function calling application
11
+ * from a TypeScript class or interface type containing the target functions to be
12
+ * called by the LLM function calling feature.
13
+ *
14
+ * If you put the returned {@link ILlmApplicationOfValidate.functions} objects to the
15
+ * LLM provider like [OpenAI (ChatGPT)](https://openai.com/), the LLM will automatically
16
+ * select the proper function and fill its arguments from the conversation
17
+ * (maybe chatting text) with user (human). This is the concept of the LLM function calling.
18
+ *
19
+ * Additionally, the LLM function calling sometimes take a mistake that composing wrong typed
20
+ * {@link ILlmFunctionOfValidate.parameters}. In that case, deliver return value of the
21
+ * {@link ILlmFunctionOfValidate.validate} function, then LLM provider will correct the
22
+ * parameters at the next conversation. The {@link ILlmFunctionOfValidate.validate} function
23
+ * is a validator function reporting the detailed information about the wrong typed parameters.
24
+ *
25
+ * By the way, there can be some parameters (or their nested properties) which must be
26
+ * composed by human, not by LLM. File uploading feature or some sensitive information
27
+ * like secrety key (password) are the examples. In that case, you can separate the
28
+ * function parameters to both LLM and human sides by configuring the
29
+ * {@link ILlmApplicationOfValidate.IOptions.separate} property. The separated parameters
30
+ * are assigned to the {@link ILlmFunctionOfValidate.separated} property.
31
+ *
32
+ * For reference, the actual function call execution is not by LLM, but by you.
33
+ * When the LLM selects the proper function and fills the arguments, you just call
34
+ * the function with the LLM prepared arguments. And then informs the return value to
35
+ * the LLM by system prompt. The LLM will continue the next conversation based on
36
+ * the return value.
37
+ *
38
+ * Additionally, if you've configured {@link ILlmApplicationOfValidate.IOptions.separate},
39
+ * so that the parameters are separated to human and LLM sides, you can merge these
40
+ * humand and LLM sides' parameters into one through {@link HttpLlm.mergeParameters}
41
+ * before the actual LLM function call execution.
42
+ *
43
+ * Here is the list of available `Model` types with their corresponding LLM schema.
44
+ * Reading the following list, and determine the `Model` type considering the
45
+ * characteristics of the target LLM provider.
46
+ *
47
+ * - LLM provider schemas
48
+ * - `chatgpt`: [`IChatGptSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IChatGptSchema.ts)
49
+ * - `claude`: [`IClaudeSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IClaudeSchema.ts)
50
+ * - `gemini`: [`IGeminiSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IGeminiSchema.ts)
51
+ * - `llama`: [`ILlamaSchema`](https://github.com/samchon/openapi/blob/master/src/structures/ILlamaSchema.ts)
52
+ * - Midldle layer schemas
53
+ * - `3.0`: [`ILlmSchemaV3`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3.ts)
54
+ * - `3.1`: [`ILlmSchemaV3_1`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3_1.ts)
55
+ *
56
+ * @template App Target class or interface type collecting the functions to call
57
+ * @template Model LLM schema model
58
+ * @template Config Configuration of LLM schema composition
59
+ * @param options Options for the LLM application construction
60
+ * @returns Application of LLM function calling schemas
61
+ * @reference https://platform.openai.com/docs/guides/function-calling
62
+ * @author Jeongho Nam - https://github.com/samchon
63
+ */
64
+ export function applicationOfValidate(
65
+ options?: Partial<Pick<ILlmApplicationOfValidate.IOptions<any>, "separate">>,
66
+ ): never;
67
+
68
+ /**
69
+ * TypeScript functions to LLM function calling application with validators.
70
+ *
71
+ * Creates an application of LLM (Large Language Model) function calling application
72
+ * from a TypeScript class or interface type containing the target functions to be
73
+ * called by the LLM function calling feature.
74
+ *
75
+ * If you put the returned {@link ILlmApplicationOfValidate.functions} objects to the
76
+ * LLM provider like [OpenAI (ChatGPT)](https://openai.com/), the LLM will automatically
77
+ * select the proper function and fill its arguments from the conversation
78
+ * (maybe chatting text) with user (human). This is the concept of the LLM function calling.
79
+ *
80
+ * Additionally, the LLM function calling sometimes makes a mistake of composing wrongly typed
81
+ * {@link ILlmFunctionOfValidate.parameters}. In that case, deliver return value of the
82
+ * {@link ILlmFunctionOfValidate.validate} function, then LLM provider will correct the
83
+ * parameters at the next conversation. The {@link ILlmFunctionOfValidate.validate} function
84
+ * is a validator function reporting the detailed information about the wrong typed parameters.
85
+ *
86
+ * By the way, there can be some parameters (or their nested properties) which must be
87
+ * composed by human, not by LLM. File uploading feature or some sensitive information
88
+ * like secret key (password) are the examples. In that case, you can separate the
89
+ * function parameters to both LLM and human sides by configuring the
90
+ * {@link ILlmApplicationOfValidate.IOptions.separate} property. The separated parameters
91
+ * are assigned to the {@link ILlmFunctionOfValidate.separated} property.
92
+ *
93
+ * For reference, the actual function call execution is not by LLM, but by you.
94
+ * When the LLM selects the proper function and fills the arguments, you just call
95
+ * the function with the LLM prepared arguments. And then informs the return value to
96
+ * the LLM by system prompt. The LLM will continue the next conversation based on
97
+ * the return value.
98
+ *
99
+ * Additionally, if you've configured {@link ILlmApplicationOfValidate.IOptions.separate},
100
+ * so that the parameters are separated to human and LLM sides, you can merge these
101
+ * human and LLM sides' parameters into one through {@link HttpLlm.mergeParameters}
102
+ * before the actual LLM function call execution.
103
+ *
104
+ * Here is the list of available `Model` types with their corresponding LLM schema.
105
+ * Read the following list and determine the `Model` type considering the
106
+ * characteristics of the target LLM provider.
107
+ *
108
+ * - LLM provider schemas
109
+ * - `chatgpt`: [`IChatGptSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IChatGptSchema.ts)
110
+ * - `claude`: [`IClaudeSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IClaudeSchema.ts)
111
+ * - `gemini`: [`IGeminiSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IGeminiSchema.ts)
112
+ * - `llama`: [`ILlamaSchema`](https://github.com/samchon/openapi/blob/master/src/structures/ILlamaSchema.ts)
113
+ * - Middle layer schemas
114
+ * - `3.0`: [`ILlmSchemaV3`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3.ts)
115
+ * - `3.1`: [`ILlmSchemaV3_1`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3_1.ts)
116
+ *
117
+ * @template App Target class or interface type collecting the functions to call
118
+ * @template Model LLM schema model
119
+ * @template Config Configuration of LLM schema composition
120
+ * @param options Options for the LLM application construction
121
+ * @returns Application of LLM function calling schemas
122
+ * @reference https://platform.openai.com/docs/guides/function-calling
123
+ * @author Jeongho Nam - https://github.com/samchon
124
+ */
125
+ export function applicationOfValidate<
126
+ App extends Record<string, any>,
127
+ Model extends ILlmSchema.Model,
128
+ Config extends Partial<ILlmSchema.ModelConfig[Model]> = {},
129
+ >(
130
+ options?: Partial<
131
+ Pick<ILlmApplicationOfValidate.IOptions<Model>, "separate">
132
+ >,
133
+ ): ILlmApplicationOfValidate<Model>;
134
+
135
+ /**
136
+ * @internal
137
+ */
138
+ export function applicationOfValidate(): never {
139
+ halt("applicationOfValidate");
140
+ }
141
+
142
+ /**
143
+ * > You must configure the generic argument `App`.
144
+ *
145
+ * TypeScript functions to LLM function calling application.
146
+ *
147
+ * Creates an application of LLM (Large Language Model) function calling application
148
+ * from a TypeScript class or interface type containing the target functions to be
149
+ * called by the LLM function calling feature.
150
+ *
151
+ * If you put the returned {@link ILlmApplication.functions} objects to the LLM provider
152
+ * like [OpenAI (ChatGPT)](https://openai.com/), the LLM will automatically select the
153
+ * proper function and fill its arguments from the conversation (maybe chatting text)
154
+ * with user (human). This is the concept of the LLM function calling.
155
+ *
156
+ * By the way, there can be some parameters (or their nested properties) which must be
157
+ * composed by human, not by LLM. File uploading feature or some sensitive information
158
+ * like secret key (password) are the examples. In that case, you can separate the
159
+ * function parameters to both LLM and human sides by configuring the
160
+ * {@link ILlmApplication.IOptions.separate} property. The separated parameters are
161
+ * assigned to the {@link ILlmFunction.separated} property.
162
+ *
163
+ * For reference, the actual function call execution is not by LLM, but by you.
164
+ * When the LLM selects the proper function and fills the arguments, you just call
165
+ * the function with the LLM prepared arguments. And then informs the return value to
166
+ * the LLM by system prompt. The LLM will continue the next conversation based on
167
+ * the return value.
168
+ *
169
+ * Additionally, if you've configured {@link ILlmApplication.IOptions.separate},
170
+ * so that the parameters are separated to human and LLM sides, you can merge these
171
+ * human and LLM sides' parameters into one through {@link HttpLlm.mergeParameters}
172
+ * before the actual LLM function call execution.
173
+ *
174
+ * Here is the list of available `Model` types with their corresponding LLM schema.
175
+ * Read the following list and determine the `Model` type considering the
176
+ * characteristics of the target LLM provider.
177
+ *
178
+ * - LLM provider schemas
179
+ * - `chatgpt`: [`IChatGptSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IChatGptSchema.ts)
180
+ * - `claude`: [`IClaudeSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IClaudeSchema.ts)
181
+ * - `gemini`: [`IGeminiSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IGeminiSchema.ts)
182
+ * - `llama`: [`ILlamaSchema`](https://github.com/samchon/openapi/blob/master/src/structures/ILlamaSchema.ts)
183
+ * - Middle layer schemas
184
+ * - `3.0`: [`ILlmSchemaV3`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3.ts)
185
+ * - `3.1`: [`ILlmSchemaV3_1`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3_1.ts)
186
+ *
187
+ * @template App Target class or interface type collecting the functions to call
188
+ * @template Model LLM schema model
189
+ * @template Config Configuration of LLM schema composition
190
+ * @param options Options for the LLM application construction
191
+ * @returns Application of LLM function calling schemas
192
+ * @reference https://platform.openai.com/docs/guides/function-calling
193
+ * @author Jeongho Nam - https://github.com/samchon
194
+ */
195
+ export function application(
196
+ options?: Partial<Pick<ILlmApplication.IOptions<any>, "separate">>,
197
+ ): never;
198
+
199
+ /**
200
+ * TypeScript functions to LLM function calling application.
201
+ *
202
+ * Creates an application of LLM (Large Language Model) function calling application
203
+ * from a TypeScript class or interface type containing the target functions to be
204
+ * called by the LLM function calling feature.
205
+ *
206
+ * If you put the returned {@link ILlmApplication.functions} objects to the LLM provider
207
+ * like [OpenAI (ChatGPT)](https://openai.com/), the LLM will automatically select the
208
+ * proper function and fill its arguments from the conversation (maybe chatting text)
209
+ * with user (human). This is the concept of the LLM function calling.
210
+ *
211
+ * By the way, there can be some parameters (or their nested properties) which must be
212
+ * composed by human, not by LLM. File uploading feature or some sensitive information
213
+ * like secret key (password) are the examples. In that case, you can separate the
214
+ * function parameters to both LLM and human sides by configuring the
215
+ * {@link ILlmApplication.IOptions.separate} property. The separated parameters are
216
+ * assigned to the {@link ILlmFunction.separated} property.
217
+ *
218
+ * For reference, the actual function call execution is not by LLM, but by you.
219
+ * When the LLM selects the proper function and fills the arguments, you just call
220
+ * the function with the LLM prepared arguments. And then informs the return value to
221
+ * the LLM by system prompt. The LLM will continue the next conversation based on
222
+ * the return value.
223
+ *
224
+ * Additionally, if you've configured {@link ILlmApplication.IOptions.separate},
225
+ * so that the parameters are separated to human and LLM sides, you can merge these
226
+ * human and LLM sides' parameters into one through {@link HttpLlm.mergeParameters}
227
+ * before the actual LLM function call execution.
228
+ *
229
+ * Here is the list of available `Model` types with their corresponding LLM schema.
230
+ * Read the following list and determine the `Model` type considering the
231
+ * characteristics of the target LLM provider.
232
+ *
233
+ * - LLM provider schemas
234
+ * - `chatgpt`: [`IChatGptSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IChatGptSchema.ts)
235
+ * - `claude`: [`IClaudeSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IClaudeSchema.ts)
236
+ * - `gemini`: [`IGeminiSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IGeminiSchema.ts)
237
+ * - `llama`: [`ILlamaSchema`](https://github.com/samchon/openapi/blob/master/src/structures/ILlamaSchema.ts)
238
+ * - Middle layer schemas
239
+ * - `3.0`: [`ILlmSchemaV3`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3.ts)
240
+ * - `3.1`: [`ILlmSchemaV3_1`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3_1.ts)
241
+ *
242
+ * @template App Target class or interface type collecting the functions to call
243
+ * @template Model LLM schema model
244
+ * @template Config Configuration of LLM schema composition
245
+ * @param options Options for the LLM application construction
246
+ * @returns Application of LLM function calling schemas
247
+ * @reference https://platform.openai.com/docs/guides/function-calling
248
+ * @author Jeongho Nam - https://github.com/samchon
249
+ */
250
+ export function application<
251
+ App extends Record<string, any>,
252
+ Model extends ILlmSchema.Model,
253
+ Config extends Partial<ILlmSchema.ModelConfig[Model]> = {},
254
+ >(
255
+ options?: Partial<Pick<ILlmApplication.IOptions<Model>, "separate">>,
256
+ ): ILlmApplication<Model>;
257
+
258
+ /**
259
+ * @internal
260
+ */
261
+ export function application(): never {
262
+ halt("application");
263
+ }
264
+
265
+ /**
266
+ * > You must configure the generic argument `Parameters`.
267
+ *
268
+ * TypeScript parameters to LLM parameters schema.
269
+ *
270
+ * Creates an LLM (Large Language Model) parameters schema, a type metadata that is used in the
271
+ * [LLM function calling](https://platform.openai.com/docs/guides/function-calling)
272
+ * and [LLM structured outputs](https://platform.openai.com/docs/guides/structured-outputs),
273
+ * from a TypeScript parameters type.
274
+ *
275
+ * For references, LLM identifies only keyworded arguments, not positional arguments.
276
+ * Therefore, the TypeScript parameters type must be an object type, and its properties
277
+ * must be static. If there are dynamic properties, it would be a compilation error.
278
+ *
279
+ * Also, such parameters type can be utilized not only for the LLM function calling,
280
+ * but also for the LLM structured outputs. The LLM structured outputs is a feature
281
+ * that LLM (Large Language Model) can generate a structured output, not only a plain
282
+ * text, by filling the parameters from the conversation (maybe chatting text) with user
283
+ * (human).
284
+ *
285
+ * Here is the list of available `Model` types with their corresponding LLM schema.
286
+ * Read the following list and determine the `Model` type considering the
287
+ * characteristics of the target LLM provider.
288
+ *
289
+ * - LLM provider schemas
290
+ * - `chatgpt`: [`IChatGptSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IChatGptSchema.ts)
291
+ * - `claude`: [`IClaudeSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IClaudeSchema.ts)
292
+ * - `gemini`: [`IGeminiSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IGeminiSchema.ts)
293
+ * - `llama`: [`ILlamaSchema`](https://github.com/samchon/openapi/blob/master/src/structures/ILlamaSchema.ts)
294
+ * - Middle layer schemas
295
+ * - `3.0`: [`ILlmSchemaV3`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3.ts)
296
+ * - `3.1`: [`ILlmSchemaV3_1`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3_1.ts)
297
+ *
298
+ * @template Parameters Target parameters type
299
+ * @template Model LLM schema model
300
+ * @template Config Configuration of LLM schema composition
301
+ * @returns LLM parameters schema
302
+ * @reference https://platform.openai.com/docs/guides/function-calling
303
+ * @reference https://platform.openai.com/docs/guides/structured-outputs
304
+ */
305
+ export function parameters(): never;
306
+
307
+ /**
308
+ * TypeScript parameters to LLM parameters schema.
309
+ *
310
+ * Creates an LLM (Large Language Model) parameters schema, a type metadata that is used in the
311
+ * [LLM function calling](https://platform.openai.com/docs/guides/function-calling)
312
+ * and [LLM structured outputs](https://platform.openai.com/docs/guides/structured-outputs),
313
+ * from a TypeScript parameters type.
314
+ *
315
+ * For references, LLM identifies only keyworded arguments, not positional arguments.
316
+ * Therefore, the TypeScript parameters type must be an object type, and its properties
317
+ * must be static. If there are dynamic properties, it would be a compilation error.
318
+ *
319
+ * Also, such parameters type can be utilized not only for the LLM function calling,
320
+ * but also for the LLM structured outputs. The LLM structured outputs is a feature
321
+ * that LLM (Large Language Model) can generate a structured output, not only a plain
322
+ * text, by filling the parameters from the conversation (maybe chatting text) with user
323
+ * (human).
324
+ *
325
+ * Here is the list of available `Model` types with their corresponding LLM schema.
326
+ * Read the following list and determine the `Model` type considering the
327
+ * characteristics of the target LLM provider.
328
+ *
329
+ * - LLM provider schemas
330
+ * - `chatgpt`: [`IChatGptSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IChatGptSchema.ts)
331
+ * - `claude`: [`IClaudeSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IClaudeSchema.ts)
332
+ * - `gemini`: [`IGeminiSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IGeminiSchema.ts)
333
+ * - `llama`: [`ILlamaSchema`](https://github.com/samchon/openapi/blob/master/src/structures/ILlamaSchema.ts)
334
+ * - Middle layer schemas
335
+ * - `3.0`: [`ILlmSchemaV3`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3.ts)
336
+ * - `3.1`: [`ILlmSchemaV3_1`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3_1.ts)
337
+ *
338
+ * @template Parameters Target parameters type
339
+ * @template Model LLM schema model
340
+ * @template Config Configuration of LLM schema composition
341
+ * @returns LLM parameters schema
342
+ * @reference https://platform.openai.com/docs/guides/function-calling
343
+ * @reference https://platform.openai.com/docs/guides/structured-outputs
344
+ */
345
+ export function parameters<
346
+ Parameters extends Record<string, any>,
347
+ Model extends ILlmSchema.Model,
348
+ Config extends Partial<ILlmSchema.ModelConfig[Model]> = {},
349
+ >(): ILlmSchema.ModelParameters[Model];
350
+
351
+ /**
352
+ * @internal
353
+ */
354
+ export function parameters(): never {
355
+ halt("parameters");
356
+ }
357
+
358
+ /**
359
+ * > You must configure the generic argument `T`.
360
+ *
361
+ * TypeScript type to LLM type schema.
362
+ *
363
+ * Creates an LLM (Large Language Model) type schema, a type metadata that is used in the
364
+ * [LLM function calling](https://platform.openai.com/docs/guides/function-calling),
365
+ * from a TypeScript type.
366
+ *
367
+ * The returned {@link ILlmSchema} type would be specified by the `Model` argument,
368
+ * and here is the list of available `Model` types with their corresponding LLM schema.
369
+ * Read the following list and determine the `Model` type considering the
370
+ * characteristics of the target LLM provider.
371
+ *
372
+ * - LLM provider schemas
373
+ * - `chatgpt`: [`IChatGptSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IChatGptSchema.ts)
374
+ * - `claude`: [`IClaudeSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IClaudeSchema.ts)
375
+ * - `gemini`: [`IGeminiSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IGeminiSchema.ts)
376
+ * - `llama`: [`ILlamaSchema`](https://github.com/samchon/openapi/blob/master/src/structures/ILlamaSchema.ts)
377
+ * - Middle layer schemas
378
+ * - `3.0`: [`ILlmSchemaV3`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3.ts)
379
+ * - `3.1`: [`ILlmSchemaV3_1`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3_1.ts)
380
+ *
381
+ * If you actually want to perform the LLM function calling with TypeScript functions,
382
+ * you can do it with the {@link application} function. Otherwise you hope to perform the
383
+ * structured output, {@link parameters} function is better. Let's enjoy the LLM function calling
384
+ * and structured output with the native TypeScript functions and types.
385
+ *
386
+ * > **What LLM function calling is?**
387
+ * >
388
+ * > LLM (Large Language Model) selects the proper function and fills the arguments,
389
+ * > but actual function call execution is not by LLM, but by you.
390
+ * >
391
+ * > In nowadays, most LLM (Large Language Model) like OpenAI are supporting
392
+ * > "function calling" feature. The "function calling" means that LLM automatically selects
393
+ * > a proper function and compose parameter values from the user's chatting text.
394
+ * >
395
+ * > When LLM selects the proper function and its arguments, you just call the function
396
+ * > with the arguments. And then informs the return value to the LLM by system prompt,
397
+ * > LLM will continue the next conversation based on the return value.
398
+ *
399
+ * @template T Target type
400
+ * @template Model LLM schema model
401
+ * @template Config Configuration of LLM schema composition
402
+ * @returns LLM schema
403
+ * @reference https://platform.openai.com/docs/guides/function-calling
404
+ * @reference https://platform.openai.com/docs/guides/structured-outputs
405
+ * @author Jeongho Nam - https://github.com/samchon
406
+ */
407
+ export function schema(): never;
408
+
409
+ /**
410
+ * TypeScript type to LLM type schema.
411
+ *
412
+ * Creates an LLM (Large Language Model) type schema, a type metadata that is used in the
413
+ * [LLM function calling](https://platform.openai.com/docs/guides/function-calling),
414
+ * from a TypeScript type.
415
+ *
416
+ * The returned {@link ILlmSchema} type would be specified by the `Model` argument,
417
+ * and here is the list of available `Model` types with their corresponding LLM schema:
418
+ *
419
+ * - LLM provider schemas
420
+ * - `chatgpt`: [`IChatGptSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IChatGptSchema.ts)
421
+ * - `claude`: [`IClaudeSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IClaudeSchema.ts)
422
+ * - `gemini`: [`IGeminiSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IGeminiSchema.ts)
423
+ * - `llama`: [`ILlamaSchema`](https://github.com/samchon/openapi/blob/master/src/structures/ILlamaSchema.ts)
424
+ * - Middle layer schemas
425
+ * - `3.0`: [`ILlmSchemaV3`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3.ts)
426
+ * - `3.1`: [`ILlmSchemaV3_1`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3_1.ts)
427
+ *
428
+ * If you actually want to perform the LLM function calling with TypeScript functions,
429
+ * you can do it with the {@link application} function. Otherwise you hope to perform the
430
+ * structured output, {@link parameters} function is better. Let's enjoy the LLM function calling
431
+ * and structured output with the native TypeScript functions and types.
432
+ *
433
+ * > **What LLM function calling is?**
434
+ * >
435
+ * > LLM (Large Language Model) selects the proper function and fills the arguments,
436
+ * > but actual function call execution is not by LLM, but by you.
437
+ * >
438
+ * > In nowadays, most LLM (Large Language Model) like OpenAI are supporting
439
+ * > "function calling" feature. The "function calling" means that LLM automatically selects
440
+ * > a proper function and compose parameter values from the user's chatting text.
441
+ * >
442
+ * > When LLM selects the proper function and its arguments, you just call the function
443
+ * > with the arguments. And then informs the return value to the LLM by system prompt,
444
+ * > LLM will continue the next conversation based on the return value.
445
+ *
446
+ * @template T Target type
447
+ * @template Model LLM schema model
448
+ * @template Config Configuration of LLM schema composition
449
+ * @returns LLM schema
450
+ * @reference https://platform.openai.com/docs/guides/function-calling
451
+ * @reference https://platform.openai.com/docs/guides/structured-outputs
452
+ * @author Jeongho Nam - https://github.com/samchon
453
+ */
454
+ export function schema<
455
+ T,
456
+ Model extends ILlmSchema.Model,
457
+ Config extends Partial<ILlmSchema.ModelConfig[Model]> = {},
458
+ >(
459
+ ...$defs: Extract<
460
+ ILlmSchema.ModelSchema[Model],
461
+ { $ref: string }
462
+ > extends never
463
+ ? []
464
+ : [Record<string, ILlmSchema.ModelSchema[Model]>]
465
+ ): ILlmSchema.ModelSchema[Model];
466
+
467
+ /**
468
+ * @internal
469
+ */
470
+ export function schema(): never {
471
+ halt("schema");
472
+ }
473
+
474
+ /**
475
+ * @internal
476
+ */
477
+ function halt(name: string): never {
478
+ throw new Error(
479
+ `Error on typia.llm.${name}(): no transform has been configured. Read and follow https://typia.io/docs/setup please.`,
480
+ );
481
+ }