typia 9.7.1 → 9.7.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (411) hide show
  1. package/lib/AssertionGuard.d.mts +27 -25
  2. package/lib/AssertionGuard.d.ts +27 -25
  3. package/lib/CamelCase.d.mts +1 -1
  4. package/lib/CamelCase.d.ts +1 -1
  5. package/lib/IRandomGenerator.d.mts +44 -42
  6. package/lib/IRandomGenerator.d.ts +44 -42
  7. package/lib/IReadableURLSearchParams.d.mts +2 -2
  8. package/lib/IReadableURLSearchParams.d.ts +2 -2
  9. package/lib/PascalCase.d.mts +1 -1
  10. package/lib/PascalCase.d.ts +1 -1
  11. package/lib/Primitive.d.mts +20 -22
  12. package/lib/Primitive.d.ts +20 -22
  13. package/lib/Resolved.d.mts +16 -18
  14. package/lib/Resolved.d.ts +16 -18
  15. package/lib/SnakeCase.d.mts +3 -2
  16. package/lib/SnakeCase.d.ts +3 -2
  17. package/lib/TypeGuardError.d.mts +88 -61
  18. package/lib/TypeGuardError.d.ts +88 -61
  19. package/lib/TypeGuardError.js +40 -29
  20. package/lib/TypeGuardError.js.map +1 -1
  21. package/lib/TypeGuardError.mjs +70 -48
  22. package/lib/factories/MetadataCollection.js +4 -12
  23. package/lib/factories/MetadataCollection.js.map +1 -1
  24. package/lib/factories/MetadataCollection.mjs +4 -12
  25. package/lib/factories/MetadataCommentTagFactory.js +5 -15
  26. package/lib/factories/MetadataCommentTagFactory.js.map +1 -1
  27. package/lib/factories/MetadataCommentTagFactory.mjs +5 -15
  28. package/lib/factories/MetadataFactory.js +1 -3
  29. package/lib/factories/MetadataFactory.js.map +1 -1
  30. package/lib/factories/MetadataFactory.mjs +1 -3
  31. package/lib/factories/ProtobufFactory.js +1 -3
  32. package/lib/factories/ProtobufFactory.js.map +1 -1
  33. package/lib/factories/ProtobufFactory.mjs +1 -3
  34. package/lib/functional.d.mts +196 -195
  35. package/lib/functional.d.ts +196 -195
  36. package/lib/functional.js +18 -54
  37. package/lib/functional.js.map +1 -1
  38. package/lib/functional.mjs +18 -54
  39. package/lib/http.d.mts +303 -319
  40. package/lib/http.d.ts +303 -319
  41. package/lib/http.js +26 -78
  42. package/lib/http.js.map +1 -1
  43. package/lib/http.mjs +26 -78
  44. package/lib/internal/_ProtobufReader.d.mts +3 -9
  45. package/lib/internal/_ProtobufReader.d.ts +3 -9
  46. package/lib/internal/_ProtobufReader.js.map +1 -1
  47. package/lib/internal/_ProtobufReader.mjs +3 -9
  48. package/lib/internal/_ProtobufSizer.d.mts +4 -12
  49. package/lib/internal/_ProtobufSizer.d.ts +4 -12
  50. package/lib/internal/_ProtobufSizer.js.map +1 -1
  51. package/lib/internal/_ProtobufSizer.mjs +4 -12
  52. package/lib/internal/_ProtobufWriter.d.mts +5 -15
  53. package/lib/internal/_ProtobufWriter.d.ts +5 -15
  54. package/lib/internal/_ProtobufWriter.js.map +1 -1
  55. package/lib/internal/_ProtobufWriter.mjs +5 -15
  56. package/lib/internal/_jsonStringifyString.d.mts +4 -4
  57. package/lib/internal/_jsonStringifyString.d.ts +4 -4
  58. package/lib/internal/_jsonStringifyString.js +4 -4
  59. package/lib/internal/_jsonStringifyString.mjs +4 -4
  60. package/lib/json.d.mts +174 -195
  61. package/lib/json.d.ts +174 -195
  62. package/lib/json.js +16 -48
  63. package/lib/json.js.map +1 -1
  64. package/lib/json.mjs +16 -48
  65. package/lib/llm.d.mts +275 -192
  66. package/lib/llm.d.ts +275 -192
  67. package/lib/llm.js +4 -12
  68. package/lib/llm.js.map +1 -1
  69. package/lib/llm.mjs +4 -12
  70. package/lib/misc.d.mts +149 -172
  71. package/lib/misc.d.ts +149 -172
  72. package/lib/misc.js +17 -51
  73. package/lib/misc.js.map +1 -1
  74. package/lib/misc.mjs +17 -51
  75. package/lib/module.d.mts +263 -275
  76. package/lib/module.d.ts +263 -275
  77. package/lib/module.js +18 -54
  78. package/lib/module.js.map +1 -1
  79. package/lib/module.mjs +18 -54
  80. package/lib/notations.d.mts +153 -174
  81. package/lib/notations.d.ts +153 -174
  82. package/lib/notations.js +24 -72
  83. package/lib/notations.js.map +1 -1
  84. package/lib/notations.mjs +24 -72
  85. package/lib/programmers/FeatureProgrammer.d.mts +19 -41
  86. package/lib/programmers/FeatureProgrammer.d.ts +19 -41
  87. package/lib/programmers/FeatureProgrammer.js.map +1 -1
  88. package/lib/programmers/ImportProgrammer.js +3 -9
  89. package/lib/programmers/ImportProgrammer.js.map +1 -1
  90. package/lib/programmers/ImportProgrammer.mjs +3 -9
  91. package/lib/programmers/RandomProgrammer.js +6 -0
  92. package/lib/programmers/RandomProgrammer.js.map +1 -1
  93. package/lib/programmers/RandomProgrammer.mjs +6 -0
  94. package/lib/programmers/helpers/ProtobufWire.d.mts +13 -13
  95. package/lib/programmers/helpers/ProtobufWire.d.ts +13 -13
  96. package/lib/programmers/internal/check_array_length.js +2 -6
  97. package/lib/programmers/internal/check_array_length.js.map +1 -1
  98. package/lib/programmers/internal/check_array_length.mjs +2 -6
  99. package/lib/programmers/internal/check_bigint.js +2 -6
  100. package/lib/programmers/internal/check_bigint.js.map +1 -1
  101. package/lib/programmers/internal/check_bigint.mjs +2 -6
  102. package/lib/programmers/internal/check_dynamic_key.js +2 -6
  103. package/lib/programmers/internal/check_dynamic_key.js.map +1 -1
  104. package/lib/programmers/internal/check_dynamic_key.mjs +2 -6
  105. package/lib/programmers/internal/check_dynamic_properties.js +3 -9
  106. package/lib/programmers/internal/check_dynamic_properties.js.map +1 -1
  107. package/lib/programmers/internal/check_dynamic_properties.mjs +3 -9
  108. package/lib/programmers/internal/check_everything.js +1 -3
  109. package/lib/programmers/internal/check_everything.js.map +1 -1
  110. package/lib/programmers/internal/check_everything.mjs +1 -3
  111. package/lib/programmers/internal/check_native.js +2 -6
  112. package/lib/programmers/internal/check_native.js.map +1 -1
  113. package/lib/programmers/internal/check_native.mjs +2 -6
  114. package/lib/programmers/internal/check_number.js +2 -6
  115. package/lib/programmers/internal/check_number.js.map +1 -1
  116. package/lib/programmers/internal/check_number.mjs +2 -6
  117. package/lib/programmers/internal/check_object.js +2 -6
  118. package/lib/programmers/internal/check_object.js.map +1 -1
  119. package/lib/programmers/internal/check_object.mjs +2 -6
  120. package/lib/programmers/internal/check_string.js +2 -6
  121. package/lib/programmers/internal/check_string.js.map +1 -1
  122. package/lib/programmers/internal/check_string.mjs +2 -6
  123. package/lib/programmers/internal/check_template.js +1 -3
  124. package/lib/programmers/internal/check_template.js.map +1 -1
  125. package/lib/programmers/internal/check_template.mjs +1 -3
  126. package/lib/programmers/internal/check_union_array_like.js +1 -3
  127. package/lib/programmers/internal/check_union_array_like.js.map +1 -1
  128. package/lib/programmers/internal/check_union_array_like.mjs +1 -3
  129. package/lib/programmers/internal/decode_union_object.js +2 -6
  130. package/lib/programmers/internal/decode_union_object.js.map +1 -1
  131. package/lib/programmers/internal/decode_union_object.mjs +2 -6
  132. package/lib/programmers/internal/feature_object_entries.js +1 -3
  133. package/lib/programmers/internal/feature_object_entries.js.map +1 -1
  134. package/lib/programmers/internal/feature_object_entries.mjs +1 -3
  135. package/lib/programmers/internal/json_schema_escaped.js +2 -6
  136. package/lib/programmers/internal/json_schema_escaped.js.map +1 -1
  137. package/lib/programmers/internal/json_schema_escaped.mjs +2 -6
  138. package/lib/programmers/internal/json_schema_object.js +3 -9
  139. package/lib/programmers/internal/json_schema_object.js.map +1 -1
  140. package/lib/programmers/internal/json_schema_object.mjs +3 -9
  141. package/lib/programmers/internal/metadata_to_pattern.js +1 -3
  142. package/lib/programmers/internal/metadata_to_pattern.js.map +1 -1
  143. package/lib/programmers/internal/metadata_to_pattern.mjs +1 -3
  144. package/lib/programmers/internal/postfix_of_tuple.js +1 -3
  145. package/lib/programmers/internal/postfix_of_tuple.js.map +1 -1
  146. package/lib/programmers/internal/postfix_of_tuple.mjs +1 -3
  147. package/lib/programmers/internal/prune_object_properties.js +1 -3
  148. package/lib/programmers/internal/prune_object_properties.js.map +1 -1
  149. package/lib/programmers/internal/prune_object_properties.mjs +1 -3
  150. package/lib/programmers/internal/stringify_dynamic_properties.js +2 -6
  151. package/lib/programmers/internal/stringify_dynamic_properties.js.map +1 -1
  152. package/lib/programmers/internal/stringify_dynamic_properties.mjs +2 -6
  153. package/lib/programmers/internal/stringify_native.js +1 -3
  154. package/lib/programmers/internal/stringify_native.js.map +1 -1
  155. package/lib/programmers/internal/stringify_native.mjs +1 -3
  156. package/lib/programmers/internal/stringify_regular_properties.js +2 -6
  157. package/lib/programmers/internal/stringify_regular_properties.js.map +1 -1
  158. package/lib/programmers/internal/stringify_regular_properties.mjs +2 -6
  159. package/lib/programmers/internal/template_to_pattern.js +1 -3
  160. package/lib/programmers/internal/template_to_pattern.js.map +1 -1
  161. package/lib/programmers/internal/template_to_pattern.mjs +1 -3
  162. package/lib/programmers/internal/wrap_metadata_rest_tuple.js +1 -3
  163. package/lib/programmers/internal/wrap_metadata_rest_tuple.js.map +1 -1
  164. package/lib/programmers/internal/wrap_metadata_rest_tuple.mjs +1 -3
  165. package/lib/programmers/json/JsonStringifyProgrammer.js +2 -2
  166. package/lib/programmers/json/JsonStringifyProgrammer.js.map +1 -1
  167. package/lib/programmers/json/JsonStringifyProgrammer.mjs +2 -2
  168. package/lib/protobuf.d.mts +290 -297
  169. package/lib/protobuf.d.ts +290 -297
  170. package/lib/protobuf.js +17 -51
  171. package/lib/protobuf.js.map +1 -1
  172. package/lib/protobuf.mjs +17 -51
  173. package/lib/reflect.d.mts +2 -4
  174. package/lib/reflect.d.ts +2 -4
  175. package/lib/reflect.js +1 -3
  176. package/lib/reflect.js.map +1 -1
  177. package/lib/reflect.mjs +1 -3
  178. package/lib/schemas/json/IJsonApplication.d.mts +4 -4
  179. package/lib/schemas/json/IJsonApplication.d.ts +4 -4
  180. package/lib/schemas/json/IJsonSchemaCollection.d.mts +73 -56
  181. package/lib/schemas/json/IJsonSchemaCollection.d.ts +73 -56
  182. package/lib/schemas/json/IJsonSchemaUnit.d.mts +83 -70
  183. package/lib/schemas/json/IJsonSchemaUnit.d.ts +83 -70
  184. package/lib/schemas/metadata/Metadata.d.mts +1 -3
  185. package/lib/schemas/metadata/Metadata.d.ts +1 -3
  186. package/lib/schemas/metadata/Metadata.js +9 -27
  187. package/lib/schemas/metadata/Metadata.js.map +1 -1
  188. package/lib/schemas/metadata/Metadata.mjs +9 -27
  189. package/lib/schemas/metadata/MetadataAliasType.d.mts +1 -3
  190. package/lib/schemas/metadata/MetadataAliasType.d.ts +1 -3
  191. package/lib/schemas/metadata/MetadataAliasType.js +3 -9
  192. package/lib/schemas/metadata/MetadataAliasType.js.map +1 -1
  193. package/lib/schemas/metadata/MetadataAliasType.mjs +3 -9
  194. package/lib/schemas/metadata/MetadataApplication.d.mts +1 -3
  195. package/lib/schemas/metadata/MetadataApplication.d.ts +1 -3
  196. package/lib/schemas/metadata/MetadataApplication.js +2 -6
  197. package/lib/schemas/metadata/MetadataApplication.js.map +1 -1
  198. package/lib/schemas/metadata/MetadataApplication.mjs +2 -6
  199. package/lib/schemas/metadata/MetadataArray.d.mts +1 -3
  200. package/lib/schemas/metadata/MetadataArray.d.ts +1 -3
  201. package/lib/schemas/metadata/MetadataArray.js +1 -3
  202. package/lib/schemas/metadata/MetadataArray.js.map +1 -1
  203. package/lib/schemas/metadata/MetadataArray.mjs +1 -3
  204. package/lib/schemas/metadata/MetadataArrayType.d.mts +1 -3
  205. package/lib/schemas/metadata/MetadataArrayType.d.ts +1 -3
  206. package/lib/schemas/metadata/MetadataArrayType.js +3 -9
  207. package/lib/schemas/metadata/MetadataArrayType.js.map +1 -1
  208. package/lib/schemas/metadata/MetadataArrayType.mjs +3 -9
  209. package/lib/schemas/metadata/MetadataAtomic.js +1 -3
  210. package/lib/schemas/metadata/MetadataAtomic.js.map +1 -1
  211. package/lib/schemas/metadata/MetadataAtomic.mjs +1 -3
  212. package/lib/schemas/metadata/MetadataEscaped.d.mts +1 -3
  213. package/lib/schemas/metadata/MetadataEscaped.d.ts +1 -3
  214. package/lib/schemas/metadata/MetadataEscaped.js +3 -9
  215. package/lib/schemas/metadata/MetadataEscaped.js.map +1 -1
  216. package/lib/schemas/metadata/MetadataEscaped.mjs +3 -9
  217. package/lib/schemas/metadata/MetadataFunction.d.mts +1 -3
  218. package/lib/schemas/metadata/MetadataFunction.d.ts +1 -3
  219. package/lib/schemas/metadata/MetadataFunction.js +2 -6
  220. package/lib/schemas/metadata/MetadataFunction.js.map +1 -1
  221. package/lib/schemas/metadata/MetadataFunction.mjs +2 -6
  222. package/lib/schemas/metadata/MetadataObject.d.mts +1 -3
  223. package/lib/schemas/metadata/MetadataObject.d.ts +1 -3
  224. package/lib/schemas/metadata/MetadataObject.js +1 -3
  225. package/lib/schemas/metadata/MetadataObject.js.map +1 -1
  226. package/lib/schemas/metadata/MetadataObject.mjs +1 -3
  227. package/lib/schemas/metadata/MetadataObjectType.d.mts +1 -3
  228. package/lib/schemas/metadata/MetadataObjectType.d.ts +1 -3
  229. package/lib/schemas/metadata/MetadataObjectType.js +5 -15
  230. package/lib/schemas/metadata/MetadataObjectType.js.map +1 -1
  231. package/lib/schemas/metadata/MetadataObjectType.mjs +6 -18
  232. package/lib/schemas/metadata/MetadataParameter.js +1 -3
  233. package/lib/schemas/metadata/MetadataParameter.js.map +1 -1
  234. package/lib/schemas/metadata/MetadataParameter.mjs +1 -3
  235. package/lib/schemas/metadata/MetadataProperty.d.mts +1 -3
  236. package/lib/schemas/metadata/MetadataProperty.d.ts +1 -3
  237. package/lib/schemas/metadata/MetadataProperty.js +3 -9
  238. package/lib/schemas/metadata/MetadataProperty.js.map +1 -1
  239. package/lib/schemas/metadata/MetadataProperty.mjs +3 -9
  240. package/lib/schemas/metadata/MetadataTemplate.js +1 -3
  241. package/lib/schemas/metadata/MetadataTemplate.js.map +1 -1
  242. package/lib/schemas/metadata/MetadataTemplate.mjs +1 -3
  243. package/lib/schemas/metadata/MetadataTuple.d.mts +1 -3
  244. package/lib/schemas/metadata/MetadataTuple.d.ts +1 -3
  245. package/lib/schemas/metadata/MetadataTuple.js +2 -6
  246. package/lib/schemas/metadata/MetadataTuple.js.map +1 -1
  247. package/lib/schemas/metadata/MetadataTuple.mjs +2 -6
  248. package/lib/schemas/metadata/MetadataTupleType.js +2 -6
  249. package/lib/schemas/metadata/MetadataTupleType.js.map +1 -1
  250. package/lib/schemas/metadata/MetadataTupleType.mjs +3 -9
  251. package/lib/tags/Constant.d.mts +16 -16
  252. package/lib/tags/Constant.d.ts +16 -16
  253. package/lib/tags/ContentMediaType.d.mts +7 -7
  254. package/lib/tags/ContentMediaType.d.ts +7 -7
  255. package/lib/tags/Default.d.mts +19 -19
  256. package/lib/tags/Default.d.ts +19 -19
  257. package/lib/tags/Example.d.mts +18 -18
  258. package/lib/tags/Example.d.ts +18 -18
  259. package/lib/tags/Examples.d.mts +23 -23
  260. package/lib/tags/Examples.d.ts +23 -23
  261. package/lib/tags/ExclusiveMaximum.d.mts +8 -5
  262. package/lib/tags/ExclusiveMaximum.d.ts +8 -5
  263. package/lib/tags/ExclusiveMinimum.d.mts +8 -5
  264. package/lib/tags/ExclusiveMinimum.d.ts +8 -5
  265. package/lib/tags/Format.d.mts +12 -8
  266. package/lib/tags/Format.d.ts +12 -8
  267. package/lib/tags/JsonSchemaPlugin.d.mts +20 -18
  268. package/lib/tags/JsonSchemaPlugin.d.ts +20 -18
  269. package/lib/tags/MaxItems.d.mts +9 -9
  270. package/lib/tags/MaxItems.d.ts +9 -9
  271. package/lib/tags/MaxLength.d.mts +6 -5
  272. package/lib/tags/MaxLength.d.ts +6 -5
  273. package/lib/tags/Maximum.d.mts +9 -7
  274. package/lib/tags/Maximum.d.ts +9 -7
  275. package/lib/tags/MinItems.d.mts +9 -9
  276. package/lib/tags/MinItems.d.ts +9 -9
  277. package/lib/tags/MinLength.d.mts +6 -5
  278. package/lib/tags/MinLength.d.ts +6 -5
  279. package/lib/tags/Minimum.d.mts +9 -7
  280. package/lib/tags/Minimum.d.ts +9 -7
  281. package/lib/tags/MultipleOf.d.mts +10 -7
  282. package/lib/tags/MultipleOf.d.ts +10 -7
  283. package/lib/tags/Pattern.d.mts +7 -4
  284. package/lib/tags/Pattern.d.ts +7 -4
  285. package/lib/tags/Sequence.d.mts +19 -17
  286. package/lib/tags/Sequence.d.ts +19 -17
  287. package/lib/tags/TagBase.d.mts +21 -28
  288. package/lib/tags/TagBase.d.ts +21 -28
  289. package/lib/tags/Type.d.mts +12 -11
  290. package/lib/tags/Type.d.ts +12 -11
  291. package/lib/tags/UniqueItems.d.mts +10 -9
  292. package/lib/tags/UniqueItems.d.ts +10 -9
  293. package/lib/tags/internal/FormatCheatSheet.d.mts +1 -3
  294. package/lib/tags/internal/FormatCheatSheet.d.ts +1 -3
  295. package/lib/tags/internal/FormatCheatSheet.js +1 -3
  296. package/lib/tags/internal/FormatCheatSheet.js.map +1 -1
  297. package/lib/tags/internal/FormatCheatSheet.mjs +1 -3
  298. package/lib/transformers/ITransformOptions.d.mts +27 -19
  299. package/lib/transformers/ITransformOptions.d.ts +27 -19
  300. package/lib/transformers/ImportTransformer.js +5 -10
  301. package/lib/transformers/ImportTransformer.js.map +1 -1
  302. package/lib/transformers/ImportTransformer.mjs +5 -10
  303. package/lib/transformers/NoTransformConfigurationError.js +1 -3
  304. package/lib/transformers/NoTransformConfigurationError.js.map +1 -1
  305. package/lib/transformers/NoTransformConfigurationError.mjs +1 -3
  306. package/lib/transformers/features/llm/LlmApplicationTransformer.js +1 -3
  307. package/lib/transformers/features/llm/LlmApplicationTransformer.js.map +1 -1
  308. package/lib/transformers/features/llm/LlmApplicationTransformer.mjs +1 -3
  309. package/lib/typings/Equal.d.mts +6 -6
  310. package/lib/typings/Equal.d.ts +6 -6
  311. package/package.json +2 -1
  312. package/src/AssertionGuard.ts +27 -25
  313. package/src/CamelCase.ts +1 -1
  314. package/src/IRandomGenerator.ts +44 -42
  315. package/src/IReadableURLSearchParams.ts +2 -2
  316. package/src/PascalCase.ts +1 -1
  317. package/src/Primitive.ts +20 -22
  318. package/src/Resolved.ts +16 -18
  319. package/src/SnakeCase.ts +3 -2
  320. package/src/TypeGuardError.ts +101 -64
  321. package/src/factories/MetadataCollection.ts +4 -12
  322. package/src/factories/MetadataCommentTagFactory.ts +8 -24
  323. package/src/factories/MetadataFactory.ts +1 -3
  324. package/src/factories/ProtobufFactory.ts +1 -3
  325. package/src/functional.ts +214 -249
  326. package/src/http.ts +329 -397
  327. package/src/internal/_ProtobufReader.ts +3 -9
  328. package/src/internal/_ProtobufSizer.ts +4 -12
  329. package/src/internal/_ProtobufWriter.ts +5 -15
  330. package/src/internal/_jsonStringifyString.ts +4 -4
  331. package/src/json.ts +190 -243
  332. package/src/llm.ts +279 -204
  333. package/src/misc.ts +166 -223
  334. package/src/module.ts +281 -329
  335. package/src/notations.ts +177 -246
  336. package/src/programmers/FeatureProgrammer.ts +19 -41
  337. package/src/programmers/ImportProgrammer.ts +3 -9
  338. package/src/programmers/RandomProgrammer.ts +6 -1
  339. package/src/programmers/helpers/ProtobufWire.ts +13 -13
  340. package/src/programmers/internal/check_array_length.ts +2 -6
  341. package/src/programmers/internal/check_bigint.ts +2 -6
  342. package/src/programmers/internal/check_dynamic_key.ts +2 -6
  343. package/src/programmers/internal/check_dynamic_properties.ts +3 -9
  344. package/src/programmers/internal/check_everything.ts +1 -3
  345. package/src/programmers/internal/check_native.ts +2 -6
  346. package/src/programmers/internal/check_number.ts +2 -6
  347. package/src/programmers/internal/check_object.ts +3 -9
  348. package/src/programmers/internal/check_string.ts +2 -6
  349. package/src/programmers/internal/check_template.ts +1 -3
  350. package/src/programmers/internal/check_union_array_like.ts +2 -6
  351. package/src/programmers/internal/decode_union_object.ts +3 -9
  352. package/src/programmers/internal/feature_object_entries.ts +1 -3
  353. package/src/programmers/internal/json_schema_escaped.ts +2 -6
  354. package/src/programmers/internal/json_schema_object.ts +4 -12
  355. package/src/programmers/internal/metadata_to_pattern.ts +1 -3
  356. package/src/programmers/internal/postfix_of_tuple.ts +1 -3
  357. package/src/programmers/internal/prune_object_properties.ts +1 -3
  358. package/src/programmers/internal/stringify_dynamic_properties.ts +2 -6
  359. package/src/programmers/internal/stringify_native.ts +1 -3
  360. package/src/programmers/internal/stringify_regular_properties.ts +2 -6
  361. package/src/programmers/internal/template_to_pattern.ts +1 -3
  362. package/src/programmers/internal/wrap_metadata_rest_tuple.ts +1 -3
  363. package/src/programmers/json/JsonStringifyProgrammer.ts +2 -2
  364. package/src/protobuf.ts +307 -348
  365. package/src/reflect.ts +3 -7
  366. package/src/schemas/json/IJsonApplication.ts +4 -4
  367. package/src/schemas/json/IJsonSchemaCollection.ts +73 -56
  368. package/src/schemas/json/IJsonSchemaUnit.ts +83 -70
  369. package/src/schemas/metadata/IMetadataTypeTag.ts +1 -3
  370. package/src/schemas/metadata/Metadata.ts +9 -27
  371. package/src/schemas/metadata/MetadataAliasType.ts +3 -9
  372. package/src/schemas/metadata/MetadataApplication.ts +2 -6
  373. package/src/schemas/metadata/MetadataArray.ts +1 -3
  374. package/src/schemas/metadata/MetadataArrayType.ts +3 -9
  375. package/src/schemas/metadata/MetadataAtomic.ts +1 -3
  376. package/src/schemas/metadata/MetadataEscaped.ts +3 -9
  377. package/src/schemas/metadata/MetadataFunction.ts +2 -6
  378. package/src/schemas/metadata/MetadataObject.ts +1 -3
  379. package/src/schemas/metadata/MetadataObjectType.ts +6 -18
  380. package/src/schemas/metadata/MetadataParameter.ts +1 -3
  381. package/src/schemas/metadata/MetadataProperty.ts +3 -9
  382. package/src/schemas/metadata/MetadataTemplate.ts +1 -3
  383. package/src/schemas/metadata/MetadataTuple.ts +2 -6
  384. package/src/schemas/metadata/MetadataTupleType.ts +3 -9
  385. package/src/tags/Constant.ts +16 -16
  386. package/src/tags/ContentMediaType.ts +7 -7
  387. package/src/tags/Default.ts +19 -19
  388. package/src/tags/Example.ts +18 -18
  389. package/src/tags/Examples.ts +23 -23
  390. package/src/tags/ExclusiveMaximum.ts +8 -5
  391. package/src/tags/ExclusiveMinimum.ts +8 -5
  392. package/src/tags/Format.ts +12 -8
  393. package/src/tags/JsonSchemaPlugin.ts +20 -18
  394. package/src/tags/MaxItems.ts +9 -9
  395. package/src/tags/MaxLength.ts +6 -5
  396. package/src/tags/Maximum.ts +9 -7
  397. package/src/tags/MinItems.ts +9 -9
  398. package/src/tags/MinLength.ts +6 -5
  399. package/src/tags/Minimum.ts +9 -7
  400. package/src/tags/MultipleOf.ts +10 -7
  401. package/src/tags/Pattern.ts +7 -4
  402. package/src/tags/Sequence.ts +19 -17
  403. package/src/tags/TagBase.ts +21 -28
  404. package/src/tags/Type.ts +12 -11
  405. package/src/tags/UniqueItems.ts +10 -9
  406. package/src/tags/internal/FormatCheatSheet.ts +1 -3
  407. package/src/transformers/ITransformOptions.ts +27 -19
  408. package/src/transformers/ImportTransformer.ts +5 -10
  409. package/src/transformers/NoTransformConfigurationError.ts +1 -3
  410. package/src/transformers/features/llm/LlmApplicationTransformer.ts +1 -3
  411. package/src/typings/Equal.ts +6 -6
package/src/llm.ts CHANGED
@@ -7,18 +7,19 @@ import { NoTransformConfigurationError } from "./transformers/NoTransformConfigu
7
7
  *
8
8
  * TypeScript functions to LLM function calling controller.
9
9
  *
10
- * Creates a controller of LLM (Large Language Model) function calling
11
- * from a TypeScript class or interface type containing the target functions to be
10
+ * Creates a controller of LLM (Large Language Model) function calling from a
11
+ * TypeScript class or interface type containing the target functions to be
12
12
  * called by the LLM function calling feature. The returned controller contains
13
- * not only the {@link application} of {@link ILlmFunction function calling schemas},
14
- * but also the {@link ILlmController.execute executor} of the functions.
13
+ * not only the {@link application} of
14
+ * {@link ILlmFunction function calling schemas}, but also the
15
+ * {@link ILlmController.execute executor} of the functions.
15
16
  *
16
17
  * If you put the returned {@link ILlmController} to the LLM provider like
17
- * [OpenAI (ChatGPT)](https://openai.com/), the LLM will automatically select the
18
- * proper function and fill its arguments from the conversation (maybe chatting text)
19
- * with user (human). And you can actually call the function by using
20
- * {@link ILlmController.execute} property. This is the concept of the LLM function
21
- * calling.
18
+ * [OpenAI (ChatGPT)](https://openai.com/), the LLM will automatically select
19
+ * the proper function and fill its arguments from the conversation (maybe
20
+ * chatting text) with user (human). And you can actually call the function by
21
+ * using {@link ILlmController.execute} property. This is the concept of the LLM
22
+ * function calling.
22
23
  *
23
24
  * Here is an example of using `typia.llm.controller()` function for AI agent
24
25
  * development of performing such AI function calling to mobile API classes
@@ -50,21 +51,30 @@ import { NoTransformConfigurationError } from "./transformers/NoTransformConfigu
50
51
  * );
51
52
  * ```
52
53
  *
53
- * Here is the list of available `Model` types with their corresponding LLM schema.
54
- * Reading the following list, and determine the `Model` type considering the
55
- * characteristics of the target LLM provider.
54
+ * Here is the list of available `Model` types with their corresponding LLM
55
+ * schema. Reading the following list, and determine the `Model` type
56
+ * considering the characteristics of the target LLM provider.
56
57
  *
57
58
  * - LLM provider schemas
58
- * - `chatgpt`: [`IChatGptSchema`](https://samchon.github.io/openapi/api/types/IChatGptSchema-1.html)
59
- * - `claude`: [`IClaudeSchema`](https://samchon.github.io/openapi/api/types/IClaudeSchema-1.html)
60
- * - `deepseek`: [`IDeepSeekSchema`](https://samchon.github.io/openapi/api/types/IClaudeSchema-1.html)
61
- * - `gemini`: [`IGeminiSchema`](https://samchon.github.io/openapi/api/types/IGeminiSchema-1.html)
62
- * - `llama`: [`ILlamaSchema`](https://samchon.github.io/openapi/api/types/ILlamaSchema-1.html)
59
+ *
60
+ * - `chatgpt`:
61
+ * [`IChatGptSchema`](https://samchon.github.io/openapi/api/types/IChatGptSchema-1.html)
62
+ * - `claude`:
63
+ * [`IClaudeSchema`](https://samchon.github.io/openapi/api/types/IClaudeSchema-1.html)
64
+ * - `deepseek`:
65
+ * [`IDeepSeekSchema`](https://samchon.github.io/openapi/api/types/IClaudeSchema-1.html)
66
+ * - `gemini`:
67
+ * [`IGeminiSchema`](https://samchon.github.io/openapi/api/types/IGeminiSchema-1.html)
68
+ * - `llama`:
69
+ * [`ILlamaSchema`](https://samchon.github.io/openapi/api/types/ILlamaSchema-1.html)
63
70
  * - Middle layer schemas
71
+ *
64
72
  * - `3.0`: [`ILlmSchemaV3`](https://samchon.github.io/openapi/api/types/ILlmSchemaV3-1.html)
65
73
  * - `3.1`: [`ILlmSchemaV3_1`](https://samchon.github.io/openapi/api/types/ILlmSchemaV3_1-1.html)
66
74
  *
67
- * @template Class Target class or interface type collecting the functions to call
75
+ * @author Jeongho Nam - https://github.com/samchon
76
+ * @template Class Target class or interface type collecting the functions to
77
+ * call
68
78
  * @template Model LLM schema model
69
79
  * @template Config Configuration of LLM schema composition
70
80
  * @param name Identifier name of the controller
@@ -72,7 +82,6 @@ import { NoTransformConfigurationError } from "./transformers/NoTransformConfigu
72
82
  * @param options Options for the LLM application construction
73
83
  * @returns Controller of LLM function calling
74
84
  * @reference https://wrtnlabs.io/agentica/docs/core/controller/typescript/
75
- * @author Jeongho Nam - https://github.com/samchon
76
85
  */
77
86
  export function controller(
78
87
  name: string,
@@ -85,18 +94,19 @@ export function controller(
85
94
  /**
86
95
  * TypeScript functions to LLM function calling controller.
87
96
  *
88
- * Creates a controller of LLM (Large Language Model) function calling
89
- * from a TypeScript class or interface type containing the target functions to be
97
+ * Creates a controller of LLM (Large Language Model) function calling from a
98
+ * TypeScript class or interface type containing the target functions to be
90
99
  * called by the LLM function calling feature. The returned controller contains
91
- * not only the {@link application} of {@link ILlmFunction function calling schemas},
92
- * but also the {@link ILlmController.execute executor} of the functions.
100
+ * not only the {@link application} of
101
+ * {@link ILlmFunction function calling schemas}, but also the
102
+ * {@link ILlmController.execute executor} of the functions.
93
103
  *
94
104
  * If you put the returned {@link ILlmController} to the LLM provider like
95
- * [OpenAI (ChatGPT)](https://openai.com/), the LLM will automatically select the
96
- * proper function and fill its arguments from the conversation (maybe chatting text)
97
- * with user (human). And you can actually call the function by using
98
- * {@link ILlmController.execute} property. This is the concept of the LLM function
99
- * calling.
105
+ * [OpenAI (ChatGPT)](https://openai.com/), the LLM will automatically select
106
+ * the proper function and fill its arguments from the conversation (maybe
107
+ * chatting text) with user (human). And you can actually call the function by
108
+ * using {@link ILlmController.execute} property. This is the concept of the LLM
109
+ * function calling.
100
110
  *
101
111
  * Here is an example of using `typia.llm.controller()` function for AI agent
102
112
  * development of performing such AI function calling to mobile API classes
@@ -128,21 +138,30 @@ export function controller(
128
138
  * );
129
139
  * ```
130
140
  *
131
- * Here is the list of available `Model` types with their corresponding LLM schema.
132
- * Reading the following list, and determine the `Model` type considering the
133
- * characteristics of the target LLM provider.
141
+ * Here is the list of available `Model` types with their corresponding LLM
142
+ * schema. Reading the following list, and determine the `Model` type
143
+ * considering the characteristics of the target LLM provider.
134
144
  *
135
145
  * - LLM provider schemas
136
- * - `chatgpt`: [`IChatGptSchema`](https://samchon.github.io/openapi/api/types/IChatGptSchema-1.html)
137
- * - `claude`: [`IClaudeSchema`](https://samchon.github.io/openapi/api/types/IClaudeSchema-1.html)
138
- * - `deepseek`: [`IDeepSeekSchema`](https://samchon.github.io/openapi/api/types/IClaudeSchema-1.html)
139
- * - `gemini`: [`IGeminiSchema`](https://samchon.github.io/openapi/api/types/IGeminiSchema-1.html)
140
- * - `llama`: [`ILlamaSchema`](https://samchon.github.io/openapi/api/types/ILlamaSchema-1.html)
146
+ *
147
+ * - `chatgpt`:
148
+ * [`IChatGptSchema`](https://samchon.github.io/openapi/api/types/IChatGptSchema-1.html)
149
+ * - `claude`:
150
+ * [`IClaudeSchema`](https://samchon.github.io/openapi/api/types/IClaudeSchema-1.html)
151
+ * - `deepseek`:
152
+ * [`IDeepSeekSchema`](https://samchon.github.io/openapi/api/types/IClaudeSchema-1.html)
153
+ * - `gemini`:
154
+ * [`IGeminiSchema`](https://samchon.github.io/openapi/api/types/IGeminiSchema-1.html)
155
+ * - `llama`:
156
+ * [`ILlamaSchema`](https://samchon.github.io/openapi/api/types/ILlamaSchema-1.html)
141
157
  * - Middle layer schemas
158
+ *
142
159
  * - `3.0`: [`ILlmSchemaV3`](https://samchon.github.io/openapi/api/types/ILlmSchemaV3-1.html)
143
160
  * - `3.1`: [`ILlmSchemaV3_1`](https://samchon.github.io/openapi/api/types/ILlmSchemaV3_1-1.html)
144
161
  *
145
- * @template Class Target class or interface type collecting the functions to call
162
+ * @author Jeongho Nam - https://github.com/samchon
163
+ * @template Class Target class or interface type collecting the functions to
164
+ * call
146
165
  * @template Model LLM schema model
147
166
  * @template Config Configuration of LLM schema composition
148
167
  * @param name Identifier name of the controller
@@ -150,7 +169,6 @@ export function controller(
150
169
  * @param options Options for the LLM application construction
151
170
  * @returns Controller of LLM function calling
152
171
  * @reference https://wrtnlabs.io/agentica/docs/core/controller/typescript/
153
- * @author Jeongho Nam - https://github.com/samchon
154
172
  */
155
173
  export function controller<
156
174
  Class extends Record<string, any>,
@@ -160,9 +178,9 @@ export function controller<
160
178
  /**
161
179
  * Whether to disallow superfluous properties or not.
162
180
  *
163
- * If configure as `true`, {@link validateEquals} function would be
164
- * used for validation feedback, which is more strict than
165
- * {@link validate} function.
181
+ * If configured as `true`, {@link validateEquals} function would be used
182
+ * for validation feedback, which is more strict than {@link validate}
183
+ * function.
166
184
  *
167
185
  * @default false
168
186
  */
@@ -177,9 +195,7 @@ export function controller<
177
195
  >,
178
196
  ): ILlmController<Model>;
179
197
 
180
- /**
181
- * @internal
182
- */
198
+ /** @internal */
183
199
  export function controller(..._args: any[]): never {
184
200
  NoTransformConfigurationError("llm.controller");
185
201
  }
@@ -189,54 +205,65 @@ export function controller(..._args: any[]): never {
189
205
  *
190
206
  * TypeScript functions to LLM function calling application.
191
207
  *
192
- * Creates an application of LLM (Large Language Model) function calling application
193
- * from a TypeScript class or interface type containing the target functions to be
194
- * called by the LLM function calling feature.
208
+ * Creates an application of LLM (Large Language Model) function calling
209
+ * application from a TypeScript class or interface type containing the target
210
+ * functions to be called by the LLM function calling feature.
195
211
  *
196
- * If you put the returned {@link ILlmApplication.functions} objects to the LLM provider
197
- * like [OpenAI (ChatGPT)](https://openai.com/), the LLM will automatically select the
198
- * proper function and fill its arguments from the conversation (maybe chatting text)
199
- * with user (human). This is the concept of the LLM function calling.
212
+ * If you put the returned {@link ILlmApplication.functions} objects to the LLM
213
+ * provider like [OpenAI (ChatGPT)](https://openai.com/), the LLM will
214
+ * automatically select the proper function and fill its arguments from the
215
+ * conversation (maybe chatting text) with user (human). This is the concept of
216
+ * the LLM function calling.
200
217
  *
201
- * By the way, there can be some parameters (or their nested properties) which must be
202
- * composed by human, not by LLM. File uploading feature or some sensitive information
203
- * like security keys (password) are the examples. In that case, you can separate the
204
- * function parameters to both LLM and human sides by configuring the
205
- * {@link ILlmApplication.IOptions.separate} property. The separated parameters are
206
- * assigned to the {@link ILlmFunction.separated} property.
218
+ * By the way, there can be some parameters (or their nested properties) which
219
+ * must be composed by human, not by LLM. File uploading feature or some
220
+ * sensitive information like security keys (password) are the examples. In that
221
+ * case, you can separate the function parameters to both LLM and human sides by
222
+ * configuring the {@link ILlmApplication.IOptions.separate} property. The
223
+ * separated parameters are assigned to the {@link ILlmFunction.separated}
224
+ * property.
207
225
  *
208
226
  * For reference, the actual function call execution is not by LLM, but by you.
209
- * When the LLM selects the proper function and fills the arguments, you just call
210
- * the function with the LLM prepared arguments. And then informs the return value to
211
- * the LLM by system prompt. The LLM will continue the next conversation based on
212
- * the return value.
227
+ * When the LLM selects the proper function and fills the arguments, you just
228
+ * call the function with the LLM prepared arguments. And then informs the
229
+ * return value to the LLM by system prompt. The LLM will continue the next
230
+ * conversation based on the return value.
213
231
  *
214
232
  * Additionally, if you've configured {@link ILlmApplication.IOptions.separate},
215
- * so that the parameters are separated to human and LLM sides, you can merge these
216
- * human and LLM sides' parameters into one through {@link HttpLlm.mergeParameters}
217
- * before the actual LLM function call execution.
233
+ * so that the parameters are separated to human and LLM sides, you can merge
234
+ * these human and LLM sides' parameters into one through
235
+ * {@link HttpLlm.mergeParameters} before the actual LLM function call
236
+ * execution.
218
237
  *
219
- * Here is the list of available `Model` types with their corresponding LLM schema.
220
- * Reading the following list, and determine the `Model` type considering the
221
- * characteristics of the target LLM provider.
238
+ * Here is the list of available `Model` types with their corresponding LLM
239
+ * schema. Reading the following list, and determine the `Model` type
240
+ * considering the characteristics of the target LLM provider.
222
241
  *
223
242
  * - LLM provider schemas
224
- * - `chatgpt`: [`IChatGptSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IChatGptSchema.ts)
225
- * - `claude`: [`IClaudeSchema`](https://samchon.github.io/openapi/api/types/IClaudeSchema-1.html)
226
- * - `deepseek`: [`IDeepSeekSchema`](https://samchon.github.io/openapi/api/types/IClaudeSchema-1.html)
227
- * - `gemini`: [`IGeminiSchema`](https://samchon.github.io/openapi/api/types/IGeminiSchema-1.html)
228
- * - `llama`: [`ILlamaSchema`](https://samchon.github.io/openapi/api/types/ILlamaSchema-1.html)
243
+ *
244
+ * - `chatgpt`:
245
+ * [`IChatGptSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IChatGptSchema.ts)
246
+ * - `claude`:
247
+ * [`IClaudeSchema`](https://samchon.github.io/openapi/api/types/IClaudeSchema-1.html)
248
+ * - `deepseek`:
249
+ * [`IDeepSeekSchema`](https://samchon.github.io/openapi/api/types/IClaudeSchema-1.html)
250
+ * - `gemini`:
251
+ * [`IGeminiSchema`](https://samchon.github.io/openapi/api/types/IGeminiSchema-1.html)
252
+ * - `llama`:
253
+ * [`ILlamaSchema`](https://samchon.github.io/openapi/api/types/ILlamaSchema-1.html)
229
254
  * - Middle layer schemas
255
+ *
230
256
  * - `3.0`: [`ILlmSchemaV3`](https://samchon.github.io/openapi/api/types/ILlmSchemaV3-1.html)
231
257
  * - `3.1`: [`ILlmSchemaV3_1`](https://samchon.github.io/openapi/api/types/ILlmSchemaV3_1-1.html)
232
258
  *
233
- * @template Class Target class or interface type collecting the functions to call
259
+ * @author Jeongho Nam - https://github.com/samchon
260
+ * @template Class Target class or interface type collecting the functions to
261
+ * call
234
262
  * @template Model LLM schema model
235
263
  * @template Config Configuration of LLM schema composition
236
264
  * @param options Options for the LLM application construction
237
265
  * @returns Application of LLM function calling schemas
238
266
  * @reference https://platform.openai.com/docs/guides/function-calling
239
- * @author Jeongho Nam - https://github.com/samchon
240
267
  */
241
268
  export function application(
242
269
  options?: Partial<
@@ -247,54 +274,65 @@ export function application(
247
274
  /**
248
275
  * TypeScript functions to LLM function calling application.
249
276
  *
250
- * Creates an application of LLM (Large Language Model) function calling application
251
- * from a TypeScript class or interface type containing the target functions to be
252
- * called by the LLM function calling feature.
277
+ * Creates an application of LLM (Large Language Model) function calling
278
+ * application from a TypeScript class or interface type containing the target
279
+ * functions to be called by the LLM function calling feature.
253
280
  *
254
- * If you put the returned {@link ILlmApplication.functions} objects to the LLM provider
255
- * like [OpenAI (ChatGPT)](https://openai.com/), the LLM will automatically select the
256
- * proper function and fill its arguments from the conversation (maybe chatting text)
257
- * with user (human). This is the concept of the LLM function calling.
281
+ * If you put the returned {@link ILlmApplication.functions} objects to the LLM
282
+ * provider like [OpenAI (ChatGPT)](https://openai.com/), the LLM will
283
+ * automatically select the proper function and fill its arguments from the
284
+ * conversation (maybe chatting text) with user (human). This is the concept of
285
+ * the LLM function calling.
258
286
  *
259
- * By the way, there can be some parameters (or their nested properties) which must be
260
- * composed by human, not by LLM. File uploading feature or some sensitive information
261
- * like security keys (password) are the examples. In that case, you can separate the
262
- * function parameters to both LLM and human sides by configuring the
263
- * {@link ILlmApplication.IOptions.separate} property. The separated parameters are
264
- * assigned to the {@link ILlmFunction.separated} property.
287
+ * By the way, there can be some parameters (or their nested properties) which
288
+ * must be composed by human, not by LLM. File uploading feature or some
289
+ * sensitive information like security keys (password) are the examples. In that
290
+ * case, you can separate the function parameters to both LLM and human sides by
291
+ * configuring the {@link ILlmApplication.IOptions.separate} property. The
292
+ * separated parameters are assigned to the {@link ILlmFunction.separated}
293
+ * property.
265
294
  *
266
295
  * For reference, the actual function call execution is not by LLM, but by you.
267
- * When the LLM selects the proper function and fills the arguments, you just call
268
- * the function with the LLM prepared arguments. And then informs the return value to
269
- * the LLM by system prompt. The LLM will continue the next conversation based on
270
- * the return value.
296
+ * When the LLM selects the proper function and fills the arguments, you just
297
+ * call the function with the LLM prepared arguments. And then informs the
298
+ * return value to the LLM by system prompt. The LLM will continue the next
299
+ * conversation based on the return value.
271
300
  *
272
301
  * Additionally, if you've configured {@link ILlmApplication.IOptions.separate},
273
- * so that the parameters are separated to human and LLM sides, you can merge these
274
- * human and LLM sides' parameters into one through {@link HttpLlm.mergeParameters}
275
- * before the actual LLM function call execution.
302
+ * so that the parameters are separated to human and LLM sides, you can merge
303
+ * these human and LLM sides' parameters into one through
304
+ * {@link HttpLlm.mergeParameters} before the actual LLM function call
305
+ * execution.
276
306
  *
277
- * Here is the list of available `Model` types with their corresponding LLM schema.
278
- * Reading the following list, and determine the `Model` type considering the
279
- * characteristics of the target LLM provider.
307
+ * Here is the list of available `Model` types with their corresponding LLM
308
+ * schema. Reading the following list, and determine the `Model` type
309
+ * considering the characteristics of the target LLM provider.
280
310
  *
281
311
  * - LLM provider schemas
282
- * - `chatgpt`: [`IChatGptSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IChatGptSchema.ts)
283
- * - `claude`: [`IClaudeSchema`](https://samchon.github.io/openapi/api/types/IClaudeSchema-1.html)
284
- * - `deepseek`: [`IDeepSeekSchema`](https://samchon.github.io/openapi/api/types/IClaudeSchema-1.html)
285
- * - `gemini`: [`IGeminiSchema`](https://samchon.github.io/openapi/api/types/IGeminiSchema-1.html)
286
- * - `llama`: [`ILlamaSchema`](https://samchon.github.io/openapi/api/types/ILlamaSchema-1.html)
312
+ *
313
+ * - `chatgpt`:
314
+ * [`IChatGptSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IChatGptSchema.ts)
315
+ * - `claude`:
316
+ * [`IClaudeSchema`](https://samchon.github.io/openapi/api/types/IClaudeSchema-1.html)
317
+ * - `deepseek`:
318
+ * [`IDeepSeekSchema`](https://samchon.github.io/openapi/api/types/IClaudeSchema-1.html)
319
+ * - `gemini`:
320
+ * [`IGeminiSchema`](https://samchon.github.io/openapi/api/types/IGeminiSchema-1.html)
321
+ * - `llama`:
322
+ * [`ILlamaSchema`](https://samchon.github.io/openapi/api/types/ILlamaSchema-1.html)
287
323
  * - Middle layer schemas
324
+ *
288
325
  * - `3.0`: [`ILlmSchemaV3`](https://samchon.github.io/openapi/api/types/ILlmSchemaV3-1.html)
289
326
  * - `3.1`: [`ILlmSchemaV3_1`](https://samchon.github.io/openapi/api/types/ILlmSchemaV3_1-1.html)
290
327
  *
291
- * @template Class Target class or interface type collecting the functions to call
328
+ * @author Jeongho Nam - https://github.com/samchon
329
+ * @template Class Target class or interface type collecting the functions to
330
+ * call
292
331
  * @template Model LLM schema model
293
332
  * @template Config Configuration of LLM schema composition
294
333
  * @param options Options for the LLM application construction
295
334
  * @returns Application of LLM function calling schemas
296
335
  * @reference https://platform.openai.com/docs/guides/function-calling
297
- * @author Jeongho Nam - https://github.com/samchon
298
336
  */
299
337
  export function application<
300
338
  Class extends Record<string, any>,
@@ -304,9 +342,9 @@ export function application<
304
342
  /**
305
343
  * Whether to disallow superfluous properties or not.
306
344
  *
307
- * If configure as `true`, {@link validateEquals} function would be
308
- * used for validation feedback, which is more strict than
309
- * {@link validate} function.
345
+ * If configured as `true`, {@link validateEquals} function would be used
346
+ * for validation feedback, which is more strict than {@link validate}
347
+ * function.
310
348
  *
311
349
  * @default false
312
350
  */
@@ -319,9 +357,7 @@ export function application<
319
357
  >,
320
358
  ): ILlmApplication<Model, Class>;
321
359
 
322
- /**
323
- * @internal
324
- */
360
+ /** @internal */
325
361
  export function application(): never {
326
362
  NoTransformConfigurationError("llm.application");
327
363
  }
@@ -331,32 +367,42 @@ export function application(): never {
331
367
  *
332
368
  * TypeScript parameters to LLM parameters schema.
333
369
  *
334
- * Creates an LLM (Large Language Model) parameters schema, a type metadata that is used in the
335
- * [LLM function calling](https://platform.openai.com/docs/guides/function-calling)
336
- * and [LLM structured outputs](https://platform.openai.com/docs/guides/structured-outputs),
337
- * from a TypeScript parameters type.
370
+ * Creates an LLM (Large Language Model) parameters schema, a type metadata that
371
+ * is used in the [LLM function
372
+ * calling](https://platform.openai.com/docs/guides/function-calling) and [LLM
373
+ * structured
374
+ * outputs](https://platform.openai.com/docs/guides/structured-outputs), from a
375
+ * TypeScript parameters type.
338
376
  *
339
- * For references, LLM identifies only keyworded arguments, not positional arguments.
340
- * Therefore, the TypeScript parameters type must be an object type, and its properties
341
- * must be static. If dynamic properties are, it will be compilation error.
377
+ * For references, LLM identifies only keyworded arguments, not positional
378
+ * arguments. Therefore, the TypeScript parameters type must be an object type,
379
+ * and its properties must be static. If dynamic properties are, it will be
380
+ * compilation error.
342
381
  *
343
- * Also, such parameters type can be utilized not only for the LLM function calling,
344
- * but also for the LLM structured outputs. The LLM structured outputs is a feature
345
- * that LLM (Large Language Model) can generate a structured output, not only a plain
346
- * text, by filling the parameters from the conversation (maybe chatting text) with user
347
- * (human).
382
+ * Also, such parameters type can be utilized not only for the LLM function
383
+ * calling, but also for the LLM structured outputs. The LLM structured outputs
384
+ * is a feature that LLM (Large Language Model) can generate a structured
385
+ * output, not only a plain text, by filling the parameters from the
386
+ * conversation (maybe chatting text) with user (human).
348
387
  *
349
- * Here is the list of available `Model` types with their corresponding LLM schema.
350
- * Reading the following list, and determine the `Model` type considering the
351
- * characteristics of the target LLM provider.
388
+ * Here is the list of available `Model` types with their corresponding LLM
389
+ * schema. Reading the following list, and determine the `Model` type
390
+ * considering the characteristics of the target LLM provider.
352
391
  *
353
392
  * - LLM provider schemas
354
- * - `chatgpt`: [`IChatGptSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IChatGptSchema.ts)
355
- * - `claude`: [`IClaudeSchema`](https://samchon.github.io/openapi/api/types/IClaudeSchema-1.html)
356
- * - `deepseek`: [`IDeepSeekSchema`](https://samchon.github.io/openapi/api/types/IClaudeSchema-1.html)
357
- * - `gemini`: [`IGeminiSchema`](https://samchon.github.io/openapi/api/types/IGeminiSchema-1.html)
358
- * - `llama`: [`ILlamaSchema`](https://samchon.github.io/openapi/api/types/ILlamaSchema-1.html)
393
+ *
394
+ * - `chatgpt`:
395
+ * [`IChatGptSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IChatGptSchema.ts)
396
+ * - `claude`:
397
+ * [`IClaudeSchema`](https://samchon.github.io/openapi/api/types/IClaudeSchema-1.html)
398
+ * - `deepseek`:
399
+ * [`IDeepSeekSchema`](https://samchon.github.io/openapi/api/types/IDeepSeekSchema-1.html)
400
+ * - `gemini`:
401
+ * [`IGeminiSchema`](https://samchon.github.io/openapi/api/types/IGeminiSchema-1.html)
402
+ * - `llama`:
403
+ * [`ILlamaSchema`](https://samchon.github.io/openapi/api/types/ILlamaSchema-1.html)
359
404
  * - Middle layer schemas
405
+ *
360
406
  * - `3.0`: [`ILlmSchemaV3`](https://samchon.github.io/openapi/api/types/ILlmSchemaV3-1.html)
361
407
  * - `3.1`: [`ILlmSchemaV3_1`](https://samchon.github.io/openapi/api/types/ILlmSchemaV3_1-1.html)
362
408
  *
@@ -372,32 +418,42 @@ export function parameters(): never;
372
418
  /**
373
419
  * TypeScript parameters to LLM parameters schema.
374
420
  *
375
- * Creates an LLM (Large Language Model) parameters schema, a type metadata that is used in the
376
- * [LLM function calling](https://platform.openai.com/docs/guides/function-calling)
377
- * and [LLM structured outputs](https://platform.openai.com/docs/guides/structured-outputs),
378
- * from a TypeScript parameters type.
421
+ * Creates an LLM (Large Language Model) parameters schema, a type metadata that
422
+ * is used in the [LLM function
423
+ * calling](https://platform.openai.com/docs/guides/function-calling) and [LLM
424
+ * structured
425
+ * outputs](https://platform.openai.com/docs/guides/structured-outputs), from a
426
+ * TypeScript parameters type.
379
427
  *
380
- * For references, LLM identifies only keyworded arguments, not positional arguments.
381
- * Therefore, the TypeScript parameters type must be an object type, and its properties
382
- * must be static. If dynamic properties are, it will be compilation error.
428
+ * For references, LLM identifies only keyworded arguments, not positional
429
+ * arguments. Therefore, the TypeScript parameters type must be an object type,
430
+ * and its properties must be static. If dynamic properties are, it will be
431
+ * compilation error.
383
432
  *
384
- * Also, such parameters type can be utilized not only for the LLM function calling,
385
- * but also for the LLM structured outputs. The LLM structured outputs is a feature
386
- * that LLM (Large Language Model) can generate a structured output, not only a plain
387
- * text, by filling the parameters from the conversation (maybe chatting text) with user
388
- * (human).
433
+ * Also, such parameters type can be utilized not only for the LLM function
434
+ * calling, but also for the LLM structured outputs. The LLM structured outputs
435
+ * is a feature that LLM (Large Language Model) can generate a structured
436
+ * output, not only a plain text, by filling the parameters from the
437
+ * conversation (maybe chatting text) with user (human).
389
438
  *
390
- * Here is the list of available `Model` types with their corresponding LLM schema.
391
- * Reading the following list, and determine the `Model` type considering the
392
- * characteristics of the target LLM provider.
439
+ * Here is the list of available `Model` types with their corresponding LLM
440
+ * schema. Reading the following list, and determine the `Model` type
441
+ * considering the characteristics of the target LLM provider.
393
442
  *
394
443
  * - LLM provider schemas
395
- * - `chatgpt`: [`IChatGptSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IChatGptSchema.ts)
396
- * - `claude`: [`IClaudeSchema`](https://samchon.github.io/openapi/api/types/IClaudeSchema-1.html)
397
- * - `deepseek`: [`IDeepSeekSchema`](https://samchon.github.io/openapi/api/types/IClaudeSchema-1.html)
398
- * - `gemini`: [`IGeminiSchema`](https://samchon.github.io/openapi/api/types/IGeminiSchema-1.html)
399
- * - `llama`: [`ILlamaSchema`](https://samchon.github.io/openapi/api/types/ILlamaSchema-1.html)
444
+ *
445
+ * - `chatgpt`:
446
+ * [`IChatGptSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IChatGptSchema.ts)
447
+ * - `claude`:
448
+ * [`IClaudeSchema`](https://samchon.github.io/openapi/api/types/IClaudeSchema-1.html)
449
+ * - `deepseek`:
450
+ * [`IDeepSeekSchema`](https://samchon.github.io/openapi/api/types/IClaudeSchema-1.html)
451
+ * - `gemini`:
452
+ * [`IGeminiSchema`](https://samchon.github.io/openapi/api/types/IGeminiSchema-1.html)
453
+ * - `llama`:
454
+ * [`ILlamaSchema`](https://samchon.github.io/openapi/api/types/ILlamaSchema-1.html)
400
455
  * - Middle layer schemas
456
+ *
401
457
  * - `3.0`: [`ILlmSchemaV3`](https://samchon.github.io/openapi/api/types/ILlmSchemaV3-1.html)
402
458
  * - `3.1`: [`ILlmSchemaV3_1`](https://samchon.github.io/openapi/api/types/ILlmSchemaV3_1-1.html)
403
459
  *
@@ -414,9 +470,7 @@ export function parameters<
414
470
  Config extends Partial<ILlmSchema.ModelConfig[Model]> = {},
415
471
  >(): ILlmSchema.ModelParameters[Model];
416
472
 
417
- /**
418
- * @internal
419
- */
473
+ /** @internal */
420
474
  export function parameters(): never {
421
475
  NoTransformConfigurationError("llm.parameters");
422
476
  }
@@ -426,98 +480,121 @@ export function parameters(): never {
426
480
  *
427
481
  * TypeScript type to LLM type schema.
428
482
  *
429
- * Creates an LLM (Large Language Model) type schema, a type metadata that is used in the
430
- * [LLM function calling](@reference https://platform.openai.com/docs/guides/function-calling),
431
- * from a TypeScript type.
483
+ * Creates an LLM (Large Language Model) type schema, a type metadata that is
484
+ * used in the [LLM function calling](@reference
485
+ * https://platform.openai.com/docs/guides/function-calling), from a TypeScript
486
+ * type.
432
487
  *
433
- * The returned {@link ILlmSchema} type will be specified by the `Model` argument,
434
- * and here is the list of available `Model` types with their corresponding LLM schema.
435
- * Reading the following list, and determine the `Model` type considering the
436
- * characteristics of the target LLM provider.
488
+ * The returned {@link ILlmSchema} type will be specified by the `Model`
489
+ * argument, and here is the list of available `Model` types with their
490
+ * corresponding LLM schema. Reading the following list, and determine the
491
+ * `Model` type considering the characteristics of the target LLM provider.
437
492
  *
438
493
  * - LLM provider schemas
439
- * - `chatgpt`: [`IChatGptSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IChatGptSchema.ts)
440
- * - `claude`: [`IClaudeSchema`](https://samchon.github.io/openapi/api/types/IClaudeSchema-1.html)
441
- * - `deepseek`: [`IDeepSeekSchema`](https://samchon.github.io/openapi/api/types/IClaudeSchema-1.html)
442
- * - `gemini`: [`IGeminiSchema`](https://samchon.github.io/openapi/api/types/IGeminiSchema-1.html)
443
- * - `llama`: [`ILlamaSchema`](https://samchon.github.io/openapi/api/types/ILlamaSchema-1.html)
494
+ *
495
+ * - `chatgpt`:
496
+ * [`IChatGptSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IChatGptSchema.ts)
497
+ * - `claude`:
498
+ * [`IClaudeSchema`](https://samchon.github.io/openapi/api/types/IClaudeSchema-1.html)
499
+ * - `deepseek`:
500
+ * [`IDeepSeekSchema`](https://samchon.github.io/openapi/api/types/IDeepSeekSchema-1.html)
501
+ * - `gemini`:
502
+ * [`IGeminiSchema`](https://samchon.github.io/openapi/api/types/IGeminiSchema-1.html)
503
+ * - `llama`:
504
+ * [`ILlamaSchema`](https://samchon.github.io/openapi/api/types/ILlamaSchema-1.html)
444
505
  * - Middle layer schemas
506
+ *
445
507
  * - `3.0`: [`ILlmSchemaV3`](https://samchon.github.io/openapi/api/types/ILlmSchemaV3-1.html)
446
508
  * - `3.1`: [`ILlmSchemaV3_1`](https://samchon.github.io/openapi/api/types/ILlmSchemaV3_1-1.html)
447
509
  *
448
- * If you actually want to perform the LLM function calling with TypeScript functions,
449
- * you can do it with the {@link application} function. Otherwise you hope to perform the
450
- * structured output, {@link parameters} function is better. Let's enjoy the LLM function calling
451
- * and structured output with the native TypeScript functions and types.
510
+ * If you actually want to perform the LLM function calling with TypeScript
511
+ * functions, you can do it with the {@link application} function. Otherwise you
512
+ * hope to perform the structured output, {@link parameters} function is better.
513
+ * Let's enjoy the LLM function calling and structured output with the native
514
+ * TypeScript functions and types.
452
515
  *
453
516
  * > **What LLM function calling is?**
454
- * >
517
+ *
455
518
  * > LLM (Large Language Model) selects property function and fill the arguments,
456
519
  * > but actual function call execution is not by LLM, but by you.
457
- * >
520
+ *
458
521
  * > In nowadays, most LLM (Large Language Model) like OpenAI are supporting
459
- * > "function calling" feature. The "function calling" means that LLM automatically selects
460
- * > a proper function and compose parameter values from the user's chatting text.
461
- * >
462
- * > When LLM selects the proper function and its arguments, you just call the function
463
- * > with the arguments. And then informs the return value to the LLM by system prompt,
464
- * > LLM will continue the next conversation based on the return value.
522
+ * > "function calling" feature. The "function calling" means that LLM
523
+ * > automatically selects a proper function and compose parameter values from the
524
+ * > user's chatting text.
465
525
  *
526
+ * > When LLM selects the proper function and its arguments, you just call the
527
+ * > function with the arguments. And then informs the return value to the LLM by
528
+ * > system prompt, LLM will continue the next conversation based on the return
529
+ * > value.
530
+ *
531
+ * @author Jeongho Nam - https://github.com/samchon
466
532
  * @template T Target type
467
533
  * @template Model LLM schema model
468
534
  * @template Config Configuration of LLM schema composition
469
535
  * @returns LLM schema
470
536
  * @reference https://platform.openai.com/docs/guides/function-calling
471
537
  * @reference https://platform.openai.com/docs/guides/structured-outputs
472
- * @author Jeongho Nam - https://github.com/samchon
473
538
  */
474
539
  export function schema(): never;
475
540
 
476
541
  /**
477
542
  * TypeScript type to LLM type schema.
478
543
  *
479
- * Creates an LLM (Large Language Model) type schema, a type metadata that is used in the
480
- * [LLM function calling](@reference https://platform.openai.com/docs/guides/function-calling),
481
- * from a TypeScript type.
544
+ * Creates an LLM (Large Language Model) type schema, a type metadata that is
545
+ * used in the [LLM function calling](@reference
546
+ * https://platform.openai.com/docs/guides/function-calling), from a TypeScript
547
+ * type.
482
548
  *
483
- * The returned {@link ILlmSchema} type will be specified by the `Model` argument,
484
- * and here is the list of available `Model` types with their corresponding LLM schema:
549
+ * The returned {@link ILlmSchema} type will be specified by the `Model`
550
+ * argument, and here is the list of available `Model` types with their
551
+ * corresponding LLM schema:
485
552
  *
486
553
  * - LLM provider schemas
487
- * - `chatgpt`: [`IChatGptSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IChatGptSchema.ts)
488
- * - `claude`: [`IClaudeSchema`](https://samchon.github.io/openapi/api/types/IClaudeSchema-1.html)
489
- * - `deepseek`: [`IDeepSeekSchema`](https://samchon.github.io/openapi/api/types/IClaudeSchema-1.html)
490
- * - `gemini`: [`IGeminiSchema`](https://samchon.github.io/openapi/api/types/IGeminiSchema-1.html)
491
- * - `llama`: [`ILlamaSchema`](https://samchon.github.io/openapi/api/types/ILlamaSchema-1.html)
554
+ *
555
+ * - `chatgpt`:
556
+ * [`IChatGptSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IChatGptSchema.ts)
557
+ * - `claude`:
558
+ * [`IClaudeSchema`](https://samchon.github.io/openapi/api/types/IClaudeSchema-1.html)
559
+ * - `deepseek`:
560
+ * [`IDeepSeekSchema`](https://samchon.github.io/openapi/api/types/IDeepSeekSchema-1.html)
561
+ * - `gemini`:
562
+ * [`IGeminiSchema`](https://samchon.github.io/openapi/api/types/IGeminiSchema-1.html)
563
+ * - `llama`:
564
+ * [`ILlamaSchema`](https://samchon.github.io/openapi/api/types/ILlamaSchema-1.html)
492
565
  * - Middle layer schemas
566
+ *
493
567
  * - `3.0`: [`ILlmSchemaV3`](https://samchon.github.io/openapi/api/types/ILlmSchemaV3-1.html)
494
568
  * - `3.1`: [`ILlmSchemaV3_1`](https://samchon.github.io/openapi/api/types/ILlmSchemaV3_1-1.html)
495
569
  *
496
- * If you actually want to perform the LLM function calling with TypeScript functions,
497
- * you can do it with the {@link application} function. Otherwise you hope to perform the
498
- * structured output, {@link parameters} function is better. Let's enjoy the LLM function calling
499
- * and structured output with the native TypeScript functions and types.
570
+ * If you actually want to perform the LLM function calling with TypeScript
571
+ * functions, you can do it with the {@link application} function. Otherwise you
572
+ * hope to perform the structured output, {@link parameters} function is better.
573
+ * Let's enjoy the LLM function calling and structured output with the native
574
+ * TypeScript functions and types.
500
575
  *
501
576
  * > **What LLM function calling is?**
502
- * >
577
+ *
503
578
  * > LLM (Large Language Model) selects property function and fill the arguments,
504
579
  * > but actual function call execution is not by LLM, but by you.
505
- * >
580
+ *
506
581
  * > In nowadays, most LLM (Large Language Model) like OpenAI are supporting
507
- * > "function calling" feature. The "function calling" means that LLM automatically selects
508
- * > a proper function and compose parameter values from the user's chatting text.
509
- * >
510
- * > When LLM selects the proper function and its arguments, you just call the function
511
- * > with the arguments. And then informs the return value to the LLM by system prompt,
512
- * > LLM will continue the next conversation based on the return value.
582
+ * > "function calling" feature. The "function calling" means that LLM
583
+ * > automatically selects a proper function and compose parameter values from the
584
+ * > user's chatting text.
513
585
  *
586
+ * > When LLM selects the proper function and its arguments, you just call the
587
+ * > function with the arguments. And then informs the return value to the LLM by
588
+ * > system prompt, LLM will continue the next conversation based on the return
589
+ * > value.
590
+ *
591
+ * @author Jeongho Nam - https://github.com/samchon
514
592
  * @template T Target type
515
593
  * @template Model LLM schema model
516
594
  * @template Config Configuration of LLM schema composition
517
595
  * @returns LLM schema
518
596
  * @reference https://platform.openai.com/docs/guides/function-calling
519
597
  * @reference https://platform.openai.com/docs/guides/structured-outputs
520
- * @author Jeongho Nam - https://github.com/samchon
521
598
  */
522
599
  export function schema<
523
600
  T,
@@ -532,9 +609,7 @@ export function schema<
532
609
  : [Record<string, ILlmSchema.ModelSchema[Model]>]
533
610
  ): ILlmSchema.ModelSchema[Model];
534
611
 
535
- /**
536
- * @internal
537
- */
612
+ /** @internal */
538
613
  export function schema(): never {
539
614
  NoTransformConfigurationError("llm.schema");
540
615
  }