typia 7.2.1 → 7.3.0-dev.20241213
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -21
- package/README.md +148 -148
- package/lib/executable/typia.js +0 -0
- package/lib/programmers/llm/LlmApplicationProgrammer.js +0 -1
- package/lib/programmers/llm/LlmApplicationProgrammer.js.map +1 -1
- package/package.json +3 -3
- package/src/IRandomGenerator.ts +49 -49
- package/src/IReadableURLSearchParams.ts +9 -9
- package/src/IValidation.ts +21 -21
- package/src/executable/TypiaGenerateWizard.ts +83 -83
- package/src/executable/TypiaPatchWizard.ts +45 -45
- package/src/executable/TypiaSetupWizard.ts +179 -179
- package/src/executable/setup/ArgumentParser.ts +42 -42
- package/src/executable/setup/FileRetriever.ts +19 -19
- package/src/executable/setup/PackageManager.ts +87 -87
- package/src/factories/ExpressionFactory.ts +216 -216
- package/src/factories/IdentifierFactory.ts +89 -89
- package/src/factories/JsonMetadataFactory.ts +76 -76
- package/src/factories/LiteralFactory.ts +52 -52
- package/src/factories/MetadataCollection.ts +278 -278
- package/src/factories/MetadataCommentTagFactory.ts +650 -650
- package/src/factories/MetadataFactory.ts +404 -404
- package/src/factories/MetadataTypeTagFactory.ts +411 -411
- package/src/factories/MetadataTypeTagSchemaFactory.ts +82 -82
- package/src/factories/NumericRangeFactory.ts +72 -72
- package/src/factories/ProtobufFactory.ts +875 -875
- package/src/factories/StatementFactory.ts +90 -90
- package/src/factories/TemplateFactory.ts +64 -64
- package/src/factories/TypeFactory.ts +140 -140
- package/src/factories/internal/metadata/IMetadataIteratorProps.ts +17 -17
- package/src/factories/internal/metadata/MetadataHelper.ts +21 -21
- package/src/factories/internal/metadata/emplace_metadata_alias.ts +33 -33
- package/src/factories/internal/metadata/emplace_metadata_array_type.ts +39 -39
- package/src/factories/internal/metadata/emplace_metadata_object.ts +208 -208
- package/src/factories/internal/metadata/emplace_metadata_tuple.ts +57 -57
- package/src/factories/internal/metadata/explore_metadata.ts +31 -31
- package/src/factories/internal/metadata/iterate_metadata.ts +54 -54
- package/src/factories/internal/metadata/iterate_metadata_alias.ts +33 -33
- package/src/factories/internal/metadata/iterate_metadata_array.ts +63 -63
- package/src/factories/internal/metadata/iterate_metadata_atomic.ts +62 -62
- package/src/factories/internal/metadata/iterate_metadata_coalesce.ts +28 -28
- package/src/factories/internal/metadata/iterate_metadata_collection.ts +146 -146
- package/src/factories/internal/metadata/iterate_metadata_comment_tags.ts +32 -32
- package/src/factories/internal/metadata/iterate_metadata_constant.ts +76 -76
- package/src/factories/internal/metadata/iterate_metadata_escape.ts +49 -49
- package/src/factories/internal/metadata/iterate_metadata_function.ts +91 -91
- package/src/factories/internal/metadata/iterate_metadata_intersection.ts +213 -213
- package/src/factories/internal/metadata/iterate_metadata_map.ts +57 -57
- package/src/factories/internal/metadata/iterate_metadata_native.ts +255 -255
- package/src/factories/internal/metadata/iterate_metadata_object.ts +35 -35
- package/src/factories/internal/metadata/iterate_metadata_set.ts +57 -57
- package/src/factories/internal/metadata/iterate_metadata_sort.ts +87 -87
- package/src/factories/internal/metadata/iterate_metadata_template.ts +41 -41
- package/src/factories/internal/metadata/iterate_metadata_tuple.ts +26 -26
- package/src/factories/internal/metadata/iterate_metadata_union.ts +19 -19
- package/src/functional.ts +750 -750
- package/src/http.ts +1047 -1047
- package/src/internal/_IProtobufWriter.ts +18 -18
- package/src/internal/_ProtobufReader.ts +194 -194
- package/src/internal/_ProtobufSizer.ts +145 -145
- package/src/internal/_ProtobufWriter.ts +145 -145
- package/src/internal/_accessExpressionAsString.ts +46 -46
- package/src/internal/_assertGuard.ts +13 -13
- package/src/internal/_functionalTypeGuardErrorFactory.ts +4 -4
- package/src/internal/_httpFormDataReadArray.ts +4 -4
- package/src/internal/_httpFormDataReadBigint.ts +18 -18
- package/src/internal/_httpFormDataReadBlob.ts +10 -10
- package/src/internal/_httpFormDataReadBoolean.ts +16 -16
- package/src/internal/_httpFormDataReadFile.ts +10 -10
- package/src/internal/_httpFormDataReadNumber.ts +15 -15
- package/src/internal/_httpFormDataReadString.ts +10 -10
- package/src/internal/_httpHeaderReadBigint.ts +10 -10
- package/src/internal/_httpHeaderReadBoolean.ts +8 -8
- package/src/internal/_httpHeaderReadNumber.ts +7 -7
- package/src/internal/_httpParameterReadBigint.ts +10 -10
- package/src/internal/_httpParameterReadBoolean.ts +8 -8
- package/src/internal/_httpParameterReadNumber.ts +7 -7
- package/src/internal/_httpParameterReadString.ts +2 -2
- package/src/internal/_httpQueryParseURLSearchParams.ts +12 -12
- package/src/internal/_httpQueryReadArray.ts +4 -4
- package/src/internal/_httpQueryReadBigint.ts +12 -12
- package/src/internal/_httpQueryReadBoolean.ts +14 -14
- package/src/internal/_httpQueryReadNumber.ts +9 -9
- package/src/internal/_httpQueryReadString.ts +4 -4
- package/src/internal/_isBetween.ts +2 -2
- package/src/internal/_isBigintString.ts +8 -8
- package/src/internal/_isFormatByte.ts +7 -7
- package/src/internal/_isFormatDate.ts +3 -3
- package/src/internal/_isFormatDateTime.ts +4 -4
- package/src/internal/_isFormatDuration.ts +4 -4
- package/src/internal/_isFormatEmail.ts +4 -4
- package/src/internal/_isFormatHostname.ts +4 -4
- package/src/internal/_isFormatIdnEmail.ts +4 -4
- package/src/internal/_isFormatIdnHostname.ts +4 -4
- package/src/internal/_isFormatIpv4.ts +4 -4
- package/src/internal/_isFormatIpv6.ts +4 -4
- package/src/internal/_isFormatIri.ts +3 -3
- package/src/internal/_isFormatIriReference.ts +4 -4
- package/src/internal/_isFormatJsonPointer.ts +3 -3
- package/src/internal/_isFormatPassword.ts +1 -1
- package/src/internal/_isFormatRegex.ts +8 -8
- package/src/internal/_isFormatRelativeJsonPointer.ts +4 -4
- package/src/internal/_isFormatTime.ts +4 -4
- package/src/internal/_isFormatUri.ts +6 -6
- package/src/internal/_isFormatUriReference.ts +5 -5
- package/src/internal/_isFormatUriTemplate.ts +4 -4
- package/src/internal/_isFormatUrl.ts +4 -4
- package/src/internal/_isFormatUuid.ts +3 -3
- package/src/internal/_isTypeFloat.ts +5 -5
- package/src/internal/_isTypeInt32.ts +5 -5
- package/src/internal/_isTypeInt64.ts +5 -5
- package/src/internal/_isTypeUint32.ts +5 -5
- package/src/internal/_isTypeUint64.ts +5 -5
- package/src/internal/_isUniqueItems.ts +159 -159
- package/src/internal/_jsonStringifyNumber.ts +12 -12
- package/src/internal/_jsonStringifyRest.ts +3 -3
- package/src/internal/_jsonStringifyString.ts +42 -42
- package/src/internal/_jsonStringifyTail.ts +2 -2
- package/src/internal/_llmApplicationFinalize.ts +20 -20
- package/src/internal/_miscCloneAny.ts +46 -46
- package/src/internal/_notationAny.ts +37 -37
- package/src/internal/_notationCamel.ts +13 -13
- package/src/internal/_notationPascal.ts +8 -8
- package/src/internal/_notationSnake.ts +43 -43
- package/src/internal/_randomArray.ts +21 -21
- package/src/internal/_randomBigint.ts +6 -6
- package/src/internal/_randomBoolean.ts +1 -1
- package/src/internal/_randomFormatByte.ts +3 -3
- package/src/internal/_randomFormatDate.ts +18 -18
- package/src/internal/_randomFormatDatetime.ts +16 -16
- package/src/internal/_randomFormatDuration.ts +27 -27
- package/src/internal/_randomFormatEmail.ts +11 -11
- package/src/internal/_randomFormatHostname.ts +6 -6
- package/src/internal/_randomFormatIdnEmail.ts +3 -3
- package/src/internal/_randomFormatIdnHostname.ts +3 -3
- package/src/internal/_randomFormatIpv4.ts +11 -11
- package/src/internal/_randomFormatIpv6.ts +11 -11
- package/src/internal/_randomFormatIri.ts +3 -3
- package/src/internal/_randomFormatIriReference.ts +3 -3
- package/src/internal/_randomFormatJsonPointer.ts +7 -7
- package/src/internal/_randomFormatPassword.ts +8 -8
- package/src/internal/_randomFormatRegex.ts +4 -4
- package/src/internal/_randomFormatRelativeJsonPointer.ts +8 -8
- package/src/internal/_randomFormatTime.ts +14 -14
- package/src/internal/_randomFormatUri.ts +3 -3
- package/src/internal/_randomFormatUriReference.ts +3 -3
- package/src/internal/_randomFormatUriTemplate.ts +3 -3
- package/src/internal/_randomFormatUrl.ts +11 -11
- package/src/internal/_randomFormatUuid.ts +6 -6
- package/src/internal/_randomInteger.ts +47 -47
- package/src/internal/_randomNumber.ts +74 -74
- package/src/internal/_randomPattern.ts +10 -10
- package/src/internal/_randomPick.ts +9 -9
- package/src/internal/_randomString.ts +24 -24
- package/src/internal/_throwTypeGuardError.ts +5 -5
- package/src/internal/_validateReport.ts +13 -13
- package/src/internal/private/__notationCapitalize.ts +2 -2
- package/src/internal/private/__notationUnsnake.ts +24 -24
- package/src/json.ts +752 -752
- package/src/llm.ts +481 -481
- package/src/misc.ts +658 -658
- package/src/module.ts +937 -937
- package/src/notations.ts +827 -827
- package/src/programmers/AssertProgrammer.ts +454 -454
- package/src/programmers/CheckerProgrammer.ts +1617 -1617
- package/src/programmers/FeatureProgrammer.ts +622 -622
- package/src/programmers/ImportProgrammer.ts +185 -185
- package/src/programmers/IsProgrammer.ts +273 -273
- package/src/programmers/RandomProgrammer.ts +1190 -1190
- package/src/programmers/TypiaProgrammer.ts +174 -174
- package/src/programmers/ValidateProgrammer.ts +439 -439
- package/src/programmers/functional/FunctionalAssertFunctionProgrammer.ts +153 -153
- package/src/programmers/functional/FunctionalAssertParametersProgrammer.ts +125 -125
- package/src/programmers/functional/FunctionalAssertReturnProgrammer.ts +115 -115
- package/src/programmers/functional/FunctionalIsFunctionProgrammer.ts +72 -72
- package/src/programmers/functional/FunctionalIsParametersProgrammer.ts +113 -113
- package/src/programmers/functional/FunctionalIsReturnProgrammer.ts +116 -116
- package/src/programmers/functional/FunctionalValidateFunctionProgrammer.ts +119 -119
- package/src/programmers/functional/FunctionalValidateParametersProgrammer.ts +274 -274
- package/src/programmers/functional/FunctionalValidateReturnProgrammer.ts +135 -135
- package/src/programmers/functional/internal/FunctionalGeneralProgrammer.ts +34 -34
- package/src/programmers/helpers/AtomicPredicator.ts +35 -35
- package/src/programmers/helpers/CloneJoiner.ts +143 -143
- package/src/programmers/helpers/FunctionProgrammer.ts +67 -67
- package/src/programmers/helpers/HttpMetadataUtil.ts +21 -21
- package/src/programmers/helpers/NotationJoiner.ts +144 -144
- package/src/programmers/helpers/OptionPredicator.ts +15 -15
- package/src/programmers/helpers/ProtobufUtil.ts +228 -228
- package/src/programmers/helpers/PruneJoiner.ts +148 -148
- package/src/programmers/helpers/RandomJoiner.ts +168 -168
- package/src/programmers/helpers/StringifyJoinder.ts +115 -115
- package/src/programmers/helpers/StringifyPredicator.ts +13 -13
- package/src/programmers/helpers/UnionExplorer.ts +372 -372
- package/src/programmers/helpers/UnionPredicator.ts +79 -79
- package/src/programmers/helpers/disable_function_programmer_declare.ts +32 -32
- package/src/programmers/http/HttpAssertFormDataProgrammer.ts +99 -99
- package/src/programmers/http/HttpAssertHeadersProgrammer.ts +99 -99
- package/src/programmers/http/HttpAssertQueryProgrammer.ts +105 -105
- package/src/programmers/http/HttpFormDataProgrammer.ts +308 -308
- package/src/programmers/http/HttpHeadersProgrammer.ts +400 -400
- package/src/programmers/http/HttpIsFormDataProgrammer.ts +108 -108
- package/src/programmers/http/HttpIsHeadersProgrammer.ts +108 -108
- package/src/programmers/http/HttpIsQueryProgrammer.ts +114 -114
- package/src/programmers/http/HttpParameterProgrammer.ts +115 -115
- package/src/programmers/http/HttpQueryProgrammer.ts +336 -336
- package/src/programmers/http/HttpValidateFormDataProgrammer.ts +92 -92
- package/src/programmers/http/HttpValidateHeadersProgrammer.ts +92 -92
- package/src/programmers/http/HttpValidateQueryProgrammer.ts +98 -98
- package/src/programmers/internal/check_array_length.ts +47 -47
- package/src/programmers/internal/check_bigint.ts +50 -50
- package/src/programmers/internal/check_dynamic_key.ts +201 -201
- package/src/programmers/internal/check_dynamic_properties.ts +208 -208
- package/src/programmers/internal/check_everything.ts +23 -23
- package/src/programmers/internal/check_native.ts +27 -27
- package/src/programmers/internal/check_number.ts +112 -112
- package/src/programmers/internal/check_object.ts +75 -75
- package/src/programmers/internal/check_string.ts +50 -50
- package/src/programmers/internal/check_template.ts +48 -48
- package/src/programmers/internal/check_union_array_like.ts +335 -335
- package/src/programmers/internal/decode_union_object.ts +116 -116
- package/src/programmers/internal/feature_object_entries.ts +61 -61
- package/src/programmers/internal/json_schema_alias.ts +47 -47
- package/src/programmers/internal/json_schema_array.ts +45 -45
- package/src/programmers/internal/json_schema_bigint.ts +15 -15
- package/src/programmers/internal/json_schema_boolean.ts +15 -15
- package/src/programmers/internal/json_schema_constant.ts +26 -26
- package/src/programmers/internal/json_schema_description.ts +12 -12
- package/src/programmers/internal/json_schema_discriminator.ts +35 -35
- package/src/programmers/internal/json_schema_escaped.ts +82 -82
- package/src/programmers/internal/json_schema_native.ts +33 -33
- package/src/programmers/internal/json_schema_number.ts +15 -15
- package/src/programmers/internal/json_schema_object.ts +158 -158
- package/src/programmers/internal/json_schema_plugin.ts +18 -18
- package/src/programmers/internal/json_schema_station.ts +182 -182
- package/src/programmers/internal/json_schema_string.ts +15 -15
- package/src/programmers/internal/json_schema_template.ts +55 -55
- package/src/programmers/internal/json_schema_title.ts +20 -20
- package/src/programmers/internal/json_schema_tuple.ts +35 -35
- package/src/programmers/internal/metadata_to_pattern.ts +42 -42
- package/src/programmers/internal/postfix_of_tuple.ts +5 -5
- package/src/programmers/internal/prune_object_properties.ts +71 -71
- package/src/programmers/internal/stringify_dynamic_properties.ts +162 -162
- package/src/programmers/internal/stringify_regular_properties.ts +81 -81
- package/src/programmers/internal/template_to_pattern.ts +23 -23
- package/src/programmers/internal/wrap_metadata_rest_tuple.ts +23 -23
- package/src/programmers/json/JsonApplicationProgrammer.ts +279 -279
- package/src/programmers/json/JsonAssertParseProgrammer.ts +113 -113
- package/src/programmers/json/JsonAssertStringifyProgrammer.ts +115 -115
- package/src/programmers/json/JsonIsParseProgrammer.ts +114 -114
- package/src/programmers/json/JsonIsStringifyProgrammer.ts +108 -108
- package/src/programmers/json/JsonSchemasProgrammer.ts +91 -91
- package/src/programmers/json/JsonStringifyProgrammer.ts +1124 -1124
- package/src/programmers/json/JsonValidateParseProgrammer.ts +105 -105
- package/src/programmers/json/JsonValidateStringifyProgrammer.ts +124 -124
- package/src/programmers/llm/LlmApplicationOfValidateProgrammer.ts +81 -81
- package/src/programmers/llm/LlmApplicationProgrammer.ts +277 -278
- package/src/programmers/llm/LlmModelPredicator.ts +127 -127
- package/src/programmers/llm/LlmParametersProgrammer.ts +90 -90
- package/src/programmers/llm/LlmSchemaProgrammer.ts +143 -143
- package/src/programmers/misc/MiscAssertCloneProgrammer.ts +95 -95
- package/src/programmers/misc/MiscAssertPruneProgrammer.ts +116 -116
- package/src/programmers/misc/MiscCloneProgrammer.ts +1032 -1032
- package/src/programmers/misc/MiscIsCloneProgrammer.ts +99 -99
- package/src/programmers/misc/MiscIsPruneProgrammer.ts +97 -97
- package/src/programmers/misc/MiscLiteralsProgrammer.ts +80 -80
- package/src/programmers/misc/MiscPruneProgrammer.ts +728 -728
- package/src/programmers/misc/MiscValidateCloneProgrammer.ts +111 -111
- package/src/programmers/misc/MiscValidatePruneProgrammer.ts +113 -113
- package/src/programmers/notations/NotationAssertGeneralProgrammer.ts +101 -101
- package/src/programmers/notations/NotationGeneralProgrammer.ts +984 -984
- package/src/programmers/notations/NotationIsGeneralProgrammer.ts +105 -105
- package/src/programmers/notations/NotationValidateGeneralProgrammer.ts +119 -119
- package/src/programmers/protobuf/ProtobufAssertDecodeProgrammer.ts +98 -98
- package/src/programmers/protobuf/ProtobufAssertEncodeProgrammer.ts +102 -102
- package/src/programmers/protobuf/ProtobufDecodeProgrammer.ts +654 -654
- package/src/programmers/protobuf/ProtobufEncodeProgrammer.ts +945 -945
- package/src/programmers/protobuf/ProtobufIsDecodeProgrammer.ts +109 -109
- package/src/programmers/protobuf/ProtobufIsEncodeProgrammer.ts +98 -98
- package/src/programmers/protobuf/ProtobufMessageProgrammer.ts +179 -179
- package/src/programmers/protobuf/ProtobufValidateDecodeProgrammer.ts +92 -92
- package/src/programmers/protobuf/ProtobufValidateEncodeProgrammer.ts +119 -119
- package/src/protobuf.ts +868 -868
- package/src/reflect.ts +57 -57
- package/src/schemas/json/IJsonApplication.ts +73 -73
- package/src/schemas/json/IJsonSchemaCollection.ts +29 -29
- package/src/schemas/json/__IJsonApplication.ts +63 -63
- package/src/schemas/llm/ILlmApplicationOfValidate.ts +55 -55
- package/src/schemas/llm/ILlmFunctionOfValidate.ts +39 -39
- package/src/schemas/metadata/IMetadata.ts +35 -35
- package/src/schemas/metadata/IMetadataAlias.ts +6 -6
- package/src/schemas/metadata/IMetadataAliasType.ts +12 -12
- package/src/schemas/metadata/IMetadataApplication.ts +7 -7
- package/src/schemas/metadata/IMetadataArray.ts +6 -6
- package/src/schemas/metadata/IMetadataComponents.ts +11 -11
- package/src/schemas/metadata/IMetadataConstantValue.ts +11 -11
- package/src/schemas/metadata/IMetadataDictionary.ts +11 -11
- package/src/schemas/metadata/IMetadataMap.ts +8 -8
- package/src/schemas/metadata/IMetadataNative.ts +6 -6
- package/src/schemas/metadata/IMetadataObject.ts +6 -6
- package/src/schemas/metadata/IMetadataObjectType.ts +13 -13
- package/src/schemas/metadata/IMetadataSet.ts +7 -7
- package/src/schemas/metadata/IMetadataTemplate.ts +7 -7
- package/src/schemas/metadata/IMetadataTuple.ts +6 -6
- package/src/schemas/metadata/IMetadataTypeTag.ts +16 -16
- package/src/schemas/metadata/Metadata.ts +669 -669
- package/src/schemas/metadata/MetadataAlias.ts +46 -46
- package/src/schemas/metadata/MetadataAliasType.ts +63 -63
- package/src/schemas/metadata/MetadataApplication.ts +44 -44
- package/src/schemas/metadata/MetadataArray.ts +49 -49
- package/src/schemas/metadata/MetadataAtomic.ts +87 -87
- package/src/schemas/metadata/MetadataComponents.ts +98 -98
- package/src/schemas/metadata/MetadataConstantValue.ts +62 -62
- package/src/schemas/metadata/MetadataMap.ts +48 -48
- package/src/schemas/metadata/MetadataNative.ts +44 -44
- package/src/schemas/metadata/MetadataObject.ts +48 -48
- package/src/schemas/metadata/MetadataObjectType.ts +149 -149
- package/src/schemas/metadata/MetadataParameter.ts +54 -54
- package/src/schemas/metadata/MetadataProperty.ts +59 -59
- package/src/schemas/metadata/MetadataSet.ts +45 -45
- package/src/schemas/metadata/MetadataTemplate.ts +80 -80
- package/src/schemas/metadata/MetadataTuple.ts +32 -32
- package/src/schemas/protobuf/IProtobufProperty.ts +6 -6
- package/src/schemas/protobuf/IProtobufPropertyType.ts +37 -37
- package/src/schemas/protobuf/IProtobufSchema.ts +50 -50
- package/src/tags/Example.ts +24 -24
- package/src/tags/Examples.ts +16 -16
- package/src/tags/Format.ts +50 -50
- package/src/tags/JsonSchemaPlugin.ts +8 -8
- package/src/tags/Sequence.ts +10 -10
- package/src/tags/TagBase.ts +82 -82
- package/src/tags/Type.ts +32 -32
- package/src/tags/UniqueItems.ts +14 -14
- package/src/tags/index.ts +21 -21
- package/src/transform.ts +35 -35
- package/src/transformers/CallExpressionTransformer.ts +547 -547
- package/src/transformers/FileTransformer.ts +136 -136
- package/src/transformers/IProgrammerProps.ts +11 -11
- package/src/transformers/ITransformOptions.ts +62 -62
- package/src/transformers/ITransformProps.ts +9 -9
- package/src/transformers/ITypiaContext.ts +18 -18
- package/src/transformers/ImportTransformer.ts +81 -81
- package/src/transformers/NodeTransformer.ts +17 -17
- package/src/transformers/TransformerError.ts +60 -60
- package/src/transformers/features/AssertTransformer.ts +24 -24
- package/src/transformers/features/CreateAssertTransformer.ts +24 -24
- package/src/transformers/features/CreateIsTransformer.ts +18 -18
- package/src/transformers/features/CreateRandomTransformer.ts +43 -43
- package/src/transformers/features/CreateValidateTransformer.ts +18 -18
- package/src/transformers/features/IsTransformer.ts +18 -18
- package/src/transformers/features/RandomTransformer.ts +41 -41
- package/src/transformers/features/ValidateTransformer.ts +18 -18
- package/src/transformers/features/functional/FunctionalGenericTransformer.ts +57 -57
- package/src/transformers/features/http/CreateHttpAssertFormDataTransformer.ts +13 -13
- package/src/transformers/features/http/CreateHttpAssertHeadersTransformer.ts +13 -13
- package/src/transformers/features/http/CreateHttpAssertQueryTransformer.ts +13 -13
- package/src/transformers/features/http/CreateHttpFormDataTransformer.ts +13 -13
- package/src/transformers/features/http/CreateHttpHeadersTransformer.ts +13 -13
- package/src/transformers/features/http/CreateHttpIsFormDataTransformer.ts +13 -13
- package/src/transformers/features/http/CreateHttpIsHeadersTransformer.ts +13 -13
- package/src/transformers/features/http/CreateHttpIsQueryTransformer.ts +13 -13
- package/src/transformers/features/http/CreateHttpParameterTransformer.ts +13 -13
- package/src/transformers/features/http/CreateHttpQueryTransformer.ts +13 -13
- package/src/transformers/features/http/CreateHttpValidateFormDataTransformer.ts +13 -13
- package/src/transformers/features/http/CreateHttpValidateHeadersTransformer.ts +13 -13
- package/src/transformers/features/http/CreateHttpValidateQueryTransformer.ts +13 -13
- package/src/transformers/features/http/HttpAssertFormDataTransformer.ts +13 -13
- package/src/transformers/features/http/HttpAssertHeadersTransformer.ts +13 -13
- package/src/transformers/features/http/HttpAssertQueryTransformer.ts +13 -13
- package/src/transformers/features/http/HttpFormDataTransformer.ts +13 -13
- package/src/transformers/features/http/HttpHeadersTransformer.ts +13 -13
- package/src/transformers/features/http/HttpIsFormDataTransformer.ts +13 -13
- package/src/transformers/features/http/HttpIsHeadersTransformer.ts +13 -13
- package/src/transformers/features/http/HttpIsQueryTransformer.ts +13 -13
- package/src/transformers/features/http/HttpParameterTransformer.ts +13 -13
- package/src/transformers/features/http/HttpQueryTransformer.ts +13 -13
- package/src/transformers/features/http/HttpValidateFormDataTransformer.ts +13 -13
- package/src/transformers/features/http/HttpValidateHeadersTransformer.ts +13 -13
- package/src/transformers/features/http/HttpValidateQueryTransformer.ts +13 -13
- package/src/transformers/features/json/JsonApplicationTransformer.ts +105 -105
- package/src/transformers/features/json/JsonAssertParseTransformer.ts +13 -13
- package/src/transformers/features/json/JsonAssertStringifyTransformer.ts +13 -13
- package/src/transformers/features/json/JsonCreateAssertParseTransformer.ts +13 -13
- package/src/transformers/features/json/JsonCreateAssertStringifyTransformer.ts +13 -13
- package/src/transformers/features/json/JsonCreateIsParseTransformer.ts +13 -13
- package/src/transformers/features/json/JsonCreateIsStringifyTransformer.ts +13 -13
- package/src/transformers/features/json/JsonCreateStringifyTransformer.ts +13 -13
- package/src/transformers/features/json/JsonCreateValidateParseTransformer.ts +13 -13
- package/src/transformers/features/json/JsonCreateValidateStringifyProgrammer.ts +13 -13
- package/src/transformers/features/json/JsonIsParseTransformer.ts +13 -13
- package/src/transformers/features/json/JsonIsStringifyTransformer.ts +13 -13
- package/src/transformers/features/json/JsonSchemasTransformer.ts +143 -143
- package/src/transformers/features/json/JsonStringifyTransformer.ts +13 -13
- package/src/transformers/features/json/JsonValidateParseTransformer.ts +13 -13
- package/src/transformers/features/json/JsonValidateStringifyTransformer.ts +13 -13
- package/src/transformers/features/llm/LlmApplicationOfValidateTransformer.ts +115 -115
- package/src/transformers/features/llm/LlmApplicationTransformer.ts +113 -113
- package/src/transformers/features/llm/LlmParametersTransformer.ts +89 -89
- package/src/transformers/features/llm/LlmSchemaTransformer.ts +130 -130
- package/src/transformers/features/misc/MiscAssertCloneTransformer.ts +13 -13
- package/src/transformers/features/misc/MiscAssertPruneTransformer.ts +13 -13
- package/src/transformers/features/misc/MiscCloneTransformer.ts +13 -13
- package/src/transformers/features/misc/MiscCreateAssertCloneTransformer.ts +13 -13
- package/src/transformers/features/misc/MiscCreateAssertPruneTransformer.ts +13 -13
- package/src/transformers/features/misc/MiscCreateCloneTransformer.ts +13 -13
- package/src/transformers/features/misc/MiscCreateIsCloneTransformer.ts +13 -13
- package/src/transformers/features/misc/MiscCreateIsPruneTransformer.ts +13 -13
- package/src/transformers/features/misc/MiscCreatePruneTransformer.ts +13 -13
- package/src/transformers/features/misc/MiscCreateValidateCloneTransformer.ts +13 -13
- package/src/transformers/features/misc/MiscCreateValidatePruneTransformer.ts +13 -13
- package/src/transformers/features/misc/MiscIsCloneTransformer.ts +13 -13
- package/src/transformers/features/misc/MiscIsPruneTransformer.ts +13 -13
- package/src/transformers/features/misc/MiscLiteralsTransformer.ts +35 -35
- package/src/transformers/features/misc/MiscPruneTransformer.ts +13 -13
- package/src/transformers/features/misc/MiscValidateCloneTransformer.ts +13 -13
- package/src/transformers/features/misc/MiscValidatePruneTransformer.ts +13 -13
- package/src/transformers/features/notations/NotationAssertGeneralTransformer.ts +20 -20
- package/src/transformers/features/notations/NotationCreateAssertGeneralTransformer.ts +20 -20
- package/src/transformers/features/notations/NotationCreateGeneralTransformer.ts +20 -20
- package/src/transformers/features/notations/NotationCreateIsGeneralTransformer.ts +20 -20
- package/src/transformers/features/notations/NotationCreateValidateGeneralTransformer.ts +20 -20
- package/src/transformers/features/notations/NotationGeneralTransformer.ts +18 -18
- package/src/transformers/features/notations/NotationIsGeneralTransformer.ts +20 -20
- package/src/transformers/features/notations/NotationValidateGeneralTransformer.ts +20 -20
- package/src/transformers/features/protobuf/ProtobufAssertDecodeTransformer.ts +13 -13
- package/src/transformers/features/protobuf/ProtobufAssertEncodeTransformer.ts +13 -13
- package/src/transformers/features/protobuf/ProtobufCreateAssertDecodeTransformer.ts +13 -13
- package/src/transformers/features/protobuf/ProtobufCreateAssertEncodeTransformer.ts +13 -13
- package/src/transformers/features/protobuf/ProtobufCreateDecodeTransformer.ts +13 -13
- package/src/transformers/features/protobuf/ProtobufCreateEncodeTransformer.ts +13 -13
- package/src/transformers/features/protobuf/ProtobufCreateIsDecodeTransformer.ts +13 -13
- package/src/transformers/features/protobuf/ProtobufCreateIsEncodeTransformer.ts +13 -13
- package/src/transformers/features/protobuf/ProtobufCreateValidateDecodeTransformer.ts +13 -13
- package/src/transformers/features/protobuf/ProtobufCreateValidateEncodeTransformer.ts +13 -13
- package/src/transformers/features/protobuf/ProtobufDecodeTransformer.ts +13 -13
- package/src/transformers/features/protobuf/ProtobufEncodeTransformer.ts +13 -13
- package/src/transformers/features/protobuf/ProtobufIsDecodeTransformer.ts +13 -13
- package/src/transformers/features/protobuf/ProtobufIsEncodeTransformer.ts +13 -13
- package/src/transformers/features/protobuf/ProtobufMessageTransformer.ts +35 -35
- package/src/transformers/features/protobuf/ProtobufValidateDecodeTransformer.ts +13 -13
- package/src/transformers/features/protobuf/ProtobufValidateEncodeTransformer.ts +13 -13
- package/src/transformers/features/reflect/ReflectMetadataTransformer.ts +69 -69
- package/src/transformers/features/reflect/ReflectNameTransformer.ts +82 -82
- package/src/transformers/internal/GenericTransformer.ts +101 -101
- package/src/utils/MapUtil.ts +14 -14
- package/src/utils/NamingConvention.ts +94 -94
- package/src/utils/ProtobufNameEncoder.ts +32 -32
- package/src/utils/StringUtil.ts +16 -16
package/src/llm.ts
CHANGED
|
@@ -1,481 +1,481 @@
|
|
|
1
|
-
import { ILlmApplication, ILlmSchema } from "@samchon/openapi";
|
|
2
|
-
|
|
3
|
-
import { ILlmApplicationOfValidate } from "./module";
|
|
4
|
-
|
|
5
|
-
/**
|
|
6
|
-
* > You must configure the generic argument `App`.
|
|
7
|
-
*
|
|
8
|
-
* TypeScript functions to LLM function calling application with validators.
|
|
9
|
-
*
|
|
10
|
-
* Creates an application of LLM (Large Language Model) function calling application
|
|
11
|
-
* from a TypeScript class or interface type containing the target functions to be
|
|
12
|
-
* called by the LLM function calling feature.
|
|
13
|
-
*
|
|
14
|
-
* If you put the returned {@link ILlmApplicationOfValidate.functions} objects to the
|
|
15
|
-
* LLM provider like [OpenAI (ChatGPT)](https://openai.com/), the LLM will automatically
|
|
16
|
-
* select the proper function and fill its arguments from the conversation
|
|
17
|
-
* (maybe chatting text) with user (human). This is the concept of the LLM function calling.
|
|
18
|
-
*
|
|
19
|
-
* Additionally, the LLM function calling sometimes take a mistake that composing wrong typed
|
|
20
|
-
* {@link ILlmFunctionOfValidate.parameters}. In that case, deliver return value of the
|
|
21
|
-
* {@link ILlmFunctionOfValidate.validate} function, then LLM provider will correct the
|
|
22
|
-
* parameters at the next conversation. The {@link ILlmFunctionOfValidate.validate} function
|
|
23
|
-
* is a validator function reporting the detailed information about the wrong typed parameters.
|
|
24
|
-
*
|
|
25
|
-
* By the way, there can be some parameters (or their nested properties) which must be
|
|
26
|
-
* composed by human, not by LLM. File uploading feature or some sensitive information
|
|
27
|
-
* like secrety key (password) are the examples. In that case, you can separate the
|
|
28
|
-
* function parameters to both LLM and human sides by configuring the
|
|
29
|
-
* {@link ILlmApplicationOfValidate.IOptions.separate} property. The separated parameters
|
|
30
|
-
* are assigned to the {@link ILlmFunctionOfValidate.separated} property.
|
|
31
|
-
*
|
|
32
|
-
* For reference, the actual function call execution is not by LLM, but by you.
|
|
33
|
-
* When the LLM selects the proper function and fills the arguments, you just call
|
|
34
|
-
* the function with the LLM prepared arguments. And then informs the return value to
|
|
35
|
-
* the LLM by system prompt. The LLM will continue the next conversation based on
|
|
36
|
-
* the return value.
|
|
37
|
-
*
|
|
38
|
-
* Additionally, if you've configured {@link ILlmApplicationOfValidate.IOptions.separate},
|
|
39
|
-
* so that the parameters are separated to human and LLM sides, you can merge these
|
|
40
|
-
* humand and LLM sides' parameters into one through {@link HttpLlm.mergeParameters}
|
|
41
|
-
* before the actual LLM function call execution.
|
|
42
|
-
*
|
|
43
|
-
* Here is the list of available `Model` types with their corresponding LLM schema.
|
|
44
|
-
* Reading the following list, and determine the `Model` type considering the
|
|
45
|
-
* characteristics of the target LLM provider.
|
|
46
|
-
*
|
|
47
|
-
* - LLM provider schemas
|
|
48
|
-
* - `chatgpt`: [`IChatGptSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IChatGptSchema.ts)
|
|
49
|
-
* - `claude`: [`IClaudeSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IClaudeSchema.ts)
|
|
50
|
-
* - `gemini`: [`IGeminiSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IGeminiSchema.ts)
|
|
51
|
-
* - `llama`: [`ILlamaSchema`](https://github.com/samchon/openapi/blob/master/src/structures/ILlamaSchema.ts)
|
|
52
|
-
* - Midldle layer schemas
|
|
53
|
-
* - `3.0`: [`ILlmSchemaV3`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3.ts)
|
|
54
|
-
* - `3.1`: [`ILlmSchemaV3_1`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3_1.ts)
|
|
55
|
-
*
|
|
56
|
-
* @template App Target class or interface type collecting the functions to call
|
|
57
|
-
* @template Model LLM schema model
|
|
58
|
-
* @template Config Configuration of LLM schema composition
|
|
59
|
-
* @param options Options for the LLM application construction
|
|
60
|
-
* @returns Application of LLM function calling schemas
|
|
61
|
-
* @reference https://platform.openai.com/docs/guides/function-calling
|
|
62
|
-
* @author Jeongho Nam - https://github.com/samchon
|
|
63
|
-
*/
|
|
64
|
-
export function applicationOfValidate(
|
|
65
|
-
options?: Partial<Pick<ILlmApplicationOfValidate.IOptions<any>, "separate">>,
|
|
66
|
-
): never;
|
|
67
|
-
|
|
68
|
-
/**
|
|
69
|
-
* TypeScript functions to LLM function calling application with validators.
|
|
70
|
-
*
|
|
71
|
-
* Creates an application of LLM (Large Language Model) function calling application
|
|
72
|
-
* from a TypeScript class or interface type containing the target functions to be
|
|
73
|
-
* called by the LLM function calling feature.
|
|
74
|
-
*
|
|
75
|
-
* If you put the returned {@link ILlmApplicationOfValidate.functions} objects to the
|
|
76
|
-
* LLM provider like [OpenAI (ChatGPT)](https://openai.com/), the LLM will automatically
|
|
77
|
-
* select the proper function and fill its arguments from the conversation
|
|
78
|
-
* (maybe chatting text) with user (human). This is the concept of the LLM function calling.
|
|
79
|
-
*
|
|
80
|
-
* Additionally, the LLM function calling sometimes take a mistake that composing wrong typed
|
|
81
|
-
* {@link ILlmFunctionOfValidate.parameters}. In that case, deliver return value of the
|
|
82
|
-
* {@link ILlmFunctionOfValidate.validate} function, then LLM provider will correct the
|
|
83
|
-
* parameters at the next conversation. The {@link ILlmFunctionOfValidate.validate} function
|
|
84
|
-
* is a validator function reporting the detailed information about the wrong typed parameters.
|
|
85
|
-
*
|
|
86
|
-
* By the way, there can be some parameters (or their nested properties) which must be
|
|
87
|
-
* composed by human, not by LLM. File uploading feature or some sensitive information
|
|
88
|
-
* like secrety key (password) are the examples. In that case, you can separate the
|
|
89
|
-
* function parameters to both LLM and human sides by configuring the
|
|
90
|
-
* {@link ILlmApplicationOfValidate.IOptions.separate} property. The separated parameters
|
|
91
|
-
* are assigned to the {@link ILlmFunctionOfValidate.separated} property.
|
|
92
|
-
*
|
|
93
|
-
* For reference, the actual function call execution is not by LLM, but by you.
|
|
94
|
-
* When the LLM selects the proper function and fills the arguments, you just call
|
|
95
|
-
* the function with the LLM prepared arguments. And then informs the return value to
|
|
96
|
-
* the LLM by system prompt. The LLM will continue the next conversation based on
|
|
97
|
-
* the return value.
|
|
98
|
-
*
|
|
99
|
-
* Additionally, if you've configured {@link ILlmApplicationOfValidate.IOptions.separate},
|
|
100
|
-
* so that the parameters are separated to human and LLM sides, you can merge these
|
|
101
|
-
* humand and LLM sides' parameters into one through {@link HttpLlm.mergeParameters}
|
|
102
|
-
* before the actual LLM function call execution.
|
|
103
|
-
*
|
|
104
|
-
* Here is the list of available `Model` types with their corresponding LLM schema.
|
|
105
|
-
* Reading the following list, and determine the `Model` type considering the
|
|
106
|
-
* characteristics of the target LLM provider.
|
|
107
|
-
*
|
|
108
|
-
* - LLM provider schemas
|
|
109
|
-
* - `chatgpt`: [`IChatGptSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IChatGptSchema.ts)
|
|
110
|
-
* - `claude`: [`IClaudeSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IClaudeSchema.ts)
|
|
111
|
-
* - `gemini`: [`IGeminiSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IGeminiSchema.ts)
|
|
112
|
-
* - `llama`: [`ILlamaSchema`](https://github.com/samchon/openapi/blob/master/src/structures/ILlamaSchema.ts)
|
|
113
|
-
* - Midldle layer schemas
|
|
114
|
-
* - `3.0`: [`ILlmSchemaV3`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3.ts)
|
|
115
|
-
* - `3.1`: [`ILlmSchemaV3_1`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3_1.ts)
|
|
116
|
-
*
|
|
117
|
-
* @template App Target class or interface type collecting the functions to call
|
|
118
|
-
* @template Model LLM schema model
|
|
119
|
-
* @template Config Configuration of LLM schema composition
|
|
120
|
-
* @param options Options for the LLM application construction
|
|
121
|
-
* @returns Application of LLM function calling schemas
|
|
122
|
-
* @reference https://platform.openai.com/docs/guides/function-calling
|
|
123
|
-
* @author Jeongho Nam - https://github.com/samchon
|
|
124
|
-
*/
|
|
125
|
-
export function applicationOfValidate<
|
|
126
|
-
App extends Record<string, any>,
|
|
127
|
-
Model extends ILlmSchema.Model,
|
|
128
|
-
Config extends Partial<ILlmSchema.ModelConfig[Model]> = {},
|
|
129
|
-
>(
|
|
130
|
-
options?: Partial<
|
|
131
|
-
Pick<ILlmApplicationOfValidate.IOptions<Model>, "separate">
|
|
132
|
-
>,
|
|
133
|
-
): ILlmApplicationOfValidate<Model>;
|
|
134
|
-
|
|
135
|
-
/**
|
|
136
|
-
* @internal
|
|
137
|
-
*/
|
|
138
|
-
export function applicationOfValidate(): never {
|
|
139
|
-
halt("applicationOfValidate");
|
|
140
|
-
}
|
|
141
|
-
|
|
142
|
-
/**
|
|
143
|
-
* > You must configure the generic argument `App`.
|
|
144
|
-
*
|
|
145
|
-
* TypeScript functions to LLM function calling application.
|
|
146
|
-
*
|
|
147
|
-
* Creates an application of LLM (Large Language Model) function calling application
|
|
148
|
-
* from a TypeScript class or interface type containing the target functions to be
|
|
149
|
-
* called by the LLM function calling feature.
|
|
150
|
-
*
|
|
151
|
-
* If you put the returned {@link ILlmApplication.functions} objects to the LLM provider
|
|
152
|
-
* like [OpenAI (ChatGPT)](https://openai.com/), the LLM will automatically select the
|
|
153
|
-
* proper function and fill its arguments from the conversation (maybe chatting text)
|
|
154
|
-
* with user (human). This is the concept of the LLM function calling.
|
|
155
|
-
*
|
|
156
|
-
* By the way, there can be some parameters (or their nested properties) which must be
|
|
157
|
-
* composed by human, not by LLM. File uploading feature or some sensitive information
|
|
158
|
-
* like secrety key (password) are the examples. In that case, you can separate the
|
|
159
|
-
* function parameters to both LLM and human sides by configuring the
|
|
160
|
-
* {@link ILlmApplication.IOptions.separate} property. The separated parameters are
|
|
161
|
-
* assigned to the {@link ILlmFunction.separated} property.
|
|
162
|
-
*
|
|
163
|
-
* For reference, the actual function call execution is not by LLM, but by you.
|
|
164
|
-
* When the LLM selects the proper function and fills the arguments, you just call
|
|
165
|
-
* the function with the LLM prepared arguments. And then informs the return value to
|
|
166
|
-
* the LLM by system prompt. The LLM will continue the next conversation based on
|
|
167
|
-
* the return value.
|
|
168
|
-
*
|
|
169
|
-
* Additionally, if you've configured {@link ILlmApplication.IOptions.separate},
|
|
170
|
-
* so that the parameters are separated to human and LLM sides, you can merge these
|
|
171
|
-
* humand and LLM sides' parameters into one through {@link HttpLlm.mergeParameters}
|
|
172
|
-
* before the actual LLM function call execution.
|
|
173
|
-
*
|
|
174
|
-
* Here is the list of available `Model` types with their corresponding LLM schema.
|
|
175
|
-
* Reading the following list, and determine the `Model` type considering the
|
|
176
|
-
* characteristics of the target LLM provider.
|
|
177
|
-
*
|
|
178
|
-
* - LLM provider schemas
|
|
179
|
-
* - `chatgpt`: [`IChatGptSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IChatGptSchema.ts)
|
|
180
|
-
* - `claude`: [`IClaudeSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IClaudeSchema.ts)
|
|
181
|
-
* - `gemini`: [`IGeminiSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IGeminiSchema.ts)
|
|
182
|
-
* - `llama`: [`ILlamaSchema`](https://github.com/samchon/openapi/blob/master/src/structures/ILlamaSchema.ts)
|
|
183
|
-
* - Midldle layer schemas
|
|
184
|
-
* - `3.0`: [`ILlmSchemaV3`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3.ts)
|
|
185
|
-
* - `3.1`: [`ILlmSchemaV3_1`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3_1.ts)
|
|
186
|
-
*
|
|
187
|
-
* @template App Target class or interface type collecting the functions to call
|
|
188
|
-
* @template Model LLM schema model
|
|
189
|
-
* @template Config Configuration of LLM schema composition
|
|
190
|
-
* @param options Options for the LLM application construction
|
|
191
|
-
* @returns Application of LLM function calling schemas
|
|
192
|
-
* @reference https://platform.openai.com/docs/guides/function-calling
|
|
193
|
-
* @author Jeongho Nam - https://github.com/samchon
|
|
194
|
-
*/
|
|
195
|
-
export function application(
|
|
196
|
-
options?: Partial<Pick<ILlmApplication.IOptions<any>, "separate">>,
|
|
197
|
-
): never;
|
|
198
|
-
|
|
199
|
-
/**
|
|
200
|
-
* TypeScript functions to LLM function calling application.
|
|
201
|
-
*
|
|
202
|
-
* Creates an application of LLM (Large Language Model) function calling application
|
|
203
|
-
* from a TypeScript class or interface type containing the target functions to be
|
|
204
|
-
* called by the LLM function calling feature.
|
|
205
|
-
*
|
|
206
|
-
* If you put the returned {@link ILlmApplication.functions} objects to the LLM provider
|
|
207
|
-
* like [OpenAI (ChatGPT)](https://openai.com/), the LLM will automatically select the
|
|
208
|
-
* proper function and fill its arguments from the conversation (maybe chatting text)
|
|
209
|
-
* with user (human). This is the concept of the LLM function calling.
|
|
210
|
-
*
|
|
211
|
-
* By the way, there can be some parameters (or their nested properties) which must be
|
|
212
|
-
* composed by human, not by LLM. File uploading feature or some sensitive information
|
|
213
|
-
* like secrety key (password) are the examples. In that case, you can separate the
|
|
214
|
-
* function parameters to both LLM and human sides by configuring the
|
|
215
|
-
* {@link ILlmApplication.IOptions.separate} property. The separated parameters are
|
|
216
|
-
* assigned to the {@link ILlmFunction.separated} property.
|
|
217
|
-
*
|
|
218
|
-
* For reference, the actual function call execution is not by LLM, but by you.
|
|
219
|
-
* When the LLM selects the proper function and fills the arguments, you just call
|
|
220
|
-
* the function with the LLM prepared arguments. And then informs the return value to
|
|
221
|
-
* the LLM by system prompt. The LLM will continue the next conversation based on
|
|
222
|
-
* the return value.
|
|
223
|
-
*
|
|
224
|
-
* Additionally, if you've configured {@link ILlmApplication.IOptions.separate},
|
|
225
|
-
* so that the parameters are separated to human and LLM sides, you can merge these
|
|
226
|
-
* humand and LLM sides' parameters into one through {@link HttpLlm.mergeParameters}
|
|
227
|
-
* before the actual LLM function call execution.
|
|
228
|
-
*
|
|
229
|
-
* Here is the list of available `Model` types with their corresponding LLM schema.
|
|
230
|
-
* Reading the following list, and determine the `Model` type considering the
|
|
231
|
-
* characteristics of the target LLM provider.
|
|
232
|
-
*
|
|
233
|
-
* - LLM provider schemas
|
|
234
|
-
* - `chatgpt`: [`IChatGptSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IChatGptSchema.ts)
|
|
235
|
-
* - `claude`: [`IClaudeSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IClaudeSchema.ts)
|
|
236
|
-
* - `gemini`: [`IGeminiSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IGeminiSchema.ts)
|
|
237
|
-
* - `llama`: [`ILlamaSchema`](https://github.com/samchon/openapi/blob/master/src/structures/ILlamaSchema.ts)
|
|
238
|
-
* - Midldle layer schemas
|
|
239
|
-
* - `3.0`: [`ILlmSchemaV3`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3.ts)
|
|
240
|
-
* - `3.1`: [`ILlmSchemaV3_1`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3_1.ts)
|
|
241
|
-
*
|
|
242
|
-
* @template App Target class or interface type collecting the functions to call
|
|
243
|
-
* @template Model LLM schema model
|
|
244
|
-
* @template Config Configuration of LLM schema composition
|
|
245
|
-
* @param options Options for the LLM application construction
|
|
246
|
-
* @returns Application of LLM function calling schemas
|
|
247
|
-
* @reference https://platform.openai.com/docs/guides/function-calling
|
|
248
|
-
* @author Jeongho Nam - https://github.com/samchon
|
|
249
|
-
*/
|
|
250
|
-
export function application<
|
|
251
|
-
App extends Record<string, any>,
|
|
252
|
-
Model extends ILlmSchema.Model,
|
|
253
|
-
Config extends Partial<ILlmSchema.ModelConfig[Model]> = {},
|
|
254
|
-
>(
|
|
255
|
-
options?: Partial<Pick<ILlmApplication.IOptions<Model>, "separate">>,
|
|
256
|
-
): ILlmApplication<Model>;
|
|
257
|
-
|
|
258
|
-
/**
|
|
259
|
-
* @internal
|
|
260
|
-
*/
|
|
261
|
-
export function application(): never {
|
|
262
|
-
halt("application");
|
|
263
|
-
}
|
|
264
|
-
|
|
265
|
-
/**
|
|
266
|
-
* > You must configure the generic argument `Parameters`.
|
|
267
|
-
*
|
|
268
|
-
* TypeScript parameters to LLM parameters schema.
|
|
269
|
-
*
|
|
270
|
-
* Creates an LLM (Large Language Model) parameters schema, a type metadata that is used in the
|
|
271
|
-
* [LLM function calling](https://platform.openai.com/docs/guides/function-calling)
|
|
272
|
-
* and [LLM structured outputs](https://platform.openai.com/docs/guides/structured-outputs),
|
|
273
|
-
* from a TypeScript parameters type.
|
|
274
|
-
*
|
|
275
|
-
* For references, LLM identifies only keyworded arguments, not positional arguments.
|
|
276
|
-
* Therefore, the TypeScript parameters type must be an object type, and its properties
|
|
277
|
-
* must be static. If dynamic properties are, it would be compilation error.
|
|
278
|
-
*
|
|
279
|
-
* Also, such parameters type can be utilized not only for the LLM function calling,
|
|
280
|
-
* but also for the LLM structured outputs. The LLM structured outputs is a feature
|
|
281
|
-
* that LLM (Large Language Model) can generate a structured output, not only a plain
|
|
282
|
-
* text, by filling the parameters from the conversation (maybe chatting text) with user
|
|
283
|
-
* (human).
|
|
284
|
-
*
|
|
285
|
-
* Here is the list of available `Model` types with their corresponding LLM schema.
|
|
286
|
-
* Reading the following list, and determine the `Model` type considering the
|
|
287
|
-
* characteristics of the target LLM provider.
|
|
288
|
-
*
|
|
289
|
-
* - LLM provider schemas
|
|
290
|
-
* - `chatgpt`: [`IChatGptSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IChatGptSchema.ts)
|
|
291
|
-
* - `claude`: [`IClaudeSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IClaudeSchema.ts)
|
|
292
|
-
* - `gemini`: [`IGeminiSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IGeminiSchema.ts)
|
|
293
|
-
* - `llama`: [`ILlamaSchema`](https://github.com/samchon/openapi/blob/master/src/structures/ILlamaSchema.ts)
|
|
294
|
-
* - Midldle layer schemas
|
|
295
|
-
* - `3.0`: [`ILlmSchemaV3`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3.ts)
|
|
296
|
-
* - `3.1`: [`ILlmSchemaV3_1`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3_1.ts)
|
|
297
|
-
*
|
|
298
|
-
* @template Parameters Target parameters type
|
|
299
|
-
* @template Model LLM schema model
|
|
300
|
-
* @template Config Configuration of LLM schema composition
|
|
301
|
-
* @returns LLM parameters schema
|
|
302
|
-
* @reference https://platform.openai.com/docs/guides/function-calling
|
|
303
|
-
* @reference https://platform.openai.com/docs/guides/structured-outputs
|
|
304
|
-
*/
|
|
305
|
-
export function parameters(): never;
|
|
306
|
-
|
|
307
|
-
/**
|
|
308
|
-
* TypeScript parameters to LLM parameters schema.
|
|
309
|
-
*
|
|
310
|
-
* Creates an LLM (Large Language Model) parameters schema, a type metadata that is used in the
|
|
311
|
-
* [LLM function calling](https://platform.openai.com/docs/guides/function-calling)
|
|
312
|
-
* and [LLM structured outputs](https://platform.openai.com/docs/guides/structured-outputs),
|
|
313
|
-
* from a TypeScript parameters type.
|
|
314
|
-
*
|
|
315
|
-
* For references, LLM identifies only keyworded arguments, not positional arguments.
|
|
316
|
-
* Therefore, the TypeScript parameters type must be an object type, and its properties
|
|
317
|
-
* must be static. If dynamic properties are, it would be compilation error.
|
|
318
|
-
*
|
|
319
|
-
* Also, such parameters type can be utilized not only for the LLM function calling,
|
|
320
|
-
* but also for the LLM structured outputs. The LLM structured outputs is a feature
|
|
321
|
-
* that LLM (Large Language Model) can generate a structured output, not only a plain
|
|
322
|
-
* text, by filling the parameters from the conversation (maybe chatting text) with user
|
|
323
|
-
* (human).
|
|
324
|
-
*
|
|
325
|
-
* Here is the list of available `Model` types with their corresponding LLM schema.
|
|
326
|
-
* Reading the following list, and determine the `Model` type considering the
|
|
327
|
-
* characteristics of the target LLM provider.
|
|
328
|
-
*
|
|
329
|
-
* - LLM provider schemas
|
|
330
|
-
* - `chatgpt`: [`IChatGptSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IChatGptSchema.ts)
|
|
331
|
-
* - `claude`: [`IClaudeSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IClaudeSchema.ts)
|
|
332
|
-
* - `gemini`: [`IGeminiSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IGeminiSchema.ts)
|
|
333
|
-
* - `llama`: [`ILlamaSchema`](https://github.com/samchon/openapi/blob/master/src/structures/ILlamaSchema.ts)
|
|
334
|
-
* - Midldle layer schemas
|
|
335
|
-
* - `3.0`: [`ILlmSchemaV3`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3.ts)
|
|
336
|
-
* - `3.1`: [`ILlmSchemaV3_1`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3_1.ts)
|
|
337
|
-
*
|
|
338
|
-
* @template Parameters Target parameters type
|
|
339
|
-
* @template Model LLM schema model
|
|
340
|
-
* @template Config Configuration of LLM schema composition
|
|
341
|
-
* @returns LLM parameters schema
|
|
342
|
-
* @reference https://platform.openai.com/docs/guides/function-calling
|
|
343
|
-
* @reference https://platform.openai.com/docs/guides/structured-outputs
|
|
344
|
-
*/
|
|
345
|
-
export function parameters<
|
|
346
|
-
Parameters extends Record<string, any>,
|
|
347
|
-
Model extends ILlmSchema.Model,
|
|
348
|
-
Config extends Partial<ILlmSchema.ModelConfig[Model]> = {},
|
|
349
|
-
>(): ILlmSchema.ModelParameters[Model];
|
|
350
|
-
|
|
351
|
-
/**
|
|
352
|
-
* @internal
|
|
353
|
-
*/
|
|
354
|
-
export function parameters(): never {
|
|
355
|
-
halt("parameters");
|
|
356
|
-
}
|
|
357
|
-
|
|
358
|
-
/**
|
|
359
|
-
* > You must configure the generic argument `T`.
|
|
360
|
-
*
|
|
361
|
-
* TypeScript type to LLM type schema.
|
|
362
|
-
*
|
|
363
|
-
* Creates an LLM (Large Language Model) type schema, a type metadata that is used in the
|
|
364
|
-
* [LLM function calling](@reference https://platform.openai.com/docs/guides/function-calling),
|
|
365
|
-
* from a TypeScript type.
|
|
366
|
-
*
|
|
367
|
-
* The returned {@link ILlmSchema} type would be specified by the `Model` argument,
|
|
368
|
-
* and here is the list of available `Model` types with their corresponding LLM schema.
|
|
369
|
-
* Reading the following list, and determine the `Model` type considering the
|
|
370
|
-
* characteristics of the target LLM provider.
|
|
371
|
-
*
|
|
372
|
-
* - LLM provider schemas
|
|
373
|
-
* - `chatgpt`: [`IChatGptSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IChatGptSchema.ts)
|
|
374
|
-
* - `claude`: [`IClaudeSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IClaudeSchema.ts)
|
|
375
|
-
* - `gemini`: [`IGeminiSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IGeminiSchema.ts)
|
|
376
|
-
* - `llama`: [`ILlamaSchema`](https://github.com/samchon/openapi/blob/master/src/structures/ILlamaSchema.ts)
|
|
377
|
-
* - Midldle layer schemas
|
|
378
|
-
* - `3.0`: [`ILlmSchemaV3`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3.ts)
|
|
379
|
-
* - `3.1`: [`ILlmSchemaV3_1`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3_1.ts)
|
|
380
|
-
*
|
|
381
|
-
* If you actually want to perform the LLM function calling with TypeScript functions,
|
|
382
|
-
* you can do it with the {@link application} function. Otherwise you hope to perform the
|
|
383
|
-
* structured output, {@link parameters} function is better. Let's enjoy the LLM function calling
|
|
384
|
-
* and structured output with the native TypeScript functions and types.
|
|
385
|
-
*
|
|
386
|
-
* > **What LLM function calling is?
|
|
387
|
-
* >
|
|
388
|
-
* > LLM (Large Language Model) selects propert function and fill the arguments,
|
|
389
|
-
* > but actuall function call execution is not by LLM, but by you.
|
|
390
|
-
* >
|
|
391
|
-
* > In nowadays, most LLM (Large Language Model) like OpenAI are supporting
|
|
392
|
-
* > "function calling" feature. The "function calling" means that LLM automatically selects
|
|
393
|
-
* > a proper function and compose parameter values from the user's chatting text.
|
|
394
|
-
* >
|
|
395
|
-
* > When LLM selects the proper function and its arguments, you just call the function
|
|
396
|
-
* > with the arguments. And then informs the return value to the LLM by system prompt,
|
|
397
|
-
* > LLM will continue the next conversation based on the return value.
|
|
398
|
-
*
|
|
399
|
-
* @template T Target type
|
|
400
|
-
* @template Model LLM schema model
|
|
401
|
-
* @template Config Configuration of LLM schema composition
|
|
402
|
-
* @returns LLM schema
|
|
403
|
-
* @reference https://platform.openai.com/docs/guides/function-calling
|
|
404
|
-
* @reference https://platform.openai.com/docs/guides/structured-outputs
|
|
405
|
-
* @author Jeongho Nam - https://github.com/samchon
|
|
406
|
-
*/
|
|
407
|
-
export function schema(): never;
|
|
408
|
-
|
|
409
|
-
/**
|
|
410
|
-
* TypeScript type to LLM type schema.
|
|
411
|
-
*
|
|
412
|
-
* Creates an LLM (Large Language Model) type schema, a type metadata that is used in the
|
|
413
|
-
* [LLM function calling](@reference https://platform.openai.com/docs/guides/function-calling),
|
|
414
|
-
* from a TypeScript type.
|
|
415
|
-
*
|
|
416
|
-
* The returned {@link ILlmSchema} type would be specified by the `Model` argument,
|
|
417
|
-
* and here is the list of available `Model` types with their corresponding LLM schema:
|
|
418
|
-
*
|
|
419
|
-
* - LLM provider schemas
|
|
420
|
-
* - `chatgpt`: [`IChatGptSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IChatGptSchema.ts)
|
|
421
|
-
* - `claude`: [`IClaudeSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IClaudeSchema.ts)
|
|
422
|
-
* - `gemini`: [`IGeminiSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IGeminiSchema.ts)
|
|
423
|
-
* - `llama`: [`ILlamaSchema`](https://github.com/samchon/openapi/blob/master/src/structures/ILlamaSchema.ts)
|
|
424
|
-
* - Midldle layer schemas
|
|
425
|
-
* - `3.0`: [`ILlmSchemaV3`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3.ts)
|
|
426
|
-
* - `3.1`: [`ILlmSchemaV3_1`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3_1.ts)
|
|
427
|
-
*
|
|
428
|
-
* If you actually want to perform the LLM function calling with TypeScript functions,
|
|
429
|
-
* you can do it with the {@link application} function. Otherwise you hope to perform the
|
|
430
|
-
* structured output, {@link parameters} function is better. Let's enjoy the LLM function calling
|
|
431
|
-
* and structured output with the native TypeScript functions and types.
|
|
432
|
-
*
|
|
433
|
-
* > **What LLM function calling is?
|
|
434
|
-
* >
|
|
435
|
-
* > LLM (Large Language Model) selects propert function and fill the arguments,
|
|
436
|
-
* > but actuall function call execution is not by LLM, but by you.
|
|
437
|
-
* >
|
|
438
|
-
* > In nowadays, most LLM (Large Language Model) like OpenAI are supporting
|
|
439
|
-
* > "function calling" feature. The "function calling" means that LLM automatically selects
|
|
440
|
-
* > a proper function and compose parameter values from the user's chatting text.
|
|
441
|
-
* >
|
|
442
|
-
* > When LLM selects the proper function and its arguments, you just call the function
|
|
443
|
-
* > with the arguments. And then informs the return value to the LLM by system prompt,
|
|
444
|
-
* > LLM will continue the next conversation based on the return value.
|
|
445
|
-
*
|
|
446
|
-
* @template T Target type
|
|
447
|
-
* @template Model LLM schema model
|
|
448
|
-
* @template Config Configuration of LLM schema composition
|
|
449
|
-
* @returns LLM schema
|
|
450
|
-
* @reference https://platform.openai.com/docs/guides/function-calling
|
|
451
|
-
* @reference https://platform.openai.com/docs/guides/structured-outputs
|
|
452
|
-
* @author Jeongho Nam - https://github.com/samchon
|
|
453
|
-
*/
|
|
454
|
-
export function schema<
|
|
455
|
-
T,
|
|
456
|
-
Model extends ILlmSchema.Model,
|
|
457
|
-
Config extends Partial<ILlmSchema.ModelConfig[Model]> = {},
|
|
458
|
-
>(
|
|
459
|
-
...$defs: Extract<
|
|
460
|
-
ILlmSchema.ModelSchema[Model],
|
|
461
|
-
{ $ref: string }
|
|
462
|
-
> extends never
|
|
463
|
-
? []
|
|
464
|
-
: [Record<string, ILlmSchema.ModelSchema[Model]>]
|
|
465
|
-
): ILlmSchema.ModelSchema[Model];
|
|
466
|
-
|
|
467
|
-
/**
|
|
468
|
-
* @internal
|
|
469
|
-
*/
|
|
470
|
-
export function schema(): never {
|
|
471
|
-
halt("schema");
|
|
472
|
-
}
|
|
473
|
-
|
|
474
|
-
/**
|
|
475
|
-
* @internal
|
|
476
|
-
*/
|
|
477
|
-
function halt(name: string): never {
|
|
478
|
-
throw new Error(
|
|
479
|
-
`Error on typia.llm.${name}(): no transform has been configured. Read and follow https://typia.io/docs/setup please.`,
|
|
480
|
-
);
|
|
481
|
-
}
|
|
1
|
+
import { ILlmApplication, ILlmSchema } from "@samchon/openapi";
|
|
2
|
+
|
|
3
|
+
import { ILlmApplicationOfValidate } from "./module";
|
|
4
|
+
|
|
5
|
+
/**
|
|
6
|
+
* > You must configure the generic argument `App`.
|
|
7
|
+
*
|
|
8
|
+
* TypeScript functions to LLM function calling application with validators.
|
|
9
|
+
*
|
|
10
|
+
* Creates an application of LLM (Large Language Model) function calling application
|
|
11
|
+
* from a TypeScript class or interface type containing the target functions to be
|
|
12
|
+
* called by the LLM function calling feature.
|
|
13
|
+
*
|
|
14
|
+
* If you put the returned {@link ILlmApplicationOfValidate.functions} objects to the
|
|
15
|
+
* LLM provider like [OpenAI (ChatGPT)](https://openai.com/), the LLM will automatically
|
|
16
|
+
* select the proper function and fill its arguments from the conversation
|
|
17
|
+
* (maybe chatting text) with user (human). This is the concept of the LLM function calling.
|
|
18
|
+
*
|
|
19
|
+
* Additionally, the LLM function calling sometimes take a mistake that composing wrong typed
|
|
20
|
+
* {@link ILlmFunctionOfValidate.parameters}. In that case, deliver return value of the
|
|
21
|
+
* {@link ILlmFunctionOfValidate.validate} function, then LLM provider will correct the
|
|
22
|
+
* parameters at the next conversation. The {@link ILlmFunctionOfValidate.validate} function
|
|
23
|
+
* is a validator function reporting the detailed information about the wrong typed parameters.
|
|
24
|
+
*
|
|
25
|
+
* By the way, there can be some parameters (or their nested properties) which must be
|
|
26
|
+
* composed by human, not by LLM. File uploading feature or some sensitive information
|
|
27
|
+
* like secrety key (password) are the examples. In that case, you can separate the
|
|
28
|
+
* function parameters to both LLM and human sides by configuring the
|
|
29
|
+
* {@link ILlmApplicationOfValidate.IOptions.separate} property. The separated parameters
|
|
30
|
+
* are assigned to the {@link ILlmFunctionOfValidate.separated} property.
|
|
31
|
+
*
|
|
32
|
+
* For reference, the actual function call execution is not by LLM, but by you.
|
|
33
|
+
* When the LLM selects the proper function and fills the arguments, you just call
|
|
34
|
+
* the function with the LLM prepared arguments. And then informs the return value to
|
|
35
|
+
* the LLM by system prompt. The LLM will continue the next conversation based on
|
|
36
|
+
* the return value.
|
|
37
|
+
*
|
|
38
|
+
* Additionally, if you've configured {@link ILlmApplicationOfValidate.IOptions.separate},
|
|
39
|
+
* so that the parameters are separated to human and LLM sides, you can merge these
|
|
40
|
+
* humand and LLM sides' parameters into one through {@link HttpLlm.mergeParameters}
|
|
41
|
+
* before the actual LLM function call execution.
|
|
42
|
+
*
|
|
43
|
+
* Here is the list of available `Model` types with their corresponding LLM schema.
|
|
44
|
+
* Reading the following list, and determine the `Model` type considering the
|
|
45
|
+
* characteristics of the target LLM provider.
|
|
46
|
+
*
|
|
47
|
+
* - LLM provider schemas
|
|
48
|
+
* - `chatgpt`: [`IChatGptSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IChatGptSchema.ts)
|
|
49
|
+
* - `claude`: [`IClaudeSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IClaudeSchema.ts)
|
|
50
|
+
* - `gemini`: [`IGeminiSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IGeminiSchema.ts)
|
|
51
|
+
* - `llama`: [`ILlamaSchema`](https://github.com/samchon/openapi/blob/master/src/structures/ILlamaSchema.ts)
|
|
52
|
+
* - Midldle layer schemas
|
|
53
|
+
* - `3.0`: [`ILlmSchemaV3`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3.ts)
|
|
54
|
+
* - `3.1`: [`ILlmSchemaV3_1`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3_1.ts)
|
|
55
|
+
*
|
|
56
|
+
* @template App Target class or interface type collecting the functions to call
|
|
57
|
+
* @template Model LLM schema model
|
|
58
|
+
* @template Config Configuration of LLM schema composition
|
|
59
|
+
* @param options Options for the LLM application construction
|
|
60
|
+
* @returns Application of LLM function calling schemas
|
|
61
|
+
* @reference https://platform.openai.com/docs/guides/function-calling
|
|
62
|
+
* @author Jeongho Nam - https://github.com/samchon
|
|
63
|
+
*/
|
|
64
|
+
export function applicationOfValidate(
|
|
65
|
+
options?: Partial<Pick<ILlmApplicationOfValidate.IOptions<any>, "separate">>,
|
|
66
|
+
): never;
|
|
67
|
+
|
|
68
|
+
/**
 * TypeScript functions to LLM function calling application with validators.
 *
 * Creates an application of LLM (Large Language Model) function calling application
 * from a TypeScript class or interface type containing the target functions to be
 * called by the LLM function calling feature.
 *
 * If you put the returned {@link ILlmApplicationOfValidate.functions} objects to the
 * LLM provider like [OpenAI (ChatGPT)](https://openai.com/), the LLM will automatically
 * select the proper function and fill its arguments from the conversation
 * (maybe chatting text) with user (human). This is the concept of the LLM function calling.
 *
 * Additionally, the LLM function calling sometimes take a mistake that composing wrong typed
 * {@link ILlmFunctionOfValidate.parameters}. In that case, deliver return value of the
 * {@link ILlmFunctionOfValidate.validate} function, then LLM provider will correct the
 * parameters at the next conversation. The {@link ILlmFunctionOfValidate.validate} function
 * is a validator function reporting the detailed information about the wrong typed parameters.
 *
 * By the way, there can be some parameters (or their nested properties) which must be
 * composed by human, not by LLM. File uploading feature or some sensitive information
 * like secret key (password) are the examples. In that case, you can separate the
 * function parameters to both LLM and human sides by configuring the
 * {@link ILlmApplicationOfValidate.IOptions.separate} property. The separated parameters
 * are assigned to the {@link ILlmFunctionOfValidate.separated} property.
 *
 * For reference, the actual function call execution is not by LLM, but by you.
 * When the LLM selects the proper function and fills the arguments, you just call
 * the function with the LLM prepared arguments. And then informs the return value to
 * the LLM by system prompt. The LLM will continue the next conversation based on
 * the return value.
 *
 * Additionally, if you've configured {@link ILlmApplicationOfValidate.IOptions.separate},
 * so that the parameters are separated to human and LLM sides, you can merge these
 * human and LLM sides' parameters into one through {@link HttpLlm.mergeParameters}
 * before the actual LLM function call execution.
 *
 * Here is the list of available `Model` types with their corresponding LLM schema.
 * Reading the following list, and determine the `Model` type considering the
 * characteristics of the target LLM provider.
 *
 * - LLM provider schemas
 *   - `chatgpt`: [`IChatGptSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IChatGptSchema.ts)
 *   - `claude`: [`IClaudeSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IClaudeSchema.ts)
 *   - `gemini`: [`IGeminiSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IGeminiSchema.ts)
 *   - `llama`: [`ILlamaSchema`](https://github.com/samchon/openapi/blob/master/src/structures/ILlamaSchema.ts)
 * - Middle layer schemas
 *   - `3.0`: [`ILlmSchemaV3`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3.ts)
 *   - `3.1`: [`ILlmSchemaV3_1`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3_1.ts)
 *
 * @template App Target class or interface type collecting the functions to call
 * @template Model LLM schema model
 * @template Config Configuration of LLM schema composition
 * @param options Options for the LLM application construction
 * @returns Application of LLM function calling schemas
 * @reference https://platform.openai.com/docs/guides/function-calling
 * @author Jeongho Nam - https://github.com/samchon
 */
export function applicationOfValidate<
  App extends Record<string, any>,
  Model extends ILlmSchema.Model,
  Config extends Partial<ILlmSchema.ModelConfig[Model]> = {},
>(
  options?: Partial<
    Pick<ILlmApplicationOfValidate.IOptions<Model>, "separate">
  >,
): ILlmApplicationOfValidate<Model>;
|
|
134
|
+
|
|
135
|
+
/**
 * Runtime stub of {@link applicationOfValidate}.
 *
 * Reaching this body at runtime means the typia transform has not been
 * configured for the build, so it unconditionally throws the setup-guide
 * error via {@link halt}.
 *
 * @internal
 */
export function applicationOfValidate(): never {
  halt("applicationOfValidate");
}
|
|
141
|
+
|
|
142
|
+
/**
 * > You must configure the generic argument `App`.
 *
 * TypeScript functions to LLM function calling application.
 *
 * Creates an application of LLM (Large Language Model) function calling application
 * from a TypeScript class or interface type containing the target functions to be
 * called by the LLM function calling feature.
 *
 * If you put the returned {@link ILlmApplication.functions} objects to the LLM provider
 * like [OpenAI (ChatGPT)](https://openai.com/), the LLM will automatically select the
 * proper function and fill its arguments from the conversation (maybe chatting text)
 * with user (human). This is the concept of the LLM function calling.
 *
 * By the way, there can be some parameters (or their nested properties) which must be
 * composed by human, not by LLM. File uploading feature or some sensitive information
 * like secret key (password) are the examples. In that case, you can separate the
 * function parameters to both LLM and human sides by configuring the
 * {@link ILlmApplication.IOptions.separate} property. The separated parameters are
 * assigned to the {@link ILlmFunction.separated} property.
 *
 * For reference, the actual function call execution is not by LLM, but by you.
 * When the LLM selects the proper function and fills the arguments, you just call
 * the function with the LLM prepared arguments. And then informs the return value to
 * the LLM by system prompt. The LLM will continue the next conversation based on
 * the return value.
 *
 * Additionally, if you've configured {@link ILlmApplication.IOptions.separate},
 * so that the parameters are separated to human and LLM sides, you can merge these
 * human and LLM sides' parameters into one through {@link HttpLlm.mergeParameters}
 * before the actual LLM function call execution.
 *
 * Here is the list of available `Model` types with their corresponding LLM schema.
 * Reading the following list, and determine the `Model` type considering the
 * characteristics of the target LLM provider.
 *
 * - LLM provider schemas
 *   - `chatgpt`: [`IChatGptSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IChatGptSchema.ts)
 *   - `claude`: [`IClaudeSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IClaudeSchema.ts)
 *   - `gemini`: [`IGeminiSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IGeminiSchema.ts)
 *   - `llama`: [`ILlamaSchema`](https://github.com/samchon/openapi/blob/master/src/structures/ILlamaSchema.ts)
 * - Middle layer schemas
 *   - `3.0`: [`ILlmSchemaV3`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3.ts)
 *   - `3.1`: [`ILlmSchemaV3_1`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3_1.ts)
 *
 * @template App Target class or interface type collecting the functions to call
 * @template Model LLM schema model
 * @template Config Configuration of LLM schema composition
 * @param options Options for the LLM application construction
 * @returns Application of LLM function calling schemas
 * @reference https://platform.openai.com/docs/guides/function-calling
 * @author Jeongho Nam - https://github.com/samchon
 */
export function application(
  options?: Partial<Pick<ILlmApplication.IOptions<any>, "separate">>,
): never;
|
|
198
|
+
|
|
199
|
+
/**
 * TypeScript functions to LLM function calling application.
 *
 * Creates an application of LLM (Large Language Model) function calling application
 * from a TypeScript class or interface type containing the target functions to be
 * called by the LLM function calling feature.
 *
 * If you put the returned {@link ILlmApplication.functions} objects to the LLM provider
 * like [OpenAI (ChatGPT)](https://openai.com/), the LLM will automatically select the
 * proper function and fill its arguments from the conversation (maybe chatting text)
 * with user (human). This is the concept of the LLM function calling.
 *
 * By the way, there can be some parameters (or their nested properties) which must be
 * composed by human, not by LLM. File uploading feature or some sensitive information
 * like secret key (password) are the examples. In that case, you can separate the
 * function parameters to both LLM and human sides by configuring the
 * {@link ILlmApplication.IOptions.separate} property. The separated parameters are
 * assigned to the {@link ILlmFunction.separated} property.
 *
 * For reference, the actual function call execution is not by LLM, but by you.
 * When the LLM selects the proper function and fills the arguments, you just call
 * the function with the LLM prepared arguments. And then informs the return value to
 * the LLM by system prompt. The LLM will continue the next conversation based on
 * the return value.
 *
 * Additionally, if you've configured {@link ILlmApplication.IOptions.separate},
 * so that the parameters are separated to human and LLM sides, you can merge these
 * human and LLM sides' parameters into one through {@link HttpLlm.mergeParameters}
 * before the actual LLM function call execution.
 *
 * Here is the list of available `Model` types with their corresponding LLM schema.
 * Reading the following list, and determine the `Model` type considering the
 * characteristics of the target LLM provider.
 *
 * - LLM provider schemas
 *   - `chatgpt`: [`IChatGptSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IChatGptSchema.ts)
 *   - `claude`: [`IClaudeSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IClaudeSchema.ts)
 *   - `gemini`: [`IGeminiSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IGeminiSchema.ts)
 *   - `llama`: [`ILlamaSchema`](https://github.com/samchon/openapi/blob/master/src/structures/ILlamaSchema.ts)
 * - Middle layer schemas
 *   - `3.0`: [`ILlmSchemaV3`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3.ts)
 *   - `3.1`: [`ILlmSchemaV3_1`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3_1.ts)
 *
 * @template App Target class or interface type collecting the functions to call
 * @template Model LLM schema model
 * @template Config Configuration of LLM schema composition
 * @param options Options for the LLM application construction
 * @returns Application of LLM function calling schemas
 * @reference https://platform.openai.com/docs/guides/function-calling
 * @author Jeongho Nam - https://github.com/samchon
 */
export function application<
  App extends Record<string, any>,
  Model extends ILlmSchema.Model,
  Config extends Partial<ILlmSchema.ModelConfig[Model]> = {},
>(
  options?: Partial<Pick<ILlmApplication.IOptions<Model>, "separate">>,
): ILlmApplication<Model>;
|
|
257
|
+
|
|
258
|
+
/**
 * Runtime stub of {@link application}.
 *
 * Reaching this body at runtime means the typia transform has not been
 * configured for the build, so it unconditionally throws the setup-guide
 * error via {@link halt}.
 *
 * @internal
 */
export function application(): never {
  halt("application");
}
|
|
264
|
+
|
|
265
|
+
/**
 * > You must configure the generic argument `Parameters`.
 *
 * TypeScript parameters to LLM parameters schema.
 *
 * Creates an LLM (Large Language Model) parameters schema, a type metadata that is used in the
 * [LLM function calling](https://platform.openai.com/docs/guides/function-calling)
 * and [LLM structured outputs](https://platform.openai.com/docs/guides/structured-outputs),
 * from a TypeScript parameters type.
 *
 * For references, LLM identifies only keyworded arguments, not positional arguments.
 * Therefore, the TypeScript parameters type must be an object type, and its properties
 * must be static. If dynamic properties are, it would be a compilation error.
 *
 * Also, such parameters type can be utilized not only for the LLM function calling,
 * but also for the LLM structured outputs. The LLM structured outputs is a feature
 * that LLM (Large Language Model) can generate a structured output, not only a plain
 * text, by filling the parameters from the conversation (maybe chatting text) with user
 * (human).
 *
 * Here is the list of available `Model` types with their corresponding LLM schema.
 * Reading the following list, and determine the `Model` type considering the
 * characteristics of the target LLM provider.
 *
 * - LLM provider schemas
 *   - `chatgpt`: [`IChatGptSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IChatGptSchema.ts)
 *   - `claude`: [`IClaudeSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IClaudeSchema.ts)
 *   - `gemini`: [`IGeminiSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IGeminiSchema.ts)
 *   - `llama`: [`ILlamaSchema`](https://github.com/samchon/openapi/blob/master/src/structures/ILlamaSchema.ts)
 * - Middle layer schemas
 *   - `3.0`: [`ILlmSchemaV3`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3.ts)
 *   - `3.1`: [`ILlmSchemaV3_1`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3_1.ts)
 *
 * @template Parameters Target parameters type
 * @template Model LLM schema model
 * @template Config Configuration of LLM schema composition
 * @returns LLM parameters schema
 * @reference https://platform.openai.com/docs/guides/function-calling
 * @reference https://platform.openai.com/docs/guides/structured-outputs
 */
export function parameters(): never;
|
|
306
|
+
|
|
307
|
+
/**
 * TypeScript parameters to LLM parameters schema.
 *
 * Creates an LLM (Large Language Model) parameters schema, a type metadata that is used in the
 * [LLM function calling](https://platform.openai.com/docs/guides/function-calling)
 * and [LLM structured outputs](https://platform.openai.com/docs/guides/structured-outputs),
 * from a TypeScript parameters type.
 *
 * For references, LLM identifies only keyworded arguments, not positional arguments.
 * Therefore, the TypeScript parameters type must be an object type, and its properties
 * must be static. If dynamic properties are, it would be a compilation error.
 *
 * Also, such parameters type can be utilized not only for the LLM function calling,
 * but also for the LLM structured outputs. The LLM structured outputs is a feature
 * that LLM (Large Language Model) can generate a structured output, not only a plain
 * text, by filling the parameters from the conversation (maybe chatting text) with user
 * (human).
 *
 * Here is the list of available `Model` types with their corresponding LLM schema.
 * Reading the following list, and determine the `Model` type considering the
 * characteristics of the target LLM provider.
 *
 * - LLM provider schemas
 *   - `chatgpt`: [`IChatGptSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IChatGptSchema.ts)
 *   - `claude`: [`IClaudeSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IClaudeSchema.ts)
 *   - `gemini`: [`IGeminiSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IGeminiSchema.ts)
 *   - `llama`: [`ILlamaSchema`](https://github.com/samchon/openapi/blob/master/src/structures/ILlamaSchema.ts)
 * - Middle layer schemas
 *   - `3.0`: [`ILlmSchemaV3`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3.ts)
 *   - `3.1`: [`ILlmSchemaV3_1`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3_1.ts)
 *
 * @template Parameters Target parameters type
 * @template Model LLM schema model
 * @template Config Configuration of LLM schema composition
 * @returns LLM parameters schema
 * @reference https://platform.openai.com/docs/guides/function-calling
 * @reference https://platform.openai.com/docs/guides/structured-outputs
 */
export function parameters<
  Parameters extends Record<string, any>,
  Model extends ILlmSchema.Model,
  Config extends Partial<ILlmSchema.ModelConfig[Model]> = {},
>(): ILlmSchema.ModelParameters[Model];
|
|
350
|
+
|
|
351
|
+
/**
 * Runtime stub of {@link parameters}.
 *
 * Reaching this body at runtime means the typia transform has not been
 * configured for the build, so it unconditionally throws the setup-guide
 * error via {@link halt}.
 *
 * @internal
 */
export function parameters(): never {
  halt("parameters");
}
|
|
357
|
+
|
|
358
|
+
/**
 * > You must configure the generic argument `T`.
 *
 * TypeScript type to LLM type schema.
 *
 * Creates an LLM (Large Language Model) type schema, a type metadata that is used in the
 * [LLM function calling](https://platform.openai.com/docs/guides/function-calling),
 * from a TypeScript type.
 *
 * The returned {@link ILlmSchema} type would be specified by the `Model` argument,
 * and here is the list of available `Model` types with their corresponding LLM schema.
 * Reading the following list, and determine the `Model` type considering the
 * characteristics of the target LLM provider.
 *
 * - LLM provider schemas
 *   - `chatgpt`: [`IChatGptSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IChatGptSchema.ts)
 *   - `claude`: [`IClaudeSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IClaudeSchema.ts)
 *   - `gemini`: [`IGeminiSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IGeminiSchema.ts)
 *   - `llama`: [`ILlamaSchema`](https://github.com/samchon/openapi/blob/master/src/structures/ILlamaSchema.ts)
 * - Middle layer schemas
 *   - `3.0`: [`ILlmSchemaV3`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3.ts)
 *   - `3.1`: [`ILlmSchemaV3_1`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3_1.ts)
 *
 * If you actually want to perform the LLM function calling with TypeScript functions,
 * you can do it with the {@link application} function. Otherwise you hope to perform the
 * structured output, {@link parameters} function is better. Let's enjoy the LLM function calling
 * and structured output with the native TypeScript functions and types.
 *
 * > **What LLM function calling is?**
 * >
 * > LLM (Large Language Model) selects proper function and fill the arguments,
 * > but actual function call execution is not by LLM, but by you.
 * >
 * > In nowadays, most LLM (Large Language Model) like OpenAI are supporting
 * > "function calling" feature. The "function calling" means that LLM automatically selects
 * > a proper function and compose parameter values from the user's chatting text.
 * >
 * > When LLM selects the proper function and its arguments, you just call the function
 * > with the arguments. And then informs the return value to the LLM by system prompt,
 * > LLM will continue the next conversation based on the return value.
 *
 * @template T Target type
 * @template Model LLM schema model
 * @template Config Configuration of LLM schema composition
 * @returns LLM schema
 * @reference https://platform.openai.com/docs/guides/function-calling
 * @reference https://platform.openai.com/docs/guides/structured-outputs
 * @author Jeongho Nam - https://github.com/samchon
 */
export function schema(): never;
|
|
408
|
+
|
|
409
|
+
/**
 * TypeScript type to LLM type schema.
 *
 * Creates an LLM (Large Language Model) type schema, a type metadata that is used in the
 * [LLM function calling](https://platform.openai.com/docs/guides/function-calling),
 * from a TypeScript type.
 *
 * The returned {@link ILlmSchema} type would be specified by the `Model` argument,
 * and here is the list of available `Model` types with their corresponding LLM schema:
 *
 * - LLM provider schemas
 *   - `chatgpt`: [`IChatGptSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IChatGptSchema.ts)
 *   - `claude`: [`IClaudeSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IClaudeSchema.ts)
 *   - `gemini`: [`IGeminiSchema`](https://github.com/samchon/openapi/blob/master/src/structures/IGeminiSchema.ts)
 *   - `llama`: [`ILlamaSchema`](https://github.com/samchon/openapi/blob/master/src/structures/ILlamaSchema.ts)
 * - Middle layer schemas
 *   - `3.0`: [`ILlmSchemaV3`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3.ts)
 *   - `3.1`: [`ILlmSchemaV3_1`](https://github.com/samchon/openapi/blob/master/src/structures/ILlmSchemaV3_1.ts)
 *
 * If you actually want to perform the LLM function calling with TypeScript functions,
 * you can do it with the {@link application} function. Otherwise you hope to perform the
 * structured output, {@link parameters} function is better. Let's enjoy the LLM function calling
 * and structured output with the native TypeScript functions and types.
 *
 * > **What LLM function calling is?**
 * >
 * > LLM (Large Language Model) selects proper function and fill the arguments,
 * > but actual function call execution is not by LLM, but by you.
 * >
 * > In nowadays, most LLM (Large Language Model) like OpenAI are supporting
 * > "function calling" feature. The "function calling" means that LLM automatically selects
 * > a proper function and compose parameter values from the user's chatting text.
 * >
 * > When LLM selects the proper function and its arguments, you just call the function
 * > with the arguments. And then informs the return value to the LLM by system prompt,
 * > LLM will continue the next conversation based on the return value.
 *
 * @template T Target type
 * @template Model LLM schema model
 * @template Config Configuration of LLM schema composition
 * @returns LLM schema
 * @reference https://platform.openai.com/docs/guides/function-calling
 * @reference https://platform.openai.com/docs/guides/structured-outputs
 * @author Jeongho Nam - https://github.com/samchon
 */
export function schema<
  T,
  Model extends ILlmSchema.Model,
  Config extends Partial<ILlmSchema.ModelConfig[Model]> = {},
>(
  // variadic so that models whose schema has no `$ref` form take no argument,
  // while `$ref`-capable models require a `$defs` dictionary
  ...$defs: Extract<
    ILlmSchema.ModelSchema[Model],
    { $ref: string }
  > extends never
    ? []
    : [Record<string, ILlmSchema.ModelSchema[Model]>]
): ILlmSchema.ModelSchema[Model];
|
|
466
|
+
|
|
467
|
+
/**
 * Runtime stub of {@link schema}.
 *
 * Reaching this body at runtime means the typia transform has not been
 * configured for the build, so it unconditionally throws the setup-guide
 * error via {@link halt}.
 *
 * @internal
 */
export function schema(): never {
  halt("schema");
}
|
|
473
|
+
|
|
474
|
+
/**
|
|
475
|
+
* @internal
|
|
476
|
+
*/
|
|
477
|
+
function halt(name: string): never {
|
|
478
|
+
throw new Error(
|
|
479
|
+
`Error on typia.llm.${name}(): no transform has been configured. Read and follow https://typia.io/docs/setup please.`,
|
|
480
|
+
);
|
|
481
|
+
}
|