langchain 0.0.203 → 0.0.204-rc.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/agents/toolkits/aws_sfn.cjs +3 -75
- package/dist/agents/toolkits/aws_sfn.d.ts +2 -45
- package/dist/agents/toolkits/aws_sfn.js +2 -73
- package/dist/agents/toolkits/base.cjs +15 -9
- package/dist/agents/toolkits/base.d.ts +1 -9
- package/dist/agents/toolkits/base.js +1 -7
- package/dist/agents/toolkits/connery/index.cjs +15 -37
- package/dist/agents/toolkits/connery/index.d.ts +1 -23
- package/dist/agents/toolkits/connery/index.js +1 -35
- package/dist/cache/cloudflare_kv.cjs +15 -69
- package/dist/cache/cloudflare_kv.d.ts +1 -42
- package/dist/cache/cloudflare_kv.js +1 -67
- package/dist/cache/ioredis.cjs +15 -77
- package/dist/cache/ioredis.d.ts +1 -40
- package/dist/cache/ioredis.js +1 -75
- package/dist/cache/momento.cjs +15 -135
- package/dist/cache/momento.d.ts +1 -91
- package/dist/cache/momento.js +1 -133
- package/dist/cache/upstash_redis.cjs +15 -72
- package/dist/cache/upstash_redis.d.ts +1 -45
- package/dist/cache/upstash_redis.js +1 -70
- package/dist/callbacks/handlers/llmonitor.cjs +14 -238
- package/dist/callbacks/handlers/llmonitor.d.ts +1 -35
- package/dist/callbacks/handlers/llmonitor.js +1 -233
- package/dist/chains/openai_moderation.cjs +2 -2
- package/dist/chains/openai_moderation.d.ts +1 -1
- package/dist/chains/openai_moderation.js +1 -1
- package/dist/chat_models/anthropic.d.ts +4 -4
- package/dist/chat_models/baiduwenxin.cjs +15 -411
- package/dist/chat_models/baiduwenxin.d.ts +1 -150
- package/dist/chat_models/baiduwenxin.js +1 -409
- package/dist/chat_models/bedrock/index.cjs +15 -33
- package/dist/chat_models/bedrock/index.d.ts +1 -23
- package/dist/chat_models/bedrock/index.js +1 -29
- package/dist/chat_models/bedrock/web.cjs +15 -360
- package/dist/chat_models/bedrock/web.d.ts +1 -82
- package/dist/chat_models/bedrock/web.js +1 -356
- package/dist/chat_models/cloudflare_workersai.cjs +15 -216
- package/dist/chat_models/cloudflare_workersai.d.ts +1 -68
- package/dist/chat_models/cloudflare_workersai.js +1 -214
- package/dist/chat_models/fireworks.cjs +15 -89
- package/dist/chat_models/fireworks.d.ts +1 -43
- package/dist/chat_models/fireworks.js +1 -87
- package/dist/chat_models/googlepalm.cjs +15 -235
- package/dist/chat_models/googlepalm.d.ts +1 -108
- package/dist/chat_models/googlepalm.js +1 -233
- package/dist/chat_models/googlevertexai/index.cjs +15 -40
- package/dist/chat_models/googlevertexai/index.d.ts +1 -28
- package/dist/chat_models/googlevertexai/index.js +1 -38
- package/dist/chat_models/googlevertexai/web.cjs +15 -39
- package/dist/chat_models/googlevertexai/web.d.ts +1 -28
- package/dist/chat_models/googlevertexai/web.js +1 -37
- package/dist/chat_models/iflytek_xinghuo/index.cjs +14 -40
- package/dist/chat_models/iflytek_xinghuo/index.d.ts +1 -13
- package/dist/chat_models/iflytek_xinghuo/index.js +1 -36
- package/dist/chat_models/iflytek_xinghuo/web.cjs +15 -36
- package/dist/chat_models/iflytek_xinghuo/web.d.ts +1 -13
- package/dist/chat_models/iflytek_xinghuo/web.js +1 -34
- package/dist/chat_models/llama_cpp.cjs +15 -275
- package/dist/chat_models/llama_cpp.d.ts +1 -71
- package/dist/chat_models/llama_cpp.js +1 -273
- package/dist/chat_models/minimax.cjs +15 -578
- package/dist/chat_models/minimax.d.ts +1 -369
- package/dist/chat_models/minimax.js +1 -576
- package/dist/chat_models/ollama.cjs +15 -394
- package/dist/chat_models/ollama.d.ts +1 -127
- package/dist/chat_models/ollama.js +1 -392
- package/dist/chat_models/openai.cjs +4 -698
- package/dist/chat_models/openai.d.ts +4 -137
- package/dist/chat_models/openai.js +2 -695
- package/dist/chat_models/portkey.cjs +15 -167
- package/dist/chat_models/portkey.d.ts +1 -17
- package/dist/chat_models/portkey.js +1 -165
- package/dist/chat_models/yandex.cjs +15 -132
- package/dist/chat_models/yandex.d.ts +1 -30
- package/dist/chat_models/yandex.js +1 -130
- package/dist/document_loaders/fs/openai_whisper_audio.cjs +2 -2
- package/dist/document_loaders/fs/openai_whisper_audio.d.ts +1 -1
- package/dist/document_loaders/fs/openai_whisper_audio.js +1 -1
- package/dist/document_transformers/html_to_text.cjs +15 -47
- package/dist/document_transformers/html_to_text.d.ts +1 -30
- package/dist/document_transformers/html_to_text.js +1 -45
- package/dist/document_transformers/mozilla_readability.cjs +15 -53
- package/dist/document_transformers/mozilla_readability.d.ts +1 -31
- package/dist/document_transformers/mozilla_readability.js +1 -51
- package/dist/embeddings/bedrock.cjs +15 -109
- package/dist/embeddings/bedrock.d.ts +1 -71
- package/dist/embeddings/bedrock.js +1 -107
- package/dist/embeddings/cloudflare_workersai.cjs +15 -67
- package/dist/embeddings/cloudflare_workersai.d.ts +1 -28
- package/dist/embeddings/cloudflare_workersai.js +1 -65
- package/dist/embeddings/cohere.cjs +15 -124
- package/dist/embeddings/cohere.d.ts +1 -66
- package/dist/embeddings/cohere.js +1 -122
- package/dist/embeddings/googlepalm.cjs +15 -88
- package/dist/embeddings/googlepalm.d.ts +1 -61
- package/dist/embeddings/googlepalm.js +1 -86
- package/dist/embeddings/googlevertexai.cjs +15 -81
- package/dist/embeddings/googlevertexai.d.ts +1 -54
- package/dist/embeddings/googlevertexai.js +1 -79
- package/dist/embeddings/gradient_ai.cjs +15 -101
- package/dist/embeddings/gradient_ai.d.ts +1 -48
- package/dist/embeddings/gradient_ai.js +1 -99
- package/dist/embeddings/hf.cjs +15 -75
- package/dist/embeddings/hf.d.ts +1 -41
- package/dist/embeddings/hf.js +1 -73
- package/dist/embeddings/hf_transformers.cjs +15 -87
- package/dist/embeddings/hf_transformers.d.ts +1 -47
- package/dist/embeddings/hf_transformers.js +1 -85
- package/dist/embeddings/llama_cpp.cjs +15 -81
- package/dist/embeddings/llama_cpp.d.ts +1 -42
- package/dist/embeddings/llama_cpp.js +1 -79
- package/dist/embeddings/minimax.cjs +15 -165
- package/dist/embeddings/minimax.d.ts +1 -119
- package/dist/embeddings/minimax.js +1 -163
- package/dist/embeddings/ollama.cjs +15 -118
- package/dist/embeddings/ollama.d.ts +1 -34
- package/dist/embeddings/ollama.js +1 -116
- package/dist/embeddings/openai.cjs +2 -240
- package/dist/embeddings/openai.d.ts +1 -82
- package/dist/embeddings/openai.js +1 -239
- package/dist/embeddings/tensorflow.cjs +3 -96
- package/dist/embeddings/tensorflow.d.ts +1 -59
- package/dist/embeddings/tensorflow.js +1 -83
- package/dist/embeddings/voyage.cjs +15 -118
- package/dist/embeddings/voyage.d.ts +1 -66
- package/dist/embeddings/voyage.js +1 -116
- package/dist/experimental/chat_models/ollama_functions.d.ts +1 -1
- package/dist/experimental/openai_assistant/index.cjs +3 -3
- package/dist/experimental/openai_assistant/index.d.ts +1 -1
- package/dist/experimental/openai_assistant/index.js +1 -1
- package/dist/experimental/openai_assistant/schema.d.ts +1 -1
- package/dist/experimental/openai_files/index.cjs +2 -2
- package/dist/experimental/openai_files/index.d.ts +1 -1
- package/dist/experimental/openai_files/index.js +1 -1
- package/dist/graphs/neo4j_graph.cjs +3 -254
- package/dist/graphs/neo4j_graph.d.ts +1 -63
- package/dist/graphs/neo4j_graph.js +1 -241
- package/dist/llms/ai21.cjs +15 -200
- package/dist/llms/ai21.d.ts +1 -100
- package/dist/llms/ai21.js +1 -198
- package/dist/llms/aleph_alpha.cjs +15 -374
- package/dist/llms/aleph_alpha.d.ts +1 -161
- package/dist/llms/aleph_alpha.js +1 -372
- package/dist/llms/bedrock/web.cjs +15 -310
- package/dist/llms/bedrock/web.d.ts +1 -66
- package/dist/llms/bedrock/web.js +1 -308
- package/dist/llms/cloudflare_workersai.cjs +15 -168
- package/dist/llms/cloudflare_workersai.d.ts +1 -55
- package/dist/llms/cloudflare_workersai.js +1 -166
- package/dist/llms/cohere.cjs +15 -110
- package/dist/llms/cohere.d.ts +1 -54
- package/dist/llms/cohere.js +1 -108
- package/dist/llms/fireworks.cjs +15 -90
- package/dist/llms/fireworks.d.ts +1 -33
- package/dist/llms/fireworks.js +1 -88
- package/dist/llms/googlepalm.cjs +15 -134
- package/dist/llms/googlepalm.d.ts +1 -92
- package/dist/llms/googlepalm.js +1 -132
- package/dist/llms/googlevertexai/index.cjs +15 -45
- package/dist/llms/googlevertexai/index.d.ts +1 -38
- package/dist/llms/googlevertexai/index.js +1 -43
- package/dist/llms/googlevertexai/web.cjs +15 -42
- package/dist/llms/googlevertexai/web.d.ts +1 -36
- package/dist/llms/googlevertexai/web.js +1 -40
- package/dist/llms/gradient_ai.cjs +15 -110
- package/dist/llms/gradient_ai.d.ts +1 -55
- package/dist/llms/gradient_ai.js +1 -108
- package/dist/llms/hf.cjs +15 -136
- package/dist/llms/hf.d.ts +1 -68
- package/dist/llms/hf.js +1 -134
- package/dist/llms/llama_cpp.cjs +15 -111
- package/dist/llms/llama_cpp.d.ts +1 -41
- package/dist/llms/llama_cpp.js +1 -109
- package/dist/llms/ollama.cjs +15 -353
- package/dist/llms/ollama.d.ts +1 -106
- package/dist/llms/ollama.js +1 -351
- package/dist/llms/openai-chat.cjs +3 -445
- package/dist/llms/openai-chat.d.ts +4 -123
- package/dist/llms/openai-chat.js +2 -443
- package/dist/llms/openai.cjs +6 -530
- package/dist/llms/openai.d.ts +4 -123
- package/dist/llms/openai.js +2 -525
- package/dist/llms/portkey.cjs +14 -168
- package/dist/llms/portkey.d.ts +1 -57
- package/dist/llms/portkey.js +1 -162
- package/dist/llms/raycast.cjs +15 -94
- package/dist/llms/raycast.d.ts +1 -50
- package/dist/llms/raycast.js +1 -92
- package/dist/llms/replicate.cjs +15 -132
- package/dist/llms/replicate.d.ts +1 -51
- package/dist/llms/replicate.js +1 -130
- package/dist/llms/sagemaker_endpoint.cjs +15 -203
- package/dist/llms/sagemaker_endpoint.d.ts +1 -121
- package/dist/llms/sagemaker_endpoint.js +1 -200
- package/dist/llms/watsonx_ai.cjs +15 -152
- package/dist/llms/watsonx_ai.d.ts +1 -72
- package/dist/llms/watsonx_ai.js +1 -150
- package/dist/llms/writer.cjs +15 -165
- package/dist/llms/writer.d.ts +1 -60
- package/dist/llms/writer.js +1 -163
- package/dist/llms/yandex.cjs +15 -98
- package/dist/llms/yandex.d.ts +1 -40
- package/dist/llms/yandex.js +1 -96
- package/dist/load/index.cjs +3 -1
- package/dist/load/index.d.ts +1 -1
- package/dist/load/index.js +3 -1
- package/dist/memory/chat_memory.cjs +15 -61
- package/dist/memory/chat_memory.d.ts +1 -36
- package/dist/memory/chat_memory.js +1 -59
- package/dist/memory/motorhead_memory.cjs +15 -161
- package/dist/memory/motorhead_memory.d.ts +1 -63
- package/dist/memory/motorhead_memory.js +1 -159
- package/dist/memory/zep.cjs +15 -222
- package/dist/memory/zep.d.ts +1 -86
- package/dist/memory/zep.js +1 -220
- package/dist/retrievers/amazon_kendra.cjs +15 -273
- package/dist/retrievers/amazon_kendra.d.ts +1 -142
- package/dist/retrievers/amazon_kendra.js +1 -271
- package/dist/retrievers/chaindesk.cjs +15 -88
- package/dist/retrievers/chaindesk.d.ts +1 -31
- package/dist/retrievers/chaindesk.js +1 -86
- package/dist/retrievers/databerry.cjs +15 -82
- package/dist/retrievers/databerry.d.ts +1 -34
- package/dist/retrievers/databerry.js +1 -80
- package/dist/retrievers/metal.cjs +15 -51
- package/dist/retrievers/metal.d.ts +1 -36
- package/dist/retrievers/metal.js +1 -49
- package/dist/retrievers/supabase.cjs +15 -167
- package/dist/retrievers/supabase.d.ts +1 -82
- package/dist/retrievers/supabase.js +1 -165
- package/dist/retrievers/tavily_search_api.cjs +15 -142
- package/dist/retrievers/tavily_search_api.d.ts +1 -38
- package/dist/retrievers/tavily_search_api.js +1 -140
- package/dist/retrievers/zep.cjs +15 -150
- package/dist/retrievers/zep.d.ts +1 -76
- package/dist/retrievers/zep.js +1 -148
- package/dist/schema/document.cjs +2 -16
- package/dist/schema/document.d.ts +1 -11
- package/dist/schema/document.js +1 -16
- package/dist/schema/index.cjs +2 -7
- package/dist/schema/index.d.ts +2 -10
- package/dist/schema/index.js +1 -6
- package/dist/storage/convex.cjs +15 -143
- package/dist/storage/convex.d.ts +1 -106
- package/dist/storage/convex.js +1 -141
- package/dist/storage/ioredis.cjs +15 -158
- package/dist/storage/ioredis.d.ts +1 -71
- package/dist/storage/ioredis.js +1 -156
- package/dist/storage/upstash_redis.cjs +15 -172
- package/dist/storage/upstash_redis.d.ts +1 -78
- package/dist/storage/upstash_redis.js +1 -170
- package/dist/storage/vercel_kv.cjs +15 -161
- package/dist/storage/vercel_kv.d.ts +1 -62
- package/dist/storage/vercel_kv.js +1 -159
- package/dist/stores/doc/in_memory.cjs +15 -105
- package/dist/stores/doc/in_memory.d.ts +1 -46
- package/dist/stores/doc/in_memory.js +1 -102
- package/dist/stores/message/cassandra.cjs +15 -158
- package/dist/stores/message/cassandra.d.ts +1 -69
- package/dist/stores/message/cassandra.js +1 -156
- package/dist/stores/message/cloudflare_d1.cjs +15 -150
- package/dist/stores/message/cloudflare_d1.d.ts +1 -67
- package/dist/stores/message/cloudflare_d1.js +1 -148
- package/dist/stores/message/convex.cjs +15 -118
- package/dist/stores/message/convex.d.ts +1 -81
- package/dist/stores/message/convex.js +1 -116
- package/dist/stores/message/dynamodb.cjs +15 -159
- package/dist/stores/message/dynamodb.d.ts +1 -51
- package/dist/stores/message/dynamodb.js +1 -157
- package/dist/stores/message/firestore.cjs +15 -185
- package/dist/stores/message/firestore.d.ts +1 -68
- package/dist/stores/message/firestore.js +1 -183
- package/dist/stores/message/in_memory.cjs +15 -49
- package/dist/stores/message/in_memory.d.ts +1 -28
- package/dist/stores/message/in_memory.js +1 -47
- package/dist/stores/message/ioredis.cjs +15 -98
- package/dist/stores/message/ioredis.d.ts +1 -64
- package/dist/stores/message/ioredis.js +1 -96
- package/dist/stores/message/momento.cjs +15 -160
- package/dist/stores/message/momento.d.ts +1 -97
- package/dist/stores/message/momento.js +1 -158
- package/dist/stores/message/mongodb.cjs +15 -57
- package/dist/stores/message/mongodb.d.ts +1 -26
- package/dist/stores/message/mongodb.js +1 -55
- package/dist/stores/message/planetscale.cjs +15 -170
- package/dist/stores/message/planetscale.d.ts +1 -75
- package/dist/stores/message/planetscale.js +1 -168
- package/dist/stores/message/redis.cjs +15 -114
- package/dist/stores/message/redis.d.ts +1 -69
- package/dist/stores/message/redis.js +1 -112
- package/dist/stores/message/upstash_redis.cjs +15 -85
- package/dist/stores/message/upstash_redis.d.ts +1 -43
- package/dist/stores/message/upstash_redis.js +1 -83
- package/dist/stores/message/utils.cjs +3 -23
- package/dist/stores/message/utils.d.ts +1 -17
- package/dist/stores/message/utils.js +1 -21
- package/dist/stores/message/xata.cjs +15 -216
- package/dist/stores/message/xata.d.ts +1 -76
- package/dist/stores/message/xata.js +1 -214
- package/dist/tools/IFTTTWebhook.cjs +15 -87
- package/dist/tools/IFTTTWebhook.d.ts +1 -50
- package/dist/tools/IFTTTWebhook.js +1 -85
- package/dist/tools/aiplugin.cjs +15 -73
- package/dist/tools/aiplugin.d.ts +1 -34
- package/dist/tools/aiplugin.js +1 -71
- package/dist/tools/aws_lambda.cjs +15 -83
- package/dist/tools/aws_lambda.d.ts +1 -25
- package/dist/tools/aws_lambda.js +1 -82
- package/dist/tools/aws_sfn.cjs +15 -201
- package/dist/tools/aws_sfn.d.ts +1 -57
- package/dist/tools/aws_sfn.js +1 -197
- package/dist/tools/bingserpapi.cjs +15 -76
- package/dist/tools/bingserpapi.d.ts +1 -23
- package/dist/tools/bingserpapi.js +1 -75
- package/dist/tools/brave_search.cjs +15 -64
- package/dist/tools/brave_search.d.ts +1 -23
- package/dist/tools/brave_search.js +1 -62
- package/dist/tools/connery.cjs +15 -277
- package/dist/tools/connery.d.ts +1 -145
- package/dist/tools/connery.js +1 -274
- package/dist/tools/convert_to_openai.d.ts +1 -1
- package/dist/tools/dadjokeapi.cjs +15 -46
- package/dist/tools/dadjokeapi.d.ts +1 -15
- package/dist/tools/dadjokeapi.js +1 -45
- package/dist/tools/dataforseo_api_search.cjs +15 -280
- package/dist/tools/dataforseo_api_search.d.ts +1 -170
- package/dist/tools/dataforseo_api_search.js +1 -278
- package/dist/tools/dynamic.cjs +15 -87
- package/dist/tools/dynamic.d.ts +1 -48
- package/dist/tools/dynamic.js +1 -84
- package/dist/tools/gmail/index.cjs +15 -11
- package/dist/tools/gmail/index.d.ts +1 -11
- package/dist/tools/gmail/index.js +1 -5
- package/dist/tools/google_custom_search.cjs +15 -70
- package/dist/tools/google_custom_search.d.ts +1 -25
- package/dist/tools/google_custom_search.js +1 -68
- package/dist/tools/google_places.cjs +15 -79
- package/dist/tools/google_places.d.ts +1 -21
- package/dist/tools/google_places.js +1 -77
- package/dist/tools/searchapi.cjs +15 -164
- package/dist/tools/searchapi.d.ts +1 -91
- package/dist/tools/searchapi.js +1 -162
- package/dist/tools/searxng_search.cjs +15 -156
- package/dist/tools/searxng_search.d.ts +1 -114
- package/dist/tools/searxng_search.js +1 -154
- package/dist/tools/serpapi.cjs +15 -199
- package/dist/tools/serpapi.d.ts +1 -302
- package/dist/tools/serpapi.js +1 -197
- package/dist/tools/serper.cjs +15 -94
- package/dist/tools/serper.d.ts +1 -32
- package/dist/tools/serper.js +1 -92
- package/dist/tools/wikipedia_query_run.cjs +15 -133
- package/dist/tools/wikipedia_query_run.d.ts +1 -54
- package/dist/tools/wikipedia_query_run.js +1 -131
- package/dist/tools/wolframalpha.cjs +15 -47
- package/dist/tools/wolframalpha.d.ts +1 -21
- package/dist/tools/wolframalpha.js +1 -45
- package/dist/types/openai-types.d.ts +1 -1
- package/dist/util/convex.cjs +15 -75
- package/dist/util/convex.d.ts +1 -26
- package/dist/util/convex.js +1 -74
- package/dist/util/event-source-parse.cjs +15 -224
- package/dist/util/event-source-parse.d.ts +1 -39
- package/dist/util/event-source-parse.js +1 -219
- package/dist/util/math.cjs +15 -123
- package/dist/util/math.d.ts +1 -40
- package/dist/util/math.js +1 -116
- package/dist/util/openapi.d.ts +2 -2
- package/dist/vectorstores/analyticdb.cjs +3 -333
- package/dist/vectorstores/analyticdb.d.ts +1 -110
- package/dist/vectorstores/analyticdb.js +1 -317
- package/dist/vectorstores/base.cjs +15 -165
- package/dist/vectorstores/base.d.ts +1 -104
- package/dist/vectorstores/base.js +1 -161
- package/dist/vectorstores/cassandra.cjs +15 -478
- package/dist/vectorstores/cassandra.d.ts +1 -162
- package/dist/vectorstores/cassandra.js +1 -476
- package/dist/vectorstores/chroma.cjs +3 -312
- package/dist/vectorstores/chroma.d.ts +1 -133
- package/dist/vectorstores/chroma.js +1 -299
- package/dist/vectorstores/clickhouse.cjs +3 -272
- package/dist/vectorstores/clickhouse.d.ts +1 -126
- package/dist/vectorstores/clickhouse.js +1 -259
- package/dist/vectorstores/closevector/node.cjs +15 -107
- package/dist/vectorstores/closevector/node.d.ts +1 -83
- package/dist/vectorstores/closevector/node.js +1 -105
- package/dist/vectorstores/closevector/web.cjs +15 -107
- package/dist/vectorstores/closevector/web.d.ts +1 -80
- package/dist/vectorstores/closevector/web.js +1 -105
- package/dist/vectorstores/cloudflare_vectorize.cjs +3 -186
- package/dist/vectorstores/cloudflare_vectorize.d.ts +1 -90
- package/dist/vectorstores/cloudflare_vectorize.js +1 -173
- package/dist/vectorstores/convex.cjs +15 -175
- package/dist/vectorstores/convex.d.ts +1 -134
- package/dist/vectorstores/convex.js +1 -173
- package/dist/vectorstores/elasticsearch.cjs +3 -287
- package/dist/vectorstores/elasticsearch.d.ts +1 -143
- package/dist/vectorstores/elasticsearch.js +1 -274
- package/dist/vectorstores/faiss.cjs +3 -378
- package/dist/vectorstores/faiss.d.ts +1 -127
- package/dist/vectorstores/faiss.js +1 -365
- package/dist/vectorstores/googlevertexai.cjs +3 -538
- package/dist/vectorstores/googlevertexai.d.ts +1 -181
- package/dist/vectorstores/googlevertexai.js +1 -521
- package/dist/vectorstores/hnswlib.cjs +15 -273
- package/dist/vectorstores/hnswlib.d.ts +1 -122
- package/dist/vectorstores/hnswlib.js +1 -271
- package/dist/vectorstores/lancedb.cjs +15 -126
- package/dist/vectorstores/lancedb.d.ts +1 -61
- package/dist/vectorstores/lancedb.js +1 -124
- package/dist/vectorstores/memory.cjs +6 -6
- package/dist/vectorstores/memory.d.ts +3 -3
- package/dist/vectorstores/memory.js +2 -2
- package/dist/vectorstores/milvus.cjs +3 -592
- package/dist/vectorstores/milvus.d.ts +1 -139
- package/dist/vectorstores/milvus.js +1 -579
- package/dist/vectorstores/momento_vector_index.cjs +3 -305
- package/dist/vectorstores/momento_vector_index.d.ts +1 -143
- package/dist/vectorstores/momento_vector_index.js +1 -292
- package/dist/vectorstores/mongodb_atlas.cjs +15 -218
- package/dist/vectorstores/mongodb_atlas.d.ts +1 -118
- package/dist/vectorstores/mongodb_atlas.js +1 -216
- package/dist/vectorstores/myscale.cjs +3 -263
- package/dist/vectorstores/myscale.d.ts +1 -129
- package/dist/vectorstores/myscale.js +1 -250
- package/dist/vectorstores/neo4j_vector.cjs +3 -578
- package/dist/vectorstores/neo4j_vector.d.ts +1 -75
- package/dist/vectorstores/neo4j_vector.js +1 -562
- package/dist/vectorstores/opensearch.cjs +3 -274
- package/dist/vectorstores/opensearch.d.ts +1 -119
- package/dist/vectorstores/opensearch.js +1 -261
- package/dist/vectorstores/pgvector.cjs +14 -404
- package/dist/vectorstores/pgvector.d.ts +1 -160
- package/dist/vectorstores/pgvector.js +1 -400
- package/dist/vectorstores/pinecone.cjs +3 -280
- package/dist/vectorstores/pinecone.d.ts +1 -120
- package/dist/vectorstores/pinecone.js +1 -264
- package/dist/vectorstores/prisma.cjs +15 -305
- package/dist/vectorstores/prisma.d.ts +1 -180
- package/dist/vectorstores/prisma.js +1 -303
- package/dist/vectorstores/qdrant.cjs +15 -195
- package/dist/vectorstores/qdrant.d.ts +1 -95
- package/dist/vectorstores/qdrant.js +1 -193
- package/dist/vectorstores/redis.cjs +15 -333
- package/dist/vectorstores/redis.d.ts +1 -186
- package/dist/vectorstores/redis.js +1 -331
- package/dist/vectorstores/rockset.cjs +15 -351
- package/dist/vectorstores/rockset.d.ts +1 -202
- package/dist/vectorstores/rockset.js +1 -347
- package/dist/vectorstores/singlestore.cjs +15 -220
- package/dist/vectorstores/singlestore.d.ts +1 -88
- package/dist/vectorstores/singlestore.js +1 -218
- package/dist/vectorstores/supabase.cjs +15 -217
- package/dist/vectorstores/supabase.d.ts +1 -118
- package/dist/vectorstores/supabase.js +1 -215
- package/dist/vectorstores/tigris.cjs +3 -143
- package/dist/vectorstores/tigris.d.ts +1 -74
- package/dist/vectorstores/tigris.js +1 -130
- package/dist/vectorstores/typeorm.cjs +15 -259
- package/dist/vectorstores/typeorm.d.ts +1 -109
- package/dist/vectorstores/typeorm.js +1 -256
- package/dist/vectorstores/typesense.cjs +15 -223
- package/dist/vectorstores/typesense.d.ts +1 -131
- package/dist/vectorstores/typesense.js +1 -221
- package/dist/vectorstores/usearch.cjs +3 -203
- package/dist/vectorstores/usearch.d.ts +1 -85
- package/dist/vectorstores/usearch.js +1 -187
- package/dist/vectorstores/vectara.cjs +3 -409
- package/dist/vectorstores/vectara.d.ts +1 -148
- package/dist/vectorstores/vectara.js +1 -396
- package/dist/vectorstores/vercel_postgres.cjs +15 -320
- package/dist/vectorstores/vercel_postgres.d.ts +1 -145
- package/dist/vectorstores/vercel_postgres.js +1 -318
- package/dist/vectorstores/voy.cjs +15 -156
- package/dist/vectorstores/voy.d.ts +1 -73
- package/dist/vectorstores/voy.js +1 -154
- package/dist/vectorstores/weaviate.cjs +3 -347
- package/dist/vectorstores/weaviate.d.ts +1 -140
- package/dist/vectorstores/weaviate.js +1 -333
- package/dist/vectorstores/xata.cjs +15 -108
- package/dist/vectorstores/xata.d.ts +1 -70
- package/dist/vectorstores/xata.js +1 -106
- package/dist/vectorstores/zep.cjs +15 -313
- package/dist/vectorstores/zep.d.ts +1 -146
- package/dist/vectorstores/zep.js +1 -311
- package/package.json +4 -265
- package/dist/chat_models/googlevertexai/common.cjs +0 -274
- package/dist/chat_models/googlevertexai/common.d.ts +0 -123
- package/dist/chat_models/googlevertexai/common.js +0 -269
- package/dist/chat_models/iflytek_xinghuo/common.cjs +0 -340
- package/dist/chat_models/iflytek_xinghuo/common.d.ts +0 -165
- package/dist/chat_models/iflytek_xinghuo/common.js +0 -336
- package/dist/llms/googlevertexai/common.cjs +0 -168
- package/dist/llms/googlevertexai/common.d.ts +0 -82
- package/dist/llms/googlevertexai/common.js +0 -164
- package/dist/tools/gmail/base.cjs +0 -69
- package/dist/tools/gmail/base.d.ts +0 -19
- package/dist/tools/gmail/base.js +0 -65
- package/dist/tools/gmail/create_draft.cjs +0 -62
- package/dist/tools/gmail/create_draft.d.ts +0 -35
- package/dist/tools/gmail/create_draft.js +0 -58
- package/dist/tools/gmail/descriptions.cjs +0 -118
- package/dist/tools/gmail/descriptions.d.ts +0 -5
- package/dist/tools/gmail/descriptions.js +0 -115
- package/dist/tools/gmail/get_message.cjs +0 -83
- package/dist/tools/gmail/get_message.d.ts +0 -18
- package/dist/tools/gmail/get_message.js +0 -79
- package/dist/tools/gmail/get_thread.cjs +0 -89
- package/dist/tools/gmail/get_thread.d.ts +0 -18
- package/dist/tools/gmail/get_thread.js +0 -85
- package/dist/tools/gmail/search.cjs +0 -118
- package/dist/tools/gmail/search.d.ts +0 -29
- package/dist/tools/gmail/search.js +0 -114
- package/dist/tools/gmail/send_message.cjs +0 -74
- package/dist/tools/gmail/send_message.d.ts +0 -35
- package/dist/tools/gmail/send_message.js +0 -70
- package/dist/util/chunk.cjs +0 -11
- package/dist/util/chunk.d.ts +0 -1
- package/dist/util/chunk.js +0 -7
- package/dist/util/googlevertexai-gauth.cjs +0 -36
- package/dist/util/googlevertexai-gauth.d.ts +0 -8
- package/dist/util/googlevertexai-gauth.js +0 -32
- package/dist/util/googlevertexai-webauth.cjs +0 -96
- package/dist/util/googlevertexai-webauth.d.ts +0 -22
- package/dist/util/googlevertexai-webauth.js +0 -92
- package/dist/util/iflytek_websocket_stream.cjs +0 -81
- package/dist/util/iflytek_websocket_stream.d.ts +0 -27
- package/dist/util/iflytek_websocket_stream.js +0 -77
- package/dist/util/llama_cpp.cjs +0 -34
- package/dist/util/llama_cpp.d.ts +0 -46
- package/dist/util/llama_cpp.js +0 -28
- package/dist/util/momento.cjs +0 -26
- package/dist/util/momento.d.ts +0 -9
- package/dist/util/momento.js +0 -22
- package/dist/util/openai-format-fndef.cjs +0 -81
- package/dist/util/openai-format-fndef.d.ts +0 -44
- package/dist/util/openai-format-fndef.js +0 -77
- package/dist/util/openai.cjs +0 -21
- package/dist/util/openai.d.ts +0 -1
- package/dist/util/openai.js +0 -17
- package/dist/vectorstores/closevector/common.cjs +0 -128
- package/dist/vectorstores/closevector/common.d.ts +0 -82
- package/dist/vectorstores/closevector/common.js +0 -124
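
The file list above shows one pattern repeated across the package: almost every provider-specific entrypoint under `dist/` shrinks to a thin re-export of the new `@langchain/community` package, while the provider-only helper files (the `+0 -N` entries at the end of the list) are removed outright. The hunks below show the three builds of the `chat_models/llama_cpp` entrypoint as a representative example. For consumers, here is a minimal sketch of what the change means, assuming `@langchain/community` is available at runtime (installed directly or pulled in by this release):

```typescript
// The legacy entrypoint keeps resolving, because it now re-exports the
// community package (see the `export * from ...` lines in the hunks below).
import { ChatLlamaCpp } from "langchain/chat_models/llama_cpp";

// The same class is also importable from its new home:
import { ChatLlamaCpp as CommunityChatLlamaCpp } from "@langchain/community/chat_models/llama_cpp";

// Both bindings point at the one implementation.
console.log(ChatLlamaCpp === CommunityChatLlamaCpp); // true
```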
package/dist/chat_models/llama_cpp.cjs
@@ -1,277 +1,17 @@
 "use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+      desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __exportStar = (this && this.__exportStar) || function(m, exports) {
+    for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
+};
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.ChatLlamaCpp = void 0;
-const node_llama_cpp_1 = require("node-llama-cpp");
-const base_js_1 = require("./base.cjs");
-const llama_cpp_js_1 = require("../util/llama_cpp.cjs");
-const index_js_1 = require("../schema/index.cjs");
-/**
- * To use this model you need to have the `node-llama-cpp` module installed.
- * This can be installed using `npm install -S node-llama-cpp` and the minimum
- * version supported in version 2.0.0.
- * This also requires that have a locally built version of Llama2 installed.
- * @example
- * ```typescript
- * // Initialize the ChatLlamaCpp model with the path to the model binary file.
- * const model = new ChatLlamaCpp({
- *   modelPath: "/Replace/with/path/to/your/model/gguf-llama2-q4_0.bin",
- *   temperature: 0.5,
- * });
- *
- * // Call the model with a message and await the response.
- * const response = await model.call([
- *   new HumanMessage({ content: "My name is John." }),
- * ]);
- *
- * // Log the response to the console.
- * console.log({ response });
- *
- * ```
- */
-class ChatLlamaCpp extends base_js_1.SimpleChatModel {
-    static lc_name() {
-        return "ChatLlamaCpp";
-    }
-    constructor(inputs) {
-        super(inputs);
-        Object.defineProperty(this, "maxTokens", {
-            enumerable: true,
-            configurable: true,
-            writable: true,
-            value: void 0
-        });
-        Object.defineProperty(this, "temperature", {
-            enumerable: true,
-            configurable: true,
-            writable: true,
-            value: void 0
-        });
-        Object.defineProperty(this, "topK", {
-            enumerable: true,
-            configurable: true,
-            writable: true,
-            value: void 0
-        });
-        Object.defineProperty(this, "topP", {
-            enumerable: true,
-            configurable: true,
-            writable: true,
-            value: void 0
-        });
-        Object.defineProperty(this, "trimWhitespaceSuffix", {
-            enumerable: true,
-            configurable: true,
-            writable: true,
-            value: void 0
-        });
-        Object.defineProperty(this, "_model", {
-            enumerable: true,
-            configurable: true,
-            writable: true,
-            value: void 0
-        });
-        Object.defineProperty(this, "_context", {
-            enumerable: true,
-            configurable: true,
-            writable: true,
-            value: void 0
-        });
-        Object.defineProperty(this, "_session", {
-            enumerable: true,
-            configurable: true,
-            writable: true,
-            value: void 0
-        });
-        this.maxTokens = inputs?.maxTokens;
-        this.temperature = inputs?.temperature;
-        this.topK = inputs?.topK;
-        this.topP = inputs?.topP;
-        this.trimWhitespaceSuffix = inputs?.trimWhitespaceSuffix;
-        this._model = (0, llama_cpp_js_1.createLlamaModel)(inputs);
-        this._context = (0, llama_cpp_js_1.createLlamaContext)(this._model, inputs);
-        this._session = null;
-    }
-    _llmType() {
-        return "llama2_cpp";
-    }
-    /** @ignore */
-    _combineLLMOutput() {
-        return {};
-    }
-    invocationParams() {
-        return {
-            maxTokens: this.maxTokens,
-            temperature: this.temperature,
-            topK: this.topK,
-            topP: this.topP,
-            trimWhitespaceSuffix: this.trimWhitespaceSuffix,
-        };
-    }
-    /** @ignore */
-    async _call(messages, options) {
-        let prompt = "";
-        if (messages.length > 1) {
-            // We need to build a new _session
-            prompt = this._buildSession(messages);
-        }
-        else if (!this._session) {
-            prompt = this._buildSession(messages);
-        }
-        else {
-            if (typeof messages[0].content !== "string") {
-                throw new Error("ChatLlamaCpp does not support non-string message content in sessions.");
-            }
-            // If we already have a session then we should just have a single prompt
-            prompt = messages[0].content;
-        }
-        try {
-            const promptOptions = {
-                onToken: options.onToken,
-                maxTokens: this?.maxTokens,
-                temperature: this?.temperature,
-                topK: this?.topK,
-                topP: this?.topP,
-                trimWhitespaceSuffix: this?.trimWhitespaceSuffix,
-            };
-            // @ts-expect-error - TS2531: Object is possibly 'null'.
-            const completion = await this._session.prompt(prompt, promptOptions);
-            return completion;
-        }
-        catch (e) {
-            throw new Error("Error getting prompt completion.");
-        }
-    }
-    async *_streamResponseChunks(input, _options, runManager) {
-        const promptOptions = {
-            temperature: this?.temperature,
-            topK: this?.topK,
-            topP: this?.topP,
-        };
-        const prompt = this._buildPrompt(input);
-        const stream = await this.caller.call(async () => this._context.evaluate(this._context.encode(prompt), promptOptions));
-        for await (const chunk of stream) {
-            yield new index_js_1.ChatGenerationChunk({
-                text: this._context.decode([chunk]),
-                message: new index_js_1.AIMessageChunk({
-                    content: this._context.decode([chunk]),
-                }),
-                generationInfo: {},
-            });
-            await runManager?.handleLLMNewToken(this._context.decode([chunk]) ?? "");
-        }
-    }
-    // This constructs a new session if we need to adding in any sys messages or previous chats
-    _buildSession(messages) {
-        let prompt = "";
-        let sysMessage = "";
-        let noSystemMessages = [];
-        let interactions = [];
-        // Let's see if we have a system message
-        if (messages.findIndex((msg) => msg._getType() === "system") !== -1) {
-            const sysMessages = messages.filter((message) => message._getType() === "system");
-            const systemMessageContent = sysMessages[sysMessages.length - 1].content;
-            if (typeof systemMessageContent !== "string") {
-                throw new Error("ChatLlamaCpp does not support non-string message content in sessions.");
-            }
-            // Only use the last provided system message
-            sysMessage = systemMessageContent;
-            // Now filter out the system messages
-            noSystemMessages = messages.filter((message) => message._getType() !== "system");
-        }
-        else {
-            noSystemMessages = messages;
-        }
-        // Lets see if we just have a prompt left or are their previous interactions?
-        if (noSystemMessages.length > 1) {
-            // Is the last message a prompt?
-            if (noSystemMessages[noSystemMessages.length - 1]._getType() === "human") {
-                const finalMessageContent = noSystemMessages[noSystemMessages.length - 1].content;
-                if (typeof finalMessageContent !== "string") {
-                    throw new Error("ChatLlamaCpp does not support non-string message content in sessions.");
-                }
-                prompt = finalMessageContent;
-                interactions = this._convertMessagesToInteractions(noSystemMessages.slice(0, noSystemMessages.length - 1));
-            }
-            else {
-                interactions = this._convertMessagesToInteractions(noSystemMessages);
-            }
-        }
-        else {
-            if (typeof noSystemMessages[0].content !== "string") {
-                throw new Error("ChatLlamaCpp does not support non-string message content in sessions.");
-            }
-            // If there was only a single message we assume it's a prompt
-            prompt = noSystemMessages[0].content;
-        }
-        // Now lets construct a session according to what we got
-        if (sysMessage !== "" && interactions.length > 0) {
-            this._session = new node_llama_cpp_1.LlamaChatSession({
-                context: this._context,
-                conversationHistory: interactions,
-                systemPrompt: sysMessage,
-            });
-        }
-        else if (sysMessage !== "" && interactions.length === 0) {
-            this._session = new node_llama_cpp_1.LlamaChatSession({
-                context: this._context,
-                systemPrompt: sysMessage,
-            });
-        }
-        else if (sysMessage === "" && interactions.length > 0) {
-            this._session = new node_llama_cpp_1.LlamaChatSession({
-                context: this._context,
-                conversationHistory: interactions,
-            });
-        }
-        else {
-            this._session = new node_llama_cpp_1.LlamaChatSession({
-                context: this._context,
-            });
-        }
-        return prompt;
-    }
-    // This builds a an array of interactions
-    _convertMessagesToInteractions(messages) {
-        const result = [];
-        for (let i = 0; i < messages.length; i += 2) {
-            if (i + 1 < messages.length) {
-                const prompt = messages[i].content;
-                const response = messages[i + 1].content;
-                if (typeof prompt !== "string" || typeof response !== "string") {
-                    throw new Error("ChatLlamaCpp does not support non-string message content.");
-                }
-                result.push({
-                    prompt,
-                    response,
-                });
-            }
-        }
-        return result;
-    }
-    _buildPrompt(input) {
-        const prompt = input
-            .map((message) => {
-            let messageText;
-            if (message._getType() === "human") {
-                messageText = `[INST] ${message.content} [/INST]`;
-            }
-            else if (message._getType() === "ai") {
-                messageText = message.content;
-            }
-            else if (message._getType() === "system") {
-                messageText = `<<SYS>> ${message.content} <</SYS>>`;
-            }
-            else if (index_js_1.ChatMessage.isInstance(message)) {
-                messageText = `\n\n${message.role[0].toUpperCase()}${message.role.slice(1)}: ${message.content}`;
-            }
-            else {
-                console.warn(`Unsupported message type passed to llama_cpp: "${message._getType()}"`);
-                messageText = "";
-            }
-            return messageText;
-        })
-            .join("\n");
-        return prompt;
-    }
-}
-exports.ChatLlamaCpp = ChatLlamaCpp;
+__exportStar(require("@langchain/community/chat_models/llama_cpp"), exports);
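
The CommonJS build cannot use ESM's `export *`, so the compiler injects the `__createBinding`/`__exportStar` helpers seen in the hunk above: they copy every named export except `default` onto `exports`, installing getters so the bindings stay live. A rough hand-written equivalent, simplified to plain property copies (the `community` variable name is ours, for illustration only):

```typescript
// Simplified CJS re-export: what __exportStar(require(...), exports) amounts to,
// minus the live getter bindings the real helper installs.
const community = require("@langchain/community/chat_models/llama_cpp");
for (const key of Object.keys(community)) {
  if (key !== "default" && !Object.prototype.hasOwnProperty.call(exports, key)) {
    exports[key] = community[key];
  }
}
```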
package/dist/chat_models/llama_cpp.d.ts
@@ -1,71 +1 @@
-
-import { SimpleChatModel, BaseChatModelParams } from "./base.js";
-import { LlamaBaseCppInputs } from "../util/llama_cpp.js";
-import { BaseLanguageModelCallOptions } from "../base_language/index.js";
-import { CallbackManagerForLLMRun } from "../callbacks/manager.js";
-import { BaseMessage, ChatGenerationChunk } from "../schema/index.js";
-/**
- * Note that the modelPath is the only required parameter. For testing you
- * can set this in the environment variable `LLAMA_PATH`.
- */
-export interface LlamaCppInputs extends LlamaBaseCppInputs, BaseChatModelParams {
-}
-export interface LlamaCppCallOptions extends BaseLanguageModelCallOptions {
-    /** The maximum number of tokens the response should contain. */
-    maxTokens?: number;
-    /** A function called when matching the provided token array */
-    onToken?: (tokens: number[]) => void;
-}
-/**
- * To use this model you need to have the `node-llama-cpp` module installed.
- * This can be installed using `npm install -S node-llama-cpp` and the minimum
- * version supported in version 2.0.0.
- * This also requires that have a locally built version of Llama2 installed.
- * @example
- * ```typescript
- * // Initialize the ChatLlamaCpp model with the path to the model binary file.
- * const model = new ChatLlamaCpp({
- *   modelPath: "/Replace/with/path/to/your/model/gguf-llama2-q4_0.bin",
- *   temperature: 0.5,
- * });
- *
- * // Call the model with a message and await the response.
- * const response = await model.call([
- *   new HumanMessage({ content: "My name is John." }),
- * ]);
- *
- * // Log the response to the console.
- * console.log({ response });
- *
- * ```
- */
-export declare class ChatLlamaCpp extends SimpleChatModel<LlamaCppCallOptions> {
-    CallOptions: LlamaCppCallOptions;
-    static inputs: LlamaCppInputs;
-    maxTokens?: number;
-    temperature?: number;
-    topK?: number;
-    topP?: number;
-    trimWhitespaceSuffix?: boolean;
-    _model: LlamaModel;
-    _context: LlamaContext;
-    _session: LlamaChatSession | null;
-    static lc_name(): string;
-    constructor(inputs: LlamaCppInputs);
-    _llmType(): string;
-    /** @ignore */
-    _combineLLMOutput(): {};
-    invocationParams(): {
-        maxTokens: number | undefined;
-        temperature: number | undefined;
-        topK: number | undefined;
-        topP: number | undefined;
-        trimWhitespaceSuffix: boolean | undefined;
-    };
-    /** @ignore */
-    _call(messages: BaseMessage[], options: this["ParsedCallOptions"]): Promise<string>;
-    _streamResponseChunks(input: BaseMessage[], _options: this["ParsedCallOptions"], runManager?: CallbackManagerForLLMRun): AsyncGenerator<ChatGenerationChunk>;
-    protected _buildSession(messages: BaseMessage[]): string;
-    protected _convertMessagesToInteractions(messages: BaseMessage[]): ConversationInteraction[];
-    protected _buildPrompt(input: BaseMessage[]): string;
-}
+export * from "@langchain/community/chat_models/llama_cpp";
package/dist/chat_models/llama_cpp.js
@@ -1,273 +1 @@
-
-import { SimpleChatModel } from "./base.js";
-import { createLlamaModel, createLlamaContext, } from "../util/llama_cpp.js";
-import { ChatGenerationChunk, AIMessageChunk, ChatMessage, } from "../schema/index.js";
-/**
- * To use this model you need to have the `node-llama-cpp` module installed.
- * This can be installed using `npm install -S node-llama-cpp` and the minimum
- * version supported in version 2.0.0.
- * This also requires that have a locally built version of Llama2 installed.
- * @example
- * ```typescript
- * // Initialize the ChatLlamaCpp model with the path to the model binary file.
- * const model = new ChatLlamaCpp({
- *   modelPath: "/Replace/with/path/to/your/model/gguf-llama2-q4_0.bin",
- *   temperature: 0.5,
- * });
- *
- * // Call the model with a message and await the response.
- * const response = await model.call([
- *   new HumanMessage({ content: "My name is John." }),
- * ]);
- *
- * // Log the response to the console.
- * console.log({ response });
- *
- * ```
- */
-export class ChatLlamaCpp extends SimpleChatModel {
-    static lc_name() {
-        return "ChatLlamaCpp";
-    }
-    constructor(inputs) {
-        super(inputs);
-        Object.defineProperty(this, "maxTokens", {
-            enumerable: true,
-            configurable: true,
-            writable: true,
-            value: void 0
-        });
-        Object.defineProperty(this, "temperature", {
-            enumerable: true,
-            configurable: true,
-            writable: true,
-            value: void 0
-        });
-        Object.defineProperty(this, "topK", {
-            enumerable: true,
-            configurable: true,
-            writable: true,
-            value: void 0
-        });
-        Object.defineProperty(this, "topP", {
-            enumerable: true,
-            configurable: true,
-            writable: true,
-            value: void 0
-        });
-        Object.defineProperty(this, "trimWhitespaceSuffix", {
-            enumerable: true,
-            configurable: true,
-            writable: true,
-            value: void 0
-        });
-        Object.defineProperty(this, "_model", {
-            enumerable: true,
-            configurable: true,
-            writable: true,
-            value: void 0
-        });
-        Object.defineProperty(this, "_context", {
-            enumerable: true,
-            configurable: true,
-            writable: true,
-            value: void 0
-        });
-        Object.defineProperty(this, "_session", {
-            enumerable: true,
-            configurable: true,
-            writable: true,
-            value: void 0
-        });
-        this.maxTokens = inputs?.maxTokens;
-        this.temperature = inputs?.temperature;
-        this.topK = inputs?.topK;
-        this.topP = inputs?.topP;
-        this.trimWhitespaceSuffix = inputs?.trimWhitespaceSuffix;
-        this._model = createLlamaModel(inputs);
-        this._context = createLlamaContext(this._model, inputs);
-        this._session = null;
-    }
-    _llmType() {
-        return "llama2_cpp";
-    }
-    /** @ignore */
-    _combineLLMOutput() {
-        return {};
-    }
-    invocationParams() {
-        return {
-            maxTokens: this.maxTokens,
-            temperature: this.temperature,
-            topK: this.topK,
-            topP: this.topP,
-            trimWhitespaceSuffix: this.trimWhitespaceSuffix,
-        };
-    }
-    /** @ignore */
-    async _call(messages, options) {
-        let prompt = "";
-        if (messages.length > 1) {
-            // We need to build a new _session
-            prompt = this._buildSession(messages);
-        }
-        else if (!this._session) {
-            prompt = this._buildSession(messages);
-        }
-        else {
-            if (typeof messages[0].content !== "string") {
-                throw new Error("ChatLlamaCpp does not support non-string message content in sessions.");
-            }
-            // If we already have a session then we should just have a single prompt
-            prompt = messages[0].content;
-        }
-        try {
-            const promptOptions = {
-                onToken: options.onToken,
-                maxTokens: this?.maxTokens,
-                temperature: this?.temperature,
-                topK: this?.topK,
-                topP: this?.topP,
-                trimWhitespaceSuffix: this?.trimWhitespaceSuffix,
-            };
-            // @ts-expect-error - TS2531: Object is possibly 'null'.
-            const completion = await this._session.prompt(prompt, promptOptions);
-            return completion;
-        }
-        catch (e) {
-            throw new Error("Error getting prompt completion.");
-        }
-    }
-    async *_streamResponseChunks(input, _options, runManager) {
-        const promptOptions = {
-            temperature: this?.temperature,
-            topK: this?.topK,
-            topP: this?.topP,
-        };
-        const prompt = this._buildPrompt(input);
-        const stream = await this.caller.call(async () => this._context.evaluate(this._context.encode(prompt), promptOptions));
-        for await (const chunk of stream) {
-            yield new ChatGenerationChunk({
-                text: this._context.decode([chunk]),
-                message: new AIMessageChunk({
-                    content: this._context.decode([chunk]),
-                }),
-                generationInfo: {},
-            });
-            await runManager?.handleLLMNewToken(this._context.decode([chunk]) ?? "");
-        }
-    }
-    // This constructs a new session if we need to adding in any sys messages or previous chats
-    _buildSession(messages) {
-        let prompt = "";
-        let sysMessage = "";
-        let noSystemMessages = [];
-        let interactions = [];
-        // Let's see if we have a system message
-        if (messages.findIndex((msg) => msg._getType() === "system") !== -1) {
-            const sysMessages = messages.filter((message) => message._getType() === "system");
-            const systemMessageContent = sysMessages[sysMessages.length - 1].content;
-            if (typeof systemMessageContent !== "string") {
-                throw new Error("ChatLlamaCpp does not support non-string message content in sessions.");
-            }
-            // Only use the last provided system message
-            sysMessage = systemMessageContent;
-            // Now filter out the system messages
-            noSystemMessages = messages.filter((message) => message._getType() !== "system");
-        }
-        else {
-            noSystemMessages = messages;
-        }
-        // Lets see if we just have a prompt left or are their previous interactions?
-        if (noSystemMessages.length > 1) {
-            // Is the last message a prompt?
-            if (noSystemMessages[noSystemMessages.length - 1]._getType() === "human") {
-                const finalMessageContent = noSystemMessages[noSystemMessages.length - 1].content;
-                if (typeof finalMessageContent !== "string") {
-                    throw new Error("ChatLlamaCpp does not support non-string message content in sessions.");
-                }
-                prompt = finalMessageContent;
-                interactions = this._convertMessagesToInteractions(noSystemMessages.slice(0, noSystemMessages.length - 1));
-            }
-            else {
-                interactions = this._convertMessagesToInteractions(noSystemMessages);
-            }
-        }
-        else {
-            if (typeof noSystemMessages[0].content !== "string") {
-                throw new Error("ChatLlamaCpp does not support non-string message content in sessions.");
-            }
-            // If there was only a single message we assume it's a prompt
-            prompt = noSystemMessages[0].content;
-        }
-        // Now lets construct a session according to what we got
-        if (sysMessage !== "" && interactions.length > 0) {
-            this._session = new LlamaChatSession({
-                context: this._context,
-                conversationHistory: interactions,
-                systemPrompt: sysMessage,
-            });
-        }
-        else if (sysMessage !== "" && interactions.length === 0) {
-            this._session = new LlamaChatSession({
-                context: this._context,
-                systemPrompt: sysMessage,
-            });
-        }
-        else if (sysMessage === "" && interactions.length > 0) {
-            this._session = new LlamaChatSession({
-                context: this._context,
-                conversationHistory: interactions,
-            });
-        }
-        else {
-            this._session = new LlamaChatSession({
-                context: this._context,
-            });
-        }
-        return prompt;
-    }
-    // This builds a an array of interactions
-    _convertMessagesToInteractions(messages) {
-        const result = [];
-        for (let i = 0; i < messages.length; i += 2) {
-            if (i + 1 < messages.length) {
-                const prompt = messages[i].content;
-                const response = messages[i + 1].content;
-                if (typeof prompt !== "string" || typeof response !== "string") {
-                    throw new Error("ChatLlamaCpp does not support non-string message content.");
-                }
-                result.push({
-                    prompt,
-                    response,
-                });
-            }
-        }
-        return result;
-    }
-    _buildPrompt(input) {
-        const prompt = input
-            .map((message) => {
-            let messageText;
-            if (message._getType() === "human") {
-                messageText = `[INST] ${message.content} [/INST]`;
-            }
-            else if (message._getType() === "ai") {
-                messageText = message.content;
-            }
-            else if (message._getType() === "system") {
-                messageText = `<<SYS>> ${message.content} <</SYS>>`;
-            }
-            else if (ChatMessage.isInstance(message)) {
-                messageText = `\n\n${message.role[0].toUpperCase()}${message.role.slice(1)}: ${message.content}`;
-            }
-            else {
-                console.warn(`Unsupported message type passed to llama_cpp: "${message._getType()}"`);
-                messageText = "";
-            }
-            return messageText;
-        })
-            .join("\n");
-        return prompt;
-    }
-}
+export * from "@langchain/community/chat_models/llama_cpp";
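
Since the implementation only moved packages, the usage documented in the removed JSDoc still applies through either entrypoint. A sketch following that example (the model path is a placeholder for your own local GGUF binary, and the optional `node-llama-cpp` module must be installed):

```typescript
import { ChatLlamaCpp } from "langchain/chat_models/llama_cpp";
import { HumanMessage } from "langchain/schema";

// Initialize the model with the path to a locally available model binary
// (placeholder path — replace with your own).
const model = new ChatLlamaCpp({
  modelPath: "/replace/with/path/to/your/model/gguf-llama2-q4_0.bin",
  temperature: 0.5,
});

// Call the model with a message and await the response.
const response = await model.call([
  new HumanMessage({ content: "My name is John." }),
]);

console.log({ response });
```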