langchainrb 0.11.4 → 0.12.0

This diff shows the changes between publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the packages as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: 22781a12fbb032cfd70fe45c9a0b96621b2d47f4d6ef912547b327790d6dfcaa
- data.tar.gz: c8ab04c3bf520e7af0d298f4cd15e9c9df149fd7618d8998c53ec8b684ddddea
+ metadata.gz: 7f29aad35bc35dc95eb8673b11578b51c7449a19818989d9da5e640c6fb219c7
+ data.tar.gz: 4d0c4d3d424a82c7f02fb9e49ca52a5bdca5dfbce19fbfa22f2d74ef46d81eb7
  SHA512:
- metadata.gz: 65c2068d4e0289b4aa081ab659b4470a312c387a138b4a367cf5717a9d232b39ab44f7dcbfddcb382e0f8ea1b735553c630e5106d49884e6d91c6cee667fcd1d
- data.tar.gz: a2a15c08e593903e6fdf67cc2ee1e3a5188c9d2aaae6a2c3dad991dbdb1b13c75cd76f5a557bbbc2581b30f87860d5719b75bb65ecb4c1c83e05b8e7aedc5714
+ metadata.gz: 91b6f4fc5056308eab9119dcfda1be16857e6e9e6e531977148b1e8f31b72090794b67e6855afb95633b8f836b8d20921bc5a069afdc745d1114892143a177e1
+ data.tar.gz: f7a7949ab2efd960eacf3a93f7beaa9104403a93619b8c95ea094901c2d3d19b89980c81d293ae16035c5ff51fe021a09f2e81e2c0ed6854bff87d30e6def925
data/CHANGELOG.md CHANGED
@@ -1,5 +1,8 @@
  ## [Unreleased]

+ ## [0.12.0] - 2024-04-22
+ - [BREAKING] Rename `dimension` parameter to `dimensions` everywhere
+
  ## [0.11.4] - 2024-04-19
  - New `Langchain::LLM::AWSBedrock#chat()` to wrap Bedrock Claude requests
  - New `Langchain::LLM::OllamaResponse#total_tokens()` method
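The breaking change is a straight rename: the `dimension` key in each LLM's `DEFAULTS` hash and the `default_dimension` reader become `dimensions` and `default_dimensions`. A minimal migration sketch for downstream code; the Cohere wrapper and environment variable below are only illustrative, and the 1024 comes from the Cohere defaults in this diff:

    llm = Langchain::LLM::Cohere.new(api_key: ENV["COHERE_API_KEY"])

    # langchainrb 0.11.x
    llm.default_dimension   #=> 1024

    # langchainrb 0.12.0
    llm.default_dimensions  #=> 1024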
@@ -24,8 +24,8 @@ module Langchain::LLM
  # A client for communicating with the LLM
  attr_reader :client

- def default_dimension
- self.class.const_get(:DEFAULTS).dig(:dimension)
+ def default_dimensions
+ self.class.const_get(:DEFAULTS).dig(:dimensions)
  end

  #
@@ -15,7 +15,7 @@ module Langchain::LLM
  temperature: 0.0,
  completion_model_name: "command",
  embeddings_model_name: "small",
- dimension: 1024,
+ dimensions: 1024,
  truncate: "START"
  }.freeze

@@ -13,7 +13,7 @@ module Langchain::LLM
  class GooglePalm < Base
  DEFAULTS = {
  temperature: 0.0,
- dimension: 768, # This is what the `embedding-gecko-001` model generates
+ dimensions: 768, # This is what the `embedding-gecko-001` model generates
  completion_model_name: "text-bison-001",
  chat_completion_model_name: "chat-bison-001",
  embeddings_model_name: "embedding-gecko-001"
@@ -16,7 +16,7 @@ module Langchain::LLM
  max_output_tokens: 1000,
  top_p: 0.8,
  top_k: 40,
- dimension: 768,
+ dimensions: 768,
  completion_model_name: "text-bison", # Optional: tect-bison@001
  embeddings_model_name: "textembedding-gecko"
  }.freeze
@@ -16,7 +16,7 @@ module Langchain::LLM
  DEFAULTS = {
  temperature: 0.0,
  embeddings_model_name: "sentence-transformers/all-MiniLM-L6-v2",
- dimension: 384 # Vector size generated by the above model
+ dimensions: 384 # Vector size generated by the above model
  }.freeze

  #
@@ -41,9 +41,9 @@ module Langchain::LLM

  # Returns the # of vector dimensions for the embeddings
  # @return [Integer] The # of vector dimensions
- def default_dimension
+ def default_dimensions
  # since Ollama can run multiple models, look it up or generate an embedding and return the size
- @default_dimension ||=
+ @default_dimensions ||=
  EMBEDDING_SIZES.fetch(defaults[:embeddings_model_name].to_sym) do
  embed(text: "test").embedding.size
  end
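For Ollama, `default_dimensions` stays lazy: it first checks the gem's `EMBEDDING_SIZES` table for the configured embeddings model and only embeds a one-off probe string when the model is not listed, memoizing whichever result it gets. A usage sketch; the URL below is an illustrative assumption, not taken from this diff:

    llm = Langchain::LLM::Ollama.new(url: "http://localhost:11434")
    # Table lookup if the model is known, otherwise a single embed(text: "test")
    # call whose vector size is cached for subsequent calls.
    llm.default_dimensions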
@@ -184,7 +184,7 @@ module Langchain::LLM
  complete(prompt: prompt)
  end

- def default_dimension
+ def default_dimensions
  @defaults[:dimensions] || EMBEDDING_SIZES.fetch(defaults[:embeddings_model_name])
  end

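In this hunk an explicitly configured `dimensions` value takes precedence over the per-model `EMBEDDING_SIZES` lookup. A standalone sketch of that precedence; the model name, sizes, and hashes below are illustrative stand-ins, not the gem's actual constants:

    # Mirrors the fallback in default_dimensions above.
    embedding_sizes = {"text-embedding-ada-002" => 1536}
    defaults = {embeddings_model_name: "text-embedding-ada-002", dimensions: 256}

    defaults[:dimensions] || embedding_sizes.fetch(defaults[:embeddings_model_name])
    #=> 256; the explicit setting wins over the model lookup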
@@ -24,7 +24,7 @@ module Langchain::LLM
  # TODO: Design the interface to pass and use different models
  completion_model_name: "replicate/vicuna-13b",
  embeddings_model_name: "creatorrr/all-mpnet-base-v2",
- dimension: 384
+ dimensions: 384
  }.freeze

  #
@@ -104,7 +104,7 @@ module Langchain::Vectorsearch
  end

  def default_vector_settings
- {type: "dense_vector", dims: llm.default_dimension}
+ {type: "dense_vector", dims: llm.default_dimensions}
  end

  def vector_settings
@@ -54,7 +54,7 @@ module Langchain::Vectorsearch
  @db_path = db_path
  @table_name = index_name

- @vector_dimension = llm.default_dimension
+ @vector_dimensions = llm.default_dimensions

  super(llm: llm)
  end
@@ -64,7 +64,7 @@ module Langchain::Vectorsearch
  status_code, response = @client.database.create_table(@table_name, [
  {"name" => "ID", "dataType" => "STRING", "primaryKey" => true},
  {"name" => "Doc", "dataType" => "STRING"},
- {"name" => "Embedding", "dataType" => "VECTOR_FLOAT", "dimensions" => @vector_dimension}
+ {"name" => "Embedding", "dataType" => "VECTOR_FLOAT", "dimensions" => @vector_dimensions}
  ])
  raise "Failed to create table: #{response}" if status_code != 200

@@ -26,7 +26,7 @@ module Langchain::Vectorsearch

  super(llm: llm)

- @client = ::Hnswlib::HierarchicalNSW.new(space: DEFAULT_METRIC, dim: llm.default_dimension)
+ @client = ::Hnswlib::HierarchicalNSW.new(space: DEFAULT_METRIC, dim: llm.default_dimensions)
  @path_to_index = path_to_index

  initialize_index
@@ -71,7 +71,7 @@ module Langchain::Vectorsearch
  type_params: [
  {
  key: "dim",
- value: llm.default_dimension.to_s
+ value: llm.default_dimensions.to_s
  }
  ]
  }
@@ -101,11 +101,11 @@ module Langchain::Vectorsearch
  def create_default_schema
  db.run "CREATE EXTENSION IF NOT EXISTS vector"
  namespace_column = @namespace_column
- vector_dimension = llm.default_dimension
+ vector_dimensions = llm.default_dimensions
  db.create_table? table_name.to_sym do
  primary_key :id
  text :content
- column :vectors, "vector(#{vector_dimension})"
+ column :vectors, "vector(#{vector_dimensions})"
  text namespace_column.to_sym, default: nil
  end
  end
@@ -104,7 +104,7 @@ module Langchain::Vectorsearch
  client.create_index(
  metric: DEFAULT_METRIC,
  name: index_name,
- dimension: llm.default_dimension
+ dimension: llm.default_dimensions
  )
  end

@@ -93,7 +93,7 @@ module Langchain::Vectorsearch
  collection_name: index_name,
  vectors: {
  distance: DEFAULT_METRIC.capitalize,
- size: llm.default_dimension
+ size: llm.default_dimensions
  }
  )
  end
@@ -1,5 +1,5 @@
  # frozen_string_literal: true

  module Langchain
- VERSION = "0.11.4"
+ VERSION = "0.12.0"
  end
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: langchainrb
  version: !ruby/object:Gem::Version
- version: 0.11.4
+ version: 0.12.0
  platform: ruby
  authors:
  - Andrei Bondarev
  autorequire:
  bindir: exe
  cert_chain: []
- date: 2024-04-19 00:00:00.000000000 Z
+ date: 2024-04-22 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  name: activesupport