langflow-base-nightly 0.6.5.dev2__py3-none-any.whl → 0.6.5.dev3__py3-none-any.whl

This diff shows the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between those versions as they appear in their respective public registries.
@@ -1791,7 +1791,7 @@
  "legacy": false,
  "lf_version": "1.4.2",
  "metadata": {
- "code_hash": "8607e963fdef",
+ "code_hash": "4e4f8ff4ae43",
  "dependencies": {
  "dependencies": [
  {
@@ -1801,9 +1801,21 @@
  {
  "name": "lfx",
  "version": null
+ },
+ {
+ "name": "langchain_ollama",
+ "version": "0.2.1"
+ },
+ {
+ "name": "langchain_community",
+ "version": "0.3.21"
+ },
+ {
+ "name": "langchain_ibm",
+ "version": "0.3.19"
  }
  ],
- "total_dependencies": 2
+ "total_dependencies": 5
  },
  "module": "lfx.components.models.embedding_model.EmbeddingModelComponent"
  },
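
Note on the dependency metadata above: the component's recorded dependencies grow from 2 to 5, adding langchain_ollama (0.2.1), langchain_community (0.3.21), and langchain_ibm (0.3.19) alongside the existing lfx entry. The sketch below is an illustration only (not part of the package) of one way to check which of these optional provider packages are importable before offering the matching providers; it assumes nothing beyond the Python standard library.

    # Illustrative sketch only -- not shipped in the wheel. Maps each newly
    # supported provider to the packages recorded in the dependency metadata
    # above and reports which providers the current environment can back.
    from importlib.util import find_spec

    OPTIONAL_PROVIDER_PACKAGES = {
        "Ollama": ("langchain_ollama", "langchain_community"),
        "WatsonX": ("langchain_ibm",),
    }

    def available_providers() -> list[str]:
        # langchain_openai is imported unconditionally by the component, so
        # OpenAI is always offered.
        providers = ["OpenAI"]
        for provider, packages in OPTIONAL_PROVIDER_PACKAGES.items():
            if any(find_spec(pkg) is not None for pkg in packages):
                providers.append(provider)
        return providers

    if __name__ == "__main__":
        print(available_providers())  # e.g. ['OpenAI', 'Ollama']
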
@@ -1903,7 +1915,7 @@
  "show": true,
  "title_case": false,
  "type": "code",
- "value": "from typing import Any\n\nfrom langchain_openai import OpenAIEmbeddings\n\nfrom lfx.base.embeddings.model import LCEmbeddingsModel\nfrom lfx.base.models.openai_constants import OPENAI_EMBEDDING_MODEL_NAMES\nfrom lfx.field_typing import Embeddings\nfrom lfx.io import (\n    BoolInput,\n    DictInput,\n    DropdownInput,\n    FloatInput,\n    IntInput,\n    MessageTextInput,\n    SecretStrInput,\n)\nfrom lfx.schema.dotdict import dotdict\n\n\nclass EmbeddingModelComponent(LCEmbeddingsModel):\n    display_name = \"Embedding Model\"\n    description = \"Generate embeddings using a specified provider.\"\n    documentation: str = \"https://docs.langflow.org/components-embedding-models\"\n    icon = \"binary\"\n    name = \"EmbeddingModel\"\n    category = \"models\"\n\n    inputs = [\n        DropdownInput(\n            name=\"provider\",\n            display_name=\"Model Provider\",\n            options=[\"OpenAI\"],\n            value=\"OpenAI\",\n            info=\"Select the embedding model provider\",\n            real_time_refresh=True,\n            options_metadata=[{\"icon\": \"OpenAI\"}],\n        ),\n        DropdownInput(\n            name=\"model\",\n            display_name=\"Model Name\",\n            options=OPENAI_EMBEDDING_MODEL_NAMES,\n            value=OPENAI_EMBEDDING_MODEL_NAMES[0],\n            info=\"Select the embedding model to use\",\n        ),\n        SecretStrInput(\n            name=\"api_key\",\n            display_name=\"OpenAI API Key\",\n            info=\"Model Provider API key\",\n            required=True,\n            show=True,\n            real_time_refresh=True,\n        ),\n        MessageTextInput(\n            name=\"api_base\",\n            display_name=\"API Base URL\",\n            info=\"Base URL for the API. Leave empty for default.\",\n            advanced=True,\n        ),\n        IntInput(\n            name=\"dimensions\",\n            display_name=\"Dimensions\",\n            info=\"The number of dimensions the resulting output embeddings should have. \"\n            \"Only supported by certain models.\",\n            advanced=True,\n        ),\n        IntInput(name=\"chunk_size\", display_name=\"Chunk Size\", advanced=True, value=1000),\n        FloatInput(name=\"request_timeout\", display_name=\"Request Timeout\", advanced=True),\n        IntInput(name=\"max_retries\", display_name=\"Max Retries\", advanced=True, value=3),\n        BoolInput(name=\"show_progress_bar\", display_name=\"Show Progress Bar\", advanced=True),\n        DictInput(\n            name=\"model_kwargs\",\n            display_name=\"Model Kwargs\",\n            advanced=True,\n            info=\"Additional keyword arguments to pass to the model.\",\n        ),\n    ]\n\n    def build_embeddings(self) -> Embeddings:\n        provider = self.provider\n        model = self.model\n        api_key = self.api_key\n        api_base = self.api_base\n        dimensions = self.dimensions\n        chunk_size = self.chunk_size\n        request_timeout = self.request_timeout\n        max_retries = self.max_retries\n        show_progress_bar = self.show_progress_bar\n        model_kwargs = self.model_kwargs or {}\n\n        if provider == \"OpenAI\":\n            if not api_key:\n                msg = \"OpenAI API key is required when using OpenAI provider\"\n                raise ValueError(msg)\n            return OpenAIEmbeddings(\n                model=model,\n                dimensions=dimensions or None,\n                base_url=api_base or None,\n                api_key=api_key,\n                chunk_size=chunk_size,\n                max_retries=max_retries,\n                timeout=request_timeout or None,\n                show_progress_bar=show_progress_bar,\n                model_kwargs=model_kwargs,\n            )\n        msg = f\"Unknown provider: {provider}\"\n        raise ValueError(msg)\n\n    def update_build_config(self, build_config: dotdict, field_value: Any, field_name: str | None = None) -> dotdict:\n        if field_name == \"provider\" and field_value == \"OpenAI\":\n            build_config[\"model\"][\"options\"] = OPENAI_EMBEDDING_MODEL_NAMES\n            build_config[\"model\"][\"value\"] = OPENAI_EMBEDDING_MODEL_NAMES[0]\n            build_config[\"api_key\"][\"display_name\"] = \"OpenAI API Key\"\n            build_config[\"api_base\"][\"display_name\"] = \"OpenAI API Base URL\"\n        return build_config\n"
+ "value": "from typing import Any\n\nfrom langchain_openai import OpenAIEmbeddings\n\nfrom lfx.base.embeddings.model import LCEmbeddingsModel\nfrom lfx.base.models.ollama_constants import OLLAMA_EMBEDDING_MODELS\nfrom lfx.base.models.openai_constants import OPENAI_EMBEDDING_MODEL_NAMES\nfrom lfx.base.models.watsonx_constants import WATSONX_EMBEDDING_MODEL_NAMES\nfrom lfx.field_typing import Embeddings\nfrom lfx.io import (\n    BoolInput,\n    DictInput,\n    DropdownInput,\n    FloatInput,\n    IntInput,\n    MessageTextInput,\n    SecretStrInput,\n)\nfrom lfx.schema.dotdict import dotdict\n\n\nclass EmbeddingModelComponent(LCEmbeddingsModel):\n    display_name = \"Embedding Model\"\n    description = \"Generate embeddings using a specified provider.\"\n    documentation: str = \"https://docs.langflow.org/components-embedding-models\"\n    icon = \"binary\"\n    name = \"EmbeddingModel\"\n    category = \"models\"\n\n    inputs = [\n        DropdownInput(\n            name=\"provider\",\n            display_name=\"Model Provider\",\n            options=[\"OpenAI\", \"Ollama\", \"WatsonX\"],\n            value=\"OpenAI\",\n            info=\"Select the embedding model provider\",\n            real_time_refresh=True,\n            options_metadata=[{\"icon\": \"OpenAI\"}, {\"icon\": \"Ollama\"}, {\"icon\": \"WatsonxAI\"}],\n        ),\n        DropdownInput(\n            name=\"model\",\n            display_name=\"Model Name\",\n            options=OPENAI_EMBEDDING_MODEL_NAMES,\n            value=OPENAI_EMBEDDING_MODEL_NAMES[0],\n            info=\"Select the embedding model to use\",\n        ),\n        SecretStrInput(\n            name=\"api_key\",\n            display_name=\"OpenAI API Key\",\n            info=\"Model Provider API key\",\n            required=True,\n            show=True,\n            real_time_refresh=True,\n        ),\n        MessageTextInput(\n            name=\"api_base\",\n            display_name=\"API Base URL\",\n            info=\"Base URL for the API. Leave empty for default.\",\n            advanced=True,\n        ),\n        # Watson-specific inputs\n        MessageTextInput(\n            name=\"project_id\",\n            display_name=\"Project ID\",\n            info=\"Watson AI Project ID (required for WatsonX)\",\n            show=False,\n        ),\n        IntInput(\n            name=\"dimensions\",\n            display_name=\"Dimensions\",\n            info=\"The number of dimensions the resulting output embeddings should have. \"\n            \"Only supported by certain models.\",\n            advanced=True,\n        ),\n        IntInput(name=\"chunk_size\", display_name=\"Chunk Size\", advanced=True, value=1000),\n        FloatInput(name=\"request_timeout\", display_name=\"Request Timeout\", advanced=True),\n        IntInput(name=\"max_retries\", display_name=\"Max Retries\", advanced=True, value=3),\n        BoolInput(name=\"show_progress_bar\", display_name=\"Show Progress Bar\", advanced=True),\n        DictInput(\n            name=\"model_kwargs\",\n            display_name=\"Model Kwargs\",\n            advanced=True,\n            info=\"Additional keyword arguments to pass to the model.\",\n        ),\n    ]\n\n    def build_embeddings(self) -> Embeddings:\n        provider = self.provider\n        model = self.model\n        api_key = self.api_key\n        api_base = self.api_base\n        dimensions = self.dimensions\n        chunk_size = self.chunk_size\n        request_timeout = self.request_timeout\n        max_retries = self.max_retries\n        show_progress_bar = self.show_progress_bar\n        model_kwargs = self.model_kwargs or {}\n\n        if provider == \"OpenAI\":\n            if not api_key:\n                msg = \"OpenAI API key is required when using OpenAI provider\"\n                raise ValueError(msg)\n            return OpenAIEmbeddings(\n                model=model,\n                dimensions=dimensions or None,\n                base_url=api_base or None,\n                api_key=api_key,\n                chunk_size=chunk_size,\n                max_retries=max_retries,\n                timeout=request_timeout or None,\n                show_progress_bar=show_progress_bar,\n                model_kwargs=model_kwargs,\n            )\n\n        if provider == \"Ollama\":\n            try:\n                from langchain_ollama import OllamaEmbeddings\n            except ImportError:\n                try:\n                    from langchain_community.embeddings import OllamaEmbeddings\n                except ImportError:\n                    msg = \"Please install langchain-ollama: pip install langchain-ollama\"\n                    raise ImportError(msg) from None\n\n            return OllamaEmbeddings(\n                model=model,\n                base_url=api_base or \"http://localhost:11434\",\n                **model_kwargs,\n            )\n\n        if provider == \"WatsonX\":\n            try:\n                from langchain_ibm import WatsonxEmbeddings\n            except ImportError:\n                msg = \"Please install langchain-ibm: pip install langchain-ibm\"\n                raise ImportError(msg) from None\n\n            if not api_key:\n                msg = \"Watson AI API key is required when using WatsonX provider\"\n                raise ValueError(msg)\n\n            project_id = self.project_id\n\n            if not project_id:\n                msg = \"Project ID is required for WatsonX\"\n                raise ValueError(msg)\n\n            params = {\n                \"model_id\": model,\n                \"url\": api_base or \"https://us-south.ml.cloud.ibm.com\",\n                \"apikey\": api_key,\n            }\n\n            params[\"project_id\"] = project_id\n\n            return WatsonxEmbeddings(**params)\n\n        msg = f\"Unknown provider: {provider}\"\n        raise ValueError(msg)\n\n    def update_build_config(self, build_config: dotdict, field_value: Any, field_name: str | None = None) -> dotdict:\n        if field_name == \"provider\":\n            if field_value == \"OpenAI\":\n                build_config[\"model\"][\"options\"] = OPENAI_EMBEDDING_MODEL_NAMES\n                build_config[\"model\"][\"value\"] = OPENAI_EMBEDDING_MODEL_NAMES[0]\n                build_config[\"api_key\"][\"display_name\"] = \"OpenAI API Key\"\n                build_config[\"api_key\"][\"required\"] = True\n                build_config[\"api_key\"][\"show\"] = True\n                build_config[\"api_base\"][\"display_name\"] = \"OpenAI API Base URL\"\n                build_config[\"project_id\"][\"show\"] = False\n\n            elif field_value == \"Ollama\":\n                build_config[\"model\"][\"options\"] = OLLAMA_EMBEDDING_MODELS\n                build_config[\"model\"][\"value\"] = OLLAMA_EMBEDDING_MODELS[0]\n                build_config[\"api_key\"][\"display_name\"] = \"API Key (Optional)\"\n                build_config[\"api_key\"][\"required\"] = False\n                build_config[\"api_key\"][\"show\"] = False\n                build_config[\"api_base\"][\"display_name\"] = \"Ollama Base URL\"\n                build_config[\"api_base\"][\"value\"] = \"http://localhost:11434\"\n                build_config[\"project_id\"][\"show\"] = False\n\n            elif field_value == \"WatsonX\":\n                build_config[\"model\"][\"options\"] = WATSONX_EMBEDDING_MODEL_NAMES\n                build_config[\"model\"][\"value\"] = WATSONX_EMBEDDING_MODEL_NAMES[0]\n                build_config[\"api_key\"][\"display_name\"] = \"Watson AI API Key\"\n                build_config[\"api_key\"][\"required\"] = True\n                build_config[\"api_key\"][\"show\"] = True\n                build_config[\"api_base\"][\"display_name\"] = \"Watson AI URL\"\n                build_config[\"api_base\"][\"value\"] = \"https://us-south.ml.cloud.ibm.com\"\n                build_config[\"project_id\"][\"show\"] = True\n\n        return build_config\n"
  },
  "dimensions": {
  "_input_type": "IntInput",
@@ -1984,6 +1996,29 @@
  "type": "dict",
  "value": {}
  },
+ "project_id": {
+ "_input_type": "MessageTextInput",
+ "advanced": false,
+ "display_name": "Project ID",
+ "dynamic": false,
+ "info": "Watson AI Project ID (required for WatsonX)",
+ "input_types": [
+ "Message"
+ ],
+ "list": false,
+ "list_add_label": "Add More",
+ "load_from_db": false,
+ "name": "project_id",
+ "placeholder": "",
+ "required": false,
+ "show": false,
+ "title_case": false,
+ "tool_mode": false,
+ "trace_as_input": true,
+ "trace_as_metadata": true,
+ "type": "str",
+ "value": ""
+ },
  "provider": {
  "_input_type": "DropdownInput",
  "advanced": false,
@@ -1994,7 +2029,9 @@
  "info": "Select the embedding model provider",
  "name": "provider",
  "options": [
- "OpenAI"
+ "OpenAI",
+ "Ollama",
+ "WatsonX"
  ],
  "options_metadata": [
  {
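
Taken together with the project_id field added above (show: false by default), the expanded provider dropdown drives update_build_config: selecting WatsonX reveals the Project ID field and swaps in the Watson AI base URL, while OpenAI and Ollama keep it hidden. The sketch below illustrates only the WatsonX toggle, with a plain dict standing in for lfx's dotdict.

    # Illustrative sketch of the WatsonX branch of update_build_config,
    # using a plain dict in place of lfx.schema.dotdict.dotdict.
    build_config = {
        "api_key": {"display_name": "", "required": True, "show": True},
        "api_base": {"display_name": "", "value": ""},
        "project_id": {"show": False},
    }

    def select_watsonx(config):
        config["api_key"]["display_name"] = "Watson AI API Key"
        config["api_base"]["display_name"] = "Watson AI URL"
        config["api_base"]["value"] = "https://us-south.ml.cloud.ibm.com"
        config["project_id"]["show"] = True  # reveal the Project ID field
        return config

    assert select_watsonx(build_config)["project_id"]["show"] is True
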
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: langflow-base-nightly
- Version: 0.6.5.dev2
+ Version: 0.6.5.dev3
  Summary: A Python package with a built-in web application
  Project-URL: Repository, https://github.com/langflow-ai/langflow
  Project-URL: Documentation, https://docs.langflow.org
@@ -45,7 +45,7 @@ Requires-Dist: langchain-experimental<1.0.0,>=0.3.4
  Requires-Dist: langchain-ibm<1.0.0,>=0.3.8
  Requires-Dist: langchainhub~=0.1.15
  Requires-Dist: langchain~=0.3.21
- Requires-Dist: lfx-nightly==0.1.13.dev4
+ Requires-Dist: lfx-nightly==0.1.13.dev5
  Requires-Dist: loguru<1.0.0,>=0.7.1
  Requires-Dist: mcp~=1.10.1
  Requires-Dist: multiprocess<1.0.0,>=0.70.14
@@ -2011,7 +2011,7 @@ langflow/initial_setup/starter_projects/Market Research.json,sha256=zh9jimAKCGCr
  langflow/initial_setup/starter_projects/Meeting Summary.json,sha256=higOgjzBNmM3Zg5N4zpeVOWsu6hEyY-a_QdxBtB0pKE,194311
  langflow/initial_setup/starter_projects/Memory Chatbot.json,sha256=KLa8rgXIkIquti9agKnoTLYbjy_Z42ZZwxHDgDsc-Fk,85022
  langflow/initial_setup/starter_projects/News Aggregator.json,sha256=xXYUIeFm3lSzhevg1keLOldHpGxYOjmKfn1wANzfBvE,154938
- langflow/initial_setup/starter_projects/Nvidia Remix.json,sha256=3DO_Rgz72V1OSfvSZt3_Q2DFGeYEZLTpc9l81ZdyJAs,329976
+ langflow/initial_setup/starter_projects/Nvidia Remix.json,sha256=PqTtMg1yiHHEbP4jJOIdtVF4m0GG6mM_arv7uk8vPz4,334796
  langflow/initial_setup/starter_projects/Pokédex Agent.json,sha256=uSML43jDgcVfk1_iMP7ZvV14Yo1SHlRoj5hfY_PKI3A,126971
  langflow/initial_setup/starter_projects/Portfolio Website Code Generator.json,sha256=e8uNXegPzpfn7bXa5fc4HkavFe-DWcVaSZUbTmEt1LU,151839
  langflow/initial_setup/starter_projects/Price Deal Finder.json,sha256=VA8Me1v3bEJ-VUAcXNKasEREsjOPzpuxej7fKNuL52g,137279
@@ -2230,7 +2230,7 @@ langflow/utils/util.py,sha256=bZqi9Fqj2mlp9tKUA-Q4ePpooxtbuVLjlAvdml4kcjs,1516
  langflow/utils/validate.py,sha256=BPqoIMvjl4wbMJTTWo1zMHP0kQCa2TfmDT9f-nPT9Ng,112
  langflow/utils/version.py,sha256=OjSj0smls9XnPd4-LpTH9AWyUO_NAn5mncqKkkXl_fw,2840
  langflow/utils/voice_utils.py,sha256=Ypxg8s5jFd1o5wBbx1W8oKK7vh4kwo0-iuTcFqIwy5I,3350
- langflow_base_nightly-0.6.5.dev2.dist-info/METADATA,sha256=wESwu5-LcNA1Qs1lc0UL1DdPqRPMxJa-OpYVCOfdSTk,4376
- langflow_base_nightly-0.6.5.dev2.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
- langflow_base_nightly-0.6.5.dev2.dist-info/entry_points.txt,sha256=JvuLdXSrkeDmDdpb8M-VvFIzb84n4HmqUcIP10_EIF8,57
- langflow_base_nightly-0.6.5.dev2.dist-info/RECORD,,
+ langflow_base_nightly-0.6.5.dev3.dist-info/METADATA,sha256=irrO4ZnarzjdFlfqRv6b8M4qhizL0AWhOFdXHJlpDnI,4376
+ langflow_base_nightly-0.6.5.dev3.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+ langflow_base_nightly-0.6.5.dev3.dist-info/entry_points.txt,sha256=JvuLdXSrkeDmDdpb8M-VvFIzb84n4HmqUcIP10_EIF8,57
+ langflow_base_nightly-0.6.5.dev3.dist-info/RECORD,,