langflow-base-nightly 1.7.0.dev1-py3-none-any.whl → 1.7.0.dev3-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1849,7 +1849,7 @@
  "legacy": false,
  "lf_version": "1.4.2",
  "metadata": {
- "code_hash": "437f01c42e31",
+ "code_hash": "bfe1d527184b",
  "dependencies": {
  "dependencies": [
  {
@@ -1939,6 +1939,36 @@
  "type": "str",
  "value": ""
  },
+ "base_url_ibm_watsonx": {
+ "_input_type": "DropdownInput",
+ "advanced": false,
+ "combobox": false,
+ "dialog_inputs": {},
+ "display_name": "watsonx API Endpoint",
+ "dynamic": false,
+ "external_options": {},
+ "info": "The base URL of the API (IBM watsonx.ai only)",
+ "name": "base_url_ibm_watsonx",
+ "options": [
+ "https://us-south.ml.cloud.ibm.com",
+ "https://eu-de.ml.cloud.ibm.com",
+ "https://eu-gb.ml.cloud.ibm.com",
+ "https://au-syd.ml.cloud.ibm.com",
+ "https://jp-tok.ml.cloud.ibm.com",
+ "https://ca-tor.ml.cloud.ibm.com"
+ ],
+ "options_metadata": [],
+ "placeholder": "",
+ "real_time_refresh": true,
+ "required": false,
+ "show": false,
+ "title_case": false,
+ "toggle": false,
+ "tool_mode": false,
+ "trace_as_metadata": true,
+ "type": "str",
+ "value": "https://us-south.ml.cloud.ibm.com"
+ },
  "chunk_size": {
  "_input_type": "IntInput",
  "advanced": true,
@@ -1973,7 +2003,7 @@
  "show": true,
  "title_case": false,
  "type": "code",
- "value": "from typing import Any\n\nfrom langchain_openai import OpenAIEmbeddings\n\nfrom lfx.base.embeddings.model import LCEmbeddingsModel\nfrom lfx.base.models.ollama_constants import OLLAMA_EMBEDDING_MODELS\nfrom lfx.base.models.openai_constants import OPENAI_EMBEDDING_MODEL_NAMES\nfrom lfx.base.models.watsonx_constants import WATSONX_EMBEDDING_MODEL_NAMES\nfrom lfx.field_typing import Embeddings\nfrom lfx.io import (\n BoolInput,\n DictInput,\n DropdownInput,\n FloatInput,\n IntInput,\n MessageTextInput,\n SecretStrInput,\n)\nfrom lfx.schema.dotdict import dotdict\n\n\nclass EmbeddingModelComponent(LCEmbeddingsModel):\n display_name = \"Embedding Model\"\n description = \"Generate embeddings using a specified provider.\"\n documentation: str = \"https://docs.langflow.org/components-embedding-models\"\n icon = \"binary\"\n name = \"EmbeddingModel\"\n category = \"models\"\n\n inputs = [\n DropdownInput(\n name=\"provider\",\n display_name=\"Model Provider\",\n options=[\"OpenAI\", \"Ollama\", \"IBM watsonx.ai\"],\n value=\"OpenAI\",\n info=\"Select the embedding model provider\",\n real_time_refresh=True,\n options_metadata=[{\"icon\": \"OpenAI\"}, {\"icon\": \"Ollama\"}, {\"icon\": \"WatsonxAI\"}],\n ),\n DropdownInput(\n name=\"model\",\n display_name=\"Model Name\",\n options=OPENAI_EMBEDDING_MODEL_NAMES,\n value=OPENAI_EMBEDDING_MODEL_NAMES[0],\n info=\"Select the embedding model to use\",\n ),\n SecretStrInput(\n name=\"api_key\",\n display_name=\"OpenAI API Key\",\n info=\"Model Provider API key\",\n required=True,\n show=True,\n real_time_refresh=True,\n ),\n MessageTextInput(\n name=\"api_base\",\n display_name=\"API Base URL\",\n info=\"Base URL for the API. Leave empty for default.\",\n advanced=True,\n ),\n # Watson-specific inputs\n MessageTextInput(\n name=\"project_id\",\n display_name=\"Project ID\",\n info=\"IBM watsonx.ai Project ID (required for IBM watsonx.ai)\",\n show=False,\n ),\n IntInput(\n name=\"dimensions\",\n display_name=\"Dimensions\",\n info=\"The number of dimensions the resulting output embeddings should have. 
\"\n \"Only supported by certain models.\",\n advanced=True,\n ),\n IntInput(name=\"chunk_size\", display_name=\"Chunk Size\", advanced=True, value=1000),\n FloatInput(name=\"request_timeout\", display_name=\"Request Timeout\", advanced=True),\n IntInput(name=\"max_retries\", display_name=\"Max Retries\", advanced=True, value=3),\n BoolInput(name=\"show_progress_bar\", display_name=\"Show Progress Bar\", advanced=True),\n DictInput(\n name=\"model_kwargs\",\n display_name=\"Model Kwargs\",\n advanced=True,\n info=\"Additional keyword arguments to pass to the model.\",\n ),\n ]\n\n def build_embeddings(self) -> Embeddings:\n provider = self.provider\n model = self.model\n api_key = self.api_key\n api_base = self.api_base\n dimensions = self.dimensions\n chunk_size = self.chunk_size\n request_timeout = self.request_timeout\n max_retries = self.max_retries\n show_progress_bar = self.show_progress_bar\n model_kwargs = self.model_kwargs or {}\n\n if provider == \"OpenAI\":\n if not api_key:\n msg = \"OpenAI API key is required when using OpenAI provider\"\n raise ValueError(msg)\n return OpenAIEmbeddings(\n model=model,\n dimensions=dimensions or None,\n base_url=api_base or None,\n api_key=api_key,\n chunk_size=chunk_size,\n max_retries=max_retries,\n timeout=request_timeout or None,\n show_progress_bar=show_progress_bar,\n model_kwargs=model_kwargs,\n )\n\n if provider == \"Ollama\":\n try:\n from langchain_ollama import OllamaEmbeddings\n except ImportError:\n try:\n from langchain_community.embeddings import OllamaEmbeddings\n except ImportError:\n msg = \"Please install langchain-ollama: pip install langchain-ollama\"\n raise ImportError(msg) from None\n\n return OllamaEmbeddings(\n model=model,\n base_url=api_base or \"http://localhost:11434\",\n **model_kwargs,\n )\n\n if provider == \"IBM watsonx.ai\":\n try:\n from langchain_ibm import WatsonxEmbeddings\n except ImportError:\n msg = \"Please install langchain-ibm: pip install langchain-ibm\"\n raise ImportError(msg) from None\n\n if not api_key:\n msg = \"IBM watsonx.ai API key is required when using IBM watsonx.ai provider\"\n raise ValueError(msg)\n\n project_id = self.project_id\n\n if not project_id:\n msg = \"Project ID is required for IBM watsonx.ai provider\"\n raise ValueError(msg)\n\n params = {\n \"model_id\": model,\n \"url\": api_base or \"https://us-south.ml.cloud.ibm.com\",\n \"apikey\": api_key,\n }\n\n params[\"project_id\"] = project_id\n\n return WatsonxEmbeddings(**params)\n\n msg = f\"Unknown provider: {provider}\"\n raise ValueError(msg)\n\n def update_build_config(self, build_config: dotdict, field_value: Any, field_name: str | None = None) -> dotdict:\n if field_name == \"provider\":\n if field_value == \"OpenAI\":\n build_config[\"model\"][\"options\"] = OPENAI_EMBEDDING_MODEL_NAMES\n build_config[\"model\"][\"value\"] = OPENAI_EMBEDDING_MODEL_NAMES[0]\n build_config[\"api_key\"][\"display_name\"] = \"OpenAI API Key\"\n build_config[\"api_key\"][\"required\"] = True\n build_config[\"api_key\"][\"show\"] = True\n build_config[\"api_base\"][\"display_name\"] = \"OpenAI API Base URL\"\n build_config[\"api_base\"][\"advanced\"] = True\n build_config[\"project_id\"][\"show\"] = False\n\n elif field_value == \"Ollama\":\n build_config[\"model\"][\"options\"] = OLLAMA_EMBEDDING_MODELS\n build_config[\"model\"][\"value\"] = OLLAMA_EMBEDDING_MODELS[0]\n build_config[\"api_key\"][\"display_name\"] = \"API Key (Optional)\"\n build_config[\"api_key\"][\"required\"] = False\n build_config[\"api_key\"][\"show\"] = False\n 
build_config[\"api_base\"][\"display_name\"] = \"Ollama Base URL\"\n build_config[\"api_base\"][\"value\"] = \"http://localhost:11434\"\n build_config[\"api_base\"][\"advanced\"] = True\n build_config[\"project_id\"][\"show\"] = False\n\n elif field_value == \"IBM watsonx.ai\":\n build_config[\"model\"][\"options\"] = WATSONX_EMBEDDING_MODEL_NAMES\n build_config[\"model\"][\"value\"] = WATSONX_EMBEDDING_MODEL_NAMES[0]\n build_config[\"api_key\"][\"display_name\"] = \"IBM watsonx.ai API Key\"\n build_config[\"api_key\"][\"required\"] = True\n build_config[\"api_key\"][\"show\"] = True\n build_config[\"api_base\"][\"display_name\"] = \"IBM watsonx.ai URL\"\n build_config[\"api_base\"][\"value\"] = \"https://us-south.ml.cloud.ibm.com\"\n build_config[\"api_base\"][\"advanced\"] = False\n build_config[\"project_id\"][\"show\"] = True\n\n return build_config\n"
+ "value": "from typing import Any\n\nfrom langchain_openai import OpenAIEmbeddings\n\nfrom lfx.base.embeddings.model import LCEmbeddingsModel\nfrom lfx.base.models.ollama_constants import OLLAMA_EMBEDDING_MODELS\nfrom lfx.base.models.openai_constants import OPENAI_EMBEDDING_MODEL_NAMES\nfrom lfx.base.models.watsonx_constants import IBM_WATSONX_URLS, WATSONX_EMBEDDING_MODEL_NAMES\nfrom lfx.field_typing import Embeddings\nfrom lfx.io import (\n BoolInput,\n DictInput,\n DropdownInput,\n FloatInput,\n IntInput,\n MessageTextInput,\n SecretStrInput,\n)\nfrom lfx.schema.dotdict import dotdict\n\n\nclass EmbeddingModelComponent(LCEmbeddingsModel):\n display_name = \"Embedding Model\"\n description = \"Generate embeddings using a specified provider.\"\n documentation: str = \"https://docs.langflow.org/components-embedding-models\"\n icon = \"binary\"\n name = \"EmbeddingModel\"\n category = \"models\"\n\n inputs = [\n DropdownInput(\n name=\"provider\",\n display_name=\"Model Provider\",\n options=[\"OpenAI\", \"Ollama\", \"IBM watsonx.ai\"],\n value=\"OpenAI\",\n info=\"Select the embedding model provider\",\n real_time_refresh=True,\n options_metadata=[{\"icon\": \"OpenAI\"}, {\"icon\": \"Ollama\"}, {\"icon\": \"WatsonxAI\"}],\n ),\n MessageTextInput(\n name=\"api_base\",\n display_name=\"API Base URL\",\n info=\"Base URL for the API. Leave empty for default.\",\n advanced=True,\n ),\n DropdownInput(\n name=\"base_url_ibm_watsonx\",\n display_name=\"watsonx API Endpoint\",\n info=\"The base URL of the API (IBM watsonx.ai only)\",\n options=IBM_WATSONX_URLS,\n value=IBM_WATSONX_URLS[0],\n show=False,\n real_time_refresh=True,\n ),\n DropdownInput(\n name=\"model\",\n display_name=\"Model Name\",\n options=OPENAI_EMBEDDING_MODEL_NAMES,\n value=OPENAI_EMBEDDING_MODEL_NAMES[0],\n info=\"Select the embedding model to use\",\n ),\n SecretStrInput(\n name=\"api_key\",\n display_name=\"OpenAI API Key\",\n info=\"Model Provider API key\",\n required=True,\n show=True,\n real_time_refresh=True,\n ),\n # Watson-specific inputs\n MessageTextInput(\n name=\"project_id\",\n display_name=\"Project ID\",\n info=\"IBM watsonx.ai Project ID (required for IBM watsonx.ai)\",\n show=False,\n ),\n IntInput(\n name=\"dimensions\",\n display_name=\"Dimensions\",\n info=\"The number of dimensions the resulting output embeddings should have. 
\"\n \"Only supported by certain models.\",\n advanced=True,\n ),\n IntInput(name=\"chunk_size\", display_name=\"Chunk Size\", advanced=True, value=1000),\n FloatInput(name=\"request_timeout\", display_name=\"Request Timeout\", advanced=True),\n IntInput(name=\"max_retries\", display_name=\"Max Retries\", advanced=True, value=3),\n BoolInput(name=\"show_progress_bar\", display_name=\"Show Progress Bar\", advanced=True),\n DictInput(\n name=\"model_kwargs\",\n display_name=\"Model Kwargs\",\n advanced=True,\n info=\"Additional keyword arguments to pass to the model.\",\n ),\n ]\n\n def build_embeddings(self) -> Embeddings:\n provider = self.provider\n model = self.model\n api_key = self.api_key\n api_base = self.api_base\n base_url_ibm_watsonx = self.base_url_ibm_watsonx\n dimensions = self.dimensions\n chunk_size = self.chunk_size\n request_timeout = self.request_timeout\n max_retries = self.max_retries\n show_progress_bar = self.show_progress_bar\n model_kwargs = self.model_kwargs or {}\n\n if provider == \"OpenAI\":\n if not api_key:\n msg = \"OpenAI API key is required when using OpenAI provider\"\n raise ValueError(msg)\n return OpenAIEmbeddings(\n model=model,\n dimensions=dimensions or None,\n base_url=api_base or None,\n api_key=api_key,\n chunk_size=chunk_size,\n max_retries=max_retries,\n timeout=request_timeout or None,\n show_progress_bar=show_progress_bar,\n model_kwargs=model_kwargs,\n )\n\n if provider == \"Ollama\":\n try:\n from langchain_ollama import OllamaEmbeddings\n except ImportError:\n try:\n from langchain_community.embeddings import OllamaEmbeddings\n except ImportError:\n msg = \"Please install langchain-ollama: pip install langchain-ollama\"\n raise ImportError(msg) from None\n\n return OllamaEmbeddings(\n model=model,\n base_url=api_base or \"http://localhost:11434\",\n **model_kwargs,\n )\n\n if provider == \"IBM watsonx.ai\":\n try:\n from langchain_ibm import WatsonxEmbeddings\n except ImportError:\n msg = \"Please install langchain-ibm: pip install langchain-ibm\"\n raise ImportError(msg) from None\n\n if not api_key:\n msg = \"IBM watsonx.ai API key is required when using IBM watsonx.ai provider\"\n raise ValueError(msg)\n\n project_id = self.project_id\n\n if not project_id:\n msg = \"Project ID is required for IBM watsonx.ai provider\"\n raise ValueError(msg)\n\n params = {\n \"model_id\": model,\n \"url\": base_url_ibm_watsonx or \"https://us-south.ml.cloud.ibm.com\",\n \"apikey\": api_key,\n }\n\n params[\"project_id\"] = project_id\n\n return WatsonxEmbeddings(**params)\n\n msg = f\"Unknown provider: {provider}\"\n raise ValueError(msg)\n\n def update_build_config(self, build_config: dotdict, field_value: Any, field_name: str | None = None) -> dotdict:\n if field_name == \"provider\":\n if field_value == \"OpenAI\":\n build_config[\"model\"][\"options\"] = OPENAI_EMBEDDING_MODEL_NAMES\n build_config[\"model\"][\"value\"] = OPENAI_EMBEDDING_MODEL_NAMES[0]\n build_config[\"api_key\"][\"display_name\"] = \"OpenAI API Key\"\n build_config[\"api_key\"][\"required\"] = True\n build_config[\"api_key\"][\"show\"] = True\n build_config[\"api_base\"][\"display_name\"] = \"OpenAI API Base URL\"\n build_config[\"api_base\"][\"advanced\"] = True\n build_config[\"project_id\"][\"show\"] = False\n build_config[\"base_url_ibm_watsonx\"][\"show\"] = False\n\n elif field_value == \"Ollama\":\n build_config[\"model\"][\"options\"] = OLLAMA_EMBEDDING_MODELS\n build_config[\"model\"][\"value\"] = OLLAMA_EMBEDDING_MODELS[0]\n build_config[\"api_key\"][\"display_name\"] = 
\"API Key (Optional)\"\n build_config[\"api_key\"][\"required\"] = False\n build_config[\"api_key\"][\"show\"] = False\n build_config[\"api_base\"][\"display_name\"] = \"Ollama Base URL\"\n build_config[\"api_base\"][\"value\"] = \"http://localhost:11434\"\n build_config[\"api_base\"][\"advanced\"] = False\n build_config[\"project_id\"][\"show\"] = False\n build_config[\"base_url_ibm_watsonx\"][\"show\"] = False\n\n elif field_value == \"IBM watsonx.ai\":\n build_config[\"model\"][\"options\"] = WATSONX_EMBEDDING_MODEL_NAMES\n build_config[\"model\"][\"value\"] = WATSONX_EMBEDDING_MODEL_NAMES[0]\n build_config[\"api_key\"][\"display_name\"] = \"IBM watsonx.ai API Key\"\n build_config[\"api_key\"][\"required\"] = True\n build_config[\"api_key\"][\"show\"] = True\n build_config[\"api_base\"][\"show\"] = False\n build_config[\"base_url_ibm_watsonx\"][\"show\"] = True\n build_config[\"project_id\"][\"show\"] = True\n\n return build_config\n"
  },
  "dimensions": {
  "_input_type": "IntInput",
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: langflow-base-nightly
- Version: 1.7.0.dev1
+ Version: 1.7.0.dev3
  Summary: A Python package with a built-in web application
  Project-URL: Repository, https://github.com/langflow-ai/langflow
  Project-URL: Documentation, https://docs.langflow.org
@@ -45,7 +45,7 @@ Requires-Dist: langchain-experimental<1.0.0,>=0.3.4
  Requires-Dist: langchain-ibm<1.0.0,>=0.3.8
  Requires-Dist: langchainhub~=0.1.15
  Requires-Dist: langchain~=0.3.21
- Requires-Dist: lfx-nightly==0.2.0.dev1
+ Requires-Dist: lfx-nightly==0.2.0.dev3
  Requires-Dist: loguru<1.0.0,>=0.7.1
  Requires-Dist: mcp~=1.10.1
  Requires-Dist: multiprocess<1.0.0,>=0.70.14
@@ -2011,7 +2011,7 @@ langflow/initial_setup/starter_projects/Market Research.json,sha256=v4vXRQBSEx4F
  langflow/initial_setup/starter_projects/Meeting Summary.json,sha256=xX7Ft458_DPiKB4vg8OOToKdYd_EqXPnSJ6P7EZbC90,213405
  langflow/initial_setup/starter_projects/Memory Chatbot.json,sha256=lFruK6vHpUJK3xaqh-83tk0_iRd2_2ZdvrX5n1TZUv0,94569
  langflow/initial_setup/starter_projects/News Aggregator.json,sha256=PbwmBSo1-6Iqj2ipt9DYN0ZDXv9S78r2mkcpCoqfuRM,158227
- langflow/initial_setup/starter_projects/Nvidia Remix.json,sha256=oceT1jL2f3cHvR9yjHWEd7X5zFShxrJv1_5J1bXB3Xw,338999
+ langflow/initial_setup/starter_projects/Nvidia Remix.json,sha256=GRlfJnaPVmtem4sRX4y-N4W5-iuOf9NkvtTk7TQc6D4,340718
  langflow/initial_setup/starter_projects/Pokédex Agent.json,sha256=nBEon5Ckdtw9daBKK4aF1LUIhrCNZ1xHs2Cnc0ghWrk,130260
  langflow/initial_setup/starter_projects/Portfolio Website Code Generator.json,sha256=MSXhGpgGD2J_zFQgOQEhBuqQAHs8t6LepteKvmvNYrk,172871
  langflow/initial_setup/starter_projects/Price Deal Finder.json,sha256=JfU5OYyoYjwCfG7_oSExM2hWGYCoAV3NoGwXIMtbVaM,140568
@@ -2230,7 +2230,7 @@ langflow/utils/util.py,sha256=bZqi9Fqj2mlp9tKUA-Q4ePpooxtbuVLjlAvdml4kcjs,1516
  langflow/utils/validate.py,sha256=BPqoIMvjl4wbMJTTWo1zMHP0kQCa2TfmDT9f-nPT9Ng,112
  langflow/utils/version.py,sha256=OjSj0smls9XnPd4-LpTH9AWyUO_NAn5mncqKkkXl_fw,2840
  langflow/utils/voice_utils.py,sha256=Ypxg8s5jFd1o5wBbx1W8oKK7vh4kwo0-iuTcFqIwy5I,3350
- langflow_base_nightly-1.7.0.dev1.dist-info/METADATA,sha256=uEaj9Fsy2HyDKIQHjG2-9thW6x3D83Io1Z9N4-Om2-c,4375
- langflow_base_nightly-1.7.0.dev1.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
- langflow_base_nightly-1.7.0.dev1.dist-info/entry_points.txt,sha256=JvuLdXSrkeDmDdpb8M-VvFIzb84n4HmqUcIP10_EIF8,57
- langflow_base_nightly-1.7.0.dev1.dist-info/RECORD,,
+ langflow_base_nightly-1.7.0.dev3.dist-info/METADATA,sha256=wTOfR9qP1aFr87aVw_BxP6ab7DwyRMV_y1O-ece2wko,4375
+ langflow_base_nightly-1.7.0.dev3.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+ langflow_base_nightly-1.7.0.dev3.dist-info/entry_points.txt,sha256=JvuLdXSrkeDmDdpb8M-VvFIzb84n4HmqUcIP10_EIF8,57
+ langflow_base_nightly-1.7.0.dev3.dist-info/RECORD,,