langflow-base-nightly 0.6.5.dev8__py3-none-any.whl → 0.6.5.dev9__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release. This version of langflow-base-nightly might be problematic.

@@ -1849,7 +1849,7 @@
  "legacy": false,
  "lf_version": "1.4.2",
  "metadata": {
- "code_hash": "4e4f8ff4ae43",
+ "code_hash": "437f01c42e31",
  "dependencies": {
  "dependencies": [
  {
@@ -1973,7 +1973,7 @@
  "show": true,
  "title_case": false,
  "type": "code",
- "value": "from typing import Any\n\nfrom langchain_openai import OpenAIEmbeddings\n\nfrom lfx.base.embeddings.model import LCEmbeddingsModel\nfrom lfx.base.models.ollama_constants import OLLAMA_EMBEDDING_MODELS\nfrom lfx.base.models.openai_constants import OPENAI_EMBEDDING_MODEL_NAMES\nfrom lfx.base.models.watsonx_constants import WATSONX_EMBEDDING_MODEL_NAMES\nfrom lfx.field_typing import Embeddings\nfrom lfx.io import (\n BoolInput,\n DictInput,\n DropdownInput,\n FloatInput,\n IntInput,\n MessageTextInput,\n SecretStrInput,\n)\nfrom lfx.schema.dotdict import dotdict\n\n\nclass EmbeddingModelComponent(LCEmbeddingsModel):\n display_name = \"Embedding Model\"\n description = \"Generate embeddings using a specified provider.\"\n documentation: str = \"https://docs.langflow.org/components-embedding-models\"\n icon = \"binary\"\n name = \"EmbeddingModel\"\n category = \"models\"\n\n inputs = [\n DropdownInput(\n name=\"provider\",\n display_name=\"Model Provider\",\n options=[\"OpenAI\", \"Ollama\", \"WatsonX\"],\n value=\"OpenAI\",\n info=\"Select the embedding model provider\",\n real_time_refresh=True,\n options_metadata=[{\"icon\": \"OpenAI\"}, {\"icon\": \"Ollama\"}, {\"icon\": \"WatsonxAI\"}],\n ),\n DropdownInput(\n name=\"model\",\n display_name=\"Model Name\",\n options=OPENAI_EMBEDDING_MODEL_NAMES,\n value=OPENAI_EMBEDDING_MODEL_NAMES[0],\n info=\"Select the embedding model to use\",\n ),\n SecretStrInput(\n name=\"api_key\",\n display_name=\"OpenAI API Key\",\n info=\"Model Provider API key\",\n required=True,\n show=True,\n real_time_refresh=True,\n ),\n MessageTextInput(\n name=\"api_base\",\n display_name=\"API Base URL\",\n info=\"Base URL for the API. Leave empty for default.\",\n advanced=True,\n ),\n # Watson-specific inputs\n MessageTextInput(\n name=\"project_id\",\n display_name=\"Project ID\",\n info=\"Watson AI Project ID (required for WatsonX)\",\n show=False,\n ),\n IntInput(\n name=\"dimensions\",\n display_name=\"Dimensions\",\n info=\"The number of dimensions the resulting output embeddings should have. 
\"\n \"Only supported by certain models.\",\n advanced=True,\n ),\n IntInput(name=\"chunk_size\", display_name=\"Chunk Size\", advanced=True, value=1000),\n FloatInput(name=\"request_timeout\", display_name=\"Request Timeout\", advanced=True),\n IntInput(name=\"max_retries\", display_name=\"Max Retries\", advanced=True, value=3),\n BoolInput(name=\"show_progress_bar\", display_name=\"Show Progress Bar\", advanced=True),\n DictInput(\n name=\"model_kwargs\",\n display_name=\"Model Kwargs\",\n advanced=True,\n info=\"Additional keyword arguments to pass to the model.\",\n ),\n ]\n\n def build_embeddings(self) -> Embeddings:\n provider = self.provider\n model = self.model\n api_key = self.api_key\n api_base = self.api_base\n dimensions = self.dimensions\n chunk_size = self.chunk_size\n request_timeout = self.request_timeout\n max_retries = self.max_retries\n show_progress_bar = self.show_progress_bar\n model_kwargs = self.model_kwargs or {}\n\n if provider == \"OpenAI\":\n if not api_key:\n msg = \"OpenAI API key is required when using OpenAI provider\"\n raise ValueError(msg)\n return OpenAIEmbeddings(\n model=model,\n dimensions=dimensions or None,\n base_url=api_base or None,\n api_key=api_key,\n chunk_size=chunk_size,\n max_retries=max_retries,\n timeout=request_timeout or None,\n show_progress_bar=show_progress_bar,\n model_kwargs=model_kwargs,\n )\n\n if provider == \"Ollama\":\n try:\n from langchain_ollama import OllamaEmbeddings\n except ImportError:\n try:\n from langchain_community.embeddings import OllamaEmbeddings\n except ImportError:\n msg = \"Please install langchain-ollama: pip install langchain-ollama\"\n raise ImportError(msg) from None\n\n return OllamaEmbeddings(\n model=model,\n base_url=api_base or \"http://localhost:11434\",\n **model_kwargs,\n )\n\n if provider == \"WatsonX\":\n try:\n from langchain_ibm import WatsonxEmbeddings\n except ImportError:\n msg = \"Please install langchain-ibm: pip install langchain-ibm\"\n raise ImportError(msg) from None\n\n if not api_key:\n msg = \"Watson AI API key is required when using WatsonX provider\"\n raise ValueError(msg)\n\n project_id = self.project_id\n\n if not project_id:\n msg = \"Project ID is required for WatsonX\"\n raise ValueError(msg)\n\n params = {\n \"model_id\": model,\n \"url\": api_base or \"https://us-south.ml.cloud.ibm.com\",\n \"apikey\": api_key,\n }\n\n params[\"project_id\"] = project_id\n\n return WatsonxEmbeddings(**params)\n\n msg = f\"Unknown provider: {provider}\"\n raise ValueError(msg)\n\n def update_build_config(self, build_config: dotdict, field_value: Any, field_name: str | None = None) -> dotdict:\n if field_name == \"provider\":\n if field_value == \"OpenAI\":\n build_config[\"model\"][\"options\"] = OPENAI_EMBEDDING_MODEL_NAMES\n build_config[\"model\"][\"value\"] = OPENAI_EMBEDDING_MODEL_NAMES[0]\n build_config[\"api_key\"][\"display_name\"] = \"OpenAI API Key\"\n build_config[\"api_key\"][\"required\"] = True\n build_config[\"api_key\"][\"show\"] = True\n build_config[\"api_base\"][\"display_name\"] = \"OpenAI API Base URL\"\n build_config[\"project_id\"][\"show\"] = False\n\n elif field_value == \"Ollama\":\n build_config[\"model\"][\"options\"] = OLLAMA_EMBEDDING_MODELS\n build_config[\"model\"][\"value\"] = OLLAMA_EMBEDDING_MODELS[0]\n build_config[\"api_key\"][\"display_name\"] = \"API Key (Optional)\"\n build_config[\"api_key\"][\"required\"] = False\n build_config[\"api_key\"][\"show\"] = False\n build_config[\"api_base\"][\"display_name\"] = \"Ollama Base URL\"\n 
build_config[\"api_base\"][\"value\"] = \"http://localhost:11434\"\n build_config[\"project_id\"][\"show\"] = False\n\n elif field_value == \"WatsonX\":\n build_config[\"model\"][\"options\"] = WATSONX_EMBEDDING_MODEL_NAMES\n build_config[\"model\"][\"value\"] = WATSONX_EMBEDDING_MODEL_NAMES[0]\n build_config[\"api_key\"][\"display_name\"] = \"Watson AI API Key\"\n build_config[\"api_key\"][\"required\"] = True\n build_config[\"api_key\"][\"show\"] = True\n build_config[\"api_base\"][\"display_name\"] = \"Watson AI URL\"\n build_config[\"api_base\"][\"value\"] = \"https://us-south.ml.cloud.ibm.com\"\n build_config[\"project_id\"][\"show\"] = True\n\n return build_config\n"
+ "value": "from typing import Any\n\nfrom langchain_openai import OpenAIEmbeddings\n\nfrom lfx.base.embeddings.model import LCEmbeddingsModel\nfrom lfx.base.models.ollama_constants import OLLAMA_EMBEDDING_MODELS\nfrom lfx.base.models.openai_constants import OPENAI_EMBEDDING_MODEL_NAMES\nfrom lfx.base.models.watsonx_constants import WATSONX_EMBEDDING_MODEL_NAMES\nfrom lfx.field_typing import Embeddings\nfrom lfx.io import (\n BoolInput,\n DictInput,\n DropdownInput,\n FloatInput,\n IntInput,\n MessageTextInput,\n SecretStrInput,\n)\nfrom lfx.schema.dotdict import dotdict\n\n\nclass EmbeddingModelComponent(LCEmbeddingsModel):\n display_name = \"Embedding Model\"\n description = \"Generate embeddings using a specified provider.\"\n documentation: str = \"https://docs.langflow.org/components-embedding-models\"\n icon = \"binary\"\n name = \"EmbeddingModel\"\n category = \"models\"\n\n inputs = [\n DropdownInput(\n name=\"provider\",\n display_name=\"Model Provider\",\n options=[\"OpenAI\", \"Ollama\", \"IBM watsonx.ai\"],\n value=\"OpenAI\",\n info=\"Select the embedding model provider\",\n real_time_refresh=True,\n options_metadata=[{\"icon\": \"OpenAI\"}, {\"icon\": \"Ollama\"}, {\"icon\": \"WatsonxAI\"}],\n ),\n DropdownInput(\n name=\"model\",\n display_name=\"Model Name\",\n options=OPENAI_EMBEDDING_MODEL_NAMES,\n value=OPENAI_EMBEDDING_MODEL_NAMES[0],\n info=\"Select the embedding model to use\",\n ),\n SecretStrInput(\n name=\"api_key\",\n display_name=\"OpenAI API Key\",\n info=\"Model Provider API key\",\n required=True,\n show=True,\n real_time_refresh=True,\n ),\n MessageTextInput(\n name=\"api_base\",\n display_name=\"API Base URL\",\n info=\"Base URL for the API. Leave empty for default.\",\n advanced=True,\n ),\n # Watson-specific inputs\n MessageTextInput(\n name=\"project_id\",\n display_name=\"Project ID\",\n info=\"IBM watsonx.ai Project ID (required for IBM watsonx.ai)\",\n show=False,\n ),\n IntInput(\n name=\"dimensions\",\n display_name=\"Dimensions\",\n info=\"The number of dimensions the resulting output embeddings should have. 
\"\n \"Only supported by certain models.\",\n advanced=True,\n ),\n IntInput(name=\"chunk_size\", display_name=\"Chunk Size\", advanced=True, value=1000),\n FloatInput(name=\"request_timeout\", display_name=\"Request Timeout\", advanced=True),\n IntInput(name=\"max_retries\", display_name=\"Max Retries\", advanced=True, value=3),\n BoolInput(name=\"show_progress_bar\", display_name=\"Show Progress Bar\", advanced=True),\n DictInput(\n name=\"model_kwargs\",\n display_name=\"Model Kwargs\",\n advanced=True,\n info=\"Additional keyword arguments to pass to the model.\",\n ),\n ]\n\n def build_embeddings(self) -> Embeddings:\n provider = self.provider\n model = self.model\n api_key = self.api_key\n api_base = self.api_base\n dimensions = self.dimensions\n chunk_size = self.chunk_size\n request_timeout = self.request_timeout\n max_retries = self.max_retries\n show_progress_bar = self.show_progress_bar\n model_kwargs = self.model_kwargs or {}\n\n if provider == \"OpenAI\":\n if not api_key:\n msg = \"OpenAI API key is required when using OpenAI provider\"\n raise ValueError(msg)\n return OpenAIEmbeddings(\n model=model,\n dimensions=dimensions or None,\n base_url=api_base or None,\n api_key=api_key,\n chunk_size=chunk_size,\n max_retries=max_retries,\n timeout=request_timeout or None,\n show_progress_bar=show_progress_bar,\n model_kwargs=model_kwargs,\n )\n\n if provider == \"Ollama\":\n try:\n from langchain_ollama import OllamaEmbeddings\n except ImportError:\n try:\n from langchain_community.embeddings import OllamaEmbeddings\n except ImportError:\n msg = \"Please install langchain-ollama: pip install langchain-ollama\"\n raise ImportError(msg) from None\n\n return OllamaEmbeddings(\n model=model,\n base_url=api_base or \"http://localhost:11434\",\n **model_kwargs,\n )\n\n if provider == \"IBM watsonx.ai\":\n try:\n from langchain_ibm import WatsonxEmbeddings\n except ImportError:\n msg = \"Please install langchain-ibm: pip install langchain-ibm\"\n raise ImportError(msg) from None\n\n if not api_key:\n msg = \"IBM watsonx.ai API key is required when using IBM watsonx.ai provider\"\n raise ValueError(msg)\n\n project_id = self.project_id\n\n if not project_id:\n msg = \"Project ID is required for IBM watsonx.ai provider\"\n raise ValueError(msg)\n\n params = {\n \"model_id\": model,\n \"url\": api_base or \"https://us-south.ml.cloud.ibm.com\",\n \"apikey\": api_key,\n }\n\n params[\"project_id\"] = project_id\n\n return WatsonxEmbeddings(**params)\n\n msg = f\"Unknown provider: {provider}\"\n raise ValueError(msg)\n\n def update_build_config(self, build_config: dotdict, field_value: Any, field_name: str | None = None) -> dotdict:\n if field_name == \"provider\":\n if field_value == \"OpenAI\":\n build_config[\"model\"][\"options\"] = OPENAI_EMBEDDING_MODEL_NAMES\n build_config[\"model\"][\"value\"] = OPENAI_EMBEDDING_MODEL_NAMES[0]\n build_config[\"api_key\"][\"display_name\"] = \"OpenAI API Key\"\n build_config[\"api_key\"][\"required\"] = True\n build_config[\"api_key\"][\"show\"] = True\n build_config[\"api_base\"][\"display_name\"] = \"OpenAI API Base URL\"\n build_config[\"api_base\"][\"advanced\"] = True\n build_config[\"project_id\"][\"show\"] = False\n\n elif field_value == \"Ollama\":\n build_config[\"model\"][\"options\"] = OLLAMA_EMBEDDING_MODELS\n build_config[\"model\"][\"value\"] = OLLAMA_EMBEDDING_MODELS[0]\n build_config[\"api_key\"][\"display_name\"] = \"API Key (Optional)\"\n build_config[\"api_key\"][\"required\"] = False\n build_config[\"api_key\"][\"show\"] = False\n 
build_config[\"api_base\"][\"display_name\"] = \"Ollama Base URL\"\n build_config[\"api_base\"][\"value\"] = \"http://localhost:11434\"\n build_config[\"api_base\"][\"advanced\"] = True\n build_config[\"project_id\"][\"show\"] = False\n\n elif field_value == \"IBM watsonx.ai\":\n build_config[\"model\"][\"options\"] = WATSONX_EMBEDDING_MODEL_NAMES\n build_config[\"model\"][\"value\"] = WATSONX_EMBEDDING_MODEL_NAMES[0]\n build_config[\"api_key\"][\"display_name\"] = \"IBM watsonx.ai API Key\"\n build_config[\"api_key\"][\"required\"] = True\n build_config[\"api_key\"][\"show\"] = True\n build_config[\"api_base\"][\"display_name\"] = \"IBM watsonx.ai URL\"\n build_config[\"api_base\"][\"value\"] = \"https://us-south.ml.cloud.ibm.com\"\n build_config[\"api_base\"][\"advanced\"] = False\n build_config[\"project_id\"][\"show\"] = True\n\n return build_config\n"
  },
  "dimensions": {
  "_input_type": "IntInput",
@@ -2059,7 +2059,7 @@
  "advanced": false,
  "display_name": "Project ID",
  "dynamic": false,
- "info": "Watson AI Project ID (required for WatsonX)",
+ "info": "IBM watsonx.ai Project ID (required for IBM watsonx.ai)",
  "input_types": [
  "Message"
  ],
@@ -2089,7 +2089,7 @@
  "options": [
  "OpenAI",
  "Ollama",
- "WatsonX"
+ "IBM watsonx.ai"
  ],
  "options_metadata": [
  {
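
Taken together, the flow hunks above rename the third embedding provider from "WatsonX" to "IBM watsonx.ai", update the matching info strings and error messages, and make update_build_config set the "advanced" flag on api_base per provider (kept behind Advanced for OpenAI and Ollama, surfaced for watsonx so its URL is directly editable). The sketch below is a minimal, self-contained illustration of that toggle on plain dicts; the function name apply_provider_switch and the stripped-down config are illustrative only, not the packaged lfx component.

    from typing import Any

    WATSONX_DEFAULT_URL = "https://us-south.ml.cloud.ibm.com"
    OLLAMA_DEFAULT_URL = "http://localhost:11434"


    def apply_provider_switch(build_config: dict[str, dict[str, Any]], provider: str) -> dict[str, dict[str, Any]]:
        """Mirror the provider branch added in this diff, using plain dicts."""
        if provider == "IBM watsonx.ai":
            build_config["api_key"]["display_name"] = "IBM watsonx.ai API Key"
            build_config["api_key"]["required"] = True
            build_config["api_base"]["display_name"] = "IBM watsonx.ai URL"
            build_config["api_base"]["value"] = WATSONX_DEFAULT_URL
            build_config["api_base"]["advanced"] = False  # surfaced: URL must be editable
            build_config["project_id"]["show"] = True
        elif provider == "Ollama":
            build_config["api_key"]["required"] = False
            build_config["api_base"]["value"] = OLLAMA_DEFAULT_URL
            build_config["api_base"]["advanced"] = True   # kept behind "Advanced"
            build_config["project_id"]["show"] = False
        else:  # OpenAI
            build_config["api_base"]["advanced"] = True
            build_config["project_id"]["show"] = False
        return build_config


    config = {"api_key": {}, "api_base": {}, "project_id": {}}
    print(apply_provider_switch(config, "IBM watsonx.ai")["api_base"])
    # {'display_name': 'IBM watsonx.ai URL', 'value': 'https://us-south.ml.cloud.ibm.com', 'advanced': False}
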
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: langflow-base-nightly
- Version: 0.6.5.dev8
+ Version: 0.6.5.dev9
  Summary: A Python package with a built-in web application
  Project-URL: Repository, https://github.com/langflow-ai/langflow
  Project-URL: Documentation, https://docs.langflow.org
@@ -45,7 +45,7 @@ Requires-Dist: langchain-experimental<1.0.0,>=0.3.4
  Requires-Dist: langchain-ibm<1.0.0,>=0.3.8
  Requires-Dist: langchainhub~=0.1.15
  Requires-Dist: langchain~=0.3.21
- Requires-Dist: lfx-nightly==0.1.13.dev10
+ Requires-Dist: lfx-nightly==0.1.13.dev11
  Requires-Dist: loguru<1.0.0,>=0.7.1
  Requires-Dist: mcp~=1.10.1
  Requires-Dist: multiprocess<1.0.0,>=0.70.14
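
The nightly bumps its exact pin on lfx-nightly in lockstep (0.1.13.dev10 → 0.1.13.dev11). A quick way to confirm both pins after installation is importlib.metadata; a minimal sketch, assuming both nightlies are installed in the current environment:

    from importlib.metadata import version

    print(version("langflow-base-nightly"))  # expected: 0.6.5.dev9
    print(version("lfx-nightly"))            # expected: 0.1.13.dev11
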
@@ -2011,7 +2011,7 @@ langflow/initial_setup/starter_projects/Market Research.json,sha256=5hcDuvbjZzKs
  langflow/initial_setup/starter_projects/Meeting Summary.json,sha256=x6Qm5rbjfAS0PomDzk-_m1aoibAta3ZfzcKRiUXVxoc,194311
  langflow/initial_setup/starter_projects/Memory Chatbot.json,sha256=YErIgD-27xnCgabdTTYfm8TFNd8-BEZkCj1QVjxh8Ak,85022
  langflow/initial_setup/starter_projects/News Aggregator.json,sha256=qSULAHe52JZNnuPGTkFri8xcLZXYfp3IJRv_oz6sakc,158227
- langflow/initial_setup/starter_projects/Nvidia Remix.json,sha256=SXIvJc86uqOrDr42FSfaiWlt_1z_cocwFID1uQgtBCk,338713
+ langflow/initial_setup/starter_projects/Nvidia Remix.json,sha256=JEIRl5jIuvQrd0zH-X0avBWlpB5LtYxCfHSKHx0X-zk,338999
  langflow/initial_setup/starter_projects/Pokédex Agent.json,sha256=CT_CNPmbAIQe05l_lmx8JmplOvWMRS4J56bOhmEDtO4,130260
  langflow/initial_setup/starter_projects/Portfolio Website Code Generator.json,sha256=i-glgYxkJvINoRZtj28XWp0Oo-lBS69GErb9zssvjjA,153777
  langflow/initial_setup/starter_projects/Price Deal Finder.json,sha256=cpWCyhULlThIqZCr25fCI8jV7gZ5myhVuB07_BtMpdQ,140568
@@ -2230,7 +2230,7 @@ langflow/utils/util.py,sha256=bZqi9Fqj2mlp9tKUA-Q4ePpooxtbuVLjlAvdml4kcjs,1516
  langflow/utils/validate.py,sha256=BPqoIMvjl4wbMJTTWo1zMHP0kQCa2TfmDT9f-nPT9Ng,112
  langflow/utils/version.py,sha256=OjSj0smls9XnPd4-LpTH9AWyUO_NAn5mncqKkkXl_fw,2840
  langflow/utils/voice_utils.py,sha256=Ypxg8s5jFd1o5wBbx1W8oKK7vh4kwo0-iuTcFqIwy5I,3350
- langflow_base_nightly-0.6.5.dev8.dist-info/METADATA,sha256=1BsWy1mwuoQRWDHFF_4pyrffyH9kcoGAOzoauPb21GM,4377
- langflow_base_nightly-0.6.5.dev8.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
- langflow_base_nightly-0.6.5.dev8.dist-info/entry_points.txt,sha256=JvuLdXSrkeDmDdpb8M-VvFIzb84n4HmqUcIP10_EIF8,57
- langflow_base_nightly-0.6.5.dev8.dist-info/RECORD,,
+ langflow_base_nightly-0.6.5.dev9.dist-info/METADATA,sha256=_cptTb2AnhoBu5hgGn7rSlg4gNErM4QziyfxVshrjW4,4377
+ langflow_base_nightly-0.6.5.dev9.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+ langflow_base_nightly-0.6.5.dev9.dist-info/entry_points.txt,sha256=JvuLdXSrkeDmDdpb8M-VvFIzb84n4HmqUcIP10_EIF8,57
+ langflow_base_nightly-0.6.5.dev9.dist-info/RECORD,,
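
Each RECORD row above pairs a file path with its size and a SHA-256 digest encoded as urlsafe base64 without padding, per the wheel spec, so the changed Nvidia Remix.json and dist-info entries can be re-checked locally. Below is a minimal sketch of recomputing one row; it assumes the wheel has been unpacked into the current directory, and the helper name record_hash is illustrative, not part of the package.

    import base64
    import hashlib
    from pathlib import Path


    def record_hash(path: str) -> str:
        """Return the sha256=<digest> form used in a wheel's RECORD file."""
        digest = hashlib.sha256(Path(path).read_bytes()).digest()
        # RECORD uses urlsafe base64 with the trailing '=' padding stripped.
        return "sha256=" + base64.urlsafe_b64encode(digest).rstrip(b"=").decode("ascii")


    # Example: re-derive the row for the starter project whose hash changed in this release.
    path = "langflow/initial_setup/starter_projects/Nvidia Remix.json"
    print(f"{path},{record_hash(path)},{Path(path).stat().st_size}")
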