langroid 0.9.2__py3-none-any.whl → 0.9.4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
langroid/language_models/openai_gpt.py CHANGED
@@ -1017,7 +1017,8 @@ class OpenAIGPT(LanguageModel):
             LLMResponse(
                 message=completion,
                 cached=False,
-                oai_tool_calls=tool_calls or None,  # don't allow empty list [] here
+                # don't allow empty list [] here
+                oai_tool_calls=tool_calls or None if len(tool_deltas) > 0 else None,
                 function_call=function_call if has_function else None,
             ),
             openai_response.dict(),
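
For context on this streaming change, here is a standalone sketch (plain dicts rather than langroid's internal types; the delta layout mirrors OpenAI's streamed tool-call chunks) of why an accumulated tool-call list must be coerced to None when nothing was streamed:

    # Standalone sketch, not langroid's internals: accumulate streamed
    # tool-call deltas, and return None (never an empty list) when the
    # stream contained no tool calls at all.
    from typing import Any, Dict, List, Optional

    def assemble_tool_calls(
        tool_deltas: List[Dict[str, Any]]
    ) -> Optional[List[Dict[str, Any]]]:
        calls: Dict[int, Dict[str, Any]] = {}
        for delta in tool_deltas:
            slot = calls.setdefault(
                delta["index"], {"id": None, "name": "", "arguments": ""}
            )
            if delta.get("id"):
                slot["id"] = delta["id"]
            fn = delta.get("function") or {}
            slot["name"] += fn.get("name") or ""
            slot["arguments"] += fn.get("arguments") or ""
        tool_calls = [calls[i] for i in sorted(calls)]
        return tool_calls or None  # [] would wrongly signal "model made tool calls"

    assert assemble_tool_calls([]) is None
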
langroid/language_models/prompt_formatter/hf_formatter.py CHANGED
@@ -18,7 +18,7 @@ from langroid.language_models.prompt_formatter.base import PromptFormatter
 logger = logging.getLogger(__name__)
 
 
-def try_import_hf_modules() -> Tuple[Type[Any], Type[Any], Type[Any]]:
+def try_import_hf_modules() -> Tuple[Type[Any], Type[Any]]:
     """
     Attempts to import the AutoTokenizer class from the transformers package.
     Returns:
@@ -27,17 +27,16 @@ def try_import_hf_modules() -> Tuple[Type[Any], Type[Any], Type[Any]]:
         ImportError: If the transformers package is not installed.
     """
     try:
-        from huggingface_hub import HfApi, ModelFilter
+        from huggingface_hub import HfApi
         from transformers import AutoTokenizer
 
-        return AutoTokenizer, HfApi, ModelFilter
+        return AutoTokenizer, HfApi
     except ImportError:
         raise ImportError(
             """
             You are trying to use some/all of:
             HuggingFace transformers.AutoTokenizer,
-            huggingface_hub.HfApi,
-            huggingface_hub.ModelFilter,
+            huggingface_hub.HfApi,
             but these are not not installed
             by default with Langroid. Please install langroid using the
             `transformers` extra, like so:
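
The driver for these hf_formatter.py changes is that newer huggingface_hub releases drop the ModelFilter class; filter fields are instead passed directly as keyword arguments to HfApi.list_models. A minimal sketch of the keyword-argument style (the model-name prefix and limit are illustrative values, not taken from the package):

    from huggingface_hub import HfApi

    hf_api = HfApi()
    # Filter fields go straight onto list_models(); there is no ModelFilter
    # class to construct in recent huggingface_hub versions.
    models = hf_api.list_models(
        task="text-generation",
        model_name="llama-2",  # illustrative prefix
        limit=3,
    )
    for m in models:
        print(m.id)
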
@@ -48,7 +47,7 @@ def try_import_hf_modules() -> Tuple[Type[Any], Type[Any], Type[Any]]:
 
 
 def find_hf_formatter(model_name: str) -> str:
-    AutoTokenizer, HfApi, ModelFilter = try_import_hf_modules()
+    AutoTokenizer, HfApi = try_import_hf_modules()
     hf_api = HfApi()
     # try to find a matching model, with progressivly shorter prefixes of model_name
     model_name = model_name.lower().split("/")[-1]
@@ -57,19 +56,19 @@ def find_hf_formatter(model_name: str) -> str:
     for i in range(len(parts), 0, -1):
         prefix = "-".join(parts[:i])
         models = hf_api.list_models(
-            filter=ModelFilter(
-                task="text-generation",
-                model_name=prefix,
-            )
+            task="text-generation",
+            model_name=prefix,
         )
         try:
             mdl = next(models)
-        except StopIteration:
+            tokenizer = AutoTokenizer.from_pretrained(mdl.id)
+            if tokenizer.chat_template is not None:
+                return str(mdl.id)
+            else:
+                continue
+        except Exception:
             continue
 
-        tokenizer = AutoTokenizer.from_pretrained(mdl.id)
-        if tokenizer.chat_template is not None:
-            return str(mdl.id)
     return ""
 
 
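A usage sketch of the revised helper; the import path matches the module shown in this diff, while the model name is only an illustrative example:

    from langroid.language_models.prompt_formatter.hf_formatter import find_hf_formatter

    # Tries progressively shorter '-'-joined prefixes of the (lowercased,
    # org-stripped) name against the Hub and returns the first model id whose
    # tokenizer ships a chat template, or "" if nothing matches.
    model_id = find_hf_formatter("TheBloke/Llama-2-7B-Chat-GGUF")
    print(model_id or "no chat-template model found")
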
@@ -78,14 +77,12 @@ class HFFormatter(PromptFormatter):
 
     def __init__(self, config: HFPromptFormatterConfig):
         super().__init__(config)
-        AutoTokenizer, HfApi, ModelFilter = try_import_hf_modules()
+        AutoTokenizer, HfApi = try_import_hf_modules()
         self.config: HFPromptFormatterConfig = config
         hf_api = HfApi()
         models = hf_api.list_models(
-            filter=ModelFilter(
-                task="text-generation",
-                model_name=config.model_name,
-            )
+            task="text-generation",
+            model_name=config.model_name,
         )
         try:
             mdl = next(models)
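
For orientation, a minimal construction sketch; HFFormatter and the model_name field come from this diff, but the import path of HFPromptFormatterConfig and the model name are assumptions:

    from langroid.language_models.prompt_formatter.hf_formatter import HFFormatter
    # Assumed location of the config class; adjust to your langroid version.
    from langroid.language_models.config import HFPromptFormatterConfig

    config = HFPromptFormatterConfig(model_name="mistral-7b-instruct")  # illustrative
    formatter = HFFormatter(config)
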
METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: langroid
-Version: 0.9.2
+Version: 0.9.4
 Summary: Harness LLMs with Multi-Agent Programming
 License: MIT
 Author: Prasad Chalasani
RECORD CHANGED
@@ -71,10 +71,10 @@ langroid/language_models/azure_openai.py,sha256=G4le3j4YLHV7IwgB2C37hO3MKijZ1Kjy
 langroid/language_models/base.py,sha256=ytJ_0Jw5erbqrqLPp4JMCo_nIkwzUvBqoKUr8Sae9Qg,21792
 langroid/language_models/config.py,sha256=9Q8wk5a7RQr8LGMT_0WkpjY8S4ywK06SalVRjXlfCiI,378
 langroid/language_models/mock_lm.py,sha256=2Ka05SVGSUy096bsa2AyjaqC5jmcFoe7HycpdnICTIw,3031
-langroid/language_models/openai_gpt.py,sha256=2IS-1vWVHJvfGWOtTaJAkHv9WxtlStBxhUvXLqNRwRU,61337
+langroid/language_models/openai_gpt.py,sha256=T-Gznbv8Nqrkf8rmO3L6pRVracYE1oG_LhrfenzdfNA,61386
 langroid/language_models/prompt_formatter/__init__.py,sha256=2-5cdE24XoFDhifOLl8yiscohil1ogbP1ECkYdBlBsk,372
 langroid/language_models/prompt_formatter/base.py,sha256=eDS1sgRNZVnoajwV_ZIha6cba5Dt8xjgzdRbPITwx3Q,1221
-langroid/language_models/prompt_formatter/hf_formatter.py,sha256=TFL6ppmeQWnzr6CKQzRZFYY810zE1mr8DZnhw6i85ok,5217
+langroid/language_models/prompt_formatter/hf_formatter.py,sha256=PVJppmjRvD-2DF-XNC6mE05vTZ9wbu37SmXwZBQhad0,5055
 langroid/language_models/prompt_formatter/llama2_formatter.py,sha256=YdcO88qyBeuMENVIVvVqSYuEpvYSTndUe_jd6hVTko4,2899
 langroid/language_models/utils.py,sha256=o6Zo2cnnvKrfSgF26knVQ1xkSxEoE7yN85296gNdVOw,4858
 langroid/mytypes.py,sha256=ptAFxEAtiwmIfUnGisNotTe8wT9LKBf22lOfPgZoQIY,2368
@@ -133,8 +133,8 @@ langroid/vector_store/meilisearch.py,sha256=6frB7GFWeWmeKzRfLZIvzRjllniZ1cYj3Hmh
 langroid/vector_store/momento.py,sha256=qR-zBF1RKVHQZPZQYW_7g-XpTwr46p8HJuYPCkfJbM4,10534
 langroid/vector_store/qdrant_cloud.py,sha256=3im4Mip0QXLkR6wiqVsjV1QvhSElfxdFSuDKddBDQ-4,188
 langroid/vector_store/qdrantdb.py,sha256=v88lqFkepADvlN6lByUj9I4NEKa9X9lWH16uTPPbYrE,17457
-pyproject.toml,sha256=SQdwuOfdXmFdiLWiKfHRsjfp9H4XJ-GidVwwHufYHa4,7087
-langroid-0.9.2.dist-info/LICENSE,sha256=EgVbvA6VSYgUlvC3RvPKehSg7MFaxWDsFuzLOsPPfJg,1065
-langroid-0.9.2.dist-info/METADATA,sha256=WNUNN7UuPVe7pvYknP33B6XXwHT2wOD9S14IwNRgoeQ,54751
-langroid-0.9.2.dist-info/WHEEL,sha256=FMvqSimYX_P7y0a7UY-_Mc83r5zkBZsCYPm7Lr0Bsq4,88
-langroid-0.9.2.dist-info/RECORD,,
+pyproject.toml,sha256=P5_kgCb2FOLB3-jH84pXgSbuNaIyI69vHPRTpLDh-5w,7087
+langroid-0.9.4.dist-info/LICENSE,sha256=EgVbvA6VSYgUlvC3RvPKehSg7MFaxWDsFuzLOsPPfJg,1065
+langroid-0.9.4.dist-info/METADATA,sha256=ZG-cNsAaQviaQZVnTDjFD1rM6Bs5VWdIgQG59N5DcLA,54751
+langroid-0.9.4.dist-info/WHEEL,sha256=FMvqSimYX_P7y0a7UY-_Mc83r5zkBZsCYPm7Lr0Bsq4,88
+langroid-0.9.4.dist-info/RECORD,,
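
For readers unfamiliar with the RECORD format above: per the wheel spec, each entry is the file path, its sha256 hash encoded as urlsafe base64 with the '=' padding stripped, and the file size in bytes. A small sketch of how such an entry is computed (the path used is illustrative):

    import base64
    import hashlib
    from pathlib import Path

    def record_entry(path: str) -> str:
        # Format: path,sha256=<urlsafe-b64 digest without padding>,<size in bytes>
        data = Path(path).read_bytes()
        digest = hashlib.sha256(data).digest()
        b64 = base64.urlsafe_b64encode(digest).rstrip(b"=").decode("ascii")
        return f"{path},sha256={b64},{len(data)}"

    # e.g. run from inside an unpacked langroid wheel:
    print(record_entry("langroid/mytypes.py"))
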
pyproject.toml CHANGED
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "langroid"
-version = "0.9.2"
+version = "0.9.4"
 description = "Harness LLMs with Multi-Agent Programming"
 authors = ["Prasad Chalasani <pchalasani@gmail.com>"]
 readme = "README.md"