modaic 0.2.0__tar.gz → 0.3.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

This version of modaic might be problematic.

Files changed (51)
  1. {modaic-0.2.0/src/modaic.egg-info → modaic-0.3.0}/PKG-INFO +2 -2
  2. {modaic-0.2.0 → modaic-0.3.0}/pyproject.toml +3 -2
  3. {modaic-0.2.0 → modaic-0.3.0}/src/modaic/module_utils.py +64 -26
  4. {modaic-0.2.0 → modaic-0.3.0}/src/modaic/precompiled.py +1 -0
  5. {modaic-0.2.0 → modaic-0.3.0/src/modaic.egg-info}/PKG-INFO +2 -2
  6. modaic-0.3.0/tests/test_auto.py +341 -0
  7. modaic-0.2.0/tests/test_auto.py +0 -180
  8. {modaic-0.2.0 → modaic-0.3.0}/LICENSE +0 -0
  9. {modaic-0.2.0 → modaic-0.3.0}/README.md +0 -0
  10. {modaic-0.2.0 → modaic-0.3.0}/setup.cfg +0 -0
  11. {modaic-0.2.0 → modaic-0.3.0}/src/modaic/__init__.py +0 -0
  12. {modaic-0.2.0 → modaic-0.3.0}/src/modaic/agents/rag_agent.py +0 -0
  13. {modaic-0.2.0 → modaic-0.3.0}/src/modaic/agents/registry.py +0 -0
  14. {modaic-0.2.0 → modaic-0.3.0}/src/modaic/auto.py +0 -0
  15. {modaic-0.2.0 → modaic-0.3.0}/src/modaic/context/__init__.py +0 -0
  16. {modaic-0.2.0 → modaic-0.3.0}/src/modaic/context/base.py +0 -0
  17. {modaic-0.2.0 → modaic-0.3.0}/src/modaic/context/dtype_mapping.py +0 -0
  18. {modaic-0.2.0 → modaic-0.3.0}/src/modaic/context/table.py +0 -0
  19. {modaic-0.2.0 → modaic-0.3.0}/src/modaic/context/text.py +0 -0
  20. {modaic-0.2.0 → modaic-0.3.0}/src/modaic/databases/__init__.py +0 -0
  21. {modaic-0.2.0 → modaic-0.3.0}/src/modaic/databases/graph_database.py +0 -0
  22. {modaic-0.2.0 → modaic-0.3.0}/src/modaic/databases/sql_database.py +0 -0
  23. {modaic-0.2.0 → modaic-0.3.0}/src/modaic/databases/vector_database/__init__.py +0 -0
  24. {modaic-0.2.0 → modaic-0.3.0}/src/modaic/databases/vector_database/benchmarks/baseline.py +0 -0
  25. {modaic-0.2.0 → modaic-0.3.0}/src/modaic/databases/vector_database/benchmarks/common.py +0 -0
  26. {modaic-0.2.0 → modaic-0.3.0}/src/modaic/databases/vector_database/benchmarks/fork.py +0 -0
  27. {modaic-0.2.0 → modaic-0.3.0}/src/modaic/databases/vector_database/benchmarks/threaded.py +0 -0
  28. {modaic-0.2.0 → modaic-0.3.0}/src/modaic/databases/vector_database/vector_database.py +0 -0
  29. {modaic-0.2.0 → modaic-0.3.0}/src/modaic/databases/vector_database/vendors/milvus.py +0 -0
  30. {modaic-0.2.0 → modaic-0.3.0}/src/modaic/databases/vector_database/vendors/mongodb.py +0 -0
  31. {modaic-0.2.0 → modaic-0.3.0}/src/modaic/databases/vector_database/vendors/pinecone.py +0 -0
  32. {modaic-0.2.0 → modaic-0.3.0}/src/modaic/databases/vector_database/vendors/qdrant.py +0 -0
  33. {modaic-0.2.0 → modaic-0.3.0}/src/modaic/datasets.py +0 -0
  34. {modaic-0.2.0 → modaic-0.3.0}/src/modaic/exceptions.py +0 -0
  35. {modaic-0.2.0 → modaic-0.3.0}/src/modaic/hub.py +0 -0
  36. {modaic-0.2.0 → modaic-0.3.0}/src/modaic/indexing.py +0 -0
  37. {modaic-0.2.0 → modaic-0.3.0}/src/modaic/observability.py +0 -0
  38. {modaic-0.2.0 → modaic-0.3.0}/src/modaic/query_language.py +0 -0
  39. {modaic-0.2.0 → modaic-0.3.0}/src/modaic/storage/__init__.py +0 -0
  40. {modaic-0.2.0 → modaic-0.3.0}/src/modaic/storage/file_store.py +0 -0
  41. {modaic-0.2.0 → modaic-0.3.0}/src/modaic/storage/pickle_store.py +0 -0
  42. {modaic-0.2.0 → modaic-0.3.0}/src/modaic/types.py +0 -0
  43. {modaic-0.2.0 → modaic-0.3.0}/src/modaic/utils.py +0 -0
  44. {modaic-0.2.0 → modaic-0.3.0}/src/modaic.egg-info/SOURCES.txt +0 -0
  45. {modaic-0.2.0 → modaic-0.3.0}/src/modaic.egg-info/dependency_links.txt +0 -0
  46. {modaic-0.2.0 → modaic-0.3.0}/src/modaic.egg-info/requires.txt +0 -0
  47. {modaic-0.2.0 → modaic-0.3.0}/src/modaic.egg-info/top_level.txt +0 -0
  48. {modaic-0.2.0 → modaic-0.3.0}/tests/test_observability.py +0 -0
  49. {modaic-0.2.0 → modaic-0.3.0}/tests/test_precompiled.py +0 -0
  50. {modaic-0.2.0 → modaic-0.3.0}/tests/test_query_language.py +0 -0
  51. {modaic-0.2.0 → modaic-0.3.0}/tests/test_types.py +0 -0
{modaic-0.2.0/src/modaic.egg-info → modaic-0.3.0}/PKG-INFO
@@ -1,7 +1,7 @@
 Metadata-Version: 2.4
 Name: modaic
-Version: 0.2.0
-Summary: Modular Agent Infrastructure Collective, a python framework for managing and sharing DSPy agents
+Version: 0.3.0
+Summary: Modular Agent Infrastructure Collection, a python framework for managing and sharing DSPy agents
 Author-email: Tyrin <tytodd@mit.edu>, Farouk <farouk@modaic.dev>
 License: MIT License

{modaic-0.2.0 → modaic-0.3.0}/pyproject.toml
@@ -1,7 +1,7 @@
 [project]
 name = "modaic"
-version = "0.2.0"
-description = "Modular Agent Infrastructure Collective, a python framework for managing and sharing DSPy agents"
+version = "0.3.0"
+description = "Modular Agent Infrastructure Collection, a python framework for managing and sharing DSPy agents"
 authors = [{ name = "Tyrin", email = "tytodd@mit.edu" }, {name = "Farouk", email = "farouk@modaic.dev"}]
 readme = "README.md"
 license = {file = "LICENSE"}
@@ -87,4 +87,5 @@ members = [
     "tests/artifacts/test_repos/nested_repo_2",
     "tests/artifacts/test_repos/nested_repo_3",
     "tests/artifacts/test_repos/multi_module_repo",
+    "tests/artifacts/test_repos/failing_repo",
 ]
{modaic-0.2.0 → modaic-0.3.0}/src/modaic/module_utils.py
@@ -243,6 +243,12 @@ def init_agent_repo(repo_path: str, with_code: bool = True) -> Path:
     if src_init.exists() and not dest_init.exists():
         shutil.copy2(src_init, dest_init)
 
+    for extra_file in get_extra_files():
+        if extra_file.is_dir():
+            shutil.copytree(extra_file, repo_dir / extra_file.relative_to(project_root))
+        else:
+            shutil.copy2(extra_file, repo_dir / extra_file.relative_to(project_root))
+
     return repo_dir

@@ -272,23 +278,52 @@ def get_ignored_files() -> list[Path]:
     pyproject_path = Path("pyproject.toml")
     doc = tomlk.parse(pyproject_path.read_text(encoding="utf-8"))
 
-    # Safely get [tool.modaic.ignore]
-    ignore_table = (
+    # Safely get [tool.modaic.exclude]
+    files = (
         doc.get("tool", {})  # [tool]
         .get("modaic", {})  # [tool.modaic]
-        .get("ignore")  # [tool.modaic.ignore]
+        .get("exclude", {})  # [tool.modaic.exclude]
+        .get("files", [])  # [tool.modaic.exclude] files = ["file1", "file2"]
     )
 
-    if ignore_table is None or "files" not in ignore_table:
-        return []
+    excluded: list[Path] = []
+    for entry in files:
+        entry = Path(entry)
+        if not entry.is_absolute():
+            entry = project_root / entry
+        if entry.exists():
+            excluded.append(entry)
+    return excluded
 
-    ignored: list[Path] = []
-    for entry in ignore_table["files"]:
-        try:
-            ignored.append((project_root / entry).resolve())
-        except OSError:
-            continue
-    return ignored
+
+def get_extra_files() -> list[Path]:
+    """Return a list of extra files that should be excluded from staging."""
+    project_root = resolve_project_root()
+    pyproject_path = Path("pyproject.toml")
+    doc = tomlk.parse(pyproject_path.read_text(encoding="utf-8"))
+    files = (
+        doc.get("tool", {})  # [tool]
+        .get("modaic", {})  # [tool.modaic]
+        .get("include", {})  # [tool.modaic.include]
+        .get("files", [])  # [tool.modaic.include] files = ["file1", "file2"]
+    )
+    included: list[Path] = []
+    for entry in files:
+        entry = Path(entry)
+        if entry.is_absolute():
+            try:
+                entry = entry.resolve()
+                entry.relative_to(project_root.resolve())
+            except ValueError:
+                warnings.warn(
+                    f"{entry} will not be bundled because it is not inside the current working directory", stacklevel=4
+                )
+        else:
+            entry = project_root / entry
+        if entry.resolve().exists():
+            included.append(entry)
+
+    return included
 
 
 def create_pyproject_toml(repo_dir: Path, package_name: str):
@@ -304,7 +339,7 @@ def create_pyproject_toml(repo_dir: Path, package_name: str):
     if "project" not in doc_old:
         raise KeyError("No [project] table in old TOML")
     doc_new["project"] = doc_old["project"]
-    doc_new["project"]["dependencies"] = get_filtered_dependencies(doc_old["project"]["dependencies"])
+    doc_new["project"]["dependencies"] = get_final_dependencies(doc_old["project"]["dependencies"])
     if "tool" in doc_old and "uv" in doc_old["tool"] and "sources" in doc_old["tool"]["uv"]:
         doc_new["tool"] = {"uv": {"sources": doc_old["tool"]["uv"]["sources"]}}
         warn_if_local(doc_new["tool"]["uv"]["sources"])
@@ -315,29 +350,32 @@ def create_pyproject_toml(repo_dir: Path, package_name: str):
         tomlk.dump(doc_new, fp)
 
 
-def get_filtered_dependencies(dependencies: list[str]) -> list[str]:
+def get_final_dependencies(dependencies: list[str]) -> list[str]:
     """
     Get the dependencies that should be included in the bundled agent.
+    Filters out "[tool.modaic.ignore] dependencies. Adds [tool.modaic.include] dependencies.
     """
     pyproject_path = Path("pyproject.toml")
     doc = tomlk.parse(pyproject_path.read_text(encoding="utf-8"))
 
-    # Safely get [tool.modaic.ignore]
-    ignore_table = (
+    # Safely get [tool.modaic.exclude]
+    exclude_deps = (
         doc.get("tool", {})  # [tool]
         .get("modaic", {})  # [tool.modaic]
-        .get("ignore", {})  # [tool.modaic.ignore]
+        .get("exclude", {})  # [tool.modaic.exclude]
+        .get("dependencies", [])  # [tool.modaic.exclude] dependencies = ["praw", "sagemaker"]
+    )
+    include_deps = (
+        doc.get("tool", {})  # [tool]
+        .get("modaic", {})  # [tool.modaic]
+        .get("include", {})  # [tool.modaic.include]
+        .get("dependencies", [])  # [tool.modaic.include] dependencies = ["praw", "sagemaker"]
     )
 
-    if "dependencies" not in ignore_table:
-        return dependencies
-
-    ignored_dependencies = ignore_table["dependencies"]
-    if not ignored_dependencies:
-        return dependencies
-    pattern = re.compile(r"\b(" + "|".join(map(re.escape, ignored_dependencies)) + r")\b")
-    filtered_dependencies = [pkg for pkg in dependencies if not pattern.search(pkg)]
-    return filtered_dependencies
+    if exclude_deps:
+        pattern = re.compile(r"\b(" + "|".join(map(re.escape, exclude_deps)) + r")\b")
+        dependencies = [pkg for pkg in dependencies if not pattern.search(pkg)]
+    return dependencies + include_deps
 
 
 def warn_if_local(sources: dict[str, dict]):
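
Taken together, the module_utils.py changes replace the old [tool.modaic.ignore] table with paired [tool.modaic.exclude] and [tool.modaic.include] tables covering both files and dependencies. The standalone sketch below is not part of the package; the sample project name, file names, and dependency pins are made up for illustration. It only reproduces the nested .get() lookups and the word-boundary regex filtering visible in get_final_dependencies above.

import re
import tomlkit as tomlk

# Hypothetical pyproject.toml content illustrating the new tables that the
# bundling code reads; the specific entries here are illustrative only.
SAMPLE_PYPROJECT = """
[project]
name = "my-agent"
dependencies = ["dspy", "modaic", "praw>=7.0", "sagemaker"]

[tool.modaic.exclude]
files = ["notebooks/scratch.ipynb"]
dependencies = ["praw", "sagemaker"]

[tool.modaic.include]
files = ["include_me_too.txt"]
dependencies = ["requests"]
"""

doc = tomlk.parse(SAMPLE_PYPROJECT)

# Same nested .get() chain used above to reach [tool.modaic.exclude] / [tool.modaic.include].
exclude_deps = doc.get("tool", {}).get("modaic", {}).get("exclude", {}).get("dependencies", [])
include_deps = doc.get("tool", {}).get("modaic", {}).get("include", {}).get("dependencies", [])
dependencies = list(doc["project"]["dependencies"])

# Word-boundary filter for excluded packages, then append the explicitly included ones.
if exclude_deps:
    pattern = re.compile(r"\b(" + "|".join(map(re.escape, exclude_deps)) + r")\b")
    dependencies = [pkg for pkg in dependencies if not pattern.search(pkg)]

print(dependencies + list(include_deps))  # ['dspy', 'modaic', 'requests']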
{modaic-0.2.0 → modaic-0.3.0}/src/modaic/precompiled.py
@@ -128,6 +128,7 @@ class PrecompiledConfig(BaseModel):
         return self.model_dump_json()
 
 
+# Use a metaclass to enforce super().__init__() with config
 class PrecompiledAgent(dspy.Module):
     """
     Bases: `dspy.Module`
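
The added comment above PrecompiledAgent is only a note of intent; this release does not ship such a metaclass. As a rough, generic illustration of the idea (a minimal sketch with hypothetical names such as RequireConfigInitMeta and BaseAgent, not modaic's API), a metaclass can wrap each subclass's __init__ and fail fast when super().__init__() was never called:

import functools


class RequireConfigInitMeta(type):
    """Hypothetical metaclass: error out if a subclass __init__ skips super().__init__(config=...)."""

    def __new__(mcls, name, bases, namespace):
        cls = super().__new__(mcls, name, bases, namespace)
        original_init = cls.__init__

        @functools.wraps(original_init)
        def checked_init(self, *args, **kwargs):
            original_init(self, *args, **kwargs)
            # The base __init__ is expected to store the config; if the attribute is
            # missing afterwards, super().__init__() was probably never called.
            if not hasattr(self, "config"):
                raise TypeError(f"{name}.__init__ must call super().__init__(config=...)")

        cls.__init__ = checked_init
        return cls


class BaseAgent(metaclass=RequireConfigInitMeta):
    def __init__(self, config=None):
        self.config = config


class GoodAgent(BaseAgent):
    def __init__(self, config=None):
        super().__init__(config=config)


class BadAgent(BaseAgent):
    def __init__(self, config=None):
        pass  # forgot super().__init__() -> TypeError when instantiated


GoodAgent(config={"lm": "openai/gpt-4o"})  # ok
try:
    BadAgent(config={})
except TypeError as exc:
    print(exc)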
{modaic-0.2.0 → modaic-0.3.0/src/modaic.egg-info}/PKG-INFO
@@ -1,7 +1,7 @@
 Metadata-Version: 2.4
 Name: modaic
-Version: 0.2.0
-Summary: Modular Agent Infrastructure Collective, a python framework for managing and sharing DSPy agents
+Version: 0.3.0
+Summary: Modular Agent Infrastructure Collection, a python framework for managing and sharing DSPy agents
 Author-email: Tyrin <tytodd@mit.edu>, Farouk <farouk@modaic.dev>
 License: MIT License

modaic-0.3.0/tests/test_auto.py (new file)
@@ -0,0 +1,341 @@
+import os
+import pathlib
+import shutil
+import subprocess
+from pathlib import Path
+from typing import Union
+
+import pytest
+import tomlkit as tomlk
+
+from modaic import AutoAgent, AutoConfig, AutoRetriever
+from modaic.hub import MODAIC_CACHE, get_user_info
+from tests.testing_utils import delete_agent_repo
+
+MODAIC_TOKEN = os.getenv("MODAIC_TOKEN")
+INSTALL_TEST_REPO_DEPS = os.getenv("INSTALL_TEST_REPO_DEPS", "True").lower() == "true"
+USERNAME = get_user_info(os.environ["MODAIC_TOKEN"])["login"]
+
+
+def get_cached_agent_dir(repo_name: str) -> Path:
+    return MODAIC_CACHE / "agents" / repo_name
+
+
+def clean_modaic_cache() -> None:
+    """Remove the MODAIC cache directory if it exists.
+
+    Params:
+        None
+
+    Returns:
+        None
+    """
+    shutil.rmtree(MODAIC_CACHE, ignore_errors=True)
+
+
+def prepare_repo(repo_name: str) -> None:
+    """Clean cache and ensure remote hub repo is deleted before test run.
+
+    Params:
+        repo_name (str): The name of the test repository in artifacts/test_repos.
+
+    Returns:
+        None
+    """
+    clean_modaic_cache()
+    if not MODAIC_TOKEN:
+        pytest.skip("Skipping because MODAIC_TOKEN is not set")
+    delete_agent_repo(username=USERNAME, agent_name=repo_name)
+
+
+def run_script(repo_name: str, run_path: str = "compile.py") -> None:
+    """Run the repository's compile script inside its own uv environment.
+
+    Params:
+        repo_name (str): The name of the test repository directory to compile.
+
+    Returns:
+        None
+    """
+    env = os.environ.copy()
+    env.update(
+        {
+            "MODAIC_CACHE": "../../temp/modaic_cache",
+        }
+    )
+    repo_dir = pathlib.Path("tests/artifacts/test_repos") / repo_name
+    if INSTALL_TEST_REPO_DEPS:
+        subprocess.run(["uv", "sync"], cwd=repo_dir, check=True, env=env)
+        # Ensure the root package is available in the subproject env
+    # Run as file
+    if run_path.endswith(".py"):
+        subprocess.run(["uv", "run", run_path, USERNAME], cwd=repo_dir, check=True, env=env)
+    # Run as module
+    else:
+        subprocess.run(["uv", "run", "-m", run_path, USERNAME], cwd=repo_dir, check=True, env=env)
+    # clean cache
+    shutil.rmtree("tests/artifacts/temp/modaic_cache", ignore_errors=True)
+
+
+# recursive dict/list of dicts/lists of strs representing a folder structure
+FolderLayout = dict[str, Union[str, "FolderLayout"]] | list[Union[str, "FolderLayout"]]
+
+
+def assert_expected_files(cache_dir: Path, extra_expected_files: FolderLayout):
+    default_expected = ["agent.json", "auto_classes.json", "config.json", "pyproject.toml", "README.md", ".git"]
+    if isinstance(extra_expected_files, list):
+        expected = extra_expected_files + default_expected
+    elif isinstance(extra_expected_files, dict):
+        expected = [extra_expected_files] + default_expected
+    else:
+        raise ValueError(f"Invalid folder layout: {extra_expected_files}")
+    assert_folder_layout(cache_dir, expected)
+
+
+def assert_top_level_names(dir: Path, expected_files: FolderLayout | str, root: bool = True):
+    if isinstance(expected_files, list):
+        expected_names = []
+        for obj in expected_files:
+            if isinstance(obj, str):
+                expected_names.append(obj)
+            elif isinstance(obj, dict):
+                expected_names.extend(list(obj.keys()))
+            else:
+                raise ValueError(f"Invalid folder layout: {expected_files}")
+    elif isinstance(expected_files, dict):
+        expected_names = list(expected_files.keys())
+    elif isinstance(expected_files, str):
+        expected_names = [expected_files]
+    else:
+        raise ValueError(f"Invalid folder layout: {expected_files}")
+    expected_names = expected_names if root else expected_names + ["__init__.py"]
+    missing = set(expected_names) - set(os.listdir(dir))
+    assert missing == set(), f"Missing files, in {dir}, {missing}"
+    unexpected = set(os.listdir(dir)) - set(expected_names)
+    assert unexpected.issubset(set(["__pycache__", "__init__.py"])), (
+        f"Unexpected files in {dir}, {unexpected - set(['__pycache__', '__init__.py'])}"
+    )
+
+
+def assert_folder_layout(
+    dir: Path, expected_files: FolderLayout | str, root: bool = True, assert_top_level: bool = True
+):
+    """
+    Asserts that the files in the directory match the expected folder structure.
+    Checking that only expected files are included. Will raise assertion error if unexpected files are included.
+    Args:
+        dir: The directory to assert the files in.
+        expected_files: The expected folder structure.
+
+    Raises:
+        Assertion error if expected file not found in path or if unexpected file found in path
+    """
+    # dir is a single file folder
+    if isinstance(expected_files, str):
+        assert_top_level_names(dir, expected_files, root)
+    # dir is a folder containg multiples files or subfolders
+    elif isinstance(expected_files, list):
+        assert_top_level_names(dir, expected_files, root)
+        for file in expected_files:
+            if isinstance(file, dict):
+                assert_folder_layout(dir, file, root=False, assert_top_level=False)
+            elif not isinstance(file, str):
+                raise ValueError(f"Invalid folder layout: {expected_files}")
+    # dir contains subfolders, however don't check top level because we don't know if this is the entirety of dir or a subset
+    elif isinstance(expected_files, dict):
+        for key, value in expected_files.items():
+            assert_folder_layout(dir / key, value, root=False)
+    else:
+        raise ValueError(f"Invalid folder layout: {expected_files}")
+
+
+def assert_dependencies(cache_dir: Path, extra_expected_dependencies: list[str]):
+    expected_dependencies = extra_expected_dependencies + ["dspy", "modaic"]
+
+    pyproject_path = cache_dir / "pyproject.toml"
+    doc = tomlk.parse(pyproject_path.read_text(encoding="utf-8"))
+    actual_dependencies = doc.get("project", {}).get("dependencies", [])
+
+    missing = set(expected_dependencies) - set(actual_dependencies)
+    assert missing == set(), f"Missing dependencies, {missing}"
+    unexpected = set(actual_dependencies) - set(expected_dependencies)
+    assert unexpected == set(), f"Unexpected dependencies, {unexpected}"
+
+
+def test_simple_repo() -> None:
+    prepare_repo("simple_repo")
+    run_script("simple_repo", run_path="agent.py")
+    clean_modaic_cache()
+    config = AutoConfig.from_precompiled(f"{USERNAME}/simple_repo")
+    assert config.lm == "openai/gpt-4o"
+    assert config.output_type == "str"
+    assert config.number == 1
+    cache_dir = get_cached_agent_dir(f"{USERNAME}/simple_repo")
+    assert_expected_files(cache_dir, ["agent.py"])
+    assert_dependencies(cache_dir, ["dspy", "modaic", "praw"])
+
+    clean_modaic_cache()
+    agent = AutoAgent.from_precompiled(f"{USERNAME}/simple_repo", runtime_param="Hello")
+    assert agent.config.lm == "openai/gpt-4o"
+    assert agent.config.output_type == "str"
+    assert agent.config.number == 1
+    assert agent.runtime_param == "Hello"
+    clean_modaic_cache()
+    agent = AutoAgent.from_precompiled(
+        f"{USERNAME}/simple_repo", runtime_param="Hello", config_options={"lm": "openai/gpt-4o-mini"}
+    )
+    assert agent.config.lm == "openai/gpt-4o-mini"
+    assert agent.config.output_type == "str"
+    assert agent.config.number == 1
+    assert agent.runtime_param == "Hello"
+    # TODO: test third party deps installation
+
+
+simple_repo_with_compile_extra_files = [{"agent": ["agent.py", "mod.py"]}, "compile.py", "include_me_too.txt"]
+
+
+def test_simple_repo_with_compile():
+    prepare_repo("simple_repo_with_compile")
+    run_script("simple_repo_with_compile", run_path="compile.py")
+    clean_modaic_cache()
+    config = AutoConfig.from_precompiled(f"{USERNAME}/simple_repo_with_compile")
+    assert config.lm == "openai/gpt-4o"
+    assert config.output_type == "str"
+    assert config.number == 1
+    cache_dir = get_cached_agent_dir(f"{USERNAME}/simple_repo_with_compile")
+    assert os.path.exists(cache_dir / "config.json")
+    assert os.path.exists(cache_dir / "agent.json")
+    assert os.path.exists(cache_dir / "auto_classes.json")
+    assert os.path.exists(cache_dir / "README.md")
+    assert os.path.exists(cache_dir / "agent" / "agent.py")
+    assert os.path.exists(cache_dir / "agent" / "mod.py")
+    assert os.path.exists(cache_dir / "pyproject.toml")
+    assert os.path.exists(cache_dir / "include_me_too.txt")
+    extra_files = [{"agent": ["agent.py", "mod.py"]}, "compile.py", "include_me_too.txt"]
+    assert_expected_files(cache_dir, extra_files)
+    assert_dependencies(cache_dir, ["dspy", "modaic"])
+    clean_modaic_cache()
+    agent = AutoAgent.from_precompiled(f"{USERNAME}/simple_repo_with_compile", runtime_param="Hello")
+    assert agent.config.lm == "openai/gpt-4o"
+    assert agent.config.output_type == "str"
+    assert agent.config.number == 1
+    assert agent.runtime_param == "Hello"
+    clean_modaic_cache()
+    agent = AutoAgent.from_precompiled(
+        f"{USERNAME}/simple_repo_with_compile", runtime_param="Hello", config_options={"lm": "openai/gpt-4o-mini"}
+    )
+    assert agent.config.lm == "openai/gpt-4o-mini"
+    assert agent.config.output_type == "str"
+    assert agent.config.number == 1
+    assert agent.runtime_param == "Hello"
+    # TODO: test third party deps installation
+
+
+nested_repo_extra_files = {
+    "agent": [
+        {
+            "tools": {"google": "google_search.py", "jira": "jira_api_tools.py"},
+            "utils": ["second_degree_import.py", "used.py"],
+        },
+        "agent.py",
+        "compile.py",
+        "config.py",
+        "retriever.py",
+    ]
+}
+nested_repo_2_extra_files = [
+    {
+        "agent": [
+            {
+                "tools": {"google": "google_search.py", "jira": "jira_api_tools.py"},
+                "utils": [
+                    "second_degree_import.py",
+                    "unused_but_included.py",
+                    "used.py",
+                ],
+            },
+            "agent.py",
+            "config.py",
+            "retriever.py",
+        ]
+    },
+    {"unused_but_included_folder": [".env", "folder_content1.py", "folder_content2.txt"]},
+    "compile.py",
+]
+nested_repo_3_extra_files = {
+    "agent": [
+        {
+            "tools": [{"google": "google_search.py", "jira": "jira_api_tools.py"}, "unused_but_included2.py"],
+            "utils": ["second_degree_import.py", "unused_but_included.py", "used.py"],
+        },
+        "agent.py",
+        "config.py",
+        "retriever.py",
+    ],
+}
+
+
+@pytest.mark.parametrize(
+    "repo_name, run_path, extra_expected_files, extra_expected_dependencies",
+    [
+        (
+            "nested_repo",
+            "agent.compile",
+            nested_repo_extra_files,
+            [],
+        ),
+        (
+            "nested_repo_2",
+            "compile.py",
+            nested_repo_2_extra_files,
+            ["dspy", "modaic", "praw", "sagemaker"],
+        ),
+        (
+            "nested_repo_3",
+            "agent.agent",
+            nested_repo_3_extra_files,
+            ["dspy", "modaic"],
+        ),
+    ],
+)
+def test_nested_repo(
+    repo_name: str, run_path: str, extra_expected_files: FolderLayout, extra_expected_dependencies: list[str]
+):
+    prepare_repo(repo_name)
+    run_script(repo_name, run_path=run_path)
+    clean_modaic_cache()
+    config = AutoConfig.from_precompiled(f"{USERNAME}/{repo_name}", clients={"get_replaced": "noob"})
+    assert config.num_fetch == 1
+    assert config.lm == "openai/gpt-4o-mini"
+    assert config.embedder == "openai/text-embedding-3-small"
+    assert config.clients == {"get_replaced": "noob"}
+
+    cache_dir = get_cached_agent_dir(f"{USERNAME}/{repo_name}")
+    assert_expected_files(cache_dir, extra_expected_files)
+    assert_dependencies(cache_dir, extra_expected_dependencies)
+
+    clean_modaic_cache()
+    retriever = AutoRetriever.from_precompiled(f"{USERNAME}/{repo_name}", needed_param="hello")
+    agent = AutoAgent.from_precompiled(f"{USERNAME}/{repo_name}", retriever=retriever)
+    assert agent.config.num_fetch == 1
+    assert agent.config.lm == "openai/gpt-4o-mini"
+    assert agent.config.embedder == "openai/text-embedding-3-small"
+    assert agent.config.clients == {"mit": ["csail", "mit-media-lab"], "berkeley": ["bear"]}
+    assert retriever.needed_param == "hello"
+    assert agent.forward("my query") == "Retrieved 1 results for my query"
+    clean_modaic_cache()
+    config_options = {"lm": "openai/gpt-4o"}
+    retriever = AutoRetriever.from_precompiled(
+        f"{USERNAME}/{repo_name}", needed_param="hello", config_options=config_options
+    )
+    agent = AutoAgent.from_precompiled(f"{USERNAME}/{repo_name}", retriever=retriever, config_options=config_options)
+    assert agent.config.num_fetch == 1
+    assert agent.config.lm == "openai/gpt-4o"
+    assert agent.config.embedder == "openai/text-embedding-3-small"
+    assert agent.config.clients == {"mit": ["csail", "mit-media-lab"], "berkeley": ["bear"]}
+    assert retriever.needed_param == "hello"
+    assert agent.forward("my query") == "Retrieved 1 results for my query"
+
+
+def test_auth():
+    pass
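
In the new tests, a FolderLayout value describes an expected directory tree: a dict maps a folder name to its contents, a list enumerates the entries of one folder, and a bare string names a single file. The standalone sketch below (a hypothetical build_layout helper, not part of the package) materializes such a layout on disk to make the convention concrete:

import tempfile
from pathlib import Path
from typing import Union

FolderLayout = Union[dict, list, str]  # simplified stand-in for the recursive alias above


def build_layout(root: Path, layout: FolderLayout) -> None:
    """Hypothetical helper: create the files and folders that a FolderLayout describes."""
    if isinstance(layout, str):
        (root / layout).touch()  # a single file
    elif isinstance(layout, list):
        for item in layout:
            build_layout(root, item)  # entries inside the same folder
    elif isinstance(layout, dict):
        for name, contents in layout.items():
            (root / name).mkdir(parents=True, exist_ok=True)
            build_layout(root / name, contents)  # a subfolder and its contents
    else:
        raise ValueError(f"Invalid folder layout: {layout}")


# Mirrors the shape of nested_repo_extra_files used in the tests above.
layout = {"agent": [{"tools": {"google": "google_search.py"}}, "agent.py", "config.py"]}
root = Path(tempfile.mkdtemp())
build_layout(root, layout)
# Produces: agent/tools/google/google_search.py, agent/agent.py, agent/config.py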
modaic-0.2.0/tests/test_auto.py (deleted)
@@ -1,180 +0,0 @@
-import os
-import pathlib
-import shutil
-import subprocess
-from pathlib import Path
-
-import pytest
-
-from modaic import AutoAgent, AutoConfig, AutoRetriever
-from modaic.hub import MODAIC_CACHE, get_user_info
-from tests.testing_utils import delete_agent_repo
-
-MODAIC_TOKEN = os.getenv("MODAIC_TOKEN")
-INSTALL_TEST_REPO_DEPS = os.getenv("INSTALL_TEST_REPO_DEPS", "True").lower() == "true"
-USERNAME = get_user_info(os.environ["MODAIC_TOKEN"])["login"]
-
-
-def get_cached_agent_dir(repo_name: str) -> Path:
-    return MODAIC_CACHE / "agents" / repo_name
-
-
-def clean_modaic_cache() -> None:
-    """Remove the MODAIC cache directory if it exists.
-
-    Params:
-        None
-
-    Returns:
-        None
-    """
-    shutil.rmtree(MODAIC_CACHE, ignore_errors=True)
-
-
-def prepare_repo(repo_name: str) -> None:
-    """Clean cache and ensure remote hub repo is deleted before test run.
-
-    Params:
-        repo_name (str): The name of the test repository in artifacts/test_repos.
-
-    Returns:
-        None
-    """
-    clean_modaic_cache()
-    if not MODAIC_TOKEN:
-        pytest.skip("Skipping because MODAIC_TOKEN is not set")
-    delete_agent_repo(username=USERNAME, agent_name=repo_name)
-
-
-def run_script(repo_name: str, run_path: str = "compile.py", module_mode: bool = False) -> None:
-    """Run the repository's compile script inside its own uv environment.
-
-    Params:
-        repo_name (str): The name of the test repository directory to compile.
-
-    Returns:
-        None
-    """
-    env = os.environ.copy()
-    env.update(
-        {
-            "MODAIC_CACHE": "../../temp/modaic_cache",
-        }
-    )
-    repo_dir = pathlib.Path("tests/artifacts/test_repos") / repo_name
-    if INSTALL_TEST_REPO_DEPS:
-        subprocess.run(["uv", "sync"], cwd=repo_dir, check=True, env=env)
-        # Ensure the root package is available in the subproject env
-    if module_mode:
-        subprocess.run(["uv", "run", "-m", run_path, USERNAME], cwd=repo_dir, check=True, env=env)
-    else:
-        subprocess.run(["uv", "run", run_path, USERNAME], cwd=repo_dir, check=True, env=env)
-    # clean cache
-    shutil.rmtree("tests/artifacts/temp/modaic_cache", ignore_errors=True)
-
-
-def test_simple_repo() -> None:
-    prepare_repo("simple_repo")
-    run_script("simple_repo", run_path="agent.py")
-    clean_modaic_cache()
-    config = AutoConfig.from_precompiled(f"{USERNAME}/simple_repo")
-    assert config.lm == "openai/gpt-4o"
-    assert config.output_type == "str"
-    assert config.number == 1
-    cache_dir = get_cached_agent_dir(f"{USERNAME}/simple_repo")
-    assert os.path.exists(cache_dir / "config.json")
-    assert os.path.exists(cache_dir / "agent.json")
-    assert os.path.exists(cache_dir / "auto_classes.json")
-    assert os.path.exists(cache_dir / "README.md")
-    assert os.path.exists(cache_dir / "agent.py")
-    assert os.path.exists(cache_dir / "pyproject.toml")
-    clean_modaic_cache()
-    agent = AutoAgent.from_precompiled(f"{USERNAME}/simple_repo", runtime_param="Hello")
-    assert agent.config.lm == "openai/gpt-4o"
-    assert agent.config.output_type == "str"
-    assert agent.config.number == 1
-    assert agent.runtime_param == "Hello"
-    clean_modaic_cache()
-    agent = AutoAgent.from_precompiled(
-        f"{USERNAME}/simple_repo", runtime_param="Hello", config_options={"lm": "openai/gpt-4o-mini"}
-    )
-    assert agent.config.lm == "openai/gpt-4o-mini"
-    assert agent.config.output_type == "str"
-    assert agent.config.number == 1
-    assert agent.runtime_param == "Hello"
-    # TODO: test third party deps installation
-
-
-def test_simple_repo_with_compile():
-    prepare_repo("simple_repo_with_compile")
-    run_script("simple_repo_with_compile", run_path="compile.py")
-    clean_modaic_cache()
-    config = AutoConfig.from_precompiled(f"{USERNAME}/simple_repo_with_compile")
-    assert config.lm == "openai/gpt-4o"
-    assert config.output_type == "str"
-    assert config.number == 1
-    cache_dir = get_cached_agent_dir(f"{USERNAME}/simple_repo_with_compile")
-    assert os.path.exists(cache_dir / "config.json")
-    assert os.path.exists(cache_dir / "agent.json")
-    assert os.path.exists(cache_dir / "auto_classes.json")
-    assert os.path.exists(cache_dir / "README.md")
-    assert os.path.exists(cache_dir / "agent" / "agent.py")
-    assert os.path.exists(cache_dir / "agent" / "mod.py")
-    assert os.path.exists(cache_dir / "pyproject.toml")
-    clean_modaic_cache()
-    agent = AutoAgent.from_precompiled(f"{USERNAME}/simple_repo_with_compile", runtime_param="Hello")
-    assert agent.config.lm == "openai/gpt-4o"
-    assert agent.config.output_type == "str"
-    assert agent.config.number == 1
-    assert agent.runtime_param == "Hello"
-    clean_modaic_cache()
-    agent = AutoAgent.from_precompiled(
-        f"{USERNAME}/simple_repo_with_compile", runtime_param="Hello", config_options={"lm": "openai/gpt-4o-mini"}
-    )
-    assert agent.config.lm == "openai/gpt-4o-mini"
-    assert agent.config.output_type == "str"
-    assert agent.config.number == 1
-    assert agent.runtime_param == "Hello"
-    # TODO: test third party deps installation
-
-
-@pytest.mark.parametrize("repo_name", ["nested_repo", "nested_repo_2", "nested_repo_3"])
-def test_nested_repo(repo_name: str):
-    prepare_repo(repo_name)
-    if repo_name == "nested_repo":
-        run_script(repo_name, run_path="agent.compile", module_mode=True)
-    elif repo_name == "nested_repo_2":
-        run_script(repo_name, run_path="compile.py")
-    else:
-        run_script(repo_name, run_path="agent.agent", module_mode=True)
-    clean_modaic_cache()
-    config = AutoConfig.from_precompiled(f"{USERNAME}/{repo_name}", clients={"get_replaced": "noob"})
-    assert config.num_fetch == 1
-    assert config.lm == "openai/gpt-4o-mini"
-    assert config.embedder == "openai/text-embedding-3-small"
-    assert config.clients == {"get_replaced": "noob"}
-    clean_modaic_cache()
-    retriever = AutoRetriever.from_precompiled(f"{USERNAME}/{repo_name}", needed_param="hello")
-    agent = AutoAgent.from_precompiled(f"{USERNAME}/{repo_name}", retriever=retriever)
-    assert agent.config.num_fetch == 1
-    assert agent.config.lm == "openai/gpt-4o-mini"
-    assert agent.config.embedder == "openai/text-embedding-3-small"
-    assert agent.config.clients == {"mit": ["csail", "mit-media-lab"], "berkeley": ["bear"]}
-    assert retriever.needed_param == "hello"
-    assert agent.forward("my query") == "Retrieved 1 results for my query"
-    clean_modaic_cache()
-    config_options = {"lm": "openai/gpt-4o"}
-    retriever = AutoRetriever.from_precompiled(
-        f"{USERNAME}/{repo_name}", needed_param="hello", config_options=config_options
-    )
-    agent = AutoAgent.from_precompiled(f"{USERNAME}/{repo_name}", retriever=retriever, config_options=config_options)
-    assert agent.config.num_fetch == 1
-    assert agent.config.lm == "openai/gpt-4o"
-    assert agent.config.embedder == "openai/text-embedding-3-small"
-    assert agent.config.clients == {"mit": ["csail", "mit-media-lab"], "berkeley": ["bear"]}
-    assert retriever.needed_param == "hello"
-    assert agent.forward("my query") == "Retrieved 1 results for my query"
-
-
-def test_auth():
-    pass