acontext 0.0.18.tar.gz → 0.1.0.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (41)
  1. {acontext-0.0.18 → acontext-0.1.0}/PKG-INFO +2 -1
  2. {acontext-0.0.18 → acontext-0.1.0}/pyproject.toml +2 -1
  3. {acontext-0.0.18 → acontext-0.1.0}/src/acontext/agent/__init__.py +0 -1
  4. {acontext-0.0.18 → acontext-0.1.0}/src/acontext/agent/base.py +13 -0
  5. {acontext-0.0.18 → acontext-0.1.0}/src/acontext/agent/disk.py +210 -47
  6. {acontext-0.0.18 → acontext-0.1.0}/src/acontext/agent/skill.py +152 -0
  7. {acontext-0.0.18 → acontext-0.1.0}/src/acontext/resources/async_disks.py +104 -20
  8. {acontext-0.0.18 → acontext-0.1.0}/src/acontext/resources/async_sessions.py +20 -15
  9. {acontext-0.0.18 → acontext-0.1.0}/README.md +0 -0
  10. {acontext-0.0.18 → acontext-0.1.0}/src/acontext/__init__.py +0 -0
  11. {acontext-0.0.18 → acontext-0.1.0}/src/acontext/_constants.py +0 -0
  12. {acontext-0.0.18 → acontext-0.1.0}/src/acontext/_utils.py +0 -0
  13. {acontext-0.0.18 → acontext-0.1.0}/src/acontext/async_client.py +0 -0
  14. {acontext-0.0.18 → acontext-0.1.0}/src/acontext/client.py +0 -0
  15. {acontext-0.0.18 → acontext-0.1.0}/src/acontext/client_types.py +0 -0
  16. {acontext-0.0.18 → acontext-0.1.0}/src/acontext/errors.py +0 -0
  17. {acontext-0.0.18 → acontext-0.1.0}/src/acontext/messages.py +0 -0
  18. {acontext-0.0.18 → acontext-0.1.0}/src/acontext/py.typed +0 -0
  19. {acontext-0.0.18 → acontext-0.1.0}/src/acontext/resources/__init__.py +0 -0
  20. {acontext-0.0.18 → acontext-0.1.0}/src/acontext/resources/async_blocks.py +0 -0
  21. {acontext-0.0.18 → acontext-0.1.0}/src/acontext/resources/async_skills.py +0 -0
  22. {acontext-0.0.18 → acontext-0.1.0}/src/acontext/resources/async_spaces.py +0 -0
  23. {acontext-0.0.18 → acontext-0.1.0}/src/acontext/resources/async_tools.py +0 -0
  24. {acontext-0.0.18 → acontext-0.1.0}/src/acontext/resources/async_users.py +0 -0
  25. {acontext-0.0.18 → acontext-0.1.0}/src/acontext/resources/blocks.py +0 -0
  26. {acontext-0.0.18 → acontext-0.1.0}/src/acontext/resources/disks.py +0 -0
  27. {acontext-0.0.18 → acontext-0.1.0}/src/acontext/resources/sessions.py +0 -0
  28. {acontext-0.0.18 → acontext-0.1.0}/src/acontext/resources/skills.py +0 -0
  29. {acontext-0.0.18 → acontext-0.1.0}/src/acontext/resources/spaces.py +0 -0
  30. {acontext-0.0.18 → acontext-0.1.0}/src/acontext/resources/tools.py +0 -0
  31. {acontext-0.0.18 → acontext-0.1.0}/src/acontext/resources/users.py +0 -0
  32. {acontext-0.0.18 → acontext-0.1.0}/src/acontext/types/__init__.py +0 -0
  33. {acontext-0.0.18 → acontext-0.1.0}/src/acontext/types/block.py +0 -0
  34. {acontext-0.0.18 → acontext-0.1.0}/src/acontext/types/common.py +0 -0
  35. {acontext-0.0.18 → acontext-0.1.0}/src/acontext/types/disk.py +0 -0
  36. {acontext-0.0.18 → acontext-0.1.0}/src/acontext/types/session.py +0 -0
  37. {acontext-0.0.18 → acontext-0.1.0}/src/acontext/types/skill.py +0 -0
  38. {acontext-0.0.18 → acontext-0.1.0}/src/acontext/types/space.py +0 -0
  39. {acontext-0.0.18 → acontext-0.1.0}/src/acontext/types/tool.py +0 -0
  40. {acontext-0.0.18 → acontext-0.1.0}/src/acontext/types/user.py +0 -0
  41. {acontext-0.0.18 → acontext-0.1.0}/src/acontext/uploads.py +0 -0
{acontext-0.0.18 → acontext-0.1.0}/PKG-INFO
@@ -1,12 +1,13 @@
 Metadata-Version: 2.3
 Name: acontext
-Version: 0.0.18
+Version: 0.1.0
 Summary: Python SDK for the Acontext API
 Keywords: acontext,sdk,client,api
 Requires-Dist: httpx>=0.28.1
 Requires-Dist: openai>=2.6.1
 Requires-Dist: anthropic>=0.72.0
 Requires-Dist: pydantic>=2.12.3
+Requires-Dist: urllib3>=2.6.3
 Requires-Python: >=3.10
 Project-URL: Homepage, https://github.com/memodb-io/Acontext
 Project-URL: Issues, https://github.com/memodb-io/Acontext/issues
{acontext-0.0.18 → acontext-0.1.0}/pyproject.toml
@@ -1,6 +1,6 @@
 [project]
 name = "acontext"
-version = "0.0.18"
+version = "0.1.0"
 description = "Python SDK for the Acontext API"
 readme = "README.md"
 requires-python = ">=3.10"
@@ -9,6 +9,7 @@ dependencies = [
     "openai>=2.6.1",
     "anthropic>=0.72.0",
     "pydantic>=2.12.3",
+    "urllib3>=2.6.3",
 ]
 keywords = ["acontext", "sdk", "client", "api"]
 
{acontext-0.0.18 → acontext-0.1.0}/src/acontext/agent/__init__.py
@@ -7,4 +7,3 @@ __all__ = [
     "DISK_TOOLS",
     "SKILL_TOOLS",
 ]
-
{acontext-0.0.18 → acontext-0.1.0}/src/acontext/agent/base.py
@@ -33,6 +33,9 @@ class BaseTool(BaseConverter):
     def execute(self, ctx: BaseContext, llm_arguments: dict) -> str:
         raise NotImplementedError
 
+    async def async_execute(self, ctx: BaseContext, llm_arguments: dict) -> str:
+        raise NotImplementedError
+
     def to_openai_tool_schema(self) -> dict:
         return {
             "type": "function",
@@ -90,6 +93,13 @@ class BaseToolPool(BaseConverter):
         r = tool.execute(ctx, llm_arguments)
         return r.strip()
 
+    async def async_execute_tool(
+        self, ctx: BaseContext, tool_name: str, llm_arguments: dict
+    ) -> str:
+        tool = self.tools[tool_name]
+        r = await tool.async_execute(ctx, llm_arguments)
+        return r.strip()
+
     def tool_exists(self, tool_name: str) -> bool:
         return tool_name in self.tools
 
@@ -104,3 +114,6 @@ class BaseToolPool(BaseConverter):
 
     def format_context(self, *args, **kwargs) -> BaseContext:
         raise NotImplementedError
+
+    async def async_format_context(self, *args, **kwargs) -> BaseContext:
+        raise NotImplementedError
{acontext-0.0.18 → acontext-0.1.0}/src/acontext/agent/disk.py
@@ -2,6 +2,7 @@ from dataclasses import dataclass
 
 from .base import BaseContext, BaseTool, BaseToolPool
 from ..client import AcontextClient
+from ..async_client import AcontextAsyncClient
 from ..uploads import FileUpload
 
 
@@ -11,6 +12,12 @@ class DiskContext(BaseContext):
     disk_id: str
 
 
+@dataclass
+class AsyncDiskContext(BaseContext):
+    client: AcontextAsyncClient
+    disk_id: str
+
+
 def _normalize_path(path: str | None) -> str:
     """Normalize a file path to ensure it starts with '/'."""
     if not path:
@@ -73,6 +80,26 @@ class WriteFileTool(BaseTool):
         )
         return f"File '{artifact.filename}' written successfully to '{artifact.path}'"
 
+    async def async_execute(self, ctx: AsyncDiskContext, llm_arguments: dict) -> str:
+        """Write text content to a file (async)."""
+        filename = llm_arguments.get("filename")
+        content = llm_arguments.get("content")
+        file_path = llm_arguments.get("file_path")
+
+        if not filename:
+            raise ValueError("filename is required")
+        if not content:
+            raise ValueError("content is required")
+
+        normalized_path = _normalize_path(file_path)
+        payload = FileUpload(filename=filename, content=content.encode("utf-8"))
+        artifact = await ctx.client.disks.artifacts.upsert(
+            ctx.disk_id,
+            file=payload,
+            file_path=normalized_path,
+        )
+        return f"File '{artifact.filename}' written successfully to '{artifact.path}'"
+
 
 class ReadFileTool(BaseTool):
     """Tool for reading a text file from the Acontext disk."""
@@ -138,6 +165,34 @@ class ReadFileTool(BaseTool):
         preview = "\n".join(lines[line_start:line_end])
         return f"[{normalized_path}{filename} - showing L{line_start}-{line_end} of {len(lines)} lines]\n{preview}"
 
+    async def async_execute(self, ctx: AsyncDiskContext, llm_arguments: dict) -> str:
+        """Read a text file and return its content preview (async)."""
+        filename = llm_arguments.get("filename")
+        file_path = llm_arguments.get("file_path")
+        line_offset = llm_arguments.get("line_offset", 0)
+        line_limit = llm_arguments.get("line_limit", 100)
+
+        if not filename:
+            raise ValueError("filename is required")
+
+        normalized_path = _normalize_path(file_path)
+        result = await ctx.client.disks.artifacts.get(
+            ctx.disk_id,
+            file_path=normalized_path,
+            filename=filename,
+            with_content=True,
+        )
+
+        if not result.content:
+            raise RuntimeError("Failed to read file: server did not return content.")
+
+        content_str = result.content.raw
+        lines = content_str.split("\n")
+        line_start = min(line_offset, len(lines) - 1)
+        line_end = min(line_start + line_limit, len(lines))
+        preview = "\n".join(lines[line_start:line_end])
+        return f"[{normalized_path}{filename} - showing L{line_start}-{line_end} of {len(lines)} lines]\n{preview}"
+
 
 class ReplaceStringTool(BaseTool):
     """Tool for replacing an old string with a new string in a file on the Acontext disk."""
@@ -221,6 +276,52 @@ class ReplaceStringTool(BaseTool):
 
         return f"Found {replacement_count} old_string in {normalized_path}{filename} and replaced it."
 
+    async def async_execute(self, ctx: AsyncDiskContext, llm_arguments: dict) -> str:
+        """Replace an old string with a new string in a file (async)."""
+        filename = llm_arguments.get("filename")
+        file_path = llm_arguments.get("file_path")
+        old_string = llm_arguments.get("old_string")
+        new_string = llm_arguments.get("new_string")
+
+        if not filename:
+            raise ValueError("filename is required")
+        if old_string is None:
+            raise ValueError("old_string is required")
+        if new_string is None:
+            raise ValueError("new_string is required")
+
+        normalized_path = _normalize_path(file_path)
+
+        # Read the file content
+        result = await ctx.client.disks.artifacts.get(
+            ctx.disk_id,
+            file_path=normalized_path,
+            filename=filename,
+            with_content=True,
+        )
+
+        if not result.content:
+            raise RuntimeError("Failed to read file: server did not return content.")
+
+        content_str = result.content.raw
+
+        # Perform the replacement
+        if old_string not in content_str:
+            return f"String '{old_string}' not found in file '{filename}'"
+
+        updated_content = content_str.replace(old_string, new_string)
+        replacement_count = content_str.count(old_string)
+
+        # Write the updated content back
+        payload = FileUpload(filename=filename, content=updated_content.encode("utf-8"))
+        await ctx.client.disks.artifacts.upsert(
+            ctx.disk_id,
+            file=payload,
+            file_path=normalized_path,
+        )
+
+        return f"Found {replacement_count} old_string in {normalized_path}{filename} and replaced it."
+
 
 class ListTool(BaseTool):
     """Tool for listing files in a directory on the Acontext disk."""
@@ -271,6 +372,31 @@ Directories:
 Files:
 {file_sect}"""
 
+    async def async_execute(self, ctx: AsyncDiskContext, llm_arguments: dict) -> str:
+        """List all files in a specified path (async)."""
+        file_path = llm_arguments.get("file_path")
+        normalized_path = _normalize_path(file_path)
+
+        result = await ctx.client.disks.artifacts.list(
+            ctx.disk_id,
+            path=normalized_path,
+        )
+
+        artifacts_list = [artifact.filename for artifact in result.artifacts]
+
+        if not artifacts_list and not result.directories:
+            return f"No files or directories found in '{normalized_path}'"
+
+        file_sect = "\n".join(artifacts_list) or "[NO FILE]"
+        dir_sect = (
+            "\n".join([d.rstrip("/") + "/" for d in result.directories]) or "[NO DIR]"
+        )
+        return f"""[Listing in {normalized_path}]
+Directories:
+{dir_sect}
+Files:
+{file_sect}"""
+
 
 class DownloadFileTool(BaseTool):
     """Tool for getting a public download URL for a file on the Acontext disk."""
@@ -327,6 +453,29 @@ class DownloadFileTool(BaseTool):
 
         return f"Public download URL for '{normalized_path}{filename}' (expires in {expire}s):\n{result.public_url}"
 
+    async def async_execute(self, ctx: AsyncDiskContext, llm_arguments: dict) -> str:
+        """Get a public download URL for a file (async)."""
+        filename = llm_arguments.get("filename")
+        file_path = llm_arguments.get("file_path")
+        expire = llm_arguments.get("expire", 3600)
+
+        if not filename:
+            raise ValueError("filename is required")
+
+        normalized_path = _normalize_path(file_path)
+        result = await ctx.client.disks.artifacts.get(
+            ctx.disk_id,
+            file_path=normalized_path,
+            filename=filename,
+            with_public_url=True,
+            expire=expire,
+        )
+
+        if not result.public_url:
+            raise RuntimeError("Failed to get public URL: server did not return a URL.")
+
+        return f"Public download URL for '{normalized_path}{filename}' (expires in {expire}s):\n{result.public_url}"
+
 
 
 class GrepArtifactsTool(BaseTool):
@@ -377,7 +526,34 @@ class GrepArtifactsTool(BaseTool):
         for artifact in results:
             matches.append(f"{artifact.path}{artifact.filename}")
 
-        return f"Found {len(matches)} file(s) matching '{query}':\n" + "\n".join(matches)
+        return f"Found {len(matches)} file(s) matching '{query}':\n" + "\n".join(
+            matches
+        )
+
+    async def async_execute(self, ctx: AsyncDiskContext, llm_arguments: dict) -> str:
+        """Search artifact content using regex pattern (async)."""
+        query = llm_arguments.get("query")
+        limit = llm_arguments.get("limit", 100)
+
+        if not query:
+            raise ValueError("query is required")
+
+        results = await ctx.client.disks.artifacts.grep_artifacts(
+            ctx.disk_id,
+            query=query,
+            limit=limit,
+        )
+
+        if not results:
+            return f"No matches found for pattern '{query}'"
+
+        matches = []
+        for artifact in results:
+            matches.append(f"{artifact.path}{artifact.filename}")
+
+        return f"Found {len(matches)} file(s) matching '{query}':\n" + "\n".join(
+            matches
+        )
 
 
 class GlobArtifactsTool(BaseTool):
@@ -429,7 +605,34 @@ class GlobArtifactsTool(BaseTool):
         for artifact in results:
             matches.append(f"{artifact.path}{artifact.filename}")
 
-        return f"Found {len(matches)} file(s) matching '{query}':\n" + "\n".join(matches)
+        return f"Found {len(matches)} file(s) matching '{query}':\n" + "\n".join(
+            matches
+        )
+
+    async def async_execute(self, ctx: AsyncDiskContext, llm_arguments: dict) -> str:
+        """Search artifact paths using glob pattern (async)."""
+        query = llm_arguments.get("query")
+        limit = llm_arguments.get("limit", 100)
+
+        if not query:
+            raise ValueError("query is required")
+
+        results = await ctx.client.disks.artifacts.glob_artifacts(
+            ctx.disk_id,
+            query=query,
+            limit=limit,
+        )
+
+        if not results:
+            return f"No files found matching pattern '{query}'"
+
+        matches = []
+        for artifact in results:
+            matches.append(f"{artifact.path}{artifact.filename}")
+
+        return f"Found {len(matches)} file(s) matching '{query}':\n" + "\n".join(
+            matches
+        )
 
 
 class DiskToolPool(BaseToolPool):
@@ -438,6 +641,11 @@ class DiskToolPool(BaseToolPool):
     def format_context(self, client: AcontextClient, disk_id: str) -> DiskContext:
        return DiskContext(client=client, disk_id=disk_id)
 
+    async def async_format_context(
+        self, client: AcontextAsyncClient, disk_id: str
+    ) -> AsyncDiskContext:
+        return AsyncDiskContext(client=client, disk_id=disk_id)
+
 
 DISK_TOOLS = DiskToolPool()
 DISK_TOOLS.add_tool(WriteFileTool())
@@ -447,48 +655,3 @@ DISK_TOOLS.add_tool(ListTool())
 DISK_TOOLS.add_tool(GrepArtifactsTool())
 DISK_TOOLS.add_tool(GlobArtifactsTool())
 DISK_TOOLS.add_tool(DownloadFileTool())
-
-
-if __name__ == "__main__":
-    client = AcontextClient(
-        api_key="sk-ac-your-root-api-bearer-token",
-        base_url="http://localhost:8029/api/v1",
-    )
-    print(client.ping())
-    new_disk = client.disks.create()
-
-    ctx = DISK_TOOLS.format_context(client, new_disk.id)
-    r = DISK_TOOLS.execute_tool(
-        ctx,
-        "write_file",
-        {"filename": "test.txt", "file_path": "/try/", "content": "Hello, world!"},
-    )
-    print(r)
-    r = DISK_TOOLS.execute_tool(
-        ctx, "read_file", {"filename": "test.txt", "file_path": "/try/"}
-    )
-    print(r)
-    r = DISK_TOOLS.execute_tool(ctx, "list_artifacts", {"file_path": "/"})
-    print(r)
-
-    r = DISK_TOOLS.execute_tool(
-        ctx,
-        "replace_string",
-        {
-            "filename": "test.txt",
-            "file_path": "/try/",
-            "old_string": "Hello",
-            "new_string": "Hi",
-        },
-    )
-    print(r)
-    r = DISK_TOOLS.execute_tool(
-        ctx, "read_file", {"filename": "test.txt", "file_path": "/try/"}
-    )
-    print(r)
-    r = DISK_TOOLS.execute_tool(
-        ctx,
-        "download_file",
-        {"filename": "test.txt", "file_path": "/try/", "expire": 300},
-    )
-    print(r)
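
Note: 0.1.0 removes the inline `__main__` demo above; the equivalent flow is now available through the async entry points added in this diff (`async_format_context`, `async_execute_tool`). A minimal sketch follows, assuming `AcontextAsyncClient` exposes the same `disks` resource as the sync client; the API key and base URL are placeholders carried over from the removed demo.

```python
import asyncio

from acontext.async_client import AcontextAsyncClient
from acontext.agent import DISK_TOOLS


async def main() -> None:
    # Placeholder credentials, mirroring the removed sync demo.
    client = AcontextAsyncClient(
        api_key="sk-ac-your-root-api-bearer-token",
        base_url="http://localhost:8029/api/v1",
    )
    new_disk = await client.disks.create()

    # New in 0.1.0: build an async context and run disk tools via the async pool methods.
    ctx = await DISK_TOOLS.async_format_context(client, new_disk.id)
    r = await DISK_TOOLS.async_execute_tool(
        ctx,
        "write_file",
        {"filename": "test.txt", "file_path": "/try/", "content": "Hello, world!"},
    )
    print(r)
    r = await DISK_TOOLS.async_execute_tool(
        ctx, "read_file", {"filename": "test.txt", "file_path": "/try/"}
    )
    print(r)


asyncio.run(main())
```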
{acontext-0.0.18 → acontext-0.1.0}/src/acontext/agent/skill.py
@@ -6,6 +6,7 @@ from dataclasses import dataclass, field
 
 from .base import BaseContext, BaseTool, BaseToolPool
 from ..client import AcontextClient
+from ..async_client import AcontextAsyncClient
 from ..types.skill import Skill
 
 
@@ -65,6 +66,64 @@ class SkillContext(BaseContext):
         return list(self.skills.keys())
 
 
+@dataclass
+class AsyncSkillContext(BaseContext):
+    """Async context for skill tools with preloaded skill name mapping."""
+
+    client: AcontextAsyncClient
+    skills: dict[str, Skill] = field(default_factory=dict)
+
+    @classmethod
+    async def create(
+        cls, client: AcontextAsyncClient, skill_ids: list[str]
+    ) -> "AsyncSkillContext":
+        """Create an AsyncSkillContext by preloading skills from a list of skill IDs.
+
+        Args:
+            client: The Acontext async client instance.
+            skill_ids: List of skill UUIDs to preload.
+
+        Returns:
+            AsyncSkillContext with preloaded skills mapped by name.
+
+        Raises:
+            ValueError: If duplicate skill names are found.
+        """
+        skills: dict[str, Skill] = {}
+        for skill_id in skill_ids:
+            skill = await client.skills.get(skill_id)
+            if skill.name in skills:
+                raise ValueError(
+                    f"Duplicate skill name '{skill.name}' found. "
+                    f"Existing ID: {skills[skill.name].id}, New ID: {skill.id}"
+                )
+            skills[skill.name] = skill
+        return cls(client=client, skills=skills)
+
+    def get_skill(self, skill_name: str) -> Skill:
+        """Get a skill by name from the preloaded skills.
+
+        Args:
+            skill_name: The name of the skill.
+
+        Returns:
+            The Skill object.
+
+        Raises:
+            ValueError: If the skill is not found in the context.
+        """
+        if skill_name not in self.skills:
+            available = ", ".join(self.skills.keys()) if self.skills else "[none]"
+            raise ValueError(
+                f"Skill '{skill_name}' not found in context. Available skills: {available}"
+            )
+        return self.skills[skill_name]
+
+    def list_skill_names(self) -> list[str]:
+        """Return list of available skill names in this context."""
+        return list(self.skills.keys())
+
+
 class GetSkillTool(BaseTool):
     """Tool for getting a skill by name."""
 
@@ -121,6 +180,35 @@ class GetSkillTool(BaseTool):
             f"{file_list}"
         )
 
+    async def async_execute(self, ctx: AsyncSkillContext, llm_arguments: dict) -> str:
+        """Get a skill by name (async)."""
+        skill_name = llm_arguments.get("skill_name")
+
+        if not skill_name:
+            raise ValueError("skill_name is required")
+
+        skill = ctx.get_skill(skill_name)
+
+        file_count = len(skill.file_index)
+
+        # Format all files with path and MIME type
+        if skill.file_index:
+            file_list = "\n".join(
+                [
+                    f" - {file_info.path} ({file_info.mime})"
+                    for file_info in skill.file_index
+                ]
+            )
+        else:
+            file_list = " [NO FILES]"
+
+        return (
+            f"Skill: {skill.name} (ID: {skill.id})\n"
+            f"Description: {skill.description}\n"
+            f"Files: {file_count} file(s)\n"
+            f"{file_list}"
+        )
+
 
 class GetSkillFileTool(BaseTool):
     """Tool for getting a file from a skill."""
@@ -197,6 +285,45 @@ class GetSkillFileTool(BaseTool):
 
         return "\n".join(output_parts)
 
+    async def async_execute(self, ctx: AsyncSkillContext, llm_arguments: dict) -> str:
+        """Get a skill file (async)."""
+        skill_name = llm_arguments.get("skill_name")
+        file_path = llm_arguments.get("file_path")
+        expire = llm_arguments.get("expire")
+
+        if not skill_name:
+            raise ValueError("skill_name is required")
+        if not file_path:
+            raise ValueError("file_path is required")
+
+        skill = ctx.get_skill(skill_name)
+
+        result = await ctx.client.skills.get_file(
+            skill_id=skill.id,
+            file_path=file_path,
+            expire=expire,
+        )
+
+        output_parts = [
+            f"File '{result.path}' (MIME: {result.mime}) from skill '{skill_name}':"
+        ]
+
+        if result.content:
+            output_parts.append(f"\nContent (type: {result.content.type}):")
+            output_parts.append(result.content.raw)
+
+        if result.url:
+            expire_seconds = expire if expire is not None else 900
+            output_parts.append(
+                f"\nDownload URL (expires in {expire_seconds} seconds):"
+            )
+            output_parts.append(result.url)
+
+        if not result.content and not result.url:
+            return f"File '{result.path}' retrieved but no content or URL returned."
+
+        return "\n".join(output_parts)
+
 
 class ListSkillsTool(BaseTool):
     """Tool for listing available skills in the context."""
@@ -228,6 +355,17 @@ class ListSkillsTool(BaseTool):
 
         return f"Available skills ({len(ctx.skills)}):\n" + "\n".join(skill_list)
 
+    async def async_execute(self, ctx: AsyncSkillContext, llm_arguments: dict) -> str:
+        """List all available skills (async)."""
+        if not ctx.skills:
+            return "No skills available in the current context."
+
+        skill_list = []
+        for skill_name, skill in ctx.skills.items():
+            skill_list.append(f"- {skill_name}: {skill.description}")
+
+        return f"Available skills ({len(ctx.skills)}):\n" + "\n".join(skill_list)
+
 
 class SkillToolPool(BaseToolPool):
     """Tool pool for skill operations on Acontext skills."""
@@ -246,6 +384,20 @@ class SkillToolPool(BaseToolPool):
         """
         return SkillContext.create(client=client, skill_ids=skill_ids)
 
+    async def async_format_context(
+        self, client: AcontextAsyncClient, skill_ids: list[str]
+    ) -> AsyncSkillContext:
+        """Create an AsyncSkillContext by preloading skills from a list of skill IDs.
+
+        Args:
+            client: The Acontext async client instance.
+            skill_ids: List of skill UUIDs to preload.
+
+        Returns:
+            AsyncSkillContext with preloaded skills mapped by name.
+        """
+        return await AsyncSkillContext.create(client=client, skill_ids=skill_ids)
+
 
 SKILL_TOOLS = SkillToolPool()
 SKILL_TOOLS.add_tool(ListSkillsTool())
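
The skill pool gains the same async path as the disk pool. A minimal sketch of building and inspecting the new `AsyncSkillContext`, assuming the async client exposes the `skills` resource used by `AsyncSkillContext.create`; the skill UUIDs and credentials are placeholders.

```python
import asyncio

from acontext.async_client import AcontextAsyncClient
from acontext.agent import SKILL_TOOLS


async def main() -> None:
    client = AcontextAsyncClient(
        api_key="sk-ac-your-root-api-bearer-token",  # placeholder
        base_url="http://localhost:8029/api/v1",  # placeholder
    )
    # Preload skills by ID; duplicate skill names raise ValueError during creation.
    ctx = await SKILL_TOOLS.async_format_context(
        client, skill_ids=["<skill-uuid-1>", "<skill-uuid-2>"]
    )
    print(ctx.list_skill_names())

    # Look up one preloaded skill by name without another network round trip.
    first_name = ctx.list_skill_names()[0]
    print(ctx.get_skill(first_name).description)


asyncio.run(main())
```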
{acontext-0.0.18 → acontext-0.1.0}/src/acontext/resources/async_disks.py
@@ -33,26 +33,28 @@ class AsyncDisksAPI:
         time_desc: bool | None = None,
     ) -> ListDisksOutput:
         """List all disks in the project.
-
+
         Args:
             user: Filter by user identifier. Defaults to None.
             limit: Maximum number of disks to return. Defaults to None.
             cursor: Cursor for pagination. Defaults to None.
             time_desc: Order by created_at descending if True, ascending if False. Defaults to None.
-
+
         Returns:
             ListDisksOutput containing the list of disks and pagination information.
         """
-        params = build_params(user=user, limit=limit, cursor=cursor, time_desc=time_desc)
+        params = build_params(
+            user=user, limit=limit, cursor=cursor, time_desc=time_desc
+        )
         data = await self._requester.request("GET", "/disk", params=params or None)
         return ListDisksOutput.model_validate(data)
 
     async def create(self, *, user: str | None = None) -> Disk:
         """Create a new disk.
-
+
         Args:
             user: Optional user identifier string. Defaults to None.
-
+
         Returns:
             The created Disk object.
         """
@@ -64,7 +66,7 @@ class AsyncDisksAPI:
 
     async def delete(self, disk_id: str) -> None:
         """Delete a disk by its ID.
-
+
         Args:
             disk_id: The UUID of the disk to delete.
         """
@@ -79,20 +81,22 @@ class AsyncDiskArtifactsAPI:
         self,
         disk_id: str,
         *,
-        file: FileUpload
-        | tuple[str, BinaryIO | bytes]
-        | tuple[str, BinaryIO | bytes, str],
+        file: (
+            FileUpload
+            | tuple[str, BinaryIO | bytes]
+            | tuple[str, BinaryIO | bytes, str]
+        ),
         file_path: str | None = None,
         meta: Mapping[str, Any] | None = None,
     ) -> Artifact:
         """Upload a file to create or update an artifact.
-
+
         Args:
             disk_id: The UUID of the disk.
             file: The file to upload (FileUpload object or tuple format).
             file_path: Directory path (not including filename), defaults to "/".
             meta: Custom metadata as JSON-serializable dict, defaults to None.
-
+
         Returns:
             Artifact containing the created/updated artifact information.
         """
@@ -122,7 +126,7 @@ class AsyncDiskArtifactsAPI:
         expire: int | None = None,
     ) -> GetArtifactResp:
         """Get an artifact by disk ID, file path, and filename.
-
+
         Args:
             disk_id: The UUID of the disk.
             file_path: Directory path (not including filename).
@@ -130,7 +134,7 @@ class AsyncDiskArtifactsAPI:
             with_public_url: Whether to include a presigned public URL. Defaults to None.
             with_content: Whether to include file content. Defaults to None.
             expire: URL expiration time in seconds. Defaults to None.
-
+
         Returns:
             GetArtifactResp containing the artifact and optionally public URL and content.
         """
@@ -141,7 +145,9 @@ class AsyncDiskArtifactsAPI:
             with_content=with_content,
             expire=expire,
         )
-        data = await self._requester.request("GET", f"/disk/{disk_id}/artifact", params=params)
+        data = await self._requester.request(
+            "GET", f"/disk/{disk_id}/artifact", params=params
+        )
         return GetArtifactResp.model_validate(data)
 
     async def update(
@@ -153,13 +159,13 @@ class AsyncDiskArtifactsAPI:
         meta: Mapping[str, Any],
     ) -> UpdateArtifactResp:
         """Update an artifact's metadata.
-
+
         Args:
             disk_id: The UUID of the disk.
             file_path: Directory path (not including filename).
             filename: The filename of the artifact.
             meta: Custom metadata as JSON-serializable dict.
-
+
         Returns:
             UpdateArtifactResp containing the updated artifact information.
         """
@@ -168,7 +174,9 @@ class AsyncDiskArtifactsAPI:
             "file_path": full_path,
             "meta": json.dumps(cast(Mapping[str, Any], meta)),
         }
-        data = await self._requester.request("PUT", f"/disk/{disk_id}/artifact", json_data=payload)
+        data = await self._requester.request(
+            "PUT", f"/disk/{disk_id}/artifact", json_data=payload
+        )
         return UpdateArtifactResp.model_validate(data)
 
     async def delete(
@@ -179,7 +187,7 @@ class AsyncDiskArtifactsAPI:
         filename: str,
     ) -> None:
         """Delete an artifact by disk ID, file path, and filename.
-
+
         Args:
             disk_id: The UUID of the disk.
             file_path: Directory path (not including filename).
@@ -187,7 +195,9 @@ class AsyncDiskArtifactsAPI:
         """
         full_path = f"{file_path.rstrip('/')}/{filename}"
         params = {"file_path": full_path}
-        await self._requester.request("DELETE", f"/disk/{disk_id}/artifact", params=params)
+        await self._requester.request(
+            "DELETE", f"/disk/{disk_id}/artifact", params=params
+        )
 
     async def list(
         self,
@@ -195,9 +205,83 @@ class AsyncDiskArtifactsAPI:
         *,
         path: str | None = None,
     ) -> ListArtifactsResp:
+        """List artifacts in a disk at a specific path.
+
+        Args:
+            disk_id: The UUID of the disk.
+            path: Directory path to list. Defaults to None (root).
+
+        Returns:
+            ListArtifactsResp containing the list of artifacts.
+        """
         params: dict[str, Any] = {}
         if path is not None:
             params["path"] = path
-        data = await self._requester.request("GET", f"/disk/{disk_id}/artifact/ls", params=params or None)
+        data = await self._requester.request(
+            "GET", f"/disk/{disk_id}/artifact/ls", params=params or None
+        )
         return ListArtifactsResp.model_validate(data)
 
+    async def grep_artifacts(
+        self,
+        disk_id: str,
+        *,
+        query: str,
+        limit: int = 100,
+    ) -> list[Artifact]:
+        """Search artifact content using regex pattern.
+
+        Args:
+            disk_id: The disk ID to search in
+            query: Regex pattern to search for in file content
+            limit: Maximum number of results (default 100, max 1000)
+
+        Returns:
+            List of matching artifacts
+
+        Example:
+            ```python
+            # Search for TODO comments in code
+            results = await client.disks.artifacts.grep_artifacts(
+                disk_id="disk-uuid",
+                query="TODO.*bug"
+            )
+            ```
+        """
+        params = build_params(query=query, limit=limit)
+        data = await self._requester.request(
+            "GET", f"/disk/{disk_id}/artifact/grep", params=params
+        )
+        return [Artifact.model_validate(item) for item in data]
+
+    async def glob_artifacts(
+        self,
+        disk_id: str,
+        *,
+        query: str,
+        limit: int = 100,
+    ) -> list[Artifact]:
+        """Search artifact paths using glob pattern.
+
+        Args:
+            disk_id: The disk ID to search in
+            query: Glob pattern (e.g., '**/*.py', '*.txt')
+            limit: Maximum number of results (default 100, max 1000)
+
+        Returns:
+            List of matching artifacts
+
+        Example:
+            ```python
+            # Find all Python files
+            results = await client.disks.artifacts.glob_artifacts(
+                disk_id="disk-uuid",
+                query="**/*.py"
+            )
+            ```
+        """
+        params = build_params(query=query, limit=limit)
+        data = await self._requester.request(
+            "GET", f"/disk/{disk_id}/artifact/glob", params=params
+        )
+        return [Artifact.model_validate(item) for item in data]
{acontext-0.0.18 → acontext-0.1.0}/src/acontext/resources/async_sessions.py
@@ -78,6 +78,7 @@ class AsyncSessionsAPI:
         *,
         user: str | None = None,
         space_id: str | None = None,
+        disable_task_tracking: bool | None = None,
         configs: Mapping[str, Any] | None = None,
     ) -> Session:
         """Create a new session.
@@ -85,6 +86,7 @@ class AsyncSessionsAPI:
         Args:
             user: Optional user identifier string. Defaults to None.
             space_id: Optional space ID to associate with the session. Defaults to None.
+            disable_task_tracking: Whether to disable task tracking for this session. Defaults to None (server default: False).
             configs: Optional session configuration dictionary. Defaults to None.
 
         Returns:
@@ -95,6 +97,8 @@ class AsyncSessionsAPI:
             payload["user"] = user
         if space_id:
             payload["space_id"] = space_id
+        if disable_task_tracking is not None:
+            payload["disable_task_tracking"] = disable_task_tracking
         if configs is not None:
             payload["configs"] = configs
         data = await self._requester.request("POST", "/session", json_data=payload)
@@ -367,21 +371,22 @@ class AsyncSessionsAPI:
         )
         return TokenCounts.model_validate(data)
 
+    async def messages_observing_status(
+        self, session_id: str
+    ) -> MessageObservingStatus:
+        """Get message observing status counts for a session.
 
-    async def messages_observing_status(self, session_id: str) -> MessageObservingStatus:
-        """Get message observing status counts for a session.
+        Returns the count of messages by their observing status:
+        observed, in_process, and pending.
 
-        Returns the count of messages by their observing status:
-        observed, in_process, and pending.
-
-        Args:
-            session_id: The UUID of the session.
+        Args:
+            session_id: The UUID of the session.
 
-        Returns:
-            MessageObservingStatus object containing observed, in_process,
-            pending counts and updated_at timestamp.
-        """
-        data = await self._requester.request(
-            "GET", f"/session/{session_id}/observing_status"
-        )
-        return MessageObservingStatus.model_validate(data)
+        Returns:
+            MessageObservingStatus object containing observed, in_process,
+            pending counts and updated_at timestamp.
+        """
+        data = await self._requester.request(
+            "GET", f"/session/{session_id}/observing_status"
+        )
+        return MessageObservingStatus.model_validate(data)
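
Sessions gain an opt-out flag for task tracking at creation time, while `messages_observing_status` is only reformatted. A minimal sketch of the new parameter on the async client, assuming the resource is exposed as `client.sessions` and that `Session` carries an `id` field; the credentials are placeholders.

```python
import asyncio

from acontext.async_client import AcontextAsyncClient


async def main() -> None:
    client = AcontextAsyncClient(
        api_key="sk-ac-your-root-api-bearer-token",  # placeholder
        base_url="http://localhost:8029/api/v1",  # placeholder
    )
    # disable_task_tracking is new in 0.1.0; leaving it unset keeps the server default (False).
    session = await client.sessions.create(user="alice", disable_task_tracking=True)

    # Unchanged in behavior: per-session message observing-status counts.
    status = await client.sessions.messages_observing_status(session.id)
    print(status.observed, status.in_process, status.pending)


asyncio.run(main())
```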
Files 9-41 listed above (+0 -0) contain no changes between the two versions.