agentfense 0.2.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- agentfense/__init__.py +191 -0
- agentfense/_async/__init__.py +21 -0
- agentfense/_async/client.py +679 -0
- agentfense/_async/sandbox.py +667 -0
- agentfense/_gen/__init__.py +0 -0
- agentfense/_gen/codebase_pb2.py +78 -0
- agentfense/_gen/codebase_pb2.pyi +141 -0
- agentfense/_gen/codebase_pb2_grpc.py +366 -0
- agentfense/_gen/common_pb2.py +47 -0
- agentfense/_gen/common_pb2.pyi +68 -0
- agentfense/_gen/common_pb2_grpc.py +24 -0
- agentfense/_gen/sandbox_pb2.py +123 -0
- agentfense/_gen/sandbox_pb2.pyi +255 -0
- agentfense/_gen/sandbox_pb2_grpc.py +678 -0
- agentfense/_shared.py +238 -0
- agentfense/client.py +751 -0
- agentfense/exceptions.py +333 -0
- agentfense/presets.py +192 -0
- agentfense/sandbox.py +672 -0
- agentfense/types.py +256 -0
- agentfense/utils.py +286 -0
- agentfense-0.2.1.dist-info/METADATA +378 -0
- agentfense-0.2.1.dist-info/RECORD +25 -0
- agentfense-0.2.1.dist-info/WHEEL +5 -0
- agentfense-0.2.1.dist-info/top_level.txt +1 -0
agentfense/types.py
ADDED
|
@@ -0,0 +1,256 @@
|
|
|
1
|
+
"""Type definitions for the Sandbox SDK."""
|
|
2
|
+
|
|
3
|
+
from dataclasses import dataclass, field
|
|
4
|
+
from datetime import datetime, timedelta
|
|
5
|
+
from enum import Enum
|
|
6
|
+
from typing import Dict, List, Optional
|
|
7
|
+
|
|
8
|
+
|
|
9
|
+
class Permission(str, Enum):
    """Access level granted on files inside a sandbox."""

    NONE = "none"    # no access
    VIEW = "view"    # least-privileged visibility level
    READ = "read"    # read access
    WRITE = "write"  # read/write access
|
|
15
|
+
|
|
16
|
+
|
|
17
|
+
class PatternType(str, Enum):
    """How a permission rule's pattern string is interpreted."""

    GLOB = "glob"            # shell-style glob pattern
    DIRECTORY = "directory"  # names a directory
    FILE = "file"            # names a single file
|
|
22
|
+
|
|
23
|
+
|
|
24
|
+
class SandboxStatus(str, Enum):
    """Lifecycle state of a sandbox."""

    PENDING = "pending"
    RUNNING = "running"
    STOPPED = "stopped"
    ERROR = "error"
|
|
30
|
+
|
|
31
|
+
|
|
32
|
+
class SessionStatus(str, Enum):
    """Lifecycle state of a shell session."""

    UNKNOWN = "unknown"  # status unspecified or not reported
    ACTIVE = "active"
    CLOSED = "closed"
|
|
37
|
+
|
|
38
|
+
|
|
39
|
+
class RuntimeType(str, Enum):
    """Isolation mechanism backing a sandbox."""

    BWRAP = "bwrap"    # Bubblewrap - lightweight namespace isolation
    DOCKER = "docker"  # Docker container - stronger isolation
|
|
46
|
+
|
|
47
|
+
|
|
48
|
+
@dataclass
class ResourceLimits:
    """Resource constraints applied to a sandbox.

    Caps memory, CPU, and process count; enforcement is delegated to the
    runtime (Docker or bwrap). Any field left as ``None`` means no
    explicit limit.

    Attributes:
        memory_bytes: Maximum memory in bytes (e.g., 512*1024*1024 for 512MB).
        cpu_quota: CPU quota in microseconds per 100ms period.
        cpu_shares: CPU shares (relative weight, default 1024).
        pids_limit: Maximum number of processes/threads.

    Example:
        >>> limits = ResourceLimits(
        ...     memory_bytes=512 * 1024 * 1024,  # 512 MB
        ...     pids_limit=100,
        ... )
    """
    memory_bytes: Optional[int] = None
    cpu_quota: Optional[int] = None
    cpu_shares: Optional[int] = None
    pids_limit: Optional[int] = None

    def to_dict(self) -> Dict:
        """Serialize to a dict, dropping fields that are unset (None)."""
        raw = {
            "memory_bytes": self.memory_bytes,
            "cpu_quota": self.cpu_quota,
            "cpu_shares": self.cpu_shares,
            "pids_limit": self.pids_limit,
        }
        return {key: value for key, value in raw.items() if value is not None}

    @classmethod
    def from_dict(cls, data: Dict) -> "ResourceLimits":
        """Build a ResourceLimits from a (possibly partial) dictionary."""
        keys = ("memory_bytes", "cpu_quota", "cpu_shares", "pids_limit")
        return cls(**{k: data.get(k) for k in keys})
|
|
92
|
+
|
|
93
|
+
|
|
94
|
+
@dataclass
class PermissionRule:
    """A single rule granting a permission level to matching paths."""
    pattern: str                              # path pattern, interpreted per `type`
    permission: Permission = Permission.READ  # access level to grant
    type: PatternType = PatternType.GLOB      # how `pattern` is interpreted
    priority: int = 0                         # relative ordering of rules

    def to_dict(self) -> Dict:
        """Serialize to the wire format used in API calls."""
        return dict(
            pattern=self.pattern,
            permission=self.permission.value,
            type=self.type.value,
            priority=self.priority,
        )
|
|
110
|
+
|
|
111
|
+
|
|
112
|
+
@dataclass
class Sandbox:
    """A sandbox instance tied to a codebase.

    Attributes:
        id: Unique identifier for the sandbox.
        codebase_id: ID of the associated codebase.
        status: Current status (pending, running, stopped, error).
        permissions: Permission rules governing file access.
        labels: User-defined labels for organization.
        runtime: Isolation runtime (bwrap or docker).
        image: Docker image name (only meaningful for the docker runtime).
        resources: Resource limits (memory, CPU, etc.), if any.
        created_at: Creation timestamp.
        started_at: Start timestamp.
        stopped_at: Stop timestamp.
        expires_at: Expiry timestamp.
    """
    id: str
    codebase_id: str
    status: SandboxStatus
    permissions: List[PermissionRule] = field(default_factory=list)
    labels: Dict[str, str] = field(default_factory=dict)
    runtime: RuntimeType = RuntimeType.BWRAP
    image: Optional[str] = None
    resources: Optional[ResourceLimits] = None
    created_at: Optional[datetime] = None
    started_at: Optional[datetime] = None
    stopped_at: Optional[datetime] = None
    expires_at: Optional[datetime] = None
|
|
142
|
+
|
|
143
|
+
|
|
144
|
+
@dataclass
class Codebase:
    """A codebase: a named collection of files with an owner."""
    id: str
    name: str
    owner_id: str
    size: int = 0        # NOTE(review): presumably total bytes stored -- confirm against server
    file_count: int = 0
    created_at: Optional[datetime] = None
    updated_at: Optional[datetime] = None
|
|
154
|
+
|
|
155
|
+
|
|
156
|
+
@dataclass
class ExecResult:
    """Outcome of a command executed in a sandbox.

    Attributes:
        stdout: Captured standard output.
        stderr: Captured standard error.
        exit_code: Process exit code (0 typically means success).
        duration: Wall-clock execution time, when reported.
        command: The command line that was run (for debugging), when known.

    Example:
        >>> result = sandbox.run("python --version")
        >>> if result.success:
        ...     print(result.stdout)
        >>> else:
        ...     print(f"Failed: {result.output}")

        >>> # Chain with raise_on_error for fail-fast behavior
        >>> result = sandbox.run("make build").raise_on_error()
    """
    stdout: str
    stderr: str
    exit_code: int
    duration: Optional[timedelta] = None
    command: Optional[str] = None

    @property
    def success(self) -> bool:
        """Whether the command exited cleanly (exit code 0)."""
        return self.exit_code == 0

    @property
    def output(self) -> str:
        """Both streams joined with a newline, skipping empty ones.

        Useful when you want all output regardless of stream.
        """
        # "\n".join of an empty sequence is "" -- matches the no-output case.
        return "\n".join(s for s in (self.stdout, self.stderr) if s)

    def raise_on_error(self) -> "ExecResult":
        """Raise CommandExecutionError if the command failed.

        Returns self for method chaining, allowing patterns like:
            result = sandbox.run("make").raise_on_error()

        Raises:
            CommandExecutionError: If exit_code is non-zero.

        Returns:
            Self, for method chaining.
        """
        if self.success:
            return self
        # Imported lazily to avoid a module-level import cycle with exceptions.
        from .exceptions import CommandExecutionError
        raise CommandExecutionError(
            command=self.command or "<unknown>",
            exit_code=self.exit_code,
            stdout=self.stdout,
            stderr=self.stderr,
        )
|
|
222
|
+
|
|
223
|
+
|
|
224
|
+
@dataclass
class FileInfo:
    """Metadata for a single entry (file or directory) in a codebase."""
    path: str   # path within the codebase
    name: str   # basename of the entry
    is_dir: bool
    size: int
    modified_at: Optional[datetime] = None
|
|
232
|
+
|
|
233
|
+
|
|
234
|
+
@dataclass
class UploadResult:
    """Outcome of uploading one file to a codebase."""
    codebase_id: str
    file_path: str
    size: int
    checksum: str
|
|
241
|
+
|
|
242
|
+
|
|
243
|
+
@dataclass
class Session:
    """A persistent shell session inside a sandbox.

    The underlying shell process survives across command executions, so
    working directory, environment variables, and background processes
    carry over between calls.
    """
    id: str
    sandbox_id: str
    status: SessionStatus
    shell: str = "/bin/bash"  # shell binary backing the session
    created_at: Optional[datetime] = None
    closed_at: Optional[datetime] = None
|
agentfense/utils.py
ADDED
|
@@ -0,0 +1,286 @@
|
|
|
1
|
+
"""Utility functions for the Sandbox SDK.
|
|
2
|
+
|
|
3
|
+
This module provides helper functions for common tasks like
|
|
4
|
+
walking directories, parsing ignore files, and formatting output.
|
|
5
|
+
"""
|
|
6
|
+
|
|
7
|
+
import fnmatch
|
|
8
|
+
import os
|
|
9
|
+
from pathlib import Path
|
|
10
|
+
from typing import Iterator, List, Optional, Set, Tuple
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
# Default patterns to always ignore.
# gitignore-style patterns; consumed by walk_directory() via _should_ignore().
# Directories are listed both bare (matches the dir entry itself) and with
# a "/**" suffix (matches everything beneath it).
DEFAULT_IGNORE_PATTERNS = [
    # Version control
    ".git",
    ".git/**",
    # Python bytecode caches
    "__pycache__",
    "__pycache__/**",
    "*.pyc",
    "*.pyo",
    # OS metadata files
    ".DS_Store",
    "Thumbs.db",
    # JavaScript dependencies
    "node_modules",
    "node_modules/**",
    # Virtual environments and env files
    ".venv",
    ".venv/**",
    "venv",
    "venv/**",
    ".env",
    # Packaging artifacts and tool caches
    "*.egg-info",
    "*.egg-info/**",
    ".tox",
    ".tox/**",
    ".pytest_cache",
    ".pytest_cache/**",
    ".mypy_cache",
    ".mypy_cache/**",
    ".ruff_cache",
    ".ruff_cache/**",
]
|
|
41
|
+
|
|
42
|
+
|
|
43
|
+
def parse_ignore_file(path: str) -> List[str]:
    """Read patterns from a .gitignore or .sandboxignore file.

    Args:
        path: Path to the ignore file.

    Returns:
        The stripped, non-empty, non-comment lines in file order;
        an empty list if the file does not exist.

    Note:
        - Empty lines and comments (starting with #) are skipped.
        - Patterns are returned as-is (gitignore syntax).
    """
    ignore_path = Path(path)
    if not ignore_path.exists():
        return []

    with open(ignore_path, "r", encoding="utf-8") as handle:
        stripped = (raw.strip() for raw in handle)
        # Keep everything except blanks and '#' comment lines.
        return [line for line in stripped if line and not line.startswith("#")]
|
|
71
|
+
|
|
72
|
+
|
|
73
|
+
def _normalize_pattern(pattern: str) -> str:
|
|
74
|
+
"""Normalize a gitignore pattern for fnmatch.
|
|
75
|
+
|
|
76
|
+
Args:
|
|
77
|
+
pattern: A gitignore-style pattern.
|
|
78
|
+
|
|
79
|
+
Returns:
|
|
80
|
+
A pattern suitable for fnmatch.
|
|
81
|
+
"""
|
|
82
|
+
# Remove leading slash (gitignore root anchor)
|
|
83
|
+
if pattern.startswith("/"):
|
|
84
|
+
pattern = pattern[1:]
|
|
85
|
+
|
|
86
|
+
# Handle negation (we don't support this yet, just strip it)
|
|
87
|
+
if pattern.startswith("!"):
|
|
88
|
+
return ""
|
|
89
|
+
|
|
90
|
+
# Handle directory-only patterns (trailing /)
|
|
91
|
+
if pattern.endswith("/"):
|
|
92
|
+
pattern = pattern[:-1]
|
|
93
|
+
|
|
94
|
+
return pattern
|
|
95
|
+
|
|
96
|
+
|
|
97
|
+
def _should_ignore(
    rel_path: str,
    patterns: List[str],
    is_dir: bool = False,
) -> bool:
    """Check if a path should be ignored based on patterns.

    Args:
        rel_path: Relative path to check (either separator style accepted).
        patterns: List of gitignore-style ignore patterns.
        is_dir: Whether the path is a directory. Currently unused; kept for
            interface stability (callers pass it) and possible future
            directory-only matching.

    Returns:
        True if any pattern matches the full relative path or its basename.
    """
    # Normalize separators so patterns behave identically on Windows.
    rel_path = rel_path.replace(os.sep, "/")
    # Loop-invariant: compute the basename once instead of per pattern.
    filename = os.path.basename(rel_path)

    for pattern in patterns:
        normalized = _normalize_pattern(pattern)
        if not normalized:
            continue  # unsupported pattern (e.g. negation) -- skip

        # Match against the whole relative path, then the bare filename.
        # NOTE: the original code had an extra "**" branch that re-ran the
        # exact same fnmatch(rel_path, normalized) call -- dead code,
        # removed. fnmatch's "*" already crosses "/" boundaries, so "**"
        # patterns need no special handling here.
        if fnmatch.fnmatch(rel_path, normalized):
            return True
        if fnmatch.fnmatch(filename, normalized):
            return True

    return False
|
|
135
|
+
|
|
136
|
+
|
|
137
|
+
def walk_directory(
    path: str,
    ignore_patterns: Optional[List[str]] = None,
    include_default_ignores: bool = True,
    max_file_size: int = 50 * 1024 * 1024,  # 50 MB
) -> Iterator[Tuple[str, bytes]]:
    """Recursively walk a directory and yield file paths with content.

    Args:
        path: Root directory path.
        ignore_patterns: Additional patterns to ignore (gitignore syntax).
        include_default_ignores: Whether to include default ignore patterns.
        max_file_size: Maximum file size to read (larger files are skipped).

    Yields:
        Tuples of (relative_path, file_content). Content is raw bytes;
        the relative path is rendered with the platform separator by str().

    Note:
        Patterns from a ``.gitignore`` / ``.sandboxignore`` in the root
        (only the root -- nested ignore files are not consulted) are added
        to the pattern list. Unreadable and oversized files are skipped
        silently. Uses ``Path.is_relative_to`` (Python 3.9+).

    Example:
        >>> for rel_path, content in walk_directory("./my-project"):
        ...     print(f"File: {rel_path}, Size: {len(content)} bytes")
    """
    root_path = Path(path).resolve()

    # Build ignore patterns: defaults first, then caller-supplied extras.
    patterns: List[str] = []
    if include_default_ignores:
        patterns.extend(DEFAULT_IGNORE_PATTERNS)
    if ignore_patterns:
        patterns.extend(ignore_patterns)

    # Check for .gitignore and .sandboxignore in root
    for ignore_file in [".gitignore", ".sandboxignore"]:
        ignore_path = root_path / ignore_file
        if ignore_path.exists():
            patterns.extend(parse_ignore_file(str(ignore_path)))

    # Track ignored directories to skip their contents.
    # NOTE(review): skipping a file because its parent is in ignored_dirs
    # relies on rglob yielding a directory before its descendants (true for
    # CPython's top-down scandir traversal) -- confirm if targeting other
    # implementations.
    ignored_dirs: Set[Path] = set()

    for current_path in root_path.rglob("*"):
        # Skip directories themselves (we only yield files); remember the
        # ignored ones so their contents can be pruned below.
        if current_path.is_dir():
            rel_path = str(current_path.relative_to(root_path))
            if _should_ignore(rel_path, patterns, is_dir=True):
                ignored_dirs.add(current_path)
            continue

        # Skip if parent is ignored
        if any(current_path.is_relative_to(d) for d in ignored_dirs):
            continue

        # Get relative path
        rel_path = str(current_path.relative_to(root_path))

        # Check if file should be ignored
        if _should_ignore(rel_path, patterns):
            continue

        # Skip files that are too large (stat failures also skip the file).
        try:
            size = current_path.stat().st_size
            if size > max_file_size:
                continue
        except OSError:
            continue

        # Read file content
        try:
            content = current_path.read_bytes()
            yield rel_path, content
        except (IOError, PermissionError):
            # Skip files we can't read
            continue
|
|
210
|
+
|
|
211
|
+
|
|
212
|
+
def human_readable_size(size: int) -> str:
    """Format a byte count as a human-readable string.

    Uses binary (1024-based) steps.

    Args:
        size: Size in bytes.

    Returns:
        Human-readable string like "1.5 MB".

    Example:
        >>> human_readable_size(1536)
        '1.5 KB'
        >>> human_readable_size(1048576)
        '1.0 MB'
    """
    value = float(size)
    for unit in ("B", "KB", "MB", "GB", "TB"):
        if abs(value) < 1024.0:
            return f"{value:.1f} {unit}"
        value /= 1024.0
    # Anything at or above 1024 TB is reported in petabytes.
    return f"{value:.1f} PB"
|
|
232
|
+
|
|
233
|
+
|
|
234
|
+
def generate_codebase_name(path: str) -> str:
    """Derive a sanitized codebase name from a directory path.

    The directory's basename is kept; every character that is not
    alphanumeric, "-", or "_" becomes a dash, runs of dashes collapse to
    one, and leading/trailing dashes are removed.

    Args:
        path: Directory path.

    Returns:
        The sanitized name, or "unnamed-project" if nothing survives.
    """
    dir_name = Path(path).resolve().name
    # Replace disallowed characters with dashes.
    sanitized = "".join(ch if (ch.isalnum() or ch in "-_") else "-" for ch in dir_name)
    # Joining the non-empty dash-separated segments both collapses runs of
    # dashes and trims them from the ends in one step.
    segments = [seg for seg in sanitized.split("-") if seg]
    return "-".join(segments) or "unnamed-project"
|
|
251
|
+
|
|
252
|
+
|
|
253
|
+
def generate_owner_id() -> str:
    """Generate a stable default owner ID for the current OS user.

    Returns:
        "user_" followed by the first 8 hex characters of a SHA-256
        digest of the username (deterministic for a given user).
    """
    # Imported lazily: only needed when no explicit owner ID is supplied.
    import getpass
    import hashlib

    seed = f"sandbox-sdk-{getpass.getuser()}".encode()
    digest = hashlib.sha256(seed).hexdigest()
    return f"user_{digest[:8]}"
|
|
267
|
+
|
|
268
|
+
|
|
269
|
+
def count_files(path: str, ignore_patterns: Optional[List[str]] = None) -> Tuple[int, int]:
    """Count files and total size in a directory.

    Delegates to walk_directory, so the same default ignore rules apply.
    Note that this reads every file's content to measure it, making the
    cost proportional to total bytes rather than file count.

    Args:
        path: Root directory path.
        ignore_patterns: Patterns to ignore.

    Returns:
        Tuple of (file_count, total_size_bytes).
    """
    sizes = [len(content) for _, content in walk_directory(path, ignore_patterns)]
    return len(sizes), sum(sizes)