indent 0.1.26__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (55) hide show
  1. exponent/__init__.py +34 -0
  2. exponent/cli.py +110 -0
  3. exponent/commands/cloud_commands.py +585 -0
  4. exponent/commands/common.py +411 -0
  5. exponent/commands/config_commands.py +334 -0
  6. exponent/commands/run_commands.py +222 -0
  7. exponent/commands/settings.py +56 -0
  8. exponent/commands/types.py +111 -0
  9. exponent/commands/upgrade.py +29 -0
  10. exponent/commands/utils.py +146 -0
  11. exponent/core/config.py +180 -0
  12. exponent/core/graphql/__init__.py +0 -0
  13. exponent/core/graphql/client.py +61 -0
  14. exponent/core/graphql/get_chats_query.py +47 -0
  15. exponent/core/graphql/mutations.py +160 -0
  16. exponent/core/graphql/queries.py +146 -0
  17. exponent/core/graphql/subscriptions.py +16 -0
  18. exponent/core/remote_execution/checkpoints.py +212 -0
  19. exponent/core/remote_execution/cli_rpc_types.py +499 -0
  20. exponent/core/remote_execution/client.py +999 -0
  21. exponent/core/remote_execution/code_execution.py +77 -0
  22. exponent/core/remote_execution/default_env.py +31 -0
  23. exponent/core/remote_execution/error_info.py +45 -0
  24. exponent/core/remote_execution/exceptions.py +10 -0
  25. exponent/core/remote_execution/file_write.py +35 -0
  26. exponent/core/remote_execution/files.py +330 -0
  27. exponent/core/remote_execution/git.py +268 -0
  28. exponent/core/remote_execution/http_fetch.py +94 -0
  29. exponent/core/remote_execution/languages/python_execution.py +239 -0
  30. exponent/core/remote_execution/languages/shell_streaming.py +226 -0
  31. exponent/core/remote_execution/languages/types.py +20 -0
  32. exponent/core/remote_execution/port_utils.py +73 -0
  33. exponent/core/remote_execution/session.py +128 -0
  34. exponent/core/remote_execution/system_context.py +26 -0
  35. exponent/core/remote_execution/terminal_session.py +375 -0
  36. exponent/core/remote_execution/terminal_types.py +29 -0
  37. exponent/core/remote_execution/tool_execution.py +595 -0
  38. exponent/core/remote_execution/tool_type_utils.py +39 -0
  39. exponent/core/remote_execution/truncation.py +296 -0
  40. exponent/core/remote_execution/types.py +635 -0
  41. exponent/core/remote_execution/utils.py +477 -0
  42. exponent/core/types/__init__.py +0 -0
  43. exponent/core/types/command_data.py +206 -0
  44. exponent/core/types/event_types.py +89 -0
  45. exponent/core/types/generated/__init__.py +0 -0
  46. exponent/core/types/generated/strategy_info.py +213 -0
  47. exponent/migration-docs/login.md +112 -0
  48. exponent/py.typed +4 -0
  49. exponent/utils/__init__.py +0 -0
  50. exponent/utils/colors.py +92 -0
  51. exponent/utils/version.py +289 -0
  52. indent-0.1.26.dist-info/METADATA +38 -0
  53. indent-0.1.26.dist-info/RECORD +55 -0
  54. indent-0.1.26.dist-info/WHEEL +4 -0
  55. indent-0.1.26.dist-info/entry_points.txt +2 -0
@@ -0,0 +1,77 @@
1
+ from collections.abc import AsyncGenerator, Callable
2
+
3
+ from exponent.core.remote_execution.cli_rpc_types import (
4
+ StreamingCodeExecutionRequest,
5
+ StreamingCodeExecutionResponse,
6
+ StreamingCodeExecutionResponseChunk,
7
+ )
8
+ from exponent.core.remote_execution.languages.python_execution import (
9
+ execute_python_streaming,
10
+ )
11
+ from exponent.core.remote_execution.languages.shell_streaming import (
12
+ execute_shell_streaming,
13
+ )
14
+ from exponent.core.remote_execution.languages.types import StreamedOutputPiece
15
+ from exponent.core.remote_execution.session import RemoteExecutionClientSession
16
+
17
EMPTY_OUTPUT_STRING = "(No output)"
MAX_OUTPUT_LENGTH = 50000  # Maximum characters to keep in final output


def _finalize_output(output: str | None) -> tuple[str, bool]:
    """Normalize a completed execution's output for the final response.

    Empty/None output becomes EMPTY_OUTPUT_STRING; output longer than
    MAX_OUTPUT_LENGTH keeps only the trailing MAX_OUTPUT_LENGTH characters,
    prefixed with a truncation notice.

    Returns:
        (final_output, truncated) where `truncated` reflects whether the
        original output exceeded MAX_OUTPUT_LENGTH.
    """
    final_output = output or EMPTY_OUTPUT_STRING
    truncated = len(final_output) > MAX_OUTPUT_LENGTH
    if truncated:
        final_output = (
            f"[Truncated to last {MAX_OUTPUT_LENGTH} characters]\n\n"
            + final_output[-MAX_OUTPUT_LENGTH:]
        )
    return final_output, truncated


async def execute_code_streaming(
    request: StreamingCodeExecutionRequest,
    session: RemoteExecutionClientSession,
    working_directory: str,
    should_halt: Callable[[], bool] | None = None,
) -> AsyncGenerator[
    StreamingCodeExecutionResponseChunk | StreamingCodeExecutionResponse, None
]:
    """Execute `request.content` in the requested language, streaming output.

    Args:
        request: Carries the language ("python" or "shell"), the code to run,
            a correlation id, and (for shell) a timeout.
        session: Supplies the Python kernel used for "python" execution.
        working_directory: Working directory for shell execution.
        should_halt: Optional callback polled to interrupt execution early.

    Yields:
        One StreamingCodeExecutionResponseChunk per streamed output piece,
        then a single final StreamingCodeExecutionResponse. Any other
        `request.language` yields nothing (preserved from the original).
    """
    if request.language == "python":
        async for output in execute_python_streaming(
            request.content, session.kernel, user_interrupted=should_halt
        ):
            if isinstance(output, StreamedOutputPiece):
                yield StreamingCodeExecutionResponseChunk(
                    content=output.content, correlation_id=request.correlation_id
                )
            else:
                # The truncation logic below used to be duplicated verbatim in
                # both language branches; it now lives in _finalize_output.
                final_output, truncated = _finalize_output(output.output)
                yield StreamingCodeExecutionResponse(
                    correlation_id=request.correlation_id,
                    content=final_output,
                    truncated=truncated,
                    halted=output.halted,
                )

    elif request.language == "shell":
        async for shell_output in execute_shell_streaming(
            request.content, working_directory, request.timeout, should_halt
        ):
            if isinstance(shell_output, StreamedOutputPiece):
                yield StreamingCodeExecutionResponseChunk(
                    content=shell_output.content, correlation_id=request.correlation_id
                )
            else:
                final_output, truncated = _finalize_output(shell_output.output)
                yield StreamingCodeExecutionResponse(
                    correlation_id=request.correlation_id,
                    content=final_output,
                    truncated=truncated,
                    halted=shell_output.halted,
                    exit_code=shell_output.exit_code,
                    cancelled_for_timeout=shell_output.cancelled_for_timeout,
                )
@@ -0,0 +1,31 @@
1
+ import os
2
+
3
+
4
+ def get_default_env() -> dict[str, str]:
5
+ """
6
+ Returns default environment variables for CLI-spawned processes.
7
+ These are merged with the parent process environment.
8
+ """
9
+ return {
10
+ "GIT_EDITOR": "true",
11
+ }
12
+
13
+
14
+ def get_process_env(env_overrides: dict[str, str] | None = None) -> dict[str, str]:
15
+ """
16
+ Returns the complete environment for spawned processes.
17
+ Merges parent environment with default variables, then applies overrides.
18
+
19
+ Priority order (lowest to highest):
20
+ 1. Parent process environment (os.environ)
21
+ 2. Default environment variables (get_default_env())
22
+ 3. Explicit overrides (env_overrides parameter)
23
+
24
+ Args:
25
+ env_overrides: Optional dict of environment variables that override defaults
26
+ """
27
+ env = os.environ.copy()
28
+ env.update(get_default_env())
29
+ if env_overrides:
30
+ env.update(env_overrides)
31
+ return env
@@ -0,0 +1,45 @@
1
+ import traceback
2
+ from typing import Optional
3
+
4
+ from pydantic import BaseModel
5
+
6
+
7
class SerializableErrorInfo(BaseModel):
    """JSON-serializable snapshot of an exception, including its chained
    cause/context exceptions."""

    message: str
    stack: list[str]
    cls_name: str | None
    cause: Optional["SerializableErrorInfo"]
    context: Optional["SerializableErrorInfo"]

    def __str__(self) -> str:
        return self.to_string()

    def to_string(self) -> str:
        """Render the error as readable text, recursing into cause/context."""
        parts = [self.message]
        if self.stack:
            parts.append("\nStack Trace:\n" + "".join(self.stack))
        if self.cause:
            parts.append(
                "\nThe above exception was caused by the following exception:\n"
                + self.cause.to_string()
            )
        if self.context:
            parts.append(
                "\nThe above exception occurred during handling of the following exception:\n"
                + self.context.to_string()
            )
        return "".join(parts)


# Resolve the self-referential "SerializableErrorInfo" forward references.
SerializableErrorInfo.model_rebuild()
36
+
37
+
38
def serialize_error_info(error: BaseException) -> SerializableErrorInfo:
    """Convert an exception and its __cause__/__context__ chain into a
    SerializableErrorInfo tree (recursive)."""
    cause = error.__cause__
    context = error.__context__
    return SerializableErrorInfo(
        message=str(error),
        stack=traceback.format_tb(error.__traceback__),
        cls_name=type(error).__name__,
        cause=serialize_error_info(cause) if cause is not None else None,
        context=serialize_error_info(context) if context is not None else None,
    )
@@ -0,0 +1,10 @@
1
class ExponentError(Exception):
    """Generic error raised by Exponent operations."""

    pass


class HandledExponentError(Exception):
    """An error that has already been handled/surfaced upstream.

    NOTE(review): does not inherit from ExponentError — presumably so callers
    can catch it separately; confirm at raise/catch sites.
    """

    pass


class RateLimitError(Exception):
    """Presumably raised when an API rate limit is exceeded — confirm at
    raise sites (also independent of ExponentError)."""

    pass
@@ -0,0 +1,35 @@
1
+ import os
2
+
3
+ from anyio import Path as AsyncPath
4
+
5
+ from exponent.core.remote_execution.types import (
6
+ FilePath,
7
+ )
8
+ from exponent.core.remote_execution.utils import (
9
+ safe_write_file,
10
+ )
11
+
12
+
13
async def execute_full_file_rewrite(
    file_path: FilePath, content: str, working_directory: str
) -> str:
    """Write `content` to `file_path` (joined onto `working_directory`),
    creating parent directories as needed.

    Returns:
        A human-readable status string. Failures are reported in the returned
        string rather than raised.
    """
    try:
        target = AsyncPath(os.path.join(working_directory, file_path))

        # Ensure the parent directory exists before writing.
        await target.parent.mkdir(parents=True, exist_ok=True)

        # Record existence *before* the write so we can report create vs. modify.
        existed_before = await target.exists()

        await safe_write_file(target, content)

        action = "Modified" if existed_before else "Created"
        return f"{action} file {file_path} successfully"

    except Exception as e:
        # Intentionally broad: callers receive failures as a message string.
        return f"An error occurred: {e!s}"
@@ -0,0 +1,330 @@
1
+ import os
2
+ from asyncio import to_thread
3
+ from typing import Final, cast
4
+
5
+ from anyio import Path as AsyncPath
6
+ from python_ripgrep import PySortMode, PySortModeKind, files, search
7
+
8
+ from exponent.core.remote_execution.cli_rpc_types import ErrorToolResult, GrepToolResult
9
+ from exponent.core.remote_execution.types import (
10
+ FilePath,
11
+ ListFilesRequest,
12
+ ListFilesResponse,
13
+ RemoteFile,
14
+ )
15
+ from exponent.core.remote_execution.utils import safe_read_file
16
+
17
# NOTE(review): MAX_MATCHING_FILES is not referenced in this module's visible
# code — confirm it is used elsewhere before removing.
MAX_MATCHING_FILES: Final[int] = 10
# Template for missing-path messages; filled via .format(path).
FILE_NOT_FOUND: Final[str] = "File {} does not exist"
# Default ceiling on results returned by file_walk().
MAX_FILES_TO_WALK: Final[int] = 10_000

# Maximum paths returned by glob().
GLOB_MAX_COUNT: Final[int] = 1000
# Maximum matches returned by search_files().
GREP_MAX_RESULTS = 100
23
+
24
+
25
class FileCache:
    """Lazily computed, memoized listing of files under a working directory.

    Args:
        working_directory: The directory whose file list is cached.
    """

    def __init__(self, working_directory: str) -> None:
        self.working_directory = working_directory
        self._cache: list[str] | None = None

    async def get_files(self) -> list[str]:
        """Return the file list, computing it via file_walk on first access.

        Returns:
            File paths under the working directory (cached after first call).
        """
        cached = self._cache
        if cached is None:
            cached = await file_walk(self.working_directory)
            self._cache = cached
        return cached
46
+
47
+
48
+ async def list_files(list_files_request: ListFilesRequest) -> ListFilesResponse:
49
+ """Get a list of files in the specified directory.
50
+
51
+ Args:
52
+ list_files_request: An object containing the directory to list files from.
53
+
54
+ Returns:
55
+ A list of RemoteFile objects representing the files in the directory.
56
+ """
57
+
58
+ filenames = [
59
+ entry.name async for entry in AsyncPath(list_files_request.directory).iterdir()
60
+ ]
61
+
62
+ return ListFilesResponse(
63
+ files=[
64
+ RemoteFile(
65
+ file_path=filename,
66
+ working_directory=list_files_request.directory,
67
+ )
68
+ for filename in filenames
69
+ ],
70
+ correlation_id=list_files_request.correlation_id,
71
+ )
72
+
73
+
74
async def get_file_content(
    absolute_path: FilePath, offset: int | None = None, limit: int | None = None
) -> tuple[str, bool]:
    """Read a file's content, optionally windowed by line offset/limit.

    Args:
        absolute_path: The absolute path to the file.
        offset: Optional 0-based starting line of the window.
        limit: Optional maximum number of lines to return.

    Returns:
        (content, exists): the (possibly windowed) text and whether the path
        exists. A missing path yields the FILE_NOT_FOUND message and False;
        a directory yields "File is a directory" and True.
    """
    file = AsyncPath(absolute_path)
    exists = await file.exists()

    if not exists:
        return FILE_NOT_FOUND.format(absolute_path), False

    if await file.is_dir():
        return "File is a directory", True

    content = await safe_read_file(file)

    if offset or limit:
        lines = content.splitlines()
        lines = lines[offset or 0 :]
        # BUG FIX: previously `limit = limit or -1` turned a missing limit into
        # lines[:-1], silently dropping the file's LAST line whenever only
        # `offset` was supplied. Apply the limit only when one is given.
        if limit is not None:
            lines = lines[:limit]
        content = "\n".join(lines)

    return content, exists
107
+
108
+
109
async def search_files(
    path_str: str,
    file_pattern: str | None,
    regex: str,
    working_directory: str,
    multiline: bool | None = None,
) -> GrepToolResult | ErrorToolResult:
    """Run a ripgrep search for `regex` under `path_str` (relative to
    `working_directory`), optionally restricted to files matching
    `file_pattern`.

    Returns:
        GrepToolResult with up to GREP_MAX_RESULTS matches, or an
        ErrorToolResult when the path is missing or the include glob matches
        no files.
    """
    search_root = AsyncPath(working_directory) / path_str

    if not await search_root.exists():
        return ErrorToolResult(
            error_message=f"Path does not exist: {path_str}",
        )

    resolved_root = str(await search_root.resolve())
    globs = [file_pattern] if file_pattern else None

    # Fail fast with a clear error when the include glob matches nothing.
    if globs:
        any_match = await to_thread(
            files,
            patterns=[],
            paths=[resolved_root],
            globs=globs,
            max_count=1,
        )
        if not any_match:
            return ErrorToolResult(
                error_message=f"No files matched the include glob pattern: {file_pattern} at {path_str}",
            )

    matches = await to_thread(
        search,
        patterns=[regex],
        paths=[resolved_root],
        globs=globs,
        after_context=3,
        before_context=5,
        heading=True,
        separator_field_context="|",
        separator_field_match="|",
        separator_context="\n...\n",
        multiline=multiline,
    )

    return GrepToolResult(
        matches=matches[:GREP_MAX_RESULTS],
        truncated=len(matches) > GREP_MAX_RESULTS,
    )
157
+
158
+
159
async def get_all_file_contents(
    working_directory: str,
) -> list[list[str]]:
    """Collect ripgrep match-everything output for the working directory,
    split into roughly ten size-balanced batches.

    Lock files (poetry.lock, pnpm-lock.yaml) are excluded from the scan.

    Returns:
        A list of batches, each a list of per-file result strings. Empty
        batches are never emitted (the previous implementation could append
        them when a single result exceeded the batch threshold, or when the
        input was empty).
    """
    path_resolved = await AsyncPath(working_directory).resolve()

    results = await to_thread(
        search,
        patterns=[".*"],
        paths=[str(path_resolved)],
        globs=["!**/poetry.lock", "!**/pnpm-lock.yaml"],
        heading=True,
        line_number=False,
    )

    total_size = sum(len(result) for result in results)
    # Aim for ~10 batches; clamp to >= 1 so tiny repos don't get a threshold
    # of 0, which previously forced one batch per result.
    batch_size = max(total_size // 10, 1)

    batches: list[list[str]] = []
    current_batch: list[str] = []
    current_size = 0

    for result in results:
        # Flush before overflowing the threshold, but never record an empty
        # batch.
        if current_batch and current_size + len(result) > batch_size:
            batches.append(current_batch)
            current_batch = []
            current_size = 0

        current_batch.append(result)
        current_size += len(result)

    if current_batch:
        batches.append(current_batch)

    return batches
193
+
194
+
195
async def normalize_files(
    working_directory: str, file_paths: list[FilePath]
) -> list[RemoteFile]:
    """Convert paths into RemoteFile entries relative to the working directory.

    Args:
        working_directory: Base directory paths are made relative to.
        file_paths: Absolute or already-relative paths.

    Returns:
        A sorted list of RemoteFile objects.

    NOTE(review): an absolute path *outside* working_directory makes
    relative_to() raise ValueError — confirm callers never pass one.
    """
    base = await AsyncPath(working_directory).resolve()

    normalized: list[RemoteFile] = []
    for raw_path in file_paths:
        candidate = AsyncPath(raw_path)

        if candidate.is_absolute():
            candidate = candidate.relative_to(base)

        normalized.append(
            RemoteFile(
                file_path=str(candidate),
                working_directory=working_directory,
            )
        )

    return sorted(normalized)
224
+
225
+
226
+ def _format_ignore_globs(ignore_extra: list[str] | None) -> list[str]:
227
+ if ignore_extra is None:
228
+ return []
229
+
230
+ return [f"!**/{ignore}" for ignore in ignore_extra]
231
+
232
+
233
async def file_walk(
    directory: str,
    ignore_extra: list[str] | None = None,
    max_files: int = MAX_FILES_TO_WALK,
) -> list[str]:
    """Walk `directory` and return relative file paths, respecting .gitignore
    plus any additional ignore patterns.

    Args:
        directory: Root directory to walk.
        ignore_extra: Additional ignores in gitignore format.
        max_files: Hard cap on the number of paths returned.

    Returns:
        Paths relative to `directory`, sorted by path. Any result that
        unexpectedly falls outside the root falls back to its basename.
    """
    root = str(await AsyncPath(directory).resolve())

    absolute_paths: list[str] = await to_thread(
        files,
        patterns=[""],
        paths=[root],
        globs=_format_ignore_globs(ignore_extra),
        sort=PySortMode(kind=PySortModeKind.Path),
        max_count=max_files,
    )

    def _relativize(abs_path: str) -> str:
        # os.path handles platform-specific separators; commonpath tells us
        # whether the result actually lives under the walk root.
        if os.path.commonpath([root, abs_path]) == root:
            return os.path.relpath(abs_path, root)
        return os.path.basename(abs_path)

    return [_relativize(p) for p in absolute_paths]
273
+
274
+
275
async def get_all_non_ignored_files(working_directory: str) -> list[RemoteFile]:
    """Walk `working_directory` with DEFAULT_IGNORES applied (on top of
    .gitignore) and return the surviving files as sorted RemoteFile entries."""
    file_paths = await file_walk(working_directory, ignore_extra=DEFAULT_IGNORES)

    return await normalize_files(working_directory, cast(list[FilePath], file_paths))
279
+
280
+
281
async def glob(
    path: str,
    glob_pattern: str,
) -> list[str]:
    """Return up to GLOB_MAX_COUNT paths under `path` matching `glob_pattern`,
    sorted by path, using ripgrep's file lister in a worker thread.

    NOTE: this function shadows the stdlib `glob` module name in this module.
    """
    sort_by_path = PySortMode(kind=PySortModeKind.Path)
    return await to_thread(
        files,
        patterns=[],
        paths=[path],
        globs=[glob_pattern],
        sort=sort_by_path,
        max_count=GLOB_MAX_COUNT,
    )
293
+
294
+
295
# Extra ignore patterns (gitignore format) applied on top of .gitignore when
# walking a repository: VCS metadata, virtualenvs, caches, and Python build
# artifacts. NOTE(review): "__pycache__" appears twice — harmless duplication.
DEFAULT_IGNORES = [
    "**/.git/",
    ".venv/",
    ".mypy_cache",
    ".pytest_cache",
    "node_modules/",
    "venv/",
    ".pyenv",
    "__pycache__",
    ".ipynb_checkpoints",
    ".vercel",
    "__pycache__/",
    "*.py[cod]",
    "*$py.class",
    ".env",
    "*.so",
    ".Python",
    "build/",
    "develop-eggs/",
    "dist/",
    "downloads/",
    "eggs/",
    ".eggs/",
    "lib/",
    "lib64/",
    "parts/",
    "sdist/",
    "var/",
    "wheels/",
    "pip-wheel-metadata/",
    "share/python-wheels/",
    "*.egg-info/",
    ".installed.cfg",
    "*.egg",
    "MANIFEST",
]