indent 0.0.8__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of indent may be problematic; see the registry listing for details.

Files changed (56)
  1. exponent/__init__.py +1 -0
  2. exponent/cli.py +112 -0
  3. exponent/commands/cloud_commands.py +85 -0
  4. exponent/commands/common.py +434 -0
  5. exponent/commands/config_commands.py +581 -0
  6. exponent/commands/github_app_commands.py +211 -0
  7. exponent/commands/listen_commands.py +96 -0
  8. exponent/commands/run_commands.py +208 -0
  9. exponent/commands/settings.py +56 -0
  10. exponent/commands/shell_commands.py +2840 -0
  11. exponent/commands/theme.py +246 -0
  12. exponent/commands/types.py +111 -0
  13. exponent/commands/upgrade.py +29 -0
  14. exponent/commands/utils.py +236 -0
  15. exponent/core/config.py +180 -0
  16. exponent/core/graphql/__init__.py +0 -0
  17. exponent/core/graphql/client.py +59 -0
  18. exponent/core/graphql/cloud_config_queries.py +77 -0
  19. exponent/core/graphql/get_chats_query.py +47 -0
  20. exponent/core/graphql/github_config_queries.py +56 -0
  21. exponent/core/graphql/mutations.py +75 -0
  22. exponent/core/graphql/queries.py +110 -0
  23. exponent/core/graphql/subscriptions.py +452 -0
  24. exponent/core/remote_execution/checkpoints.py +212 -0
  25. exponent/core/remote_execution/cli_rpc_types.py +214 -0
  26. exponent/core/remote_execution/client.py +545 -0
  27. exponent/core/remote_execution/code_execution.py +58 -0
  28. exponent/core/remote_execution/command_execution.py +105 -0
  29. exponent/core/remote_execution/error_info.py +45 -0
  30. exponent/core/remote_execution/exceptions.py +10 -0
  31. exponent/core/remote_execution/file_write.py +410 -0
  32. exponent/core/remote_execution/files.py +415 -0
  33. exponent/core/remote_execution/git.py +268 -0
  34. exponent/core/remote_execution/languages/python_execution.py +239 -0
  35. exponent/core/remote_execution/languages/shell_streaming.py +221 -0
  36. exponent/core/remote_execution/languages/types.py +20 -0
  37. exponent/core/remote_execution/session.py +128 -0
  38. exponent/core/remote_execution/system_context.py +54 -0
  39. exponent/core/remote_execution/tool_execution.py +289 -0
  40. exponent/core/remote_execution/truncation.py +284 -0
  41. exponent/core/remote_execution/types.py +670 -0
  42. exponent/core/remote_execution/utils.py +600 -0
  43. exponent/core/types/__init__.py +0 -0
  44. exponent/core/types/command_data.py +206 -0
  45. exponent/core/types/event_types.py +89 -0
  46. exponent/core/types/generated/__init__.py +0 -0
  47. exponent/core/types/generated/strategy_info.py +225 -0
  48. exponent/migration-docs/login.md +112 -0
  49. exponent/py.typed +4 -0
  50. exponent/utils/__init__.py +0 -0
  51. exponent/utils/colors.py +92 -0
  52. exponent/utils/version.py +289 -0
  53. indent-0.0.8.dist-info/METADATA +36 -0
  54. indent-0.0.8.dist-info/RECORD +56 -0
  55. indent-0.0.8.dist-info/WHEEL +4 -0
  56. indent-0.0.8.dist-info/entry_points.txt +2 -0
@@ -0,0 +1,105 @@
1
+ from pathlib import Path
2
+ from typing import Any
3
+
4
+ from exponent.core.remote_execution import files
5
+ from exponent.core.remote_execution.types import (
6
+ CommandRequest,
7
+ CommandResponse,
8
+ )
9
+ from exponent.core.types.command_data import (
10
+ FileReadCommandData,
11
+ PrototypeCommandData,
12
+ )
13
+
14
# Separator tokens used when concatenating many file contents into one
# context payload.
# Intentionally split the separator literals into two parts so that this
# source file's own content never contains (and so never matches) the
# assembled separator.
CONTEXT_BATCH_SEPARATOR = "\n<batch_sep" + "arator>\n"
CONTEXT_FILE_SEPARATOR = "\n<file_sep" + "arator>\n"
18
+
19
+
20
async def execute_command(
    request: CommandRequest,
    working_directory: str,
) -> CommandResponse:
    """Dispatch a CommandRequest to the matching handler and wrap the result.

    Any exception raised during execution is converted into a
    CommandResponse carrying an error message rather than being propagated.
    """
    try:
        data = request.data

        if isinstance(data, FileReadCommandData):
            resolved_path = Path(working_directory, data.file_path)
            content, _ = await files.get_file_content(
                resolved_path, data.offset, data.limit
            )
            return CommandResponse(
                subcommand=data.type.value,
                content=content,
                correlation_id=request.correlation_id,
            )

        if isinstance(data, PrototypeCommandData):
            output = await execute_prototype_command(
                command_name=data.command_name,
                content_json=data.content_json,
                content_raw=data.content_raw,
                content_rendered=data.content_rendered,
                working_directory=working_directory,
            )
            return CommandResponse(
                subcommand=data.command_name,
                content=output,
                correlation_id=request.correlation_id,
            )

        raise ValueError(f"Unknown command request: {request}")
    except Exception as e:  # noqa: BLE001 - TODO (Josh): Specialize errors for execution
        return CommandResponse(
            content="An error occurred during command execution: " + str(e),
            correlation_id=request.correlation_id,
        )
65
+
66
+
67
async def execute_prototype_command(
    command_name: str,
    content_json: dict[str, Any],
    content_raw: str,
    content_rendered: str,
    working_directory: str,
) -> str:
    """Run a named prototype command and return its textual output.

    Raises:
        ValueError: for a command name with no handler.
    """
    if command_name == "file_open":
        return f'Successfully opened file "{content_json["file_path"]}"'

    if command_name == "search_files":
        matches = await files.search_files(
            path_str=content_json["path"],
            file_pattern=content_json["file_pattern"],
            regex=content_json["regex"],
            working_directory=working_directory,
        )
        return "\n".join(matches)

    if command_name == "codebase_context":
        batches = await files.get_all_file_contents(
            working_directory=working_directory,
        )
        joined_batches = (
            CONTEXT_FILE_SEPARATOR.join(batch) for batch in batches
        )
        return CONTEXT_BATCH_SEPARATOR.join(joined_batches)

    if command_name == "ls":
        entries = await files.file_walk(
            directory=content_json["path"],
            ignore_extra=files.DEFAULT_IGNORES,
            max_files=1000,
        )
        return "\n".join(entries)

    if command_name == "glob":
        matches = await files.glob(
            path=content_json["path"],
            glob_pattern=content_json["glob"],
        )
        return "\n".join(matches)

    raise ValueError(f"Unhandled prototype command: {command_name}")
@@ -0,0 +1,45 @@
1
+ import traceback
2
+ from typing import Optional
3
+
4
+ from pydantic import BaseModel
5
+
6
+
7
class SerializableErrorInfo(BaseModel):
    """Serializable snapshot of an exception, including its chained errors."""

    # Human-readable exception message (str(error)).
    message: str
    # Formatted traceback lines, as produced by traceback.format_tb.
    stack: list[str]
    # Exception class name, if known.
    cls_name: str | None
    # Explicit chain: the exception named via `raise ... from ...`.
    cause: Optional["SerializableErrorInfo"]
    # Implicit chain: the exception being handled when this one was raised.
    context: Optional["SerializableErrorInfo"]

    def __str__(self) -> str:
        return self.to_string()

    def to_string(self) -> str:
        """Render the error and its cause/context chain as readable text."""
        parts = [self.message]
        if self.stack:
            parts.append("\nStack Trace:\n" + "".join(self.stack))
        if self.cause:
            parts.append(
                "\nThe above exception was caused by the following exception:\n"
                + self.cause.to_string()
            )
        if self.context:
            parts.append(
                "\nThe above exception occurred during handling of the following exception:\n"
                + self.context.to_string()
            )
        return "".join(parts)


# Resolve the self-referencing Optional["SerializableErrorInfo"] annotations.
SerializableErrorInfo.model_rebuild()
36
+
37
+
38
def serialize_error_info(error: BaseException) -> SerializableErrorInfo:
    """Recursively convert an exception and its chain to SerializableErrorInfo.

    Follows the same chaining rules as the standard traceback display:
    the implicit context (`__context__`) is serialized only when it was not
    suppressed. Previously an exception raised with `raise ... from None`
    (or `from X`) still serialized its `__context__`, contradicting the
    author's explicit suppression.
    """
    cause = serialize_error_info(error.__cause__) if error.__cause__ else None

    # Mirror traceback.TracebackException: honor `raise ... from ...`, which
    # sets __suppress_context__ = True.
    context = None
    if error.__context__ is not None and not error.__suppress_context__:
        context = serialize_error_info(error.__context__)

    return SerializableErrorInfo(
        message=str(error),
        stack=traceback.format_tb(error.__traceback__),
        cls_name=error.__class__.__name__,
        cause=cause,
        context=context,
    )
@@ -0,0 +1,10 @@
1
class ExponentError(Exception):
    # NOTE(review): appears to be the package's generic error type;
    # confirm intended usage against raise sites elsewhere in the package.
    pass


class HandledExponentError(Exception):
    # NOTE(review): name suggests an error already reported/handled upstream —
    # verify against call sites before relying on that.
    pass


class RateLimitError(Exception):
    # NOTE(review): presumably raised when a remote API rate limit is hit —
    # confirm against the code that raises it.
    pass
@@ -0,0 +1,410 @@
1
+ import logging
2
+ import os
3
+ import re
4
+ import subprocess
5
+ from collections.abc import Callable
6
+ from textwrap import dedent, indent
7
+
8
+ from anyio import Path as AsyncPath
9
+ from diff_match_patch import diff_match_patch
10
+ from pydantic import BaseModel
11
+
12
+ from exponent.core.remote_execution.types import (
13
+ FilePath,
14
+ FileWriteRequest,
15
+ FileWriteResponse,
16
+ )
17
+ from exponent.core.remote_execution.utils import (
18
+ assert_unreachable,
19
+ safe_read_file,
20
+ safe_write_file,
21
+ )
22
+ from exponent.core.types.command_data import (
23
+ WRITE_STRATEGY_FULL_FILE_REWRITE,
24
+ WRITE_STRATEGY_NATURAL_EDIT,
25
+ WRITE_STRATEGY_SEARCH_REPLACE,
26
+ WRITE_STRATEGY_UDIFF,
27
+ )
28
+
29
+ logger = logging.getLogger(__name__)
30
+
31
+
32
class FileEditResult(BaseModel):
    # Resulting file content after applying the edit(s); None means the edit
    # could not be applied (see apply_udiff / open_file_and_apply_edit).
    content: str | None
    # (search, replace) pairs that failed to apply.
    failed_edits: list[tuple[str, str]]
35
+
36
+
37
async def execute_file_write(
    event: FileWriteRequest, working_directory: str
) -> FileWriteResponse:
    """Apply a file write request using the strategy named in the event."""
    strategy = event.write_strategy
    new_content = event.content

    if strategy in (WRITE_STRATEGY_FULL_FILE_REWRITE, WRITE_STRATEGY_NATURAL_EDIT):
        # Natural edits are currently applied as a full-file rewrite.
        message = await execute_full_file_rewrite(
            event.file_path, new_content, working_directory
        )
    elif strategy == WRITE_STRATEGY_UDIFF:
        message = await execute_udiff_edit(
            event.file_path, new_content, working_directory
        )
    elif strategy == WRITE_STRATEGY_SEARCH_REPLACE:
        message = await execute_search_replace_edit(
            event.file_path, new_content, working_directory
        )
    else:
        assert_unreachable(strategy)

    return FileWriteResponse(
        content=message,
        correlation_id=event.correlation_id,
    )
63
+
64
+
65
def lint_file(file_path: str, working_directory: str) -> str:
    """Run `ruff check --fix` on a file and return its output as text.

    Returns a human-readable report string; never raises.
    """
    try:
        # Construct the absolute path.
        full_file_path = os.path.join(working_directory, file_path)

        # check=False: ruff exits non-zero whenever it finds violations (or
        # the file is missing). With check=True that raised CalledProcessError
        # and the useful lint output was discarded in favor of a generic
        # error message; we want ruff's stdout/stderr either way.
        result = subprocess.run(
            ["ruff", "check", "--fix", full_file_path],
            capture_output=True,
            text=True,
            check=False,
        )

        return f"Lint results:\n\n{result.stdout}\n\n{result.stderr}"
    except Exception as e:  # noqa: BLE001
        # e.g. ruff not installed (FileNotFoundError) — report, don't raise.
        return f"An error occurred while linting: {e!s}"
83
+
84
+
85
async def execute_full_file_rewrite(
    file_path: FilePath, content: str, working_directory: str
) -> str:
    """Replace (or create) a file with the given content.

    Returns a human-readable status string; errors are reported in the
    return value rather than raised.
    """
    try:
        target = AsyncPath(os.path.join(working_directory, file_path))

        # Make sure the parent directory exists before writing.
        await target.parent.mkdir(parents=True, exist_ok=True)

        # Record existence *before* the write so the created-vs-modified
        # message is accurate.
        already_existed = await target.exists()

        await safe_write_file(target, content)

        verb = "Modified" if already_existed else "Created"
        return f"{verb} file {file_path} successfully"

    except Exception as e:  # noqa: BLE001
        return f"An error occurred: {e!s}"
108
+
109
+
110
async def execute_udiff_edit(
    file_path: str, content: str, working_directory: str
) -> str:
    """Apply a unified-diff-style edit to a file via execute_partial_edit."""
    return await execute_partial_edit(
        file_path=file_path,
        edit_content=content,
        working_directory=working_directory,
        edit_function=apply_udiff,
    )
116
+
117
+
118
async def execute_search_replace_edit(
    file_path: str, content: str, working_directory: str
) -> str:
    """Apply search/replace-block edits to a file via execute_partial_edit."""
    return await execute_partial_edit(
        file_path=file_path,
        edit_content=content,
        working_directory=working_directory,
        edit_function=apply_all_search_replace,
    )
124
+
125
+
126
async def execute_partial_edit(
    file_path: str,
    edit_content: str,
    working_directory: str,
    edit_function: Callable[[str, str], FileEditResult],
) -> str:
    """Apply a partial edit (udiff or search/replace) to a file.

    Creates the file (and parent directories) if needed, applies
    `edit_function` to the current content, and returns a human-readable
    status string. Exceptions propagate to the caller — the previous
    `try: ... except Exception as e: raise e` wrapper was a no-op re-raise
    and has been removed.
    """
    # Construct the absolute path.
    full_file_path = AsyncPath(os.path.join(working_directory, file_path))

    # Ensure the parent directory exists before touching the file.
    await full_file_path.parent.mkdir(parents=True, exist_ok=True)

    # Read the current content, creating an empty file if it doesn't exist.
    file_content, created = await read_or_init_file(full_file_path)

    success = await open_file_and_apply_edit(
        file_path=full_file_path,
        file_content=file_content,
        edit_content=edit_content,
        edit_function=edit_function,
    )

    if success:
        verb = "Created" if created else "Modified"
        return f"{verb} file {file_path}"

    verb = "create" if created else "modify"
    return f"Failed to {verb} file {file_path}"
158
+
159
+
160
async def read_or_init_file(file_path: FilePath) -> tuple[str, bool]:
    """Return (content, created): the file's text plus whether it was just created."""
    path = AsyncPath(file_path)

    if await path.exists():
        return await safe_read_file(path), False

    # Missing file: create it empty so a subsequent write has a target.
    await path.touch()
    return "", True
169
+
170
+
171
async def open_file_and_apply_edit(
    file_path: FilePath,
    file_content: str,
    edit_content: str,
    edit_function: Callable[[str, str], FileEditResult],
) -> bool:
    """Apply `edit_function` to `file_content` and persist the result.

    Returns True when the edit applied and the file was written, False when
    the edit could not be applied.
    """
    result = edit_function(file_content, edit_content)

    # `content is None` signals a failed edit; an empty string is a valid
    # result (e.g. an edit that deletes the whole file) and must still be
    # written. The previous truthiness check (`if not result.content`)
    # wrongly reported an empty result as a failure and skipped the write.
    if result.content is None:
        return False

    await safe_write_file(file_path, result.content)

    return True
185
+
186
+
187
+ def find_leading_whitespace(existing_content: str, search: str) -> str | None:
188
+ existing_lines = existing_content.splitlines()
189
+
190
+ search_line_count = len(search.splitlines())
191
+ dedented_search = dedent(search)
192
+
193
+ for i in range(len(existing_lines)):
194
+ existing_window_content = "\n".join(existing_lines[i : i + search_line_count])
195
+ dedented_existing_window = dedent(existing_window_content)
196
+
197
+ leading_ws_len = len(existing_window_content) - len(
198
+ existing_window_content.lstrip()
199
+ )
200
+ leading_ws = existing_window_content[:leading_ws_len]
201
+
202
+ if dedented_existing_window == dedented_search:
203
+ return leading_ws
204
+
205
+ return None
206
+
207
+
208
def try_fix_whitespace(
    existing_content: str, search: str, replace: str
) -> tuple[str, str] | None:
    """Re-indent a search/replace pair to match the file's indentation.

    When the search block matches the file only after dedenting, returns the
    (search, replace) pair re-indented with the file's leading whitespace so
    the edit is more likely to apply; otherwise returns None.
    """
    leading_ws = find_leading_whitespace(existing_content, search)
    if leading_ws is None:
        return None

    return (
        indent(dedent(search), leading_ws),
        indent(dedent(replace), leading_ws),
    )
221
+
222
+
223
def try_search_replace(existing_content: str, search: str, replace: str) -> str | None:
    """Apply a search/replace pair, retrying with fixed indentation.

    Returns the new content, or None when the search text cannot be located
    even after whitespace normalization.
    """
    # Attempt 1: exact text match.
    attempt = simple_search_and_replace(existing_content, search, replace)
    if attempt:
        return attempt

    # Attempt 2: re-indent the pair to match the file, then retry.
    fixed = try_fix_whitespace(existing_content, search, replace)
    if fixed is None:
        return None

    fixed_search, fixed_replace = fixed
    attempt = simple_search_and_replace(existing_content, fixed_search, fixed_replace)
    return attempt if attempt else None
240
+
241
+
242
def try_diff_patch(existing_content: str, search: str, replace: str) -> str | None:
    """Fuzzy-apply a search/replace pair via diff-match-patch; None on failure."""
    patched = diff_patch_search_and_replace(existing_content, search, replace)
    if not patched:
        return None

    print("Applied diff patch search and replace")
    return patched
249
+
250
+
251
def apply_udiff(existing_content: str, diff_content: str) -> FileEditResult:
    """Apply a unified diff to `existing_content`.

    Each hunk is tried first as an exact search/replace, then as a fuzzy
    diff-match-patch application.

    NOTE(review): this returns as soon as the first non-empty hunk is
    resolved (success or failure) — subsequent hunks are never applied;
    confirm that is intended.
    """
    for hunk in get_raw_udiff_hunks(diff_content):
        if not hunk:
            continue

        search, replace = split_hunk_for_search_and_replace(hunk)

        # Exact match first, then fuzzy matching as a fallback.
        for strategy in (try_search_replace, try_diff_patch):
            updated = strategy(existing_content, search, replace)
            if updated is not None:
                print("Applied successfully!")
                return FileEditResult(content=updated, failed_edits=[])

        print("Failed to apply hunk, exiting!")
        return FileEditResult(content=None, failed_edits=[(search, replace)])

    # No (non-empty) hunks at all: the content is unchanged.
    return FileEditResult(content=existing_content, failed_edits=[])
276
+
277
+
278
def get_raw_udiff_hunks(content: str) -> list[list[str]]:
    """Split unified-diff text into hunks on `@@` header lines.

    Header lines themselves are discarded; each hunk is the list of its body
    lines with line endings preserved.
    """
    hunks: list[list[str]] = []
    pending: list[str] = []

    for raw_line in content.splitlines(keepends=True):
        if raw_line.startswith("@@"):
            # Hunk boundary: flush whatever has been collected so far.
            if pending:
                hunks.append(pending)
            pending = []
        else:
            pending.append(raw_line)

    if pending:
        hunks.append(pending)

    return hunks
292
+
293
+
294
def split_hunk_for_search_and_replace(hunk: list[str]) -> tuple[str, str]:
    """Split hunk lines into (search, replace) texts.

    '-' and context (' ') lines feed the search text; '+' and context lines
    feed the replace text. Lines with no content after the prefix are
    skipped.
    """
    search_parts: list[str] = []
    replace_parts: list[str] = []

    for raw_line in hunk:
        if not raw_line:
            continue
        prefix = raw_line[0]
        body = raw_line[1:]
        if not body:
            continue
        if prefix in ("-", " "):
            search_parts.append(body)
        if prefix in ("+", " "):
            replace_parts.append(body)

    return "".join(search_parts), "".join(replace_parts)
311
+
312
+
313
+ def simple_search_and_replace(content: str, search: str, replace: str) -> str | None:
314
+ if content.count(search) >= 1:
315
+ return content.replace(search, replace)
316
+ return None
317
+
318
+
319
def diff_patch_search_and_replace(
    content: str, search: str, replace: str
) -> str | None:
    """Fuzzy-apply a search->replace edit to `content` using diff-match-patch.

    Builds a patch from the search/replace pair, shifts its offsets to where
    `search` fuzzily matches inside `content`, and applies it. Returns the
    patched content, or None when any patch hunk fails to apply.
    """
    patcher = diff_match_patch()
    # 3 second timeout for computing diffs
    patcher.Diff_Timeout = 3
    # Fairly permissive fuzzy-match tuning (threshold: 0.0 = exact match
    # required, 1.0 = match anything).
    patcher.Match_Threshold = 0.95
    patcher.Match_Distance = 500
    patcher.Match_MaxBits = 128
    patcher.Patch_Margin = 32
    search_vs_replace_diff = patcher.diff_main(search, replace, False)

    # Simplify the diff as much as possible
    patcher.diff_cleanupEfficiency(search_vs_replace_diff)
    patcher.diff_cleanupSemantic(search_vs_replace_diff)

    original_vs_search_diff = patcher.diff_main(search, content)
    new_diffs = patcher.patch_make(search, search_vs_replace_diff)
    # Offset the search vs. replace diffs with the offset
    # of the search diff within the original content.
    for new_diff in new_diffs:
        new_diff.start1 = patcher.diff_xIndex(original_vs_search_diff, new_diff.start1)
        new_diff.start2 = patcher.diff_xIndex(original_vs_search_diff, new_diff.start2)

    # patch_apply returns (patched_text, per-hunk success flags).
    new_content, successes = patcher.patch_apply(new_diffs, content)
    if not all(successes):
        return None

    return str(new_content)
348
+
349
+
350
# Matches fenced search/replace blocks of the form:
#   <<<<<<< SEARCH
#   ...search text...
#   =======
#   ...replace text...
#   >>>>>>> REPLACE
# Either section may be empty; re.DOTALL lets the sections span lines.
SEARCH_REPLACE_RE = re.compile(
    r"[^<>]*<<<+\s*SEARCH\n((?P<search>.*?)\n)??===+\n((?P<replace>.*?)\n)??>>>+\s*?REPLACE\s*?[^<>]*",
    re.DOTALL,
)

# Matches XML-tagged pairs: <search>...</search> followed by
# <replace>...</replace>; either body may be empty.
TAGGED_SEARCH_REPLACE_RE = re.compile(
    r"<search>(?P<search>.*?)??</search>\s*?<replace>(?P<replace>.*?)??</replace>",
    re.DOTALL,
)
359
+
360
+
361
def apply_search_replace(result: str, search: str, replace: str) -> str | None:
    """Apply one search/replace pair, guarding against degenerate inputs.

    Returns the new content, `replace` itself for a new file (empty search
    against empty content), or None when the pair is nonsensical or cannot
    be applied.
    """
    if not search:
        if not replace:
            # Both sections empty: nothing meaningful to do.
            return None
        if not result:
            # Empty search against an empty file: treat as file creation.
            return replace

    if not search.strip():
        # A whitespace-only search would match far too broadly to be safe.
        return None

    return try_search_replace(result, search, replace)
376
+
377
+
378
def apply_all_search_replace(
    existing_content: str,
    sr_content: str,
    match_re: re.Pattern[str] = SEARCH_REPLACE_RE,
) -> FileEditResult:
    """Apply every search/replace pair found in `sr_content`, in order.

    Pairs that fail to apply are collected in `failed_edits`; the remaining
    pairs are still applied to the running result.
    """
    current = existing_content
    failures: list[tuple[str, str]] = []

    for match in match_re.finditer(sr_content):
        groups = match.groupdict()
        search = groups.get("search") or ""
        replace = groups.get("replace") or ""

        applied = apply_search_replace(current, search, replace)
        if applied is None:
            failures.append((search, replace))
        else:
            current = applied

    return FileEditResult(content=current, failed_edits=failures)
403
+
404
+
405
def apply_all_tagged_search_replace(
    existing_content: str, sr_content: str
) -> FileEditResult:
    """Like apply_all_search_replace, but for <search>/<replace> tagged pairs."""
    return apply_all_search_replace(
        existing_content,
        sr_content,
        match_re=TAGGED_SEARCH_REPLACE_RE,
    )