batrachian-toad 0.5.22 (py3-none-any.whl)

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (120)
  1. batrachian_toad-0.5.22.dist-info/METADATA +197 -0
  2. batrachian_toad-0.5.22.dist-info/RECORD +120 -0
  3. batrachian_toad-0.5.22.dist-info/WHEEL +4 -0
  4. batrachian_toad-0.5.22.dist-info/entry_points.txt +2 -0
  5. batrachian_toad-0.5.22.dist-info/licenses/LICENSE +661 -0
  6. toad/__init__.py +46 -0
  7. toad/__main__.py +4 -0
  8. toad/_loop.py +86 -0
  9. toad/about.py +90 -0
  10. toad/acp/agent.py +671 -0
  11. toad/acp/api.py +47 -0
  12. toad/acp/encode_tool_call_id.py +12 -0
  13. toad/acp/messages.py +138 -0
  14. toad/acp/prompt.py +54 -0
  15. toad/acp/protocol.py +426 -0
  16. toad/agent.py +62 -0
  17. toad/agent_schema.py +70 -0
  18. toad/agents.py +45 -0
  19. toad/ansi/__init__.py +1 -0
  20. toad/ansi/_ansi.py +1612 -0
  21. toad/ansi/_ansi_colors.py +264 -0
  22. toad/ansi/_control_codes.py +37 -0
  23. toad/ansi/_keys.py +251 -0
  24. toad/ansi/_sgr_styles.py +64 -0
  25. toad/ansi/_stream_parser.py +418 -0
  26. toad/answer.py +22 -0
  27. toad/app.py +557 -0
  28. toad/atomic.py +37 -0
  29. toad/cli.py +257 -0
  30. toad/code_analyze.py +28 -0
  31. toad/complete.py +34 -0
  32. toad/constants.py +58 -0
  33. toad/conversation_markdown.py +19 -0
  34. toad/danger.py +371 -0
  35. toad/data/agents/ampcode.com.toml +51 -0
  36. toad/data/agents/augmentcode.com.toml +40 -0
  37. toad/data/agents/claude.com.toml +41 -0
  38. toad/data/agents/docker.com.toml +59 -0
  39. toad/data/agents/geminicli.com.toml +28 -0
  40. toad/data/agents/goose.ai.toml +51 -0
  41. toad/data/agents/inference.huggingface.co.toml +33 -0
  42. toad/data/agents/kimi.com.toml +35 -0
  43. toad/data/agents/openai.com.toml +53 -0
  44. toad/data/agents/opencode.ai.toml +61 -0
  45. toad/data/agents/openhands.dev.toml +44 -0
  46. toad/data/agents/stakpak.dev.toml +61 -0
  47. toad/data/agents/vibe.mistral.ai.toml +27 -0
  48. toad/data/agents/vtcode.dev.toml +62 -0
  49. toad/data/images/frog.png +0 -0
  50. toad/data/sounds/turn-over.wav +0 -0
  51. toad/db.py +5 -0
  52. toad/dec.py +332 -0
  53. toad/directory.py +234 -0
  54. toad/directory_watcher.py +96 -0
  55. toad/fuzzy.py +140 -0
  56. toad/gist.py +2 -0
  57. toad/history.py +138 -0
  58. toad/jsonrpc.py +576 -0
  59. toad/menus.py +14 -0
  60. toad/messages.py +74 -0
  61. toad/option_content.py +51 -0
  62. toad/os.py +0 -0
  63. toad/path_complete.py +145 -0
  64. toad/path_filter.py +124 -0
  65. toad/paths.py +71 -0
  66. toad/pill.py +23 -0
  67. toad/prompt/extract.py +19 -0
  68. toad/prompt/resource.py +68 -0
  69. toad/protocol.py +28 -0
  70. toad/screens/action_modal.py +94 -0
  71. toad/screens/agent_modal.py +172 -0
  72. toad/screens/command_edit_modal.py +58 -0
  73. toad/screens/main.py +192 -0
  74. toad/screens/permissions.py +390 -0
  75. toad/screens/permissions.tcss +72 -0
  76. toad/screens/settings.py +254 -0
  77. toad/screens/settings.tcss +101 -0
  78. toad/screens/store.py +476 -0
  79. toad/screens/store.tcss +261 -0
  80. toad/settings.py +354 -0
  81. toad/settings_schema.py +318 -0
  82. toad/shell.py +263 -0
  83. toad/shell_read.py +42 -0
  84. toad/slash_command.py +34 -0
  85. toad/toad.tcss +752 -0
  86. toad/version.py +80 -0
  87. toad/visuals/columns.py +273 -0
  88. toad/widgets/agent_response.py +79 -0
  89. toad/widgets/agent_thought.py +41 -0
  90. toad/widgets/command_pane.py +224 -0
  91. toad/widgets/condensed_path.py +93 -0
  92. toad/widgets/conversation.py +1626 -0
  93. toad/widgets/danger_warning.py +65 -0
  94. toad/widgets/diff_view.py +709 -0
  95. toad/widgets/flash.py +81 -0
  96. toad/widgets/future_text.py +126 -0
  97. toad/widgets/grid_select.py +223 -0
  98. toad/widgets/highlighted_textarea.py +180 -0
  99. toad/widgets/mandelbrot.py +294 -0
  100. toad/widgets/markdown_note.py +13 -0
  101. toad/widgets/menu.py +147 -0
  102. toad/widgets/non_selectable_label.py +5 -0
  103. toad/widgets/note.py +18 -0
  104. toad/widgets/path_search.py +381 -0
  105. toad/widgets/plan.py +180 -0
  106. toad/widgets/project_directory_tree.py +74 -0
  107. toad/widgets/prompt.py +741 -0
  108. toad/widgets/question.py +337 -0
  109. toad/widgets/shell_result.py +35 -0
  110. toad/widgets/shell_terminal.py +18 -0
  111. toad/widgets/side_bar.py +74 -0
  112. toad/widgets/slash_complete.py +211 -0
  113. toad/widgets/strike_text.py +66 -0
  114. toad/widgets/terminal.py +526 -0
  115. toad/widgets/terminal_tool.py +338 -0
  116. toad/widgets/throbber.py +90 -0
  117. toad/widgets/tool_call.py +303 -0
  118. toad/widgets/user_input.py +23 -0
  119. toad/widgets/version.py +5 -0
  120. toad/widgets/welcome.py +31 -0
toad/directory.py ADDED
@@ -0,0 +1,234 @@
+ from __future__ import annotations
+
+ import asyncio
+ from itertools import filterfalse
+ from typing import Callable
+ from time import time
+ from os import PathLike
+ from pathlib import Path
+
+ from textual._partition import partition
+
+ from toad.path_filter import PathFilter
+
+
+ class ScanJob:
+     """A single directory scanning job."""
+
+     def __init__(
+         self,
+         name: str,
+         queue: asyncio.Queue[Path],
+         results: list[Path],
+         path_filter: PathFilter | None = None,
+         add_directories: bool = False,
+     ) -> None:
+         self.queue = queue
+         self.results = results
+         self.name = name
+         self.path_filter = path_filter
+         self.add_directories = add_directories
+
+     def start(self) -> None:
+         self._task = asyncio.create_task(self.run())
+
+     async def run(self) -> None:
+         queue = self.queue
+         results = self.results
+         add_directories = self.add_directories
+         while True:
+             try:
+                 scan_path = await queue.get()
+             except asyncio.QueueShutDown:
+                 break
+             paths, dir_paths = await asyncio.to_thread(
+                 self._scan_directory, scan_path, self.path_filter
+             )
+             if add_directories:
+                 results.extend(dir_paths)
+             results.extend(paths)
+             try:
+                 for path in dir_paths:
+                     await queue.put(path)
+             except asyncio.QueueShutDown:
+                 break
+             queue.task_done()
+
+     def _scan_directory(
+         self, root: Path, path_filter: PathFilter | None = None
+     ) -> tuple[list[Path], list[Path]]:
+         """Perform a directory scan (done in a thread).
+
+         Args:
+             root: Path to scan.
+             path_filter: PathFilter object.
+
+         Returns:
+             A tuple of two lists of paths: (files, directories).
+         """
+         try:
+             paths = list(root.iterdir())
+         except IOError:
+             paths = []
+         if path_filter is not None:
+             paths = list(filterfalse(path_filter.match, paths))
+
+         try:
+             paths, dir_paths = partition(Path.is_dir, paths)
+         except IOError:
+             paths = []
+             dir_paths = []
+         return paths, dir_paths
+
+
+ async def scan(
+     root: Path,
+     *,
+     max_simultaneous: int = 5,
+     path_filter: PathFilter | None = None,
+     add_directories: bool = False,
+     max_duration: float | None = 5.0,
+ ) -> list[Path]:
+     """Scan a directory for paths.
+
+     Args:
+         root: Root directory to scan.
+         max_simultaneous: Maximum number of scan jobs.
+         path_filter: Path filter object.
+         add_directories: Also collect directories?
+         max_duration: Maximum time in seconds to scan for, or `None` for no maximum.
+
+     Returns:
+         A list of Paths.
+     """
+     queue: asyncio.Queue[Path] = asyncio.Queue()
+     results: list[Path] = []
+     jobs = [
+         ScanJob(
+             f"scan-job #{index}",
+             queue,
+             results,
+             path_filter=path_filter,
+             add_directories=add_directories,
+         )
+         for index in range(max_simultaneous)
+     ]
+     try:
+         await queue.put(root)
+         for job in jobs:
+             job.start()
+         if max_duration is not None:
+             try:
+                 async with asyncio.timeout(max_duration):
+                     await queue.join()
+             except asyncio.TimeoutError:
+                 pass
+         else:
+             await queue.join()
+     except asyncio.CancelledError:
+         await queue.join()
+     queue.shutdown(immediate=True)
+     return results
+
+
+ class Scan:
+     """A scan of a single directory."""
+
+     def __init__(self, root: Path, on_complete: Callable[[Scan], None]) -> None:
+         self.root = root
+         self._on_complete = on_complete
+         self._complete_event = asyncio.Event()
+         self._scan_result: list[Path] = []
+         self._scan_task: asyncio.Task | None = None
+         self._scan_time = time()
+
+     @property
+     def is_complete(self) -> bool:
+         """Has the scan finished?"""
+         return self._complete_event.is_set()
+
+     def start(self) -> None:
+         self._scan_time = time()
+         self._scan_task = asyncio.create_task(self._run(), name=f"scan {self.root!s}")
+
+     async def _run(self) -> None:
+         await asyncio.to_thread(self._scan)
+         self._complete_event.set()
+         self._on_complete(self)
+
+     def _scan(self) -> None:
+         self._scan_result = list(self.root.iterdir())
+
+     async def wait(self) -> list[Path]:
+         """Get the result of the scan, potentially waiting for it to finish first.
+
+         Returns:
+             A list of paths in the root.
+         """
+         await self._complete_event.wait()
+         assert self._scan_result is not None
+         self._scan_task = None
+         return self._scan_result
+
+
+ class DirectoryScanner:
+     """Object to recursively scan a directory."""
+
+     def __init__(self, root: PathLike) -> None:
+         self.root = Path(root)
+         self.directories: dict[Path, Scan] = {}
+
+     async def scan(
+         self, relative_directory_path: str, on_complete: Callable[[Scan], None]
+     ) -> Scan:
+         """Get a scan.
+
+         Scans are created on demand, or a previously created scan is returned.
+
+         Args:
+             relative_directory_path: A path relative to the root.
+             on_complete: Callback invoked with the Scan instance when the scan is complete.
+
+         Returns:
+             A Scan instance.
+         """
+         scan_path = self.root / relative_directory_path
+         if scan := self.directories.get(scan_path):
+             if scan.is_complete:
+                 on_complete(scan)
+         else:
+             self.directories[scan_path] = scan = Scan(
+                 scan_path, on_complete=on_complete
+             )
+             scan.start()
+         return scan
+
+
+ if __name__ == "__main__":
+     import asyncio
+
+     import contextlib
+     from time import perf_counter
+     from typing import Generator
+
+     @contextlib.contextmanager
+     def timer(subject: str = "time") -> Generator[None, None, None]:
+         """Print the elapsed time (only used in debugging)."""
+         start = perf_counter()
+         yield
+         elapsed = perf_counter() - start
+         print(f"{subject} elapsed {elapsed:.4f}s")
+
+     from toad.path_filter import PathFilter
+
+     scan_path = Path("~/projects/textual").expanduser()
+
+     path_filter = PathFilter.from_git_root(scan_path)
+
+     async def run():
+         with timer("scan"):
+             return await scan(scan_path, path_filter=path_filter)
+
+     paths = asyncio.run(run())
+     print(len(paths))
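The `__main__` block above only exercises the flat `scan` function. A minimal sketch of how the on-demand `DirectoryScanner`/`Scan` pair might be driven follows; it is illustrative only, and the root path, relative directory, callback body, and entry point are assumptions rather than code from the package:

# Illustrative sketch only -- not part of the package diff.
import asyncio
from pathlib import Path

from toad.directory import DirectoryScanner, Scan


async def main() -> None:
    # Scans are created on demand and cached per directory path.
    scanner = DirectoryScanner(Path.home())

    def on_complete(scan: Scan) -> None:
        # Called once the directory listing for this scan is ready.
        print(f"finished scanning {scan.root}")

    scan = await scanner.scan("projects", on_complete)
    paths = await scan.wait()
    print(f"{len(paths)} paths in {scan.root}")


if __name__ == "__main__":
    asyncio.run(main())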
toad/directory_watcher.py ADDED
@@ -0,0 +1,96 @@
+ from pathlib import Path
+ import rich.repr
+
+ import threading
+
+ from textual.message import Message
+ from textual.widget import Widget
+
+
+ from watchdog.events import (
+     FileSystemEvent,
+     FileSystemEventHandler,
+     FileCreatedEvent,
+     FileDeletedEvent,
+     FileMovedEvent,
+     DirCreatedEvent,
+     DirDeletedEvent,
+     DirMovedEvent,
+ )
+ from watchdog.observers import Observer
+ from watchdog.observers.polling import PollingObserver
+
+
+ class DirectoryChanged(Message):
+     """The directory was changed."""
+
+     def can_replace(self, message: Message) -> bool:
+         return isinstance(message, DirectoryChanged)
+
+
+ @rich.repr.auto
+ class DirectoryWatcher(threading.Thread, FileSystemEventHandler):
+     """Watch for changes to a directory, ignoring changes that only modify file data."""
+
+     def __init__(self, path: Path, widget: Widget) -> None:
+         """Initialize the directory watcher.
+
+         Args:
+             path: Root path to monitor.
+             widget: Widget which will receive the `DirectoryChanged` event.
+         """
+         self._path = path
+         self._widget = widget
+         self._stop_event = threading.Event()
+         self._enabled = False
+         super().__init__(name=repr(self))
+
+     @property
+     def enabled(self) -> bool:
+         """Is the DirectoryWatcher currently watching?"""
+         return self._enabled
+
+     def on_any_event(self, event: FileSystemEvent) -> None:
+         """Send a DirectoryChanged message when the filesystem is updated."""
+         self._widget.post_message(DirectoryChanged())
+
+     def __rich_repr__(self) -> rich.repr.Result:
+         yield self._path
+         yield self._widget
+
+     def run(self) -> None:
+         try:
+             observer = Observer()
+         except Exception:
+             return
+         if isinstance(observer, PollingObserver):
+             return
+         try:
+             observer.schedule(
+                 self,
+                 str(self._path),
+                 recursive=True,
+                 event_filter=[
+                     FileCreatedEvent,
+                     FileDeletedEvent,
+                     FileMovedEvent,
+                     DirCreatedEvent,
+                     DirDeletedEvent,
+                     DirMovedEvent,
+                 ],
+             )
+             observer.start()
+         except Exception:
+             return
+         self._enabled = True
+         while not self._stop_event.wait(1):
+             pass
+         try:
+             observer.stop()
+         except Exception:
+             pass
+
+     def stop(self) -> None:
+         """Stop the watcher."""
+         self._stop_event.set()
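`DirectoryWatcher` is a thread that posts `DirectoryChanged` messages to a Textual widget. A minimal sketch of how a widget might own one follows; it is illustrative only: apart from `DirectoryWatcher` and `DirectoryChanged`, the widget, app, and watched path are assumptions, not code from the package:

# Illustrative sketch only -- not part of the package diff.
from pathlib import Path

from textual.app import App, ComposeResult
from textual.widgets import Static

from toad.directory_watcher import DirectoryChanged, DirectoryWatcher


class ProjectView(Static):
    def on_mount(self) -> None:
        # Start watching the current working directory; the watcher thread
        # posts DirectoryChanged messages back to this widget.
        self._watcher = DirectoryWatcher(Path.cwd(), self)
        self._watcher.start()

    def on_unmount(self) -> None:
        # Signal the watcher thread to exit.
        self._watcher.stop()

    def on_directory_changed(self, message: DirectoryChanged) -> None:
        self.update("Directory changed -- refresh the listing here.")


class WatchApp(App):
    def compose(self) -> ComposeResult:
        yield ProjectView("Watching for changes…")


if __name__ == "__main__":
    WatchApp().run()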
toad/fuzzy.py ADDED
@@ -0,0 +1,140 @@
+ """
+ Fuzzy matcher.
+
+ This class is used by the [command palette](/guide/command_palette) to match search terms.
+
+ """
+
+ from __future__ import annotations
+
+ from functools import lru_cache
+ from operator import itemgetter
+ from re import finditer
+ from typing import Iterable, Sequence
+
+
+ from textual.cache import LRUCache
+
+
+ class FuzzySearch:
+     """Performs a fuzzy search.
+
+     Unlike a regex solution, this will find all possible matches.
+     """
+
+     def __init__(
+         self, case_sensitive: bool = False, *, cache_size: int = 1024 * 4
+     ) -> None:
+         """Initialize fuzzy search.
+
+         Args:
+             case_sensitive: Is the match case sensitive?
+             cache_size: Number of queries to cache.
+         """
+
+         self.case_sensitive = case_sensitive
+         self.cache: LRUCache[tuple[str, str], tuple[float, Sequence[int]]] = LRUCache(
+             cache_size
+         )
+
+     def match(self, query: str, candidate: str) -> tuple[float, Sequence[int]]:
+         """Match against a query.
+
+         Args:
+             query: The fuzzy query.
+             candidate: A candidate to check.
+
+         Returns:
+             A pair of (score, tuple of offsets). `(0, ())` for no result.
+         """
+
+         cache_key = (query, candidate)
+         if cache_key in self.cache:
+             return self.cache[cache_key]
+         default: tuple[float, Sequence[int]] = (0.0, [])
+         result = max(self._match(query, candidate), key=itemgetter(0), default=default)
+         self.cache[cache_key] = result
+         return result
+
+     @classmethod
+     @lru_cache(maxsize=1024)
+     def get_first_letters(cls, candidate: str) -> frozenset[int]:
+         return frozenset({match.start() for match in finditer(r"\w+", candidate)})
+
+     def score(self, candidate: str, positions: Sequence[int]) -> float:
+         """Score a search.
+
+         Args:
+             candidate: The candidate string.
+             positions: Offsets of the matched letters.
+
+         Returns:
+             Score.
+         """
+         first_letters = self.get_first_letters(candidate)
+         # This is a heuristic, and can be tweaked for better results
+         # Boost first letter matches
+         offset_count = len(positions)
+         score: float = offset_count + len(first_letters.intersection(positions))
+
+         groups = 1
+         last_offset, *offsets = positions
+         for offset in offsets:
+             if offset != last_offset + 1:
+                 groups += 1
+             last_offset = offset
+
+         # Boost to favor fewer groups
+         normalized_groups = (offset_count - (groups - 1)) / offset_count
+         score *= 1 + (normalized_groups * normalized_groups)
+         return score
+
+     def _match(
+         self, query: str, candidate: str
+     ) -> Iterable[tuple[float, Sequence[int]]]:
+         letter_positions: list[list[int]] = []
+         position = 0
+
+         if not self.case_sensitive:
+             candidate = candidate.lower()
+             query = query.lower()
+
+         score = self.score
+
+         for offset, letter in enumerate(query):
+             last_index = len(candidate) - offset
+             positions: list[int] = []
+             letter_positions.append(positions)
+             index = position
+             while (location := candidate.find(letter, index)) != -1:
+                 positions.append(location)
+                 index = location + 1
+                 if index >= last_index:
+                     break
+             if not positions:
+                 yield (0.0, ())
+                 return
+             position = positions[0] + 1
+
+         possible_offsets: list[list[int]] = []
+         query_length = len(query)
+
+         def get_offsets(offsets: list[int], positions_index: int) -> None:
+             """Recursively match offsets.
+
+             Args:
+                 offsets: A list of offsets.
+                 positions_index: Index of query letter.
+             """
+             for offset in letter_positions[positions_index]:
+                 if not offsets or offset > offsets[-1]:
+                     new_offsets = [*offsets, offset]
+                     if len(new_offsets) == query_length:
+                         possible_offsets.append(new_offsets)
+                     else:
+                         get_offsets(new_offsets, positions_index + 1)
+
+         get_offsets([], 0)
+
+         for offsets in possible_offsets:
+             yield score(candidate, offsets), offsets
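A short sketch of how `FuzzySearch.match` might be called to rank candidates; it is illustrative only, and the query and candidate strings are assumptions:

# Illustrative sketch only -- not part of the package diff.
from toad.fuzzy import FuzzySearch

search = FuzzySearch()
candidates = ["directory_watcher.py", "diff_view.py", "danger_warning.py"]

query = "dw"
scored = [(search.match(query, candidate), candidate) for candidate in candidates]

# Highest score first; a score of 0.0 means the query did not match.
for (score, offsets), candidate in sorted(scored, key=lambda item: item[0][0], reverse=True):
    print(f"{score:5.2f}  {candidate}  {list(offsets)}")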
toad/gist.py ADDED
@@ -0,0 +1,2 @@
+ async def upload(content: str) -> None:
+     pass
toad/history.py ADDED
@@ -0,0 +1,138 @@
+ from typing import TypedDict
+ import asyncio
+ import json
+ from pathlib import Path
+ from time import time
+
+ import rich.repr
+
+ from toad.complete import Complete
+
+
+ class HistoryEntry(TypedDict):
+     """An entry in the history file."""
+
+     input: str
+     timestamp: float
+
+
+ @rich.repr.auto
+ class History:
+     """Manages a history file."""
+
+     def __init__(self, path: Path) -> None:
+         self.path = path
+         self._lines: list[str] = []
+         self._opened: bool = False
+         self._current: str | None = None
+         self.complete = Complete()
+
+     def __rich_repr__(self) -> rich.repr.Result:
+         yield self.path
+
+     @property
+     def current(self) -> str | None:
+         return self._current
+
+     @current.setter
+     def current(self, current: str) -> None:
+         self._current = current
+
+     @property
+     def size(self) -> int:
+         return len(self._lines)
+
+     async def open(self) -> bool:
+         """Open the history file and read initial lines.
+
+         Returns:
+             `True` if lines were read, otherwise `False`.
+         """
+         if self._opened:
+             return True
+
+         def read_history() -> bool:
+             """Read the history file (in a thread).
+
+             Returns:
+                 `True` on success.
+             """
+             try:
+                 self.path.touch(exist_ok=True)
+                 with self.path.open("r") as history_file:
+                     self._lines = history_file.readlines()
+
+                 inputs: list[str] = []
+                 for line in self._lines:
+                     if (input := json.loads(line).get("input")) is not None:
+                         inputs.append(input.split(" ", 1)[0])
+                 self.complete.add_words(inputs)
+             except Exception:
+                 return False
+             return True
+
+         self._opened = await asyncio.to_thread(read_history)
+         return self._opened
+
+     async def append(self, input: str) -> bool:
+         """Append a history entry.
+
+         Args:
+             input: Input text to add to the history.
+
+         Returns:
+             `True` on success.
+         """
+
+         if not input:
+             return True
+         self.complete.add_words([input.split(" ")[0]])
+
+         def write_line() -> bool:
+             """Append a line to the history.
+
+             Returns:
+                 `True` on success, `False` if the write failed.
+             """
+             history_entry: HistoryEntry = {
+                 "input": input,
+                 "timestamp": time(),
+             }
+             line = json.dumps(history_entry)
+             self._lines.append(line)
+             try:
+                 with self.path.open("a") as history_file:
+                     history_file.write(f"{line}\n")
+             except Exception:
+                 return False
+             self._current = None
+             return True
+
+         if not self._opened:
+             await self.open()
+
+         return await asyncio.to_thread(write_line)
+
+     async def get_entry(self, index: int) -> HistoryEntry:
+         """Get a history entry via its index.
+
+         Args:
+             index: Index of the entry; 0 for the current input, negative indices for previous entries.
+
+         Returns:
+             A history entry dict.
+         """
+         if index > 0:
+             raise IndexError("History indices must be 0 or negative.")
+         if not self._opened:
+             await self.open()
+
+         if index == 0:
+             return {"input": self.current or "", "timestamp": time()}
+         try:
+             entry_line = self._lines[index]
+         except IndexError:
+             raise IndexError(f"No history entry at index {index}")
+         history_entry: HistoryEntry = json.loads(entry_line)
+         return history_entry
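A minimal sketch of driving `History` from an asyncio entry point; it is illustrative only, and the history file path and inputs are assumptions:

# Illustrative sketch only -- not part of the package diff.
import asyncio
from pathlib import Path

from toad.history import History


async def main() -> None:
    history = History(Path("prompt_history.jsonl"))
    await history.append("hello toad")
    await history.append("ls -la")

    # Negative indices walk back through previous entries;
    # index 0 returns the current (unsubmitted) input.
    previous = await history.get_entry(-1)
    print(previous["input"], previous["timestamp"])


if __name__ == "__main__":
    asyncio.run(main())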