ldbg 0.1.0__py3-none-any.whl → 0.1.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
ldbg/ldbg.py CHANGED
@@ -1,18 +1,25 @@
1
1
  import inspect
2
+ import linecache
2
3
  import re
3
4
  import textwrap
5
+ import time
4
6
  import traceback
5
- import linecache
6
7
  import os
8
+ import sys
7
9
  import pprint
8
10
  from types import FrameType
9
11
  from typing import cast
10
12
 
11
13
  from openai import OpenAI
12
14
 
13
- LENGTH_MAX = 10000
15
+ LENGTH_MAX = 50000
14
16
  CODE_BLOCK_REGEX = r"```(?:[\w+-]*)\n(.*?)```"
15
17
 
18
+ # Message in one line since the Debug Console shows the raw string
19
+ VSCODE_WARNING_MESSAGE = """It seems you are on VS Code. The answers will be printed in the Terminal while your inputs are made from the Debug Console. For optimal use, display the Debug Console and Terminal side-by-side. This message will be shown only once. Call the ldbg.gc() function again to make your request to the LLM."""
20
+
21
+ display_vscode_warning = any("debugpy" in mod for mod in sys.modules)
22
+
16
23
  if "OPENROUTER_API_KEY" in os.environ:
17
24
  client = OpenAI(
18
25
  base_url="https://openrouter.ai/api/v1",
@@ -27,30 +34,47 @@ def extract_code_blocks(markdown_text: str):
27
34
  return pattern.findall(markdown_text)
28
35
 
29
36
 
30
- def execute_code_block(code: str):
31
- exec(code, {})
37
+ def execute_code_block(code: str, locals: dict):
38
+ exec(code, locals)
32
39
 
33
40
 
34
- def execute_blocks(markdown_text: str | None) -> None:
41
+ def execute_blocks(markdown_text: str | None, locals: dict) -> None:
35
42
  """
36
43
  Extract the code blocks in the markdown and ask user if he wants to execute them
37
44
  """
38
45
  if markdown_text is None:
39
46
  return
40
47
  blocks = extract_code_blocks(markdown_text)
41
- for block in blocks:
42
- print("Would you like to execute the following code block:")
48
+ for n, block in enumerate(blocks):
49
+ print("\n\nWould you like to execute the following code block:\n")
43
50
  print(textwrap.indent(block, " "))
44
- confirm = input("(y/n)").lower()
45
- if confirm.lower() in ["yes", "y"]:
46
- execute_code_block(block)
51
+ while True:
52
+ before_input_time = time.time()
53
+ confirm = input("(y/n)").lower()
54
+ after_input_time = time.time()
55
+ if after_input_time - before_input_time < 0.5:
56
+ print(
57
+ f'Discard answer "{confirm}" since it is likely from a previous keyboard stroke. Please wait at least 0.5s to read the code and answer safely.'
58
+ )
59
+ continue
60
+ if confirm.lower() in ["yes", "y"]:
61
+ print(f"\nExecuting block {n}...\n\n")
62
+ execute_code_block(block, locals)
63
+ print("\n\n\nExecution done.")
64
+ break
65
+ if any("debugpy" in mod for mod in sys.modules):
66
+ print("\nReturn to the Debug Console to get more help.")
67
+
68
+
69
+ def indent(text, prefix=" " * 4):
70
+ return textwrap.indent(text, prefix)
47
71
 
48
72
 
49
73
  def generate_commands(
50
74
  prompt: str,
51
75
  frame=None,
52
76
  model="gpt-5-mini-2025-08-07",
53
- print_prompt=True,
77
+ print_prompt=False,
54
78
  length_max=LENGTH_MAX,
55
79
  context="",
56
80
  ):
@@ -105,38 +129,80 @@ def generate_commands(
105
129
 
106
130
  <<< user enters n
107
131
  """
132
+
133
+ global display_vscode_warning
134
+ if display_vscode_warning:
135
+ display_vscode_warning = False
136
+ return VSCODE_WARNING_MESSAGE
137
+
138
+ frame_info = None
108
139
  if frame is None:
109
- frame = cast(FrameType, inspect.currentframe().f_back) # type: ignore
140
+ frame_info = next(
141
+ fi
142
+ for fi in inspect.stack()
143
+ if "/.vscode/extensions/" not in fi.filename
144
+ and "<string>" not in fi.filename
145
+ and not (
146
+ fi.filename.endswith("ldbg.py") and fi.function == "generate_commands"
147
+ )
148
+ )
149
+
150
+ frame: FrameType = cast(FrameType, frame_info.frame)
110
151
 
111
152
  # Locals & globals preview
112
- locals_preview = pprint.pformat(frame.f_locals)[
113
- :length_max
114
- ] # {k: type(v).__name__ for k, v in frame.f_locals.items()}
115
- globals_preview = pprint.pformat(frame.f_globals)[
116
- :length_max
117
- ] # {k: type(v).__name__ for k, v in frame.f_globals.items()}
153
+ filtered_locals = {
154
+ key: value
155
+ for key, value in frame.f_locals.items()
156
+ if key not in ["__builtin__", "__builtins__"]
157
+ }
158
+
159
+ locals_preview = pprint.pformat(filtered_locals)
160
+ if len(locals_preview) > length_max:
161
+ locals_preview = (
162
+ locals_preview[:length_max]
163
+ + f" ... \nLocal variables are truncated because it is too long (more than {length_max} characters)!"
164
+ )
165
+
166
+ # globals_preview = pprint.pformat(frame.f_globals)[
167
+ # :length_max
168
+ # ]
118
169
 
119
170
  # Traceback / call stack
120
171
  stack_summary = traceback.format_stack(frame)
121
- stack_text = "".join(stack_summary[-15:]) # limit to avoid overload
172
+ stack_text = "".join(stack_summary[-20:]) # limit to avoid overload
122
173
 
123
174
  # Current function source
124
175
  try:
125
176
  source_lines, start_line = inspect.getsourcelines(frame)
126
- func_source = "".join(source_lines)
177
+ sources_lines_with_line_numbers = source_lines.copy()
178
+
179
+ for i, line in enumerate(source_lines):
180
+ prefix = "→ " if i + start_line == frame.f_lineno else " "
181
+ sources_lines_with_line_numbers[i] = (
182
+ f"{prefix}{i + start_line:4d}: {line.rstrip()}"
183
+ )
184
+
185
+ func_source = "".join(sources_lines_with_line_numbers)
127
186
  except (OSError, TypeError):
128
- func_source = "<source unavailable>"
129
-
130
- # Context like ipdb 'll'
131
- filename = frame.f_code.co_filename
132
- lineno = frame.f_lineno
133
- start_context = max(lineno - 10, 1)
134
- context_lines = []
135
- for i in range(start_context, lineno + 10):
136
- line = linecache.getline(filename, i)
137
- if line:
138
- context_lines.append(f"{i:4d}: {line}")
139
- context_text = "".join(context_lines)
187
+ try:
188
+ # fallback: print nearby lines from the source file
189
+ filename = frame.f_code.co_filename
190
+ start = frame.f_code.co_firstlineno
191
+ lines = linecache.getlines(filename)
192
+ func_source = "".join(lines[max(0, start - 5) : start + 200])
193
+ except Exception:
194
+ func_source = "<source unavailable>"
195
+
196
+ additional_context = textwrap.dedent(
197
+ f"""
198
+ Additional context:
199
+ {indent(context)}
200
+
201
+ ===================================
202
+ """
203
+ if context is not None and len(context) > 0
204
+ else ""
205
+ )
140
206
 
141
207
  # ldbg.generate_commands({prompt}, model={model}, code_only={code_only}, print_prompt={print_prompt}, print_answer={print_answer}, length_max={length_max})
142
208
  context = textwrap.dedent(f"""
@@ -145,23 +211,21 @@ def generate_commands(
145
211
 
146
212
  The user just ran `import ldbg; ldbg.gc({prompt}, model={model})` to ask you some help (gc stands for generate commands).
147
213
 
148
- Local variables and their types (`locals = pprint.pformat(inspect.currentframe().f_locals)[:length_max]`):
149
- {locals_preview}
214
+ Local variables (`locals = pprint.pformat(inspect.currentframe().f_locals)`):
215
+ {indent(locals_preview)}
150
216
 
151
- Global variables and their types (`globals = pprint.pformat(inspect.currentframe().f_globals)[:length_max]`):
152
- {globals_preview}
217
+ ===================================
153
218
 
154
219
  Current call stack (traceback):
155
- {stack_text}
220
+ {indent(stack_text)}
156
221
 
157
- Current function source:
158
- {func_source}
222
+ ===================================
159
223
 
160
- Nearby code (like ipdb 'll'):
161
- {context_text}
224
+ Current function source:
225
+ {indent(func_source)}
162
226
 
163
- Additional context:
164
- {context}
227
+ ===================================
228
+ {additional_context}
165
229
 
166
230
  If you need more context, a more detailed view of the local variables or the content of a source file,
167
231
  tell the user the commands he should run to print the details you need.
@@ -258,7 +322,7 @@ def generate_commands(
258
322
  pandas.DataFrame(unknown_data).describe()
259
323
  ```
260
324
 
261
- You could also use numpy.set_printoptions (or a library like numpyprint) to pretty print your array:
325
+ You could also use `numpy.set_printoptions` (or a library like numpyprint) to pretty print your array:
262
326
 
263
327
  ```
264
328
  with np.printoptions(precision=2, suppress=True, threshold=5):
@@ -266,14 +330,14 @@ def generate_commands(
266
330
  ```
267
331
 
268
332
  Always put the code to execute in triple backticks code blocks.
269
- Provide relatively short and concise answers.
333
+ Provide short and concise answers and code.
270
334
  """)
271
335
 
272
336
  if print_prompt:
273
337
  print("System prompt:")
274
338
  print(context)
275
- print("\nUser prompt:")
276
- print(prompt)
339
+
340
+ print(f'\n\nAsking {model} "{prompt}"...\n')
277
341
 
278
342
  resp = client.chat.completions.create(
279
343
  model=model,
@@ -292,10 +356,10 @@ def generate_commands(
292
356
  if response is None:
293
357
  return
294
358
 
295
- print(f"Model {model} says:")
359
+ print(f"Model {model} says:\n")
296
360
  print(textwrap.indent(response, " "))
297
361
 
298
- execute_blocks(response)
362
+ execute_blocks(response, frame.f_locals)
299
363
 
300
364
  return
301
365
 
@@ -1,9 +1,9 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: ldbg
3
- Version: 0.1.0
3
+ Version: 0.1.2
4
4
  Summary: Add your description here
5
5
  Author-email: Arthur Masson <arthur.masson@inria.fr>
6
- Requires-Python: >=3.12
6
+ Requires-Python: >=3.10
7
7
  Requires-Dist: openai>=1.107.3
8
8
  Description-Content-Type: text/markdown
9
9
 
@@ -11,25 +11,40 @@ Description-Content-Type: text/markdown
11
11
 
12
12
  ### A Minimal Python Library to debug with LLMs
13
13
 
14
- Use natural-language prompts while debugging. Prompts are augmented with your current stack, variables, and source context.
14
+ Ldbg enables you to use natural-language prompts while debugging. Prompts are augmented with your current stack, variables, and source context.
15
+ It is like [ShellGPT](https://github.com/TheR1D/shell_gpt) but for pdb, ipdb, Jupyter, the VS Code Debug Console, etc.
15
16
 
16
17
  DO NOT USE THIS LIBRARY
17
18
 
18
19
  > “AI everywhere is rocket engines on a skateboard: a thrill that ends in wreckage. The planet pays in energy and emissions, and we pay in something subtler — the slow atrophy of our own intelligence, left idle while the machines do the heavy lifting.” ChatGPT
19
20
 
21
+ Here is [CJ Reynolds](https://www.youtube.com/watch?v=0ZUkQF6boNg)'s point of view:
22
+
23
+ > I used to enjoy programming. Now, my days are typically spent going back and forth with an LLM and pretty often yelling at it… And part of enjoying programming for me was enjoying the little wins, right? You would work really hard to make something… or to figure something out. And once you figured it out, you'd have that little win. You'd get that dopamine hit and you'd feel good about yourself and you could keep going. I don't get that when I'm using LLMs to write code. Once it's figured something out, I don't feel like I did any work to get there. And then I'm just mad that it's doing the wrong thing. And then we go through this back and forth cycle and it's not fun.
24
+
20
25
  ## Features
21
26
 
22
27
  - 🐍 Generate Python debug commands from natural-language instructions.
23
28
  - 🔍 Context-aware: prompt auto-includes call stack, local/global variable previews, current function - source, and nearby code.
24
- - ⚡ Works like an AI-augmented pdb: just ask what you want to inspect.
25
29
  - 🤖 Supports OpenRouter
26
30
 
31
+ **NOTE**: In VS Code, you enter the function in the Debug Console, and get the output in the Terminal; so put both tabs (Debug Console and Terminal) side by side.
32
+
27
33
  ## Installation
28
34
 
29
- `pip install ldbg`
35
+ `uv add ldbg`, `pixi add --pypi ldbg` or `pip install ldbg`
30
36
 
31
37
  ## Quick Start
32
38
 
39
+ ### Example natural-language prompts
40
+
41
+ - "Describe my numpy arrays"
42
+ - "plot my_data['b'] as a histogram"
43
+ - "give me an example pandas dataframe about employees"
44
+ - "generate a 3x12x16 example Pillow image from a numpy array"
45
+ - "convert this Pillow image to grayscale"
46
+ - "open this 'image.ome.tiff' with bioio"
47
+
33
48
  ### Example Session
34
49
 
35
50
  ```python
@@ -43,13 +58,13 @@ The model "gpt-5-mini-2025-08-07" says:
43
58
 
44
59
  unknown_data is a numpy array which can be described with the following pandas code:
45
60
 
46
- ```code block 1
61
+ ```
47
62
  pandas.DataFrame(unknown_data).describe()
48
63
  ```
49
64
 
50
65
  Note: you can use numpy.set_printoptions (or a library like numpyprint) to pretty print your array:
51
66
 
52
- ```code block 2
67
+ ```
53
68
  with np.printoptions(precision=2, suppress=True, threshold=5):
54
69
  unknown_data
55
70
  ```
@@ -95,15 +110,6 @@ Would you like to execute the following code block:
95
110
  ...
96
111
  ```
97
112
 
98
- ### Example natural-language prompts
99
-
100
- - "Describe my numpy arrays"
101
- - "plot my_data['b'] as a histogram"
102
- - "give me an example pandas dataframe about employees"
103
- - "generate a 3x10x12 numpy array which will be used as an example image"
104
- - "convert this Pillow image to grayscale"
105
- - "open this 'image.ome.tiff' with bioio"
106
-
107
113
  ## Configuration
108
114
 
109
115
  By default, llm-debug uses the OpenAI client. So it reads the [OPENAI_API_KEY environment variable](https://platform.openai.com/docs/quickstart).
@@ -0,0 +1,6 @@
1
+ ldbg/__init__.py,sha256=pTjJ9KayMK3061FeYd8cE-E8mhwdYRB8OJz0-APm6zI,79
2
+ ldbg/ldbg.py,sha256=NS6_9WTrbtAE0Kv_xZ4M2jPDg3KvWU2fYz21By1Aqu8,12321
3
+ ldbg/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
4
+ ldbg-0.1.2.dist-info/METADATA,sha256=U37pXjEU--TdOWEFwIMucpPdrp_e850hLDVQdZDqZjQ,4085
5
+ ldbg-0.1.2.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
6
+ ldbg-0.1.2.dist-info/RECORD,,
@@ -1,6 +0,0 @@
1
- ldbg/__init__.py,sha256=pTjJ9KayMK3061FeYd8cE-E8mhwdYRB8OJz0-APm6zI,79
2
- ldbg/ldbg.py,sha256=0bIfFZUoMH4329OJzRqB3Zmah60RP_MCBQeTxP5c7xE,9869
3
- ldbg/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
4
- ldbg-0.1.0.dist-info/METADATA,sha256=ApXXFHbt7YuoQcDGumT_O_TS4gdrGH-mwa9x2IKtgaY,3087
5
- ldbg-0.1.0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
6
- ldbg-0.1.0.dist-info/RECORD,,
File without changes