sqlbench 0.1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,542 @@
1
+ """AI-powered regex builder dialog using Ollama, OpenAI, or Anthropic."""
2
+
3
import json
import os
import shutil
import subprocess
import threading
import time
import tkinter as tk
import urllib.error
import urllib.request
from tkinter import ttk, messagebox
11
+
12
+
13
class RegexBuilderDialog:
    """Dialog for building regex patterns from natural language using AI.

    Supports four backends: the Claude CLI, the Anthropic and OpenAI web
    APIs, and a locally running Ollama server.
    """

    # Local Ollama server endpoint (default port 11434).
    OLLAMA_URL = "http://localhost:11434/api/generate"
    # Hosted chat/completions endpoints.
    OPENAI_URL = "https://api.openai.com/v1/chat/completions"
    ANTHROPIC_URL = "https://api.anthropic.com/v1/messages"

    # Instruction block sent to every backend; the model is expected to
    # reply with a bare pattern usable via re.search(..., re.IGNORECASE).
    SYSTEM_PROMPT = """You are a regex expert. Generate a regex pattern based on the user's description.
Rules:
- Output ONLY the regex pattern, nothing else
- No explanation, no markdown, no quotes
- The regex will be used with Python's re.search() with IGNORECASE flag
- Keep patterns simple and efficient
- For "contains X but not Y" patterns, use: ^(?!.*Y).*X
- For "contains X and Y" patterns, use: ^(?=.*X)(?=.*Y)
- For "starts with X" patterns, use: ^X
- For "ends with X" patterns, use: X$
- Negative lookahead (?!.*pattern) must check the ENTIRE string, not just one position"""
31
+
32
    def __init__(self, parent, callback=None):
        """Initialize the dialog.

        Args:
            parent: Parent window
            callback: Function to call with the generated regex when "Use" is clicked
        """
        self.callback = callback
        self.top = tk.Toplevel(parent)
        self.top.title("AI Regex Builder")
        self.top.geometry("520x400")
        self.top.transient(parent)
        # Modal: route all input to this dialog until it closes.
        self.top.grab_set()

        # Detect available AI backends (keys may come from env or config files)
        self.openai_key = self._find_api_key("OPENAI_API_KEY", ["~/.openai/key", "~/.config/openai/key"])
        self.anthropic_key = self._find_api_key("ANTHROPIC_API_KEY", ["~/.anthropic/key", "~/.config/anthropic/key"])
        self.backend = self._detect_backend()

        self._create_widgets()
        self.description_entry.focus()

        # ESC to close
        self.top.bind("<Escape>", lambda e: self.top.destroy())
56
+
57
+ def _find_api_key(self, env_var, file_paths):
58
+ """Find API key from environment variable or config files."""
59
+ # Check environment variable first
60
+ key = os.environ.get(env_var)
61
+ if key:
62
+ return key
63
+
64
+ # Check common config file locations
65
+ for path in file_paths:
66
+ expanded = os.path.expanduser(path)
67
+ if os.path.exists(expanded):
68
+ try:
69
+ with open(expanded, "r") as f:
70
+ key = f.read().strip()
71
+ if key:
72
+ return key
73
+ except Exception:
74
+ pass
75
+ return None
76
+
77
+ def _check_claude_cli(self):
78
+ """Check if Claude CLI is installed."""
79
+ try:
80
+ result = subprocess.run(["which", "claude"], capture_output=True, timeout=5)
81
+ return result.returncode == 0
82
+ except Exception:
83
+ return False
84
+
85
+ def _detect_backend(self):
86
+ """Detect which AI backend to use."""
87
+ # Prefer Claude CLI if available (free, fast)
88
+ if self._check_claude_cli():
89
+ return "claude"
90
+ # Then web APIs if keys are set
91
+ if self.anthropic_key:
92
+ return "anthropic"
93
+ if self.openai_key:
94
+ return "openai"
95
+ return "ollama"
96
+
97
+ def _check_ollama_installed(self):
98
+ """Check if Ollama is installed."""
99
+ try:
100
+ result = subprocess.run(["which", "ollama"], capture_output=True, timeout=5)
101
+ return result.returncode == 0
102
+ except Exception:
103
+ return False
104
+
105
+ def _check_ollama_running(self):
106
+ """Check if Ollama is running."""
107
+ try:
108
+ req = urllib.request.Request(
109
+ "http://localhost:11434/api/tags",
110
+ method="GET"
111
+ )
112
+ with urllib.request.urlopen(req, timeout=2) as resp:
113
+ return True
114
+ except Exception:
115
+ return False
116
+
117
+ def _check_ollama_model(self, model="mistral"):
118
+ """Check if a model is downloaded."""
119
+ try:
120
+ result = subprocess.run(["ollama", "list"], capture_output=True, text=True, timeout=10)
121
+ return model in result.stdout
122
+ except Exception:
123
+ return False
124
+
125
+ def _get_ollama_status(self):
126
+ """Get Ollama status: 'not_installed', 'no_model', 'not_running', 'ready'."""
127
+ if not self._check_ollama_installed():
128
+ return "not_installed"
129
+ if not self._check_ollama_model():
130
+ return "no_model"
131
+ if not self._check_ollama_running():
132
+ return "not_running"
133
+ return "ready"
134
+
135
+ def _install_ollama(self):
136
+ """Install Ollama."""
137
+ if messagebox.askyesno("Install Ollama",
138
+ "This will download and install Ollama (~500MB).\n\n"
139
+ "Continue?", parent=self.top):
140
+ self.status_label.config(text="Installing Ollama...", foreground="black")
141
+ self.top.update()
142
+
143
+ def install():
144
+ try:
145
+ # Run the install script
146
+ result = subprocess.run(
147
+ ["bash", "-c", "curl -fsSL https://ollama.ai/install.sh | sh"],
148
+ capture_output=True, text=True, timeout=300
149
+ )
150
+ if result.returncode == 0:
151
+ self.top.after(0, lambda: self._on_ollama_installed())
152
+ else:
153
+ self.top.after(0, lambda: self._on_setup_error(f"Install failed: {result.stderr[:100]}"))
154
+ except Exception as e:
155
+ self.top.after(0, lambda: self._on_setup_error(str(e)))
156
+
157
+ threading.Thread(target=install, daemon=True).start()
158
+
159
+ def _download_model(self):
160
+ """Download the Ollama model."""
161
+ self.status_label.config(text="Downloading model (~4GB)...", foreground="black")
162
+ self.top.update()
163
+
164
+ def download():
165
+ try:
166
+ result = subprocess.run(
167
+ ["ollama", "pull", "mistral"],
168
+ capture_output=True, text=True, timeout=600
169
+ )
170
+ if result.returncode == 0:
171
+ self.top.after(0, lambda: self._on_model_downloaded())
172
+ else:
173
+ self.top.after(0, lambda: self._on_setup_error(f"Download failed: {result.stderr[:100]}"))
174
+ except Exception as e:
175
+ self.top.after(0, lambda: self._on_setup_error(str(e)))
176
+
177
+ threading.Thread(target=download, daemon=True).start()
178
+
179
+ def _start_ollama(self):
180
+ """Start the Ollama server."""
181
+ self.status_label.config(text="Starting Ollama...", foreground="black")
182
+ self.top.update()
183
+
184
+ def start():
185
+ try:
186
+ # Start ollama serve in background
187
+ subprocess.Popen(
188
+ ["ollama", "serve"],
189
+ stdout=subprocess.DEVNULL,
190
+ stderr=subprocess.DEVNULL,
191
+ start_new_session=True
192
+ )
193
+ # Wait a moment for it to start
194
+ import time
195
+ time.sleep(2)
196
+
197
+ if self._check_ollama_running():
198
+ self.top.after(0, lambda: self._on_ollama_ready())
199
+ else:
200
+ self.top.after(0, lambda: self._on_setup_error("Failed to start Ollama"))
201
+ except Exception as e:
202
+ self.top.after(0, lambda: self._on_setup_error(str(e)))
203
+
204
+ threading.Thread(target=start, daemon=True).start()
205
+
206
    def _on_ollama_installed(self):
        """Called after Ollama is installed; chains into the model download step."""
        self.status_label.config(text="Installed! Downloading model...", foreground="green")
        self._download_model()
210
+
211
    def _on_model_downloaded(self):
        """Called after the model is downloaded; chains into starting the server."""
        self.status_label.config(text="Model ready! Starting...", foreground="green")
        self._start_ollama()
215
+
216
    def _on_ollama_ready(self):
        """Called when Ollama is ready to use; refreshes the status row."""
        self.status_label.config(text="Ollama ready!", foreground="green")
        self._update_ollama_status()
220
+
221
    def _on_setup_error(self, error):
        """Called on setup error; shows a truncated message and refreshes status."""
        # Truncate so a long error/traceback doesn't stretch the dialog.
        self.status_label.config(text=error[:50], foreground="red")
        self._update_ollama_status()
225
+
226
    def _create_widgets(self):
        """Build the dialog layout: description input, backend selector,
        per-backend setup rows, generate controls, and the result row."""
        main_frame = ttk.Frame(self.top, padding=10)
        main_frame.pack(fill=tk.BOTH, expand=True)

        # Description input
        ttk.Label(main_frame, text="Describe what you want to match:").pack(anchor=tk.W)

        self.description_entry = ttk.Entry(main_frame, width=60)
        self.description_entry.pack(fill=tk.X, pady=(5, 10))
        # Enter submits, same as clicking Generate.
        self.description_entry.bind("<Return>", lambda e: self._generate())

        # Examples
        examples_text = "Examples: 'tables starting with user', 'contains order or invoice', 'ends with _log'"
        ttk.Label(main_frame, text=examples_text, foreground="gray").pack(anchor=tk.W)

        # Backend selector
        backend_frame = ttk.Frame(main_frame)
        backend_frame.pack(fill=tk.X, pady=(10, 0))
        ttk.Label(backend_frame, text="AI Backend:").pack(side=tk.LEFT)

        self.backend_var = tk.StringVar(value=self.backend)
        backends = ["claude", "anthropic", "openai", "ollama"]

        self.backend_combo = ttk.Combobox(backend_frame, textvariable=self.backend_var,
                                          values=backends, state="readonly", width=12)
        self.backend_combo.pack(side=tk.LEFT, padx=(5, 10))

        backend_hint = {"claude": "(CLI)", "anthropic": "(API)", "openai": "(GPT)", "ollama": "(Local)"}
        self.backend_hint_label = ttk.Label(backend_frame, text=backend_hint.get(self.backend, ""), foreground="gray")
        self.backend_hint_label.pack(side=tk.LEFT)
        self.backend_combo.bind("<<ComboboxSelected>>", self._on_backend_change)

        # API Key input (for web backends)
        self.key_frame = ttk.Frame(main_frame)
        self.key_frame.pack(fill=tk.X, pady=(5, 0))
        ttk.Label(self.key_frame, text="API Key:").pack(side=tk.LEFT)
        self.api_key_var = tk.StringVar(value=self.anthropic_key or self.openai_key or "")
        self.api_key_entry = ttk.Entry(self.key_frame, textvariable=self.api_key_var, width=40, show="*")
        self.api_key_entry.pack(side=tk.LEFT, padx=(5, 0), fill=tk.X, expand=True)

        # Ollama setup frame (for local backend)
        self.ollama_frame = ttk.Frame(main_frame)
        self.ollama_frame.pack(fill=tk.X, pady=(5, 0))
        self.ollama_status_label = ttk.Label(self.ollama_frame, text="")
        self.ollama_status_label.pack(side=tk.LEFT)
        self.ollama_setup_btn = ttk.Button(self.ollama_frame, text="Setup", command=self._setup_ollama)
        self.ollama_setup_btn.pack(side=tk.LEFT, padx=(10, 0))

        # Hide whichever of key_frame/ollama_frame the current backend doesn't need.
        self._update_backend_ui()

        # Generate button
        btn_frame = ttk.Frame(main_frame)
        btn_frame.pack(fill=tk.X, pady=10)

        self.generate_btn = ttk.Button(btn_frame, text="Generate", command=self._generate)
        self.generate_btn.pack(side=tk.LEFT)

        self.status_label = ttk.Label(btn_frame, text="")
        self.status_label.pack(side=tk.LEFT, padx=10)

        # Result display
        ttk.Label(main_frame, text="Generated regex:").pack(anchor=tk.W, pady=(10, 5))

        result_frame = ttk.Frame(main_frame)
        result_frame.pack(fill=tk.X)

        self.result_var = tk.StringVar()
        self.result_entry = ttk.Entry(result_frame, textvariable=self.result_var, width=50, font=("monospace", 11))
        self.result_entry.pack(side=tk.LEFT, fill=tk.X, expand=True)

        ttk.Button(result_frame, text="Copy", width=6, command=self._copy).pack(side=tk.LEFT, padx=(5, 0))

        # Bottom buttons
        bottom_frame = ttk.Frame(main_frame)
        bottom_frame.pack(fill=tk.X, pady=(20, 0))

        ttk.Button(bottom_frame, text="Use", command=self._use).pack(side=tk.RIGHT, padx=(5, 0))
        ttk.Button(bottom_frame, text="Close", command=self.top.destroy).pack(side=tk.RIGHT)
304
+
305
+ def _on_backend_change(self, event=None):
306
+ """Handle backend selection change."""
307
+ backend = self.backend_var.get()
308
+ hint = {"claude": "(CLI)", "anthropic": "(API)", "openai": "(GPT)", "ollama": "(Local)"}
309
+ self.backend_hint_label.config(text=hint.get(backend, ""))
310
+ self._update_backend_ui()
311
+
312
    def _update_backend_ui(self):
        """Show/hide API key or Ollama setup based on backend."""
        backend = self.backend_var.get()
        if backend == "claude":
            # Claude CLI needs no setup
            self.key_frame.pack_forget()
            self.ollama_frame.pack_forget()
        elif backend == "ollama":
            self.key_frame.pack_forget()
            # Re-pack directly below the backend selector row.
            self.ollama_frame.pack(fill=tk.X, pady=(5, 0), after=self.backend_combo.master)
            self._update_ollama_status()
        else:
            # Web API backends (anthropic/openai) need a key field instead.
            self.ollama_frame.pack_forget()
            self.key_frame.pack(fill=tk.X, pady=(5, 0), after=self.backend_combo.master)
326
+
327
    def _update_ollama_status(self):
        """Update Ollama status display."""
        status = self._get_ollama_status()
        status_text = {
            "not_installed": "Not installed",
            "no_model": "Model not downloaded",
            "not_running": "Not running",
            "ready": "Ready"
        }
        status_color = "green" if status == "ready" else "orange"
        self.ollama_status_label.config(text=status_text.get(status, ""), foreground=status_color)

        # Update button text and visibility
        if status == "ready":
            # Nothing left to set up; hide the action button entirely.
            self.ollama_setup_btn.pack_forget()
        else:
            # The button label doubles as the next setup action to take.
            btn_text = {
                "not_installed": "Install Ollama",
                "no_model": "Download Model",
                "not_running": "Start Ollama"
            }
            self.ollama_setup_btn.config(text=btn_text.get(status, "Setup"))
            self.ollama_setup_btn.pack(side=tk.LEFT, padx=(10, 0))
350
+
351
+ def _setup_ollama(self):
352
+ """Handle Ollama setup based on current status."""
353
+ status = self._get_ollama_status()
354
+ if status == "not_installed":
355
+ self._install_ollama()
356
+ elif status == "no_model":
357
+ self._download_model()
358
+ elif status == "not_running":
359
+ self._start_ollama()
360
+
361
    def _generate(self):
        """Generate regex from description using selected AI backend."""
        description = self.description_entry.get().strip()
        if not description:
            self.status_label.config(text="Enter a description", foreground="red")
            return

        # Disable the button so a second click can't start a parallel request.
        self.generate_btn.config(state=tk.DISABLED)
        self.status_label.config(text="Generating...", foreground="black")
        self.result_var.set("")

        backend = self.backend_var.get()

        # Run in background thread so the Tk event loop stays responsive.
        thread = threading.Thread(target=self._call_ai, args=(description, backend), daemon=True)
        thread.start()
377
+
378
+ def _call_ai(self, description, backend):
379
+ """Call the selected AI backend."""
380
+ try:
381
+ # Get API key from entry field
382
+ api_key = self.api_key_var.get().strip()
383
+
384
+ if backend == "claude":
385
+ regex = self._call_claude_cli(description)
386
+ elif backend == "anthropic":
387
+ if not api_key:
388
+ raise Exception("API key required for Anthropic")
389
+ regex = self._call_anthropic(description, api_key)
390
+ elif backend == "openai":
391
+ if not api_key:
392
+ raise Exception("API key required for OpenAI")
393
+ regex = self._call_openai(description, api_key)
394
+ else:
395
+ regex = self._call_ollama(description)
396
+
397
+ self.top.after(0, lambda: self._on_result(regex))
398
+ except Exception as e:
399
+ self.top.after(0, lambda: self._on_error(str(e)))
400
+
401
+ def _clean_regex(self, regex):
402
+ """Clean up the regex response."""
403
+ regex = regex.strip()
404
+ regex = regex.strip("`").strip('"').strip("'").strip()
405
+ # Remove markdown code block markers
406
+ if regex.startswith("```"):
407
+ regex = regex.split("\n", 1)[-1]
408
+ if regex.endswith("```"):
409
+ regex = regex.rsplit("```", 1)[0]
410
+ regex = regex.strip()
411
+ # Take first line if multiple
412
+ if "\n" in regex:
413
+ regex = regex.split("\n")[0].strip()
414
+ return regex
415
+
416
+ def _call_claude_cli(self, description):
417
+ """Call Claude CLI in print mode."""
418
+ prompt = f"{self.SYSTEM_PROMPT}\n\nUser request: {description}"
419
+
420
+ try:
421
+ result = subprocess.run(
422
+ ["claude", "-p", prompt],
423
+ capture_output=True,
424
+ text=True,
425
+ timeout=60
426
+ )
427
+
428
+ if result.returncode != 0:
429
+ raise Exception(f"Claude CLI error: {result.stderr[:100]}")
430
+
431
+ return self._clean_regex(result.stdout)
432
+
433
+ except subprocess.TimeoutExpired:
434
+ raise Exception("Claude CLI timed out")
435
+ except FileNotFoundError:
436
+ raise Exception("Claude CLI not found. Install with: npm install -g @anthropic-ai/claude-code")
437
+
438
+ def _call_anthropic(self, description, api_key):
439
+ """Call Anthropic Claude API."""
440
+ payload = {
441
+ "model": "claude-sonnet-4-20250514",
442
+ "max_tokens": 100,
443
+ "messages": [
444
+ {"role": "user", "content": f"{self.SYSTEM_PROMPT}\n\nUser request: {description}"}
445
+ ]
446
+ }
447
+
448
+ req = urllib.request.Request(
449
+ self.ANTHROPIC_URL,
450
+ data=json.dumps(payload).encode("utf-8"),
451
+ headers={
452
+ "Content-Type": "application/json",
453
+ "x-api-key": api_key,
454
+ "anthropic-version": "2023-06-01"
455
+ }
456
+ )
457
+
458
+ with urllib.request.urlopen(req, timeout=30) as response:
459
+ result = json.loads(response.read().decode("utf-8"))
460
+ regex = result["content"][0]["text"]
461
+ return self._clean_regex(regex)
462
+
463
+ def _call_openai(self, description, api_key):
464
+ """Call OpenAI API."""
465
+ payload = {
466
+ "model": "gpt-4o-mini",
467
+ "max_tokens": 100,
468
+ "messages": [
469
+ {"role": "system", "content": self.SYSTEM_PROMPT},
470
+ {"role": "user", "content": description}
471
+ ]
472
+ }
473
+
474
+ req = urllib.request.Request(
475
+ self.OPENAI_URL,
476
+ data=json.dumps(payload).encode("utf-8"),
477
+ headers={
478
+ "Content-Type": "application/json",
479
+ "Authorization": f"Bearer {api_key}"
480
+ }
481
+ )
482
+
483
+ with urllib.request.urlopen(req, timeout=30) as response:
484
+ result = json.loads(response.read().decode("utf-8"))
485
+ regex = result["choices"][0]["message"]["content"]
486
+ return self._clean_regex(regex)
487
+
488
+ def _call_ollama(self, description):
489
+ """Call Ollama API."""
490
+ prompt = f"{self.SYSTEM_PROMPT}\n\nUser request: {description}\n\nRegex pattern:"
491
+
492
+ payload = {
493
+ "model": "mistral",
494
+ "prompt": prompt,
495
+ "stream": False,
496
+ "options": {
497
+ "temperature": 0.1,
498
+ "num_predict": 100
499
+ }
500
+ }
501
+
502
+ try:
503
+ req = urllib.request.Request(
504
+ self.OLLAMA_URL,
505
+ data=json.dumps(payload).encode("utf-8"),
506
+ headers={"Content-Type": "application/json"}
507
+ )
508
+
509
+ with urllib.request.urlopen(req, timeout=30) as response:
510
+ result = json.loads(response.read().decode("utf-8"))
511
+ regex = result.get("response", "")
512
+ return self._clean_regex(regex)
513
+
514
+ except urllib.error.URLError:
515
+ raise Exception("Ollama not running. Run: make ollama-start")
516
+
517
    def _on_result(self, regex):
        """Handle successful result: show it and pre-select it for copying."""
        self.generate_btn.config(state=tk.NORMAL)
        self.status_label.config(text="Done", foreground="green")
        self.result_var.set(regex)
        # Select the text so the user can immediately copy or overwrite it.
        self.result_entry.select_range(0, tk.END)
523
+
524
    def _on_error(self, error):
        """Handle error: re-enable the button and show a truncated message."""
        self.generate_btn.config(state=tk.NORMAL)
        # Truncate so long errors don't stretch the dialog.
        self.status_label.config(text=error[:50], foreground="red")
528
+
529
+ def _copy(self):
530
+ """Copy result to clipboard."""
531
+ regex = self.result_var.get()
532
+ if regex:
533
+ self.top.clipboard_clear()
534
+ self.top.clipboard_append(regex)
535
+ self.status_label.config(text="Copied!", foreground="green")
536
+
537
+ def _use(self):
538
+ """Use the generated regex."""
539
+ regex = self.result_var.get()
540
+ if regex and self.callback:
541
+ self.callback(regex)
542
+ self.top.destroy()
@@ -0,0 +1 @@
1
+ """Tab modules for SQLBench."""