sentinel-ai-os 1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (48) hide show
  1. sentinel/__init__.py +0 -0
  2. sentinel/auth.py +40 -0
  3. sentinel/cli.py +9 -0
  4. sentinel/core/__init__.py +0 -0
  5. sentinel/core/agent.py +298 -0
  6. sentinel/core/audit.py +48 -0
  7. sentinel/core/cognitive.py +94 -0
  8. sentinel/core/config.py +99 -0
  9. sentinel/core/llm.py +143 -0
  10. sentinel/core/registry.py +351 -0
  11. sentinel/core/scheduler.py +61 -0
  12. sentinel/core/schema.py +11 -0
  13. sentinel/core/setup.py +101 -0
  14. sentinel/core/ui.py +112 -0
  15. sentinel/main.py +110 -0
  16. sentinel/paths.py +77 -0
  17. sentinel/tools/__init__.py +0 -0
  18. sentinel/tools/apps.py +462 -0
  19. sentinel/tools/audio.py +30 -0
  20. sentinel/tools/browser.py +66 -0
  21. sentinel/tools/calendar_ops.py +163 -0
  22. sentinel/tools/clock.py +25 -0
  23. sentinel/tools/context.py +40 -0
  24. sentinel/tools/desktop.py +116 -0
  25. sentinel/tools/email_ops.py +62 -0
  26. sentinel/tools/factory.py +125 -0
  27. sentinel/tools/file_ops.py +81 -0
  28. sentinel/tools/flights.py +62 -0
  29. sentinel/tools/gmail_auth.py +47 -0
  30. sentinel/tools/indexer.py +156 -0
  31. sentinel/tools/installer.py +69 -0
  32. sentinel/tools/macros.py +58 -0
  33. sentinel/tools/memory_ops.py +281 -0
  34. sentinel/tools/navigation.py +109 -0
  35. sentinel/tools/notes.py +78 -0
  36. sentinel/tools/office.py +67 -0
  37. sentinel/tools/organizer.py +150 -0
  38. sentinel/tools/smart_index.py +76 -0
  39. sentinel/tools/sql_index.py +186 -0
  40. sentinel/tools/system_ops.py +86 -0
  41. sentinel/tools/vision.py +94 -0
  42. sentinel/tools/weather_ops.py +59 -0
  43. sentinel_ai_os-1.0.dist-info/METADATA +282 -0
  44. sentinel_ai_os-1.0.dist-info/RECORD +48 -0
  45. sentinel_ai_os-1.0.dist-info/WHEEL +5 -0
  46. sentinel_ai_os-1.0.dist-info/entry_points.txt +2 -0
  47. sentinel_ai_os-1.0.dist-info/licenses/LICENSE +21 -0
  48. sentinel_ai_os-1.0.dist-info/top_level.txt +1 -0
@@ -0,0 +1,109 @@
1
+ import googlemaps
2
+ import re
3
+ from sentinel.core.config import ConfigManager
4
+
5
+
6
def get_gmaps():
    """Return an authenticated Google Maps client, or None if no key is configured."""
    api_key = ConfigManager().get_key("google_maps")
    return googlemaps.Client(key=api_key) if api_key else None
12
+
13
+
14
def geocode(address):
    """Look up an address and return its canonical form plus lat/lng coordinates."""
    client = get_gmaps()
    if client is None:
        return "Error: Google Maps API key missing in config"

    try:
        matches = client.geocode(address)
        if not matches:
            return "No results."

        top = matches[0]
        coords = top["geometry"]["location"]
        return (
            f"Address: {top['formatted_address']}\n"
            f"Coordinates: {coords['lat']}, {coords['lng']}"
        )
    except Exception as e:
        return f"Geocode error: {e}"
29
+
30
+
31
def reverse_geocode(lat, lon):
    """Resolve a latitude/longitude pair to a human-readable address."""
    client = get_gmaps()
    if client is None:
        return "Error: Google Maps API key missing in config"

    try:
        # Coordinates may arrive as strings from the LLM; coerce inside the
        # try so a bad value is reported rather than raised.
        hits = client.reverse_geocode((float(lat), float(lon)))
        if not hits:
            return "No address."
        return f"Address: {hits[0]['formatted_address']}"
    except Exception as e:
        return f"Reverse Geocode error: {e}"
43
+
44
+
45
def calc_distance(origin, destination, mode="driving"):
    """Report distance and travel time between two places (Distance Matrix API)."""
    client = get_gmaps()
    if client is None:
        return "Error: Google Maps API key missing in config"

    try:
        response = client.distance_matrix(origin, destination, mode=mode)
        element = response["rows"][0]["elements"][0]
        if element["status"] != "OK":
            return f"Error: {element['status']}"

        return (
            f"Trip: {origin} -> {destination} ({mode})\n"
            f"Distance: {element['distance']['text']}\n"
            f"Duration: {element['duration']['text']}"
        )
    except Exception as e:
        return f"Distance error: {e}"
63
+
64
+
65
def get_directions(origin, destination, mode="driving"):
    """Return turn-by-turn directions as numbered plain-text steps."""
    client = get_gmaps()
    if client is None:
        return "Error: Google Maps API key missing in config"

    try:
        routes = client.directions(origin, destination, mode=mode)
        if not routes:
            return "No directions."

        lines = [f"Directions ({mode}):"]
        # Google embeds HTML markup in each instruction; strip the tags.
        for idx, step in enumerate(routes[0]["legs"][0]["steps"], 1):
            text = re.sub("<[^<]+?>", "", step["html_instructions"])
            lines.append(f"{idx}. {text} ({step['distance']['text']})")

        return "\n".join(lines)
    except Exception as e:
        return f"Directions error: {e}"
85
+
86
+
87
def find_nearby(lat, lon, type="restaurant", radius=1000):
    """List up to 10 places of the given type within `radius` metres.

    Note: `type` shadows the builtin but is kept as the parameter name
    because callers pass it as a keyword argument.
    """
    client = get_gmaps()
    if client is None:
        return "Error: Google Maps API key missing in config"

    try:
        response = client.places_nearby(
            location=(float(lat), float(lon)),
            radius=radius,
            type=type,
        )
        lines = [
            f"- {place.get('name')} ({place.get('rating', 'N/A')}*) - {place.get('vicinity', 'No address')}"
            for place in response.get("results", [])[:10]
        ]
        return "\n".join(lines) if lines else "No places found."
    except Exception as e:
        return f"Places error: {e}"
@@ -0,0 +1,78 @@
1
+ import json
2
+ import os
3
+ import datetime
4
+ from pathlib import Path
5
+
6
# Per-user data directory for Sentinel's persistent notes.
BASE_DIR = Path.home() / ".sentinel-1"
BASE_DIR.mkdir(exist_ok=True)

# JSON file holding all note records.
NOTES_FILE = BASE_DIR / "memory.json"
9
+
10
+
11
def _load_notes():
    """Load all notes from the JSON store.

    Returns:
        list: Stored note dicts, or an empty list if the file is missing,
        unreadable, or contains invalid JSON.
    """
    if not os.path.exists(NOTES_FILE):
        return []
    try:
        with open(NOTES_FILE, 'r') as f:
            return json.load(f)
    # Fix: a bare `except:` also swallowed KeyboardInterrupt/SystemExit;
    # only I/O and parse failures should degrade to "no notes".
    except (OSError, json.JSONDecodeError):
        return []
20
+
21
+
22
def _save_notes(notes):
    """Persist the full note list to the JSON store, pretty-printed."""
    with open(NOTES_FILE, 'w') as fh:
        json.dump(notes, fh, indent=2)
26
+
27
+
28
def add_note(category, content):
    """
    Saves a persistent memory.
    Args:
        category: (str) "user_pref", "project_status", "todo", "concept"
        content: (str) The actual information to remember.
    Returns:
        str: Confirmation message echoing the category and content.
    """
    notes = _load_notes()
    # Fix: ids were assigned as len(notes) + 1, which collides with a
    # surviving note's id after any deletion. Use max existing id + 1.
    next_id = max((n["id"] for n in notes), default=0) + 1
    notes.append({
        "id": next_id,
        "timestamp": str(datetime.datetime.now()),
        "category": category,
        "content": content,
    })
    _save_notes(notes)
    return f"Note added to {category}: {content}"
45
+
46
+
47
def list_notes(category=None):
    """
    Retrieves memories.
    Args:
        category: (Optional) Filter by "user_pref", "todo", etc.
    """
    notes = _load_notes()

    if not category:
        # No filter requested: dump every note.
        return json.dumps(notes, indent=2)

    wanted = category.lower()
    matches = [note for note in notes if note["category"].lower() == wanted]
    if not matches:
        return f"No notes found in category '{category}'."
    return json.dumps(matches, indent=2)
62
+
63
+
64
def delete_note(note_id):
    """Removes a note by ID.

    Args:
        note_id: Note identifier. String digits (e.g. "3" from an LLM call)
            are coerced to int so they match the stored integer ids.
    Returns:
        str: Confirmation message, or an error if no note had that ID.
    """
    notes = _load_notes()
    try:
        note_id = int(note_id)
    # Fix: narrowed from a bare `except:` — only coercion failures should
    # be ignored; the id then simply won't match any numeric stored id.
    except (TypeError, ValueError):
        pass

    remaining = [n for n in notes if n["id"] != note_id]
    if len(remaining) == len(notes):
        return "Error: Note ID not found."

    _save_notes(remaining)
    return f"Note {note_id} deleted."
@@ -0,0 +1,67 @@
1
+ import os
2
+ import pandas as pd
3
+ from docx import Document
4
+ from openpyxl import load_workbook
5
+
6
+
7
def create_word(filename, content):
    """Creates a new Word document with the given content."""
    if not filename.endswith(".docx"):
        filename += ".docx"

    try:
        document = Document()
        document.add_paragraph(content)
        document.save(filename)
        return f"Success: Created Word doc at {os.path.abspath(filename)}"
    except Exception as e:
        return f"Error creating Word doc: {e}"
19
+
20
+
21
def create_excel(filename, data_list):
    """
    Creates a new Excel file from a list of dictionaries.
    Example data_list: [{"Name": "Alice", "Age": 30}, {"Name": "Bob", "Age": 25}]
    """
    if not filename.endswith(".xlsx"):
        filename += ".xlsx"

    try:
        pd.DataFrame(data_list).to_excel(filename, index=False)
        return f"Success: Created Excel sheet at {os.path.abspath(filename)}"
    except Exception as e:
        return f"Error creating Excel: {e}"
35
+
36
+
37
def append_excel(filename, data_list):
    """Appends data to an existing Excel file.

    Falls back to creating the file when it does not exist yet.

    Args:
        filename: Target .xlsx path.
        data_list: List of row dicts to append.
    Returns:
        str: Success message with the appended row count, or an error string.
    """
    if not os.path.exists(filename):
        return create_excel(filename, data_list)

    try:
        # Load existing data, stack the new rows underneath, write back.
        existing = pd.read_excel(filename)
        combined = pd.concat([existing, pd.DataFrame(data_list)], ignore_index=True)
        combined.to_excel(filename, index=False)
        # Fix: the message previously contained the literal placeholder
        # "(unknown)" instead of the actual target filename.
        return f"Success: Appended {len(data_list)} rows to {filename}."
    except Exception as e:
        return f"Error appending to Excel: {e}"
53
+
54
+
55
def read_excel(filename):
    """Reads an Excel file and returns a summary."""
    if not os.path.exists(filename):
        return "Error: File not found."

    try:
        frame = pd.read_excel(filename)
        # Cap output at 50 rows to keep the LLM context small.
        if len(frame) > 50:
            return f"File found. Showing first 50 rows:\n{frame.head(50).to_markdown(index=False)}"
        return frame.to_markdown(index=False)
    except Exception as e:
        return f"Error reading Excel: {e}"
@@ -0,0 +1,150 @@
1
+ import os
2
+ import shutil
3
+ import datetime
4
+ from pathlib import Path
5
+ from typing import List, Dict
6
+
7
+
8
def get_downloads_folder() -> str:
    """
    Returns the path to the user's Downloads folder.

    Cross-platform: pathlib's home() resolves correctly on Windows,
    macOS, and Linux.
    """
    return str(Path.home().joinpath("Downloads"))
15
+
16
+
17
+ def _get_unique_name(destination_folder: Path, filename: str) -> Path:
18
+ """
19
+ If file exists, appends (1), (2), etc. to prevent overwrite.
20
+ """
21
+ file_path = destination_folder / filename
22
+ if not file_path.exists():
23
+ return file_path
24
+
25
+ stem = file_path.stem
26
+ suffix = file_path.suffix
27
+ counter = 1
28
+
29
+ while file_path.exists():
30
+ file_path = destination_folder / f"{stem} ({counter}){suffix}"
31
+ counter += 1
32
+
33
+ return file_path
34
+
35
+
36
def organize_files(directory: str, strategy: str = "extension") -> str:
    """
    Organizes files in a directory into subfolders.

    Args:
        directory: Folder to organize; the literal string "downloads"
            (any case) resolves to the user's Downloads folder.
        strategy:
            - 'extension' (Groups by type: Images, Videos, Docs)
            - 'date' (Groups by YYYY-MM-DD of last modification)
    Returns:
        str: Summary of the moves performed, or an error message.
    """
    if directory.lower() == "downloads":
        directory = get_downloads_folder()

    target_dir = Path(directory)

    if not target_dir.exists():
        return f"Error: Directory '{directory}' not found."

    # Fix: validate the strategy once up front. It was previously checked
    # per-file, so an unknown strategy on an empty directory silently
    # reported "No files needed organizing." instead of an error.
    if strategy not in ("extension", "date"):
        return f"Unknown strategy: {strategy}"

    # Smart grouping for cleaner organization
    TYPE_MAP = {
        # Images
        "jpg": "Images", "jpeg": "Images", "png": "Images", "gif": "Images",
        "svg": "Images", "webp": "Images", "tiff": "Images", "bmp": "Images",
        # Video
        "mp4": "Videos", "mov": "Videos", "avi": "Videos", "mkv": "Videos",
        "wmv": "Videos", "flv": "Videos", "webm": "Videos",
        # Audio
        "mp3": "Audio", "wav": "Audio", "aac": "Audio", "flac": "Audio", "m4a": "Audio",
        # Documents
        "pdf": "Documents", "doc": "Documents", "docx": "Documents",
        "txt": "Documents", "rtf": "Documents", "odt": "Documents",
        "xls": "Spreadsheets", "xlsx": "Spreadsheets", "csv": "Spreadsheets",
        "ppt": "Presentations", "pptx": "Presentations",
        # Archives
        "zip": "Archives", "rar": "Archives", "7z": "Archives",
        "tar": "Archives", "gz": "Archives",
        # Installers
        "exe": "Installers", "msi": "Installers", "dmg": "Installers",
        "deb": "Installers", "pkg": "Installers", "iso": "Installers",
        # Code
        "py": "Code", "js": "Code", "html": "Code", "css": "Code",
        "json": "Code", "java": "Code", "cpp": "Code", "ts": "Code"
    }

    actions_taken = []

    try:
        # List all files (skip directories and hidden files)
        files = [f for f in target_dir.iterdir() if f.is_file() and not f.name.startswith('.')]

        for file_path in files:
            if strategy == "extension":
                ext = file_path.suffix.lstrip('.').lower()
                # Use mapped name if available, else a generic per-extension folder
                folder_name = TYPE_MAP.get(ext, f"{ext.upper()}_Files")
            else:  # strategy == "date" (validated above)
                timestamp = file_path.stat().st_mtime
                folder_name = datetime.datetime.fromtimestamp(timestamp).strftime("%Y-%m-%d")

            # Create destination folder
            dest_folder = target_dir / folder_name
            dest_folder.mkdir(exist_ok=True)

            # Move file safely (rename if collision)
            final_path = _get_unique_name(dest_folder, file_path.name)
            shutil.move(str(file_path), str(final_path))

            actions_taken.append(f"Moved {file_path.name} -> {folder_name}/")

        if not actions_taken:
            return "No files needed organizing."

        return f"Organized {len(actions_taken)} files:\n" + "\n".join(actions_taken[:10]) + (
            "\n...and more." if len(actions_taken) > 10 else "")

    except Exception as e:
        return f"Organization failed: {e}"
117
+
118
+
119
def bulk_rename(directory: str, pattern: str, replace_with: str) -> str:
    """
    Renames files matching a pattern.
    Example: bulk_rename(".", "Screen Shot", "Capture")
    """
    if directory.lower() == "downloads":
        directory = get_downloads_folder()

    target_dir = Path(directory)
    if not target_dir.exists():
        return f"Error: Directory '{directory}' not found."

    renamed = 0
    try:
        for entry in target_dir.iterdir():
            if not entry.is_file() or pattern not in entry.name:
                continue

            candidate = target_dir / entry.name.replace(pattern, replace_with)

            # Never clobber an existing file on a name collision.
            if candidate.exists():
                candidate = _get_unique_name(target_dir, candidate.name)

            entry.rename(candidate)
            renamed += 1

        return f"Renamed {renamed} files."
    except Exception as e:
        return f"Rename Error: {e}"
@@ -0,0 +1,76 @@
1
+ import sqlite3, os, time
2
+ import numpy as np
3
+ from sentence_transformers import SentenceTransformer
4
+ from pathlib import Path
5
+
6
# Per-user data directory for the semantic file index.
BASE_DIR = Path.home() / ".sentinel-1"
BASE_DIR.mkdir(exist_ok=True)

# SQLite store mapping file paths to filename embeddings.
DB = BASE_DIR / "smart_files.db"
# Compact sentence-embedding model, loaded once at import time.
MODEL = SentenceTransformer("all-MiniLM-L6-v2")
11
+
12
def init():
    """Create the `files` table (path-keyed, with embedding blob) if absent."""
    conn = sqlite3.connect(DB)
    try:
        conn.execute("""
        CREATE TABLE IF NOT EXISTS files(
            path TEXT PRIMARY KEY,
            name TEXT,
            ext TEXT,
            embedding BLOB,
            last_opened REAL,
            last_modified REAL
        )
        """)
        conn.commit()
    finally:
        conn.close()


# Ensure the schema exists as soon as the module is imported.
init()
28
+
29
def embed(text):
    """Encode `text` into a float32 embedding and return its raw bytes."""
    vector = MODEL.encode(text)
    return vector.astype("float32").tobytes()
31
+
32
def index_file(path):
    """Insert or refresh the index row for one file.

    Only the filename is embedded (not the contents) to keep indexing fast.
    Missing paths are silently ignored.
    """
    if not os.path.exists(path):
        return

    name = os.path.basename(path)
    ext = os.path.splitext(path)[1]
    # IMPORTANT: only embed filename (fast)
    emb = embed(name)

    conn = sqlite3.connect(DB)
    conn.execute(
        """
        INSERT OR REPLACE INTO files
        VALUES (?, ?, ?, ?, ?, ?)
        """,
        (path, name, ext, emb, time.time(), os.path.getmtime(path)),
    )
    conn.commit()
    conn.close()
56
+
57
def smart_find(query, limit=5):
    """Rank indexed files against `query`, blending similarity with recency.

    Score = 0.7 * cosine similarity + 0.3 * recency, where recency decays
    with seconds since the file was last opened. Returns the top `limit`
    paths, best first.
    """
    query_vec = MODEL.encode(query)

    conn = sqlite3.connect(DB)
    rows = conn.execute(
        "SELECT path, embedding, last_opened FROM files"
    ).fetchall()
    conn.close()

    ranked = []
    for path, blob, last_opened in rows:
        stored = np.frombuffer(blob, dtype="float32")
        cosine = np.dot(query_vec, stored) / (
            np.linalg.norm(query_vec) * np.linalg.norm(stored)
        )
        recency = 1 / (1 + (time.time() - last_opened))
        ranked.append((cosine * 0.7 + recency * 0.3, path))

    ranked.sort(reverse=True)
    return [path for _, path in ranked[:limit]]
@@ -0,0 +1,186 @@
1
+ # FILE: tools/sql_index.py
2
+ import sqlite3
3
+ import os
4
+ import datetime
5
+ import platform
6
+ from pathlib import Path
7
+
8
# Per-user data directory shared by Sentinel's tools.
BASE_DIR = Path.home() / ".sentinel-1"
BASE_DIR.mkdir(exist_ok=True)

DB_FILE = BASE_DIR / "sentinel-1.db"

# --- CONFIGURATION: WHAT TO IGNORE ---
# Directory names the indexer must never descend into: OS internals plus
# development artefacts that would flood the index with noise.
SKIP_DIRS = {
    # System Folders
    "Windows", "Program Files", "Program Files (x86)",
    "AppData", "Application Data",
    "$RECYCLE.BIN", "System Volume Information", "Recovery",
    "boot", "efi",

    # Dev Junk (Optional - keep these to avoid noise, or remove to index code)
    "node_modules", "venv", ".venv", ".git", "__pycache__", ".idea", ".vscode",
}
+ }
25
+
26
+
27
+ def _get_conn():
28
+ conn = sqlite3.connect(DB_FILE)
29
+ conn.row_factory = sqlite3.Row
30
+ return conn
31
+
32
+
33
+ def get_all_drives():
34
+ """
35
+ Returns a list of all logical drives on the system.
36
+ On Windows: ['C:\\', 'D:\\', 'E:\\']
37
+ """
38
+ drives = []
39
+ if platform.system() == "Windows":
40
+ import string
41
+ from ctypes import windll
42
+ drives = [f"{d}:\\" for d in string.ascii_uppercase if os.path.exists(f"{d}:\\")]
43
+ else:
44
+ # Unix-based systems
45
+ drives = ["/"]
46
+ if os.path.exists("/Volumes"): # macOS external drives
47
+ drives.extend([os.path.join("/Volumes", d) for d in os.listdir("/Volumes")])
48
+ if os.path.exists("/media"): # Linux external drives
49
+ drives.extend([os.path.join("/media", d) for d in os.listdir("/media")])
50
+ return drives
51
+
52
+
53
+ def build_index(silent=True):
54
+ conn = _get_conn()
55
+ cursor = conn.cursor()
56
+
57
+ # 1. Setup Database
58
+ cursor.execute('''
59
+ CREATE TABLE IF NOT EXISTS files (
60
+ path TEXT PRIMARY KEY,
61
+ name TEXT,
62
+ extension TEXT,
63
+ size_mb REAL,
64
+ modified_date TEXT,
65
+ mtime_raw REAL
66
+ )
67
+ ''')
68
+ cursor.execute("CREATE INDEX IF NOT EXISTS idx_name ON files(name)")
69
+
70
+ # 2. Identify Scan Targets
71
+ # Strategy: Scan the User Profile on C:, plus the ROOT of every other drive.
72
+ targets = []
73
+
74
+ # A. Always get standard user folders (Safest way to scan C:)
75
+ home = os.path.expanduser("~")
76
+ targets.extend([
77
+ os.path.join(home, "Desktop"),
78
+ os.path.join(home, "Documents"),
79
+ os.path.join(home, "Downloads"),
80
+ os.path.join(home, "Pictures"),
81
+ os.path.join(home, "Music"),
82
+ os.path.join(home, "Videos"),
83
+ # Add your code folder explicitly if it's not in the above
84
+ r"D:\Python Projects"
85
+ ])
86
+
87
+ # B. Add ALL other drives (D:, E:, etc)
88
+ all_drives = get_all_drives()
89
+ for drive in all_drives:
90
+ # We generally avoid scanning C:\ root because it's 90% OS junk.
91
+ # But D:\, E:\, etc. are usually pure data, so we scan their ROOT.
92
+ if "C:" in drive.upper():
93
+ continue
94
+ targets.append(drive)
95
+
96
+ # Remove duplicates
97
+ targets = list(set(targets))
98
+
99
+ # 3. Load Cache (for speed)
100
+ if not silent: print("[System] Loading existing file index...")
101
+ cursor.execute("SELECT path, mtime_raw FROM files")
102
+ existing_files = {row['path']: row['mtime_raw'] for row in cursor.fetchall()}
103
+
104
+ updates = []
105
+ seen_paths = set()
106
+
107
+ if not silent:
108
+ print(f"[System] Scanning {len(targets)} locations:\n" + "\n".join([f" - {t}" for t in targets]))
109
+
110
+ # 4. The Scan Loop
111
+ for folder in targets:
112
+ if not os.path.exists(folder): continue
113
+
114
+ for root, dirs, filenames in os.walk(folder):
115
+ # --- FILTERING: This is where we skip "System Ones" ---
116
+ # We modify 'dirs' in-place so os.walk doesn't even enter them.
117
+ dirs[:] = [d for d in dirs if d not in SKIP_DIRS and not d.startswith(".")]
118
+
119
+ for f in filenames:
120
+ # Ignore temp/hidden files
121
+ if f.startswith("~$") or f.startswith("."): continue
122
+
123
+ fullpath = os.path.join(root, f)
124
+ seen_paths.add(fullpath)
125
+
126
+ try:
127
+ # Check Modification Time
128
+ stats = os.stat(fullpath)
129
+ current_mtime = stats.st_mtime
130
+
131
+ # Optimization: Skip if file hasn't changed since last scan
132
+ if fullpath in existing_files and abs(existing_files[fullpath] - current_mtime) < 1.0:
133
+ continue
134
+
135
+ size_mb = round(stats.st_size / (1024 * 1024), 2)
136
+ mtime_str = datetime.datetime.fromtimestamp(current_mtime).strftime('%Y-%m-%d %H:%M:%S')
137
+ ext = os.path.splitext(f)[1].lower()
138
+
139
+ updates.append((fullpath, f, ext, size_mb, mtime_str, current_mtime))
140
+
141
+ except (OSError, PermissionError):
142
+ continue
143
+
144
+ # 5. Commit Changes
145
+ if updates:
146
+ if not silent: print(f"[System] Indexing {len(updates)} new/changed files...")
147
+ cursor.executemany('''
148
+ INSERT OR REPLACE INTO files (path, name, extension, size_mb, modified_date, mtime_raw)
149
+ VALUES (?, ?, ?, ?, ?, ?)
150
+ ''', updates)
151
+
152
+ # 6. Cleanup (Remove files that were deleted from disk)
153
+ deleted_paths = set(existing_files.keys()) - seen_paths
154
+ if deleted_paths:
155
+ deleted_list = list(deleted_paths)
156
+ chunk_size = 900
157
+ for i in range(0, len(deleted_list), chunk_size):
158
+ chunk = deleted_list[i:i + chunk_size]
159
+ cursor.executemany("DELETE FROM files WHERE path = ?", [(p,) for p in chunk])
160
+
161
+ conn.commit()
162
+ conn.close()
163
+ return f"Index Updated: {len(updates)} new, {len(deleted_paths)} removed."
164
+
165
+
166
+ def search_db(query):
167
+ conn = _get_conn()
168
+ cursor = conn.cursor()
169
+
170
+ sql_query = f"%{query}%"
171
+
172
+ # Prioritize exact matches in name, then partial matches
173
+ cursor.execute('''
174
+ SELECT name, path, modified_date
175
+ FROM files
176
+ WHERE name LIKE ?
177
+ ORDER BY mtime_raw DESC
178
+ LIMIT 10
179
+ ''', (sql_query,))
180
+
181
+ results = cursor.fetchall()
182
+ conn.close()
183
+
184
+ if not results: return "No files found matching that name."
185
+
186
+ return "\n".join([f"- {r['name']} ({r['modified_date']})\n Path: {r['path']}" for r in results])