tklr-dgraham 0.0.0rc11__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of tklr-dgraham might be problematic. Click here for more details.
- tklr/__init__.py +0 -0
- tklr/cli/main.py +253 -0
- tklr/cli/migrate_etm_to_tklr.py +764 -0
- tklr/common.py +1296 -0
- tklr/controller.py +2602 -0
- tklr/item.py +3765 -0
- tklr/list_colors.py +234 -0
- tklr/model.py +3973 -0
- tklr/shared.py +654 -0
- tklr/sounds/alert.mp3 +0 -0
- tklr/tklr_env.py +461 -0
- tklr/use_system.py +64 -0
- tklr/versioning.py +21 -0
- tklr/view.py +2912 -0
- tklr/view_agenda.py +236 -0
- tklr/view_textual.css +296 -0
- tklr_dgraham-0.0.0rc11.dist-info/METADATA +699 -0
- tklr_dgraham-0.0.0rc11.dist-info/RECORD +21 -0
- tklr_dgraham-0.0.0rc11.dist-info/WHEEL +5 -0
- tklr_dgraham-0.0.0rc11.dist-info/entry_points.txt +2 -0
- tklr_dgraham-0.0.0rc11.dist-info/top_level.txt +1 -0
tklr/controller.py
ADDED
|
@@ -0,0 +1,2602 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
from packaging.version import parse as parse_version
|
|
3
|
+
from importlib.metadata import version
|
|
4
|
+
from functools import lru_cache
|
|
5
|
+
|
|
6
|
+
# TODO: Keep the display part - the model part will be in model.py
|
|
7
|
+
from datetime import datetime, timedelta, date
|
|
8
|
+
|
|
9
|
+
# from logging import log
|
|
10
|
+
from sre_compile import dis
|
|
11
|
+
from rich.console import Console
|
|
12
|
+
from rich.table import Table
|
|
13
|
+
from rich.box import HEAVY_EDGE
|
|
14
|
+
from rich import style
|
|
15
|
+
from rich.columns import Columns
|
|
16
|
+
from rich.console import Group, group
|
|
17
|
+
from rich.panel import Panel
|
|
18
|
+
from rich.layout import Layout
|
|
19
|
+
from rich import print as rprint
|
|
20
|
+
import re
|
|
21
|
+
import inspect
|
|
22
|
+
from rich.theme import Theme
|
|
23
|
+
from rich import box
|
|
24
|
+
from rich.text import Text
|
|
25
|
+
from typing import List, Tuple, Optional, Dict, Any, Set
|
|
26
|
+
from bisect import bisect_left, bisect_right
|
|
27
|
+
from typing import Iterator
|
|
28
|
+
|
|
29
|
+
import string
|
|
30
|
+
import shutil
|
|
31
|
+
import subprocess
|
|
32
|
+
import shlex
|
|
33
|
+
import textwrap
|
|
34
|
+
|
|
35
|
+
|
|
36
|
+
import json
|
|
37
|
+
from typing import Literal
|
|
38
|
+
from .item import Item
|
|
39
|
+
from .model import DatabaseManager, UrgencyComputer
|
|
40
|
+
from .model import _fmt_naive, _to_local_naive
|
|
41
|
+
from .list_colors import css_named_colors
|
|
42
|
+
from .versioning import get_version
|
|
43
|
+
|
|
44
|
+
from collections import defaultdict
|
|
45
|
+
from dataclasses import dataclass, field
|
|
46
|
+
from pathlib import Path
|
|
47
|
+
from zoneinfo import ZoneInfo
|
|
48
|
+
|
|
49
|
+
# import sqlite3
|
|
50
|
+
from .shared import (
|
|
51
|
+
TYPE_TO_COLOR,
|
|
52
|
+
log_msg,
|
|
53
|
+
HRS_MINS,
|
|
54
|
+
# ALERT_COMMANDS,
|
|
55
|
+
dt_as_utc_timestamp,
|
|
56
|
+
format_time_range,
|
|
57
|
+
format_timedelta,
|
|
58
|
+
datetime_from_timestamp,
|
|
59
|
+
format_datetime,
|
|
60
|
+
datetime_in_words,
|
|
61
|
+
truncate_string,
|
|
62
|
+
parse,
|
|
63
|
+
fmt_local_compact,
|
|
64
|
+
parse_local_compact,
|
|
65
|
+
fmt_utc_z,
|
|
66
|
+
parse_utc_z,
|
|
67
|
+
)
|
|
68
|
+
from tklr.tklr_env import TklrEnvironment
|
|
69
|
+
from tklr.view import ChildBinRow, ReminderRow
|
|
70
|
+
|
|
71
|
+
|
|
72
|
+
VERSION = get_version()

# Compact UTC timestamp format, e.g. "20240101T0930Z".
ISO_Z = "%Y%m%dT%H%MZ"

# Highlight colors for entry tokens: itemtype char, @-keys and &-keys.
type_color = css_named_colors["goldenrod"]
at_color = css_named_colors["goldenrod"]
am_color = css_named_colors["goldenrod"]
label_color = css_named_colors["lightskyblue"]

# The overall background color of the app is #2e2e2e - set in view_textual.css
CORNSILK = "#FFF8DC"
DARK_GRAY = "#A9A9A9"
DARK_GREY = "#A9A9A9"  # same as DARK_GRAY
DARK_OLIVEGREEN = "#556B2F"
DARK_ORANGE = "#FF8C00"
DARK_SALMON = "#E9967A"
GOLD = "#FFD700"
GOLDENROD = "#DAA520"
KHAKI = "#F0E68C"
LAWN_GREEN = "#7CFC00"
LEMON_CHIFFON = "#FFFACD"
LIGHT_CORAL = "#F08080"
LIGHT_SKY_BLUE = "#87CEFA"
LIME_GREEN = "#32CD32"
ORANGE_RED = "#FF4500"
PALE_GREEN = "#98FB98"
PEACHPUFF = "#FFDAB9"
SALMON = "#FA8072"
SANDY_BROWN = "#F4A460"
SEA_GREEN = "#2E8B57"
SLATE_GREY = "#708090"
TOMATO = "#FF6347"

# Colors for UI elements.
# NOTE: the original module assigned HEADER_COLOR, BUSY_COLOR and
# SELECTED_COLOR twice with conflicting values; only the final (effective)
# value of each is kept here.
DAY_COLOR = LEMON_CHIFFON
FRAME_COLOR = KHAKI
HEADER_COLOR = LEMON_CHIFFON  # effective value (LIGHT_SKY_BLUE was shadowed)
HEADER_STYLE = f"bold {LEMON_CHIFFON}"
DIM_COLOR = DARK_GRAY
ALLDAY_COLOR = SANDY_BROWN
EVENT_COLOR = LIME_GREEN
NOTE_COLOR = DARK_SALMON
PASSED_EVENT = DARK_OLIVEGREEN
ACTIVE_EVENT = LAWN_GREEN
TASK_COLOR = LIGHT_SKY_BLUE
AVAILABLE_COLOR = LIGHT_SKY_BLUE
WAITING_COLOR = SLATE_GREY
FINISHED_COLOR = DARK_GREY
GOAL_COLOR = GOLDENROD
CHORE_COLOR = KHAKI
PASTDUE_COLOR = DARK_ORANGE
NOTICE_COLOR = GOLD
DRAFT_COLOR = ORANGE_RED
TODAY_COLOR = TOMATO
SELECTED_BACKGROUND = "#566573"
MATCH_COLOR = TOMATO
TITLE_COLOR = CORNSILK
BUSY_COLOR = "#adff2f"  # effective value ("#9acd32" was shadowed)
CONF_COLOR = TOMATO
BUSY_FRAME_COLOR = "#5d5d5d"

# Rich/Textual style string for the selected row.
# Effective value is "yellow" ("bold yellow" was shadowed).
SELECTED_COLOR = "yellow"

FIELD_COLOR = LIGHT_SKY_BLUE

# Busy-bar geometry: the day is divided at these hour boundaries.
# SLOT_HOURS = [0, 4, 8, 12, 16, 20, 24]
SLOT_HOURS = [0, 6, 12, 18, 24]
SLOT_MINUTES = [x * 60 for x in SLOT_HOURS]
BUSY = "■"  # U+25A0 this will be busy_bar busy and conflict character
FREE = "□"  # U+25A1 this will be busy_bar free character
ADAY = "━"  # U+2501 for all day events ━
NOTICE = "⋙"

ONEDAY = timedelta(days=1)
ONEWK = 7 * ONEDAY
alpha = list(string.ascii_lowercase)
|
|
156
|
+
|
|
157
|
+
# TYPE_TO_COLOR = {
|
|
158
|
+
# "*": EVENT_COLOR, # event
|
|
159
|
+
# "~": AVAILABLE_COLOR, # available task
|
|
160
|
+
# "x": FINISHED_COLOR, # finished task
|
|
161
|
+
# "^": AVAILABLE_COLOR, # available task
|
|
162
|
+
# "+": WAITING_COLOR, # waiting task
|
|
163
|
+
# "%": NOTE_COLOR, # note
|
|
164
|
+
# "<": PASTDUE_COLOR, # past due task
|
|
165
|
+
# ">": NOTICE_COLOR, # begin
|
|
166
|
+
# "!": GOAL_COLOR, # draft
|
|
167
|
+
# "?": DRAFT_COLOR, # draft
|
|
168
|
+
# }
|
|
169
|
+
#
|
|
170
|
+
|
|
171
|
+
|
|
172
|
+
def _ensure_tokens_list(value):
|
|
173
|
+
"""Return a list[dict] for tokens whether DB returned JSON str or already-parsed list."""
|
|
174
|
+
if value is None:
|
|
175
|
+
return []
|
|
176
|
+
if isinstance(value, (list, tuple)):
|
|
177
|
+
return list(value)
|
|
178
|
+
if isinstance(value, (bytes, bytearray)):
|
|
179
|
+
value = value.decode("utf-8")
|
|
180
|
+
if isinstance(value, str):
|
|
181
|
+
return json.loads(value)
|
|
182
|
+
# last resort: try to coerce
|
|
183
|
+
return list(value)
|
|
184
|
+
|
|
185
|
+
|
|
186
|
+
# Capture the @b value; stop at end-of-line or at the start of another
# token-ish thing (@, &, +, %, - ...).
RE_BIN = re.compile(r"@b\s+([^\s].*?)\s*(?=$|[@&+%-])", re.IGNORECASE)


def extract_bin_slashpath(line: str) -> str | None:
    """Pull the bin slash-path out of an entry line, if any.

    Example:
        "Pick up pastry @b Lille\\France\\places @t 9a" -> "Lille\\France\\places"
    """
    match = RE_BIN.search(line or "")
    if match is None:
        return None
    return match.group(1)
|
|
197
|
+
|
|
198
|
+
|
|
199
|
+
def format_tokens(tokens, width, highlight=True):
    """Render entry tokens as wrapped lines, optionally Rich-highlighted.

    `tokens` may be a JSON string or an already-parsed list of dicts with
    keys "token" (text), "t" (type, e.g. "itemtype" or "@") and "k"/"key".
    Returns a single newline-joined string whose lines fit within `width`.
    With highlight=True, @-key and &-key markers get color markup; with
    highlight=False, existing Rich markup is stripped instead.
    """
    if isinstance(tokens, str):
        try:
            tokens = json.loads(tokens)
        except Exception:
            # Not JSON — fall through and iterate the string's items as-is.
            pass

    output_lines = []
    current_line = ""

    def strip_rich(s: str) -> str:
        # Remove any [tag]...[/tag] style Rich markup.
        return re.sub(r"\[[^\]]+\]", "", s)

    def apply_highlight(line: str) -> str:
        if not highlight:
            return strip_rich(line)
        color = {"@": at_color, "&": am_color}
        # Wrap each "@x " / "&x " key marker in its color tag.
        return re.sub(
            r"(^|(?<=\s))([@&]\S\s)",
            lambda m: m.group(1)
            + f"[{color[m.group(2)[0]]}]{m.group(2)}[/{color[m.group(2)[0]]}]",
            line,
        )

    for t in tokens:
        token_text = (t.get("token") or "").rstrip("\n")
        ttype = t.get("t")
        k = t.get("k") or t.get("key")

        # ✅ PRESERVE itemtype char as the start of the line
        if ttype == "itemtype":
            if current_line:
                output_lines.append(current_line)
            current_line = token_text  # start new line with '*', '-', '~', '^', etc.
            continue

        # @d blocks: own paragraph, preserve newlines/indent
        if ttype == "@" and k == "d":
            if current_line:
                output_lines.append(current_line)
                current_line = ""
            # output_lines.append("")
            for line in token_text.splitlines():
                # Keep each description line's leading indent when wrapping.
                indent = len(line) - len(line.lstrip(" "))
                wrapped = textwrap.wrap(
                    line, width=width, subsequent_indent=" " * indent
                ) or [""]
                output_lines.extend(wrapped)
            # output_lines.append("")
            continue

        # optional special-case for @~: flush the current line and start the
        # next one with a single-space indent.
        if ttype == "@" and k == "~":
            # if current_line:
            output_lines.append(current_line)
            current_line = " "
            # if token_text:
            #     output_lines.append(token_text)
            # NOTE(review): the original 'continue' here is commented out, so
            # @~ tokens also fall through to the normal handling below —
            # confirm that is intended.
        if not token_text:
            continue
        # Greedy wrap: start a new line when appending would exceed width.
        if current_line and len(current_line) + 1 + len(token_text) > width:
            output_lines.append(current_line)
            current_line = token_text
        else:
            current_line = current_line + " " + token_text

    if current_line:
        output_lines.append(current_line)

    return "\n".join(apply_highlight(line) for line in output_lines)
|
|
270
|
+
|
|
271
|
+
|
|
272
|
+
def wrap_preserve_newlines(text, width=70, initial_indent="", subsequent_indent=""):
    """Wrap text to `width` while keeping the original hard line breaks.

    Each input line is wrapped independently; an empty input line is kept
    as a single empty output line. Returns a flat list of wrapped lines.
    """
    wrapped_out = []
    for line in text.splitlines():
        pieces = textwrap.wrap(
            line,
            width=width,
            initial_indent=initial_indent,
            subsequent_indent=subsequent_indent,
        )
        # textwrap.wrap returns [] for a blank line; preserve it as "".
        wrapped_out.extend(pieces if pieces else [""])
    return wrapped_out
|
|
286
|
+
|
|
287
|
+
|
|
288
|
+
def format_rruleset_for_details(
    rruleset: str, width: int, subsequent_indent: int = 11
) -> str:
    """
    Wrap RDATE/EXDATE value lists on commas to fit `width`.
    Continuation lines are indented by the length of header.
    When a wrap occurs, the comma stays at the end of the line.

    NOTE(review): the `subsequent_indent` parameter is currently unused —
    continuation lines use a fixed 2-space indent below; confirm intended.
    """

    def wrap_value_line(header: str, values_csv: str) -> list[str]:
        # indent = " " * (len(header) + 2)  # for colon and space
        indent = " " * 2
        tokens = [t.strip() for t in values_csv.split(",") if t.strip()]
        out_lines: list[str] = []
        cur = header  # start with e.g. "RDATE:"

        for i, tok in enumerate(tokens):
            sep = "," if i < len(tokens) - 1 else ""  # last token → no comma
            candidate = f"{cur}{tok}{sep}"

            if len(candidate) <= width:
                cur = candidate + " "
            else:
                # flush current line before adding token
                out_lines.append(cur.rstrip())
                cur = f"{indent}{tok}{sep} "
        if cur.strip():
            out_lines.append(cur.rstrip())
        return out_lines

    out: list[str] = []
    for line in (rruleset or "").splitlines():
        if ":" in line:
            prop, value = line.split(":", 1)
            prop_up = prop.upper()
            # Only the date-list properties need comma wrapping.
            if prop_up.startswith("RDATE") or prop_up.startswith("EXDATE"):
                out.extend(wrap_value_line(f"{prop_up}:", value.strip()))
                continue
        out.append(line)
    # prepend = " " * (len("rruleset: ")) + "\n"
    log_msg(f"{out = }")
    return "\n ".join(out)
|
|
330
|
+
|
|
331
|
+
|
|
332
|
+
def format_hours_mins(dt: datetime, mode: Literal["24", "12"]) -> str:
    """Format a datetime's time-of-day as hours and minutes.

    mode "24" gives "HH:MM"; mode "12" gives e.g. "9:30a" / "2p"
    (minutes omitted on the hour, am/pm shortened to a/p).
    """
    if mode == "12":
        # %-I drops the leading zero; omit ":MM" exactly on the hour.
        pattern = "%-I:%M%p" if dt.minute else "%-I%p"
        return dt.strftime(pattern).lower().rstrip("m")
    return dt.strftime("%H:%M")
|
|
350
|
+
|
|
351
|
+
|
|
352
|
+
def format_date_range(start_dt: datetime, end_dt: datetime):
    """Render a start/end date pair compactly.

    Month and year are repeated only when they differ between the two
    endpoints, e.g. "Mar 1 - 7, 2024" vs "Mar 30 - Apr 5, 2024".
    """
    if start_dt.year != end_dt.year:
        return f"{start_dt.strftime('%b %-d, %Y')} - {end_dt.strftime('%b %-d, %Y')}"
    if start_dt.month != end_dt.month:
        return f"{start_dt.strftime('%b %-d')} - {end_dt.strftime('%b %-d, %Y')}"
    return f"{start_dt.strftime('%b %-d')} - {end_dt.strftime('%-d, %Y')}"
|
|
365
|
+
|
|
366
|
+
|
|
367
|
+
def format_iso_week(monday_date: datetime):
    """Render the week starting at `monday_date` as 'Mon d - d, YYYY #WW'.

    The month is repeated only when the week spans two months; the trailing
    'YYYY #WW' is the ISO year and week number of the start date.
    """
    first_day = monday_date.date()
    last_day = first_day + timedelta(days=6)
    iso_yr, iso_wk, _ = first_day.isocalendar()
    yr_wk = f"{iso_yr} #{iso_wk}"
    if first_day.month == last_day.month:
        return f"{first_day.strftime('%b %-d')} - {last_day.strftime('%-d')}, {yr_wk}"
    return f"{first_day.strftime('%b %-d')} - {last_day.strftime('%b %-d')}, {yr_wk}"
|
|
378
|
+
|
|
379
|
+
|
|
380
|
+
def get_previous_yrwk(year, week):
    """Return the ISO (year, week) immediately before the given one."""
    # %G/%V/%u parse an ISO year/week/weekday triple; weekday 1 = Monday.
    monday = datetime.strptime(f"{year} {week} 1", "%G %V %u")
    prior_monday = monday - timedelta(weeks=1)
    return prior_monday.isocalendar()[:2]
|
|
390
|
+
|
|
391
|
+
|
|
392
|
+
def get_next_yrwk(year, week):
    """Return the ISO (year, week) immediately after the given one."""
    # %G/%V/%u parse an ISO year/week/weekday triple; weekday 1 = Monday.
    monday = datetime.strptime(f"{year} {week} 1", "%G %V %u")
    following_monday = monday + timedelta(weeks=1)
    return following_monday.isocalendar()[:2]
|
|
402
|
+
|
|
403
|
+
|
|
404
|
+
def calculate_4_week_start():
    """Return the Monday that starts the current 4-week display cycle.

    Cycles are aligned so that ISO weeks 1-4, 5-8, ... form one period.
    """
    now = datetime.now()
    _, iso_week, iso_weekday = now.isocalendar()
    this_monday = now - timedelta(days=iso_weekday - 1)
    weeks_into_cycle = (iso_week - 1) % 4
    return this_monday - timedelta(weeks=weeks_into_cycle)
|
|
413
|
+
|
|
414
|
+
|
|
415
|
+
def decimal_to_base26(decimal_num):
    """
    Convert a non-negative integer to its base-26 string.

    Args:
        decimal_num (int): The decimal number to convert.

    Returns:
        str: The base-26 representation where 'a' = 0, 'b' = 1, ..., 'z' = 25.

    Raises:
        ValueError: If decimal_num is negative.
    """
    if decimal_num < 0:
        raise ValueError("Decimal number must be non-negative.")

    if decimal_num == 0:
        return "a"  # special case: zero has no digits in the loop below

    digits = []
    remaining = decimal_num
    while remaining > 0:
        remaining, digit = divmod(remaining, 26)
        digits.append(chr(ord("a") + digit))
    return "".join(reversed(digits))
|
|
438
|
+
|
|
439
|
+
|
|
440
|
+
def base26_to_decimal(tag: str) -> int:
    """Decode a base-26 tag ('a' = 0 .. 'z' = 25) of any length to an int."""
    value = 0
    for ch in tag:
        value = 26 * value + (ord(ch) - 97)  # 97 == ord("a")
    return value
|
|
446
|
+
|
|
447
|
+
|
|
448
|
+
def indx_to_tag(indx: int, fill: int = 1):
    """Return the base-26 tag for `indx`, left-padded with 'a' to width `fill`."""
    tag = decimal_to_base26(indx)
    return tag.rjust(fill, "a")
|
|
453
|
+
|
|
454
|
+
|
|
455
|
+
def event_tuple_to_minutes(start_dt: datetime, end_dt: datetime) -> Tuple[int, int]:
    """
    Convert event start and end datetimes to minutes since midnight.

    Args:
        start_dt (datetime): Event start datetime.
        end_dt (datetime): Event end datetime (may be None for instants).

    Returns:
        Tuple(int, int): (start_minutes, end_minutes); when end_dt is
        missing, the end equals the start.
    """
    begin = 60 * start_dt.hour + start_dt.minute
    if end_dt:
        finish = 60 * end_dt.hour + end_dt.minute
    else:
        finish = begin
    return (begin, finish)
|
|
469
|
+
|
|
470
|
+
|
|
471
|
+
def get_busy_bar(events):
    """
    Determine slot states (0: free, 1: busy, 2: conflict) for a list of events
    and render them as a Rich-markup busy bar.

    Args:
        events (List[Tuple[int, int]]): Event tuples (start, end) in minutes
            since midnight; (0, 0) marks an all-day event.

    Returns:
        Tuple[str, str]: (aday_str, busy_str) — a colored marker when any
        all-day event exists, and the framed busy bar (one cell per
        SLOT_MINUTES interval) or a bare newline when nothing is busy.
    """
    # Initialize slot usage as empty lists
    L = SLOT_MINUTES
    slot_events = [[] for _ in range(len(L) - 1)]
    allday = 0

    for b, e in events:
        # Find the start and end slots for the current event

        if b == 0 and e == 0:
            allday += 1
        # NOTE(review): zero-length events are skipped only while no all-day
        # event has been counted yet — confirm this ordering is intended.
        if e == b and not allday:
            continue

        # Map minute values onto slot indices (slot i spans L[i]..L[i+1]).
        start_slot = bisect_left(L, b) - 1
        end_slot = bisect_left(L, e) - 1

        # Track the event in each affected slot
        for i in range(start_slot, min(len(slot_events), end_slot + 1)):
            if L[i + 1] > b and L[i] < e:  # Ensure overlap with the slot
                slot_events[i].append((b, e))

    # Determine the state of each slot
    slots_state = []
    for i, events_in_slot in enumerate(slot_events):
        if not events_in_slot:
            # No events in the slot
            slots_state.append(0)
        elif len(events_in_slot) == 1:
            # Only one event in the slot, so it's busy but not conflicting
            slots_state.append(1)
        else:
            # Check for overlaps to determine if there's a conflict
            events_in_slot.sort()  # Sort events by start time
            conflict = False
            for j in range(len(events_in_slot) - 1):
                _, end1 = events_in_slot[j]
                start2, _ = events_in_slot[j + 1]
                if start2 < end1:  # Overlap detected
                    conflict = True
                    break
            slots_state.append(2 if conflict else 1)

    # Render each slot state as a colored cell.
    busy_bar = ["_" for _ in range(len(slots_state))]
    have_busy = False
    for i in range(len(slots_state)):
        if slots_state[i] == 0:
            busy_bar[i] = f"[dim]{FREE}[/dim]"
        elif slots_state[i] == 1:
            have_busy = True
            busy_bar[i] = f"[{BUSY_COLOR}]{BUSY}[/{BUSY_COLOR}]"
        else:
            have_busy = True
            busy_bar[i] = f"[{CONF_COLOR}]{BUSY}[/{CONF_COLOR}]"

    # return slots_state, "".join(busy_bar)
    busy_str = (
        f"\n[{BUSY_FRAME_COLOR}]{''.join(busy_bar)}[/{BUSY_FRAME_COLOR}]"
        if have_busy
        else "\n"
    )

    aday_str = f"[{BUSY_COLOR}]{ADAY}[/{BUSY_COLOR}]" if allday > 0 else ""

    return aday_str, busy_str
|
|
547
|
+
|
|
548
|
+
|
|
549
|
+
def ordinal(n: int) -> str:
    """Return ordinal representation of an integer (1 -> 1st)."""
    # 11th/12th/13th (and 111th, 212th, ...) are irregular: always "th".
    is_teenish = 10 <= n % 100 <= 20
    suffix = "th" if is_teenish else {1: "st", 2: "nd", 3: "rd"}.get(n % 10, "th")
    return f"{n}{suffix}"
|
|
556
|
+
|
|
557
|
+
|
|
558
|
+
def set_anniversary(subject: str, start: date, instance: date, freq: str) -> str:
    """
    Replace the {XXX} placeholder in `subject` with the ordinal number of
    freq-periods elapsed from `start` to `instance`.
    freq ∈ {'y','m','w','d'}.
    """
    has_xxx = "{XXX}" in subject
    log_msg(f"set_anniversary {subject = }, {has_xxx = }")
    if not has_xxx:
        return subject

    # Work with plain dates so the subtraction below is well defined.
    start_day = start.date() if isinstance(start, datetime) else start
    instance_day = instance.date() if isinstance(instance, datetime) else instance

    elapsed_days = (instance_day - start_day).days
    if freq == "y":
        n = instance_day.year - start_day.year
    elif freq == "m":
        n = 12 * (instance_day.year - start_day.year) + (
            instance_day.month - start_day.month
        )
    elif freq == "w":
        n = elapsed_days // 7
    else:  # 'd'
        n = elapsed_days

    # Clamp so an instance at/before the start never yields a negative count.
    n = max(n, 0)

    new_subject = subject.replace("{XXX}", ordinal(n))
    log_msg(f"{subject = }, {new_subject = }")
    return new_subject
|
|
589
|
+
|
|
590
|
+
|
|
591
|
+
# A page is (rows, tag_map)
# rows: list[str] ready to render (header + content)
# tag_map: { 'a': ('bin', bin_id) | ('reminder', (record_id, job_id)) }
# NOTE(review): page_tagger in this module actually stores
# (record_id, job_id|None) tuples in its tag maps — confirm which of the
# two shapes callers of this alias rely on.
Page = Tuple[List[str], Dict[str, Tuple[str, object]]]
|
|
595
|
+
|
|
596
|
+
|
|
597
|
+
def page_tagger(
    items: List[dict], page_size: int = 26
) -> List[Tuple[List[str], Dict[str, Tuple[int, int | None]]]]:
    """
    Split 'items' into pages. Each item is a dict:
        { "record_id": int | None, "job_id": int | None, "text": str }

    Returns a list of pages. Each page is a tuple:
        (page_rows: list[str], page_tag_map: dict[str -> (record_id, job_id|None)])

    Rules:
      - Only record rows (record_id != None) receive single-letter tags 'a'..'z'.
      - Exactly `page_size` records are tagged per page (except the last page).
      - Headers (record_id is None) are kept in order.
      - If a header's block of records spans pages, the header is duplicated at the
        start of the next page with " (continued)" appended.
      - Non-dict items are logged and skipped.
    """
    pages: List[Tuple[List[str], Dict[str, Tuple[int, int | None]]]] = []

    page_rows: List[str] = []
    tag_map: Dict[str, Tuple[int, int | None]] = {}
    tag_counter = 0  # number of record-tags on current page
    last_header_text = None  # text of the most recent header seen (if any)

    def finalize_page(new_page_rows=None):
        """Close out the current page and start a fresh one optionally seeded with
        new_page_rows (e.g., duplicated header)."""
        nonlocal page_rows, tag_map, tag_counter
        pages.append((page_rows, tag_map))
        # Copy the seed rows so later appends don't mutate the caller's list.
        page_rows = new_page_rows[:] if new_page_rows else []
        tag_map = {}
        tag_counter = 0

    for item in items:
        # header row
        if not isinstance(item, dict):
            log_msg(f"error: {item} is not a dict")
            continue
        if item.get("record_id") is None:
            hdr_text = item.get("text", "")
            last_header_text = hdr_text
            page_rows.append(hdr_text)
            # continue; headers do not affect tag_counter
            continue

        # record row (taggable)
        # If current page is already full (page_size tags), start a new page.
        # IMPORTANT: when we create the new page, we want to preseed it with a
        # duplicated header (if one exists) and mark it as "(continued)".
        if tag_counter >= page_size:
            # If we have a last_header_text, duplicate it at top of next page with continued.
            if last_header_text:
                continued_header = f"{last_header_text} (continued)"
                finalize_page(new_page_rows=[continued_header])
            else:
                finalize_page()

        # assign next tag on current page
        tag = chr(ord("a") + tag_counter)
        tag_map[tag] = (item["record_id"], item.get("job_id", None))
        # Use small/dim tag formatting to match your UI style; adapt if needed
        page_rows.append(f" [dim]{tag}[/dim] {item.get('text', '')}")
        tag_counter += 1

    # At end, still need to push the last page if it has any rows
    if page_rows or tag_map:
        pages.append((page_rows, tag_map))

    return pages
|
|
666
|
+
|
|
667
|
+
|
|
668
|
+
@dataclass(frozen=True)
class _BackupInfo:
    """Immutable metadata for one dated database backup file."""

    path: Path  # full path to the backup .db file
    day: date  # date parsed from the filename
    mtime: float  # file modification time (epoch seconds)


# Backup filenames are expected to look like "YYYY-MM-DD.db".
_BACKUP_RE = re.compile(r"^(\d{4})-(\d{2})-(\d{2})\.db$")
|
|
676
|
+
|
|
677
|
+
|
|
678
|
+
class Controller:
|
|
679
|
+
    def __init__(self, database_path: str, env: TklrEnvironment, reset: bool = False):
        """Wire up the database manager and per-view display state.

        Args:
            database_path: Path to the sqlite database file.
            env: Environment/config object (supplies ui.ampm, etc.).
            reset: Passed through to DatabaseManager; presumably recreates
                the database when True — confirm in model.py.
        """
        # Initialize the database manager
        self.db_manager = DatabaseManager(database_path, env, reset=reset)

        self.tag_to_id = {}  # Maps tag numbers to event IDs
        # Per-view tag registries: view name -> {tag: {"record_id", "job_id"}}.
        self.list_tag_to_id: dict[str, dict[str, object]] = {}

        self.yrwk_to_pages = {}  # Maps (iso_year, iso_week) to week description
        self.rownum_to_yrwk = {}  # Maps row numbers to (iso_year, iso_week)
        self.start_date = calculate_4_week_start()
        # Current ISO (year, week) selection.
        self.selected_week = tuple(datetime.now().isocalendar()[:2])
        self.env = env
        self.AMPM = env.config.ui.ampm
        self._last_details_meta = None
        # Tag width (letters per tag) per list view and per week.
        self.afill_by_view: dict[str, int] = {}  # e.g. {"events": 1, "tasks": 2}
        self.afill_by_week: dict[Tuple[int, int], int] = {}

        # Pre-create the tag registries for the known list views.
        for view in ["next", "last", "find", "events", "tasks", "alerts"]:
            self.list_tag_to_id.setdefault(view, {})
        self.week_tag_to_id: dict[Tuple[int, int], dict[str, object]] = {}
        # Usable display width: terminal width minus a 2-column margin.
        self.width = shutil.get_terminal_size()[0] - 2
        self.afill = 1
        self._agenda_dirty = False
|
|
702
|
+
|
|
703
|
+
    @property
    def root_id(self) -> int:
        """Return the id of the root bin, creating it if necessary.

        Raises:
            RuntimeError: If the 'root' bin is still missing after
                ensure_system_bins(), i.e. the database was not initialized.
        """
        # ensure_system_bins() is expected to create 'root' when absent.
        self.db_manager.ensure_system_bins()
        self.db_manager.cursor.execute("SELECT id FROM Bins WHERE name = 'root'")
        row = self.db_manager.cursor.fetchone()
        if not row:
            raise RuntimeError(
                "Root bin not found — database not initialized correctly."
            )
        return row[0]
|
|
714
|
+
|
|
715
|
+
    def format_datetime(self, fmt_dt: str) -> str:
        """Format via the module-level format_datetime using the configured AM/PM mode."""
        return format_datetime(fmt_dt, self.AMPM)
|
|
717
|
+
|
|
718
|
+
    def datetime_in_words(self, fmt_dt: str) -> str:
        """Render via the module-level datetime_in_words using the configured AM/PM mode."""
        return datetime_in_words(fmt_dt, self.AMPM)
|
|
720
|
+
|
|
721
|
+
    def make_item(self, entry_str: str, final: bool = False) -> "Item":
        """Parse an entry string into an Item; final=True marks a committed entry."""
        return Item(entry_str, final=final)  # or config=self.env.load_config()
|
|
723
|
+
|
|
724
|
+
    def add_item(self, item: Item) -> int:
        """Persist an Item (and any completion it carries); return the record id."""
        # Debug trace for finished-capable item types carrying an @f token.
        if item.itemtype in "~^x" and item.has_f:
            log_msg(
                f"{item.itemtype = } {item.has_f = } {item.itemtype in '~^' and item.has_f = }"
            )

        record_id = self.db_manager.add_item(item)

        # item.completion is presumably a (completed_dt, due_dt|None) pair —
        # confirm against Item; it is stored as-is via add_completion.
        if item.completion:
            completed_dt, due_dt = item.completion
            # completed_ts = dt_as_utc_timestamp(completed_dt)
            # due_ts = dt_as_utc_timestamp(due_dt) if due_dt else None
            completion = (completed_dt, due_dt)
            self.db_manager.add_completion(record_id, completion)

        return record_id
|
|
740
|
+
|
|
741
|
+
    def apply_anniversary_if_needed(
        self, record_id: int, subject: str, instance: datetime
    ) -> str:
        """
        If this record is a recurring event with a {XXX} placeholder,
        replace it with the ordinal number of this instance.

        Returns the subject unchanged when there is no placeholder, the
        record can't be loaded, or its rruleset lacks DTSTART/FREQ.
        """
        if "{XXX}" not in subject:
            return subject

        row = self.db_manager.get_record(record_id)
        if not row:
            return subject

        # The rruleset text is column 4 (based on your tuple)
        rruleset = row[4]
        if not rruleset:
            return subject

        # --- Extract DTSTART and FREQ ---
        start_dt = None
        freq = None

        for line in rruleset.splitlines():
            if line.startswith("DTSTART"):
                # Handles both VALUE=DATE and VALUE=DATETIME
                if ":" in line:
                    val = line.split(":")[1].strip()
                    try:
                        # "T" distinguishes a datetime value from a bare date.
                        if "T" in val:
                            start_dt = datetime.strptime(val, "%Y%m%dT%H%M%S")
                        else:
                            start_dt = datetime.strptime(val, "%Y%m%d")
                    except Exception:
                        # Unparseable DTSTART → leave start_dt as None.
                        pass
            elif line.startswith("RRULE"):
                # look for FREQ=YEARLY etc.
                parts = line.split(":")[-1].split(";")
                for p in parts:
                    if p.startswith("FREQ="):
                        # Map RFC-5545 FREQ values to set_anniversary's codes.
                        freq_val = p.split("=")[1].strip().lower()
                        freq = {
                            "daily": "d",
                            "weekly": "w",
                            "monthly": "m",
                            "yearly": "y",
                        }.get(freq_val)
                        break

        if not start_dt or not freq:
            return subject

        # --- Compute ordinal replacement ---
        return set_anniversary(subject, start_dt, instance, freq)
|
|
795
|
+
|
|
796
|
+
# def get_tag_iterator(self, view: str, count: int) -> Iterator[str]:
|
|
797
|
+
# """
|
|
798
|
+
# Return an iterator over tags (a, b, ..., z, aa, ab, ..., aaa, ...),
|
|
799
|
+
# consistent with existing set_afill() and add_tag() logic.
|
|
800
|
+
# """
|
|
801
|
+
# # determine tag width for this view
|
|
802
|
+
# self.set_afill([None] * count, view)
|
|
803
|
+
# fill = self.afill_by_view[view]
|
|
804
|
+
#
|
|
805
|
+
# for i in range(count):
|
|
806
|
+
# yield indx_to_tag(i, fill)
|
|
807
|
+
|
|
808
|
+
def get_name_to_binpath(self) -> Dict[str, str]:
    """Map lowercase leaf bin names to their full 'Leaf/Parent/.../Root' paths."""
    return self.db_manager.bin_cache.name_to_binpath()
|
|
811
|
+
|
|
812
|
+
def get_tag_iterator(self, view: str, count: int) -> Iterator[str]:
    """Yield ``count`` successive alphabetic tags (a, b, ..., aa, ...) for *view*.

    Ensures the per-view tag width has been chosen before any tag is emitted.
    """
    if view not in self.afill_by_view:
        # establish the tag width for this view first
        self.set_afill([None] * count, view)
    width = self.afill_by_view[view]
    for index in range(count):
        yield indx_to_tag(index, width)
|
|
818
|
+
|
|
819
|
+
def set_afill(self, details: list, view: str):
    """Record the tag width (1, 2 or 3 letters) needed to label ``details`` in *view*."""
    n = len(details)
    # 26 single letters, 26*26 pairs, then triples
    if n <= 26:
        fill = 1
    elif n <= 26 * 26:
        fill = 2
    else:
        fill = 3
    log_msg(f"{view = }, {n = }, {fill = }, {details = }")
    self.afill_by_view[view] = fill
|
|
825
|
+
|
|
826
|
+
def add_tag(
    self, view: str, indx: int, record_id: int, *, job_id: int | None = None
):
    """Produce the next tag (with the pre-chosen width) and register it."""
    width = self.afill_by_view[view]
    label = indx_to_tag(indx, width)  # uses the existing tag encoder
    view_map = self.list_tag_to_id.setdefault(view, {})
    view_map[label] = {
        "record_id": record_id,
        "job_id": job_id,
    }
    return f" [dim]{label}[/dim] ", indx + 1
|
|
838
|
+
|
|
839
|
+
def set_week_afill(self, details: list, yr_wk: Tuple[int, int]):
    """Record the tag width (1, 2 or 3 letters) needed to label ``details`` in week *yr_wk*."""
    n = len(details)
    if n <= 26:
        fill = 1
    elif n <= 26 * 26:
        fill = 2
    else:
        fill = 3
    log_msg(f"{yr_wk = }, {n = }, {fill = }")
    self.afill_by_week[yr_wk] = fill
|
|
844
|
+
|
|
845
|
+
def add_week_tag(
    self,
    yr_wk: Tuple[int, int],
    indx: int,
    record_id: int,
    job_id: int | None = None,
):
    """Produce the next tag (with the pre-chosen width) and register it."""
    width = self.afill_by_week[yr_wk]
    label = indx_to_tag(indx, width)  # uses the existing tag encoder
    week_map = self.week_tag_to_id.setdefault(yr_wk, {})
    week_map[label] = {
        "record_id": record_id,
        "job_id": job_id,
    }
    return f" [dim]{label}[/dim] ", indx + 1
|
|
861
|
+
|
|
862
|
+
def mark_agenda_dirty(self) -> None:
    """Flag the agenda as needing a re-render on the next refresh."""
    self._agenda_dirty = True
|
|
864
|
+
|
|
865
|
+
def consume_agenda_dirty(self) -> bool:
    """Return the agenda-dirty flag and clear it (one-shot read)."""
    was_dirty, self._agenda_dirty = self._agenda_dirty, False
    return was_dirty
|
|
869
|
+
|
|
870
|
+
def toggle_pin(self, record_id: int) -> bool:
    """Flip the pinned state of *record_id* and return the new state.

    Marks the agenda dirty so the next refresh re-renders.
    """
    self.db_manager.toggle_pinned(record_id)
    self.mark_agenda_dirty()  # every toggle invalidates the agenda
    return self.db_manager.is_pinned(record_id)
|
|
874
|
+
|
|
875
|
+
def get_last_details_meta(self):
    """Return the meta dict saved by the most recent details rendering."""
    return self._last_details_meta
|
|
877
|
+
|
|
878
|
+
def toggle_pinned(self, record_id):
    """Flip the pinned state of *record_id* and return the new state.

    NOTE(review): overlaps with toggle_pin() above, minus the agenda-dirty
    marking — confirm whether both entry points are still needed.
    """
    self.db_manager.toggle_pinned(record_id)
    log_msg(f"{record_id = }, {self.db_manager.is_pinned(record_id) = }")
    return self.db_manager.is_pinned(record_id)
|
|
882
|
+
|
|
883
|
+
def get_entry(self, record_id, job_id=None):
    """Return the formatted 'details' lines for a record: the colorized entry
    text, its rruleset line (if any), and an id/created/modified footer.

    NOTE(review): ``type_color`` and ``label_color`` are not defined in this
    method — presumably module-level names; confirm they are always in scope.
    """
    lines = []
    result = self.db_manager.get_tokens(record_id)
    # log_msg(f"{result = }")

    # first row only: (tokens, rruleset, created, modified)
    tokens, rruleset, created, modified = result[0]

    entry = format_tokens(tokens, self.width)
    # bold/colorize just the leading itemtype character of the entry
    entry = f"[bold {type_color}]{entry[0]}[/bold {type_color}]{entry[1:]}"

    log_msg(f"{rruleset = }")
    # rruleset = f"\n{11 * ' '}".join(rruleset.splitlines())

    rr_line = ""
    if rruleset:
        # wrap the rruleset text so continuation lines align under the label
        formatted_rr = format_rruleset_for_details(
            rruleset, width=self.width - 10, subsequent_indent=9
        )
        rr_line = f"[{label_color}]rruleset:[/{label_color}] {formatted_rr}"

    # optional job-id suffix for the footer line
    job = (
        f" [{label_color}]job_id:[/{label_color}] [bold]{job_id}[/bold]"
        if job_id
        else ""
    )
    lines.extend(
        [
            entry,
            " ",
            rr_line,
            f"[{label_color}]id/cr/md:[/{label_color}] {record_id}{job} / {created} / {modified}",
        ]
    )

    return lines
|
|
918
|
+
|
|
919
|
+
def update_record_from_item(self, item) -> None:
    """Persist the editable fields of *item* back to its row in Records.

    The row is matched by ``item.id``; ``modified`` is set to the current
    POSIX timestamp.  Commits the transaction before returning.
    """
    self.cursor.execute(
        """
        UPDATE Records
        SET itemtype=?, subject=?, description=?, rruleset=?, timezone=?,
            extent=?, alerts=?, notice=?, context=?, jobs=?, tags=?,
            priority=?, tokens=?, modified=?
        WHERE id=?
        """,
        (
            item.itemtype,
            item.subject,
            item.description,
            item.rruleset,
            item.timezone or "",
            item.extent or "",
            json.dumps(item.alerts or []),
            item.notice or "",
            item.context or "",
            json.dumps(item.jobs or None),
            ";".join(item.tags or []),
            item.p or "",
            json.dumps(item.tokens),
            # BUG FIX: datetime.utcnow().timestamp() treats the naive UTC
            # datetime as *local* time, skewing the stored epoch by the UTC
            # offset.  datetime.now().timestamp() yields the correct POSIX
            # timestamp regardless of the system timezone.
            datetime.now().timestamp(),
            item.id,
        ),
    )
    self.conn.commit()
|
|
947
|
+
|
|
948
|
+
def get_record_core(self, record_id: int) -> dict:
    """Return a small dict of core fields for *record_id*.

    Missing records yield empty-string/None defaults rather than raising.
    """
    record = self.db_manager.get_record(record_id)
    if record:
        # tuple layout per the Records schema: row[1]=itemtype, row[2]=subject,
        # row[4]=rruleset
        return {
            "id": record_id,
            "itemtype": record[1],
            "subject": record[2],
            "rruleset": record[4],
            "record": record,
        }
    return {
        "id": record_id,
        "itemtype": "",
        "subject": "",
        "rruleset": None,
        "record": None,
    }
|
|
966
|
+
|
|
967
|
+
def get_details_for_record(
    self,
    record_id: int,
    job_id: int | None = None,
):
    """
    Return (title, fields, meta) for a record, accepting ids directly.
    Uses the same internal logic as process_tag(); also caches ``meta``
    in ``self._last_details_meta`` for detail actions.
    """
    # If you have a general helper that returns fields for a record, reuse it.
    # Here we replicate the important parts used by process_tag()
    core = self.get_record_core(record_id) or {}
    itemtype = core.get("itemtype") or ""
    rruleset = core.get("rruleset") or ""
    # NOTE(review): get_record_core() never supplies "all_prereqs", so this
    # is always "" here — confirm whether prerequisites should be looked up.
    all_prereqs = core.get("all_prereqs") or ""

    subject = core.get("subject") or "(untitled)"
    if job_id is not None:
        # prefer the job's display subject when this points at a job
        try:
            js = self.db_manager.get_job_display_subject(record_id, job_id)
            if js:
                subject = js
        except Exception:
            # fail-safe: keep the record subject
            pass

    try:
        # only tasks ("~") can be pinned
        pinned_now = (
            self.db_manager.is_task_pinned(record_id) if itemtype == "~" else False
        )
    except Exception:
        pinned_now = False

    fields = [
        "",
    ] + self.get_entry(record_id, job_id)

    # next two scheduled datetimes, None-padded when fewer exist
    _dts = self.db_manager.get_next_start_datetimes_for_record(record_id)
    first, second = (_dts + [None, None])[:2]

    title = f"[bold]{subject:^{self.width}}[/bold]"

    meta = {
        "record_id": record_id,
        "job_id": job_id,
        "itemtype": itemtype,
        "subject": subject,
        "rruleset": rruleset,
        "first": first,
        "second": second,
        "all_prereqs": all_prereqs,
        "pinned": bool(pinned_now),
        "record": self.db_manager.get_record(record_id),
    }
    self._last_details_meta = meta

    # return [title, ""] + fields
    return title, fields, meta
|
|
1024
|
+
|
|
1025
|
+
def get_record(self, record_id):
    """Return the raw Records row for *record_id* (delegates to the model)."""
    return self.db_manager.get_record(record_id)
|
|
1027
|
+
|
|
1028
|
+
def get_all_records(self):
    """Return every record row from the model layer."""
    return self.db_manager.get_all()
|
|
1030
|
+
|
|
1031
|
+
def delete_record(self, record_id):
    """Delete *record_id* from the database (delegates to the model)."""
    self.db_manager.delete_record(record_id)
|
|
1033
|
+
|
|
1034
|
+
def update_tags(self, record_data):
    """Update a record together with its tags; return the model's result."""
    return self.db_manager.update_record_with_tags(record_data)
|
|
1036
|
+
|
|
1037
|
+
def get_tags(self, record_id):
    """Return the tags attached to *record_id* (delegates to the model)."""
    return self.db_manager.get_tags_for_record(record_id)
|
|
1039
|
+
|
|
1040
|
+
def get_tagged_records(self, tag):
    """Return all records carrying *tag* (delegates to the model)."""
    return self.db_manager.get_tagged(tag)
|
|
1042
|
+
|
|
1043
|
+
def sync_jobs(self, record_id, jobs_list):
    """Synchronize the Jobs table with *jobs_list* for *record_id*."""
    self.db_manager.sync_jobs_from_record(record_id, jobs_list)
|
|
1045
|
+
|
|
1046
|
+
def get_jobs(self, record_id):
    """Return all jobs belonging to *record_id* (delegates to the model)."""
    return self.db_manager.get_jobs_for_record(record_id)
|
|
1048
|
+
|
|
1049
|
+
def get_job(self, record_id):
    # NOTE(review): identical to get_jobs() — both return *all* jobs for the
    # record via get_jobs_for_record().  Looks like a copy/paste remnant; if a
    # single job was intended this should also take a job_id — confirm with
    # callers before changing.
    return self.db_manager.get_jobs_for_record(record_id)
|
|
1051
|
+
|
|
1052
|
+
def record_count(self):
    """Return the total number of records (delegates to the model)."""
    return self.db_manager.count_records()
|
|
1054
|
+
|
|
1055
|
+
def populate_alerts(self):
    """Rebuild the alerts table (delegates to the model)."""
    self.db_manager.populate_alerts()
|
|
1057
|
+
|
|
1058
|
+
def populate_notice(self):
    """Rebuild the notice table (delegates to the model)."""
    self.db_manager.populate_notice()
|
|
1060
|
+
|
|
1061
|
+
def refresh_alerts(self):
    """Refresh alerts by repopulating them.

    NOTE(review): same model call as populate_alerts() — one of the two
    entry points may be redundant.
    """
    self.db_manager.populate_alerts()
|
|
1063
|
+
|
|
1064
|
+
def refresh_tags(self):
    """Refresh the tags table (delegates to the model)."""
    self.db_manager.populate_tags()
|
|
1066
|
+
|
|
1067
|
+
def execute_alert(self, command: str):
    """
    Execute the given alert command using subprocess.

    Args:
        command (str): The command string to execute.
    """
    if not command:
        print("❌ Error: No command provided to execute.")
        return

    try:
        # shlex.split() parses the command into argv safely — no shell involved
        subprocess.run(shlex.split(command), check=True)
        print(f"✅ Successfully executed: {command}")
    except subprocess.CalledProcessError as exc:
        print(f"❌ Error executing command: {command}\n{exc}")
    except FileNotFoundError:
        print(f"❌ Command not found: {command}")
    except Exception as exc:
        print(f"❌ Unexpected error: {exc}")
|
|
1088
|
+
|
|
1089
|
+
def execute_due_alerts(self):
    """Run every due alert's command and mark it executed in the database."""
    records = self.db_manager.get_due_alerts()
    # each row: (alert_id, record_id, trigger_datetime, start_datetime,
    #            alert_name, alert_command)
    for (
        alert_id,
        record_id,
        trigger_datetime,
        start_datetime,
        alert_name,
        alert_command,
    ) in records:
        log_msg(
            f"Executing alert {alert_name = }, {alert_command = }, {trigger_datetime = }"
        )
        self.execute_alert(alert_command)
        # record the execution so the alert is not fired again
        self.db_manager.mark_alert_executed(alert_id)
|
|
1108
|
+
|
|
1109
|
+
def get_due_alerts(self, now: datetime) -> List[Alert]:
    """Return [alert_id, alert_name, alert_command] triples for all due alerts.

    NOTE(review): the ``now`` parameter is currently unused — the model query
    decides what is due; confirm before removing it from the signature.
    """
    due = []
    for (
        alert_id,
        record_id,
        trigger_datetime,
        start_datetime,
        alert_name,
        alert_command,
    ) in self.db_manager.get_due_alerts():
        due.append([alert_id, alert_name, alert_command])
        log_msg(f"{due[-1] = }")
    return due
|
|
1124
|
+
|
|
1125
|
+
def get_active_alerts(self, width: int = 70):
    """Return (pages, header) listing today's alerts whose trigger time has
    not yet passed.  ``width`` controls the subject column width.
    """
    # now_fmt = datetime.now().strftime("%A, %B %-d %H:%M:%S")
    alerts = self.db_manager.get_active_alerts()
    log_msg(f"{alerts = }")
    # NOTE(review): ``title`` is assigned but never used below — dead code?
    title = "Remaining alerts for today"
    if not alerts:
        header = f"[{HEADER_COLOR}] none remaining [/{HEADER_COLOR}]"
        return [], header

    now = datetime.now()

    # column widths depend on 12h vs 24h time display
    trigger_width = 7 if self.AMPM else 8
    start_width = 7 if self.AMPM else 6
    alert_width = trigger_width + 3
    name_width = width - 35
    header = f"[bold][dim]{'tag':^3}[/dim] {'alert':^{alert_width}} {'@s':^{start_width}} {'subject':<{name_width}}[/bold]"

    rows = []
    log_msg(f"processing {len(alerts)} alerts")

    for alert in alerts:
        log_msg(f"Alert: {alert = }")
        # alert_id, record_id, record_name, start_dt, td, command
        (
            alert_id,
            record_id,
            record_name,
            trigger_datetime,
            start_datetime,
            alert_name,
            alert_command,
        ) = alert
        # skip alerts whose trigger time is already in the past
        if now > datetime_from_timestamp(trigger_datetime):
            log_msg("skipping - already passed")
            continue
        # tag_fmt, indx = self.add_tag("alerts", indx, record_id)
        trtime = self.format_datetime(trigger_datetime)
        sttime = self.format_datetime(start_datetime)
        subject = truncate_string(record_name, name_width)
        text = (
            f"[{SALMON}] {alert_name} {trtime:<{trigger_width}}[/{SALMON}][{PALE_GREEN}] → {sttime:<{start_width}}[/{PALE_GREEN}] "
            + f" [{AVAILABLE_COLOR}]{subject:<{name_width}}[/{AVAILABLE_COLOR}]"
        )
        rows.append({"record_id": record_id, "job_id": None, "text": text})
    pages = page_tagger(rows)
    log_msg(f"{header = }\n{rows = }\n{pages = }")
    return pages, header
|
|
1172
|
+
|
|
1173
|
+
def process_tag(self, tag: str, view: str, selected_week: tuple[int, int]):
    """Resolve *tag* in *view* to a (record_id, job_id) pair and return the
    formatted details lines for that item.  Also caches a meta dict in
    ``self._last_details_meta`` for subsequent detail actions.
    """
    job_id = None
    if view == "week":
        # week view: tags are registered per (year, week)
        payload = None
        tags_for_week = self.week_tag_to_id.get(selected_week, None)
        payload = tags_for_week.get(tag, None) if tags_for_week else None
        if payload is None:
            return [f"There is no item corresponding to tag '{tag}'."]
        if isinstance(payload, dict):
            record_id = payload.get("record_id")
            job_id = payload.get("job_id")
        else:
            # legacy payloads may be a bare record id
            record_id, job_id = payload, None

    elif view in [
        "next",
        "last",
        "find",
        "events",
        "tasks",
        "agenda-events",
        "agenda-tasks",
        "alerts",
    ]:
        # list views: tags are registered per view name
        payload = self.list_tag_to_id.get(view, {}).get(tag)
        if payload is None:
            return [f"There is no item corresponding to tag '{tag}'."]
        if isinstance(payload, dict):
            record_id = payload.get("record_id")
            job_id = payload.get("job_id")
        else:
            record_id, job_id = payload, None
    else:
        return ["Invalid view."]

    core = self.get_record_core(record_id) or {}
    itemtype = core.get("itemtype") or ""
    rruleset = core.get("rruleset") or ""
    # NOTE(review): get_record_core() never supplies "all_prereqs" — always ""
    all_prereqs = core.get("all_prereqs") or ""

    # ----- subject selection -----
    # default to record subject
    subject = core.get("subject") or "(untitled)"
    # if this tag points to a job, prefer the job's display_subject
    # if view == "week" and job_id is not None:
    if job_id is not None:
        log_msg(f"setting subject for {record_id = }, {job_id = }")
        try:
            js = self.db_manager.get_job_display_subject(record_id, job_id)
            if js:  # only override if present/non-empty
                subject = js
        except Exception as e:
            # fail-safe: keep the record subject
            log_msg(f"Error: {e}. Failed for {record_id = }, {job_id = }")
    # -----------------------------

    try:
        # only tasks ("~") can be pinned
        pinned_now = (
            self.db_manager.is_task_pinned(record_id) if itemtype == "~" else False
        )
    except Exception:
        pinned_now = False

    fields = [
        "",
    ] + self.get_entry(record_id, job_id)

    # next two scheduled datetimes, None-padded when fewer exist
    _dts = self.db_manager.get_next_start_datetimes_for_record(record_id, job_id)
    first, second = (_dts + [None, None])[:2]
    log_msg(f"{record_id = }, {job_id = }, {_dts = }, {first = }, {second = }")

    # job_suffix = (
    #     f" [{label_color}]job_id:[/{label_color}] [bold]{job_id}[/bold]"
    #     if job_id is not None
    #     else ""
    # )
    # title = f"[{label_color}]details:[/{label_color}] [bold]{subject}[/bold]"
    title = f"[bold]{subject:^{self.width}}[/bold]"
    # ids = f"[{label_color}]id:[/{label_color}] [bold]{record_id}[/bold]{job_suffix}"

    # side-channel meta for detail actions
    self._last_details_meta = {
        "record_id": record_id,
        "job_id": job_id,
        "itemtype": itemtype,
        "subject": subject,
        "rruleset": rruleset,
        "first": first,
        "second": second,
        "all_prereqs": all_prereqs,
        "pinned": bool(pinned_now),
        "record": self.db_manager.get_record(record_id),
    }

    return [
        title,
        " ",
    ] + fields
|
|
1271
|
+
|
|
1272
|
+
def get_table_and_list(self, start_date: datetime, selected_week: tuple[int, int]):
    """Return (title, busy_bar, details) for the ISO week ``selected_week``.

    NOTE(review): ``start_date`` is unused — the week start is recomputed
    from (year, week); confirm before dropping the parameter.
    """
    year, week = selected_week

    try:
        # make sure instances for this week (plus a cushion of neighbors) exist
        extended = self.db_manager.ensure_week_generated_with_topup(
            year, week, cushion=6, topup_threshold=2
        )
        if extended:
            log_msg(
                f"[weeks] extended/generated around {year}-W{week:02d} (+cushion)"
            )
    except Exception as e:
        log_msg(f"[weeks] ensure_week_generated_with_topup error: {e}")

    year_week = f"{year:04d}-{week:02d}"
    busy_bits = self.db_manager.get_busy_bits_for_week(year_week)
    busy_bar = self._format_busy_bar(busy_bits)

    # Monday of the ISO week
    start_dt = datetime.strptime(f"{year} {week} 1", "%G %V %u")
    # end_dt = start_dt + timedelta(weeks=1)
    details = self.get_week_details(selected_week)

    title = format_iso_week(start_dt)
    return title, busy_bar, details
|
|
1296
|
+
|
|
1297
|
+
# def get_table_and_list(self, start_date: datetime, selected_week: tuple[int, int]):
|
|
1298
|
+
# """
|
|
1299
|
+
# Return the header title, busy bar (as text), and event list details
|
|
1300
|
+
# for the given ISO week.
|
|
1301
|
+
#
|
|
1302
|
+
# Returns: (title, busy_bar_str, details_list)
|
|
1303
|
+
# """
|
|
1304
|
+
# year, week = selected_week
|
|
1305
|
+
# year_week = f"{year:04d}-{week:02d}"
|
|
1306
|
+
#
|
|
1307
|
+
# # --- 1. Busy bits from BusyWeeks table
|
|
1308
|
+
# busy_bits = self.db_manager.get_busy_bits_for_week(year_week)
|
|
1309
|
+
# busy_bar = self._format_busy_bar(busy_bits)
|
|
1310
|
+
#
|
|
1311
|
+
# # --- 2. Week events using your existing method
|
|
1312
|
+
# start_dt = datetime.strptime(f"{year} {week} 1", "%G %V %u")
|
|
1313
|
+
# end_dt = start_dt + timedelta(weeks=1)
|
|
1314
|
+
# details = self.get_week_details(selected_week)
|
|
1315
|
+
#
|
|
1316
|
+
# # title = f"{format_date_range(start_dt, end_dt)} #{start_dt.isocalendar().week}"
|
|
1317
|
+
# title = format_iso_week(start_dt)
|
|
1318
|
+
# # --- 3. Title for the week header
|
|
1319
|
+
# # title = f"Week {week} — {start_dt.strftime('%b %d')} to {(end_dt - timedelta(days=1)).strftime('%b %d')}"
|
|
1320
|
+
#
|
|
1321
|
+
# return title, busy_bar, details
|
|
1322
|
+
|
|
1323
|
+
def _format_busy_bar(
|
|
1324
|
+
self,
|
|
1325
|
+
bits: list[int],
|
|
1326
|
+
*,
|
|
1327
|
+
busy_color: str = "green",
|
|
1328
|
+
conflict_color: str = "red",
|
|
1329
|
+
allday_color: str = "yellow",
|
|
1330
|
+
) -> str:
|
|
1331
|
+
"""
|
|
1332
|
+
Render 35 busy bits (7×[1 all-day + 4×6h blocks])
|
|
1333
|
+
as a compact single-row week bar with color markup.
|
|
1334
|
+
|
|
1335
|
+
Layout:
|
|
1336
|
+
| Mon | Tue | Wed | Thu | Fri | Sat | Sun |
|
|
1337
|
+
|■██▓▓| |▓███ | ... |
|
|
1338
|
+
|
|
1339
|
+
Encoding:
|
|
1340
|
+
0 = free → " "
|
|
1341
|
+
1 = busy → colored block
|
|
1342
|
+
2 = conflict → colored block
|
|
1343
|
+
(first of 5 per day is the all-day bit → colored "■" if set)
|
|
1344
|
+
"""
|
|
1345
|
+
DAYS = ["Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun"]
|
|
1346
|
+
assert len(bits) == 35, "expected 35 bits (7×5)"
|
|
1347
|
+
|
|
1348
|
+
# --- Header line
|
|
1349
|
+
header = "│".join(f" {d:^3} " for d in DAYS)
|
|
1350
|
+
lines = [f"│{header}│"]
|
|
1351
|
+
|
|
1352
|
+
# --- Busy row
|
|
1353
|
+
day_segments = []
|
|
1354
|
+
for day in range(7):
|
|
1355
|
+
start = day * 5
|
|
1356
|
+
all_day_bit = bits[start]
|
|
1357
|
+
block_bits = bits[start + 1 : start + 5]
|
|
1358
|
+
|
|
1359
|
+
# --- all-day symbol
|
|
1360
|
+
if all_day_bit:
|
|
1361
|
+
all_day_char = f"[{allday_color}]■[/{allday_color}]"
|
|
1362
|
+
else:
|
|
1363
|
+
all_day_char = " "
|
|
1364
|
+
|
|
1365
|
+
# --- 4×6h blocks
|
|
1366
|
+
blocks = ""
|
|
1367
|
+
for b in block_bits:
|
|
1368
|
+
if b == 1:
|
|
1369
|
+
blocks += f"[{busy_color}]█[/{busy_color}]"
|
|
1370
|
+
elif b == 2:
|
|
1371
|
+
blocks += f"[{conflict_color}]▓[/{conflict_color}]"
|
|
1372
|
+
else:
|
|
1373
|
+
blocks += " "
|
|
1374
|
+
|
|
1375
|
+
day_segments.append(all_day_char + blocks)
|
|
1376
|
+
|
|
1377
|
+
lines.append(f"│{'│'.join(day_segments)}│")
|
|
1378
|
+
return "\n".join(lines)
|
|
1379
|
+
|
|
1380
|
+
def get_week_details(self, yr_wk):
    """
    Fetch and format rows for a specific week.

    Returns tagged pages of day headers plus per-event rows; also caches
    the pages in ``self.yrwk_to_pages[yr_wk]``.
    """
    # log_msg(f"Getting rows for week {yr_wk}")
    today = datetime.now()
    tomorrow = today + ONEDAY
    today_year, today_week, today_weekday = today.isocalendar()
    tomorrow_year, tomorrow_week, tomorrow_day = tomorrow.isocalendar()

    self.selected_week = yr_wk

    # Monday of the ISO week through the following Monday
    start_datetime = datetime.strptime(f"{yr_wk[0]} {yr_wk[1]} 1", "%G %V %u")
    end_datetime = start_datetime + timedelta(weeks=1)
    events = self.db_manager.get_events_for_period(start_datetime, end_datetime)

    # log_msg(f"from get_events_for_period:\n{events = }")
    this_week = format_date_range(start_datetime, end_datetime - ONEDAY)
    # terminal_width = shutil.get_terminal_size().columns

    # NOTE(review): ``header`` is built but never returned from this method
    header = f"{this_week} #{yr_wk[1]} ({len(events)})"
    rows = []

    # fix the tag width for this week's item count
    self.set_week_afill(events, yr_wk)

    if not events:
        rows.append(
            {
                "record_id": None,
                "job_id": None,
                "text": f" [{HEADER_COLOR}]Nothing scheduled for this week[/{HEADER_COLOR}]",
            }
        )
        pages = page_tagger(rows)
        return pages

    # pre-seed one bucket per day so empty days keep their position
    weekday_to_events = {}
    for i in range(7):
        this_day = (start_datetime + timedelta(days=i)).date()
        weekday_to_events[this_day] = []

    for start_ts, end_ts, itemtype, subject, id, job_id in events:
        # NOTE(review): loop variable ``id`` shadows the builtin
        log_msg(f"{itemtype = }, {subject = }, {id = }, {job_id = }")
        start_dt = datetime_from_timestamp(start_ts)
        end_dt = datetime_from_timestamp(end_ts)
        if itemtype == "*":  # event
            # 🪄 replace {XXX} with ordinal instance
            subject = self.apply_anniversary_if_needed(id, subject, start_dt)
            # log_msg(
            #     f"Week rows {itemtype = }, {subject = }, {start_dt = }, {end_dt = }"
            # )
        status = "available"

        if start_dt == end_dt:
            # zero-extent items: suppress times for all-day sentinels
            # if start_dt.hour == 0 and start_dt.minute == 0 and start_dt.second == 0:
            if start_dt.hour == 0 and start_dt.minute == 0:
                # start_end = f"{str('~'):^11}"
                start_end = ""
            elif start_dt.hour == 23 and start_dt.minute == 59:
                start_end = ""
            else:
                start_end = f"{format_time_range(start_dt, end_dt, self.AMPM)}"
        else:
            start_end = f"{format_time_range(start_dt, end_dt, self.AMPM)}"

        type_color = TYPE_TO_COLOR[itemtype]
        escaped_start_end = (
            f"[not bold]{start_end} [/not bold]" if start_end else ""
        )

        if job_id:
            # job rows take their status/subject from the job dict
            job = self.db_manager.get_job_dict(id, job_id)
            status = job.get("status", "available")
            subject = job.get("display_subject", subject)
            itemtype = "~"
            if status != "available":
                type_color = WAITING_COLOR

        row = {
            "record_id": id,
            "job_id": job_id,
            "text": f"[{type_color}]{itemtype} {escaped_start_end}{subject}[/{type_color}]",
        }
        weekday_to_events.setdefault(start_dt.date(), []).append(row)
        log_msg(f"job row: {row = }")

    # NOTE(review): ``events``, ``today`` and ``tomorrow`` are re-bound here,
    # shadowing the earlier variables of the same names
    for day, events in weekday_to_events.items():
        # TODO: today, tomorrow here
        iso_year, iso_week, weekday = day.isocalendar()
        today = (
            iso_year == today_year
            and iso_week == today_week
            and weekday == today_weekday
        )
        tomorrow = (
            iso_year == tomorrow_year
            and iso_week == tomorrow_week
            and weekday == tomorrow_day
        )
        flag = " (today)" if today else " (tomorrow)" if tomorrow else ""
        if events:
            rows.append(
                {
                    "record_id": None,
                    "job_id": None,
                    "text": f"[bold][{HEADER_COLOR}]{day.strftime('%a, %b %-d')}{flag}[/{HEADER_COLOR}][/bold]",
                }
            )
            for event in events:
                rows.append(event)
    pages = page_tagger(rows)
    self.yrwk_to_pages[yr_wk] = pages
    # log_msg(f"{len(pages) = }, {pages[0] = }, {pages[-1] = }")
    return pages
|
|
1494
|
+
|
|
1495
|
+
def get_busy_bits_for_week(self, selected_week: tuple[int, int]) -> list[int]:
    """Format (year, week) as 'YYYY-WW' and fetch its busy bits from the model."""
    yr, wk = selected_week
    return self.db_manager.get_busy_bits_for_week(f"{yr:04d}-{wk:02d}")
|
|
1500
|
+
|
|
1501
|
+
def get_next(self):
    """
    Fetch and format rows for the next scheduled instances, grouped by
    month; return (pages, header).
    """
    events = self.db_manager.get_next_instances()
    header = f"Next Instances ({len(events)})"

    if not events:
        return [], header

    year_to_events = {}

    for id, job_id, subject, description, itemtype, start_ts in events:
        # NOTE(review): loop variable ``id`` shadows the builtin
        start_dt = datetime_from_timestamp(start_ts)
        subject = self.apply_anniversary_if_needed(id, subject, start_dt)
        if job_id is not None:
            # prefer the job's display subject when present
            try:
                js = self.db_manager.get_job_display_subject(id, job_id)
                if js:  # only override if present/non-empty
                    subject = js
                # log_msg(f"{subject = }")
            except Exception as e:
                # fail-safe: keep the record subject
                log_msg(f"{e = }")
                pass
        monthday = start_dt.strftime("%-d")
        start_end = f"{monthday:>2} {format_hours_mins(start_dt, HRS_MINS)}"
        type_color = TYPE_TO_COLOR[itemtype]
        escaped_start_end = f"[not bold]{start_end}[/not bold]"
        item = {
            "record_id": id,
            "job_id": job_id,
            "text": f"[{type_color}]{itemtype} {escaped_start_end} {subject}[/{type_color}]",
        }
        # yr_mnth_to_events.setdefault(start_dt.strftime("%B %Y"), []).append(row)
        year_to_events.setdefault(start_dt.strftime("%b %Y"), []).append(item)

    # self.list_tag_to_id.setdefault("next", {})
    # indx = 0
    """
    rows: a list of dicts each with either
    - { 'record_id': int, 'text': str } (a taggable record row)
    - { 'record_id': None, 'text': str } (a non-taggable header row)
    page_size: number of taggable rows per page
    """

    rows = []
    for ym, events in year_to_events.items():
        if events:
            # month header row (not taggable)
            rows.append(
                {
                    "record_id": None,
                    "job_id": None,
                    "text": f"[not bold][{HEADER_COLOR}]{ym}[/{HEADER_COLOR}][/not bold]",
                }
            )
            for event in events:
                rows.append(event)

    # build 'rows' as a list of dicts with record_id and text
    pages = page_tagger(rows)
    log_msg(f"{pages = }")
    return pages, header
|
|
1564
|
+
|
|
1565
|
+
def get_last(self):
    """
    Fetch and format rows for the most recent (last) instances, grouped by
    month; return (pages, header).
    """
    events = self.db_manager.get_last_instances()
    header = f"Last instances ({len(events)})"
    # description = [f"[not bold][{HEADER_COLOR}]{header}[/{HEADER_COLOR}][/not bold]"]

    if not events:
        return [], header

    # use a, ..., z if len(events) <= 26 else use aa, ..., zz
    year_to_events = {}

    for id, job_id, subject, description, itemtype, start_ts in events:
        # NOTE(review): loop variable ``id`` shadows the builtin
        start_dt = datetime_from_timestamp(start_ts)
        subject = self.apply_anniversary_if_needed(id, subject, start_dt)
        # log_msg(f"Week description {subject = }, {start_dt = }, {end_dt = }")
        if job_id is not None:
            # prefer the job's display subject when present
            try:
                js = self.db_manager.get_job_display_subject(id, job_id)
                if js:  # only override if present/non-empty
                    subject = js
                    log_msg(f"{subject = }")
            except Exception as e:
                # fail-safe: keep the record subject
                log_msg(f"{e = }")
                pass
        monthday = start_dt.strftime("%-d")
        start_end = f"{monthday:>2} {format_hours_mins(start_dt, HRS_MINS)}"
        type_color = TYPE_TO_COLOR[itemtype]
        escaped_start_end = f"[not bold]{start_end}[/not bold]"
        item = {
            "record_id": id,
            "job_id": job_id,
            "text": f"[{type_color}]{itemtype} {escaped_start_end} {subject}[/{type_color}]",
        }
        year_to_events.setdefault(start_dt.strftime("%b %Y"), []).append(item)

    rows = []
    for ym, events in year_to_events.items():
        if events:
            # month header row (not taggable)
            rows.append(
                {
                    "record_id": None,
                    "job_id": None,
                    "text": f"[not bold][{HEADER_COLOR}]{ym}[/{HEADER_COLOR}][/not bold]",
                }
            )
            for event in events:
                rows.append(event)
    pages = page_tagger(rows)
    log_msg(f"{pages = }")
    return pages, header
|
|
1619
|
+
|
|
1620
|
+
def find_records(self, search_str: str):
    """Search all records and format the matches as tagged pages.

    Args:
        search_str: Substring to match; an empty string matches everything.

    Returns:
        (pages, header): pages built by page_tagger() from one row dict per
        match, plus a header string describing the search.
    """
    search_str = search_str.strip()
    events = self.db_manager.find_records(search_str)

    matching = (
        f'containing a match for "[{SELECTED_COLOR}]{search_str}[/{SELECTED_COLOR}]" '
        if search_str
        else "matching anything"
    )

    header = f"Items ({len(events)})\n {matching}"

    if not events:
        return [], header

    rows = []

    # Row tuples: (record_id, subject, description, itemtype, last_ts, next_ts).
    # Only next_ts is displayed, so no formatting work is done for last_ts
    # (the original built last_dt/last_fmt/escaped_last and never used them).
    for record_id, subject, _, itemtype, _last_ts, next_ts in events:
        subject = f"{truncate_string(subject, 32):<34}"
        next_dt = (
            datetime_from_timestamp(next_ts).strftime("%y-%m-%d %H:%M")
            if next_ts
            else "~"
        )
        next_fmt = f"{next_dt:^14}"
        type_color = TYPE_TO_COLOR[itemtype]
        # [not bold] keeps the timestamp from inheriting bold styling.
        escaped_next = f"[not bold]{next_fmt}[/not bold]"
        rows.append(
            {
                "record_id": record_id,
                "job_id": None,
                "text": f"[{type_color}]{itemtype} {subject} {escaped_next}[/{type_color}]",
            }
        )
    pages = page_tagger(rows)
    log_msg(f"{pages = }")
    return pages, header
|
|
1667
|
+
|
|
1668
|
+
def group_events_by_date_and_time(self, events):
    """
    Groups only scheduled '*' events by date and time.

    Args:
        events (Iterable[Tuple]):
            Rows of (start_ts, end_ts, itemtype, subject, record_id, job_id).

    Returns:
        Dict[date, List[Tuple[time, Tuple]]]:
            Maps each date to a time-sorted list of
            (start_time, (start_ts, end_ts, subject, record_id, job_id)).
    """
    grouped = defaultdict(list)

    for start_ts, end_ts, itemtype, subject, record_id, job_id in events:
        if itemtype != "*":
            continue  # Only events are scheduled in this view

        start_dt = datetime_from_timestamp(start_ts)
        grouped[start_dt.date()].append(
            (start_dt.time(), (start_ts, end_ts, subject, record_id, job_id))
        )

    # Sort each day's events by start time. The loop variable is named `day`
    # to avoid shadowing datetime.date imported at module level.
    for day in grouped:
        grouped[day].sort(key=lambda x: x[0])

    return dict(grouped)
|
|
1697
|
+
|
|
1698
|
+
def get_completions_view(self):
    """
    Fetch and format description for all completions, grouped by year.

    Returns a flat list of display strings: a header line, then for each
    year a year heading followed by one tagged line per completion.
    Tag bookkeeping for the "completions" view is prepared via
    set_afill()/add_tag() (see those helpers for the tag semantics).
    """
    events = self.db_manager.get_all_completions()
    header = f"Completions ({len(events)})"
    display = [header]

    if not events:
        display.append(f" [{HEADER_COLOR}]Nothing found[/{HEADER_COLOR}]")
        return display

    # Bucket formatted rows by completion year (key e.g. "2025").
    year_to_events = {}
    for record_id, subject, description, itemtype, due_ts, completed_ts in events:
        completed_dt = datetime_from_timestamp(completed_ts)
        due_dt = datetime_from_timestamp(due_ts) if due_ts else None

        # Format display string
        monthday = completed_dt.strftime("%m-%d")
        completed_str = f"{monthday}{format_hours_mins(completed_dt, HRS_MINS):>8}"
        type_color = TYPE_TO_COLOR[itemtype]
        # [not bold] keeps the timestamp from inheriting bold styling.
        escaped_completed = f"[not bold]{completed_str}[/not bold]"

        extra = f" (due {due_dt.strftime('%m-%d')})" if due_dt else ""
        row = [
            record_id,
            None,  # no job_id for completions
            f"[{type_color}]{itemtype} {escaped_completed:<12} {subject}{extra}[/{type_color}]",
        ]

        year_to_events.setdefault(completed_dt.strftime("%Y"), []).append(row)

    # Size the tag alphabet for this view and ensure its tag->id map exists.
    self.set_afill(events, "completions")
    self.list_tag_to_id.setdefault("completions", {})

    # indx threads through add_tag() so tags stay unique across year groups.
    indx = 0
    for year, events in year_to_events.items():
        if events:
            display.append(
                f"[not bold][{HEADER_COLOR}]{year}[/{HEADER_COLOR}][/not bold]"
            )
            for event in events:
                record_id, job_id, event_str = event
                tag_fmt, indx = self.add_tag(
                    "completions", indx, record_id, job_id=job_id
                )
                display.append(f"{tag_fmt}{event_str}")

    return display
|
|
1747
|
+
|
|
1748
|
+
def get_record_completions(self, record_id: int, width: int = 70):
    """
    Fetch and format completion history for a given record.

    Returns a list of display strings: a title line, a bold column header,
    then one tagged row per completion (completed, due, subject columns).
    """
    completions = self.db_manager.get_completions(record_id)
    header = "Completion history"
    results = [header]

    if not completions:
        results.append(f" [{HEADER_COLOR}]no completions recorded[/{HEADER_COLOR}]")
        return results

    # Fixed column widths (same layout as the alerts view); the subject
    # column absorbs whatever width remains.
    completed_width = 14  # space for "YYYY-MM-DD HH:MM"
    due_width = 14
    name_width = width - (3 + 3 + completed_width + due_width + 6)

    results.append(
        f"[bold][dim]{'tag':^3}[/dim] "
        f"{'completed':^{completed_width}} "
        f"{'due':^{due_width}} "
        f"{'subject':<{name_width}}[/bold]"
    )

    self.set_afill(completions, "record_completions")
    self.list_tag_to_id.setdefault("record_completions", {})
    indx = 0

    for rec_id, subj, _desc, _itemtype, due_ts, completed_ts in completions:
        done_dt = datetime_from_timestamp(completed_ts)
        done_str = self.format_datetime(done_dt, short=True)

        if due_ts:
            due_str = self.format_datetime(
                datetime_from_timestamp(due_ts), short=True
            )
        else:
            due_str = "-"
        subj_fmt = truncate_string(subj, name_width)

        tag_fmt, indx = self.add_tag("record_completions", indx, rec_id)

        results.append(
            f"{tag_fmt} "
            f"[{SALMON}]{done_str:<{completed_width}}[/{SALMON}] "
            f"[{PALE_GREEN}]{due_str:<{due_width}}[/{PALE_GREEN}] "
            f"[{AVAILABLE_COLOR}]{subj_fmt:<{name_width}}[/{AVAILABLE_COLOR}]"
        )

    return results
|
|
1807
|
+
|
|
1808
|
+
def get_agenda(self, now: datetime | None = None):
    """Build the combined Agenda view: events followed by tasks.

    Args:
        now: Reference time; defaults to the current time *at call time*.
            (The previous signature used ``now=datetime.now()``, which is
            evaluated once at import time and therefore goes stale.)

    Returns:
        (pages, header): tagged pages from page_tagger() and the view title.
    """
    if now is None:
        now = datetime.now()
    header = "Agenda - Events and Tasks"
    # Blank spacer row between the events section and the tasks section.
    divider = [
        {"record_id": None, "job_id": None, "text": " "},
    ]
    events_by_date = self.get_agenda_events(now)
    tasks_by_urgency = self.get_agenda_tasks()
    events_and_tasks = events_by_date + divider + tasks_by_urgency
    pages = page_tagger(events_and_tasks)
    log_msg(f"{pages = }")
    return pages, header
|
|
1820
|
+
|
|
1821
|
+
def get_agenda_events(self, now: datetime | None = None):
    """
    Build the events section of the Agenda view.

    Returns a list of row dicts ({"record_id", "job_id", "text"}): a date
    heading row per day followed by that day's event rows, for up to three
    days.

    Rules:
      • Pick the first 3 days that have events.
      • Also include TODAY if it has notice/drafts even with no events.
      • If nothing to display at all, return [].

    Args:
        now: Reference time; defaults to the current time at call time.
            (The previous signature used ``now=datetime.now()``, evaluated
            once at import — a stale default.)
    """
    if now is None:
        now = datetime.now()

    notice_records = (
        self.db_manager.get_notice_for_events()
    )  # (record_id, days_remaining, subject)
    draft_records = self.db_manager.get_drafts()  # (record_id, subject)

    today_dt = now.replace(hour=0, minute=0, second=0, microsecond=0)
    today = today_dt.date()
    now_ts = _fmt_naive(now)

    # Pull events for the next couple of weeks (window is adjustable).
    window_start = today_dt
    window_end = today_dt + timedelta(days=14)
    events = self.db_manager.get_events_for_period(
        _to_local_naive(window_start), _to_local_naive(window_end)
    )
    # events rows: (start_ts, end_ts, itemtype, subject, record_id, job_id)

    grouped_by_date = self.group_events_by_date_and_time(
        events
    )  # {date: [(time_key, (start_ts, end_ts, subject, record_id, job_id)), ...]}

    # 1) Determine the first three dates with events
    event_dates_sorted = sorted(grouped_by_date.keys())
    allowed_dates: list[date] = []
    for d in event_dates_sorted:
        allowed_dates.append(d)
        if len(allowed_dates) == 3:
            break

    # 2) If today has notice/draft items, include it even if it has no events
    has_today_meta = bool(notice_records or draft_records)
    if has_today_meta and today not in allowed_dates:
        # Prepend today; keep max three days
        allowed_dates = [today] + allowed_dates
        # De-dupe while preserving order
        seen = set()
        deduped = []
        for d in allowed_dates:
            if d not in seen:
                seen.add(d)
                deduped.append(d)
        allowed_dates = deduped[:3]  # cap to 3

    # 3) If nothing at all to show, bail early
    if (not allowed_dates) and (not has_today_meta):
        return []

    # 4) Build events_by_date only for allowed dates
    events_by_date: dict[date, list[dict]] = {}

    for d in allowed_dates:
        entries = grouped_by_date.get(d, [])
        for _, (start_ts, end_ts, subject, record_id, job_id) in entries:
            end_ts = end_ts or start_ts
            label = format_time_range(start_ts, end_ts, self.AMPM).strip()
            # NOTE: start_ts/end_ts are compared as strings here
            # (e.g. "...T000000" sorts chronologically) — keep that invariant.
            if end_ts.endswith("T000000"):
                color = ALLDAY_COLOR
            elif end_ts <= now_ts and end_ts != start_ts:
                color = PASSED_EVENT
            elif start_ts <= now_ts:
                color = ACTIVE_EVENT
            else:
                color = EVENT_COLOR
            label_fmt = f"{label} " if label else ""
            events_by_date.setdefault(d, []).append(
                {
                    "record_id": record_id,
                    "job_id": None,
                    "text": f"[{color}]{label_fmt}{subject}[/{color}]",
                }
            )

    # 5) If TODAY is in allowed_dates (either because it had events or we
    #    added it), attach notice + draft markers even if it had no events
    if today in allowed_dates:
        if notice_records:
            for record_id, days_remaining, subject in notice_records:
                events_by_date.setdefault(today, []).append(
                    {
                        "record_id": record_id,
                        "job_id": None,
                        "text": f"[{NOTICE_COLOR}]+{days_remaining}d {subject} [/{NOTICE_COLOR}]",
                    }
                )
        if draft_records:
            for record_id, subject in draft_records:
                events_by_date.setdefault(today, []).append(
                    {
                        "record_id": record_id,
                        "job_id": None,
                        "text": f"[{DRAFT_COLOR}] ? {subject}[/{DRAFT_COLOR}]",
                    }
                )

    # 6) Flatten into heading + event rows
    total_items = sum(len(v) for v in events_by_date.values())
    if total_items == 0:
        # Edge case: allowed_dates may exist but nothing actually added.
        # Return [] (not {}) so the caller's list concatenation works.
        return []

    rows = []
    for d, day_events in sorted(events_by_date.items()):
        if day_events:
            rows.append(
                {
                    "record_id": None,
                    "job_id": None,
                    "text": f"[not bold][{HEADER_COLOR}]{d.strftime('%a %b %-d')}[/{HEADER_COLOR}][/not bold]",
                }
            )
            for event in day_events:
                rows.append(event)

    return rows
|
|
1948
|
+
|
|
1949
|
+
def get_agenda_tasks(self):
    """
    Build the Tasks section of the Agenda view.

    Returns a list of row dicts ({"record_id", "job_id", "text"}): a
    "Tasks (N)" header row followed by one row per urgency record.
    Pinned tasks show a pin marker instead of the numeric urgency.
    """
    # Use the JOIN with Pinned so pins persist across restarts.
    # rows: (record_id, job_id, subject, urgency, color, status, weights, pinned_int)
    urgency_records = self.db_manager.get_urgency()

    header = f"Tasks ({len(urgency_records)})"
    rows = [
        {"record_id": None, "job_id": None, "text": header},
    ]
    for (
        record_id,
        job_id,
        subject,
        urgency,
        color,
        status,
        weights,
        pinned,
    ) in urgency_records:
        # Pinned tasks display a pin; others show urgency scaled to 0-100.
        urgency_str = (
            "📌" if pinned else f"[{color}]{int(round(urgency * 100)):>2}[/{color}]"
        )
        rows.append(
            {
                "record_id": record_id,
                "job_id": job_id,
                "text": f"[{TASK_COLOR}]{urgency_str} {subject}[/{TASK_COLOR}]",
            }
        )

    return rows
|
|
1994
|
+
|
|
1995
|
+
def get_entry_from_record(self, record_id: int) -> str:
    """Return the editable entry text for a record.

    Loads the stored tokens/rruleset row for ``record_id`` and renders the
    tokens with format_tokens() at the controller's current width.

    Note: the previous version carried an unreachable block after the
    return (referencing an undefined ``tokens_value``) and a docstring
    copied from the finish flow; both have been removed.
    """
    result = self.db_manager.get_tokens(record_id)
    tokens, rruleset, created, modified = result[0]
    entry = format_tokens(tokens, self.width, False)

    return entry
|
|
2021
|
+
|
|
2022
|
+
def finish_from_details(
    self, record_id: int, job_id: int | None, completed_dt: datetime
) -> dict:
    """
    Record a completion for a record/job from the details screen.

    1) Load record -> Item
    2) Call item.finish_without_exdate(...)
    3) Persist Item
    4) Insert Completions row
    5) If fully finished, remove from Urgency/DateTimes
    6) Return summary dict

    Raises:
        ValueError: when the record is missing, its structured tokens are
            unavailable/invalid, or the rebuilt Item fails to parse.
    """
    row = self.db_manager.get_record(record_id)
    if not row:
        raise ValueError(f"No record found for id {record_id}")

    # 0..16 schema like you described; 13 = tokens
    tokens_value = row[13]
    tokens = tokens_value
    # Tokens may be stored as a JSON string; decode when necessary.
    if isinstance(tokens_value, str):
        try:
            tokens = json.loads(tokens_value)
        except Exception:
            # already a list or malformed — best effort
            pass
    if not isinstance(tokens, list):
        raise ValueError("Structured tokens not available/invalid for this record.")

    # Reassemble the raw entry text from the token fragments.
    entry_str = "".join(tok.get("token", "") for tok in tokens).strip()

    # Build/parse the Item
    # item = Item(entry_str)
    item = self.make_item(entry_str)
    if not getattr(item, "parse_ok", True):
        # Some Item versions set parse_ok/parse_message; if not, skip this guard.
        raise ValueError(getattr(item, "parse_message", "Item.parse failed"))

    # Remember subject fallback so we never null it on update
    existing_subject = row[2]  # column 2 = subject
    if not item.subject:
        item.subject = existing_subject

    # 2) Let Item do all the schedule math (no EXDATE path as requested)
    fin = item.finish_without_exdate(
        completed_dt=completed_dt,
        record_id=record_id,
        job_id=job_id,
    )
    # getattr with defaults: older Item versions may not expose these fields.
    due_ts_used = getattr(fin, "due_ts_used", None)
    finished_final = getattr(fin, "finished_final", False)

    # 3) Persist the mutated Item
    self.db_manager.update_item(record_id, item)

    # 4) Insert completion (NULL due is allowed for one-shots)
    self.db_manager.insert_completion(
        record_id=record_id,
        due_ts=due_ts_used,
        completed_ts=int(completed_dt.timestamp()),
    )

    # 5) If final, purge from derived tables so it vanishes from lists
    if finished_final:
        # NOTE(review): broad swallow — deliberate best-effort cleanup;
        # consider logging the exception instead of discarding it.
        try:
            self.db_manager.cursor.execute(
                "DELETE FROM Urgency WHERE record_id=?", (record_id,)
            )
            self.db_manager.cursor.execute(
                "DELETE FROM DateTimes WHERE record_id=?", (record_id,)
            )
            self.db_manager.conn.commit()
        except Exception:
            pass

    # Optional: recompute derivations; DetailsScreen also calls refresh, but safe here
    try:
        self.db_manager.populate_dependent_tables()
    except Exception:
        pass

    return {
        "record_id": record_id,
        "final": finished_final,
        "due_ts": due_ts_used,
        "completed_ts": int(completed_dt.timestamp()),
        "new_rruleset": item.rruleset or "",
    }
|
|
2108
|
+
|
|
2109
|
+
def get_bin_name(self, bin_id: int) -> str:
    """Look up and return the name of bin ``bin_id`` via the DB manager."""
    name = self.db_manager.get_bin_name(bin_id)
    return name
|
|
2111
|
+
|
|
2112
|
+
def get_parent_bin(self, bin_id: int) -> dict | None:
    """Delegate to the DB manager: parent bin of ``bin_id``, or None."""
    parent = self.db_manager.get_parent_bin(bin_id)
    return parent
|
|
2114
|
+
|
|
2115
|
+
def get_subbins(self, bin_id: int) -> list[dict]:
    """Delegate to the DB manager: child bins of ``bin_id``."""
    children = self.db_manager.get_subbins(bin_id)
    return children
|
|
2117
|
+
|
|
2118
|
+
def get_reminders(self, bin_id: int) -> list[dict]:
    """Delegate to the DB manager: reminders stored in bin ``bin_id``."""
    reminders = self.db_manager.get_reminders_in_bin(bin_id)
    return reminders
|
|
2120
|
+
|
|
2121
|
+
# def _bin_name(self, bin_id: int) -> str:
|
|
2122
|
+
# self.db_manager.cursor.execute("SELECT name FROM Bins WHERE id=?", (bin_id,))
|
|
2123
|
+
# row = self.db_manager.cursor.fetchone()
|
|
2124
|
+
# return row[0] if row else f"bin:{bin_id}"
|
|
2125
|
+
|
|
2126
|
+
def _is_root(self, bin_id: int) -> bool:
|
|
2127
|
+
# adjust if your root id differs
|
|
2128
|
+
return bin_id == getattr(self, "root_id", 0)
|
|
2129
|
+
|
|
2130
|
+
def _bin_name(self, bin_id: int) -> str:
    """Return the display name for ``bin_id``, caching per instance.

    The previous implementation decorated this method with
    ``functools.lru_cache``, which keys the cache on ``self`` and keeps
    every controller instance alive for the cache's lifetime (ruff B019).
    A lazily-created per-instance dict gives the same hit behavior
    without the leak.
    """
    cache = self.__dict__.setdefault("_bin_name_cache", {})
    if bin_id in cache:
        return cache[bin_id]
    if self._is_root(bin_id):
        # choose what you want to display for root
        name = "root"  # or "" if you prefer no label
    else:
        cur = self.db_manager.cursor
        cur.execute("SELECT name FROM Bins WHERE id=?", (bin_id,))
        row = cur.fetchone()
        name = row[0] if row and row[0] else f"bin:{bin_id}"
    cache[bin_id] = name
    return name
|
|
2139
|
+
|
|
2140
|
+
def _parent_bin_id(self, bin_id: int) -> Optional[int]:
|
|
2141
|
+
# Root has NULL parent
|
|
2142
|
+
self.db_manager.cursor.execute(
|
|
2143
|
+
"SELECT container_id FROM BinLinks WHERE bin_id=? LIMIT 1", (bin_id,)
|
|
2144
|
+
)
|
|
2145
|
+
row = self.db_manager.cursor.fetchone()
|
|
2146
|
+
return row[0] if row and row[0] is not None else None
|
|
2147
|
+
|
|
2148
|
+
def _bin_path_ids(self, bin_id: int) -> List[int]:
|
|
2149
|
+
"""Return path of bin ids from root→...→bin_id, but EXCLUDING root."""
|
|
2150
|
+
path: List[int] = []
|
|
2151
|
+
cur = bin_id
|
|
2152
|
+
while cur is not None:
|
|
2153
|
+
parent = self._parent_bin_id(cur)
|
|
2154
|
+
path.append(cur)
|
|
2155
|
+
cur = parent
|
|
2156
|
+
path.reverse()
|
|
2157
|
+
# Exclude root if it exists and is first
|
|
2158
|
+
if path and self._bin_name(path[0]).lower() == "root":
|
|
2159
|
+
path = path[1:]
|
|
2160
|
+
return path
|
|
2161
|
+
|
|
2162
|
+
def bin_tagger(self, bin_id: int, page_size: int = 26) -> List[Page]:
    """
    Build pages for a single Bin view.

    Path (excluding 'root') is shown as the first row on every page.
    - Path segments are tagged a.., but the LAST segment (the current bin) is NOT tagged.
    - On every page, content letters start after the header letters, so if header used a..c,
      content begins at 'd' on each page.
    - Only taggable rows (bins + reminders) count toward page_size.

    Returns: list[ (rows: list[str], tag_map: dict[str, ('bin'|'record', target)]) ]
    - target is bin_id for 'bin', or (record_id, job_id|None) for 'record'.
    """

    # ---------- helpers ----------
    def _is_root(bid: int) -> bool:
        # Adjust if you use a different root id
        return bid == getattr(self, "root_id", 0)

    # lru_cache on a nested function is safe here: the cache lives only
    # for the duration of this call.
    @lru_cache(maxsize=4096)
    def _bin_name(bid: int) -> str:
        if _is_root(bid):
            return "root"
        cur = self.db_manager.cursor
        cur.execute("SELECT name FROM Bins WHERE id=?", (bid,))
        row = cur.fetchone()
        return row[0] if row and row[0] else f"bin:{bid}"

    def _bin_path_ids(bid: int) -> List[int]:
        """Return ancestor path including current bin, excluding root."""
        ids: List[int] = []
        cur = self.db_manager.cursor
        b = bid
        while b is not None and not _is_root(b):
            ids.append(b)
            cur.execute(
                "SELECT container_id FROM BinLinks WHERE bin_id = ? LIMIT 1", (b,)
            )
            row = cur.fetchone()
            b = row[0] if row else None
        ids.reverse()
        return ids

    def _pretty_child_name(parent_name: str, child_name: str) -> str:
        """
        Trim exactly 'parent:' from the front of a child name.
        This avoids accidental trims when a child merely starts with the same characters.
        Examples:
            parent='2025', child='2025:10' -> '10'
            parent='people', child='people:S' -> 'S'
            parent='2025', child='202510' -> '202510' (unchanged)
            parent='2025', child='2025x' -> '2025x' (unchanged)
        """
        if not parent_name:
            return child_name
        prefix = f"{parent_name}:"
        if child_name.startswith(prefix):
            suffix = child_name[len(prefix):]
            return suffix or child_name  # never return empty string
        return child_name

    def _format_path_header(
        path_ids: List[int], continued: bool
    ) -> Tuple[str, Dict[str, Tuple[str, int]], int]:
        """
        Build the header text and its tag_map.
        Tag all but the last path segment (so the current bin is untagged).
        Returns: (header_text, header_tagmap, header_letters_count)
        """
        tag_map: Dict[str, Tuple[str, int]] = {}
        segs: List[str] = []
        if not path_ids:
            header_text = ".."
            return (
                (header_text + (" [i](continued)[/i]" if continued else "")),
                tag_map,
                0,
            )

        # how many path letters to tag (exclude current bin)
        taggable = max(0, len(path_ids) - 1)
        header_letters = min(taggable, 26)

        for i, bid in enumerate(path_ids):
            name = _bin_name(bid)
            if i < header_letters:  # tagged ancestor
                tag = chr(ord("a") + i)
                tag_map[tag] = ("bin", bid)
                segs.append(f"[dim]{tag}[/dim] {name}")
            elif i == len(path_ids) - 1:  # current bin (untagged)
                segs.append(f"[bold red]{name}[/bold red]")
            else:  # very deep path overflow (unlikely)
                # BUGFIX: the original wrote
                #   f"[bold yellow]{segs.append(name)}[/bold yellow]"
                # which appended the bare name and discarded the markup
                # (f-string built around append()'s None return).
                segs.append(f"[bold yellow]{name}[/bold yellow]")

        header = " / ".join(segs) if segs else ".."
        if continued:
            header += " [i](continued)[/i]"
        return header, tag_map, header_letters

    # ---------- gather data ----------
    path_ids = _bin_path_ids(bin_id)  # excludes root, includes current bin
    current_name = "" if _is_root(bin_id) else _bin_name(bin_id)

    subbins = self.db_manager.get_subbins(bin_id)  # [{id,name,subbins,reminders}]
    reminders = self.db_manager.get_reminders_in_bin(
        bin_id
    )  # [{id,subject,itemtype}]

    # Prepare content rows (bins then reminders), sorted
    bin_rows: List[Tuple[str, Any, str]] = []
    for b in sorted(subbins, key=lambda x: x["name"].lower()):
        disp = _pretty_child_name(current_name, b["name"])
        bin_rows.append(
            (
                "bin",
                b["id"],
                f"[bold yellow]{disp}[/bold yellow] [dim]({b['subbins']}/{b['reminders']})[/dim]",
            )
        )

    rec_rows: List[Tuple[str, Any, str]] = []
    for r in sorted(reminders, key=lambda x: x["subject"].lower()):
        color = TYPE_TO_COLOR.get(r.get("itemtype", ""), "white")
        rec_rows.append(
            (
                "record",
                (r["id"], None),
                f"[{color}]{r.get('itemtype', '')} {r['subject']}[/{color}]",
            )
        )

    all_rows: List[Tuple[str, Any, str]] = bin_rows + rec_rows

    # ---------- paging ----------
    pages: List[Page] = []
    idx = 0
    first = True

    # header (first page) + how many letters consumed by header
    first_header_text, first_hdr_map, header_letters = _format_path_header(
        path_ids, continued=False
    )
    content_capacity = max(0, page_size - header_letters)

    while first or idx < len(all_rows):
        if first:
            header_text, hdr_map = first_header_text, dict(first_hdr_map)
        else:
            # repeated header with (continued)
            header_text, hdr_map, _ = _format_path_header(path_ids, continued=True)

        rows_out: List[str] = [header_text]
        tag_map: Dict[str, Tuple[str, Any]] = dict(hdr_map)

        if content_capacity == 0:
            # Deep path; show header-only page to avoid infinite loop
            pages.append((rows_out, tag_map))
            break

        tagged = 0
        next_letter_idx = (
            header_letters  # content starts after header letters every page
        )
        while idx < len(all_rows) and tagged < content_capacity:
            kind, payload, text = all_rows[idx]
            idx += 1
            tag = chr(ord("a") + next_letter_idx)
            if kind == "bin":
                tag_map[tag] = ("bin", payload)
            else:
                tag_map[tag] = ("record", payload)  # (record_id, job_id)
            rows_out.append(f" [dim]{tag}[/dim] {text}")
            tagged += 1
            next_letter_idx += 1

        pages.append((rows_out, tag_map))
        first = False

    return pages
|
|
2341
|
+
|
|
2342
|
+
def get_bin_pages(self, bin_id: int):
    """Public API the view will call.

    Returns (pages, title) for the Bin view. The title is currently the
    fixed string "Bins"; the previous version also walked the bin path
    (``self._bin_path_ids``) for a breadcrumb title that was never used,
    so that DB walk has been removed.
    """
    pages = self.bin_tagger(bin_id)
    title = "Bins"
    return pages, title
|
|
2350
|
+
|
|
2351
|
+
def get_record_details(self, record_id: int) -> str:
    """Render a record as rich markup for the details pane.

    Returns an error marker string when the record is missing.
    """
    record = self.db_manager.get_record(record_id)
    if not record:
        return "[red]No details found[/red]"

    # Schema: record[1] = itemtype, record[2] = subject, record[3] = description.
    itemtype, subject = record[1], record[2]
    desc = record[3] or ""
    return f"[bold]{itemtype}[/bold] {subject}\n\n{desc}"
|
|
2361
|
+
|
|
2362
|
+
# controller.py (inside class Controller)
|
|
2363
|
+
|
|
2364
|
+
# --- Backup helpers ---------------------------------------------------------
|
|
2365
|
+
def _db_path_from_self(self) -> Path:
|
|
2366
|
+
"""
|
|
2367
|
+
Resolve the path of the live DB from Controller/DatabaseManager.
|
|
2368
|
+
Adjust the attribute names if yours differ.
|
|
2369
|
+
"""
|
|
2370
|
+
# Common patterns; pick whichever exists in your DB manager:
|
|
2371
|
+
for attr in ("db_path", "database_path", "path"):
|
|
2372
|
+
p = getattr(self.db_manager, attr, None)
|
|
2373
|
+
if p:
|
|
2374
|
+
return Path(p)
|
|
2375
|
+
# Fallback if you also store it on the controller:
|
|
2376
|
+
if hasattr(self, "db_path"):
|
|
2377
|
+
return Path(self.db_path)
|
|
2378
|
+
raise RuntimeError(
|
|
2379
|
+
"Couldn't resolve database path from Controller / db_manager."
|
|
2380
|
+
)
|
|
2381
|
+
|
|
2382
|
+
def _parse_backup_name(self, p: Path) -> Optional[date]:
    """Extract the date encoded in a backup filename.

    Matches ``p.name`` against the module-level ``_BACKUP_RE`` pattern
    (presumably year/month/day groups — see its definition) and returns
    the corresponding date, or None when the name doesn't match.
    """
    match = _BACKUP_RE.match(p.name)
    if match is None:
        return None
    year, month, day = (int(g) for g in match.groups())
    return date(year, month, day)
|
|
2388
|
+
|
|
2389
|
+
def _find_backups(self, dir_path: Path) -> List[_BackupInfo]:
    """Collect backup files with parseable names in *dir_path*, newest first."""
    found: List[_BackupInfo] = []
    if not dir_path.exists():
        return found
    for entry in dir_path.iterdir():
        if not entry.is_file():
            continue
        day = self._parse_backup_name(entry)
        if day is None:
            # Not a recognized backup filename; ignore.
            continue
        try:
            info = entry.stat()
        except FileNotFoundError:
            # File vanished between iterdir() and stat(); skip it.
            continue
        found.append(_BackupInfo(path=entry, day=day, mtime=info.st_mtime))
    # Newest snapshot first: sort by calendar day, then mtime, descending.
    found.sort(key=lambda info: (info.day, info.mtime), reverse=True)
    return found
|
|
2406
|
+
|
|
2407
|
+
# def _sqlite_backup(self, src_db: Path, dest_db: Path) -> None:
|
|
2408
|
+
# """Use SQLite's backup API for a consistent snapshot."""
|
|
2409
|
+
# dest_tmp = dest_db.with_suffix(dest_db.suffix + ".tmp")
|
|
2410
|
+
# dest_db.parent.mkdir(parents=True, exist_ok=True)
|
|
2411
|
+
# with sqlite3.connect(str(src_db)) as src, sqlite3.connect(str(dest_tmp)) as dst:
|
|
2412
|
+
# src.backup(dst, pages=0) # full backup
|
|
2413
|
+
# # Safety on the destination file only:
|
|
2414
|
+
# dst.execute("PRAGMA wal_checkpoint(TRUNCATE);")
|
|
2415
|
+
# dst.execute("VACUUM;")
|
|
2416
|
+
# dst.commit()
|
|
2417
|
+
# try:
|
|
2418
|
+
# shutil.copystat(src_db, dest_tmp)
|
|
2419
|
+
# except Exception:
|
|
2420
|
+
# pass
|
|
2421
|
+
# dest_tmp.replace(dest_db)
|
|
2422
|
+
|
|
2423
|
+
def _should_snapshot(self, db_path: Path, backups: List[_BackupInfo]) -> bool:
|
|
2424
|
+
try:
|
|
2425
|
+
db_mtime = db_path.stat().st_mtime
|
|
2426
|
+
except FileNotFoundError:
|
|
2427
|
+
return False
|
|
2428
|
+
latest_backup_mtime = max((b.mtime for b in backups), default=0.0)
|
|
2429
|
+
return db_mtime > latest_backup_mtime
|
|
2430
|
+
|
|
2431
|
+
def _select_retention(
|
|
2432
|
+
self, backups: List[_BackupInfo], today_local: date
|
|
2433
|
+
) -> Set[Path]:
|
|
2434
|
+
"""
|
|
2435
|
+
Keep at most 5:
|
|
2436
|
+
newest overall, newest >=3d, >=7d, >=14d, >=28d (by calendar day).
|
|
2437
|
+
"""
|
|
2438
|
+
keep: Set[Path] = set()
|
|
2439
|
+
if not backups:
|
|
2440
|
+
return keep
|
|
2441
|
+
|
|
2442
|
+
newest = max(backups, key=lambda b: (b.day, b.mtime))
|
|
2443
|
+
keep.add(newest.path)
|
|
2444
|
+
|
|
2445
|
+
for days in (3, 7, 14, 28):
|
|
2446
|
+
cutoff = today_local - timedelta(days=days)
|
|
2447
|
+
cands = [b for b in backups if b.day <= cutoff]
|
|
2448
|
+
if cands:
|
|
2449
|
+
chosen = max(cands, key=lambda b: (b.day, b.mtime))
|
|
2450
|
+
keep.add(chosen.path)
|
|
2451
|
+
return keep
|
|
2452
|
+
|
|
2453
|
+
# --- Public API --------------------------------------------------------------
|
|
2454
|
+
def rotate_daily_backups(
    self, dry_run: bool = False
) -> tuple[Optional[Path], list[Path], list[Path]]:
    """
    Snapshot the live DB when it has changed since the newest backup,
    then prune backups down to the retention set.

    The snapshot is named for *yesterday* in the configured timezone so
    backup names line up with the user's calendar day.

    Args:
        dry_run: When True, report what would happen without creating
            or deleting any files.

    Returns:
        (created, kept, removed):
            created -- path of the new snapshot (None if none was needed).
            kept    -- sorted paths retained by the retention policy.
            removed -- paths deleted (or that would be deleted under dry_run).
    """
    # Where is the live DB?  (DatabaseManager exposes .db_path)
    db_path: Path = Path(self.db_manager.db_path).resolve()
    backup_dir: Path = db_path.parent / "backups"
    backup_dir.mkdir(parents=True, exist_ok=True)

    # Compute today/yesterday in the configured timezone, not server time.
    tz = getattr(getattr(self, "env", None), "timezone", "America/New_York")
    now = datetime.now(ZoneInfo(tz))
    today = now.date()
    yesterday = today - timedelta(days=1)

    backups = self._find_backups(backup_dir)

    # Only snapshot when the DB is newer than every existing backup.
    # (Previously a snapshot was also taken unconditionally above this
    # check, which defeated _should_snapshot and double-wrote files.)
    created: Optional[Path] = None
    if self._should_snapshot(db_path, backups):
        target = backup_dir / f"{yesterday.isoformat()}.db"
        if not dry_run:
            self.db_manager.backup_to(target)
            backups = self._find_backups(backup_dir)  # refresh after write
        created = target

    keep = self._select_retention(backups, today_local=today)
    kept = sorted(keep)
    removed: list[Path] = []
    for b in backups:
        if b.path in keep:
            continue
        removed.append(b.path)
        if dry_run:
            continue
        try:
            b.path.unlink()
        except FileNotFoundError:
            pass  # already gone; pruning is best-effort

    return created, kept, removed
|
|
2502
|
+
|
|
2503
|
+
###VVV new for tagged bin tree
|
|
2504
|
+
|
|
2505
|
+
def get_root_bin_id(self) -> int:
    """Return the id of the root bin, delegating to the existing DB anchor."""
    root_id = self.db_manager.ensure_root_exists()
    return root_id
|
|
2508
|
+
|
|
2509
|
+
def _make_crumb(self, bin_id: int | None):
|
|
2510
|
+
"""Return [(id, name), ...] from root to current."""
|
|
2511
|
+
if bin_id is None:
|
|
2512
|
+
rid = self.db_manager.ensure_root_exists()
|
|
2513
|
+
return [(rid, "root")]
|
|
2514
|
+
# climb using your get_parent_bin
|
|
2515
|
+
chain = []
|
|
2516
|
+
cur = bin_id
|
|
2517
|
+
while cur is not None:
|
|
2518
|
+
name = self.db_manager.get_bin_name(cur)
|
|
2519
|
+
chain.append((cur, name))
|
|
2520
|
+
parent = self.db_manager.get_parent_bin(cur) # {'id','name'} or None
|
|
2521
|
+
cur = parent["id"] if parent else None
|
|
2522
|
+
return list(reversed(chain)) or [(self.db_manager.ensure_root_exists(), "root")]
|
|
2523
|
+
|
|
2524
|
+
def get_bin_summary(self, bin_id: int | None, *, filter_text: str | None = None):
    """
    Summarize one bin for the view.

    Args:
        bin_id: Bin to summarize; None means the root bin.
        filter_text: Optional case-insensitive substring filter applied
            to child-bin names and reminder subjects.

    Returns:
        children  -> [ChildBinRow]
        reminders -> [ReminderRow]
        crumb     -> [(id, name), ...]

    Uses ONLY DatabaseManager public methods.
    """
    # Resolve the effective bin once (previously this resolved the root
    # id up to three separate times, each a DB round-trip).
    target = bin_id if bin_id is not None else self.get_root_bin_id()

    # 1) children (DB layer supplies counts, already sorted)
    # shape: {"id","name","subbins","reminders"}
    children = [
        ChildBinRow(
            bin_id=row["id"],
            name=row["name"],
            child_ct=row["subbins"],
            rem_ct=row["reminders"],
        )
        for row in self.db_manager.get_subbins(target)
    ]

    # 2) reminders (linked via ReminderLinks)
    # shape: {"id","subject","itemtype"}
    reminders = [
        ReminderRow(
            record_id=row["id"],
            subject=row["subject"],
            itemtype=row["itemtype"],
        )
        for row in self.db_manager.get_reminders_in_bin(target)
    ]

    # 3) apply filter (controller-level; no new SQL)
    if filter_text:
        needle = filter_text.casefold()
        children = [c for c in children if needle in c.name.casefold()]
        reminders = [r for r in reminders if needle in r.subject.casefold()]

    # 4) crumb
    crumb = self._make_crumb(target)
    return children, reminders, crumb
|
|
2572
|
+
|
|
2573
|
+
def get_reminder_details(self, record_id: int) -> str:
    """Render a minimal detail string (subject + itemtype) for one reminder."""
    row = self.db_manager.cursor.execute(
        "SELECT subject, itemtype FROM Records WHERE id=?",
        (record_id,),
    ).fetchone()
    if row is None:
        return "[b]Unknown reminder[/b]"
    subject, itemtype = row
    return f"[b]{subject}[/b]\n[dim]type:[/dim] {itemtype or '—'}"
|
|
2583
|
+
|
|
2584
|
+
def get_descendant_tree(self, bin_id: int) -> List[Tuple[int, str, int]]:
|
|
2585
|
+
"""
|
|
2586
|
+
Return a pre-order flattened list of (bin_id, name, depth)
|
|
2587
|
+
for the *bins-only* subtree rooted at `bin_id`.
|
|
2588
|
+
Uses DatabaseManager.get_subbins() (which already returns sorted children).
|
|
2589
|
+
"""
|
|
2590
|
+
out: List[Tuple[int, str, int]] = []
|
|
2591
|
+
|
|
2592
|
+
def walk(current_id: int, depth: int) -> None:
|
|
2593
|
+
children = self.db_manager.get_subbins(
|
|
2594
|
+
current_id
|
|
2595
|
+
) # [{id,name,subbins,reminders}, ...]
|
|
2596
|
+
for ch in children:
|
|
2597
|
+
out.append((ch["id"], ch["name"], depth + 1))
|
|
2598
|
+
walk(ch["id"], depth + 1)
|
|
2599
|
+
|
|
2600
|
+
out.append((bin_id, self.db_manager.get_bin_name(bin_id), 0))
|
|
2601
|
+
walk(bin_id, 0)
|
|
2602
|
+
return out
|