promnesia 1.2.20230515__py3-none-any.whl → 1.3.20241021__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (84)
  1. promnesia/__init__.py +14 -3
  2. promnesia/__main__.py +60 -35
  3. promnesia/cannon.py +27 -27
  4. promnesia/common.py +85 -67
  5. promnesia/compare.py +21 -22
  6. promnesia/compat.py +10 -10
  7. promnesia/config.py +23 -23
  8. promnesia/database/common.py +67 -0
  9. promnesia/database/dump.py +188 -0
  10. promnesia/{read_db.py → database/load.py} +16 -17
  11. promnesia/extract.py +14 -11
  12. promnesia/kjson.py +12 -11
  13. promnesia/logging.py +4 -4
  14. promnesia/misc/__init__.pyi +0 -0
  15. promnesia/misc/config_example.py +1 -2
  16. promnesia/misc/install_server.py +7 -9
  17. promnesia/server.py +57 -47
  18. promnesia/sources/__init__.pyi +0 -0
  19. promnesia/sources/auto.py +50 -35
  20. promnesia/sources/auto_logseq.py +6 -5
  21. promnesia/sources/auto_obsidian.py +2 -2
  22. promnesia/sources/browser.py +14 -9
  23. promnesia/sources/browser_legacy.py +26 -16
  24. promnesia/sources/demo.py +19 -3
  25. promnesia/sources/fbmessenger.py +3 -2
  26. promnesia/sources/filetypes.py +16 -7
  27. promnesia/sources/github.py +7 -9
  28. promnesia/sources/guess.py +2 -1
  29. promnesia/sources/hackernews.py +2 -2
  30. promnesia/sources/hpi.py +2 -2
  31. promnesia/sources/html.py +7 -5
  32. promnesia/sources/hypothesis.py +4 -3
  33. promnesia/sources/instapaper.py +2 -2
  34. promnesia/sources/markdown.py +31 -21
  35. promnesia/sources/org.py +27 -13
  36. promnesia/sources/plaintext.py +30 -29
  37. promnesia/sources/pocket.py +3 -2
  38. promnesia/sources/reddit.py +20 -19
  39. promnesia/sources/roamresearch.py +2 -1
  40. promnesia/sources/rss.py +4 -5
  41. promnesia/sources/shellcmd.py +19 -6
  42. promnesia/sources/signal.py +33 -24
  43. promnesia/sources/smscalls.py +2 -2
  44. promnesia/sources/stackexchange.py +4 -3
  45. promnesia/sources/takeout.py +76 -9
  46. promnesia/sources/takeout_legacy.py +24 -12
  47. promnesia/sources/telegram.py +13 -11
  48. promnesia/sources/telegram_legacy.py +18 -7
  49. promnesia/sources/twitter.py +6 -5
  50. promnesia/sources/vcs.py +5 -3
  51. promnesia/sources/viber.py +10 -9
  52. promnesia/sources/website.py +4 -4
  53. promnesia/sources/zulip.py +3 -2
  54. promnesia/sqlite.py +7 -4
  55. promnesia/tests/__init__.py +0 -0
  56. promnesia/tests/common.py +140 -0
  57. promnesia/tests/server_helper.py +67 -0
  58. promnesia/tests/sources/__init__.py +0 -0
  59. promnesia/tests/sources/test_auto.py +65 -0
  60. promnesia/tests/sources/test_filetypes.py +43 -0
  61. promnesia/tests/sources/test_hypothesis.py +39 -0
  62. promnesia/tests/sources/test_org.py +64 -0
  63. promnesia/tests/sources/test_plaintext.py +25 -0
  64. promnesia/tests/sources/test_shellcmd.py +21 -0
  65. promnesia/tests/sources/test_takeout.py +56 -0
  66. promnesia/tests/test_cannon.py +325 -0
  67. promnesia/tests/test_cli.py +40 -0
  68. promnesia/tests/test_compare.py +30 -0
  69. promnesia/tests/test_config.py +289 -0
  70. promnesia/tests/test_db_dump.py +222 -0
  71. promnesia/tests/test_extract.py +65 -0
  72. promnesia/tests/test_extract_urls.py +43 -0
  73. promnesia/tests/test_indexer.py +251 -0
  74. promnesia/tests/test_server.py +291 -0
  75. promnesia/tests/test_traverse.py +39 -0
  76. promnesia/tests/utils.py +35 -0
  77. {promnesia-1.2.20230515.dist-info → promnesia-1.3.20241021.dist-info}/METADATA +15 -18
  78. promnesia-1.3.20241021.dist-info/RECORD +83 -0
  79. {promnesia-1.2.20230515.dist-info → promnesia-1.3.20241021.dist-info}/WHEEL +1 -1
  80. {promnesia-1.2.20230515.dist-info → promnesia-1.3.20241021.dist-info}/entry_points.txt +0 -1
  81. promnesia/dump.py +0 -105
  82. promnesia-1.2.20230515.dist-info/RECORD +0 -58
  83. {promnesia-1.2.20230515.dist-info → promnesia-1.3.20241021.dist-info}/LICENSE +0 -0
  84. {promnesia-1.2.20230515.dist-info → promnesia-1.3.20241021.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,188 @@
1
+ from __future__ import annotations
2
+
3
+ import sqlite3
4
+ from collections.abc import Iterable
5
+ from pathlib import Path
6
+ from typing import Optional
7
+
8
+ from more_itertools import chunked
9
+ from sqlalchemy import (
10
+ Engine,
11
+ MetaData,
12
+ Table,
13
+ create_engine,
14
+ event,
15
+ exc,
16
+ func,
17
+ select,
18
+ )
19
+ from sqlalchemy.dialects import sqlite as dialect_sqlite
20
+
21
+ from .. import config
22
+ from ..common import (
23
+ DbVisit,
24
+ Loc,
25
+ Res,
26
+ SourceName,
27
+ get_logger,
28
+ now_tz,
29
+ )
30
+ from .common import db_visit_to_row, get_columns
31
+
32
+ # NOTE: I guess the main performance benefit from this is not creating too many tmp lists and avoiding overhead
33
+ # since as far as sql is concerned it should all be in the same transaction. only a guess
34
+ # not sure it's the proper way to handle it
35
+ # see test_index_many
36
+ _CHUNK_BY = 10
37
+
38
+ # I guess 1 hour is definitely enough
39
+ _CONNECTION_TIMEOUT_SECONDS = 3600
40
+
41
+ SRC_ERROR = 'error'
42
+
43
+
44
+ # using WAL keeps database readable while we're writing in it
45
+ # this is tested by test_query_while_indexing
46
+ def enable_wal(dbapi_con, con_record) -> None:
47
+ dbapi_con.execute('PRAGMA journal_mode = WAL')
48
+
49
+
50
+ def begin_immediate_transaction(conn):
51
+ conn.exec_driver_sql('BEGIN IMMEDIATE')
52
+
53
+
54
+ Stats = dict[Optional[SourceName], int]
55
+
56
+
57
+ # returns critical warnings
58
+ def visits_to_sqlite(
59
+ vit: Iterable[Res[DbVisit]],
60
+ *,
61
+ overwrite_db: bool,
62
+ _db_path: Path | None = None, # only used in tests
63
+ ) -> list[Exception]:
64
+ if _db_path is None:
65
+ db_path = config.get().db
66
+ else:
67
+ db_path = _db_path
68
+
69
+ logger = get_logger()
70
+
71
+ now = now_tz()
72
+
73
+ index_stats: Stats = {}
74
+
75
+ def vit_ok() -> Iterable[DbVisit]:
76
+ for v in vit:
77
+ ev: DbVisit
78
+ if isinstance(v, DbVisit):
79
+ ev = v
80
+ else:
81
+ # conform to the schema and dump. can't hurt anyway
82
+ ev = DbVisit(
83
+ norm_url='<error>',
84
+ orig_url='<error>',
85
+ dt=now,
86
+ locator=Loc.make('<errror>'),
87
+ src=SRC_ERROR,
88
+ # todo attach backtrace?
89
+ context=repr(v),
90
+ )
91
+ index_stats[ev.src] = index_stats.get(ev.src, 0) + 1
92
+ yield ev
93
+
94
+ meta = MetaData()
95
+ table = Table('visits', meta, *get_columns())
96
+
97
+ def query_total_stats(conn) -> Stats:
98
+ query = select(table.c.src, func.count(table.c.src)).select_from(table).group_by(table.c.src)
99
+ return dict(conn.execute(query).all())
100
+
101
+ def get_engine(*args, **kwargs) -> Engine:
102
+ # kwargs['echo'] = True # useful for debugging
103
+ e = create_engine(*args, **kwargs)
104
+ event.listen(e, 'connect', enable_wal)
105
+ return e
106
+
107
+ ### use readonly database just to get stats
108
+ pengine = get_engine('sqlite://', creator=lambda: sqlite3.connect(f"file:{db_path}?mode=ro", uri=True))
109
+ stats_before: Stats
110
+ try:
111
+ with pengine.begin() as conn:
112
+ stats_before = query_total_stats(conn)
113
+ except exc.OperationalError as oe:
114
+ if oe.code == 'e3q8':
115
+ # db doesn't exist yet
116
+ stats_before = {}
117
+ else:
118
+ raise oe
119
+ pengine.dispose()
120
+ ###
121
+
122
+ # need timeout, otherwise concurrent indexing might not work
123
+ # (note that this also requires WAL mode)
124
+ engine = get_engine(f'sqlite:///{db_path}', connect_args={'timeout': _CONNECTION_TIMEOUT_SECONDS})
125
+
126
+ cleared: set[str] = set()
127
+
128
+ # by default, sqlalchemy does some sort of BEGIN (implicit) transaction, which doesn't provide proper isolation??
129
+ # see https://docs.sqlalchemy.org/en/20/dialects/sqlite.html#serializable-isolation-savepoints-transactional-ddl
130
+ event.listen(engine, 'begin', begin_immediate_transaction)
131
+ # TODO to allow more concurrent indexing, maybe could instead write to a temporary table?
132
+ # or collect visits first and only then start writing to the db to minimize db access window.. not sure
133
+
134
+ # engine.begin() starts a transaction
135
+ # so everything inside this block will be atomic to the outside observers
136
+ with engine.begin() as conn:
137
+ table.create(conn, checkfirst=True)
138
+
139
+ if overwrite_db:
140
+ conn.execute(table.delete())
141
+
142
+ insert_stmt = table.insert()
143
+ # using raw statement gives a massive speedup for inserting visits
144
+ # see test_benchmark_visits_dumping
145
+ insert_stmt_raw = str(insert_stmt.compile(dialect=dialect_sqlite.dialect(paramstyle='qmark')))
146
+
147
+ for chunk in chunked(vit_ok(), n=_CHUNK_BY):
148
+ srcs = {v.src or '' for v in chunk}
149
+ new = srcs.difference(cleared)
150
+
151
+ for src in new:
152
+ conn.execute(table.delete().where(table.c.src == src))
153
+ cleared.add(src)
154
+
155
+ bound = [db_visit_to_row(v) for v in chunk]
156
+ conn.exec_driver_sql(insert_stmt_raw, bound)
157
+
158
+ stats_after = query_total_stats(conn)
159
+ engine.dispose()
160
+
161
+ stats_changes = {}
162
+ # map str just in case some srcs are None
163
+ for k in sorted(map(str, {*stats_before.keys(), *stats_after.keys()})):
164
+ diff = stats_after.get(k, 0) - stats_before.get(k, 0)
165
+ if diff == 0:
166
+ continue
167
+ sdiff = ('+' if diff > 0 else '') + str(diff)
168
+ stats_changes[k] = sdiff
169
+
170
+ action = 'overwritten' if overwrite_db else 'updated'
171
+ total_indexed = sum(index_stats.values())
172
+ total_err = index_stats.get(SRC_ERROR, 0)
173
+ total_ok = total_indexed - total_err
174
+ logger.info(f'indexed (current run) : total: {total_indexed}, ok: {total_ok}, errors: {total_err} {index_stats}')
175
+ logger.info(f'database "{db_path}" : {action}')
176
+ logger.info(f'database stats before : {stats_before}')
177
+ logger.info(f'database stats after : {stats_after}')
178
+
179
+ if len(stats_changes) == 0:
180
+ logger.info('database stats changes: no changes')
181
+ else:
182
+ for k, v in stats_changes.items():
183
+ logger.info(f'database stats changes: {k} {v}')
184
+
185
+ res: list[Exception] = []
186
+ if total_ok == 0:
187
+ res.append(RuntimeError('No visits were indexed, something is probably wrong!'))
188
+ return res
@@ -1,32 +1,29 @@
1
+ from __future__ import annotations
2
+
1
3
  from pathlib import Path
2
- from typing import Tuple, List
3
4
 
4
- from cachew import NTBinder
5
5
  from sqlalchemy import (
6
- create_engine,
7
- exc,
8
- MetaData,
6
+ Engine,
9
7
  Index,
8
+ MetaData,
10
9
  Table,
10
+ create_engine,
11
+ exc,
11
12
  )
12
- from sqlalchemy.engine import Engine
13
13
 
14
- from .common import DbVisit
14
+ from .common import DbVisit, get_columns, row_to_db_visit
15
15
 
16
-
17
- DbStuff = Tuple[Engine, NTBinder, Table]
16
+ DbStuff = tuple[Engine, Table]
18
17
 
19
18
 
20
19
  def get_db_stuff(db_path: Path) -> DbStuff:
21
20
  assert db_path.exists(), db_path
22
21
  # todo how to open read only?
23
22
  # actually not sure if we can since we are creating an index here
24
- engine = create_engine(f'sqlite:///{db_path}') # , echo=True)
25
-
26
- binder = NTBinder.make(DbVisit)
23
+ engine = create_engine(f'sqlite:///{db_path}') # , echo=True)
27
24
 
28
25
  meta = MetaData()
29
- table = Table('visits', meta, *binder.columns)
26
+ table = Table('visits', meta, *get_columns())
30
27
 
31
28
  idx = Index('index_norm_url', table.c.norm_url)
32
29
  try:
@@ -39,13 +36,15 @@ def get_db_stuff(db_path: Path) -> DbStuff:
39
36
  raise e
40
37
 
41
38
  # NOTE: apparently it's ok to open connection on every request? at least my comparisons didn't show anything
42
- return engine, binder, table
39
+ return engine, table
43
40
 
44
41
 
45
- def get_all_db_visits(db_path: Path) -> List[DbVisit]:
42
+ def get_all_db_visits(db_path: Path) -> list[DbVisit]:
46
43
  # NOTE: this is pretty inefficient if the DB is huge
47
44
  # mostly intended for tests
48
- engine, binder, table = get_db_stuff(db_path)
45
+ engine, table = get_db_stuff(db_path)
49
46
  query = table.select()
50
47
  with engine.connect() as conn:
51
- return [binder.from_row(row) for row in conn.execute(query)]
48
+ res = [row_to_db_visit(row) for row in conn.execute(query)]
49
+ engine.dispose()
50
+ return res
promnesia/extract.py CHANGED
@@ -1,20 +1,22 @@
1
- from functools import lru_cache
1
+ from __future__ import annotations
2
+
2
3
  import re
3
- import traceback
4
- from typing import Set, Iterable, Sequence, Union
4
+ from collections.abc import Iterable, Sequence
5
+ from functools import lru_cache
5
6
 
6
7
  from .cannon import CanonifyException
7
8
  from .common import (
8
- logger,
9
- DbVisit, Visit,
10
- Res,
11
- SourceName, Source,
9
+ DbVisit,
12
10
  Filter,
11
+ Res,
12
+ Results,
13
+ Source,
14
+ SourceName,
13
15
  Url,
14
- Results, Extractor,
16
+ Visit,
17
+ logger,
15
18
  )
16
19
 
17
-
18
20
  DEFAULT_FILTERS = (
19
21
  r'^chrome-\w+://',
20
22
  r'chrome://newtab',
@@ -28,6 +30,7 @@ DEFAULT_FILTERS = (
28
30
  )
29
31
 
30
32
 
33
+ # TODO maybe move these to configs?
31
34
  @lru_cache(1) #meh, not sure what would happen under tests?
32
35
  def filters() -> Sequence[Filter]:
33
36
  from . import config
@@ -52,7 +55,7 @@ def extract_visits(source: Source, *, src: SourceName) -> Iterable[Res[DbVisit]]
52
55
  yield e
53
56
  return
54
57
 
55
- handled: Set[Visit] = set()
58
+ handled: set[Visit] = set()
56
59
  try:
57
60
  for p in vit:
58
61
  if isinstance(p, Exception):
@@ -93,7 +96,7 @@ def filtered(url: Url) -> bool:
93
96
  return any(f(url) for f in filters())
94
97
 
95
98
 
96
- def make_filter(thing: Union[str, Filter]) -> Filter:
99
+ def make_filter(thing: str | Filter) -> Filter:
97
100
  if isinstance(thing, str):
98
101
  rc = re.compile(thing)
99
102
  def filter_(u: str) -> bool:
promnesia/kjson.py CHANGED
@@ -3,19 +3,19 @@ Some experimental ideas on JSON processing.
3
3
  This is a bit overengineered and I admit it!
4
4
  I'll make it more readable, but in the meantime feel free to open an issue if you're confused about something.
5
5
  """
6
+ from __future__ import annotations
6
7
 
7
- from typing import Any, Dict, List, Union, Tuple, cast
8
+ from typing import Any, Union, cast
8
9
 
9
-
10
- JDict = Dict[str, Any] # TODO not sure if we can do recursive..
11
- JList = List[Any]
10
+ JDict = dict[str, Any] # TODO not sure if we can do recursive..
11
+ JList = list[Any]
12
12
  JPrim = Union[str, int, float] # , type(None)]
13
13
 
14
14
  Json = Union[JDict, JList, JPrim]
15
15
 
16
- JPathPart = Tuple[Json, Union[str, int]]
16
+ JPathPart = tuple[Json, Union[str, int]]
17
17
 
18
- JPath = Tuple[JPathPart, ...]
18
+ JPath = tuple[JPathPart, ...]
19
19
 
20
20
 
21
21
  class JsonProcessor:
@@ -36,7 +36,7 @@ class JsonProcessor:
36
36
  if res is self.SKIP:
37
37
  return
38
38
  for k, v in js.items():
39
- path = cast(JPath, jp + ((js, k), ))
39
+ path = cast(JPath, jp + ((js, k), )) # noqa: RUF005
40
40
  self._do(v, path)
41
41
 
42
42
  def do_list(self, js: JList, jp: JPath) -> None:
@@ -45,7 +45,7 @@ class JsonProcessor:
45
45
  if res is self.SKIP:
46
46
  return
47
47
  for i, x in enumerate(js):
48
- path = cast(JPath, jp + ((js, i), ))
48
+ path = cast(JPath, jp + ((js, i), )) # noqa: RUF005
49
49
  self._do(x, path)
50
50
 
51
51
  def _do(self, js: Json, path: JPath) -> None:
@@ -65,7 +65,7 @@ class JsonProcessor:
65
65
  self._do(js, path)
66
66
 
67
67
  @classmethod
68
- def kpath(cls, path: JPath) -> Tuple[JPathPart, ...]:
68
+ def kpath(cls, path: JPath) -> tuple[JPathPart, ...]:
69
69
  return tuple(x[1] for x in path) # type: ignore
70
70
 
71
71
  # TODO path is a sequence of jsons and keys?
@@ -73,9 +73,10 @@ class JsonProcessor:
73
73
  def test_json_processor():
74
74
  handled = []
75
75
  class Proc(JsonProcessor):
76
- def handle_dict(self, value: JDict, path):
77
- if 'skipme' in self.kpath(path):
76
+ def handle_dict(self, value: JDict, path): # noqa: ARG002
77
+ if 'skipme' in self.kpath(path): # type: ignore[comparison-overlap]
78
78
  return JsonProcessor.SKIP
79
+ return None
79
80
 
80
81
  def handle_str(self, value: str, path):
81
82
  if 'http' in value:
promnesia/logging.py CHANGED
@@ -29,9 +29,9 @@ def test() -> None:
29
29
 
30
30
 
31
31
  import logging
32
- from typing import Union, Optional, cast
33
32
  import os
34
33
  import warnings
34
+ from typing import Optional, Union, cast
35
35
 
36
36
  Level = int
37
37
  LevelIsh = Optional[Union[Level, str]]
@@ -61,7 +61,7 @@ _init_done = 'lazylogger_init_done'
61
61
  def setup_logger(logger: logging.Logger, level: LevelIsh) -> None:
62
62
  lvl = mklevel(level)
63
63
  try:
64
- import logzero # type: ignore[import]
64
+ import logzero # type: ignore[import-not-found]
65
65
  formatter = logzero.LogFormatter(
66
66
  fmt=FORMAT_COLOR,
67
67
  datefmt=DATEFMT,
@@ -75,7 +75,7 @@ def setup_logger(logger: logging.Logger, level: LevelIsh) -> None:
75
75
  logger.addFilter(AddExceptionTraceback())
76
76
  if use_logzero and not COLLAPSE_DEBUG_LOGS: # all set, nothing to do
77
77
  # 'simple' setup
78
- logzero.setup_logger(logger.name, level=lvl, formatter=formatter)
78
+ logzero.setup_logger(logger.name, level=lvl, formatter=formatter) # type: ignore[possibly-undefined]
79
79
  return
80
80
 
81
81
  h = CollapseDebugHandler() if COLLAPSE_DEBUG_LOGS else logging.StreamHandler()
@@ -101,7 +101,7 @@ class LazyLogger(logging.Logger):
101
101
  # oh god.. otherwise might go into an inf loop
102
102
  if not hasattr(logger, _init_done):
103
103
  setattr(logger, _init_done, False) # will setup on the first call
104
- logger.isEnabledFor = isEnabledFor_lazyinit # type: ignore[assignment]
104
+ logger.isEnabledFor = isEnabledFor_lazyinit # type: ignore[method-assign]
105
105
  return cast(LazyLogger, logger)
106
106
 
107
107
 
File without changes
@@ -11,7 +11,6 @@ SOURCES = [
11
11
  Source(
12
12
  auto.index,
13
13
  # just some arbitrary directory with plaintext files
14
- '/usr/include/c++/',
15
- '/usr/local/include/c++/', # on apple they are here apparently..
14
+ '/usr/share/vim/',
16
15
  )
17
16
  ]
@@ -1,14 +1,12 @@
1
- #!/usr/bin/env python3
2
1
  from __future__ import annotations
3
2
 
4
3
  import argparse
5
4
  import os
5
+ import platform
6
6
  import sys
7
7
  import time
8
8
  from pathlib import Path
9
- import platform
10
9
  from subprocess import check_call, run
11
- from typing import List
12
10
 
13
11
  SYSTEM = platform.system()
14
12
  UNSUPPORTED_SYSTEM = RuntimeError(f'Platform {SYSTEM} is not supported yet!')
@@ -58,7 +56,7 @@ def systemd(*args: str | Path, method=check_call) -> None:
58
56
  ])
59
57
 
60
58
 
61
- def install_systemd(name: str, out: Path, launcher: str, largs: List[str]) -> None:
59
+ def install_systemd(name: str, out: Path, launcher: str, largs: list[str]) -> None:
62
60
  unit_name = name
63
61
 
64
62
  import shlex
@@ -80,7 +78,7 @@ def install_systemd(name: str, out: Path, launcher: str, largs: List[str]) -> No
80
78
  raise e
81
79
 
82
80
 
83
- def install_launchd(name: str, out: Path, launcher: str, largs: List[str]) -> None:
81
+ def install_launchd(name: str, out: Path, launcher: str, largs: list[str]) -> None:
84
82
  service_name = name
85
83
  arguments = '\n'.join(f'<string>{a}</string>' for a in [launcher, *largs])
86
84
  out.write_text(LAUNCHD_TEMPLATE.format(
@@ -115,16 +113,16 @@ def install(args: argparse.Namespace) -> None:
115
113
  print(f"Writing launch script to {out}", file=sys.stderr)
116
114
 
117
115
  # ugh. we want to know whether we're invoked 'properly' as an executable or ad-hoc via scripts/promnesia
116
+ extra_exe: list[str] = []
118
117
  if os.environ.get('DIRTY_RUN') is not None:
119
118
  launcher = str(root() / 'scripts/promnesia')
120
119
  else:
121
- # must be installed, so available in PATH
122
- import distutils.spawn
123
- exe = distutils.spawn.find_executable('promnesia'); assert exe is not None
124
- launcher = exe # older systemd wants absolute paths..
120
+ launcher = sys.executable
121
+ extra_exe = ['-m', 'promnesia']
125
122
 
126
123
  db = args.db
127
124
  largs = [
125
+ *extra_exe,
128
126
  'serve',
129
127
  *([] if db is None else ['--db', str(db)]),
130
128
  '--timezone', args.timezone,