tagflux-0.1.0.tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- tagflux-0.1.0/PKG-INFO +18 -0
- tagflux-0.1.0/README.md +0 -0
- tagflux-0.1.0/pyproject.toml +28 -0
- tagflux-0.1.0/tagflux/__init__.py +2 -0
- tagflux-0.1.0/tagflux/__main__.py +101 -0
- tagflux-0.1.0/tagflux/config.py +61 -0
- tagflux-0.1.0/tagflux/processors/__init__.py +0 -0
- tagflux-0.1.0/tagflux/processors/content.py +368 -0
- tagflux-0.1.0/tagflux/processors/static.py +166 -0
- tagflux-0.1.0/tagflux/processors/template.py +121 -0
- tagflux-0.1.0/tagflux/py.typed +0 -0
- tagflux-0.1.0/tagflux/watchers/__init__.py +0 -0
- tagflux-0.1.0/tagflux/watchers/content.py +49 -0
- tagflux-0.1.0/tagflux/watchers/static.py +47 -0
- tagflux-0.1.0/tagflux/watchers/templates.py +34 -0
tagflux-0.1.0/PKG-INFO
ADDED
@@ -0,0 +1,18 @@
+Metadata-Version: 2.3
+Name: tagflux
+Version: 0.1.0
+Summary: Static site generator
+Author: Roman
+Author-email: Roman <roman@adontz.com>
+Requires-Dist: dacite~=1.9.2
+Requires-Dist: jinja2~=3.1.6
+Requires-Dist: markdown~=3.9
+Requires-Dist: pillow~=12.1.1
+Requires-Dist: pygments~=2.19.2
+Requires-Dist: pyyaml~=6.0.3
+Requires-Dist: rcssmin~=1.2.2
+Requires-Dist: rjsmin~=1.2.5
+Requires-Dist: watchdog~=6.0.0
+Requires-Python: >=3.12
+Description-Content-Type: text/markdown
+
tagflux-0.1.0/README.md
ADDED
File without changes
tagflux-0.1.0/pyproject.toml
ADDED
@@ -0,0 +1,28 @@
+[project]
+name = "tagflux"
+version = "0.1.0"
+description = "Static site generator"
+readme = "README.md"
+authors = [
+    { name = "Roman", email = "roman@adontz.com" }
+]
+requires-python = ">=3.12"
+dependencies = [
+    "dacite~=1.9.2",
+    "Jinja2~=3.1.6",
+    "Markdown~=3.9",
+    "pillow~=12.1.1",
+    "pygments~=2.19.2",
+    "PyYAML~=6.0.3",
+    "rcssmin~=1.2.2",
+    "rjsmin~=1.2.5",
+    "watchdog~=6.0.0",
+]
+
+[build-system]
+requires = ["uv_build>=0.11.1,<0.12.0"]
+build-backend = "uv_build"
+
+[tool.uv.build-backend]
+module-name = "tagflux"
+module-root = ""
tagflux-0.1.0/tagflux/__main__.py
ADDED
@@ -0,0 +1,101 @@
+#!/bin/env python3
+
+from argparse import ArgumentParser
+from concurrent.futures import ProcessPoolExecutor
+from concurrent.futures import ThreadPoolExecutor
+from dacite import from_dict
+from os.path import abspath
+from os.path import dirname
+from os.path import join
+from tagflux.config import Config
+from tagflux.processors.content import PageCatalog
+from tagflux.processors.static import StaticProcessor
+from tagflux.processors.template import TemplateProcessor
+from tagflux.watchers.content import ContentEventHandler
+from tagflux.watchers.static import StaticEventHandler
+from tagflux.watchers.templates import TemplateEventHandler
+from time import time
+from watchdog.observers import Observer
+from yaml import full_load
+
+argument_parser = ArgumentParser("tagflux")
+argument_parser.add_argument("--config", required=True, default="./tagflux.yml", metavar="CONFIG_PATH", help="a path to a YAML configuration file")
+argument_parser.add_argument("--watch", required=False, action="store_true", help="continuously watch for changes and rebuild if necessary")
+argument_parser.add_argument("--executor", required=False, default=None, choices=["thread", "process"], help="force specific task executor")
+arguments = argument_parser.parse_args()
+
+config_path = abspath(arguments.config)
+
+with open(config_path, "r") as config_file:
+    config_dict = full_load(config_file)
+
+base_path = dirname(config_path)
+config: Config = from_dict(Config, config_dict)
+config.content.path = join(base_path, config.content.path)
+config.output.path = join(base_path, config.output.path)
+config.static.path = join(base_path, config.static.path)
+config.templates.path = join(base_path, config.templates.path)
+
+executor_classes = {
+    "thread": ThreadPoolExecutor,
+    "process": ProcessPoolExecutor,
+}
+content_executor_class = executor_classes.get(arguments.executor, ThreadPoolExecutor)
+static_executor_class = executor_classes.get(arguments.executor, ProcessPoolExecutor)
+template_executor_class = executor_classes.get(arguments.executor, ProcessPoolExecutor)
+
+print("Fluxing...")
+
+since = time()
+
+content_executor = content_executor_class()
+page_catalog = PageCatalog(config)
+page_catalog.load_all(content_executor)
+content_executor.shutdown()
+
+until = time()
+delay = int(1000 * (until - since))
+
+print(f"X [{delay}ms] {config.content.path}")
+
+since = time()
+
+static_executor = static_executor_class()
+static_processor = StaticProcessor(config)
+static_processor.process_all(static_executor)
+static_executor.shutdown()
+
+until = time()
+delay = int(1000 * (until - since))
+
+print(f"X [{delay}ms] {config.static.path}")
+since = time()
+
+template_executor = template_executor_class()
+template_processor = TemplateProcessor(config, page_catalog)
+template_processor.process_all(template_executor, False)
+template_executor.shutdown()
+
+until = time()
+delay = int(1000 * (until - since))
+
+print(f"X [{delay}ms] {config.templates.path}")
+
+if arguments.watch:
+    content_executor = content_executor_class()
+    static_executor = static_executor_class()
+    template_executor = template_executor_class()
+
+    print("Watching...")
+    observer = Observer()
+    observer.schedule(ContentEventHandler(content_executor, page_catalog, template_processor), config.content.path, recursive=True)
+    observer.schedule(StaticEventHandler(static_executor, static_processor), config.static.path, recursive=True)
+    observer.schedule(TemplateEventHandler(template_executor, page_catalog, template_processor), config.templates.path, recursive=True)
+    observer.start()
+
+    try:
+        while observer.is_alive():
+            observer.join(1)
+    finally:
+        observer.stop()
+        observer.join()
tagflux-0.1.0/tagflux/config.py
ADDED
@@ -0,0 +1,61 @@
+from dataclasses import dataclass
+from typing import Any
+from typing import Optional
+from typing import Literal
+
+
+@dataclass
+class ConfigContent:
+    path: str
+    variants: list[str]
+
+
+@dataclass
+class ConfigTemplates:
+    path: str
+
+
+@dataclass
+class ConfigStaticOptimizationImage:
+    extensions: list[str]
+    formats: dict[Literal["JPEG", "PNG", "WEBP"], dict[str, Any]]
+    resize: Optional[list[str]]
+
+
+@dataclass
+class ConfigStaticOptimizationStyle:
+    extensions: list[str]
+    formats: dict[Literal["CSS"], dict]
+
+
+@dataclass
+class ConfigStaticOptimizationScript:
+    extensions: list[str]
+    formats: dict[Literal["JS"], dict]
+
+
+@dataclass
+class ConfigStaticOptimizations:
+    images: dict[str, ConfigStaticOptimizationImage]
+    styles: dict[str, ConfigStaticOptimizationStyle]
+    scripts: dict[str, ConfigStaticOptimizationScript]
+
+
+@dataclass
+class ConfigStatic:
+    path: str
+    optimizations: ConfigStaticOptimizations
+
+
+@dataclass
+class ConfigOutput:
+    path: str
+
+
+@dataclass
+class Config:
+    context: dict
+    content: ConfigContent
+    templates: ConfigTemplates
+    static: ConfigStatic
+    output: ConfigOutput
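
The Config dataclasses above define the schema of the YAML file passed to tagflux via --config. Below is a minimal sketch of a matching configuration dict loaded the same way __main__.py does; every path, pattern, and option value is a hypothetical placeholder, not something shipped by the package.

# Sketch only: a hypothetical configuration validated against tagflux's
# Config dataclasses with dacite, mirroring the from_dict call in __main__.py.
from dacite import from_dict
from tagflux.config import Config

config_dict = {
    "context": {"site_name": "Example"},                        # free-form template context
    "content": {
        "path": "content",                                       # resolved relative to the config file
        "variants": [r".*\.(?P<lang>en|de)\.html"],              # named group drives variant matching
    },
    "templates": {"path": "templates"},
    "static": {
        "path": "static",
        "optimizations": {
            "images": {
                "/img": {
                    "extensions": [".jpg", ".png"],
                    "formats": {"WEBP": {"extension": ".webp", "quality": 80}},
                    "resize": ["50%", "800w"],
                }
            },
            "styles": {"/css": {"extensions": [".css"], "formats": {"CSS": {}}}},
            "scripts": {"/js": {"extensions": [".js"], "formats": {"JS": {}}}},
        },
    },
    "output": {"path": "output"},
}

config = from_dict(Config, config_dict)
print(config.static.optimizations.images["/img"].resize)        # ['50%', '800w']
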
tagflux-0.1.0/tagflux/processors/__init__.py
ADDED
File without changes
tagflux-0.1.0/tagflux/processors/content.py
ADDED
@@ -0,0 +1,368 @@
+from collections import defaultdict
+from concurrent.futures import ProcessPoolExecutor
+from dataclasses import dataclass
+from datetime import datetime
+from datetime import UTC
+from os import scandir
+from os import stat
+from os.path import basename
+from os.path import dirname
+from os.path import join
+from os.path import splitext
+from re import compile
+from re import fullmatch
+from re import Pattern
+from re import UNICODE
+from subprocess import run
+from tagflux.config import Config
+from yaml import full_load
+
+FRONTMATTER_PREFIX = "---\n"
+FRONTMATTER_SUFFIX = "\n---\n"
+
+
+@dataclass
+class PageFileInfo:
+    absolute_path: str
+    relative_path: str
+    name: str
+    modify_timestamp: int
+
+
+@dataclass
+class PageGit:
+    author_name: str
+    author_email: str
+    author_timestamp: datetime
+    committer_name: str
+    committer_email: str
+    committer_timestamp: datetime
+    hash: str
+
+
+@dataclass
+class PageMarkup:
+    absolute_path: str
+    relative_path: str
+    uri: str
+    git: PageGit
+    frontmatter: dict
+    content_md: str
+    modify_timestamp: int
+    update_timestamp: datetime
+    variants: dict[str, dict[str, "PageMarkup"]]
+
+
+class PageCatalog:
+    def _load_git_log(self, absolute_path: str):
+        git_output = run(
+            [
+                "git",
+                "rev-parse",
+                "--show-toplevel",
+            ],
+            capture_output=True,
+            check=True,
+            text=True,
+            cwd=absolute_path,
+        )
+        repository_path = git_output.stdout.strip()
+        assert absolute_path.startswith(repository_path)
+        prefix = absolute_path[len(repository_path) :].lstrip("/")
+        git_output = run(
+            [
+                "git",
+                "log",
+                "--format=%x1F%an%x1F%ae%x1F%at%x1F%cn%x1F%ce%x1F%ct%x1F%H",
+                "--name-only",
+            ],
+            capture_output=True,
+            check=True,
+            text=True,
+            cwd=absolute_path,
+        )
+        git_output_lines = git_output.stdout.splitlines()
+        page_git: PageGit = None
+
+        for git_output_line in git_output_lines:
+            if git_output_line.startswith("\u001f"):
+                parts = git_output_line.split("\u001f")
+                page_git = PageGit(
+                    parts[1], parts[2], datetime.fromtimestamp(int(parts[3]), UTC), parts[4], parts[5], datetime.fromtimestamp(int(parts[6]), UTC), parts[7]
+                )
+            elif (len(git_output_line) > 0) and (git_output_line.startswith(prefix)):
+                self.git_log[git_output_line[len(prefix) :]] = page_git
+
+    def _find_page_markups(self, absolute_path: str, relative_path: str):
+        page_file_infos: list[PageFileInfo] = []
+
+        with scandir(absolute_path) as it:
+            for entry in it:
+                if not entry.name.startswith("."):
+                    if entry.is_dir():
+                        page_file_infos.extend(self._find_page_markups(join(absolute_path, entry.name), join(relative_path, entry.name)))
+                    elif entry.is_file():
+                        name, ext = splitext(entry.name)
+
+                        if ext == ".md":
+                            page_file_infos.append(
+                                PageFileInfo(
+                                    join(absolute_path, entry.name),
+                                    join(relative_path, entry.name),
+                                    name,
+                                    entry.stat().st_mtime,
+                                )
+                            )
+
+        return page_file_infos
+
+    def _load_page_markup(self, git_log: dict, page_file_info: PageFileInfo) -> PageMarkup:
+        with open(page_file_info.absolute_path) as file:
+            file_content = file.read()
+
+        file_content = file_content.replace("\r\n", "\n").replace("\r", "\n")
+
+        if not file_content.startswith(FRONTMATTER_PREFIX):
+            return None
+
+        frontmatter_suffix_index = file_content.index(FRONTMATTER_SUFFIX, 1)
+
+        if frontmatter_suffix_index <= 0:
+            return None
+
+        frontmatter_text = file_content[len(FRONTMATTER_PREFIX) : frontmatter_suffix_index]
+        content_md = file_content[frontmatter_suffix_index + len(FRONTMATTER_SUFFIX) :]
+        frontmatter = full_load(frontmatter_text) or {}
+        page_git: PageGit = git_log.get(page_file_info.relative_path)
+        uri_format = frontmatter.get("uri_format", "{name}.html")
+
+        if page_git:
+            page_uri = (
+                dirname(page_file_info.relative_path)
+                + "/"
+                + str.format(
+                    uri_format,
+                    name=page_file_info.name,
+                    hash=page_git.hash,
+                    author_timestamp=page_git.author_timestamp,
+                    committer_timestamp=page_git.committer_timestamp,
+                )
+            )
+        else:
+            page_uri = (
+                dirname(page_file_info.relative_path)
+                + "/"
+                + str.format(
+                    uri_format,
+                    name=page_file_info.name,
+                    hash="0000000000000000000000000000000000000000",
+                    author_timestamp=datetime.min,
+                    committer_timestamp=datetime.min,
+                )
+            )
+
+        return PageMarkup(
+            page_file_info.absolute_path,
+            page_file_info.relative_path,
+            page_uri,
+            page_git,
+            frontmatter,
+            content_md,
+            page_file_info.modify_timestamp,
+            datetime.fromtimestamp(0, UTC),
+            defaultdict(dict),
+        )
+
+    def _load_page_markups(self, pool_executor: ProcessPoolExecutor, page_file_infos: list[PageFileInfo]):
+        results = []
+
+        for page_file_info in page_file_infos:
+            results.append(pool_executor.submit(self._load_page_markup, self.git_log, page_file_info))
+
+        self.pages: list[PageMarkup] = []
+
+        for result in results:
+            page_markup = result.result()
+
+            if page_markup:
+                self.pages.append(page_markup)
+
+    def _find_page_variants(self):
+        page_map = {}
+
+        for page in self.pages:
+            page_map[page.uri] = page
+
+        variants: dict[str, dict[str, dict[str, PageMarkup]]] = defaultdict(lambda: defaultdict(dict))
+        variant_patterns: list[Pattern] = []
+
+        for variant_pattern in self.config.content.variants:
+            variant_patterns.append(compile(variant_pattern, UNICODE))
+
+        for page in self.pages:
+            for variant_pattern in variant_patterns:
+                match = variant_pattern.fullmatch(page.uri)
+
+                if match and match.lastgroup and match.lastindex:
+                    generic_uri = page.uri[: match.start(match.lastindex)] + "*" + page.uri[match.end(match.lastindex) :]
+                    key = match.group(match.lastindex)
+                    variants[match.lastgroup][generic_uri][key] = page
+
+        for variant_group, variant_group_data in variants.items():
+            for variant_data in variant_group_data.values():
+                for key1, page1 in variant_data.items():
+                    for key2, page2 in variant_data.items():
+                        if key1 != key2:
+                            page1.variants[variant_group][key2] = page2
+
+    def __init__(self, config: Config):
+        self.config = config
+        self.git_log: dict[str, PageGit] = {}
+        self.pages: list[PageMarkup] = []
+
+    def load_all(self, pool_executor: ProcessPoolExecutor):
+        self.git_log: dict[str, PageGit] = {}
+        self._load_git_log(self.config.content.path)
+        page_file_infos = self._find_page_markups(self.config.content.path, "/")
+        self._load_page_markups(pool_executor, page_file_infos)
+        self._find_page_variants()
+
+    def load_one(self, pool_executor: ProcessPoolExecutor, absolute_path: str):
+        self.git_log: dict[str, PageGit] = {}
+        self._load_git_log(self.config.content.path)
+
+        for page in self.pages:
+            if page.absolute_path == absolute_path:
+                try:
+                    page_file_stat = stat(page.absolute_path)
+                    page_file_info = PageFileInfo(
+                        page.absolute_path,
+                        page.relative_path,
+                        basename(page.absolute_path),
+                        page_file_stat.st_mtime,
+                    )
+                    page_markup = self._load_page_markup(self.git_log, page_file_info)
+                    page.git = page_markup.git
+                    page.frontmatter = page_markup.frontmatter
+                    page.content_md = page_markup.content_md
+                    page.modify_timestamp = page_markup.modify_timestamp
+
+                    return page
+                except:
+                    break
+
+        self.load_all(pool_executor)
+
+    def sort(self):
+        self.pages.sort(key=lambda p: p.update_timestamp, reverse=True)
+
+    def get_by_absolute_path(self, absolute_path: str) -> PageMarkup:
+        for page in self.pages:
+            if page.absolute_path == absolute_path:
+                return page
+
+        return None
+
+    def filter(self, **filter_conditions):
+        def match(obj):
+            for key, value in filter_conditions.items():
+                *attribute_names, operation_name = key.split("__")
+                attribute_value = obj
+
+                for attribute_name in attribute_names:
+                    if isinstance(attribute_value, dict):
+                        attribute_value = attribute_value.get(attribute_name, None)
+                    else:
+                        attribute_value = getattr(attribute_value, attribute_name, None)
+
+                    if attribute_value is None:
+                        break
+
+                if attribute_value is None:
+                    continue
+
+                match operation_name:
+                    case "eq":
+                        return attribute_value == value
+                    case "gt":
+                        return attribute_value > value
+                    case "gte":
+                        return attribute_value >= value
case "lt":
|
|
292
|
+
return attribute_value > value
|
|
293
|
+
case "lte":
|
|
294
|
+
return attribute_value >= value
+                    case "startswith":
+                        return attribute_value.startswith(value)
+                    case "endswith":
+                        return attribute_value.endswith(value)
+                    case "re":
+                        return fullmatch(value, attribute_value)
+                    case _:
+                        return getattr(attribute_value, operation_name) == value
+
+            return True
+
+        return [x for x in self.pages if match(x)]
+
+    def aggregate(self, aggegate_descriptor, **filter_conditions):
+        pages = self.filter(**filter_conditions)
+        *attribute_names, operation_name = aggegate_descriptor.split("__")
+
+        def attribute_values(pages):
+            values = []
+
+            for page in pages:
+                attribute_value = page
+
+                for attribute_name in attribute_names:
+                    if isinstance(attribute_value, dict):
+                        attribute_value = attribute_value.get(attribute_name, None)
+                    else:
+                        attribute_value = getattr(attribute_value, attribute_name, None)
+
+                    if attribute_value is None:
+                        break
+
+                if attribute_value is not None:
+                    values.append(attribute_value)
+
+            return values
+
+        match operation_name:
+            case "count":
+                return len(pages)
+            case "sum":
+                result = None
+
+                for attribute_value in attribute_values(pages):
+                    result = attribute_value if result is None else (result + attribute_value)
+
+                return result
+            case "average" | "avg":
+                result = 0.0
+
+                for attribute_value in attribute_values(pages):
+                    result += float(attribute_value)
+
+                return result / len(pages)
+            case "minimum" | "min":
+                result = None
+
+                for attribute_value in attribute_values(pages):
+                    result = attribute_value if result is None else min(result, attribute_value)
+
+                return result
+            case "maximum" | "max":
+                result = None
+
+                for attribute_value in attribute_values(pages):
+                    result = attribute_value if result is None else max(result, attribute_value)
+
+                return result
+            case "unique":
+                return set(attribute_values(pages))
+            case _:
+                return None
+
+        return None
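
PageCatalog.filter and PageCatalog.aggregate above implement a small double-underscore lookup syntax: everything before the final "__" segment is an attribute or dict path into each page, and the last segment names the operation. A hedged sketch of how they might be called, for example from a template where the catalog is exposed as "pages"; the URI prefix and frontmatter keys used here are hypothetical:

# Sketch only: hypothetical queries against a PageCatalog instance.
from datetime import datetime, UTC

from tagflux.processors.content import PageCatalog


def example_queries(pages: PageCatalog):
    # attribute path "uri", operation "startswith"
    blog_posts = pages.filter(uri__startswith="/blog/")
    # dict lookup into frontmatter, then equality
    drafts = pages.filter(frontmatter__draft__eq=True)
    # attribute path through PageGit, then >=
    recent = pages.filter(git__committer_timestamp__gte=datetime(2025, 1, 1, tzinfo=UTC))

    post_count = pages.aggregate("uri__count", uri__startswith="/blog/")
    newest_commit = pages.aggregate("git__committer_timestamp__max")
    categories = pages.aggregate("frontmatter__category__unique")
    return blog_posts, drafts, recent, post_count, newest_commit, categories
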
tagflux-0.1.0/tagflux/processors/static.py
ADDED
@@ -0,0 +1,166 @@
+from concurrent.futures import ProcessPoolExecutor
+from rjsmin import jsmin
+from rcssmin import cssmin
+from os import stat
+from os.path import splitext
+from PIL import Image
+from shutil import copy2
+from shutil import copytree
+from tagflux.config import Config
+
+
+class StaticProcessor:
+    def __init__(self, config: Config):
+        self.config = config
+
+    def _copy_file(self, src_path, dst_path):
+        should_copy = True
+
+        if src_path.startswith(self.config.static.path) and dst_path.startswith(self.config.output.path):
+            try:
+                src_stat = stat(src_path)
+                dst_stat = stat(dst_path)
+
+                if src_stat.st_mtime < dst_stat.st_mtime:
+                    return
+            except FileNotFoundError:
+                pass
+
+            relative_src_path = src_path[len(self.config.static.path) :]
+
+            for base_path, config in self.config.static.optimizations.images.items():
+                if relative_src_path.startswith(base_path):
+                    dst_name, dst_ext = splitext(dst_path)
+
+                    if dst_ext in config.extensions:
+                        should_copy = False
+
+                        for format_name, format_parameters in config.formats.items():
+                            parameters = format_parameters.copy()
+                            extension = parameters.pop("extension")
+
+                            if config.resize:
+                                for size_name in config.resize:
+
+                                    if size_name.endswith("%"):
+                                        scale_value = int(10 * float(size_name[:-1]))
+                                        variant_dst_path = dst_name + f".x{scale_value:04d}" + extension
+                                        needs_update = True
+
+                                        try:
+                                            dst_stat = stat(variant_dst_path)
+
+                                            if src_stat.st_mtime < dst_stat.st_mtime:
+                                                needs_update = False
+                                        except FileNotFoundError:
+                                            pass
+
+                                        if needs_update:
+                                            src_image = Image.open(src_path).convert("RGB")
+                                            dst_image = src_image.resize(
+                                                (int(src_image.width * scale_value / 1000.0), int(src_image.height * scale_value / 1000.0)),
+                                                Image.Resampling.LANCZOS,
+                                            )
+                                            dst_image.save(variant_dst_path, format_name, **parameters)
+                                    elif size_name.endswith("w"):
+                                        width_value = int(size_name[:-1])
+                                        variant_dst_path = dst_name + f".w{width_value}" + extension
+                                        needs_update = True
+
+                                        try:
+                                            dst_stat = stat(variant_dst_path)
+
+                                            if src_stat.st_mtime < dst_stat.st_mtime:
+                                                needs_update = False
+                                        except FileNotFoundError:
+                                            pass
+
+                                        if needs_update:
+                                            src_image = Image.open(src_path).convert("RGB")
+                                            height_value = int(src_image.height * width_value / src_image.width)
+                                            dst_image = src_image.resize((width_value, height_value), Image.Resampling.LANCZOS)
+                                            dst_image.save(variant_dst_path, format_name, **parameters)
+                                    elif size_name.endswith("h"):
+                                        height_value = int(size_name[:-1])
+                                        variant_dst_path = dst_name + f".h{height_value}" + extension
+                                        needs_update = True
+
+                                        try:
+                                            dst_stat = stat(variant_dst_path)
+
+                                            if src_stat.st_mtime < dst_stat.st_mtime:
+                                                needs_update = False
+                                        except FileNotFoundError:
+                                            pass
+
+                                        if needs_update:
+                                            src_image = Image.open(src_path).convert("RGB")
+                                            width_value = int(src_image.width * height_value / src_image.height)
+                                            dst_image = src_image.resize((width_value, height_value), Image.Resampling.LANCZOS)
+                                            dst_image.save(variant_dst_path, format_name, **parameters)
+
+                            else:
+                                variant_dst_path = dst_name + extension
+                                needs_update = True
+
+                                try:
+                                    dst_stat = stat(variant_dst_path)
+
+                                    if src_stat.st_mtime < dst_stat.st_mtime:
+                                        needs_update = False
+                                except FileNotFoundError:
+                                    pass
+
+                                if needs_update:
+                                    src_image = Image.open(src_path).convert("RGB")
+                                    src_image.save(variant_dst_path, format_name, **parameters)
+
+            for base_path, config in self.config.static.optimizations.styles.items():
+                if relative_src_path.startswith(base_path):
+                    dst_name, dst_ext = splitext(dst_path)
+
+                    if dst_ext in config.extensions:
+                        should_copy = False
+
+                        with open(src_path, "rb") as style_file:
+                            style_data = style_file.read()
+
+                        style_data = cssmin(style_data)
+
+                        with open(dst_path, "wb") as style_file:
+                            style_file.write(style_data)
+
+            for base_path, config in self.config.static.optimizations.scripts.items():
+                if relative_src_path.startswith(base_path):
+                    dst_name, dst_ext = splitext(dst_path)
+
+                    if dst_ext in config.extensions:
+                        should_copy = False
+
+                        with open(src_path, "rb") as style_file:
+                            style_data = style_file.read()
+
+                        style_data = jsmin(style_data)
+
+                        with open(dst_path, "wb") as style_file:
+                            style_file.write(style_data)
+
+        if should_copy:
+            copy2(src_path, dst_path)
+
+    def process_all(self, pool_executor: ProcessPoolExecutor):
+        def copy(src_path, dst_path):
+            pool_executor.submit(
+                self._copy_file,
+                src_path=src_path,
+                dst_path=dst_path,
+            )
+
+        pool_executor.submit(copytree, src=self.config.static.path, dst=self.config.output.path, dirs_exist_ok=True, copy_function=copy)
+
+    def process_one(self, pool_executor: ProcessPoolExecutor, path: str):
+        pool_executor.submit(
+            self._copy_file,
+            src_path=self.config.static.path + path,
+            dst_path=self.config.output.path + path,
+        )
tagflux-0.1.0/tagflux/processors/template.py
ADDED
@@ -0,0 +1,121 @@
+from concurrent.futures import ProcessPoolExecutor
+from datetime import datetime
+from datetime import UTC
+from itertools import batched
+from jinja2.loaders import FileSystemLoader
+from jinja2.sandbox import SandboxedEnvironment
+from markdown import markdown
+from os import makedirs
+from os import stat
+from os.path import dirname
+from tagflux.config import Config
+from tagflux.processors.content import PageCatalog
+from tagflux.processors.content import PageMarkup
+
+
+def _render_all(page_catalog: PageCatalog, pages: list[PageMarkup], context: dict, template_path: str, output_path: str, update: bool):
+    template_loader = FileSystemLoader(template_path)
+    template_environment = SandboxedEnvironment(loader=template_loader)
+
+    for page in pages:
+        content_html = markdown(page.content_md, extensions=["admonition", "codehilite", "smarty", "toc"])
+        output_file_path = output_path + page.uri
+
+        try:
+            output_stat = stat(output_file_path)
+        except FileNotFoundError:
+            output_stat = None
+
+        template = template_environment.get_template(page.frontmatter.get("template", "default.html"))
+        template_stat = stat(template.filename)
+
+        if (output_stat is None) or (page.modify_timestamp >= output_stat.st_mtime) or (template_stat.st_mtime >= output_stat.st_mtime):
+            output_dir_path = dirname(output_file_path)
+            makedirs(output_dir_path, exist_ok=True)
+
+            new_page_html = template.render({"page": page, "pages": page_catalog, "content": content_html, "context": context})
+
+            if update:
+                try:
+                    with open(output_file_path, "r") as page_file:
+                        old_page_html = page_file.read()
+
+                    if old_page_html != new_page_html:
+                        page.update_timestamp = datetime.now(UTC)
+
+                        with open(output_file_path, "w") as page_file:
+                            page_file.write(new_page_html)
+                except FileNotFoundError:
+                    page.update_timestamp = datetime.now(UTC)
+
+                    with open(output_file_path, "w") as page_file:
+                        page_file.write(new_page_html)
+            else:
+                with open(output_file_path, "w") as page_file:
+                    page_file.write(new_page_html)
+
+
+def _render_one(page_catalog: PageCatalog, page: PageMarkup, context: dict, template_path: str, output_path: str, update: bool):
+    content_html = markdown(page.content_md, extensions=["admonition", "codehilite", "smarty", "toc"])
+    output_file_path = output_path + page.uri
+
+    try:
+        output_stat = stat(output_file_path)
+    except FileNotFoundError:
+        output_stat = None
+
+    if (output_stat is None) or (page.modify_timestamp >= output_stat.st_mtime):
+        output_dir_path = dirname(output_file_path)
+        makedirs(output_dir_path, exist_ok=True)
+        template_loader = FileSystemLoader(template_path)
+        template_environment = SandboxedEnvironment(loader=template_loader)
+        template = template_environment.get_template(page.frontmatter.get("template", "default.html"))
+        new_page_html = template.render({"page": page, "pages": page_catalog, "content": content_html, "context": context})
+
+        if update:
+            try:
+                with open(output_file_path, "r") as page_file:
+                    old_page_html = page_file.read()
+
+                if old_page_html != new_page_html:
+                    page.update_timestamp = datetime.now(UTC)
+
+                    with open(output_file_path, "w") as page_file:
+                        page_file.write(new_page_html)
+            except FileNotFoundError:
+                page.update_timestamp = datetime.now(UTC)
+
+                with open(output_file_path, "w") as page_file:
+                    page_file.write(new_page_html)
+        else:
+            with open(output_file_path, "w") as page_file:
+                page_file.write(new_page_html)
+
+
+class TemplateProcessor:
+    def __init__(self, config: Config, page_catalog: PageCatalog):
+        self.config = config
+        self.page_catalog = page_catalog
+
+    def process_all(self, pool_executor: ProcessPoolExecutor, update: bool):
+        for pages in batched(self.page_catalog.pages, 50):
+            pool_executor.submit(
+                _render_all,
+                self.page_catalog,
+                pages,
+                self.config.context,
+                self.config.templates.path,
+                self.config.output.path,
+                update,
+            )
+
+    def process_one(self, pool_executor: ProcessPoolExecutor, page: PageMarkup, update: bool):
+        pool_executor.submit(
+            _render_one,
+            self.page_catalog,
+            page,
+            self.config.context,
+            self.config.templates.path,
+            self.config.output.path,
+            update,
+        )
tagflux-0.1.0/tagflux/py.typed
ADDED
File without changes
tagflux-0.1.0/tagflux/watchers/__init__.py
ADDED
File without changes
tagflux-0.1.0/tagflux/watchers/content.py
ADDED
@@ -0,0 +1,49 @@
+from concurrent.futures import ProcessPoolExecutor
+from tagflux.processors.content import PageCatalog
+from tagflux.processors.template import TemplateProcessor
+from time import time
+from watchdog.events import FileSystemEvent
+from watchdog.events import FileSystemEventHandler
+
+
+class ContentEventHandler(FileSystemEventHandler):
+    def __init__(self, pool_executor: ProcessPoolExecutor, page_catalog: PageCatalog, template_processor: TemplateProcessor):
+        self.pool_executor = pool_executor
+        self.page_catalog = page_catalog
+        self.template_processor = template_processor
+
+    def _render_all(self, code: str, event: FileSystemEvent):
+        if not event.is_directory:
+            since = time()
+            self.page_catalog.load_all(self.pool_executor)
+            self.template_processor.process_all(self.pool_executor, True)
+            self.page_catalog.sort()
+            until = time()
+            delay = int(1000 * (until - since))
+            print(f"{code} [{delay}ms] {event.src_path}")
+
+    def _render_one(self, code: str, event: FileSystemEvent):
+        if not event.is_directory:
+            since = time()
+            page = self.page_catalog.load_one(self.pool_executor, event.src_path)
+
+            if page:
+                self.template_processor.process_one(self.pool_executor, page, True)
+            else:
+                self.template_processor.process_all(self.pool_executor, True)
+
+            until = time()
+            delay = int(1000 * (until - since))
+            print(f"{code} [{delay}ms] {event.src_path}")
+
+    def on_created(self, event: FileSystemEvent):
+        self._render_all("+", event)
+
+    def on_deleted(self, event: FileSystemEvent):
+        self._render_all("-", event)
+
+    def on_moved(self, event: FileSystemEvent):
+        self._render_all("M", event)
+
+    def on_closed(self, event: FileSystemEvent) -> None:
+        self._render_one("U", event)
tagflux-0.1.0/tagflux/watchers/static.py
ADDED
@@ -0,0 +1,47 @@
+from concurrent.futures import ProcessPoolExecutor
+from tagflux.processors.static import StaticProcessor
+from time import time
+from watchdog.events import FileSystemEvent
+from watchdog.events import FileSystemEventHandler
+
+
+class StaticEventHandler(FileSystemEventHandler):
+    def __init__(self, pool_executor: ProcessPoolExecutor, static_processor: StaticProcessor):
+        self.pool_executor = pool_executor
+        self.static_processor = static_processor
+
+    def on_created(self, event: FileSystemEvent):
+        since = time()
+
+        if event.is_directory:
+            self.static_processor.process_all(self.pool_executor)
+        else:
+            path = event.src_path[len(self.static_processor.config.static.path) :]
+            self.static_processor.process_one(self.pool_executor, path)
+
+        until = time()
+        delay = int(1000 * (until - since))
+        print(f"+ [{delay}ms] {event.src_path}")
+
+    def on_moved(self, event: FileSystemEvent):
+        since = time()
+
+        if event.is_directory:
+            self.static_processor.process_all(self.pool_executor)
+        else:
+            path = event.dest_path[len(self.static_processor.config.static.path) :]
+            self.static_processor.process_one(self.pool_executor, path)
+
+        until = time()
+        delay = int(1000 * (until - since))
+        print(f"M [{delay}ms] {event.dest_path}")
+
+    def on_closed(self, event: FileSystemEvent) -> None:
+        since = time()
+
+        path = event.src_path[len(self.static_processor.config.static.path) :]
+        self.static_processor.process_one(self.pool_executor, path)
+
+        until = time()
+        delay = int(1000 * (until - since))
+        print(f"U [{delay}ms] {event.src_path}")
tagflux-0.1.0/tagflux/watchers/templates.py
ADDED
@@ -0,0 +1,34 @@
+from concurrent.futures import ProcessPoolExecutor
+from tagflux.processors.content import PageCatalog
+from tagflux.processors.template import TemplateProcessor
+from time import time
+from watchdog.events import FileSystemEvent
+from watchdog.events import FileSystemEventHandler
+
+
+class TemplateEventHandler(FileSystemEventHandler):
+    def __init__(self, pool_executor: ProcessPoolExecutor, page_catalog: PageCatalog, template_processor: TemplateProcessor):
+        self.pool_executor = pool_executor
+        self.page_catalog = page_catalog
+        self.template_processor = template_processor
+
+    def _render(self, code: str, event: FileSystemEvent):
+        if not event.is_directory:
+            since = time()
+            self.template_processor.process_all(self.pool_executor, True)
+            self.page_catalog.sort()
+            until = time()
+            delay = int(1000 * (until - since))
+            print(f"{code} [{delay}ms] {event.src_path}")
+
+    def on_created(self, event: FileSystemEvent):
+        self._render("+", event)
+
+    def on_deleted(self, event: FileSystemEvent):
+        self._render("-", event)
+
+    def on_moved(self, event: FileSystemEvent):
+        self._render("M", event)
+
+    def on_closed(self, event: FileSystemEvent) -> None:
+        self._render("U", event)