chgksuite 0.26.0b3__tar.gz → 0.26.0b4__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {chgksuite-0.26.0b3/chgksuite.egg-info → chgksuite-0.26.0b4}/PKG-INFO +1 -1
- chgksuite-0.26.0b4/chgksuite/handouter/__init__.py +0 -0
- chgksuite-0.26.0b4/chgksuite/handouter/gen.py +143 -0
- chgksuite-0.26.0b4/chgksuite/handouter/installer.py +245 -0
- chgksuite-0.26.0b4/chgksuite/handouter/pack.py +79 -0
- chgksuite-0.26.0b4/chgksuite/handouter/runner.py +234 -0
- chgksuite-0.26.0b4/chgksuite/handouter/tex_internals.py +47 -0
- chgksuite-0.26.0b4/chgksuite/handouter/utils.py +67 -0
- chgksuite-0.26.0b4/chgksuite/version.py +1 -0
- {chgksuite-0.26.0b3 → chgksuite-0.26.0b4/chgksuite.egg-info}/PKG-INFO +1 -1
- {chgksuite-0.26.0b3 → chgksuite-0.26.0b4}/chgksuite.egg-info/SOURCES.txt +7 -0
- {chgksuite-0.26.0b3 → chgksuite-0.26.0b4}/setup.py +1 -1
- chgksuite-0.26.0b3/chgksuite/version.py +0 -1
- {chgksuite-0.26.0b3 → chgksuite-0.26.0b4}/LICENSE +0 -0
- {chgksuite-0.26.0b3 → chgksuite-0.26.0b4}/MANIFEST.in +0 -0
- {chgksuite-0.26.0b3 → chgksuite-0.26.0b4}/README.md +0 -0
- {chgksuite-0.26.0b3 → chgksuite-0.26.0b4}/chgksuite/__init__.py +0 -0
- {chgksuite-0.26.0b3 → chgksuite-0.26.0b4}/chgksuite/__main__.py +0 -0
- {chgksuite-0.26.0b3 → chgksuite-0.26.0b4}/chgksuite/cli.py +0 -0
- {chgksuite-0.26.0b3 → chgksuite-0.26.0b4}/chgksuite/common.py +0 -0
- {chgksuite-0.26.0b3 → chgksuite-0.26.0b4}/chgksuite/composer/__init__.py +0 -0
- {chgksuite-0.26.0b3 → chgksuite-0.26.0b4}/chgksuite/composer/chgksuite_parser.py +0 -0
- {chgksuite-0.26.0b3 → chgksuite-0.26.0b4}/chgksuite/composer/composer_common.py +0 -0
- {chgksuite-0.26.0b3 → chgksuite-0.26.0b4}/chgksuite/composer/db.py +0 -0
- {chgksuite-0.26.0b3 → chgksuite-0.26.0b4}/chgksuite/composer/docx.py +0 -0
- {chgksuite-0.26.0b3 → chgksuite-0.26.0b4}/chgksuite/composer/latex.py +0 -0
- {chgksuite-0.26.0b3 → chgksuite-0.26.0b4}/chgksuite/composer/lj.py +0 -0
- {chgksuite-0.26.0b3 → chgksuite-0.26.0b4}/chgksuite/composer/openquiz.py +0 -0
- {chgksuite-0.26.0b3 → chgksuite-0.26.0b4}/chgksuite/composer/pptx.py +0 -0
- {chgksuite-0.26.0b3 → chgksuite-0.26.0b4}/chgksuite/composer/reddit.py +0 -0
- {chgksuite-0.26.0b3 → chgksuite-0.26.0b4}/chgksuite/composer/stats.py +0 -0
- {chgksuite-0.26.0b3 → chgksuite-0.26.0b4}/chgksuite/composer/telegram.py +0 -0
- {chgksuite-0.26.0b3 → chgksuite-0.26.0b4}/chgksuite/composer/telegram_bot.py +0 -0
- {chgksuite-0.26.0b3 → chgksuite-0.26.0b4}/chgksuite/composer/telegram_parser.py +0 -0
- {chgksuite-0.26.0b3 → chgksuite-0.26.0b4}/chgksuite/parser.py +0 -0
- {chgksuite-0.26.0b3 → chgksuite-0.26.0b4}/chgksuite/parser_db.py +0 -0
- {chgksuite-0.26.0b3 → chgksuite-0.26.0b4}/chgksuite/resources/cheader.tex +0 -0
- {chgksuite-0.26.0b3 → chgksuite-0.26.0b4}/chgksuite/resources/fix-unnumbered-sections.sty +0 -0
- {chgksuite-0.26.0b3 → chgksuite-0.26.0b4}/chgksuite/resources/labels_by.toml +0 -0
- {chgksuite-0.26.0b3 → chgksuite-0.26.0b4}/chgksuite/resources/labels_by_tar.toml +0 -0
- {chgksuite-0.26.0b3 → chgksuite-0.26.0b4}/chgksuite/resources/labels_en.toml +0 -0
- {chgksuite-0.26.0b3 → chgksuite-0.26.0b4}/chgksuite/resources/labels_kz_cyr.toml +0 -0
- {chgksuite-0.26.0b3 → chgksuite-0.26.0b4}/chgksuite/resources/labels_ru.toml +0 -0
- {chgksuite-0.26.0b3 → chgksuite-0.26.0b4}/chgksuite/resources/labels_sr.toml +0 -0
- {chgksuite-0.26.0b3 → chgksuite-0.26.0b4}/chgksuite/resources/labels_ua.toml +0 -0
- {chgksuite-0.26.0b3 → chgksuite-0.26.0b4}/chgksuite/resources/labels_uz.toml +0 -0
- {chgksuite-0.26.0b3 → chgksuite-0.26.0b4}/chgksuite/resources/labels_uz_cyr.toml +0 -0
- {chgksuite-0.26.0b3 → chgksuite-0.26.0b4}/chgksuite/resources/pptx_config.toml +0 -0
- {chgksuite-0.26.0b3 → chgksuite-0.26.0b4}/chgksuite/resources/regexes_by.json +0 -0
- {chgksuite-0.26.0b3 → chgksuite-0.26.0b4}/chgksuite/resources/regexes_en.json +0 -0
- {chgksuite-0.26.0b3 → chgksuite-0.26.0b4}/chgksuite/resources/regexes_kz_cyr.json +0 -0
- {chgksuite-0.26.0b3 → chgksuite-0.26.0b4}/chgksuite/resources/regexes_ru.json +0 -0
- {chgksuite-0.26.0b3 → chgksuite-0.26.0b4}/chgksuite/resources/regexes_sr.json +0 -0
- {chgksuite-0.26.0b3 → chgksuite-0.26.0b4}/chgksuite/resources/regexes_ua.json +0 -0
- {chgksuite-0.26.0b3 → chgksuite-0.26.0b4}/chgksuite/resources/regexes_uz_cyr.json +0 -0
- {chgksuite-0.26.0b3 → chgksuite-0.26.0b4}/chgksuite/resources/template.docx +0 -0
- {chgksuite-0.26.0b3 → chgksuite-0.26.0b4}/chgksuite/resources/template.pptx +0 -0
- {chgksuite-0.26.0b3 → chgksuite-0.26.0b4}/chgksuite/resources/template_shorin.pptx +0 -0
- {chgksuite-0.26.0b3 → chgksuite-0.26.0b4}/chgksuite/resources/trello.json +0 -0
- {chgksuite-0.26.0b3 → chgksuite-0.26.0b4}/chgksuite/trello.py +0 -0
- {chgksuite-0.26.0b3 → chgksuite-0.26.0b4}/chgksuite/typotools.py +0 -0
- {chgksuite-0.26.0b3 → chgksuite-0.26.0b4}/chgksuite/vulture_whitelist.py +0 -0
- {chgksuite-0.26.0b3 → chgksuite-0.26.0b4}/chgksuite.egg-info/dependency_links.txt +0 -0
- {chgksuite-0.26.0b3 → chgksuite-0.26.0b4}/chgksuite.egg-info/entry_points.txt +0 -0
- {chgksuite-0.26.0b3 → chgksuite-0.26.0b4}/chgksuite.egg-info/requires.txt +0 -0
- {chgksuite-0.26.0b3 → chgksuite-0.26.0b4}/chgksuite.egg-info/top_level.txt +0 -0
- {chgksuite-0.26.0b3 → chgksuite-0.26.0b4}/history.md +0 -0
- {chgksuite-0.26.0b3 → chgksuite-0.26.0b4}/setup.cfg +0 -0
chgksuite-0.26.0b4/chgksuite/handouter/gen.py
@@ -0,0 +1,143 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+import itertools
+import os
+import re
+from collections import defaultdict
+
+import toml
+
+from chgksuite.common import get_source_dirs
+from chgksuite.composer.chgksuite_parser import parse_4s
+from chgksuite.composer.composer_common import _parse_4s_elem, parseimg
+from chgksuite.handouter.utils import read_file, write_file
+
+
+def postprocess(s):
+    return s.replace("\\_", "_")
+
+
+def generate_handouts_list(handouts, output_dir, base_name, parsed):
+    """Generate a human-readable file with question numbers that have handouts."""
+    question_numbers = sorted([int(h["for_question"]) for h in handouts])
+
+    content = "ВОПРОСЫ С РАЗДАТОЧНЫМ МАТЕРИАЛОМ:\n\n"
+    content += f"Сквозная нумерация:\n{', '.join(map(str, question_numbers))}\n\n"
+
+    content += "По турам:\n"
+    tour = 0
+    by_tour = {}
+    for tup in parsed:
+        if tup[0] == "section":
+            tour += 1
+            by_tour[tour] = []
+        if tup[0] == "Question":
+            if tour == 0:
+                tour = 1
+                by_tour[tour] = []
+            if tup[1]["number"] in question_numbers:
+                by_tour[tour].append(tup[1]["number"])
+
+    for tour in sorted(by_tour):
+        tour_handouts = by_tour[tour]
+        if tour_handouts:
+            content += f"Тур {tour}: {', '.join(map(str, tour_handouts))}\n"
+        else:
+            content += f"Тур {tour}: нет раздаток\n"
+
+    output_fn = os.path.join(output_dir, base_name + "_handouts_list.txt")
+    write_file(output_fn, content)
+    print(f"File with list of handouts: {output_fn}")
+    print(content)
+
+
+def generate_handouts(args):
+    _, resourcedir = get_source_dirs()
+    labels = toml.loads(
+        read_file(os.path.join(resourcedir, f"labels_{args.lang}.toml"))
+    )
+    handout_re = re.compile(
+        "\\["
+        + labels["question_labels"]["handout_short"]
+        + ".+?:( |\n)(?P<handout_text>.+?)\\]",
+        flags=re.DOTALL,
+    )
+
+    cnt = read_file(args.filename)
+    parsed = parse_4s(cnt)
+
+    questions = [q[1] for q in parsed if q[0] == "Question"]
+    handouts = []
+    for q in questions:
+        if isinstance(q["question"], list):
+            question_text = "\n".join(itertools.chain.from_iterable(q["question"]))
+        else:
+            question_text = q["question"]
+        question_text_lower = question_text.lower()
+        srch = handout_re.search(question_text)
+        if srch:
+            text = postprocess(srch.group("handout_text"))
+            elems = _parse_4s_elem(text)
+            img = [el for el in elems if el[0] == "img"]
+            if img:
+                try:
+                    parsed_img = parseimg(img[0][1])
+                except:
+                    print(
+                        f"Image file for question {q['number']} not found, add it by hand"
+                    )
+                    continue
+            else:
+                parsed_img = None
+            res = {"for_question": q["number"]}
+            if parsed_img:
+                res["image"] = parsed_img["imgfile"]
+            else:
+                res["text"] = text
+            handouts.append(res)
+        elif (
+            "раздат" in question_text_lower
+            or "роздан" in question_text_lower
+            or "(img" in question_text_lower
+        ):
+            print(f"probably badly formatted handout for question {q['number']}")
+            res = {"for_question": q["number"], "text": postprocess(question_text)}
+            handouts.append(res)
+    result = []
+    result_by_question = defaultdict(list)
+    for handout in handouts:
+        if "image" in handout:
+            key = "image"
+            prefix = "image: "
+        else:
+            key = "text"
+            prefix = ""
+        value = handout[key]
+        formatted = (
+            f"for_question: {handout['for_question']}\n" if not args.separate else ""
+        ) + f"columns: 3\n\n{prefix}{value}"
+        result.append(formatted)
+        result_by_question[handout["for_question"]].append(formatted)
+    output_dir = os.path.dirname(os.path.abspath(args.filename))
+    bn, _ = os.path.splitext(os.path.basename(args.filename))
+
+    if args.separate:
+        for k, v in result_by_question.items():
+            if len(v) > 1:
+                for i, cnt in enumerate(v):
+                    output_fn = os.path.join(
+                        output_dir, f"{bn}_q{k.zfill(2)}_{i + 1}.txt"
+                    )
+                    print(output_fn)
+                    write_file(output_fn, cnt)
+            else:
+                output_fn = os.path.join(output_dir, f"{bn}_q{str(k).zfill(2)}.txt")
+                print(output_fn)
+                write_file(output_fn, v[0])
+    else:
+        output_fn = os.path.join(output_dir, bn + "_handouts.txt")
+        print(f"output filename: {output_fn}")
+        write_file(output_fn, "\n---\n".join(result))
+
+    if args.list_handouts:
+        generate_handouts_list(handouts, output_dir, bn, parsed)
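
Note: the blocks that generate_handouts writes to <basename>_handouts.txt are joined with "---" separators and consist of an optional for_question line, a columns line, a blank line, and then either an "image: " reference or the handout text. A made-up two-block example (file name and question numbers are illustrative only, the keys and separator come from the code above):

    for_question: 7
    columns: 3

    image: 07_handout.jpg
    ---
    for_question: 12
    columns: 3

    Text of the handout for question 12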
chgksuite-0.26.0b4/chgksuite/handouter/installer.py
@@ -0,0 +1,245 @@
+import functools
+import os
+import platform
+import re
+import shutil
+import subprocess
+import tarfile
+import zipfile
+
+import requests
+
+
+def get_utils_dir():
+    path = os.path.join(os.path.expanduser("~"), ".pecheny_utils")
+    if not os.path.exists(path):
+        os.mkdir(path)
+    return path
+
+
+def escape_latex(text):
+    text = text.replace("\\", "\\textbackslash")
+    text = text.replace("~", "\\textasciitilde")
+    text = text.replace("^", "\\textasciicircum")
+    for char in ("%", "&", "$", "#", "{", "}", "_"):
+        text = text.replace(char, "\\" + char)
+    text = text.replace("\n", "\\linebreak\n")
+    return text
+
+
+def check_tectonic_path(tectonic_path):
+    proc = subprocess.run([tectonic_path, "--help"], capture_output=True, check=True)
+    return proc.returncode == 0
+
+
+def get_tectonic_path():
+    errors = []
+    system = platform.system()
+
+    cpdir = get_utils_dir()
+    if system == "Windows":
+        binary_name = "tectonic.exe"
+        tectonic_path = os.path.join(cpdir, binary_name)
+    else:
+        binary_name = "tectonic"
+        tectonic_path = os.path.join(cpdir, binary_name)
+
+    tectonic_ok = False
+    try:
+        tectonic_ok = check_tectonic_path(binary_name)
+    except FileNotFoundError:
+        pass  # tectonic not found in PATH
+    except subprocess.CalledProcessError as e:
+        errors.append(f"tectonic --version failed: {type(e)} {e}")
+    if tectonic_ok:
+        return binary_name
+    if os.path.isfile(tectonic_path):
+        try:
+            tectonic_ok = check_tectonic_path(tectonic_path)
+        except subprocess.CalledProcessError as e:
+            errors.append(f"tectonic --version failed: {type(e)} {e}")
+    if tectonic_ok:
+        return tectonic_path
+
+
+def github_get_latest_release(repo):
+    url = f"https://api.github.com/repos/{repo}/releases/latest"
+    req = requests.get(url)
+    assets_url = req.json()["assets_url"]
+    assets_req = requests.get(assets_url)
+    return {asset["name"]: asset["browser_download_url"] for asset in assets_req.json()}
+
+
+def darwin_is_emulated():
+    try:
+        sub = subprocess.run(
+            ["sysctl", "-n", "sysctl.proc_translated"], capture_output=True, check=True
+        )
+        out = sub.stdout.decode("utf8").strip()
+        return int(out)
+    except subprocess.CalledProcessError:
+        print("couldn't tell if emulated, returning 0")
+        return 0
+
+
+def parse_tectonic_archive_name(archive_name):
+    if archive_name.endswith(".tar.gz"):
+        archive_name = archive_name[: -len(".tar.gz")]
+    elif archive_name.endswith(".zip"):
+        archive_name = archive_name[: -len(".zip")]
+    else:
+        return
+    sp = archive_name.split("-")
+    result = {
+        "version": sp[1],
+        "arch": sp[2],
+        "manufacturer": sp[3],
+        "system": sp[4],
+    }
+    if len(sp) > 5:
+        result["toolchain"] = sp[5]
+    return result
+
+
+# download_file function taken from https://stackoverflow.com/a/39217788
+def download_file(url):
+    print(f"downloading from {url}...")
+    local_filename = url.split("/")[-1]
+    with requests.get(url, stream=True) as resp:
+        resp.raw.read = functools.partial(resp.raw.read, decode_content=True)
+        with open(local_filename, "wb") as f:
+            shutil.copyfileobj(resp.raw, f, length=16 * 1024 * 1024)
+    return local_filename
+
+
+def extract_zip(zip_file, dirname=None):
+    if dirname is None:
+        dirname = zip_file[:-4]
+    with zipfile.ZipFile(zip_file, "r") as zip_ref:
+        zip_ref.extractall(dirname)
+    os.remove(zip_file)
+
+
+def extract_tar(tar_file, dirname=None):
+    if dirname is None:
+        dirname = tar_file[: tar_file.lower().index(".tar")]
+    tf = tarfile.open(tar_file)
+    tf.extractall(dirname)
+    os.remove(tar_file)
+
+
+def extract_archive(filename, dirname=None):
+    if filename.lower().endswith((".tar", ".tar.gz")):
+        extract_tar(filename, dirname=dirname)
+    elif filename.lower().endswith(".zip"):
+        extract_zip(filename, dirname=dirname)
+
+
+def guess_archive_url(assets):
+    system = platform.system()
+    proc = platform.processor()
+    if system == "Darwin":
+        if proc == "arm" or (proc == "i386" and darwin_is_emulated()):
+            arch = "aarch64"
+        else:
+            arch = "x86_64"
+        for k, v in assets.items():
+            parsed = parse_tectonic_archive_name(k)
+            if not parsed:
+                continue
+            if parsed["arch"] == arch and parsed["system"] == "darwin":
+                return v
+    elif system == "Windows":
+        for k, v in assets.items():
+            parsed = parse_tectonic_archive_name(k)
+            if not parsed:
+                continue
+            if (
+                parsed["arch"] == "x86_64"
+                and parsed["system"] == "windows"
+                and parsed["toolchain"] == "msvc"
+            ):
+                return v
+    elif system == "Linux":
+        for k, v in assets.items():
+            parsed = parse_tectonic_archive_name(k)
+            if not parsed:
+                continue
+            if (
+                (not proc or (proc and parsed["arch"] == proc))
+                and parsed["system"] == "linux"
+                and parsed["toolchain"] == "musl"
+            ):
+                return v
+    raise Exception(f"Archive for system {system} proc {proc} not found")
+
+
+def archive_url_from_regex(assets, regex):
+    for k, v in assets.items():
+        if re.match(regex, k):
+            return v
+    raise Exception(f"Archive for regex {regex} not found")
+
+
+def install_tectonic(args):
+    system = platform.system()
+    assets = github_get_latest_release("tectonic-typesetting/tectonic")
+    if args.tectonic_package_regex:
+        archive_url = archive_url_from_regex(assets, args.tectonic_package_regex)
+    else:
+        archive_url = guess_archive_url(assets)
+    downloaded = download_file(archive_url)
+    dirname = "tectonic_folder"
+    extract_archive(downloaded, dirname=dirname)
+    if system == "Windows":
+        filename = "tectonic.exe"
+    else:
+        filename = "tectonic"
+    target_path = os.path.join(get_utils_dir(), filename)
+    shutil.move(os.path.join(dirname, filename), target_path)
+    shutil.rmtree(dirname)
+    return target_path
+
+
+def install_font(url):
+    fn = url.split("/")[-1].split("?")[0]
+    bn, ext = os.path.splitext(fn)
+    if "." in bn:
+        new_fn = bn.replace(".", "_") + ext
+    else:
+        new_fn = fn
+    dir_name = new_fn[:-4]
+    dir_name_base = dir_name.split(os.pathsep)[-1]
+    fonts_dir = os.path.join(get_utils_dir(), "fonts")
+    if not os.path.exists(fonts_dir):
+        os.makedirs(fonts_dir)
+    target_dir = os.path.join(fonts_dir, dir_name_base)
+    if os.path.isdir(target_dir):
+        print(f"{target_dir} already exists")
+        return
+    download_file(url)
+    if fn != new_fn:
+        os.rename(fn, new_fn)
+    extract_archive(new_fn, dirname=dir_name)
+    if not os.path.isdir(target_dir):
+        shutil.copytree(dir_name, target_dir)
+    shutil.rmtree(dir_name)
+
+
+def find_font(file_name, root_dir=None):
+    root_dir = root_dir or os.path.join(get_utils_dir(), "fonts")
+    if not os.path.isdir(root_dir):
+        os.makedirs(root_dir, exist_ok=True)
+    for dir_, _, files in os.walk(root_dir):
+        for fn in files:
+            if fn == file_name:
+                return os.path.join(dir_, fn)
+    raise Exception(f"{file_name} not found")
+
+
+def install_font_from_github_wrapper(repo):
+    latest = github_get_latest_release(repo)
+    for k, v in latest.items():
+        if k.endswith(".zip"):
+            install_font(v)
+            break
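
Note: as a quick illustration of how guess_archive_url matches a release asset, the sketch below (assuming chgksuite 0.26.0b4 is installed; the asset name is made up, in the style of the names the parser expects) shows what parse_tectonic_archive_name returns:

    from chgksuite.handouter.installer import parse_tectonic_archive_name

    # Hypothetical asset name; the extension is stripped and the rest split on "-".
    parsed = parse_tectonic_archive_name("tectonic-0.15.0-x86_64-unknown-linux-musl.tar.gz")
    # parsed == {"version": "0.15.0", "arch": "x86_64", "manufacturer": "unknown",
    #            "system": "linux", "toolchain": "musl"}
    # On Linux, guess_archive_url then requires arch to match platform.processor(),
    # system == "linux" and toolchain == "musl" before returning the asset URL.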
chgksuite-0.26.0b4/chgksuite/handouter/pack.py
@@ -0,0 +1,79 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+import math
+import os
+import subprocess
+
+from pypdf import PdfWriter
+
+from chgksuite.handouter.utils import parse_handouts
+
+
+def run_hndt(fullpath, args):
+    spargs = ["hndt"]
+    if args.font:
+        spargs.extend(["--font", args.font])
+    spargs.append(fullpath)
+    proc = subprocess.run(spargs, cwd=args.folder, check=True, capture_output=True)
+    ns = globals()
+    ns.update(locals())
+    lines = [line for line in proc.stdout.decode("utf8").split("\n") if line]
+    return lines[-1].split("Output file:")[1].strip()
+
+
+def pdf_output(pages, filename):
+    print(f"merging to {filename}, total pages {len(pages)}...")
+    merger = PdfWriter()
+
+    for pdf in pages:
+        merger.append(pdf)
+
+    merger.write(filename)
+    merger.close()
+
+
+def pack_handouts(args):
+    if not args.folder:
+        args.folder = os.getcwd()
+    args.folder = os.path.abspath(args.folder)
+
+    color_pages = []
+    bw_pages = []
+
+    for fn in sorted(os.listdir(args.folder)):
+        if not fn.endswith(".txt"):
+            continue
+        fullpath = os.path.join(args.folder, fn)
+        with open(fullpath, encoding="utf8") as f:
+            contents = f.read()
+        parsed = parse_handouts(contents)
+        if len(parsed) > 1:
+            print(f"skipping {fn}: more than one handout per txt is not supported")
+            continue
+        color = parsed[0].get("color") or 0
+        handouts_per_team = parsed[0].get("handouts_per_team") or 3
+        total_handouts_per_page = parsed[0]["columns"] * parsed[0]["rows"]
+        teams_per_page = total_handouts_per_page / handouts_per_team
+        pages = math.ceil((args.n_teams + 1) / teams_per_page)
+        print(f"processing {fn}")
+        print(f"color = {color}")
+        print(f"handouts_per_team = {handouts_per_team}")
+        print(f"total_handouts_per_page = {total_handouts_per_page}")
+        print(f"teams_per_page = {round(teams_per_page, 1)}")
+        print(f"pages = {pages}")
+        print("running hndt...")
+        output_file = run_hndt(fullpath, args)
+        if color:
+            color_pages += [output_file] * pages
+        else:
+            bw_pages += [output_file] * pages
+    if color_pages:
+        pdf_output(
+            color_pages,
+            os.path.join(args.folder, args.output_filename_prefix + "_color.pdf"),
+        )
+    if bw_pages:
+        pdf_output(
+            bw_pages,
+            os.path.join(args.folder, args.output_filename_prefix + "_bw.pdf"),
+        )
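
Note: a worked example of the page-count arithmetic in pack_handouts, with made-up numbers (a handout declaring columns: 3 and rows: 3, the default handouts_per_team of 3, and 20 teams):

    import math

    columns, rows, handouts_per_team, n_teams = 3, 3, 3, 20
    total_handouts_per_page = columns * rows                      # 9 cut-outs per printed page
    teams_per_page = total_handouts_per_page / handouts_per_team  # 3.0 teams served per page
    pages = math.ceil((n_teams + 1) / teams_per_page)             # 7 repetitions (the code adds 1 to n_teams)

pack_handouts then appends the PDF produced by hndt that many times to the combined _color.pdf or _bw.pdf file.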
chgksuite-0.26.0b4/chgksuite/handouter/runner.py
@@ -0,0 +1,234 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+import os
+import shutil
+import subprocess
+import time
+
+from watchdog.events import FileSystemEventHandler
+from watchdog.observers import Observer
+
+from chgksuite.handouter.gen import generate_handouts
+from chgksuite.handouter.pack import pack_handouts
+from chgksuite.handouter.installer import get_tectonic_path, install_tectonic
+from chgksuite.handouter.tex_internals import (
+    GREYTEXT,
+    GREYTEXT_LANGS,
+    HEADER,
+    IMG,
+    IMGWIDTH,
+    TIKZBOX_END,
+    TIKZBOX_INNER,
+    TIKZBOX_START,
+)
+from chgksuite.handouter.utils import parse_handouts, read_file, replace_ext, write_file
+
+
+class HandoutGenerator:
+    SPACE = 1.5  # mm
+
+    def __init__(self, args):
+        self.args = args
+        self.blocks = [self.get_header()]
+
+    def get_header(self):
+        header = HEADER
+        header = (
+            header.replace("<PAPERWIDTH>", str(self.args.paperwidth))
+            .replace("<PAPERHEIGHT>", str(self.args.paperheight))
+            .replace("<MARGIN_LEFT>", str(self.args.margin_left))
+            .replace("<MARGIN_RIGHT>", str(self.args.margin_right))
+            .replace("<MARGIN_TOP>", str(self.args.margin_top))
+            .replace("<MARGIN_BOTTOM>", str(self.args.margin_bottom))
+            .replace("<TIKZ_MM>", str(self.args.tikz_mm))
+        )
+        if self.args.font:
+            header = header.replace("Arial", self.args.font)
+        return header
+
+    def parse_input(self, filepath):
+        contents = read_file(filepath)
+        return parse_handouts(contents)
+
+    def generate_for_question(self, question_num):
+        return GREYTEXT.replace(
+            "<GREYTEXT>", GREYTEXT_LANGS[self.args.lang].format(question_num)
+        )
+
+    def make_tikzbox(self, block):
+        if block.get("no_center"):
+            align = ""
+        else:
+            align = ", align=center"
+        textwidth = ", text width=\\boxwidthinner"
+        fs = block.get("font_size") or self.args.font_size
+        fontsize = "\\fontsize{FSpt}{LHpt}\\selectfont ".replace("FS", str(fs)).replace(
+            "LH", str(round(fs * 1.2, 1))
+        )
+        contents = block["contents"]
+        if block.get("font_family"):
+            contents = "\\fontspec{" + block["font_family"] + "}" + contents
+        return (
+            TIKZBOX_INNER.replace("<CONTENTS>", contents)
+            .replace("<ALIGN>", align)
+            .replace("<TEXTWIDTH>", textwidth)
+            .replace("<FONTSIZE>", fontsize)
+        )
+
+    def get_page_width(self):
+        return self.args.paperwidth - self.args.margin_left - self.args.margin_right - 2
+
+    def generate_regular_block(self, block_):
+        block = block_.copy()
+        if not (block.get("image") or block.get("text")):
+            return
+        columns = block["columns"]
+        spaces = block["columns"] - 1
+        boxwidth = self.args.boxwidth or round(
+            (self.get_page_width() - spaces * self.SPACE) / block["columns"],
+            3,
+        )
+        total_width = boxwidth * columns + spaces * self.SPACE
+        if self.args.debug:
+            print(
+                f"columns: {columns}, boxwidth: {boxwidth}, total width: {total_width}"
+            )
+        boxwidthinner = self.args.boxwidthinner or (boxwidth - 2 * self.args.tikz_mm)
+        header = [
+            r"\setlength{\boxwidth}{<Q>mm}%".replace("<Q>", str(boxwidth)),
+            r"\setlength{\boxwidthinner}{<Q>mm}%".replace("<Q>", str(boxwidthinner)),
+        ]
+        rows = []
+        contents = []
+        if block.get("image"):
+            img_qwidth = block.get("resize_image") or 1.0
+            imgwidth = IMGWIDTH.replace("<QWIDTH>", str(img_qwidth))
+            contents.append(
+                IMG.replace("<IMGPATH>", block["image"]).replace("<IMGWIDTH>", imgwidth)
+            )
+        if block.get("text"):
+            contents.append(block["text"])
+        block["contents"] = "\\linebreak\n".join(contents)
+        if block.get("no_center"):
+            block["centering"] = ""
+        else:
+            block["centering"] = "\\centering"
+        for _ in range(block.get("rows") or 1):
+            row = (
+                TIKZBOX_START.replace("<CENTERING>", block["centering"])
+                + "\n".join([self.make_tikzbox(block)] * block["columns"])
+                + TIKZBOX_END
+            )
+            rows.append(row)
+        return "\n".join(header) + "\n" + "\n\n\\vspace{1mm}\n\n".join(rows)
+
+    def generate(self):
+        for block in self.parse_input(self.args.filename):
+            if self.args.debug:
+                print(block)
+            if block.get("for_question"):
+                self.blocks.append(self.generate_for_question(block["for_question"]))
+            if block.get("columns"):
+                block = self.generate_regular_block(block)
+                if block:
+                    self.blocks.append(block)
+        self.blocks.append("\\end{document}")
+        return "\n\n".join(self.blocks)
+
+
+def process_file(args, file_dir, bn):
+    tex_contents = HandoutGenerator(args).generate()
+    tex_path = os.path.join(file_dir, f"{bn}_{args.lang}.tex")
+    write_file(tex_path, tex_contents)
+
+    tectonic_path = get_tectonic_path()
+    if not tectonic_path:
+        print("tectonic is not present, installing it...")
+        install_tectonic()
+        tectonic_path = get_tectonic_path()
+    if not tectonic_path:
+        raise Exception("tectonic couldn't be installed successfully :(")
+    if args.debug:
+        print(f"tectonic found at `{tectonic_path}`")
+
+    subprocess.run(
+        [tectonic_path, os.path.basename(tex_path)], check=True, cwd=file_dir
+    )
+
+    output_file = replace_ext(tex_path, "pdf")
+
+    if args.compress:
+        print(f"compressing {output_file}")
+        size_before = round(os.stat(output_file).st_size / 1024)
+        output_file_compressed = output_file[:-4] + ".compressed.pdf"
+        subprocess.run(
+            [
+                "gs",
+                "-sDEVICE=pdfwrite",
+                "-dCompatibilityLevel=1.5",
+                f"-dPDFSETTINGS=/{args.pdfsettings}",
+                "-dNOPAUSE",
+                "-dQUIET",
+                "-dBATCH",
+                f"-sOutputFile={output_file_compressed}",
+                output_file,
+            ],
+            check=True,
+        )
+        shutil.move(output_file_compressed, output_file)
+        size_after = round(os.stat(output_file).st_size / 1024)
+        q = round(size_after / size_before, 1)
+        print(f"before: {size_before}kb, after: {size_after}kb, compression: {q}")
+
+    print(f"Output file: {output_file}")
+
+    if not args.debug:
+        os.remove(tex_path)
+
+
+class FileChangeHandler(FileSystemEventHandler):
+    def __init__(self, args, file_dir, bn):
+        self.args = args
+        self.file_dir = file_dir
+        self.bn = bn
+        self.last_processed = 0
+
+    def on_modified(self, event):
+        if event.src_path == os.path.abspath(self.args.filename):
+            # Debounce to avoid processing the same change multiple times
+            current_time = time.time()
+            if current_time - self.last_processed > 1:
+                print(f"File {self.args.filename} changed, regenerating PDF...")
+                process_file(self.args, self.file_dir, self.bn)
+                self.last_processed = current_time
+
+
+def run_handouter(args):
+    file_dir = os.path.dirname(os.path.abspath(args.filename))
+    bn, _ = os.path.splitext(os.path.basename(args.filename))
+
+    process_file(args, file_dir, bn)
+
+    if args.watch:
+        print(f"Watching {args.filename} for changes. Press Ctrl+C to stop.")
+        event_handler = FileChangeHandler(args, file_dir, bn)
+        observer = Observer()
+        observer.schedule(event_handler, path=file_dir, recursive=False)
+        observer.start()
+        try:
+            while True:
+                time.sleep(1)
+        except KeyboardInterrupt:
+            observer.stop()
+        observer.join()
+
+
+def gui_handouter(args):
+    if args.handoutssubcommand == "run":
+        run_handouter(args)
+    elif args.handoutssubcommand == "generate":
+        generate_handouts(args)
+    elif args.handoutssubcommand == "pack":
+        pack_handouts(args)
+    elif args.handoutssubcommand == "install":
+        install_tectonic(args)
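
Note: a minimal sketch of driving the runner directly, assuming chgksuite 0.26.0b4 is installed and a tectonic binary is already available (see installer.py above). In normal use these fields are filled in by chgksuite's CLI; the values below are guesses for illustration, not the CLI's actual defaults:

    from argparse import Namespace
    from chgksuite.handouter.runner import run_handouter

    args = Namespace(
        filename="handouts.txt",  # hypothetical input with "---"-separated blocks
        lang="en",
        font=None,
        font_size=14,
        paperwidth=210, paperheight=297,  # A4, in mm
        margin_left=5, margin_right=5, margin_top=5, margin_bottom=5,
        tikz_mm=2,
        boxwidth=None, boxwidthinner=None,
        debug=False,
        compress=False, pdfsettings="ebook",
        watch=False,
    )
    run_handouter(args)  # writes handouts_en.tex, runs tectonic, leaves handouts_en.pdf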
chgksuite-0.26.0b4/chgksuite/handouter/tex_internals.py
@@ -0,0 +1,47 @@
+HEADER = r"""
+\documentclass{minimal}
+\usepackage[paperwidth=<PAPERWIDTH>mm,paperheight=<PAPERHEIGHT>mm,top=<MARGIN_TOP>mm,bottom=<MARGIN_BOTTOM>mm,left=<MARGIN_LEFT>mm,right=<MARGIN_RIGHT>mm]{geometry}
+\frenchspacing
+\usepackage{fontspec}
+\usepackage{xcolor}
+\usepackage{tikz}
+\usepackage{calc}
+\usepackage[document]{ragged2e}
+\setmainfont{Arial}
+\newlength{\boxwidth}
+\newlength{\boxwidthinner}
+\begin{document}
+\fontsize{14pt}{16pt}\selectfont
+\setlength\parindent{0pt}
+\tikzstyle{box}=[draw, dashed, rectangle, inner sep=<TIKZ_MM>mm]
+\raggedright
+\raggedbottom
+""".strip()
+
+GREYTEXT = r"""{\fontsize{9pt}{11pt}\selectfont \textcolor{gray}{<GREYTEXT>}}"""
+
+GREYTEXT_LANGS = {
+    "by": "Да пытаньня {}",
+    "en": "Handout for question {}",
+    "kz": "{}-сұрақтың үлестіру материалы",
+    "ro": "Material care urmează a fi distribuit pentru întrebarea {}",
+    "ru": "К вопросу {}",
+    "sr": "Materijal za deljenje uz pitanje {}",
+    "ua": "До запитання {}",
+    "uz": "{} саволга тарқатма материал",
+}
+
+TIKZBOX_START = r"""{<CENTERING>
+"""
+
+TIKZBOX_INNER = r"""
+\begin{tikzpicture}
+\node[box, minimum width=\boxwidth<TEXTWIDTH><ALIGN>] {<FONTSIZE><CONTENTS>\par};
+\end{tikzpicture}
+""".strip()
+
+TIKZBOX_END = "\n}"
+
+IMG = r"""\includegraphics<IMGWIDTH>{<IMGPATH>}"""
+
+IMGWIDTH = r"[width=<QWIDTH>\textwidth]"
chgksuite-0.26.0b4/chgksuite/handouter/utils.py
@@ -0,0 +1,67 @@
+import os
+
+from pecheny_utils import escape_latex
+
+RESERVED_WORDS = [
+    "image",
+    "for_question",
+    "columns",
+    "rows",
+    "resize_image",
+    "font_size",
+    "font_family",
+    "no_center",
+    "raw_tex",
+    "color",
+    "handouts_per_team",
+]
+
+
+def read_file(filepath):
+    with open(filepath, "r", encoding="utf8") as f:
+        contents = f.read()
+    return contents
+
+
+def write_file(filepath, contents):
+    with open(filepath, "w", encoding="utf8") as f:
+        f.write(contents)
+
+
+def replace_ext(filepath, new_ext):
+    if not new_ext.startswith("."):
+        new_ext = "." + new_ext
+    dirname = os.path.dirname(filepath)
+    basename = os.path.basename(filepath)
+    base, _ = os.path.splitext(basename)
+    return os.path.join(dirname, base + new_ext)
+
+
+def wrap_val(key, val):
+    if key in ("columns", "rows", "no_center", "color", "handouts_per_team"):
+        return int(val.strip())
+    if key in ("resize_image", "font_size"):
+        return float(val.strip())
+    return val.strip()
+
+
+def parse_handouts(contents):
+    blocks = contents.split("\n---\n")
+    result = []
+    for block_ in blocks:
+        block = block_.strip()
+        block_dict = {}
+        text = []
+        lines = block.split("\n")
+        for line in lines:
+            sp = line.split(":", 1)
+            if sp[0] in RESERVED_WORDS:
+                block_dict[sp[0]] = wrap_val(sp[0], sp[1])
+            elif line.strip():
+                text.append(line.strip())
+        if text:
+            block_dict["text"] = "\n".join(text).strip()
+            if not block_dict.get("raw_tex"):
+                block_dict["text"] = escape_latex(block_dict["text"])
+        result.append(block_dict)
+    return result
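
Note: a small round-trip through parse_handouts (assuming chgksuite 0.26.0b4 is installed; the sample contents are invented for illustration):

    from chgksuite.handouter.utils import parse_handouts

    sample = (
        "for_question: 7\ncolumns: 3\nrows: 2\n\nimage: 07_handout.jpg"
        "\n---\n"
        "columns: 2\nrows: 1\n\n50% & 75%"
    )
    blocks = parse_handouts(sample)
    # blocks[0] == {"for_question": "7", "columns": 3, "rows": 2, "image": "07_handout.jpg"}
    # blocks[1]["text"] == r"50\% \& 75\%"  # escaped for LaTeX because raw_tex is not set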
chgksuite-0.26.0b4/chgksuite/version.py
@@ -0,0 +1 @@
+__version__ = "0.26.0b4"
{chgksuite-0.26.0b3 → chgksuite-0.26.0b4}/chgksuite.egg-info/SOURCES.txt
@@ -33,6 +33,13 @@ chgksuite/composer/stats.py
 chgksuite/composer/telegram.py
 chgksuite/composer/telegram_bot.py
 chgksuite/composer/telegram_parser.py
+chgksuite/handouter/__init__.py
+chgksuite/handouter/gen.py
+chgksuite/handouter/installer.py
+chgksuite/handouter/pack.py
+chgksuite/handouter/runner.py
+chgksuite/handouter/tex_internals.py
+chgksuite/handouter/utils.py
 chgksuite/resources/cheader.tex
 chgksuite/resources/fix-unnumbered-sections.sty
 chgksuite/resources/labels_by.toml
{chgksuite-0.26.0b3 → chgksuite-0.26.0b4}/setup.py
@@ -31,7 +31,7 @@ setup(
         "License :: OSI Approved :: MIT License",
         "Operating System :: OS Independent",
     ],
-    packages=["chgksuite", "chgksuite.composer"],
+    packages=["chgksuite", "chgksuite.composer", "chgksuite.handouter"],
     package_data={
         "chgksuite": [
             "resources/*.json",
chgksuite-0.26.0b3/chgksuite/version.py
@@ -1 +0,0 @@
-__version__ = "0.26.0b3"