chgksuite 0.26.0b2__tar.gz → 0.26.0b4__tar.gz
This diff shows the content of publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their respective public registries.
- {chgksuite-0.26.0b2/chgksuite.egg-info → chgksuite-0.26.0b4}/PKG-INFO +4 -2
- {chgksuite-0.26.0b2 → chgksuite-0.26.0b4}/chgksuite/cli.py +203 -0
- {chgksuite-0.26.0b2 → chgksuite-0.26.0b4}/chgksuite/composer/telegram.py +8 -5
- chgksuite-0.26.0b4/chgksuite/handouter/__init__.py +0 -0
- chgksuite-0.26.0b4/chgksuite/handouter/gen.py +143 -0
- chgksuite-0.26.0b4/chgksuite/handouter/installer.py +245 -0
- chgksuite-0.26.0b4/chgksuite/handouter/pack.py +79 -0
- chgksuite-0.26.0b4/chgksuite/handouter/runner.py +234 -0
- chgksuite-0.26.0b4/chgksuite/handouter/tex_internals.py +47 -0
- chgksuite-0.26.0b4/chgksuite/handouter/utils.py +67 -0
- chgksuite-0.26.0b4/chgksuite/version.py +1 -0
- {chgksuite-0.26.0b2 → chgksuite-0.26.0b4/chgksuite.egg-info}/PKG-INFO +4 -2
- {chgksuite-0.26.0b2 → chgksuite-0.26.0b4}/chgksuite.egg-info/SOURCES.txt +7 -0
- {chgksuite-0.26.0b2 → chgksuite-0.26.0b4}/chgksuite.egg-info/requires.txt +3 -1
- {chgksuite-0.26.0b2 → chgksuite-0.26.0b4}/history.md +3 -0
- {chgksuite-0.26.0b2 → chgksuite-0.26.0b4}/setup.py +4 -2
- chgksuite-0.26.0b2/chgksuite/version.py +0 -1
- {chgksuite-0.26.0b2 → chgksuite-0.26.0b4}/LICENSE +0 -0
- {chgksuite-0.26.0b2 → chgksuite-0.26.0b4}/MANIFEST.in +0 -0
- {chgksuite-0.26.0b2 → chgksuite-0.26.0b4}/README.md +0 -0
- {chgksuite-0.26.0b2 → chgksuite-0.26.0b4}/chgksuite/__init__.py +0 -0
- {chgksuite-0.26.0b2 → chgksuite-0.26.0b4}/chgksuite/__main__.py +0 -0
- {chgksuite-0.26.0b2 → chgksuite-0.26.0b4}/chgksuite/common.py +0 -0
- {chgksuite-0.26.0b2 → chgksuite-0.26.0b4}/chgksuite/composer/__init__.py +0 -0
- {chgksuite-0.26.0b2 → chgksuite-0.26.0b4}/chgksuite/composer/chgksuite_parser.py +0 -0
- {chgksuite-0.26.0b2 → chgksuite-0.26.0b4}/chgksuite/composer/composer_common.py +0 -0
- {chgksuite-0.26.0b2 → chgksuite-0.26.0b4}/chgksuite/composer/db.py +0 -0
- {chgksuite-0.26.0b2 → chgksuite-0.26.0b4}/chgksuite/composer/docx.py +0 -0
- {chgksuite-0.26.0b2 → chgksuite-0.26.0b4}/chgksuite/composer/latex.py +0 -0
- {chgksuite-0.26.0b2 → chgksuite-0.26.0b4}/chgksuite/composer/lj.py +0 -0
- {chgksuite-0.26.0b2 → chgksuite-0.26.0b4}/chgksuite/composer/openquiz.py +0 -0
- {chgksuite-0.26.0b2 → chgksuite-0.26.0b4}/chgksuite/composer/pptx.py +0 -0
- {chgksuite-0.26.0b2 → chgksuite-0.26.0b4}/chgksuite/composer/reddit.py +0 -0
- {chgksuite-0.26.0b2 → chgksuite-0.26.0b4}/chgksuite/composer/stats.py +0 -0
- {chgksuite-0.26.0b2 → chgksuite-0.26.0b4}/chgksuite/composer/telegram_bot.py +0 -0
- {chgksuite-0.26.0b2 → chgksuite-0.26.0b4}/chgksuite/composer/telegram_parser.py +0 -0
- {chgksuite-0.26.0b2 → chgksuite-0.26.0b4}/chgksuite/parser.py +0 -0
- {chgksuite-0.26.0b2 → chgksuite-0.26.0b4}/chgksuite/parser_db.py +0 -0
- {chgksuite-0.26.0b2 → chgksuite-0.26.0b4}/chgksuite/resources/cheader.tex +0 -0
- {chgksuite-0.26.0b2 → chgksuite-0.26.0b4}/chgksuite/resources/fix-unnumbered-sections.sty +0 -0
- {chgksuite-0.26.0b2 → chgksuite-0.26.0b4}/chgksuite/resources/labels_by.toml +0 -0
- {chgksuite-0.26.0b2 → chgksuite-0.26.0b4}/chgksuite/resources/labels_by_tar.toml +0 -0
- {chgksuite-0.26.0b2 → chgksuite-0.26.0b4}/chgksuite/resources/labels_en.toml +0 -0
- {chgksuite-0.26.0b2 → chgksuite-0.26.0b4}/chgksuite/resources/labels_kz_cyr.toml +0 -0
- {chgksuite-0.26.0b2 → chgksuite-0.26.0b4}/chgksuite/resources/labels_ru.toml +0 -0
- {chgksuite-0.26.0b2 → chgksuite-0.26.0b4}/chgksuite/resources/labels_sr.toml +0 -0
- {chgksuite-0.26.0b2 → chgksuite-0.26.0b4}/chgksuite/resources/labels_ua.toml +0 -0
- {chgksuite-0.26.0b2 → chgksuite-0.26.0b4}/chgksuite/resources/labels_uz.toml +0 -0
- {chgksuite-0.26.0b2 → chgksuite-0.26.0b4}/chgksuite/resources/labels_uz_cyr.toml +0 -0
- {chgksuite-0.26.0b2 → chgksuite-0.26.0b4}/chgksuite/resources/pptx_config.toml +0 -0
- {chgksuite-0.26.0b2 → chgksuite-0.26.0b4}/chgksuite/resources/regexes_by.json +0 -0
- {chgksuite-0.26.0b2 → chgksuite-0.26.0b4}/chgksuite/resources/regexes_en.json +0 -0
- {chgksuite-0.26.0b2 → chgksuite-0.26.0b4}/chgksuite/resources/regexes_kz_cyr.json +0 -0
- {chgksuite-0.26.0b2 → chgksuite-0.26.0b4}/chgksuite/resources/regexes_ru.json +0 -0
- {chgksuite-0.26.0b2 → chgksuite-0.26.0b4}/chgksuite/resources/regexes_sr.json +0 -0
- {chgksuite-0.26.0b2 → chgksuite-0.26.0b4}/chgksuite/resources/regexes_ua.json +0 -0
- {chgksuite-0.26.0b2 → chgksuite-0.26.0b4}/chgksuite/resources/regexes_uz_cyr.json +0 -0
- {chgksuite-0.26.0b2 → chgksuite-0.26.0b4}/chgksuite/resources/template.docx +0 -0
- {chgksuite-0.26.0b2 → chgksuite-0.26.0b4}/chgksuite/resources/template.pptx +0 -0
- {chgksuite-0.26.0b2 → chgksuite-0.26.0b4}/chgksuite/resources/template_shorin.pptx +0 -0
- {chgksuite-0.26.0b2 → chgksuite-0.26.0b4}/chgksuite/resources/trello.json +0 -0
- {chgksuite-0.26.0b2 → chgksuite-0.26.0b4}/chgksuite/trello.py +0 -0
- {chgksuite-0.26.0b2 → chgksuite-0.26.0b4}/chgksuite/typotools.py +0 -0
- {chgksuite-0.26.0b2 → chgksuite-0.26.0b4}/chgksuite/vulture_whitelist.py +0 -0
- {chgksuite-0.26.0b2 → chgksuite-0.26.0b4}/chgksuite.egg-info/dependency_links.txt +0 -0
- {chgksuite-0.26.0b2 → chgksuite-0.26.0b4}/chgksuite.egg-info/entry_points.txt +0 -0
- {chgksuite-0.26.0b2 → chgksuite-0.26.0b4}/chgksuite.egg-info/top_level.txt +0 -0
- {chgksuite-0.26.0b2 → chgksuite-0.26.0b4}/setup.cfg +0 -0
{chgksuite-0.26.0b2/chgksuite.egg-info → chgksuite-0.26.0b4}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: chgksuite
-Version: 0.26.0b2
+Version: 0.26.0b4
 Summary: A package for chgk automation
 Home-page: https://gitlab.com/peczony/chgksuite
 Author: Alexander Pecheny
@@ -20,12 +20,14 @@ Requires-Dist: parse
 Requires-Dist: Pillow
 Requires-Dist: ply
 Requires-Dist: pypandoc
+Requires-Dist: pypdf
 Requires-Dist: pyperclip
 Requires-Dist: python-docx
 Requires-Dist: python-pptx
-Requires-Dist: requests
 Requires-Dist: python-telegram-bot
+Requires-Dist: requests
 Requires-Dist: toml
+Requires-Dist: watchdog
 Dynamic: author
 Dynamic: author-email
 Dynamic: classifier
{chgksuite-0.26.0b2 → chgksuite-0.26.0b4}/chgksuite/cli.py

@@ -12,6 +12,8 @@ from chgksuite.common import (
     load_settings,
 )
 from chgksuite.composer import gui_compose
+from chgksuite.handouter.runner import gui_handouter
+from chgksuite.handouter.tex_internals import GREYTEXT_LANGS
 from chgksuite.parser import gui_parse
 from chgksuite.trello import gui_trello
 from chgksuite.version import __version__
@@ -817,6 +819,205 @@ class ArgparseBuilder:
             caption="Не открывать браузер",
         )
 
+        cmdhandouts = subparsers.add_parser("handouts")
+        cmdhandouts_subcommands = cmdhandouts.add_subparsers(dest="handoutssubcommand")
+        cmdhandouts_run = self.add_parser(cmdhandouts_subcommands, "run")
+        self.add_argument(
+            cmdhandouts_run,
+            "filename",
+            help="file with handouts",
+            caption="Имя файла с раздатками",
+            filetypes=[("handouts files", "*.hndts"), ("text files", "*.txt")],
+        )
+        self.add_argument(
+            cmdhandouts_run,
+            "--lang",
+            default="ru",
+            argtype="radiobutton",
+            choices=sorted(GREYTEXT_LANGS.keys()),
+            help="language",
+            caption="Язык",
+            advanced=True,
+        )
+        self.add_argument(
+            cmdhandouts_run,
+            "--compress",
+            action="store_true",
+            help="compress",
+            caption="Сжать файл после вёрстки (требует установленного ghostscript)",
+            advanced=True,
+        )
+        self.add_argument(cmdhandouts_run, "--font", "-f", help="font", caption="Шрифт")
+        self.add_argument(
+            cmdhandouts_run,
+            "--font_size",
+            type=int,
+            default=14,
+            help="font size",
+            caption="Размер шрифта",
+        )
+        self.add_argument(
+            cmdhandouts_run,
+            "--pdfsettings",
+            choices=["screen", "ebook", "printer", "prepress", "default"],
+            default="default",
+            advanced=True,
+            caption="Настройки pdfsettings для ghostscript",
+        )
+        self.add_argument(
+            cmdhandouts_run,
+            "--paperwidth",
+            type=float,
+            default=210,
+            help="paper width",
+            caption="Ширина бумаги",
+            advanced=True,
+        )
+        self.add_argument(
+            cmdhandouts_run,
+            "--paperheight",
+            type=float,
+            default=297,
+            help="paper height",
+            caption="Высота бумаги",
+            advanced=True,
+        )
+        self.add_argument(
+            cmdhandouts_run,
+            "--margin_top",
+            type=float,
+            default=5,
+            help="top margin",
+            caption="Верхний отступ",
+            advanced=True,
+        )
+        self.add_argument(
+            cmdhandouts_run,
+            "--margin_bottom",
+            type=float,
+            default=5,
+            help="bottom margin",
+            caption="Нижний отступ",
+            advanced=True,
+        )
+        self.add_argument(
+            cmdhandouts_run,
+            "--margin_left",
+            type=float,
+            default=5,
+            help="left margin",
+            caption="Левый отступ",
+            advanced=True,
+        )
+        self.add_argument(
+            cmdhandouts_run,
+            "--margin_right",
+            type=float,
+            default=5,
+            help="right margin",
+            caption="Правый отступ",
+            advanced=True,
+        )
+        self.add_argument(
+            cmdhandouts_run,
+            "--boxwidth",
+            type=float,
+            help="box width",
+            caption="Ширина блока",
+            advanced=True,
+        )
+        self.add_argument(
+            cmdhandouts_run,
+            "--boxwidthinner",
+            type=float,
+            help="box width inner",
+            caption="Внутренняя ширина блока",
+            advanced=True,
+        )
+        self.add_argument(
+            cmdhandouts_run,
+            "--tikz_mm",
+            type=float,
+            default=2,
+            help="tikz_mm width",
+            caption="Ширина tikz_mm",
+            advanced=True,
+        )
+
+        cmdhandouts_generate = self.add_parser(cmdhandouts_subcommands, "generate")
+        self.add_argument(
+            cmdhandouts_generate,
+            "filename",
+            help="file with questions packet",
+            caption="Имя файла с пакетом",
+            filetypes=[("chgksuite files", "*.4s")],
+        )
+        self.add_argument(
+            cmdhandouts_generate,
+            "--lang",
+            default="ru",
+            help="language",
+            caption="Язык",
+            argtype="radiobutton",
+            choices=sorted(GREYTEXT_LANGS.keys()),
+            advanced=True,
+        )
+        self.add_argument(
+            cmdhandouts_generate,
+            "--separate",
+            action="store_true",
+            help="Generate separate handouts for each question",
+            caption="Сгенерировать отдельный файл с раздатками для каждого вопроса",
+        )
+        self.add_argument(
+            cmdhandouts_generate,
+            "--list-handouts",
+            "-l",
+            action="store_true",
+            help="Generate a file with a list of handouts",
+            caption="Сгенерировать файл со списком раздаток",
+        )
+
+        cmdhandouts_pack = self.add_parser(cmdhandouts_subcommands, "pack")
+        self.add_argument(
+            cmdhandouts_pack,
+            "folder",
+            help="input directory",
+            caption="Папка с раздатками",
+        )
+        self.add_argument(
+            cmdhandouts_pack,
+            "--output_filename_prefix",
+            "-o",
+            default="packed_handouts",
+            help="output filename prefix",
+            caption="Префикс имени выходного файла",
+        )
+        self.add_argument(
+            cmdhandouts_pack,
+            "--n_teams",
+            "-n",
+            type=int,
+            required=True,
+            help="number of teams",
+            caption="Количество команд",
+        )
+        self.add_argument(
+            cmdhandouts_pack,
+            "--font",
+            "-f",
+            help="font",
+            caption="Шрифт",
+        )
+
+        cmdhandouts_install = self.add_parser(cmdhandouts_subcommands, "install")
+        self.add_argument(
+            cmdhandouts_install,
+            "--tectonic_package_regex",
+            advanced=True,
+            caption="Переопределить имя файла с релизом tectonic",
+        )
+
 
 def single_action(args, use_wrapper, resourcedir):
     if use_wrapper:
@@ -860,6 +1061,8 @@ def single_action(args, use_wrapper, resourcedir):
         gui_compose(args)
     if args.action == "trello":
         gui_trello(args)
+    if args.action == "handouts":
+        gui_handouter(args)
 
 
 def app():
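Taken together, the cli.py changes wire a new "handouts" action with "run", "generate", "pack" and "install" subcommands into the existing argparse/GUI builder and dispatch it to gui_handouter. A minimal usage sketch, assuming the chgksuite console script declared in the package's entry points and using only the flags added above (the file names are hypothetical):

    import subprocess

    # typeset an existing handouts file (fonts and tectonic must be available)
    subprocess.run(["chgksuite", "handouts", "run", "handouts.hndts", "--font_size", "14"], check=True)

    # extract handouts from a .4s packet and also write a list of questions that have them
    subprocess.run(["chgksuite", "handouts", "generate", "packet.4s", "--list-handouts"], check=True)

    # fetch a tectonic binary into ~/.pecheny_utils (see installer.py below)
    subprocess.run(["chgksuite", "handouts", "install"], check=True)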
{chgksuite-0.26.0b2 → chgksuite-0.26.0b4}/chgksuite/composer/telegram.py

@@ -852,7 +852,7 @@ class TelegramExporter(BaseExporter):
 
         # Wait for a forwarded message with channel information
         channel_id = self.wait_for_forwarded_message(
-            entity_type="channel", check_type=True
+            entity_type="channel", check_type=True
         )
         if channel_id:
             self.save_username(channel_result, channel_id)
@@ -875,7 +875,7 @@ class TelegramExporter(BaseExporter):
 
         # Wait for a forwarded message with chat information
         chat_id = self.wait_for_forwarded_message(
-            entity_type="chat", check_type=False
+            entity_type="chat", check_type=False
         )
         if not chat_id:
             self.logger.error("Failed to get chat ID from forwarded message")
@@ -891,7 +891,6 @@ class TelegramExporter(BaseExporter):
             entity_type="chat",
             check_type=False,
             add_msg=error_msg,
-            string_id=chat_result,
         )
         if chat_id:
             self.save_username(chat_result, chat_id)
@@ -1117,11 +1116,15 @@ class TelegramExporter(BaseExporter):
 
         # Look for a forwarded message in recent messages
         cursor = self.db_conn.cursor()
+        if self.created_at:
+            threshold = "'" + self.created_at + "'"
+        else:
+            threshold = "datetime('now', '-2 minutes')"
         cursor.execute(
-            """
+            f"""
             SELECT raw_data, created_at
             FROM messages
-            WHERE created_at >
+            WHERE created_at > {threshold}
             ORDER BY created_at DESC
         """
         )
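The last telegram.py hunk makes the lookup window for forwarded messages configurable: when the exporter carries a created_at value it is used as the SQL threshold, otherwise the query falls back to a two-minute window. A standalone illustration of the query this builds (not code from the package; the timestamp is a made-up example):

    def build_query(created_at=None):
        # mirrors the threshold logic added in the hunk above
        if created_at:
            threshold = "'" + created_at + "'"
        else:
            threshold = "datetime('now', '-2 minutes')"
        return f"""
            SELECT raw_data, created_at
            FROM messages
            WHERE created_at > {threshold}
            ORDER BY created_at DESC
        """

    print(build_query())                       # ... WHERE created_at > datetime('now', '-2 minutes') ...
    print(build_query("2024-01-01 12:00:00"))  # ... WHERE created_at > '2024-01-01 12:00:00' ...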
chgksuite-0.26.0b4/chgksuite/handouter/gen.py (new file)

@@ -0,0 +1,143 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+import itertools
+import os
+import re
+from collections import defaultdict
+
+import toml
+
+from chgksuite.common import get_source_dirs
+from chgksuite.composer.chgksuite_parser import parse_4s
+from chgksuite.composer.composer_common import _parse_4s_elem, parseimg
+from chgksuite.handouter.utils import read_file, write_file
+
+
+def postprocess(s):
+    return s.replace("\\_", "_")
+
+
+def generate_handouts_list(handouts, output_dir, base_name, parsed):
+    """Generate a human-readable file with question numbers that have handouts."""
+    question_numbers = sorted([int(h["for_question"]) for h in handouts])
+
+    content = "ВОПРОСЫ С РАЗДАТОЧНЫМ МАТЕРИАЛОМ:\n\n"
+    content += f"Сквозная нумерация:\n{', '.join(map(str, question_numbers))}\n\n"
+
+    content += "По турам:\n"
+    tour = 0
+    by_tour = {}
+    for tup in parsed:
+        if tup[0] == "section":
+            tour += 1
+            by_tour[tour] = []
+        if tup[0] == "Question":
+            if tour == 0:
+                tour = 1
+                by_tour[tour] = []
+            if tup[1]["number"] in question_numbers:
+                by_tour[tour].append(tup[1]["number"])
+
+    for tour in sorted(by_tour):
+        tour_handouts = by_tour[tour]
+        if tour_handouts:
+            content += f"Тур {tour}: {', '.join(map(str, tour_handouts))}\n"
+        else:
+            content += f"Тур {tour}: нет раздаток\n"
+
+    output_fn = os.path.join(output_dir, base_name + "_handouts_list.txt")
+    write_file(output_fn, content)
+    print(f"File with list of handouts: {output_fn}")
+    print(content)
+
+
+def generate_handouts(args):
+    _, resourcedir = get_source_dirs()
+    labels = toml.loads(
+        read_file(os.path.join(resourcedir, f"labels_{args.lang}.toml"))
+    )
+    handout_re = re.compile(
+        "\\["
+        + labels["question_labels"]["handout_short"]
+        + ".+?:( |\n)(?P<handout_text>.+?)\\]",
+        flags=re.DOTALL,
+    )
+
+    cnt = read_file(args.filename)
+    parsed = parse_4s(cnt)
+
+    questions = [q[1] for q in parsed if q[0] == "Question"]
+    handouts = []
+    for q in questions:
+        if isinstance(q["question"], list):
+            question_text = "\n".join(itertools.chain.from_iterable(q["question"]))
+        else:
+            question_text = q["question"]
+        question_text_lower = question_text.lower()
+        srch = handout_re.search(question_text)
+        if srch:
+            text = postprocess(srch.group("handout_text"))
+            elems = _parse_4s_elem(text)
+            img = [el for el in elems if el[0] == "img"]
+            if img:
+                try:
+                    parsed_img = parseimg(img[0][1])
+                except:
+                    print(
+                        f"Image file for question {q['number']} not found, add it by hand"
+                    )
+                    continue
+            else:
+                parsed_img = None
+            res = {"for_question": q["number"]}
+            if parsed_img:
+                res["image"] = parsed_img["imgfile"]
+            else:
+                res["text"] = text
+            handouts.append(res)
+        elif (
+            "раздат" in question_text_lower
+            or "роздан" in question_text_lower
+            or "(img" in question_text_lower
+        ):
+            print(f"probably badly formatted handout for question {q['number']}")
+            res = {"for_question": q["number"], "text": postprocess(question_text)}
+            handouts.append(res)
+    result = []
+    result_by_question = defaultdict(list)
+    for handout in handouts:
+        if "image" in handout:
+            key = "image"
+            prefix = "image: "
+        else:
+            key = "text"
+            prefix = ""
+        value = handout[key]
+        formatted = (
+            f"for_question: {handout['for_question']}\n" if not args.separate else ""
+        ) + f"columns: 3\n\n{prefix}{value}"
+        result.append(formatted)
+        result_by_question[handout["for_question"]].append(formatted)
+    output_dir = os.path.dirname(os.path.abspath(args.filename))
+    bn, _ = os.path.splitext(os.path.basename(args.filename))
+
+    if args.separate:
+        for k, v in result_by_question.items():
+            if len(v) > 1:
+                for i, cnt in enumerate(v):
+                    output_fn = os.path.join(
+                        output_dir, f"{bn}_q{k.zfill(2)}_{i + 1}.txt"
+                    )
+                    print(output_fn)
+                    write_file(output_fn, cnt)
+            else:
+                output_fn = os.path.join(output_dir, f"{bn}_q{str(k).zfill(2)}.txt")
+                print(output_fn)
+                write_file(output_fn, v[0])
+    else:
+        output_fn = os.path.join(output_dir, bn + "_handouts.txt")
+        print(f"output filename: {output_fn}")
+        write_file(output_fn, "\n---\n".join(result))
+
+    if args.list_handouts:
+        generate_handouts_list(handouts, output_dir, bn, parsed)
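gen.py scans a .4s packet for the handout label of the chosen language (or for heuristic markers such as "раздат", "роздан" and "(img") and emits one text block per handout, joined with "---" separators into <packet>_handouts.txt, or written to one file per question with --separate. A sketch of what one such block looks like, built with the same f-string as in the code above (question number and image name are made up):

    handout = {"for_question": "7", "image": "07_handout.jpg"}
    prefix = "image: "
    formatted = f"for_question: {handout['for_question']}\n" + f"columns: 3\n\n{prefix}{handout['image']}"
    print(formatted)
    # for_question: 7
    # columns: 3
    #
    # image: 07_handout.jpg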
chgksuite-0.26.0b4/chgksuite/handouter/installer.py (new file)

@@ -0,0 +1,245 @@
+import functools
+import os
+import platform
+import re
+import shutil
+import subprocess
+import tarfile
+import zipfile
+
+import requests
+
+
+def get_utils_dir():
+    path = os.path.join(os.path.expanduser("~"), ".pecheny_utils")
+    if not os.path.exists(path):
+        os.mkdir(path)
+    return path
+
+
+def escape_latex(text):
+    text = text.replace("\\", "\\textbackslash")
+    text = text.replace("~", "\\textasciitilde")
+    text = text.replace("^", "\\textasciicircum")
+    for char in ("%", "&", "$", "#", "{", "}", "_"):
+        text = text.replace(char, "\\" + char)
+    text = text.replace("\n", "\\linebreak\n")
+    return text
+
+
+def check_tectonic_path(tectonic_path):
+    proc = subprocess.run([tectonic_path, "--help"], capture_output=True, check=True)
+    return proc.returncode == 0
+
+
+def get_tectonic_path():
+    errors = []
+    system = platform.system()
+
+    cpdir = get_utils_dir()
+    if system == "Windows":
+        binary_name = "tectonic.exe"
+        tectonic_path = os.path.join(cpdir, binary_name)
+    else:
+        binary_name = "tectonic"
+        tectonic_path = os.path.join(cpdir, binary_name)
+
+    tectonic_ok = False
+    try:
+        tectonic_ok = check_tectonic_path(binary_name)
+    except FileNotFoundError:
+        pass  # tectonic not found in PATH
+    except subprocess.CalledProcessError as e:
+        errors.append(f"tectonic --version failed: {type(e)} {e}")
+    if tectonic_ok:
+        return binary_name
+    if os.path.isfile(tectonic_path):
+        try:
+            tectonic_ok = check_tectonic_path(tectonic_path)
+        except subprocess.CalledProcessError as e:
+            errors.append(f"tectonic --version failed: {type(e)} {e}")
+        if tectonic_ok:
+            return tectonic_path
+
+
+def github_get_latest_release(repo):
+    url = f"https://api.github.com/repos/{repo}/releases/latest"
+    req = requests.get(url)
+    assets_url = req.json()["assets_url"]
+    assets_req = requests.get(assets_url)
+    return {asset["name"]: asset["browser_download_url"] for asset in assets_req.json()}
+
+
+def darwin_is_emulated():
+    try:
+        sub = subprocess.run(
+            ["sysctl", "-n", "sysctl.proc_translated"], capture_output=True, check=True
+        )
+        out = sub.stdout.decode("utf8").strip()
+        return int(out)
+    except subprocess.CalledProcessError:
+        print("couldn't tell if emulated, returning 0")
+        return 0
+
+
+def parse_tectonic_archive_name(archive_name):
+    if archive_name.endswith(".tar.gz"):
+        archive_name = archive_name[: -len(".tar.gz")]
+    elif archive_name.endswith(".zip"):
+        archive_name = archive_name[: -len(".zip")]
+    else:
+        return
+    sp = archive_name.split("-")
+    result = {
+        "version": sp[1],
+        "arch": sp[2],
+        "manufacturer": sp[3],
+        "system": sp[4],
+    }
+    if len(sp) > 5:
+        result["toolchain"] = sp[5]
+    return result
+
+
+# download_file function taken from https://stackoverflow.com/a/39217788
+def download_file(url):
+    print(f"downloading from {url}...")
+    local_filename = url.split("/")[-1]
+    with requests.get(url, stream=True) as resp:
+        resp.raw.read = functools.partial(resp.raw.read, decode_content=True)
+        with open(local_filename, "wb") as f:
+            shutil.copyfileobj(resp.raw, f, length=16 * 1024 * 1024)
+    return local_filename
+
+
+def extract_zip(zip_file, dirname=None):
+    if dirname is None:
+        dirname = zip_file[:-4]
+    with zipfile.ZipFile(zip_file, "r") as zip_ref:
+        zip_ref.extractall(dirname)
+    os.remove(zip_file)
+
+
+def extract_tar(tar_file, dirname=None):
+    if dirname is None:
+        dirname = tar_file[: tar_file.lower().index(".tar")]
+    tf = tarfile.open(tar_file)
+    tf.extractall(dirname)
+    os.remove(tar_file)
+
+
+def extract_archive(filename, dirname=None):
+    if filename.lower().endswith((".tar", ".tar.gz")):
+        extract_tar(filename, dirname=dirname)
+    elif filename.lower().endswith(".zip"):
+        extract_zip(filename, dirname=dirname)
+
+
+def guess_archive_url(assets):
+    system = platform.system()
+    proc = platform.processor()
+    if system == "Darwin":
+        if proc == "arm" or (proc == "i386" and darwin_is_emulated()):
+            arch = "aarch64"
+        else:
+            arch = "x86_64"
+        for k, v in assets.items():
+            parsed = parse_tectonic_archive_name(k)
+            if not parsed:
+                continue
+            if parsed["arch"] == arch and parsed["system"] == "darwin":
+                return v
+    elif system == "Windows":
+        for k, v in assets.items():
+            parsed = parse_tectonic_archive_name(k)
+            if not parsed:
+                continue
+            if (
+                parsed["arch"] == "x86_64"
+                and parsed["system"] == "windows"
+                and parsed["toolchain"] == "msvc"
+            ):
+                return v
+    elif system == "Linux":
+        for k, v in assets.items():
+            parsed = parse_tectonic_archive_name(k)
+            if not parsed:
+                continue
+            if (
+                (not proc or (proc and parsed["arch"] == proc))
+                and parsed["system"] == "linux"
+                and parsed["toolchain"] == "musl"
+            ):
+                return v
+    raise Exception(f"Archive for system {system} proc {proc} not found")
+
+
+def archive_url_from_regex(assets, regex):
+    for k, v in assets.items():
+        if re.match(regex, k):
+            return v
+    raise Exception(f"Archive for regex {regex} not found")
+
+
+def install_tectonic(args):
+    system = platform.system()
+    assets = github_get_latest_release("tectonic-typesetting/tectonic")
+    if args.tectonic_package_regex:
+        archive_url = archive_url_from_regex(assets, args.tectonic_package_regex)
+    else:
+        archive_url = guess_archive_url(assets)
+    downloaded = download_file(archive_url)
+    dirname = "tectonic_folder"
+    extract_archive(downloaded, dirname=dirname)
+    if system == "Windows":
+        filename = "tectonic.exe"
+    else:
+        filename = "tectonic"
+    target_path = os.path.join(get_utils_dir(), filename)
+    shutil.move(os.path.join(dirname, filename), target_path)
+    shutil.rmtree(dirname)
+    return target_path
+
+
+def install_font(url):
+    fn = url.split("/")[-1].split("?")[0]
+    bn, ext = os.path.splitext(fn)
+    if "." in bn:
+        new_fn = bn.replace(".", "_") + ext
+    else:
+        new_fn = fn
+    dir_name = new_fn[:-4]
+    dir_name_base = dir_name.split(os.pathsep)[-1]
+    fonts_dir = os.path.join(get_utils_dir(), "fonts")
+    if not os.path.exists(fonts_dir):
+        os.makedirs(fonts_dir)
+    target_dir = os.path.join(fonts_dir, dir_name_base)
+    if os.path.isdir(target_dir):
+        print(f"{target_dir} already exists")
+        return
+    download_file(url)
+    if fn != new_fn:
+        os.rename(fn, new_fn)
+    extract_archive(new_fn, dirname=dir_name)
+    if not os.path.isdir(target_dir):
+        shutil.copytree(dir_name, target_dir)
+    shutil.rmtree(dir_name)
+
+
+def find_font(file_name, root_dir=None):
+    root_dir = root_dir or os.path.join(get_utils_dir(), "fonts")
+    if not os.path.isdir(root_dir):
+        os.makedirs(root_dir, exist_ok=True)
+    for dir_, _, files in os.walk(root_dir):
+        for fn in files:
+            if fn == file_name:
+                return os.path.join(dir_, fn)
+    raise Exception(f"{file_name} not found")
+
+
+def install_font_from_github_wrapper(repo):
+    latest = github_get_latest_release(repo)
+    for k, v in latest.items():
+        if k.endswith(".zip"):
+            install_font(v)
+            break