pyeasyphd 0.0.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of pyeasyphd might be problematic.
- pyeasyphd/.python-version +1 -0
- pyeasyphd/Main.sublime-menu +43 -0
- pyeasyphd/__init__.py +0 -0
- pyeasyphd/bib/__init__.py +1 -0
- pyeasyphd/bib/bibtexbase/__init__.py +7 -0
- pyeasyphd/bib/bibtexbase/standardize/_base.py +36 -0
- pyeasyphd/bib/bibtexbase/standardize/default_data.py +97 -0
- pyeasyphd/bib/bibtexbase/standardize/do_on_bib.py +54 -0
- pyeasyphd/bib/bibtexbase/standardize/do_on_comment_block.py +38 -0
- pyeasyphd/bib/bibtexbase/standardize/do_on_entry_block.py +310 -0
- pyeasyphd/bib/bibtexbase/standardize/do_on_preamble_block.py +35 -0
- pyeasyphd/bib/bibtexbase/standardize/do_on_string_block.py +34 -0
- pyeasyphd/bib/bibtexbase/standardize_bib.py +75 -0
- pyeasyphd/bib/bibtexparser/__init__.py +47 -0
- pyeasyphd/bib/bibtexparser/bibtex_format.py +87 -0
- pyeasyphd/bib/bibtexparser/exceptions.py +64 -0
- pyeasyphd/bib/bibtexparser/library.py +207 -0
- pyeasyphd/bib/bibtexparser/middlewares/block/add.py +94 -0
- pyeasyphd/bib/bibtexparser/middlewares/block/authors.py +22 -0
- pyeasyphd/bib/bibtexparser/middlewares/block/doi_url.py +62 -0
- pyeasyphd/bib/bibtexparser/middlewares/block/entry_field_keys_normalize.py +47 -0
- pyeasyphd/bib/bibtexparser/middlewares/block/entry_field_keys_replace.py +31 -0
- pyeasyphd/bib/bibtexparser/middlewares/block/entry_field_values_normalize.py +222 -0
- pyeasyphd/bib/bibtexparser/middlewares/block/entry_fields_delete.py +34 -0
- pyeasyphd/bib/bibtexparser/middlewares/block/entry_fields_keep.py +33 -0
- pyeasyphd/bib/bibtexparser/middlewares/block/entry_fields_sort.py +70 -0
- pyeasyphd/bib/bibtexparser/middlewares/block/entry_types.py +15 -0
- pyeasyphd/bib/bibtexparser/middlewares/block/journal_booktitle.py +113 -0
- pyeasyphd/bib/bibtexparser/middlewares/block/month_year.py +34 -0
- pyeasyphd/bib/bibtexparser/middlewares/block/number_volume.py +21 -0
- pyeasyphd/bib/bibtexparser/middlewares/block/pages.py +28 -0
- pyeasyphd/bib/bibtexparser/middlewares/block/title.py +20 -0
- pyeasyphd/bib/bibtexparser/middlewares/library/generating_entrykeys.py +98 -0
- pyeasyphd/bib/bibtexparser/middlewares/library/keeping_blocks.py +29 -0
- pyeasyphd/bib/bibtexparser/middlewares/library/sorting_blocks.py +124 -0
- pyeasyphd/bib/bibtexparser/middlewares/middleware.py +222 -0
- pyeasyphd/bib/bibtexparser/middlewares/parsestack.py +13 -0
- pyeasyphd/bib/bibtexparser/middlewares/utils.py +226 -0
- pyeasyphd/bib/bibtexparser/middlewares_library_to_library.py +414 -0
- pyeasyphd/bib/bibtexparser/middlewares_library_to_str.py +42 -0
- pyeasyphd/bib/bibtexparser/middlewares_str_to_library.py +35 -0
- pyeasyphd/bib/bibtexparser/middlewares_str_to_str.py +29 -0
- pyeasyphd/bib/bibtexparser/model.py +481 -0
- pyeasyphd/bib/bibtexparser/splitter.py +151 -0
- pyeasyphd/bib/core/__init__.py +18 -0
- pyeasyphd/bib/core/convert_library_to_library.py +31 -0
- pyeasyphd/bib/core/convert_library_to_str.py +199 -0
- pyeasyphd/bib/core/convert_str_to_library.py +34 -0
- pyeasyphd/bib/core/convert_str_to_str.py +27 -0
- pyeasyphd/main/__init__.py +17 -0
- pyeasyphd/main/basic_input.py +149 -0
- pyeasyphd/main/pandoc_md_to.py +361 -0
- pyeasyphd/main/python_run_bib.py +73 -0
- pyeasyphd/main/python_run_md.py +235 -0
- pyeasyphd/main/python_run_tex.py +149 -0
- pyeasyphd/main/python_writers.py +212 -0
- pyeasyphd/pyeasyphd.py +72 -0
- pyeasyphd/pyeasyphd.sublime-settings +235 -0
- pyeasyphd/pyeasyphd.sublime-syntax +5 -0
- pyeasyphd/tools/__init__.py +30 -0
- pyeasyphd/tools/compare/compare_bibs.py +234 -0
- pyeasyphd/tools/experiments_base.py +203 -0
- pyeasyphd/tools/format_save_bibs.py +178 -0
- pyeasyphd/tools/generate/generate_from_bibs.py +447 -0
- pyeasyphd/tools/generate/generate_links.py +356 -0
- pyeasyphd/tools/py_run_bib_md_tex.py +378 -0
- pyeasyphd/tools/replace/replace.py +81 -0
- pyeasyphd/tools/search/data.py +318 -0
- pyeasyphd/tools/search/search_base.py +118 -0
- pyeasyphd/tools/search/search_core.py +326 -0
- pyeasyphd/tools/search/search_keywords.py +227 -0
- pyeasyphd/tools/search/search_writers.py +288 -0
- pyeasyphd/tools/search/utils.py +152 -0
- pyeasyphd/tools/spider/process_spider_bib.py +247 -0
- pyeasyphd/tools/spider/process_spider_url.py +74 -0
- pyeasyphd/tools/spider/process_spider_url_bib.py +62 -0
- pyeasyphd/utils/utils.py +62 -0
- pyeasyphd-0.0.2.dist-info/METADATA +27 -0
- pyeasyphd-0.0.2.dist-info/RECORD +80 -0
- pyeasyphd-0.0.2.dist-info/WHEEL +4 -0
pyeasyphd/tools/search/data.py

@@ -0,0 +1,318 @@

```python
from typing import Any, Dict


def obtain_search_keywords() -> Dict[str, Any]:
    """Keywords."""
    _h_ = "(?:| |-)"  # hyphen: matches "", " ", or "-"

    evol = "evol(?:ution|utionary)"  # 'evol(?:ution|utionary|ve|ved|ving)'
    computation = "computation(?:|al)"
    strateg = "strateg(?:y|ies)"
    program = "program(?:|ming)"
    algorithm = "algorithm(?:|s)"
    automat = "automat(?:ed|ion)"
    keywords_ec = [  # evolutionary computation
        ["simulated annealing"],
        ["taboo search"],
        [f"{evol} {strateg}"],
        ["CMA-ES"],
        [f"{evol} {program}"],
        [f"differential {evol}"],
        [f"{evol} {algorithm}"],
        [[evol], [strateg, program, "differential", algorithm]],
        [f"genetic {algorithm}"],
        [f"genetic {program}"],
        [["genetic"], [algorithm, program]],
        ["particle swarm"],
        [["swarm"], ["particle"]],
        ["ant colony"],
        ["bee colony"],
        [["colony"], ["ant", "bee"]],
        [f"memetic {algorithm}"],
        [f"population{_h_}based"],
        ["quality diversity"],
        [evol, algorithm, automat],
        [evol, computation],
    ]

    keywords_ss = [  # search strategy
        ["local search"],
        [["local", "search"], ["local search"]],
        ["local optimization"],
        [["local", "optimization"], ["local optimization"]],
        ["random search"],
        [["random", "search"], ["random search"]],
        ["random optimization"],
        [["random", "optimization"], ["random optimization"]],
        ["global search"],
        [["global", "search"], ["global search"]],
        ["global optimization"],
        [["global", "optimization"], ["global optimization"]],
        ["heuristic search"],
        [["heuristic", "search"], ["heuristic search"]],
        ["heuristic optimization"],
        [["heuristic", "optimization"], ["heuristic optimization"]],
    ]

    nsga = "NSGA(?:|II|-II|III|-III)"
    moea_d = "MOEA/D"
    network = "network(?:|s)"
    uncertain = "uncertain(?:|ty)"
    keywords_multi = [  # multi-objective
        [moea_d],
        [nsga],
        [f"multi{_h_}objective optimization"],
        [
            [f"multi{_h_}objective", "optimization"],
            [f"multi{_h_}objective optimization"],
        ],
        [[f"multi{_h_}objective"], ["optimization"]],
        [f"multi{_h_}model optimization"],
        [[f"multi{_h_}model", "optimization"], [f"multi{_h_}model optimization"]],
        [[f"multi{_h_}model"], ["optimization"]],
        [f"many{_h_}objective optimization"],
        [[f"many{_h_}objective", "optimization"], [f"many{_h_}objective optimization"]],
        [[f"many{_h_}objective"], ["optimization"]],
        [f"dynamic multi{_h_}objective"],
        [f"dynamic {evol} multi{_h_}objective"],
        [
            ["dynamic", f"multi{_h_}objective"],
            [
                f"dynamic multi{_h_}objective",
                f"dynamic {evol} multi{_h_}objective",
            ],
        ],
        [f"dynamic multi{_h_}model"],
        [["dynamic", f"multi{_h_}model"], [f"dynamic multi{_h_}model"]],
        [f"dynamic many{_h_}objective"],
        [f"dynamic {evol} many{_h_}objective"],
        [
            ["dynamic", f"many{_h_}objective"],
            [
                f"dynamic many{_h_}objective",
                f"dynamic {evol} many{_h_}objective",
            ],
        ],
        ["dynamic", "optimization"],
        ["dynamic", network],
        [
            ["dynamic"],
            [
                f"multi{_h_}objective",
                f"multi{_h_}model",
                f"many{_h_}objective",
                "optimization",
                network,
            ],
        ],
        [f"{uncertain} optimization"],
        [[uncertain, "optimization"], [f"{uncertain} optimization"]],
        [[uncertain], ["optimization"]],
        ["pareto optimization"],
        [["pareto", "optimization"], ["pareto optimization"]],
        [["pareto"], ["optimization"]],
    ]

    dimension = "dimension(?:|al)"
    distribut = "distribut(?:ion|ed)"
    keywords_parallel = [  # parallel
        [f"large{_h_}scale"],
        [f"high{_h_}{dimension}"],
        [f"high{_h_}performance"],
        ["parallel", evol],
        ["parallel", algorithm],
        [["parallel"], [evol, algorithm]],
        [distribut, evol],
        [distribut, algorithm],
        [[distribut], [evol, algorithm]],
    ]

    keywords_mo = [  # math optimization
        [f"zero{_h_}orde", "optimization"],
        ["coordinate", "descent"],
        ["gradient", "descent"],
        ["gradient", "stochastic"],
        [["gradient"], ["descent", "stochastic"]],
        ["convex", "optimization"],
        [f"non{_h_}convex", "optimization"],
        [["convex"], [f"non{_h_}convex", "optimization"]],
        [[f"non{_h_}convex"], ["convex", "optimization"]],
        ["stochastic", "optimization"],
        [["stochastic"], ["optimization"]],
        ["gaussian", "distribution"],
    ]

    multi_task = "multi(?:|-)task"
    federa = "federa(?:l|ted)"
    weakly_ = f"weakly{_h_}"
    generat = "generat(?:ive|ion)"
    keywords_ml = [  # machine learning
        ["automated", "machine", "learning"],
        [["machine", "learning"], [automat]],
        ["deep", "learning"],
        [f"semi{_h_}supervised", "learning"],
        [f"self{_h_}supervised", "learning"],
        [f"{weakly_}supervised", "learning"],
        ["unsupervised", "learning"],
        [f"multi{_h_}instance", "learning"],
        ["active", "learning"],
        [
            ["supervised", "learning"],
            [
                f"semi{_h_}supervised",
                f"self{_h_}supervised",
                f"weakly{_h_}supervised",
                "unsupervised",
            ],
        ],
        ["reinforcement", "learning", f"on{_h_}policy"],
        ["reinforcement", "learning", f"off{_h_}policy"],
        ["reinforcement", "learning", "offline"],
        ["reinforcement", "learning", f"model{_h_}based"],
        ["reinforcement", "learning", "continual"],
        ["reinforcement", "learning", "deep"],
        ["reinforcement", "learning", evol],
        [
            ["reinforcement", "learning"],
            [
                "offline",
                f"on{_h_}policy",
                f"off{_h_}policy",
                f"model{_h_}based",
                "deep",
                "continual",
                evol,
            ],
        ],
        ["policy", "search"],
        [["policy"], ["policy", "search"]],
        [f"q{_h_}learning"],
        ["manifold", "learning"],
        [["manifold"], ["Learning"]],
        [multi_task, "learning"],
        [[multi_task], ["learning"]],
        ["transfe", "learning"],
        [["transfe"], ["Learning"]],
        ["domain", "adaptation"],
        ["domain", "generalization"],
        [f"meta{_h_}learning"],
        [[f"meta{_h_}learning"], ["learning"]],
        [federa, "learning"],
        [[federa], ["learning"]],
        ["ensemble", "learning"],
        [["ensemble"], ["learning"]],
        ["online", "learning"],
        [f"few{_h_}shot", "learning"],
        [[f"few{_h_}shot"], ["learning"]],
        [f"one{_h_}shot", "learning"],
        [[f"one{_h_}shot"], ["learning"]],
        [f"zero{_h_}shot", "learning"],
        [[f"zero{_h_}shot"], ["learning"]],
        ["representation", "learning"],
        [["representation"], ["learning"]],
        ["induction"],
        ["deduction"],
        ["transduction"],
        ["neural", network],
        ["graph", network],
        [[network], ["graph", "neural"]],
        [["graph"], [network, "neural"]],
        ["kernel"],
        ["embedding"],
        ["transformer"],
        ["diffusion", "model"],
        [["diffusion"], ["model"]],
        [generat, "model"],
        [[generat], ["model"]],
        ["large language model"],
        [["large", "language", "model"], ["large language model"]],
    ]

    cluster = "cluster(?:|s|ing)"
    data_driven = "data(?:| |-)driven"
    prove = "prov(?:able|e)"
    predict = "predict(?:|ed|ion)"
    recommend = "recommend(?:ed|ation)"
    markov = "markov(?:|ian)"
    keywords_ec_ml = [  # evolutionary computation and machine learning
        ["neuro(?:| |-)evolution"],
        ["adaptation"],
        ["bayesian", "optimization"],
        ["bi-level", "optimization"],
        ["bayesian", "inference"],
        ["bayesian", "learning"],
        [["bayesian"], ["optimization", "inference", "learning"]],
        [markov, "decision"],
        [markov, "chain"],
        [[markov], ["decision", "chain"]],
        [prove],
        ["time", "series"],
        [cluster],
        [f"co{_h_}evolution", f"co{_h_}operation"],
        [[f"co{_h_}evolution"], [f"co{_h_}operation"]],
        [[f"co{_h_}operation"], [f"co{_h_}evolution"]],
        [data_driven],
        [predict],
        [recommend, "system"],
        [distribut, "shift"],
    ]

    converg = "converg(?:e|ence|ent|ed|ing)"
    theor = "theor(?:y|etic|etical|etically)"
    analy = "analy(?:ze|sis|zed|zing)"
    bound = "bound(?:|s)"
    run = "run(?:|ning)"
    keywords_theory = [  # theory
        ["drift", "analysis"],
        ["hitting", "time"],
        [evol, converg],
        [evol, "time"],
        [evol, theor],
        [evol, bound],
        [evol, "complexity"],
        ["swarm", converg],
        ["swarm", "time"],
        ["swarm", theor],
        ["swarm", bound],
        ["swarm", "complexity"],
        ["colony", converg],
        ["colony", "time"],
        ["colony", theor],
        ["colony", bound],
        ["colony", "complexity"],
        ["genetic", converg],
        ["genetic", "time"],
        ["genetic", theor],
        ["genetic", bound],
        ["genetic", "complexity"],
        [analy, converg],
        [analy, "time"],
        [analy, theor],
        [analy, bound],
        [analy, "complexity"],
        [computation, "time"],
        [f"{run} time"],
        ["upper", bound],
        ["lower", bound],
        [[converg], [evol, "swarm", "colony", "genetic", analy]],
        [
            ["time"],
            [evol, "swarm", "colony", "genetic", analy, "hitting", computation, run],
        ],
        [[theor], [evol, "swarm", "colony", "genetic", analy]],
        [[bound], [evol, "swarm", "colony", "genetic", analy, "upper", "lower"]],
        [["complexity"], [evol, "swarm", "colony", "genetic", analy]],
        [[analy], [converg, "time", theor, bound, "complexity"]],
    ]

    keywords_dict = {
        "EC": keywords_ec,
        "SS": keywords_ss,
        "Multi": keywords_multi,
        "Parallel": keywords_parallel,
        "MO": keywords_mo,
        "ML": keywords_ml,
        "ECML": keywords_ec_ml,
        "Theory": keywords_theory,
    }
    return keywords_dict
```
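Two entry shapes recur in these lists: a flat list of regex fragments (e.g. `[evol, computation]`), all of which must co-occur in the searched field, and a pair of lists (e.g. `[["genetic"], [algorithm, program]]`), where every pattern in the first list must match and no pattern in the second may. That reading follows from `search_keywords_core` in the next hunk, which treats `keywords_list_list[0]` as required patterns and an optional `keywords_list_list[1]` as exclusions; flat entries are presumably wrapped into that shape upstream (likely in `search_core.py`, which is not part of this diff). A minimal self-contained sketch of the matching rule, with an illustrative pattern pair:

```python
import re

_h_ = "(?:| |-)"  # hyphen: matches "", " ", or "-"

# [required-patterns, excluded-patterns]; the name and pattern choice here
# are illustrative, not taken from the package.
keywords_list_list = [[f"multi{_h_}objective"], ["optimization"]]


def matches(content: str) -> bool:
    # Mirrors search_keywords_core: strip braces, require every pattern in
    # [0], then reject on any pattern in [1] (all case-insensitive).
    content = content.replace("{", "").replace("}", "")
    flag = all(re.search(k, content, flags=re.I) for k in keywords_list_list[0])
    if flag and len(keywords_list_list) == 2:
        flag = not any(re.search(k, content, flags=re.I) for k in keywords_list_list[1])
    return flag


print(matches("A Multi-Objective Evolutionary Framework"))    # True
print(matches("Multi-objective optimization of wind farms"))  # False: excluded term
```

Note that several fragments are deliberate prefixes rather than typos: because `re.search` matches substrings, `"transfe"` covers both "transfer" and "transferred", and `f"zero{_h_}orde"` covers "zero-order".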
pyeasyphd/tools/search/search_base.py

@@ -0,0 +1,118 @@

```python
import copy
import re
from typing import Dict, List, Tuple

from ...bib.bibtexparser import Library
from ...main import PythonRunBib, PythonWriters
from .search_writers import WriteInitialResult, WriteSeparateResult


def search_keywords_core(keywords_list_list: List[List[str]], library: Library, field: str) -> Tuple[Library, Library]:
    """Search keywords in `field`, such as `title`, `abstract`, or `keywords`."""
    search_library = []
    no_search_library = []

    for entry in library.entries:
        flag = False
        content = entry[field] if field in entry else ""
        if content:
            content = re.sub("{", "", content)
            content = re.sub("}", "", content)

            # Every pattern in keywords_list_list[0] must be found in the entry.
            flag = all([re.search(keyword, content, flags=re.I) for keyword in keywords_list_list[0]])
            if flag and (len(keywords_list_list) == 2):
                # Any pattern from keywords_list_list[1] found in the entry results in a False flag.
                flag = not any([re.search(keyword, content, flags=re.I) for keyword in keywords_list_list[1]])

        if flag:
            search_library.append(entry)
        else:
            no_search_library.append(entry)

    return Library(search_library), Library(no_search_library)


class SearchInitialResult(object):
    """Search initial result.

    Args:
        options: dict

    Attributes:
        options: dict

        print_on_screen (bool = False): print results to the screen instead of writing files
        deepcopy_library_for_every_field (bool = False): restore a deep copy of the full
            library after each field, so every field is searched against all entries
    """

    def __init__(self, options: dict) -> None:
        self.options = options

        self.print_on_screen: bool = options.get("print_on_screen", False)
        self.deepcopy_library_for_every_field = options.get("deepcopy_library_for_every_field", False)

        self._python_bib = PythonRunBib(options)

        _options = {}
        _options["empty_entry_cite_keys"] = True
        _options.update(self.options)
        self._python_writer = PythonWriters(_options)

    def main(
        self,
        search_field_list: List[str],
        path_initial: str,
        library: Library,
        keywords_type: str,
        keywords_list_list: List[List[str]],
        combine_keywords: str,
        output_prefix: str,
        path_separate: str,
    ) -> Tuple[List[str], Dict[str, List[List[str]]], Dict[str, int], Library]:
        """Search."""
        error_pandoc_md_md, field_data_dict, no_search_library = [], {}, library
        field_number_dict: Dict[str, int] = {}

        for field in search_field_list:
            if len(no_search_library.entries) == 0:
                continue

            # Search
            search_library, no_search_library = search_keywords_core(keywords_list_list, no_search_library, field)
            field_number_dict.update({field: len(search_library.entries)})

            # Restore a deep copy of the full library for every field
            if self.deepcopy_library_for_every_field:
                no_search_library = copy.deepcopy(library)

            # Operate on a deep copy of the search library
            libraries = self._python_bib.parse_to_multi_standard_library(copy.deepcopy(search_library))
            library_for_abbr, library_for_zotero, library_for_save = libraries

            if self.print_on_screen:
                print("".join(self._python_writer.write_to_str(library_for_zotero)))
                continue
            if not (library_for_abbr.entries and library_for_zotero.entries and library_for_save.entries):
                continue

            # Initially write tex, bib, and md files
            data_temp, temp_error_pandoc_md_md = WriteInitialResult(copy.deepcopy(self.options)).main(
                path_initial,
                output_prefix,
                field,
                keywords_type,
                combine_keywords,
                library_for_abbr,
                library_for_zotero,
                library_for_save,
            )

            # Separately write with the method 'a' for '_basic', '_beauty', '_complex'
            WriteSeparateResult().main(copy.deepcopy(data_temp), field, keywords_type, combine_keywords, path_separate)

            # Save for combined results
            field_data_dict.update({field: copy.deepcopy(data_temp)})
            error_pandoc_md_md.extend(temp_error_pandoc_md_md)

        return error_pandoc_md_md, field_data_dict, field_number_dict, no_search_library
```
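For orientation, the loop in `main` is a cascade: each field in `search_field_list` is searched only over the entries that no earlier field matched, so an entry is counted under the first field that hits (unless `deepcopy_library_for_every_field` restores the full library each round). A minimal sketch of that cascade, using plain dicts in place of pyeasyphd's `Library` and entry objects (illustrative only, not the package API):

```python
import re


def cascade(entries, keywords_list_list, fields):
    # entries: list of dicts standing in for bib entries.
    counts, pool = {}, entries
    for field in fields:
        hit, miss = [], []
        for e in pool:
            content = e.get(field, "").replace("{", "").replace("}", "")
            ok = all(re.search(k, content, flags=re.I) for k in keywords_list_list[0])
            if ok and len(keywords_list_list) == 2:
                ok = not any(re.search(k, content, flags=re.I) for k in keywords_list_list[1])
            (hit if ok else miss).append(e)
        counts[field] = len(hit)
        pool = miss  # later fields only see what earlier fields missed
    return counts, pool


entries = [
    {"title": "Ant colony optimization", "abstract": ""},
    {"title": "A survey of metaheuristics", "abstract": "covers ant colony methods"},
]
print(cascade(entries, [["ant colony"]], ["title", "abstract"]))
# ({'title': 1, 'abstract': 1}, []) -- each entry counted under the first matching field
```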