pyeasyphd-0.3.8-py3-none-any.whl → pyeasyphd-0.3.10-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in the public registry.

Potentially problematic release: this version of pyeasyphd might be problematic.

@@ -10,11 +10,18 @@ __all__ = [
     "run_generate_j_weekly",
     "run_generate_j_monthly",
     "run_generate_j_yearly",
+    # from pybibtexer
+    "run_compare_bib_with_local",
+    "run_compare_bib_with_zotero",
+    "run_format_bib_to_save_by_entry_type",
+    "run_format_bib_to_abbr_zotero_save"
 ]

 from .run_article_md import run_article_md_daily_notes
 from .run_article_tex import run_article_tex_submit
 from .run_beamer_tex import run_beamer_tex_weekly_reports
+from .run_compare import run_compare_bib_with_local, run_compare_bib_with_zotero
+from .run_format import run_format_bib_to_abbr_zotero_save, run_format_bib_to_save_by_entry_type
 from .run_generate import (
     run_generate_c_yearly,
     run_generate_j_e_weekly,
@@ -7,6 +7,11 @@ def expand_path(path: str) -> str:
     return os.path.expandvars(os.path.expanduser(path))


+def expand_paths(*paths):
+    # Expand and normalize file paths
+    return [expand_path(path) for path in paths]
+
+
 def build_base_options(
     include_publisher_list: List[str],
     include_abbr_list: List[str],
@@ -27,6 +32,7 @@ def build_base_options(
     Returns:
         Dictionary containing configured options
     """
+    path_conferences_journals_json = expand_path(path_conferences_journals_json)
     return {
         "include_publisher_list": include_publisher_list,
         "include_abbr_list": include_abbr_list,
@@ -0,0 +1,45 @@
+from pybibtexer.tools import compare_bibs_with_local, compare_bibs_with_zotero
+
+from ._base import build_base_options, expand_paths
+
+
+def run_compare_bib_with_local(
+    options: dict,
+    need_compare_bib: str,
+    path_output: str,
+    path_spidered_bibs: str,
+    path_spidering_bibs: str,
+    path_conferences_journals_json: str,
+) -> None:
+    # Expand and normalize file paths
+    need_compare_bib, path_output, path_spidered_bibs, path_spidering_bibs = expand_paths(
+        need_compare_bib, path_output, path_spidered_bibs, path_spidering_bibs
+    )
+
+    # Update options
+    options_ = build_base_options([], [], ["arXiv"], [], path_conferences_journals_json)
+    options_["include_early_access"] = True
+    options_.update(options)
+
+    # Compare
+    compare_bibs_with_local(need_compare_bib, path_spidered_bibs, path_spidering_bibs, path_output, options_)
+
+
+def run_compare_bib_with_zotero(
+    options: dict,
+    need_compare_bib: str,
+    zotero_bib: str,
+    path_output: str,
+    path_conferences_journals_json: str,
+) -> None:
+    # Expand and normalize file paths
+    need_compare_bib, zotero_bib, path_output = expand_paths(
+        need_compare_bib, zotero_bib, path_output
+    )
+
+    # Update options
+    options_ = build_base_options([], [], ["arXiv"], [], path_conferences_journals_json)
+    options_.update(options)
+
+    # Compare
+    compare_bibs_with_zotero(zotero_bib, need_compare_bib, path_output, options_)
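
A usage sketch for the two new compare entry points, based only on the signatures above; every path below is a hypothetical placeholder, and the empty options dict simply keeps the defaults built by build_base_options:

    from pyeasyphd.scripts import run_compare_bib_with_local, run_compare_bib_with_zotero

    # Compare a candidate .bib file against locally spidered bib collections
    run_compare_bib_with_local(
        options={},                                        # extra options merged over the defaults
        need_compare_bib="~/bibs/new_papers.bib",          # hypothetical input file
        path_output="~/bibs/compare_output",               # hypothetical output directory
        path_spidered_bibs="~/bibs/spidered",
        path_spidering_bibs="~/bibs/spidering",
        path_conferences_journals_json="~/data/conferences_journals",
    )

    # Compare the same file against a Zotero .bib export instead
    run_compare_bib_with_zotero(
        options={},
        need_compare_bib="~/bibs/new_papers.bib",
        zotero_bib="~/bibs/zotero_export.bib",
        path_output="~/bibs/compare_output",
        path_conferences_journals_json="~/data/conferences_journals",
    )
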
@@ -0,0 +1,37 @@
+from pathlib import Path
+
+from pybibtexer.tools import format_bib_to_abbr_zotero_save_modes, format_bib_to_save_mode_by_entry_type
+
+from ._base import build_base_options, expand_paths
+
+
+def run_format_bib_to_save_by_entry_type(
+    options: dict,
+    need_format_bib: str,
+    path_output: str,
+    path_conferences_journals_json: str,
+) -> None:
+    # Expand and normalize file paths
+    need_format_bib, path_output = expand_paths(need_format_bib, path_output)
+
+    # Update options
+    options_ = build_base_options([], [], [], [], path_conferences_journals_json)
+    options_.update(options)
+
+    format_bib_to_save_mode_by_entry_type(Path(need_format_bib).stem, path_output, need_format_bib, options=options_)
+
+
+def run_format_bib_to_abbr_zotero_save(
+    options: dict,
+    need_format_bib: str,
+    path_output: str,
+    path_conferences_journals_json: str,
+) -> None:
+    # Expand and normalize file paths
+    need_format_bib, path_output = expand_paths(need_format_bib, path_output)
+
+    # Update options
+    options_ = build_base_options([], [], [], [], path_conferences_journals_json)
+    options_.update(options)
+
+    format_bib_to_abbr_zotero_save_modes(need_format_bib, path_output, options=options_)
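
Similarly, a usage sketch for the new format entry points; the paths are hypothetical placeholders and an empty options dict keeps the defaults:

    from pyeasyphd.scripts import (
        run_format_bib_to_abbr_zotero_save,
        run_format_bib_to_save_by_entry_type,
    )

    # Format a .bib file by entry type (the wrapper passes the input file's stem along to pybibtexer)
    run_format_bib_to_save_by_entry_type(
        options={},
        need_format_bib="~/bibs/library.bib",              # hypothetical input file
        path_output="~/bibs/formatted",                    # hypothetical output directory
        path_conferences_journals_json="~/data/conferences_journals",
    )

    # Format the same file into the abbreviated, Zotero-oriented save modes
    run_format_bib_to_abbr_zotero_save(
        options={},
        need_format_bib="~/bibs/library.bib",
        path_output="~/bibs/formatted",
        path_conferences_journals_json="~/data/conferences_journals",
    )
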
@@ -3,7 +3,7 @@ import os
 from pyeasyphd.tools import PaperLinksGenerator, generate_from_bibs_and_write
 from pyeasyphd.utils.utils import is_last_week_of_month

-from ._base import build_base_options, expand_path
+from ._base import build_base_options, expand_paths


 def run_generate_j_weekly(
@@ -14,11 +14,11 @@ def run_generate_j_weekly(
     path_conferences_journals_json: str,
 ):
     # Expand and normalize file paths
-    path_weekly_docs = expand_path(path_weekly_docs)
+    path_weekly_docs, path_spidering_bibs = expand_paths(path_weekly_docs, path_spidering_bibs)

-    _, path_spidering_bibs, path_conferences_journals_json, full_json_c, full_json_j, full_json_k, options_ = (
-        run_generate_base(options, "", path_spidering_bibs, path_conferences_journals_json)
-    )
+    # Update options
+    options_ = build_base_options([], [], ["arXiv"], [], path_conferences_journals_json)
+    options_.update(options)

     # Generate md and html files
     for gc in ["generate_data", "combine_data"]:
@@ -33,6 +33,11 @@ def run_generate_j_weekly(

     # Generate links
     for keywords_category_name in keywords_category_names:
+        full_json_c, full_json_j, full_json_k = (
+            options_["full_json_c"],
+            options_["full_json_j"],
+            options_["full_json_k"],
+        )
         output_basename = os.path.join("data", "Weekly")
         generator = PaperLinksGenerator(full_json_c, full_json_j, full_json_k, path_weekly_docs, keywords_category_name)
         generator.generate_weekly_links(output_basename)
@@ -47,11 +52,11 @@ def run_generate_j_e_weekly(
     path_conferences_journals_json: str,
 ):
     # Expand and normalize file paths
-    path_weekly_docs = expand_path(path_weekly_docs)
+    path_weekly_docs, path_spidering_bibs = expand_paths(path_weekly_docs, path_spidering_bibs)

-    _, path_spidering_bibs, path_conferences_journals_json, full_json_c, full_json_j, full_json_k, options_ = (
-        run_generate_base(options, "", path_spidering_bibs, path_conferences_journals_json)
-    )
+    # Update options
+    options_ = build_base_options([], [], ["arXiv"], [], path_conferences_journals_json)
+    options_.update(options)
     options_["early_access"] = True

     # Generate md and html files
@@ -73,6 +78,11 @@ def run_generate_j_e_weekly(

     # Generate links
     for keywords_category_name in keywords_category_names:
+        full_json_c, full_json_j, full_json_k = (
+            options_["full_json_c"],
+            options_["full_json_j"],
+            options_["full_json_k"],
+        )
         output_basename = os.path.join("data", "Weekly")
         generator = PaperLinksGenerator(full_json_c, full_json_j, full_json_k, path_weekly_docs, keywords_category_name)
         generator.generate_ieee_early_access_links(output_basename)
@@ -87,11 +97,11 @@ def run_generate_j_monthly(
     path_conferences_journals_json: str,
 ):
     # Expand and normalize file paths
-    path_monthly_docs = expand_path(path_monthly_docs)
+    path_monthly_docs, path_spidering_bibs = expand_paths(path_monthly_docs, path_spidering_bibs)

-    _, path_spidering_bibs, path_conferences_journals_json, full_json_c, full_json_j, full_json_k, options_ = (
-        run_generate_base(options, "", path_spidering_bibs, path_conferences_journals_json)
-    )
+    # Update options
+    options_ = build_base_options([], [], ["arXiv"], [], path_conferences_journals_json)
+    options_.update(options)

     # Generate md and html files
     for gc in ["generate_data", "combine_data"]:
@@ -110,6 +120,11 @@ def run_generate_j_monthly(

     # Generate links
     for keywords_category_name in keywords_category_names:
+        full_json_c, full_json_j, full_json_k = (
+            options_["full_json_c"],
+            options_["full_json_j"],
+            options_["full_json_k"],
+        )
         output_basename = os.path.join("data", "Monthly")
         generator = PaperLinksGenerator(
             full_json_c, full_json_j, full_json_k, path_monthly_docs, keywords_category_name
@@ -127,11 +142,11 @@ def run_generate_j_yearly(
     year_list: list[str],
 ):
     # Expand and normalize file paths
-    path_yearly_docs = expand_path(path_yearly_docs)
+    path_yearly_docs, path_spidered_bibs = expand_paths(path_yearly_docs, path_spidered_bibs)

-    path_spidered_bibs, _, path_conferences_journals_json, full_json_c, full_json_j, full_json_k, options_ = (
-        run_generate_base(options, path_spidered_bibs, "", path_conferences_journals_json)
-    )
+    # Update options
+    options_ = build_base_options([], [], ["arXiv"], [], path_conferences_journals_json)
+    options_.update(options)

     # Generate md and html files
     for gc in ["generate_data", "combine_data"]:
@@ -146,45 +161,17 @@ def run_generate_j_yearly(

     # Generate links
     for keywords_category_name in keywords_category_names:
+        full_json_c, full_json_j, full_json_k = (
+            options_["full_json_c"],
+            options_["full_json_j"],
+            options_["full_json_k"],
+        )
         output_basename = os.path.join("data", "Yearly")
         generator = PaperLinksGenerator(full_json_c, full_json_j, full_json_k, path_yearly_docs, keywords_category_name)
         generator.generate_yearly_links("Journals", output_basename)
         generator.generate_keywords_links_yearly("Journals", output_basename)


-def run_generate_base(
-    options: dict, path_spidered_bibs: str, path_spidering_bibs: str, path_conferences_journals_json: str
-):
-    # Expand and normalize file paths
-    path_spidered_bibs = expand_path(path_spidered_bibs)
-    path_spidering_bibs = expand_path(path_spidering_bibs)
-    path_conferences_journals_json = expand_path(path_conferences_journals_json)
-
-    # Configure options
-    options_ = build_base_options(
-        include_publisher_list=[],
-        include_abbr_list=[],
-        exclude_publisher_list=["arXiv"],
-        exclude_abbr_list=[],
-        path_conferences_journals_json=path_conferences_journals_json,
-    )
-    options_.update(options)
-
-    full_json_c = options_["full_json_c"]
-    full_json_j = options_["full_json_j"]
-    full_json_k = options_["full_json_k"]
-
-    return (
-        path_spidered_bibs,
-        path_spidering_bibs,
-        path_conferences_journals_json,
-        full_json_c,
-        full_json_j,
-        full_json_k,
-        options_
-    )
-
-
 def run_generate_c_yearly(
     options: dict,
     path_yearly_docs: str,
@@ -194,11 +181,11 @@ def run_generate_c_yearly(
     year_list: list[str],
 ):
     # Expand and normalize file paths
-    path_yearly_docs = expand_path(path_yearly_docs)
+    path_yearly_docs, path_spidered_bibs = expand_paths(path_yearly_docs, path_spidered_bibs)

-    path_spidered_bibs, _, path_conferences_journals_json, full_json_c, full_json_j, full_json_k, options_ = (
-        run_generate_base(options, path_spidered_bibs, "", path_conferences_journals_json)
-    )
+    # Update options
+    options_ = build_base_options([], [], ["arXiv"], [], path_conferences_journals_json)
+    options_.update(options)

     # Generate md and html files
     for gc in ["generate_data", "combine_data"]:
@@ -213,6 +200,11 @@ def run_generate_c_yearly(

     # Generate links
     for keywords_category_name in keywords_category_names:
+        full_json_c, full_json_j, full_json_k = (
+            options_["full_json_c"],
+            options_["full_json_j"],
+            options_["full_json_k"],
+        )
         output_basename = os.path.join("data", "Yearly")
         generator = PaperLinksGenerator(full_json_c, full_json_j, full_json_k, path_yearly_docs, keywords_category_name)
         generator.generate_yearly_links("Conferences", output_basename)
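
The run_generate_base helper removed above is replaced by the inline pattern used throughout this file. Assuming build_base_options keeps the parameter order implied by the removed keyword call, the positional and keyword forms are equivalent, as in this sketch (the path is a hypothetical placeholder):

    from pyeasyphd.scripts._base import build_base_options

    path_conferences_journals_json = "~/data/conferences_journals"  # hypothetical path
    extra_options = {}

    # Positional form now used by the run_generate_* functions ...
    options_ = build_base_options([], [], ["arXiv"], [], path_conferences_journals_json)

    # ... which corresponds to the keyword form from the removed helper
    options_keyword = build_base_options(
        include_publisher_list=[],
        include_abbr_list=[],
        exclude_publisher_list=["arXiv"],
        exclude_abbr_list=[],
        path_conferences_journals_json=path_conferences_journals_json,
    )

    # Caller-supplied options still override the defaults
    options_.update(extra_options)
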
@@ -6,7 +6,7 @@ from pybibtexer.tools import compare_bibs_with_zotero

 from pyeasyphd.tools import Searchkeywords

-from ._base import build_base_options, build_search_options, expand_path
+from ._base import build_base_options, build_search_options, expand_path, expand_paths


 def run_search_for_screen(
@@ -29,9 +29,9 @@ def run_search_for_screen(
         path_conferences_journals_json: Path to conferences/journals JSON files
     """
     # Expand and normalize file paths
-    path_spidered_bibs = expand_path(path_spidered_bibs)
-    path_spidering_bibs = expand_path(path_spidering_bibs)
-    path_conferences_journals_json = expand_path(path_conferences_journals_json)
+    path_spidered_bibs, path_spidering_bibs, path_conferences_journals_json = expand_paths(
+        path_spidered_bibs, path_spidering_bibs, path_conferences_journals_json
+    )

     # Configure search options
     options = {
@@ -72,9 +72,9 @@ def run_search_for_files(
     """
     # Expand and normalize file paths
     path_main_output = expand_path(path_main_output)
-    path_spidered_bibs = expand_path(path_spidered_bibs)
-    path_spidering_bibs = expand_path(path_spidering_bibs)
-    path_conferences_journals_json = expand_path(path_conferences_journals_json)
+    path_spidered_bibs, path_spidering_bibs, path_conferences_journals_json = expand_paths(
+        path_spidered_bibs, path_spidering_bibs, path_conferences_journals_json
+    )

     # Configure search options
     options = {
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: pyeasyphd
-Version: 0.3.8
+Version: 0.3.10
 Summary: pyeasyphd
 License: GPL-3.0-or-later
 License-File: LICENSE
@@ -26,13 +26,15 @@ pyeasyphd/main/python_run_tex.py,sha256=9Syu8qRjPXN3gEabfRUWxwTFBm_izIcB4yFhsz3Q
 pyeasyphd/pyeasyphd.py,sha256=OAwbwq2rSXLSk2AoTAF8hmlOMRSRfvDn1Uqk-zkuqH8,3470
 pyeasyphd/pyeasyphd.sublime-settings,sha256=KcXx3DjyVf8UfnB4FP4u-jaTQU3Cuj24OvxGCZvXAsw,3135
 pyeasyphd/pyeasyphd.sublime-syntax,sha256=pXylbA-tye-K5dCTjEJLFVRqtY1T7AgWZ4laxo-dnaE,73
-pyeasyphd/scripts/__init__.py,sha256=3X112jjcmFaOuklm_5g6PQacPQAnMQtLfBicaDGGr5E,780
-pyeasyphd/scripts/_base.py,sha256=YY8dmqBN8mBW1Opu0A5Usd_j2RSeMIxht5kXHrQaBFo,2232
+pyeasyphd/scripts/__init__.py,sha256=iZ5LfEIdN0IQHD9veiqk9Tooti1CHV0D64Nl3zed-gk,1134
+pyeasyphd/scripts/_base.py,sha256=v4RhrUBZ4Hz-tzhXwoVFSrtzlp6Ps9AL7EkXyWz3TOc,2428
 pyeasyphd/scripts/run_article_md.py,sha256=ZjdO03YRDokSqo-Rffby-1p6_P35N4amERKHe4yS6_0,4127
 pyeasyphd/scripts/run_article_tex.py,sha256=TuTEQk7y-Ykos3a_mlEJzyFnT9RKsAYjGXnwHJa5cjY,4672
 pyeasyphd/scripts/run_beamer_tex.py,sha256=UUOadRfmyWGfK__9NzJIUo1lAiDRB5edxRsfhrS3Ejo,4209
-pyeasyphd/scripts/run_generate.py,sha256=pTLg2Bq-gOnvhwDjCeuLnOFl8T7hPYEC5a7rWEMg3AA,8959
-pyeasyphd/scripts/run_search.py,sha256=DPpFcctEuzHCBZQdoGsETzqtIq_dfcMfEPd5lUTNKHg,8106
+pyeasyphd/scripts/run_compare.py,sha256=dNZxDy_mIqvOpOwB2xkLcF6VF1cf8J2yvWk-wrYJndU,1437
+pyeasyphd/scripts/run_format.py,sha256=-289mzLLNrPeBogQlc36Aw9AWMUkV3KwLc2QwkJtqYM,1215
+pyeasyphd/scripts/run_generate.py,sha256=RZ2683tZ5hi0VtHU_i2HqetHw5AhKSnUsO6clSxytSY,8679
+pyeasyphd/scripts/run_search.py,sha256=KPL-qACo_m20b8YU5MtlWB6Fe8c2IJOHpVUe_wcvV2k,8082
 pyeasyphd/tools/__init__.py,sha256=u1MZu_JjVac3HhEmcSTwroS83UVu0W5Vspy3Wu_-GH8,496
 pyeasyphd/tools/generate/generate_from_bibs.py,sha256=Dp1MyADwIRb9qFTFOkMPJLaeeh7NBjuiSLBN7smP2eo,7640
 pyeasyphd/tools/generate/generate_html.py,sha256=JzUEqgTVCaFzd4hXTYUEf0cVSO1QRe0nVUS72W6oyyU,5349
@@ -46,7 +48,7 @@ pyeasyphd/tools/search/search_keywords.py,sha256=YCurXuoYeU1ftve0cb8Hcn_g2FXCXf7
 pyeasyphd/tools/search/search_writers.py,sha256=Dz6D8m17R7x8NT7_PCjwmzlq29AfUz-N6sjyCguDTo4,15702
 pyeasyphd/tools/search/utils.py,sha256=bo7xtIZu31dQvjol1lwyWq1t6ldbw28oondwK8VbAqk,7562
 pyeasyphd/utils/utils.py,sha256=kWxzzgNwz77K9Q7j-RKTaoPpxqiVLVtaBMMhLuEenwE,3128
-pyeasyphd-0.3.8.dist-info/METADATA,sha256=e_gvZv3rIz0OyESQZ8hsaP9mrfyFL_0k304xmmfqhXc,985
-pyeasyphd-0.3.8.dist-info/WHEEL,sha256=zp0Cn7JsFoX2ATtOhtaFYIiE2rmFAD4OcMhtUki8W3U,88
-pyeasyphd-0.3.8.dist-info/licenses/LICENSE,sha256=OXLcl0T2SZ8Pmy2_dmlvKuetivmyPd5m1q-Gyd-zaYY,35149
-pyeasyphd-0.3.8.dist-info/RECORD,,
+pyeasyphd-0.3.10.dist-info/METADATA,sha256=6MIQRik0Rs5kQv9u-FM2i7w5jOI8OoBZp7zKJHY8SmE,986
+pyeasyphd-0.3.10.dist-info/WHEEL,sha256=zp0Cn7JsFoX2ATtOhtaFYIiE2rmFAD4OcMhtUki8W3U,88
+pyeasyphd-0.3.10.dist-info/licenses/LICENSE,sha256=OXLcl0T2SZ8Pmy2_dmlvKuetivmyPd5m1q-Gyd-zaYY,35149
+pyeasyphd-0.3.10.dist-info/RECORD,,