pyeasyphd 0.3.5__py3-none-any.whl → 0.3.7__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of pyeasyphd might be problematic. Click here for more details.

@@ -5,9 +5,21 @@ __all__ = [
5
5
  "run_search_for_screen",
6
6
  "run_search_for_files",
7
7
  "run_compare_after_search",
8
+ "run_generate_c_Yearly",
9
+ "run_generate_j_e_Weekly",
10
+ "run_generate_j_Weekly",
11
+ "run_generate_j_Monthly",
12
+ "run_generate_j_Yearly",
8
13
  ]
9
14
 
10
15
  from .run_article_md import run_article_md_daily_notes
11
16
  from .run_article_tex import run_article_tex_submit
12
17
  from .run_beamer_tex import run_beamer_tex_weekly_reports
13
- from .run_search_keywords import run_compare_after_search, run_search_for_files, run_search_for_screen
18
+ from .run_generate import (
19
+ run_generate_c_Yearly,
20
+ run_generate_j_e_Weekly,
21
+ run_generate_j_Monthly,
22
+ run_generate_j_Weekly,
23
+ run_generate_j_Yearly,
24
+ )
25
+ from .run_search import run_compare_after_search, run_search_for_files, run_search_for_screen
@@ -0,0 +1,61 @@
1
+ import os
2
+ from typing import Any, Dict, List
3
+
4
+
5
def expand_path(path: str) -> str:
    """Expand user home directory and environment variables in path."""
    # Expand "~" first, then any "$VAR" / "${VAR}" references.
    with_user = os.path.expanduser(path)
    return os.path.expandvars(with_user)
8
+
9
+
10
def build_base_options(
    include_publisher_list: List[str],
    include_abbr_list: List[str],
    exclude_publisher_list: List[str],
    exclude_abbr_list: List[str],
    path_conferences_journals_json: str,
) -> Dict[str, Any]:
    """Assemble the option dictionary shared by the search/generate scripts.

    Args:
        include_publisher_list: Publishers to include
        include_abbr_list: Conference/journal abbreviations to include
        exclude_publisher_list: Publishers to exclude
        exclude_abbr_list: Conference/journal abbreviations to exclude
        path_conferences_journals_json: Base path for conferences/journals JSON files

    Returns:
        Dictionary containing configured options
    """
    options: Dict[str, Any] = {
        "include_publisher_list": include_publisher_list,
        "include_abbr_list": include_abbr_list,
        "exclude_publisher_list": exclude_publisher_list,
        "exclude_abbr_list": exclude_abbr_list,
    }
    # Resolve the three metadata JSON files relative to the base directory.
    for key, filename in (
        ("full_json_c", "conferences.json"),
        ("full_json_j", "journals.json"),
        ("full_json_k", "keywords.json"),
    ):
        options[key] = os.path.join(path_conferences_journals_json, filename)
    return options
39
+
40
+
41
def build_search_options(
    print_on_screen: bool, search_year_list: List[str], keywords_type: str, keywords_list_list: List[List[str]]
) -> Dict[str, Any]:
    """Assemble the search-specific option dictionary.

    Args:
        print_on_screen: Whether to display results on screen
        search_year_list: List of years to filter search results
        keywords_type: Category name for search keywords
        keywords_list_list: Nested list of search keywords

    Returns:
        Dictionary containing configured search options
    """
    # The keywords are grouped under a single category name.
    keywords_dict: Dict[str, List[List[str]]] = {keywords_type: keywords_list_list}
    return dict(
        print_on_screen=print_on_screen,
        search_year_list=search_year_list,
        keywords_dict=keywords_dict,
        keywords_type_list=[keywords_type],
    )
@@ -0,0 +1,219 @@
1
+ import os
2
+
3
+ from pyeasyphd.tools import PaperLinksGenerator, generate_from_bibs_and_write
4
+ from pyeasyphd.utils.utils import is_last_week_of_month
5
+
6
+ from ._base import build_base_options, expand_path
7
+
8
+
9
def run_generate_j_Weekly(
    options: dict,
    path_weekly_docs: str,
    keywords_category_names: list[str],
    path_spidering_bibs: str,
    path_conferences_journals_json: str,
):
    """Generate the weekly journal md/html pages and their link indexes."""
    # Expand and normalize file paths
    path_weekly_docs = expand_path(path_weekly_docs)

    _, path_spidering_bibs, path_conferences_journals_json, full_json_c, full_json_j, full_json_k, options_ = (
        run_generate_base(options, "", path_spidering_bibs, path_conferences_journals_json)
    )

    # Loop-invariant paths, hoisted out of the generation loops.
    output_basename = os.path.join("data", "Weekly")
    storage_dir = os.path.join(path_spidering_bibs, "spider_j")
    target_dir = os.path.expanduser(os.path.join(path_weekly_docs, output_basename, "Journals"))

    # Generate md and html files for the current issue and the current month.
    for mode in ("generate_data", "combine_data"):
        for flag in ("current_issue", "current_month"):
            generate_from_bibs_and_write(
                storage_dir, target_dir, output_basename, "Journals", mode, "current_year", flag, options_
            )

    # Generate the per-category link pages.
    for category_name in keywords_category_names:
        generator = PaperLinksGenerator(full_json_c, full_json_j, full_json_k, path_weekly_docs, category_name)
        generator.generate_weekly_links(output_basename)
        generator.generate_keywords_links_weekly("Journals", output_basename)
+ generator.generate_keywords_links_weekly("Journals", output_basename)
40
+
41
+
42
def run_generate_j_e_Weekly(
    options: dict,
    path_weekly_docs: str,
    keywords_category_names: list[str],
    path_spidering_bibs: str,
    path_conferences_journals_json: str,
):
    """Generate weekly pages for early-access journal papers and their links."""
    # Expand and normalize file paths
    path_weekly_docs = expand_path(path_weekly_docs)

    _, path_spidering_bibs, path_conferences_journals_json, full_json_c, full_json_j, full_json_k, options_ = (
        run_generate_base(options, "", path_spidering_bibs, path_conferences_journals_json)
    )
    # Mark the run as early-access so downstream generation treats it as such.
    options_["early_access"] = True

    # Loop-invariant paths, hoisted out of the generation loops.
    output_basename = os.path.join("data", "Weekly")
    storage_dir = os.path.join(path_spidering_bibs, "spider_j_e")
    target_dir = os.path.expanduser(os.path.join(path_weekly_docs, output_basename, "Journals"))

    # Generate md and html files: the current month of the current year, then
    # the cumulative all-years / all-months view.
    for mode in ("generate_data", "combine_data"):
        generate_from_bibs_and_write(
            storage_dir, target_dir, output_basename, "Journals", mode, "current_year", "current_month", options_
        )
        generate_from_bibs_and_write(
            storage_dir, target_dir, output_basename, "Journals", mode, "all_years", "all_months", options_
        )

    # Generate the per-category link pages.
    for category_name in keywords_category_names:
        generator = PaperLinksGenerator(full_json_c, full_json_j, full_json_k, path_weekly_docs, category_name)
        generator.generate_ieee_early_access_links(output_basename)
        generator.generate_keywords_links_weekly("Journals", output_basename)
80
+
81
+
82
def run_generate_j_Monthly(
    options: dict,
    path_monthly_docs: str,
    keywords_category_names: list[str],
    path_spidering_bibs: str,
    path_conferences_journals_json: str,
):
    """Generate the monthly journal md/html pages and their link indexes.

    The md/html generation step only runs during the last week of the month;
    the link pages are regenerated on every invocation.

    Args:
        options: Caller overrides merged on top of the shared base options.
        path_monthly_docs: Output root directory for the monthly documents.
        keywords_category_names: Keyword categories to build link pages for.
        path_spidering_bibs: Root of the in-progress spider bib storage.
        path_conferences_journals_json: Directory with the metadata JSON files.
    """
    # Expand and normalize file paths
    path_monthly_docs = expand_path(path_monthly_docs)

    _, path_spidering_bibs, path_conferences_journals_json, full_json_c, full_json_j, full_json_k, options_ = (
        run_generate_base(options, "", path_spidering_bibs, path_conferences_journals_json)
    )

    output_basename = os.path.join("data", "Monthly")

    # Generate md and html files ("all_months" aggregation). The gate is
    # hoisted here: the original iterated a single-element flag list and
    # re-checked the flag inside the loop, which was dead code.
    if is_last_week_of_month():
        path_storage = os.path.join(path_spidering_bibs, "spider_j")
        path_output = os.path.expanduser(os.path.join(path_monthly_docs, output_basename, "Journals"))
        for gc in ["generate_data", "combine_data"]:
            generate_from_bibs_and_write(
                path_storage, path_output, output_basename, "Journals", gc, "current_year", "all_months", options_
            )

    # Generate links
    for keywords_category_name in keywords_category_names:
        generator = PaperLinksGenerator(
            full_json_c, full_json_j, full_json_k, path_monthly_docs, keywords_category_name
        )
        generator.generate_monthly_links(output_basename)
        generator.generate_keywords_links_monthly("Journals", output_basename)
119
+
120
+
121
def run_generate_j_Yearly(
    options: dict,
    path_yearly_docs: str,
    keywords_category_names: list[str],
    path_spidered_bibs: str,
    path_conferences_journals_json: str,
    year_list: list[str],
):
    """Generate the yearly journal md/html pages and their link indexes."""
    # Expand and normalize file paths
    path_yearly_docs = expand_path(path_yearly_docs)

    path_spidered_bibs, _, path_conferences_journals_json, full_json_c, full_json_j, full_json_k, options_ = (
        run_generate_base(options, path_spidered_bibs, "", path_conferences_journals_json)
    )

    # Loop-invariant paths, hoisted out of the generation loops.
    output_basename = os.path.join("data", "Yearly")
    storage_dir = os.path.join(path_spidered_bibs, "Journals")
    target_dir = os.path.expanduser(os.path.join(path_yearly_docs, output_basename, "Journals"))

    # Generate md and html files, one pass per requested year
    # (e.g. "2024", "2023", ..., "2015").
    for mode in ("generate_data", "combine_data"):
        for year in year_list:
            generate_from_bibs_and_write(
                storage_dir, target_dir, output_basename, "Journals", mode, [year], "all_months", options_
            )

    # Generate the per-category link pages.
    for category_name in keywords_category_names:
        generator = PaperLinksGenerator(full_json_c, full_json_j, full_json_k, path_yearly_docs, category_name)
        generator.generate_yearly_links("Journals", output_basename)
        generator.generate_keywords_links_yearly("Journals", output_basename)
153
+
154
+
155
def run_generate_base(
    options: dict, path_spidered_bibs: str, path_spidering_bibs: str, path_conferences_journals_json: str
):
    """Expand paths and build the merged options shared by all generators.

    Returns:
        A 7-tuple: the three expanded paths, the three metadata JSON file
        paths (conferences, journals, keywords), and the merged options dict.
    """
    # Expand and normalize file paths
    path_spidered_bibs = expand_path(path_spidered_bibs)
    path_spidering_bibs = expand_path(path_spidering_bibs)
    path_conferences_journals_json = expand_path(path_conferences_journals_json)

    # Start from the shared defaults, then let the caller's options win.
    options_ = build_base_options(
        include_publisher_list=[],
        include_abbr_list=[],
        exclude_publisher_list=["arXiv"],
        exclude_abbr_list=[],
        path_conferences_journals_json=path_conferences_journals_json,
    )
    options_.update(options)

    return (
        path_spidered_bibs,
        path_spidering_bibs,
        path_conferences_journals_json,
        options_["full_json_c"],
        options_["full_json_j"],
        options_["full_json_k"],
        options_,
    )
186
+
187
+
188
def run_generate_c_Yearly(
    options: dict,
    path_yearly_docs: str,
    keywords_category_names: list[str],
    path_spidered_bibs: str,
    path_conferences_journals_json: str,
    year_list: list[str],
):
    """Generate the yearly conference md/html pages and their link indexes."""
    # Expand and normalize file paths
    path_yearly_docs = expand_path(path_yearly_docs)

    path_spidered_bibs, _, path_conferences_journals_json, full_json_c, full_json_j, full_json_k, options_ = (
        run_generate_base(options, path_spidered_bibs, "", path_conferences_journals_json)
    )

    # Loop-invariant paths, hoisted out of the generation loops.
    output_basename = os.path.join("data", "Yearly")
    storage_dir = os.path.join(path_spidered_bibs, "Conferences")
    target_dir = os.path.expanduser(os.path.join(path_yearly_docs, output_basename, "Conferences"))

    # Generate md and html files, one pass per requested year
    # (e.g. "2025", "2024", ..., "2015").
    for mode in ("generate_data", "combine_data"):
        for year in year_list:
            generate_from_bibs_and_write(
                storage_dir, target_dir, output_basename, "Conferences", mode, [year], "all_months", options_
            )

    # Generate the per-category link pages.
    for category_name in keywords_category_names:
        generator = PaperLinksGenerator(full_json_c, full_json_j, full_json_k, path_yearly_docs, category_name)
        generator.generate_yearly_links("Conferences", output_basename)
        generator.generate_keywords_links_yearly("Conferences", output_basename)
@@ -6,6 +6,8 @@ from pybibtexer.tools import compare_bibs_with_zotero
6
6
 
7
7
  from pyeasyphd.tools import Searchkeywords
8
8
 
9
+ from ._base import build_base_options, build_search_options, expand_path
10
+
9
11
 
10
12
  def run_search_for_screen(
11
13
  acronym: str,
@@ -27,22 +29,23 @@ def run_search_for_screen(
27
29
  path_conferences_journals_json: Path to conferences/journals JSON files
28
30
  """
29
31
  # Expand and normalize file paths
30
- path_spidered_bibs = _expand_path(path_spidered_bibs)
31
- path_spidering_bibs = _expand_path(path_spidering_bibs)
32
- path_conferences_journals_json = _expand_path(path_conferences_journals_json)
32
+ path_spidered_bibs = expand_path(path_spidered_bibs)
33
+ path_spidering_bibs = expand_path(path_spidering_bibs)
34
+ path_conferences_journals_json = expand_path(path_conferences_journals_json)
33
35
 
34
36
  # Configure search options
35
- options = _build_search_options(
36
- print_on_screen=True,
37
- search_year_list=[str(year)],
38
- include_publisher_list=[],
39
- include_abbr_list=[acronym],
40
- exclude_publisher_list=["arXiv"],
41
- exclude_abbr_list=[],
42
- keywords_type="Temp",
43
- keywords_list_list=[[title]],
44
- path_conferences_journals_json=path_conferences_journals_json,
45
- )
37
+ options = {
38
+ **build_base_options(
39
+ include_publisher_list=[],
40
+ include_abbr_list=[acronym],
41
+ exclude_publisher_list=["arXiv"],
42
+ exclude_abbr_list=[],
43
+ path_conferences_journals_json=path_conferences_journals_json,
44
+ ),
45
+ **build_search_options(
46
+ print_on_screen=True, search_year_list=[str(year)], keywords_type="Temp", keywords_list_list=[[title]]
47
+ ),
48
+ }
46
49
 
47
50
  # Execute searches across different bibliography sources
48
51
  _execute_searches(options, "", path_spidered_bibs, path_spidering_bibs)
@@ -68,75 +71,31 @@ def run_search_for_files(
68
71
  path_conferences_journals_json: Path to conferences/journals JSON files
69
72
  """
70
73
  # Expand and normalize file paths
71
- path_main_output = _expand_path(path_main_output)
72
- path_spidered_bibs = _expand_path(path_spidered_bibs)
73
- path_spidering_bibs = _expand_path(path_spidering_bibs)
74
- path_conferences_journals_json = _expand_path(path_conferences_journals_json)
74
+ path_main_output = expand_path(path_main_output)
75
+ path_spidered_bibs = expand_path(path_spidered_bibs)
76
+ path_spidering_bibs = expand_path(path_spidering_bibs)
77
+ path_conferences_journals_json = expand_path(path_conferences_journals_json)
75
78
 
76
79
  # Configure search options
77
- options = _build_search_options(
78
- print_on_screen=False,
79
- search_year_list=[],
80
- include_publisher_list=[],
81
- include_abbr_list=[],
82
- exclude_publisher_list=["arXiv"],
83
- exclude_abbr_list=[],
84
- keywords_type=keywords_type,
85
- keywords_list_list=keywords_list_list,
86
- path_conferences_journals_json=path_conferences_journals_json,
87
- )
88
-
80
+ options = {
81
+ **build_base_options(
82
+ include_publisher_list=[],
83
+ include_abbr_list=[],
84
+ exclude_publisher_list=["arXiv"],
85
+ exclude_abbr_list=[],
86
+ path_conferences_journals_json=path_conferences_journals_json,
87
+ ),
88
+ **build_search_options(
89
+ print_on_screen=False,
90
+ search_year_list=[],
91
+ keywords_type=keywords_type,
92
+ keywords_list_list=keywords_list_list,
93
+ ),
94
+ }
89
95
  # Execute searches across different bibliography sources
90
96
  _execute_searches(options, path_main_output, path_spidered_bibs, path_spidering_bibs)
91
97
 
92
98
 
93
- def _expand_path(path: str) -> str:
94
- """Expand user home directory and environment variables in path."""
95
- return os.path.expandvars(os.path.expanduser(path))
96
-
97
-
98
- def _build_search_options(
99
- print_on_screen: bool,
100
- search_year_list: List[str],
101
- include_publisher_list: List[str],
102
- include_abbr_list: List[str],
103
- exclude_publisher_list: List[str],
104
- exclude_abbr_list: List[str],
105
- keywords_type: str,
106
- keywords_list_list: List[List[str]],
107
- path_conferences_journals_json: str,
108
- ) -> Dict[str, Any]:
109
- """
110
- Build search options dictionary with common configuration.
111
-
112
- Args:
113
- print_on_screen: Whether to display results on screen
114
- search_year_list: List of years to filter search results
115
- include_publisher_list: List of publishers to include
116
- include_abbr_list: List of conference/journal abbreviations to include
117
- exclude_publisher_list: List of publishers to exclude from search
118
- exclude_abbr_list: List of conference/journal abbreviations to exclude from search
119
- keywords_type: Category name for search keywords
120
- keywords_list_list: Nested list of search keywords
121
- path_conferences_journals_json: Base path for conferences/journals JSON files
122
-
123
- Returns:
124
- Dictionary containing configured search options
125
- """
126
- return {
127
- "print_on_screen": print_on_screen,
128
- "search_year_list": search_year_list,
129
- "include_publisher_list": include_publisher_list,
130
- "include_abbr_list": include_abbr_list,
131
- "exclude_publisher_list": exclude_publisher_list,
132
- "exclude_abbr_list": exclude_abbr_list,
133
- "keywords_dict": {keywords_type: keywords_list_list},
134
- "keywords_type_list": [keywords_type],
135
- "full_json_c": os.path.join(path_conferences_journals_json, "conferences.json"),
136
- "full_json_j": os.path.join(path_conferences_journals_json, "journals.json"),
137
- }
138
-
139
-
140
99
  def _execute_searches(
141
100
  options: Dict[str, Any], path_main_output: str, path_spidered_bibs: str, path_spidering_bibs: str
142
101
  ) -> None:
@@ -163,10 +122,7 @@ def _execute_searches(
163
122
 
164
123
 
165
124
  def run_compare_after_search(
166
- zotero_bib: str,
167
- keywords_type: str,
168
- path_main_output: str,
169
- path_conferences_journals_json: str,
125
+ zotero_bib: str, keywords_type: str, path_main_output: str, path_conferences_journals_json: str
170
126
  ):
171
127
  """
172
128
  Compare search results with Zotero bibliography and generate comparison report.
@@ -178,22 +134,23 @@ def run_compare_after_search(
178
134
  path_conferences_journals_json: Path to conferences/journals JSON files
179
135
  """
180
136
  # Expand and normalize file paths
181
- zotero_bib = _expand_path(zotero_bib)
182
- path_main_output = _expand_path(path_main_output)
183
- path_conferences_journals_json = _expand_path(path_conferences_journals_json)
137
+ zotero_bib = expand_path(zotero_bib)
138
+ path_main_output = expand_path(path_main_output)
139
+ path_conferences_journals_json = expand_path(path_conferences_journals_json)
184
140
 
185
141
  # Configure search options
186
- options = _build_search_options(
187
- print_on_screen=False,
188
- search_year_list=[],
189
- include_publisher_list=[],
190
- include_abbr_list=[],
191
- exclude_publisher_list=["arXiv"],
192
- exclude_abbr_list=[],
193
- keywords_type=keywords_type,
194
- keywords_list_list=[],
195
- path_conferences_journals_json=path_conferences_journals_json,
196
- )
142
+ options = {
143
+ **build_base_options(
144
+ include_publisher_list=[],
145
+ include_abbr_list=[],
146
+ exclude_publisher_list=["arXiv"],
147
+ exclude_abbr_list=[],
148
+ path_conferences_journals_json=path_conferences_journals_json,
149
+ ),
150
+ **build_search_options(
151
+ print_on_screen=False, search_year_list=[], keywords_type=keywords_type, keywords_list_list=[]
152
+ ),
153
+ }
197
154
 
198
155
  # Download bibliography files from local search results
199
156
  download_bib = _download_bib_from_local(path_main_output, keywords_type)
@@ -221,13 +178,7 @@ def _generate_data_list(path_output: str, folder_name: str, keywords_type: str)
221
178
 
222
179
  # Extract data from both title and abstract bibliography folders
223
180
  for bib_type in ["title-bib-zotero", "abstract-bib-zotero"]:
224
- folder_path = os.path.join(
225
- path_output,
226
- f"{folder_name}-Separate",
227
- "article",
228
- keywords_type,
229
- bib_type
230
- )
181
+ folder_path = os.path.join(path_output, f"{folder_name}-Separate", "article", keywords_type, bib_type)
231
182
 
232
183
  # Extract bibliography data content if folder exists
233
184
  if os.path.exists(folder_path):
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: pyeasyphd
3
- Version: 0.3.5
3
+ Version: 0.3.7
4
4
  Summary: pyeasyphd
5
5
  License: GPL-3.0-or-later
6
6
  License-File: LICENSE
@@ -26,11 +26,13 @@ pyeasyphd/main/python_run_tex.py,sha256=9Syu8qRjPXN3gEabfRUWxwTFBm_izIcB4yFhsz3Q
26
26
  pyeasyphd/pyeasyphd.py,sha256=OAwbwq2rSXLSk2AoTAF8hmlOMRSRfvDn1Uqk-zkuqH8,3470
27
27
  pyeasyphd/pyeasyphd.sublime-settings,sha256=KcXx3DjyVf8UfnB4FP4u-jaTQU3Cuj24OvxGCZvXAsw,3135
28
28
  pyeasyphd/pyeasyphd.sublime-syntax,sha256=pXylbA-tye-K5dCTjEJLFVRqtY1T7AgWZ4laxo-dnaE,73
29
- pyeasyphd/scripts/__init__.py,sha256=0UW0H3Ht37dI1Kn3Pfdov3T7BvRLYDtRS40euB_t4Ls,473
29
+ pyeasyphd/scripts/__init__.py,sha256=aCI90-4ekx3y-F21BwbuaE7kUZOmxcevlvYWYsHOuiw,780
30
+ pyeasyphd/scripts/_base.py,sha256=YY8dmqBN8mBW1Opu0A5Usd_j2RSeMIxht5kXHrQaBFo,2232
30
31
  pyeasyphd/scripts/run_article_md.py,sha256=ZjdO03YRDokSqo-Rffby-1p6_P35N4amERKHe4yS6_0,4127
31
32
  pyeasyphd/scripts/run_article_tex.py,sha256=TuTEQk7y-Ykos3a_mlEJzyFnT9RKsAYjGXnwHJa5cjY,4672
32
33
  pyeasyphd/scripts/run_beamer_tex.py,sha256=UUOadRfmyWGfK__9NzJIUo1lAiDRB5edxRsfhrS3Ejo,4209
33
- pyeasyphd/scripts/run_search_keywords.py,sha256=jvBgYYNvOmiQYXCBPEP6dPo0SxvFRHQE2sPIOVvEmHc,9831
34
+ pyeasyphd/scripts/run_generate.py,sha256=5NQtLKTXeFg6t03zg17YbTSup0S11cOR07PRyJwnl9w,8959
35
+ pyeasyphd/scripts/run_search.py,sha256=DPpFcctEuzHCBZQdoGsETzqtIq_dfcMfEPd5lUTNKHg,8106
34
36
  pyeasyphd/tools/__init__.py,sha256=u1MZu_JjVac3HhEmcSTwroS83UVu0W5Vspy3Wu_-GH8,496
35
37
  pyeasyphd/tools/generate/generate_from_bibs.py,sha256=Dp1MyADwIRb9qFTFOkMPJLaeeh7NBjuiSLBN7smP2eo,7640
36
38
  pyeasyphd/tools/generate/generate_html.py,sha256=JzUEqgTVCaFzd4hXTYUEf0cVSO1QRe0nVUS72W6oyyU,5349
@@ -44,7 +46,7 @@ pyeasyphd/tools/search/search_keywords.py,sha256=YCurXuoYeU1ftve0cb8Hcn_g2FXCXf7
44
46
  pyeasyphd/tools/search/search_writers.py,sha256=Dz6D8m17R7x8NT7_PCjwmzlq29AfUz-N6sjyCguDTo4,15702
45
47
  pyeasyphd/tools/search/utils.py,sha256=bo7xtIZu31dQvjol1lwyWq1t6ldbw28oondwK8VbAqk,7562
46
48
  pyeasyphd/utils/utils.py,sha256=kWxzzgNwz77K9Q7j-RKTaoPpxqiVLVtaBMMhLuEenwE,3128
47
- pyeasyphd-0.3.5.dist-info/METADATA,sha256=T1k7yh4ZsdlWxiW1XbmPBMQx1VtjfS8gu-Agnl26Y8g,985
48
- pyeasyphd-0.3.5.dist-info/WHEEL,sha256=zp0Cn7JsFoX2ATtOhtaFYIiE2rmFAD4OcMhtUki8W3U,88
49
- pyeasyphd-0.3.5.dist-info/licenses/LICENSE,sha256=OXLcl0T2SZ8Pmy2_dmlvKuetivmyPd5m1q-Gyd-zaYY,35149
50
- pyeasyphd-0.3.5.dist-info/RECORD,,
49
+ pyeasyphd-0.3.7.dist-info/METADATA,sha256=azMoJuLTE7jJSYvvoQNsCrQjEmrV_SgkbakUzGU8Wr8,985
50
+ pyeasyphd-0.3.7.dist-info/WHEEL,sha256=zp0Cn7JsFoX2ATtOhtaFYIiE2rmFAD4OcMhtUki8W3U,88
51
+ pyeasyphd-0.3.7.dist-info/licenses/LICENSE,sha256=OXLcl0T2SZ8Pmy2_dmlvKuetivmyPd5m1q-Gyd-zaYY,35149
52
+ pyeasyphd-0.3.7.dist-info/RECORD,,