pyeasyphd 0.3.4__py3-none-any.whl → 0.3.6__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of pyeasyphd might be problematic; see the registry's advisory page for more details.

@@ -2,11 +2,24 @@ __all__ = [
2
2
  "run_article_md_daily_notes",
3
3
  "run_article_tex_submit",
4
4
  "run_beamer_tex_weekly_reports",
5
- "run_search_for_files",
6
5
  "run_search_for_screen",
6
+ "run_search_for_files",
7
+ "run_compare_after_search",
8
+ "run_generate_c_Yearly",
9
+ "run_generate_j_e_Weekly",
10
+ "run_generate_j_Weekly",
11
+ "run_generate_j_Monthly",
12
+ "run_generate_j_Yearly",
7
13
  ]
8
14
 
9
15
  from .run_article_md import run_article_md_daily_notes
10
16
  from .run_article_tex import run_article_tex_submit
11
17
  from .run_beamer_tex import run_beamer_tex_weekly_reports
12
- from .run_search_keywords import run_search_for_files, run_search_for_screen
18
+ from .run_generate import (
19
+ run_generate_c_Yearly,
20
+ run_generate_j_e_Weekly,
21
+ run_generate_j_Monthly,
22
+ run_generate_j_Weekly,
23
+ run_generate_j_Yearly,
24
+ )
25
+ from .run_search import run_compare_after_search, run_search_for_files, run_search_for_screen
@@ -0,0 +1,61 @@
1
+ import os
2
+ from typing import Any, Dict, List
3
+
4
+
5
def expand_path(path: str) -> str:
    """Return *path* with ``~`` and any environment variables resolved."""
    home_resolved = os.path.expanduser(path)
    return os.path.expandvars(home_resolved)
8
+
9
+
10
def build_base_options(
    include_publisher_list: List[str],
    include_abbr_list: List[str],
    exclude_publisher_list: List[str],
    exclude_abbr_list: List[str],
    path_conferences_journals_json: str,
) -> Dict[str, Any]:
    """
    Assemble the shared base options dictionary.

    Args:
        include_publisher_list: Publishers to include.
        include_abbr_list: Conference/journal abbreviations to include.
        exclude_publisher_list: Publishers to exclude.
        exclude_abbr_list: Conference/journal abbreviations to exclude.
        path_conferences_journals_json: Directory containing the
            conferences/journals/keywords JSON files.

    Returns:
        Dictionary with the include/exclude lists and the three JSON file paths
        (``full_json_c``, ``full_json_j``, ``full_json_k``).
    """
    options: Dict[str, Any] = {
        "include_publisher_list": include_publisher_list,
        "include_abbr_list": include_abbr_list,
        "exclude_publisher_list": exclude_publisher_list,
        "exclude_abbr_list": exclude_abbr_list,
    }
    # Well-known JSON files live directly under the configured directory.
    for suffix, filename in (("c", "conferences.json"), ("j", "journals.json"), ("k", "keywords.json")):
        options[f"full_json_{suffix}"] = os.path.join(path_conferences_journals_json, filename)
    return options
39
+
40
+
41
def build_search_options(
    print_on_screen: bool, search_year_list: List[str], keywords_type: str, keywords_list_list: List[List[str]]
) -> Dict[str, Any]:
    """
    Assemble the shared search options dictionary.

    Args:
        print_on_screen: Whether results should be printed to the terminal.
        search_year_list: Years used to filter search results.
        keywords_type: Category name for the search keywords.
        keywords_list_list: Nested list of search keywords.

    Returns:
        Dictionary with display/filter flags plus the keywords mapping
        (``keywords_dict``) keyed by the single category name.
    """
    options: Dict[str, Any] = {}
    options["print_on_screen"] = print_on_screen
    options["search_year_list"] = search_year_list
    # The keywords always live under exactly one category.
    options["keywords_dict"] = {keywords_type: keywords_list_list}
    options["keywords_type_list"] = [keywords_type]
    return options
@@ -0,0 +1,219 @@
1
+ import os
2
+
3
+ from pyeasyphd.tools import PaperLinksGenerator, generate_from_bibs_and_write
4
+ from pyeasyphd.utils.utils import is_last_week_of_month
5
+
6
+ from ._base import build_base_options, expand_path
7
+
8
+
9
def run_generate_j_Weekly(
    options: dict,
    path_weekly_docs: str,
    keywords_category_names: list[str],
    path_spidering_bibs: str,
    path_conferences_journals_json: str,
):
    """
    Generate the weekly journal md/html pages and their link indexes.

    Args:
        options: Caller overrides merged over the defaults from run_generate_base.
        path_weekly_docs: Output root for the weekly documents.
        keywords_category_names: Keyword categories to build link pages for.
        path_spidering_bibs: Root of the in-progress (spidering) bib storage.
        path_conferences_journals_json: Directory with conferences/journals/keywords JSON files.
    """
    # Expand and normalize file paths
    path_weekly_docs = expand_path(path_weekly_docs)

    _, path_spidering_bibs, path_conferences_journals_json, full_json_c, full_json_j, full_json_k, options_ = (
        run_generate_base(options, "", path_spidering_bibs, path_conferences_journals_json)
    )

    # Loop-invariant paths, hoisted out of the generation loops below
    path_storage = os.path.join(path_spidering_bibs, "spider_j")
    output_basename = os.path.join("data", "Weekly")
    path_output = os.path.expanduser(os.path.join(path_weekly_docs, output_basename, "Journals"))

    # Generate md and html files for the current issue and current month
    for gc in ["generate_data", "combine_data"]:
        for flag in ["current_issue", "current_month"]:
            generate_from_bibs_and_write(
                path_storage, path_output, output_basename, "Journals", gc, "current_year", flag, options_
            )

    # Generate the link pages, one pass per keyword category
    for keywords_category_name in keywords_category_names:
        generator = PaperLinksGenerator(full_json_c, full_json_j, full_json_k, path_weekly_docs, keywords_category_name)
        generator.generate_weekly_links(output_basename)
        generator.generate_keywords_links_weekly("Journals", output_basename)
40
+
41
+
42
def run_generate_j_e_Weekly(
    options: dict,
    path_weekly_docs: str,
    keywords_category_names: list[str],
    path_spidering_bibs: str,
    path_conferences_journals_json: str,
):
    """
    Generate the weekly early-access journal md/html pages and link indexes.

    Args:
        options: Caller overrides merged over the defaults from run_generate_base.
        path_weekly_docs: Output root for the weekly documents.
        keywords_category_names: Keyword categories to build link pages for.
        path_spidering_bibs: Root of the in-progress (spidering) bib storage.
        path_conferences_journals_json: Directory with conferences/journals/keywords JSON files.
    """
    # Expand and normalize file paths
    path_weekly_docs = expand_path(path_weekly_docs)

    _, path_spidering_bibs, path_conferences_journals_json, full_json_c, full_json_j, full_json_k, options_ = (
        run_generate_base(options, "", path_spidering_bibs, path_conferences_journals_json)
    )
    options_["early_access"] = True

    # Loop-invariant paths, hoisted out of the generation loops below
    path_storage = os.path.join(path_spidering_bibs, "spider_j_e")
    output_basename = os.path.join("data", "Weekly")
    path_output = os.path.expanduser(os.path.join(path_weekly_docs, output_basename, "Journals_Early_Access"))

    # Generate md and html files
    for gc in ["generate_data", "combine_data"]:
        # Current month of the current year
        generate_from_bibs_and_write(
            path_storage, path_output, output_basename, "Journals", gc, "current_year", "current_month", options_
        )
        # Full backlog across all years
        generate_from_bibs_and_write(
            path_storage, path_output, output_basename, "Journals", gc, "all_years", "all_months", options_
        )

    # Generate the link pages, one pass per keyword category
    for keywords_category_name in keywords_category_names:
        generator = PaperLinksGenerator(full_json_c, full_json_j, full_json_k, path_weekly_docs, keywords_category_name)
        generator.generate_ieee_early_access_links(output_basename)
        generator.generate_keywords_links_weekly("Journals", output_basename)
80
+
81
+
82
def run_generate_j_Monthly(
    options: dict,
    path_monthly_docs: str,
    keywords_category_names: list[str],
    path_spidering_bibs: str,
    path_conferences_journals_json: str,
):
    """
    Generate the monthly journal md/html pages and their link indexes.

    Pages are only (re)built during the last week of the month; the link
    indexes are refreshed on every run.

    Args:
        options: Caller overrides merged over the defaults from run_generate_base.
        path_monthly_docs: Output root for the monthly documents.
        keywords_category_names: Keyword categories to build link pages for.
        path_spidering_bibs: Root of the in-progress (spidering) bib storage.
        path_conferences_journals_json: Directory with conferences/journals/keywords JSON files.
    """
    # Expand and normalize file paths
    path_monthly_docs = expand_path(path_monthly_docs)

    _, path_spidering_bibs, path_conferences_journals_json, full_json_c, full_json_j, full_json_k, options_ = (
        run_generate_base(options, "", path_spidering_bibs, path_conferences_journals_json)
    )

    # Loop-invariant paths, hoisted out of the generation loops below
    path_storage = os.path.join(path_spidering_bibs, "spider_j")
    output_basename = os.path.join("data", "Monthly")
    path_output = os.path.expanduser(os.path.join(path_monthly_docs, output_basename, "Journals"))

    # Generate md and html files — only during the last week of the month
    if is_last_week_of_month():
        for gc in ["generate_data", "combine_data"]:
            generate_from_bibs_and_write(
                path_storage, path_output, output_basename, "Journals", gc, "current_year", "all_months", options_
            )

    # Generate the link pages, one pass per keyword category
    for keywords_category_name in keywords_category_names:
        generator = PaperLinksGenerator(
            full_json_c, full_json_j, full_json_k, path_monthly_docs, keywords_category_name
        )
        generator.generate_monthly_links(output_basename)
        generator.generate_keywords_links_monthly("Journals", output_basename)
119
+
120
+
121
def run_generate_j_Yearly(
    options: dict,
    path_yearly_docs: str,
    keywords_category_names: list[str],
    path_spidered_bibs: str,
    path_conferences_journals_json: str,
    year_list: list[str],
):
    """
    Generate yearly journal md/html pages and their link indexes.

    Args:
        options: Caller overrides merged over the defaults from run_generate_base.
        path_yearly_docs: Output root for the yearly documents.
        keywords_category_names: Keyword categories to build link pages for.
        path_spidered_bibs: Root of the archived (spidered) bib storage.
        path_conferences_journals_json: Directory with conferences/journals/keywords JSON files.
        year_list: Years to generate, e.g. ["2024", "2023", ...].
    """
    # Expand and normalize file paths
    path_yearly_docs = expand_path(path_yearly_docs)

    path_spidered_bibs, _, path_conferences_journals_json, full_json_c, full_json_j, full_json_k, options_ = (
        run_generate_base(options, path_spidered_bibs, "", path_conferences_journals_json)
    )

    # Loop-invariant paths, hoisted out of the generation loops below
    path_storage = os.path.join(path_spidered_bibs, "Journals")
    output_basename = os.path.join("data", "Yearly")
    path_output = os.path.expanduser(os.path.join(path_yearly_docs, output_basename, "Journals"))

    # Generate md and html files for each requested year
    for gc in ["generate_data", "combine_data"]:
        for year in year_list:
            generate_from_bibs_and_write(
                path_storage, path_output, output_basename, "Journals", gc, [year], "all_months", options_
            )

    # Generate the link pages, one pass per keyword category
    for keywords_category_name in keywords_category_names:
        generator = PaperLinksGenerator(full_json_c, full_json_j, full_json_k, path_yearly_docs, keywords_category_name)
        generator.generate_yearly_links("Journals", output_basename)
        generator.generate_keywords_links_yearly("Journals", output_basename)
153
+
154
+
155
def run_generate_base(
    options: dict, path_spidered_bibs: str, path_spidering_bibs: str, path_conferences_journals_json: str
):
    """
    Expand the input paths and assemble the default generation options.

    Args:
        options: Caller overrides; these take precedence over the defaults.
        path_spidered_bibs: Root of the archived (spidered) bib storage.
        path_spidering_bibs: Root of the in-progress (spidering) bib storage.
        path_conferences_journals_json: Directory with conferences/journals/keywords JSON files.

    Returns:
        A 7-tuple: the three expanded paths, the three JSON file paths
        (conferences, journals, keywords) and the merged options dict.
    """
    # Expand and normalize file paths
    path_spidered_bibs = expand_path(path_spidered_bibs)
    path_spidering_bibs = expand_path(path_spidering_bibs)
    path_conferences_journals_json = expand_path(path_conferences_journals_json)

    # Defaults first; caller-supplied options override them
    options_ = build_base_options(
        include_publisher_list=[],
        include_abbr_list=[],
        exclude_publisher_list=["arXiv"],
        exclude_abbr_list=[],
        path_conferences_journals_json=path_conferences_journals_json,
    )
    options_.update(options)

    return (
        path_spidered_bibs,
        path_spidering_bibs,
        path_conferences_journals_json,
        options_["full_json_c"],
        options_["full_json_j"],
        options_["full_json_k"],
        options_,
    )
186
+
187
+
188
def run_generate_c_Yearly(
    options: dict,
    path_yearly_docs: str,
    keywords_category_names: list[str],
    path_spidered_bibs: str,
    path_conferences_journals_json: str,
    year_list: list[str],
):
    """
    Generate yearly conference md/html pages and their link indexes.

    Args:
        options: Caller overrides merged over the defaults from run_generate_base.
        path_yearly_docs: Output root for the yearly documents.
        keywords_category_names: Keyword categories to build link pages for.
        path_spidered_bibs: Root of the archived (spidered) bib storage.
        path_conferences_journals_json: Directory with conferences/journals/keywords JSON files.
        year_list: Years to generate, e.g. ["2025", "2024", ...].
    """
    # Expand and normalize file paths
    path_yearly_docs = expand_path(path_yearly_docs)

    path_spidered_bibs, _, path_conferences_journals_json, full_json_c, full_json_j, full_json_k, options_ = (
        run_generate_base(options, path_spidered_bibs, "", path_conferences_journals_json)
    )

    # Loop-invariant paths, hoisted out of the generation loops below
    path_storage = os.path.join(path_spidered_bibs, "Conferences")
    output_basename = os.path.join("data", "Yearly")
    path_output = os.path.expanduser(os.path.join(path_yearly_docs, output_basename, "Conferences"))

    # Generate md and html files for each requested year
    for gc in ["generate_data", "combine_data"]:
        for year in year_list:
            generate_from_bibs_and_write(
                path_storage, path_output, output_basename, "Conferences", gc, [year], "all_months", options_
            )

    # Generate the link pages, one pass per keyword category
    for keywords_category_name in keywords_category_names:
        generator = PaperLinksGenerator(full_json_c, full_json_j, full_json_k, path_yearly_docs, keywords_category_name)
        generator.generate_yearly_links("Conferences", output_basename)
        generator.generate_keywords_links_yearly("Conferences", output_basename)
@@ -0,0 +1,213 @@
1
+ import os
2
+ from typing import Any, Dict, List
3
+
4
+ from pyadvtools import transform_to_data_list
5
+ from pybibtexer.tools import compare_bibs_with_zotero
6
+
7
+ from pyeasyphd.tools import Searchkeywords
8
+
9
+ from ._base import build_base_options, build_search_options, expand_path
10
+
11
+
12
def run_search_for_screen(
    acronym: str,
    year: int,
    title: str,
    path_spidered_bibs: str,
    path_spidering_bibs: str,
    path_conferences_journals_json: str,
) -> None:
    """
    Search for a single paper and display any matches on screen.

    Args:
        acronym: Conference/journal acronym to restrict the search to.
        year: Publication year to filter by.
        title: Paper title used as the search keyword.
        path_spidered_bibs: Path to spidered bibliography files.
        path_spidering_bibs: Path to spidering bibliography files.
        path_conferences_journals_json: Path to conferences/journals JSON files.
    """
    # Expand and normalize file paths
    path_spidered_bibs = expand_path(path_spidered_bibs)
    path_spidering_bibs = expand_path(path_spidering_bibs)
    path_conferences_journals_json = expand_path(path_conferences_journals_json)

    # Base filters: restrict to the given acronym, never search arXiv
    base_options = build_base_options(
        include_publisher_list=[],
        include_abbr_list=[acronym],
        exclude_publisher_list=["arXiv"],
        exclude_abbr_list=[],
        path_conferences_journals_json=path_conferences_journals_json,
    )
    # Screen search: one throwaway "Temp" category holding the title keyword
    search_options = build_search_options(
        print_on_screen=True, search_year_list=[str(year)], keywords_type="Temp", keywords_list_list=[[title]]
    )
    options = {**base_options, **search_options}

    # Results are printed, so no output directory is needed
    _execute_searches(options, "", path_spidered_bibs, path_spidering_bibs)
52
+
53
+
54
def run_search_for_files(
    keywords_type: str,
    keywords_list_list: List[List[str]],
    path_main_output: str,
    path_spidered_bibs: str,
    path_spidering_bibs: str,
    path_conferences_journals_json: str,
) -> None:
    """
    Search with custom keywords and write the results to files.

    Args:
        keywords_type: Category name for the search keywords.
        keywords_list_list: Nested list of keywords to search for.
        path_main_output: Main output directory for search results.
        path_spidered_bibs: Path to spidered bibliography files.
        path_spidering_bibs: Path to spidering bibliography files.
        path_conferences_journals_json: Path to conferences/journals JSON files.
    """
    # Expand and normalize file paths
    path_main_output = expand_path(path_main_output)
    path_spidered_bibs = expand_path(path_spidered_bibs)
    path_spidering_bibs = expand_path(path_spidering_bibs)
    path_conferences_journals_json = expand_path(path_conferences_journals_json)

    # Base filters: no publisher/abbr restriction beyond excluding arXiv
    base_options = build_base_options(
        include_publisher_list=[],
        include_abbr_list=[],
        exclude_publisher_list=["arXiv"],
        exclude_abbr_list=[],
        path_conferences_journals_json=path_conferences_journals_json,
    )
    # File search: no year filter, results written rather than printed
    search_options = build_search_options(
        print_on_screen=False,
        search_year_list=[],
        keywords_type=keywords_type,
        keywords_list_list=keywords_list_list,
    )
    options = {**base_options, **search_options}

    # Execute searches across different bibliography sources
    _execute_searches(options, path_main_output, path_spidered_bibs, path_spidering_bibs)
97
+
98
+
99
def _execute_searches(
    options: Dict[str, Any], path_main_output: str, path_spidered_bibs: str, path_spidering_bibs: str
) -> None:
    """
    Run keyword searches over every bibliography source.

    Args:
        options: Search configuration options.
        path_main_output: Base path for search results output.
        path_spidered_bibs: Path to spidered bibliography files.
        path_spidering_bibs: Path to spidering bibliography files.
    """
    # (source root, output label, per-source subfolders) — spidered first,
    # then the in-progress spidering sources, matching the output layout.
    search_plan = [
        (path_spidered_bibs, "Search_spidered_bib", ["Conferences", "Journals"]),
        (path_spidering_bibs, "Search_spidering_bib", ["spider_j", "spider_j_e"]),
    ]
    for source_root, output_label, subfolders in search_plan:
        for subfolder in subfolders:
            storage_dir = os.path.join(source_root, subfolder)
            output_dir = os.path.join(path_main_output, output_label, subfolder)
            Searchkeywords(storage_dir, output_dir, options).run()
122
+
123
+
124
def run_compare_after_search(
    zotero_bib: str, keywords_type: str, path_main_output: str, path_conferences_journals_json: str
) -> None:
    """
    Compare search results with a Zotero bibliography and write a comparison report.

    Args:
        zotero_bib: Path to the Zotero bibliography file.
        keywords_type: Category name for the search keywords used.
        path_main_output: Main output directory for search results and comparison.
        path_conferences_journals_json: Path to conferences/journals JSON files.
    """
    # Expand and normalize file paths
    zotero_bib = expand_path(zotero_bib)
    path_main_output = expand_path(path_main_output)
    path_conferences_journals_json = expand_path(path_conferences_journals_json)

    # Configure search options (no keywords needed for the comparison itself)
    options = {
        **build_base_options(
            include_publisher_list=[],
            include_abbr_list=[],
            exclude_publisher_list=["arXiv"],
            exclude_abbr_list=[],
            path_conferences_journals_json=path_conferences_journals_json,
        ),
        **build_search_options(
            print_on_screen=False, search_year_list=[], keywords_type=keywords_type, keywords_list_list=[]
        ),
    }

    # Collect previously generated search results from disk
    download_bib = _download_bib_from_local(path_main_output, keywords_type)

    # Run the comparison into a dedicated output folder
    path_output = os.path.join(path_main_output, "comparison_new")
    compare_bibs_with_zotero(zotero_bib, download_bib, path_output, options)
163
+
164
+
165
def _generate_data_list(path_output: str, folder_name: str, keywords_type: str) -> list[str]:
    """
    Extract bibliography content from a search-result folder structure.

    Args:
        path_output: Base output path for search results.
        folder_name: Specific folder name within the output structure.
        keywords_type: Category name for the search keywords used.

    Returns:
        Bibliography data content read from .bib files in the title and
        abstract result folders (missing folders are skipped).
    """
    collected: list[str] = []
    for bib_type in ("title-bib-zotero", "abstract-bib-zotero"):
        folder_path = os.path.join(path_output, f"{folder_name}-Separate", "article", keywords_type, bib_type)
        # Skip result folders that were never produced
        if not os.path.exists(folder_path):
            continue
        collected.extend(transform_to_data_list(folder_path, ".bib"))
    return collected
188
+
189
+
190
def _download_bib_from_local(path_output: str, keywords_type: str) -> list[str]:
    """
    Gather bibliography content from every local search-result directory.

    Args:
        path_output: Base output path containing search results.
        keywords_type: Category name for the search keywords used.

    Returns:
        Combined bibliography data content from all .bib files in the
        spidered and spidering search results.
    """
    # Mirror the layout written by _execute_searches: spidered first
    sources = [
        ("Search_spidered_bib", ["Conferences", "Journals"]),
        ("Search_spidering_bib", ["spider_j", "spider_j_e"]),
    ]
    data_list: list[str] = []
    for parent, subfolders in sources:
        for subfolder in subfolders:
            folder_name = os.path.join(parent, subfolder)
            data_list.extend(_generate_data_list(path_output, folder_name, keywords_type))
    return data_list
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: pyeasyphd
3
- Version: 0.3.4
3
+ Version: 0.3.6
4
4
  Summary: pyeasyphd
5
5
  License: GPL-3.0-or-later
6
6
  License-File: LICENSE
@@ -26,11 +26,13 @@ pyeasyphd/main/python_run_tex.py,sha256=9Syu8qRjPXN3gEabfRUWxwTFBm_izIcB4yFhsz3Q
26
26
  pyeasyphd/pyeasyphd.py,sha256=OAwbwq2rSXLSk2AoTAF8hmlOMRSRfvDn1Uqk-zkuqH8,3470
27
27
  pyeasyphd/pyeasyphd.sublime-settings,sha256=KcXx3DjyVf8UfnB4FP4u-jaTQU3Cuj24OvxGCZvXAsw,3135
28
28
  pyeasyphd/pyeasyphd.sublime-syntax,sha256=pXylbA-tye-K5dCTjEJLFVRqtY1T7AgWZ4laxo-dnaE,73
29
- pyeasyphd/scripts/__init__.py,sha256=R-gQqvYxkiQFZXjzxIWSNheWRolklXb1QY8b2nrKKZk,415
29
+ pyeasyphd/scripts/__init__.py,sha256=aCI90-4ekx3y-F21BwbuaE7kUZOmxcevlvYWYsHOuiw,780
30
+ pyeasyphd/scripts/_base.py,sha256=YY8dmqBN8mBW1Opu0A5Usd_j2RSeMIxht5kXHrQaBFo,2232
30
31
  pyeasyphd/scripts/run_article_md.py,sha256=ZjdO03YRDokSqo-Rffby-1p6_P35N4amERKHe4yS6_0,4127
31
32
  pyeasyphd/scripts/run_article_tex.py,sha256=TuTEQk7y-Ykos3a_mlEJzyFnT9RKsAYjGXnwHJa5cjY,4672
32
33
  pyeasyphd/scripts/run_beamer_tex.py,sha256=UUOadRfmyWGfK__9NzJIUo1lAiDRB5edxRsfhrS3Ejo,4209
33
- pyeasyphd/scripts/run_search_keywords.py,sha256=HAbELDQy2buf2pxZvmdAM6GrS8W9dGSsuf8cOB5GyFA,6178
34
+ pyeasyphd/scripts/run_generate.py,sha256=fLUQ6yCDY_jxpOWlSusHQlZblbvbdHpkb9Bv703jTQ0,8972
35
+ pyeasyphd/scripts/run_search.py,sha256=DPpFcctEuzHCBZQdoGsETzqtIq_dfcMfEPd5lUTNKHg,8106
34
36
  pyeasyphd/tools/__init__.py,sha256=u1MZu_JjVac3HhEmcSTwroS83UVu0W5Vspy3Wu_-GH8,496
35
37
  pyeasyphd/tools/generate/generate_from_bibs.py,sha256=Dp1MyADwIRb9qFTFOkMPJLaeeh7NBjuiSLBN7smP2eo,7640
36
38
  pyeasyphd/tools/generate/generate_html.py,sha256=JzUEqgTVCaFzd4hXTYUEf0cVSO1QRe0nVUS72W6oyyU,5349
@@ -44,7 +46,7 @@ pyeasyphd/tools/search/search_keywords.py,sha256=YCurXuoYeU1ftve0cb8Hcn_g2FXCXf7
44
46
  pyeasyphd/tools/search/search_writers.py,sha256=Dz6D8m17R7x8NT7_PCjwmzlq29AfUz-N6sjyCguDTo4,15702
45
47
  pyeasyphd/tools/search/utils.py,sha256=bo7xtIZu31dQvjol1lwyWq1t6ldbw28oondwK8VbAqk,7562
46
48
  pyeasyphd/utils/utils.py,sha256=kWxzzgNwz77K9Q7j-RKTaoPpxqiVLVtaBMMhLuEenwE,3128
47
- pyeasyphd-0.3.4.dist-info/METADATA,sha256=yTHmsfDu8UqmssQFuVtZhpMeEc8PI8azisZEwGHoyDU,985
48
- pyeasyphd-0.3.4.dist-info/WHEEL,sha256=zp0Cn7JsFoX2ATtOhtaFYIiE2rmFAD4OcMhtUki8W3U,88
49
- pyeasyphd-0.3.4.dist-info/licenses/LICENSE,sha256=OXLcl0T2SZ8Pmy2_dmlvKuetivmyPd5m1q-Gyd-zaYY,35149
50
- pyeasyphd-0.3.4.dist-info/RECORD,,
49
+ pyeasyphd-0.3.6.dist-info/METADATA,sha256=sOsSH-zW116-879Yv6TRJPI0cEbM2KR0XtepSUpT-8U,985
50
+ pyeasyphd-0.3.6.dist-info/WHEEL,sha256=zp0Cn7JsFoX2ATtOhtaFYIiE2rmFAD4OcMhtUki8W3U,88
51
+ pyeasyphd-0.3.6.dist-info/licenses/LICENSE,sha256=OXLcl0T2SZ8Pmy2_dmlvKuetivmyPd5m1q-Gyd-zaYY,35149
52
+ pyeasyphd-0.3.6.dist-info/RECORD,,
@@ -1,159 +0,0 @@
1
- import os
2
- from typing import Any, Dict, List
3
-
4
- from pyeasyphd.tools import Searchkeywords
5
-
6
-
7
- def run_search_for_screen(
8
- acronym: str,
9
- year: int,
10
- title: str,
11
- path_spidered_bibs: str,
12
- path_spidering_bibs: str,
13
- path_conferences_journals_json: str,
14
- ) -> None:
15
- """
16
- Run search for screen display with specific conference/journal parameters.
17
-
18
- Args:
19
- acronym: Conference/journal acronym to search for
20
- year: Publication year to filter by
21
- title: Paper title used as search keyword
22
- path_spidered_bibs: Path to spidered bibliography files
23
- path_spidering_bibs: Path to spidering bibliography files
24
- path_conferences_journals_json: Path to conferences/journals JSON files
25
- """
26
- # Expand and normalize file paths
27
- path_spidered_bibs = _expand_path(path_spidered_bibs)
28
- path_spidering_bibs = _expand_path(path_spidering_bibs)
29
- path_conferences_journals_json = _expand_path(path_conferences_journals_json)
30
-
31
- # Configure search options
32
- options = _build_search_options(
33
- print_on_screen=True,
34
- search_year_list=[str(year)],
35
- include_publisher_list=[],
36
- include_abbr_list=[acronym],
37
- exclude_publisher_list=["arXiv"],
38
- exclude_abbr_list=[],
39
- keywords_type="Temp",
40
- keywords_list_list=[[title]],
41
- path_conferences_journals_json=path_conferences_journals_json,
42
- )
43
-
44
- # Execute searches across different bibliography sources
45
- _execute_searches(options, "", path_spidered_bibs, path_spidering_bibs)
46
-
47
-
48
- def run_search_for_files(
49
- keywords_type: str,
50
- keywords_list_list: List[List[str]],
51
- path_main_output: str,
52
- path_spidered_bibs: str,
53
- path_spidering_bibs: str,
54
- path_conferences_journals_json: str,
55
- ) -> None:
56
- """
57
- Run search and save results to files with custom keywords.
58
-
59
- Args:
60
- keywords_type: Category name for the search keywords
61
- keywords_list_list: Nested list of keywords to search for
62
- path_main_output: Main output directory for search results
63
- path_spidered_bibs: Path to spidered bibliography files
64
- path_spidering_bibs: Path to spidering bibliography files
65
- path_conferences_journals_json: Path to conferences/journals JSON files
66
- """
67
- # Expand and normalize file paths
68
- path_main_output = _expand_path(path_main_output)
69
- path_spidered_bibs = _expand_path(path_spidered_bibs)
70
- path_spidering_bibs = _expand_path(path_spidering_bibs)
71
- path_conferences_journals_json = _expand_path(path_conferences_journals_json)
72
-
73
- # Configure search options
74
- options = _build_search_options(
75
- print_on_screen=False,
76
- search_year_list=[],
77
- include_publisher_list=[],
78
- include_abbr_list=[],
79
- exclude_publisher_list=["arXiv"],
80
- exclude_abbr_list=[],
81
- keywords_type=keywords_type,
82
- keywords_list_list=keywords_list_list,
83
- path_conferences_journals_json=path_conferences_journals_json,
84
- )
85
-
86
- # Execute searches across different bibliography sources
87
- _execute_searches(options, path_main_output, path_spidered_bibs, path_spidering_bibs)
88
-
89
-
90
- def _expand_path(path: str) -> str:
91
- """Expand user home directory and environment variables in path."""
92
- return os.path.expandvars(os.path.expanduser(path))
93
-
94
-
95
- def _build_search_options(
96
- print_on_screen: bool,
97
- search_year_list: List[str],
98
- include_publisher_list: List[str],
99
- include_abbr_list: List[str],
100
- exclude_publisher_list: List[str],
101
- exclude_abbr_list: List[str],
102
- keywords_type: str,
103
- keywords_list_list: List[List[str]],
104
- path_conferences_journals_json: str,
105
- ) -> Dict[str, Any]:
106
- """
107
- Build search options dictionary with common configuration.
108
-
109
- Args:
110
- print_on_screen: Whether to display results on screen
111
- search_year_list: List of years to filter search results
112
- include_publisher_list: List of publishers to include
113
- include_abbr_list: List of conference/journal abbreviations to include
114
- exclude_publisher_list: List of publishers to exclude from search
115
- exclude_abbr_list: List of conference/journal abbreviations to exclude from search
116
- keywords_type: Category name for search keywords
117
- keywords_list_list: Nested list of search keywords
118
- path_conferences_journals_json: Base path for conferences/journals JSON files
119
-
120
- Returns:
121
- Dictionary containing configured search options
122
- """
123
- return {
124
- "print_on_screen": print_on_screen,
125
- "search_year_list": search_year_list,
126
- "include_publisher_list": include_publisher_list,
127
- "include_abbr_list": include_abbr_list,
128
- "exclude_publisher_list": exclude_publisher_list,
129
- "exclude_abbr_list": exclude_abbr_list,
130
- "keywords_dict": {keywords_type: keywords_list_list},
131
- "keywords_type_list": [keywords_type],
132
- "full_json_c": os.path.join(path_conferences_journals_json, "conferences.json"),
133
- "full_json_j": os.path.join(path_conferences_journals_json, "journals.json"),
134
- }
135
-
136
-
137
- def _execute_searches(
138
- options: Dict[str, Any], path_main_output: str, path_spidered_bibs: str, path_spidering_bibs: str
139
- ) -> None:
140
- """
141
- Execute searches across different bibliography sources.
142
-
143
- Args:
144
- options: Search configuration options
145
- path_main_output: Base path for search results output
146
- path_spidered_bibs: Path to spidered bibliography files
147
- path_spidering_bibs: Path to spidering bibliography files
148
- """
149
- # Search in spidered bibliographies (Conferences and Journals)
150
- for cj in ["Conferences", "Journals"]:
151
- path_storage = os.path.join(path_spidered_bibs, cj)
152
- path_output = os.path.join(path_main_output, "Search_spidered_bib", cj)
153
- Searchkeywords(path_storage, path_output, options).run()
154
-
155
- # Search in spidering bibliographies (Journals and Journals Early Access)
156
- for je in ["spider_j", "spider_j_e"]:
157
- path_storage = os.path.join(path_spidering_bibs, je)
158
- path_output = os.path.join(path_main_output, "Search_spidering_bib", je)
159
- Searchkeywords(path_storage, path_output, options).run()