pub-analyzer 0.1.2-py3-none-any.whl → 0.3.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of pub-analyzer might be problematic.
- pub_analyzer/css/body.tcss +48 -35
- pub_analyzer/css/buttons.tcss +0 -1
- pub_analyzer/css/collapsible.tcss +31 -0
- pub_analyzer/css/main.tcss +4 -0
- pub_analyzer/css/summary.tcss +75 -0
- pub_analyzer/internal/identifier.py +36 -10
- pub_analyzer/internal/render.py +1 -1
- pub_analyzer/internal/report.py +177 -53
- pub_analyzer/internal/templates/author/{author_resume.typ → author_summary.typ} +4 -3
- pub_analyzer/internal/templates/author/report.typ +4 -3
- pub_analyzer/internal/templates/author/sources.typ +7 -5
- pub_analyzer/internal/templates/author/works.typ +12 -12
- pub_analyzer/internal/templates/author/works_extended.typ +4 -4
- pub_analyzer/main.py +6 -7
- pub_analyzer/models/author.py +20 -28
- pub_analyzer/models/concept.py +19 -0
- pub_analyzer/models/institution.py +22 -5
- pub_analyzer/models/report.py +14 -14
- pub_analyzer/models/source.py +59 -3
- pub_analyzer/models/topic.py +59 -0
- pub_analyzer/models/work.py +30 -7
- pub_analyzer/widgets/author/cards.py +15 -14
- pub_analyzer/widgets/author/core.py +80 -115
- pub_analyzer/widgets/author/tables.py +1 -1
- pub_analyzer/widgets/common/__init__.py +6 -6
- pub_analyzer/widgets/common/filesystem.py +16 -13
- pub_analyzer/widgets/common/filters.py +111 -0
- pub_analyzer/widgets/common/input.py +14 -5
- pub_analyzer/widgets/common/selector.py +1 -1
- pub_analyzer/widgets/common/summary.py +7 -0
- pub_analyzer/widgets/institution/cards.py +13 -15
- pub_analyzer/widgets/institution/core.py +81 -115
- pub_analyzer/widgets/institution/tables.py +1 -1
- pub_analyzer/widgets/report/cards.py +33 -31
- pub_analyzer/widgets/report/concept.py +47 -0
- pub_analyzer/widgets/report/core.py +90 -20
- pub_analyzer/widgets/report/export.py +2 -2
- pub_analyzer/widgets/report/grants.py +46 -0
- pub_analyzer/widgets/report/locations.py +14 -12
- pub_analyzer/widgets/report/source.py +22 -14
- pub_analyzer/widgets/report/topic.py +55 -0
- pub_analyzer/widgets/report/work.py +70 -34
- pub_analyzer/widgets/search/__init__.py +4 -4
- pub_analyzer/widgets/search/results.py +15 -16
- pub_analyzer/widgets/sidebar.py +11 -9
- {pub_analyzer-0.1.2.dist-info → pub_analyzer-0.3.0.dist-info}/METADATA +31 -7
- pub_analyzer-0.3.0.dist-info/RECORD +69 -0
- {pub_analyzer-0.1.2.dist-info → pub_analyzer-0.3.0.dist-info}/WHEEL +1 -1
- pub_analyzer/css/author.tcss +0 -78
- pub_analyzer/css/institution.tcss +0 -78
- pub_analyzer-0.1.2.dist-info/RECORD +0 -62
- {pub_analyzer-0.1.2.dist-info → pub_analyzer-0.3.0.dist-info}/LICENSE +0 -0
- {pub_analyzer-0.1.2.dist-info → pub_analyzer-0.3.0.dist-info}/entry_points.txt +0 -0
pub_analyzer/internal/report.py
CHANGED

@@ -2,27 +2,72 @@

 import datetime
 import math
-from typing import Any
+from typing import Any, NewType

 import httpx
 from pydantic import TypeAdapter

 from pub_analyzer.internal import identifier
-from pub_analyzer.models.author import Author
-from pub_analyzer.models.institution import Institution
+from pub_analyzer.models.author import Author, AuthorOpenAlexKey, AuthorResult, DehydratedAuthor
+from pub_analyzer.models.institution import DehydratedInstitution, Institution, InstitutionOpenAlexKey, InstitutionResult
 from pub_analyzer.models.report import (
     AuthorReport,
     CitationReport,
-
+    CitationSummary,
     CitationType,
     InstitutionReport,
-
-
+    OpenAccessSummary,
+    SourcesSummary,
     WorkReport,
     WorkTypeCounter,
 )
+from pub_analyzer.models.source import DehydratedSource, Source
 from pub_analyzer.models.work import Authorship, Work

+FromDate = NewType("FromDate", datetime.datetime)
+"""DateTime marker for works published from this date."""
+
+ToDate = NewType("ToDate", datetime.datetime)
+"""DateTime marker for works published up to this date."""
+
+
+def _get_author_profiles_keys(
+    author: Author, extra_profiles: list[Author | AuthorResult | DehydratedAuthor] | None
+) -> list[AuthorOpenAlexKey]:
+    """Create a list of profiles IDs joining main author profile and extra author profiles.
+
+    Args:
+        author: Main OpenAlex author object.
+        extra_profiles: Extra OpenAlex authors objects related with the main author.
+
+    Returns:
+        List of Author OpenAlex Keys.
+    """
+    if extra_profiles:
+        profiles = [author, *extra_profiles]
+        return [identifier.get_author_id(profile) for profile in profiles]
+    else:
+        return [identifier.get_author_id(author)]
+
+
+def _get_institution_keys(
+    institution: Institution, extra_profiles: list[Institution | InstitutionResult | DehydratedInstitution] | None
+) -> list[InstitutionOpenAlexKey]:
+    """Create a list of profiles IDs joining main institution profile and extra institution profiles.
+
+    Args:
+        institution: Main OpenAlex institution object.
+        extra_profiles: Extra OpenAlex institutions objects related with the main institution.
+
+    Returns:
+        List of Institution OpenAlex Keys.
+    """
+    if extra_profiles:
+        profiles = [institution, *extra_profiles]
+        return [identifier.get_institution_id(profile) for profile in profiles]
+    else:
+        return [identifier.get_institution_id(institution)]
+

 def _get_authors_list(authorships: list[Authorship]) -> list[str]:
     """Collect OpenAlex IDs from authors in a list of authorships.

@@ -93,7 +138,7 @@ def _get_valid_works(works: list[dict[str, Any]]) -> list[dict[str, Any]]:
     In response, we have chosen to exclude such works at this stage, thus avoiding
     the need to handle exceptions within the Model validators.
     """
-    return [_add_work_abstract(work) for work in works if work[
+    return [_add_work_abstract(work) for work in works if work["title"] is not None]


 async def _get_works(client: httpx.AsyncClient, url: str) -> list[Work]:

@@ -118,7 +163,7 @@ async def _get_works(client: httpx.AsyncClient, url: str) -> list[Work]:
     meta_info = json_response["meta"]
     page_count = math.ceil(meta_info["count"] / meta_info["per_page"])

-    works_data = list(_get_valid_works(json_response["results"])
+    works_data = list(_get_valid_works(json_response["results"]))

     for page_number in range(1, page_count):
         page_result = (await client.get(url + f"&page={page_number + 1}")).json()

@@ -127,13 +172,44 @@ async def _get_works(client: httpx.AsyncClient, url: str) -> list[Work]:
     return TypeAdapter(list[Work]).validate_python(works_data)


-async def
+async def _get_source(client: httpx.AsyncClient, url: str) -> Source:
+    """Get source given a URL.
+
+    Args:
+        client: HTTPX asynchronous client to be used to make the requests.
+        url: URL of works with all filters.
+
+    Returns:
+        Source Model.
+
+    Raises:
+        httpx.HTTPStatusError: One response from OpenAlex API had an error HTTP status of 4xx or 5xx.
+    """
+    response = await client.get(url=url)
+    response.raise_for_status()
+
+    return Source(**response.json())
+
+
+async def make_author_report(
+    author: Author,
+    extra_profiles: list[Author | AuthorResult | DehydratedAuthor] | None = None,
+    pub_from_date: FromDate | None = None,
+    pub_to_date: ToDate | None = None,
+    cited_from_date: FromDate | None = None,
+    cited_to_date: ToDate | None = None,
+) -> AuthorReport:
     """Make a scientific production report by Author.

     Args:
         author: Author to whom the report is generated.
-
-
+        extra_profiles: List of author profiles whose works will be attached.
+
+        pub_from_date: Filter works published from this date.
+        pub_to_date: Filter works published up to this date.
+
+        cited_from_date: Filter works that cite the author, published after this date.
+        cited_to_date: Filter works that cite the author, published up to this date.

     Returns:
         Author's scientific production report Model.

@@ -141,31 +217,40 @@ async def make_author_report(author: Author, from_date: datetime.date | None = N
     Raises:
         httpx.HTTPStatusError: One response from OpenAlex API had an error HTTP status of 4xx or 5xx.
     """
-
+    author_profiles_keys = _get_author_profiles_keys(author, extra_profiles)
+    profiles_query_parameter = "|".join(author_profiles_keys)

-
-
-    url =
+    pub_from_filter = f",from_publication_date:{pub_from_date:%Y-%m-%d}" if pub_from_date else ""
+    pub_to_filter = f",to_publication_date:{pub_to_date:%Y-%m-%d}" if pub_to_date else ""
+    url = (
+        f"https://api.openalex.org/works?filter=author.id:{profiles_query_parameter}{pub_from_filter}{pub_to_filter}&sort=publication_date"
+    )

     async with httpx.AsyncClient() as client:
         # Getting all the author works.
         author_works = await _get_works(client, url)

+        # Extra filters
+        cited_from_filter = f",from_publication_date:{cited_from_date:%Y-%m-%d}" if cited_from_date else ""
+        cited_to_filter = f",to_publication_date:{cited_to_date:%Y-%m-%d}" if cited_to_date else ""
+
         # Report fields.
         works: list[WorkReport] = []
-
-
+        report_citation_summary = CitationSummary()
+        open_access_summary = OpenAccessSummary()
         works_type_counter: list[WorkTypeCounter] = []
-
+        dehydrated_sources: list[DehydratedSource] = []

         # Getting all works that have cited the author.
         for author_work in author_works:
             work_id = identifier.get_work_id(author_work)
             work_authors = _get_authors_list(authorships=author_work.authorships)
-            cited_by_api_url =
+            cited_by_api_url = (
+                f"https://api.openalex.org/works?filter=cites:{work_id}{cited_from_filter}{cited_to_filter}&sort=publication_date"
+            )

             # Adding the type of OpenAccess in the counter.
-
+            open_access_summary.add_oa_type(author_work.open_access.oa_status)

             # Adding the work type to works type counter.
             work_type = next((work_type for work_type in works_type_counter if work_type.type_name == author_work.type), None)

@@ -176,43 +261,64 @@ async def make_author_report(author: Author, from_date: datetime.date | None = N

             # Add Sources to global list.
             for location in author_work.locations:
-                if location.source and not any(source.
-
+                if location.source and not any(source.id == location.source.id for source in dehydrated_sources):
+                    dehydrated_sources.append(location.source)

             cited_by_works = await _get_works(client, cited_by_api_url)
             cited_by: list[CitationReport] = []
-
+            work_citation_summary = CitationSummary()
             for cited_by_work in cited_by_works:
                 cited_authors = _get_authors_list(authorships=cited_by_work.authorships)
                 citation_type = _get_citation_type(work_authors, cited_authors)

                 # Adding the type of cites in the counters.
-
-
+                report_citation_summary.add_cite_type(citation_type)
+                work_citation_summary.add_cite_type(citation_type)

                 cited_by.append(CitationReport(work=cited_by_work, citation_type=citation_type))

-            works.append(WorkReport(work=author_work, cited_by=cited_by,
+            works.append(WorkReport(work=author_work, cited_by=cited_by, citation_summary=work_citation_summary))
+
+        # Get sources full info.
+        sources: list[Source] = []
+        for dehydrated_source in dehydrated_sources:
+            source_id = identifier.get_source_id(dehydrated_source)
+            source_url = f"https://api.openalex.org/sources/{source_id}"
+            sources.append(await _get_source(client, source_url))
+
+        # Sort sources by h_index
+        sources_sorted = sorted(sources, key=lambda source: source.summary_stats.two_yr_mean_citedness, reverse=True)
+        sources_summary = SourcesSummary(sources=sources_sorted)

     return AuthorReport(
         author=author,
         works=works,
-
-
-
-
+        citation_summary=report_citation_summary,
+        open_access_summary=open_access_summary,
+        works_type_summary=works_type_counter,
+        sources_summary=sources_summary,
     )


 async def make_institution_report(
-
-
+    institution: Institution,
+    extra_profiles: list[Institution | InstitutionResult | DehydratedInstitution] | None = None,
+    pub_from_date: FromDate | None = None,
+    pub_to_date: ToDate | None = None,
+    cited_from_date: FromDate | None = None,
+    cited_to_date: ToDate | None = None,
+) -> InstitutionReport:
     """Make a scientific production report by Institution.

     Args:
         institution: Institution to which the report is generated.
-
-
+        extra_profiles: List of institutions profiles whose works will be attached.
+
+        pub_from_date: Filter works published from this date.
+        pub_to_date: Filter works published up to this date.
+
+        cited_from_date: Filter works that cite the institution, published after this date.
+        cited_to_date: Filter works that cite the institution, published up to this date.

     Returns:
         Institution's scientific production report Model.

@@ -220,31 +326,38 @@ async def make_institution_report(
     Raises:
         httpx.HTTPStatusError: One response from OpenAlex API had an error HTTP status of 4xx or 5xx.
     """
-
+    institution_keys = _get_institution_keys(institution, extra_profiles)
+    institution_query_parameter = "|".join(institution_keys)

-
-
-    url = f"https://api.openalex.org/works?filter=institutions.id:{
+    pub_from_filter = f",from_publication_date:{pub_from_date:%Y-%m-%d}" if pub_from_date else ""
+    pub_to_filter = f",to_publication_date:{pub_to_date:%Y-%m-%d}" if pub_to_date else ""
+    url = f"https://api.openalex.org/works?filter=institutions.id:{institution_query_parameter}{pub_from_filter}{pub_to_filter}&sort=publication_date"

     async with httpx.AsyncClient() as client:
         # Getting all the institution works.
         institution_works = await _get_works(client=client, url=url)

+        # Extra filters
+        cited_from_filter = f",from_publication_date:{cited_from_date:%Y-%m-%d}" if cited_from_date else ""
+        cited_to_filter = f",to_publication_date:{cited_to_date:%Y-%m-%d}" if cited_to_date else ""
+
         # Report fields.
         works: list[WorkReport] = []
-
-
+        report_citation_summary = CitationSummary()
+        open_access_summary = OpenAccessSummary()
         works_type_counter: list[WorkTypeCounter] = []
-
+        dehydrated_sources: list[DehydratedSource] = []

         # Getting all works that have cited a work.
         for institution_work in institution_works:
             work_id = identifier.get_work_id(institution_work)
             work_authors = _get_authors_list(authorships=institution_work.authorships)
-            cited_by_api_url =
+            cited_by_api_url = (
+                f"https://api.openalex.org/works?filter=cites:{work_id}{cited_from_filter}{cited_to_filter}&sort=publication_date"
+            )

             # Adding the type of OpenAccess in the counter.
-
+            open_access_summary.add_oa_type(institution_work.open_access.oa_status)

             # Adding the work type to works type counter.
             work_type = next((work_type for work_type in works_type_counter if work_type.type_name == institution_work.type), None)

@@ -255,29 +368,40 @@

             # Add Sources to global list.
             for location in institution_work.locations:
-                if location.source and not any(source.
-
+                if location.source and not any(source.id == location.source.id for source in dehydrated_sources):
+                    dehydrated_sources.append(location.source)

             cited_by_works = await _get_works(client, cited_by_api_url)
             cited_by: list[CitationReport] = []
-
+            work_citation_summary = CitationSummary()
             for cited_by_work in cited_by_works:
                 cited_authors = _get_authors_list(authorships=cited_by_work.authorships)
                 citation_type = _get_citation_type(work_authors, cited_authors)

                 # Adding the type of cites in the counters.
-
-
+                report_citation_summary.add_cite_type(citation_type)
+                work_citation_summary.add_cite_type(citation_type)

                 cited_by.append(CitationReport(work=cited_by_work, citation_type=citation_type))

-            works.append(WorkReport(work=institution_work, cited_by=cited_by,
+            works.append(WorkReport(work=institution_work, cited_by=cited_by, citation_summary=work_citation_summary))
+
+        # Get sources full info.
+        sources: list[Source] = []
+        for dehydrated_source in dehydrated_sources:
+            source_id = identifier.get_source_id(dehydrated_source)
+            source_url = f"https://api.openalex.org/sources/{source_id}"
+            sources.append(await _get_source(client, source_url))
+
+        # Sort sources by h_index
+        sources_sorted = sorted(sources, key=lambda source: source.summary_stats.two_yr_mean_citedness, reverse=True)
+        sources_summary = SourcesSummary(sources=sources_sorted)

     return InstitutionReport(
         institution=institution,
         works=works,
-
-
-
-
+        citation_summary=report_citation_summary,
+        open_access_summary=open_access_summary,
+        works_type_summary=works_type_counter,
+        sources_summary=sources_summary,
    )
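The rewritten report builders now accept merged extra profiles plus optional publication and citation date filters. A minimal usage sketch, assuming pub-analyzer 0.3.0 is installed and that an OpenAlex author payload validates directly into the Author model (the author key below is a placeholder, not a real ID):

import asyncio
import datetime

import httpx

from pub_analyzer.internal.report import FromDate, ToDate, make_author_report
from pub_analyzer.models.author import Author


async def main() -> None:
    # Fetch an author record from the OpenAlex API (A0000000000 is a placeholder key).
    async with httpx.AsyncClient() as client:
        response = await client.get("https://api.openalex.org/authors/A0000000000")
        response.raise_for_status()
        author = Author(**response.json())

    # Restrict the report to works published 2020-2023 and to citations from 2021 on.
    report = await make_author_report(
        author=author,
        pub_from_date=FromDate(datetime.datetime(2020, 1, 1)),
        pub_to_date=ToDate(datetime.datetime(2023, 12, 31)),
        cited_from_date=FromDate(datetime.datetime(2021, 1, 1)),
    )
    print(len(report.works), report.open_access_summary)


asyncio.run(main())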
pub_analyzer/internal/templates/author/{author_resume.typ → author_summary.typ}
CHANGED

@@ -1,4 +1,4 @@
-// Author
+// Author Summary
 = Author.

 // Cards

@@ -22,8 +22,9 @@
   [#align(center)[#text(size: 12pt)[Last institution:]]],

   // Card content
-  {% if report.author.
-
+  {% if report.author.last_known_institutions%}
+  {% set last_known_institution = report.author.last_known_institutions[0] %}
+  [#align(left)[#text(size: 10pt)[- *Name:* {{ last_known_institution.display_name }}]]],
   [#align(left)[#text(size: 10pt)[- *Country:* MX]]],
   [#align(left)[#text(size: 10pt)[- *Type:* education]]],
   {% endif %}

pub_analyzer/internal/templates/author/report.typ
CHANGED

@@ -39,12 +39,13 @@
   columns: (1fr),
   row-gutter: 11pt,
   [#align(center, text(size: 17pt, weight: "bold")[{{ report.author.display_name }}])],
-  {% if report.author.
-
+  {% if report.author.last_known_institutions %}
+  {% set last_known_institution = report.author.last_known_institutions[0] %}
+  [#align(center, text(size: 15pt, weight: "thin")[{{ last_known_institution.display_name }}])],
   {% endif %}
 )

-{% include '
+{% include 'author_summary.typ' %}

 {% include 'works.typ' %}

pub_analyzer/internal/templates/author/sources.typ
CHANGED

@@ -2,19 +2,21 @@
 = Sources.

 #table(
-  columns: (auto, 3fr, 2fr, auto, auto, auto),
+  columns: (auto, 3fr, 2fr, auto, auto, auto, auto, auto),
   inset: 8pt,
   align: horizon,
   // Headers
-  [], [*Name*], [*Publisher or institution*], [*Type*], [*ISSN-L*], [*
+  [], [*Name*], [*Publisher or institution*], [*Type*], [*ISSN-L*], [*Impact factor*], [*h-index*], [*Is OA*],

   // Content
-  {% for source in report.
-  [{{ loop.index }}],
-  [#underline([#link("{{ source.
+  {% for source in report.sources_summary.sources %}
+  [#underline[3.{{ loop.index }}. #label("source_{{ source.id.path.rpartition("/")[2] }}")]],
+  [#underline([#link("{{ source.homepage_url }}")[#"{{ source.display_name }}"]])],
   [{{ source.host_organization_name or "-" }}],
   [{{source.type }}],
   [{{ source.issn_l or "-" }}],
+  [{{ source.summary_stats.two_yr_mean_citedness|round(3) }}],
+  [{{ source.summary_stats.h_index }}],
   [{% if source.is_oa %}#text(rgb("909d63"))[True]{% else %}#text(rgb("bc5653"))[False]{% endif %}],
   {% endfor %}
 )

pub_analyzer/internal/templates/author/works.typ
CHANGED

@@ -9,14 +9,14 @@
   [
     #align(center)[_Citation metrics_]
     #parbreak()
-    - *Count:* {{ report.
-    - *Type A:* {{ report.
-    - *Type B:* {{ report.
+    - *Count:* {{ report.citation_summary.type_a_count + report.citation_summary.type_b_count }}
+    - *Type A:* {{ report.citation_summary.type_a_count }}
+    - *Type B:* {{ report.citation_summary.type_b_count }}
   ],
   [
     #align(center)[_Work Type_]
     #parbreak()
-    {% for work_type in report.
+    {% for work_type in report.works_type_summary %}
     - *{{ work_type.type_name }}:* {{ work_type.count }}
     {% endfor %}
   ],

@@ -27,13 +27,13 @@
     columns: (1fr, 1fr),
     column-gutter: 15pt,
     [
-      - *gold:* {{report.
-      - *green:* {{report.
-      - *hybrid:* {{report.
+      - *gold:* {{report.open_access_summary.gold}}
+      - *green:* {{report.open_access_summary.green}}
+      - *hybrid:* {{report.open_access_summary.hybrid}}
     ],
     [
-      - *bronze:* {{report.
-      - *closed:* {{report.
+      - *bronze:* {{report.open_access_summary.bronze}}
+      - *closed:* {{report.open_access_summary.closed}}
     ],
   )
 ],

@@ -56,9 +56,9 @@
   [{{ work.work.type }}],
   [{% if work.work.ids.doi %}#underline([#link("{{ work.work.ids.doi }}")[DOI]]){% else %}-{% endif %}],
   [{{ work.work.publication_date }}],
-  [{{ work.
-  [{{ work.
-  [{{ work.
+  [{{ work.citation_summary.type_a_count + work.citation_summary.type_b_count }}],
+  [{{ work.citation_summary.type_a_count }}],
+  [{{ work.citation_summary.type_b_count }}],
   [{{ work.work.open_access.oa_status.value }}],
   {% endfor %}
 )

pub_analyzer/internal/templates/author/works_extended.typ
CHANGED

@@ -39,9 +39,9 @@
   [
     #align(center)[_Citation_]
     #parbreak()
-    - *Count:* {{ work.
-    - *Type A:* {{ work.
-    - *Type B:* {{ work.
+    - *Count:* {{ work.citation_summary.type_a_count + work.citation_summary.type_b_count }}
+    - *Type A:* {{ work.citation_summary.type_a_count }}
+    - *Type B:* {{ work.citation_summary.type_b_count }}
   ],
 )

@@ -83,7 +83,7 @@
   // Content
   {% for location in work.work.locations %}
   {% if location.source %}
-  [{{ loop.index }}],
+  [#underline([#link(label("source_{{ location.source.id.path.rpartition("/")[2] }}"))[{{ loop.index }}]])],
   [#underline([#link("{{ location.landing_page_url }}")[#"{{ location.source.display_name }}"]])],
   [{{ location.source.host_organization_name or "-" }}],
   [{{ location.source.type }}],
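The .typ files above are Typst templates carrying Jinja-style placeholders ({{ ... }} and {% ... %}) that pub-analyzer fills in from the report model before the PDF is compiled (pub_analyzer/internal/render.py changed by a single line in this release). A self-contained sketch of that templating pattern, not the package's actual render code; the template string and data below are invented for illustration:

import jinja2

# A miniature template in the same style as sources.typ above.
TEMPLATE = """= Sources.
{% for source in sources %}
- *{{ source.display_name }}* (h-index: {{ source.h_index }})
{% endfor %}"""

env = jinja2.Environment(loader=jinja2.DictLoader({"sources.typ": TEMPLATE}))
typst_markup = env.get_template("sources.typ").render(
    sources=[{"display_name": "Example Journal", "h_index": 42}]
)
print(typst_markup)  # Typst markup ready to be compiled into a PDF.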
pub_analyzer/main.py
CHANGED

@@ -18,15 +18,17 @@ from pub_analyzer.widgets.sidebar import SideBar
 class PubAnalyzerApp(App[DOMNode]):
     """Pub Analyzer App entrypoint."""

-
-
+    TITLE = "Pub Analyzer"
+
+    CSS_PATH: ClassVar[CSSPathType | None] = [
         "css/body.tcss",
         "css/buttons.tcss",
         "css/checkbox.tcss",
+        "css/collapsible.tcss",
         "css/datatable.tcss",
-        "css/institution.tcss",
         "css/main.tcss",
         "css/report.tcss",
+        "css/summary.tcss",
         "css/search.tcss",
         "css/tabs.tcss",
         "css/tree.tcss",

@@ -59,10 +61,7 @@ class PubAnalyzerApp(App[DOMNode]):
         """Take Screenshot."""
         file_path = self.app.save_screenshot()
         self.app.notify(
-            title="Screenshot saved!",
-            message=f"You can see the screenshot at {file_path}",
-            severity="information",
-            timeout=10.0
+            title="Screenshot saved!", message=f"You can see the screenshot at {file_path}", severity="information", timeout=10.0
         )

     def action_open_link(self, link: str) -> None:
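The app now registers the new collapsible.tcss and summary.tcss stylesheets and drops institution.tcss; in Textual, CSS_PATH may be a single path or a list of paths loaded in order. A stripped-down sketch of the same pattern (the widget and the two stylesheet paths are illustrative, and the files must exist next to the script for the app to start):

from textual.app import App, ComposeResult
from textual.widgets import Static


class MiniApp(App[None]):
    """Toy app wiring several stylesheets, as PubAnalyzerApp does."""

    TITLE = "Mini App"
    # Illustrative paths; Textual loads each listed file at startup.
    CSS_PATH = ["css/body.tcss", "css/summary.tcss"]

    def compose(self) -> ComposeResult:
        yield Static("Hello from a multi-stylesheet app")


if __name__ == "__main__":
    MiniApp().run()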
pub_analyzer/models/author.py
CHANGED

@@ -1,26 +1,26 @@
 """Authors models from OpenAlex API Schema definition."""

-from
+from typing import TypeAlias
+
+from pydantic import BaseModel, Field, HttpUrl

 from pub_analyzer.models.institution import DehydratedInstitution

+AuthorOpenAlexID: TypeAlias = HttpUrl
+"""OpenAlex ID for Author Objects with the format `https://openalex.org/A000000000`"""

-
-
+AuthorOpenAlexKey: TypeAlias = str
+"""OpenAlex author entity Key with the format `A000000000`"""

-    openalex: str
-    orcid: str | None = ""
-    scopus: str | None = ""
-    twitter: str | None = ""
-    wikipedia: str | None = ""

-
-
+class AuthorIDs(BaseModel):
+    """IDs from an Author."""

-
-
-
-
+    openalex: AuthorOpenAlexID
+    orcid: HttpUrl | None = None
+    scopus: HttpUrl | None = None
+    twitter: HttpUrl | None = None
+    wikipedia: HttpUrl | None = None


 class AuthorYearCount(BaseModel):

@@ -42,7 +42,7 @@ class AuthorSummaryStats(BaseModel):
 class Author(BaseModel):
     """Author Model Object from OpenAlex API definition."""

-    id:
+    id: AuthorOpenAlexID
     ids: AuthorIDs
     orcid: str | None = ""

@@ -52,7 +52,7 @@ class Author(BaseModel):
     works_count: int
     cited_by_count: int

-
+    last_known_institutions: list[DehydratedInstitution]
     counts_by_year: list[AuthorYearCount]

     summary_stats: AuthorSummaryStats

@@ -63,7 +63,7 @@ class Author(BaseModel):
 class DehydratedAuthor(BaseModel):
     """Stripped-down Author Model."""

-    id:
+    id: AuthorOpenAlexID
     display_name: str | None = None
     orcid: HttpUrl | None = None

@@ -71,18 +71,10 @@ class DehydratedAuthor(BaseModel):
 class AuthorResult(BaseModel):
     """Author result Model resulting from a search in OpenAlex."""

-    id:
+    id: AuthorOpenAlexID
     display_name: str
-    hint: str | None =
+    hint: str | None = None
     cited_by_count: int
     works_count: int
     entity_type: str
-    external_id: str | None =
-
-    # Allowing a value to be assigned during validation.
-    model_config = ConfigDict(validate_assignment=True)
-
-    @field_validator("hint", "external_id")
-    def set_default(cls, value: str) -> str:
-        """Define a default text."""
-        return value or ""
+    external_id: str | None = None
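AuthorIDs now validates every external identifier as an HttpUrl defaulting to None instead of a bare string defaulting to "", and AuthorResult drops its field_validator workaround. A small validation sketch, assuming pub-analyzer 0.3.0 is installed (the identifiers are invented):

from pub_analyzer.models.author import AuthorIDs, DehydratedAuthor

# Payload shaped like the `ids` object OpenAlex returns for an author.
ids = AuthorIDs.model_validate(
    {
        "openalex": "https://openalex.org/A0000000000",
        "orcid": "https://orcid.org/0000-0000-0000-0000",
        # scopus, twitter and wikipedia are omitted and default to None.
    }
)
print(ids.openalex, ids.scopus)

author = DehydratedAuthor.model_validate({"id": "https://openalex.org/A0000000000"})
print(author.display_name)  # None: optional fields stay unset in dehydrated records.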
pub_analyzer/models/concept.py
ADDED

@@ -0,0 +1,19 @@
+"""Concept model from OpenAlex API Schema definition."""
+
+from pydantic import BaseModel, HttpUrl
+
+
+class DehydratedConcept(BaseModel):
+    """Stripped-down Concept Model."""
+
+    id: HttpUrl
+    """The OpenAlex ID for this concept."""
+    display_name: str
+    """The English-language label of the concept."""
+
+    wikidata: HttpUrl
+    """The Wikidata ID for this concept. All OpenAlex concepts are also Wikidata concepts."""
+    level: int
+    """The level in the concept. Lower-level concepts are more general, and higher-level concepts are more specific."""
+    score: float
+    """The strength of the connection between the work and this concept (higher is stronger)."""