academic-refchecker 1.2.48__tar.gz → 1.2.50__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {academic_refchecker-1.2.48/src/academic_refchecker.egg-info → academic_refchecker-1.2.50}/PKG-INFO +1 -1
- {academic_refchecker-1.2.48 → academic_refchecker-1.2.50}/src/__version__.py +1 -1
- {academic_refchecker-1.2.48 → academic_refchecker-1.2.50/src/academic_refchecker.egg-info}/PKG-INFO +1 -1
- {academic_refchecker-1.2.48 → academic_refchecker-1.2.50}/src/checkers/enhanced_hybrid_checker.py +84 -0
- {academic_refchecker-1.2.48 → academic_refchecker-1.2.50}/src/checkers/openreview_checker.py +467 -4
- {academic_refchecker-1.2.48 → academic_refchecker-1.2.50}/src/checkers/semantic_scholar.py +2 -2
- {academic_refchecker-1.2.48 → academic_refchecker-1.2.50}/src/core/parallel_processor.py +7 -5
- {academic_refchecker-1.2.48 → academic_refchecker-1.2.50}/src/core/refchecker.py +50 -15
- {academic_refchecker-1.2.48 → academic_refchecker-1.2.50}/src/utils/arxiv_utils.py +123 -77
- {academic_refchecker-1.2.48 → academic_refchecker-1.2.50}/src/utils/error_utils.py +33 -0
- {academic_refchecker-1.2.48 → academic_refchecker-1.2.50}/src/utils/text_utils.py +6 -0
- {academic_refchecker-1.2.48 → academic_refchecker-1.2.50}/LICENSE +0 -0
- {academic_refchecker-1.2.48 → academic_refchecker-1.2.50}/MANIFEST.in +0 -0
- {academic_refchecker-1.2.48 → academic_refchecker-1.2.50}/README.md +0 -0
- {academic_refchecker-1.2.48 → academic_refchecker-1.2.50}/pyproject.toml +0 -0
- {academic_refchecker-1.2.48 → academic_refchecker-1.2.50}/requirements.txt +0 -0
- {academic_refchecker-1.2.48 → academic_refchecker-1.2.50}/scripts/download_db.py +0 -0
- {academic_refchecker-1.2.48 → academic_refchecker-1.2.50}/scripts/run_tests.py +0 -0
- {academic_refchecker-1.2.48 → academic_refchecker-1.2.50}/scripts/start_vllm_server.py +0 -0
- {academic_refchecker-1.2.48 → academic_refchecker-1.2.50}/setup.cfg +0 -0
- {academic_refchecker-1.2.48 → academic_refchecker-1.2.50}/src/__init__.py +0 -0
- {academic_refchecker-1.2.48 → academic_refchecker-1.2.50}/src/academic_refchecker.egg-info/SOURCES.txt +0 -0
- {academic_refchecker-1.2.48 → academic_refchecker-1.2.50}/src/academic_refchecker.egg-info/dependency_links.txt +0 -0
- {academic_refchecker-1.2.48 → academic_refchecker-1.2.50}/src/academic_refchecker.egg-info/entry_points.txt +0 -0
- {academic_refchecker-1.2.48 → academic_refchecker-1.2.50}/src/academic_refchecker.egg-info/requires.txt +0 -0
- {academic_refchecker-1.2.48 → academic_refchecker-1.2.50}/src/academic_refchecker.egg-info/top_level.txt +0 -0
- {academic_refchecker-1.2.48 → academic_refchecker-1.2.50}/src/checkers/__init__.py +0 -0
- {academic_refchecker-1.2.48 → academic_refchecker-1.2.50}/src/checkers/crossref.py +0 -0
- {academic_refchecker-1.2.48 → academic_refchecker-1.2.50}/src/checkers/github_checker.py +0 -0
- {academic_refchecker-1.2.48 → academic_refchecker-1.2.50}/src/checkers/local_semantic_scholar.py +0 -0
- {academic_refchecker-1.2.48 → academic_refchecker-1.2.50}/src/checkers/openalex.py +0 -0
- {academic_refchecker-1.2.48 → academic_refchecker-1.2.50}/src/checkers/webpage_checker.py +0 -0
- {academic_refchecker-1.2.48 → academic_refchecker-1.2.50}/src/config/__init__.py +0 -0
- {academic_refchecker-1.2.48 → academic_refchecker-1.2.50}/src/config/logging.conf +0 -0
- {academic_refchecker-1.2.48 → academic_refchecker-1.2.50}/src/config/settings.py +0 -0
- {academic_refchecker-1.2.48 → academic_refchecker-1.2.50}/src/core/__init__.py +0 -0
- {academic_refchecker-1.2.48 → academic_refchecker-1.2.50}/src/core/db_connection_pool.py +0 -0
- {academic_refchecker-1.2.48 → academic_refchecker-1.2.50}/src/database/__init__.py +0 -0
- {academic_refchecker-1.2.48 → academic_refchecker-1.2.50}/src/database/download_semantic_scholar_db.py +0 -0
- {academic_refchecker-1.2.48 → academic_refchecker-1.2.50}/src/llm/__init__.py +0 -0
- {academic_refchecker-1.2.48 → academic_refchecker-1.2.50}/src/llm/base.py +0 -0
- {academic_refchecker-1.2.48 → academic_refchecker-1.2.50}/src/llm/providers.py +0 -0
- {academic_refchecker-1.2.48 → academic_refchecker-1.2.50}/src/scripts/__init__.py +0 -0
- {academic_refchecker-1.2.48 → academic_refchecker-1.2.50}/src/scripts/start_vllm_server.py +0 -0
- {academic_refchecker-1.2.48 → academic_refchecker-1.2.50}/src/services/__init__.py +0 -0
- {academic_refchecker-1.2.48 → academic_refchecker-1.2.50}/src/services/pdf_processor.py +0 -0
- {academic_refchecker-1.2.48 → academic_refchecker-1.2.50}/src/utils/__init__.py +0 -0
- {academic_refchecker-1.2.48 → academic_refchecker-1.2.50}/src/utils/author_utils.py +0 -0
- {academic_refchecker-1.2.48 → academic_refchecker-1.2.50}/src/utils/biblatex_parser.py +0 -0
- {academic_refchecker-1.2.48 → academic_refchecker-1.2.50}/src/utils/bibliography_utils.py +0 -0
- {academic_refchecker-1.2.48 → academic_refchecker-1.2.50}/src/utils/bibtex_parser.py +0 -0
- {academic_refchecker-1.2.48 → academic_refchecker-1.2.50}/src/utils/config_validator.py +0 -0
- {academic_refchecker-1.2.48 → academic_refchecker-1.2.50}/src/utils/db_utils.py +0 -0
- {academic_refchecker-1.2.48 → academic_refchecker-1.2.50}/src/utils/doi_utils.py +0 -0
- {academic_refchecker-1.2.48 → academic_refchecker-1.2.50}/src/utils/mock_objects.py +0 -0
- {academic_refchecker-1.2.48 → academic_refchecker-1.2.50}/src/utils/unicode_utils.py +0 -0
- {academic_refchecker-1.2.48 → academic_refchecker-1.2.50}/src/utils/url_utils.py +0 -0
{academic_refchecker-1.2.48 → academic_refchecker-1.2.50}/src/checkers/enhanced_hybrid_checker.py
RENAMED
@@ -312,12 +312,36 @@ class EnhancedHybridReferenceChecker:
         if (self.openreview and
             hasattr(self.openreview, 'is_openreview_reference') and
             self.openreview.is_openreview_reference(reference)):
+            logger.debug("Enhanced Hybrid: Trying OpenReview URL-based verification")
             verified_data, errors, url, success, failure_type = self._try_api('openreview', self.openreview, reference)
             if success:
                 return verified_data, errors, url
             if failure_type in ['throttled', 'timeout', 'server_error']:
                 failed_apis.append(('openreview', self.openreview, failure_type))

+        # Strategy 5b: Try OpenReview by search if venue suggests it might be there
+        elif (self.openreview and
+              hasattr(self.openreview, 'verify_reference_by_search')):
+            # Check if venue suggests this might be on OpenReview
+            venue = reference.get('venue', reference.get('journal', '')).lower()
+            openreview_venues = [
+                'iclr', 'icml', 'neurips', 'nips', 'aaai', 'ijcai',
+                'international conference on learning representations',
+                'international conference on machine learning',
+                'neural information processing systems'
+            ]
+
+            venue_suggests_openreview = any(or_venue in venue for or_venue in openreview_venues)
+            logger.debug(f"Enhanced Hybrid: OpenReview venue check - venue: '{venue}', suggests: {venue_suggests_openreview}")
+
+            if venue_suggests_openreview:
+                logger.debug("Enhanced Hybrid: Trying OpenReview search-based verification")
+                verified_data, errors, url, success, failure_type = self._try_openreview_search(reference)
+                if success:
+                    return verified_data, errors, url
+                if failure_type in ['throttled', 'timeout', 'server_error']:
+                    failed_apis.append(('openreview_search', self.openreview, failure_type))
+
         # Strategy 6: Try CrossRef if we haven't already (for non-DOI references)
         if not self._should_try_doi_apis_first(reference) and self.crossref:
             verified_data, errors, url, success, failure_type = self._try_api('crossref', self.crossref, reference)
@@ -399,6 +423,66 @@ class EnhancedHybridReferenceChecker:
                 'error_details': 'Could not verify reference using any available API'
             }], None

+    def _try_openreview_search(self, reference: Dict[str, Any]) -> Tuple[Optional[Dict[str, Any]], List[Dict[str, Any]], Optional[str], bool, str]:
+        """
+        Try to verify reference using OpenReview search
+
+        Returns:
+            Tuple of (verified_data, errors, url, success, failure_type)
+        """
+        if not self.openreview:
+            return None, [], None, False, 'none'
+
+        start_time = time.time()
+        failure_type = 'none'
+
+        try:
+            verified_data, errors, url = self.openreview.verify_reference_by_search(reference)
+            duration = time.time() - start_time
+
+            # Consider it successful if we found data or verification errors
+            success = verified_data is not None or len(errors) > 0
+            self._update_api_stats('openreview', success, duration)
+
+            if success:
+                logger.debug(f"Enhanced Hybrid: OpenReview search successful in {duration:.2f}s, URL: {url}")
+                return verified_data, errors, url, True, 'none'
+            else:
+                logger.debug(f"Enhanced Hybrid: OpenReview search found no results in {duration:.2f}s")
+                return None, [], None, False, 'not_found'
+
+        except requests.exceptions.Timeout as e:
+            duration = time.time() - start_time
+            self._update_api_stats('openreview', False, duration)
+            failure_type = 'timeout'
+            logger.debug(f"Enhanced Hybrid: OpenReview search timed out in {duration:.2f}s: {e}")
+            return None, [], None, False, failure_type
+
+        except requests.exceptions.RequestException as e:
+            duration = time.time() - start_time
+            self._update_api_stats('openreview', False, duration)
+
+            # Check if it's a rate limiting error
+            if hasattr(e, 'response') and e.response is not None:
+                if e.response.status_code in [429, 503]:
+                    failure_type = 'throttled'
+                elif e.response.status_code >= 500:
+                    failure_type = 'server_error'
+                else:
+                    failure_type = 'other'
+            else:
+                failure_type = 'other'
+
+            logger.debug(f"Enhanced Hybrid: OpenReview search failed in {duration:.2f}s: {type(e).__name__}: {e}")
+            return None, [], None, False, failure_type
+
+        except Exception as e:
+            duration = time.time() - start_time
+            self._update_api_stats('openreview', False, duration)
+            failure_type = 'other'
+            logger.debug(f"Enhanced Hybrid: OpenReview search error in {duration:.2f}s: {type(e).__name__}: {e}")
+            return None, [], None, False, failure_type
+
     def get_performance_stats(self) -> Dict[str, Any]:
         """
         Get performance statistics for all APIs
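Note that only 'throttled', 'timeout', and 'server_error' outcomes are re-queued in failed_apis; everything else is treated as a permanent miss. A minimal standalone sketch of the same classification (the function name classify_request_failure is illustrative, not part of the package):

    import requests

    def classify_request_failure(exc: Exception) -> str:
        # Timeouts get their own category so the caller can retry with a longer budget.
        if isinstance(exc, requests.exceptions.Timeout):
            return 'timeout'
        response = getattr(exc, 'response', None)
        if response is not None:
            if response.status_code in (429, 503):
                return 'throttled'      # rate limited; worth retrying later
            if response.status_code >= 500:
                return 'server_error'   # transient server-side fault
        return 'other'                  # treated as permanent; not re-queued

This mirrors the status-code checks in _try_openreview_search above.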
{academic_refchecker-1.2.48 → academic_refchecker-1.2.50}/src/checkers/openreview_checker.py
RENAMED
@@ -498,6 +498,160 @@ class OpenReviewReferenceChecker:
         logger.debug(f"OpenReview verification completed for: {openreview_url}")
         return verified_data, errors, openreview_url

+    def verify_by_search(self, reference: Dict[str, Any]) -> Tuple[Optional[Dict[str, Any]], List[Dict[str, Any]], Optional[str]]:
+        """
+        Verify a reference by searching OpenReview (when no URL is provided)
+
+        Args:
+            reference: Reference dictionary with title, authors, year, etc.
+
+        Returns:
+            Tuple of (verified_data, errors, paper_url) where:
+            - verified_data: Dict with verified OpenReview paper data or None
+            - errors: List of error/warning dictionaries
+            - paper_url: The OpenReview URL if found
+        """
+        logger.debug(f"Searching OpenReview for reference: {reference.get('title', 'Untitled')}")
+
+        title = reference.get('title', '').strip()
+        authors = reference.get('authors', [])
+        year = reference.get('year')
+        venue = reference.get('venue', '').strip()
+
+        if not title:
+            return None, [], None
+
+        # Check if venue suggests this might be on OpenReview
+        if not self._is_likely_openreview_venue(venue):
+            logger.debug(f"Venue '{venue}' doesn't suggest OpenReview, skipping search")
+            return None, [], None
+
+        # Search for matching papers
+        search_results = self.search_paper(title, authors, year)
+
+        if not search_results:
+            logger.debug("No matching papers found on OpenReview")
+            return None, [], None
+
+        # Use the best match (first result, as they're sorted by relevance)
+        best_match = search_results[0]
+        paper_url = best_match.get('forum_url')
+
+        logger.debug(f"Found OpenReview match: {best_match.get('title', 'Untitled')}")
+
+        # Verify the reference against the found paper
+        errors = []
+
+        # Check title match
+        cited_title = reference.get('title', '').strip()
+        paper_title = best_match.get('title', '').strip()
+
+        if cited_title and paper_title:
+            similarity = calculate_title_similarity(cited_title, paper_title)
+            if similarity < 0.8:  # Slightly higher threshold for search results
+                from utils.error_utils import format_title_mismatch
+                details = format_title_mismatch(cited_title, paper_title) + f" (similarity: {similarity:.2f})"
+                errors.append({
+                    "warning_type": "title",
+                    "warning_details": details
+                })
+
+        # Check authors
+        cited_authors = reference.get('authors', [])
+        paper_authors = best_match.get('authors', [])
+
+        if cited_authors and paper_authors:
+            # Convert to list format if needed
+            if isinstance(cited_authors, str):
+                cited_authors = [author.strip() for author in cited_authors.split(',')]
+            if isinstance(paper_authors, str):
+                paper_authors = [author.strip() for author in paper_authors.split(',')]
+
+            # Use the existing author comparison function
+            match, error_msg = compare_authors(cited_authors, paper_authors)
+            if not match and error_msg:
+                errors.append({
+                    "warning_type": "author",
+                    "warning_details": error_msg
+                })
+
+        # Check year
+        cited_year = reference.get('year')
+        paper_year = best_match.get('year')
+
+        if cited_year and paper_year:
+            try:
+                cited_year_int = int(cited_year)
+                paper_year_int = int(paper_year)
+
+                is_different, year_message = is_year_substantially_different(cited_year_int, paper_year_int)
+                if is_different and year_message:
+                    from utils.error_utils import format_year_mismatch
+                    errors.append({
+                        "warning_type": "year",
+                        "warning_details": format_year_mismatch(cited_year_int, paper_year_int)
+                    })
+            except (ValueError, TypeError):
+                pass  # Skip year validation if conversion fails
+
+        # Check venue if provided in reference
+        cited_venue = reference.get('venue', '').strip()
+        paper_venue = best_match.get('venue', '').strip()
+
+        if cited_venue and paper_venue:
+            if are_venues_substantially_different(cited_venue, paper_venue):
+                from utils.error_utils import format_venue_mismatch
+                errors.append({
+                    "warning_type": "venue",
+                    "warning_details": format_venue_mismatch(cited_venue, paper_venue)
+                })
+
+        # Create verified data structure
+        verified_data = {
+            'title': best_match.get('title', cited_title),
+            'authors': best_match.get('authors', cited_authors),
+            'year': best_match.get('year', cited_year),
+            'venue': best_match.get('venue', cited_venue),
+            'url': paper_url,
+            'abstract': best_match.get('abstract', ''),
+            'keywords': best_match.get('keywords', []),
+            'openreview_metadata': best_match,
+            'verification_source': 'OpenReview (search)'
+        }
+
+        logger.debug(f"OpenReview search verification completed for: {paper_url}")
+        return verified_data, errors, paper_url
+
+    def _is_likely_openreview_venue(self, venue: str) -> bool:
+        """
+        Check if a venue suggests the paper might be on OpenReview
+
+        Args:
+            venue: Venue string from reference
+
+        Returns:
+            True if venue suggests OpenReview
+        """
+        if not venue:
+            return False
+
+        venue_lower = venue.lower()
+
+        # Common venues that use OpenReview
+        openreview_venues = [
+            'iclr', 'international conference on learning representations',
+            'neurips', 'neural information processing systems', 'nips',
+            'icml', 'international conference on machine learning',
+            'iclr workshop', 'neurips workshop', 'icml workshop',
+            'aaai', 'ijcai', 'aistats'
+        ]
+
+        for or_venue in openreview_venues:
+            if or_venue in venue_lower:
+                return True
+
+        return False
+
     def search_paper(self, title: str, authors: List[str] = None, year: int = None) -> List[Dict[str, Any]]:
         """
         Search for papers on OpenReview by title, authors, and/or year
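As a rough usage sketch of the new search path (assuming the checker constructs without required arguments; the reference values are made up):

    checker = OpenReviewReferenceChecker()
    reference = {
        'title': 'An Example Paper Title',
        'authors': ['Ada Lovelace', 'Alan Turing'],
        'year': 2022,
        'venue': 'ICLR',  # must look like an OpenReview venue or the search is skipped
    }
    verified_data, warnings, url = checker.verify_by_search(reference)
    if verified_data:
        print(f"Matched '{verified_data['title']}' at {url} ({len(warnings)} warning(s))")

Mismatches from this path are emitted as warnings (warning_type of title, author, year, venue) rather than hard errors, since a fuzzy search hit is weaker evidence than an explicit URL.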
@@ -510,7 +664,316 @@ class OpenReviewReferenceChecker:
         Returns:
             List of matching paper metadata dictionaries
         """
-
-
-
-
+        if not title or not title.strip():
+            return []
+
+        logger.debug(f"Searching OpenReview for: {title}")
+
+        # Clean title for search
+        search_title = clean_title_for_search(title)
+
+        # Try API search first
+        results = self._search_via_api(search_title, authors, year)
+        if results:
+            return results
+
+        # If API search fails, try web search as fallback
+        return self._search_via_web(search_title, authors, year)
+
+    def _search_via_api(self, title: str, authors: List[str] = None, year: int = None) -> List[Dict[str, Any]]:
+        """
+        Search using OpenReview API
+
+        Args:
+            title: Clean title to search for
+            authors: List of author names (optional)
+            year: Publication year (optional)
+
+        Returns:
+            List of matching paper dictionaries
+        """
+        try:
+            # The OpenReview API requires specific parameters
+            # We'll search by content.title or content.venue (for venue-based search)
+            search_params = {
+                'limit': 20,  # Limit results to avoid overwhelming the API
+                'details': 'directReplies'  # Get basic details
+            }
+
+            # Try searching by venue first if year suggests recent conferences
+            if year and year >= 2017:  # OpenReview started around 2017
+                venues_by_year = {
+                    2025: ['ICLR 2025'],
+                    2024: ['ICLR 2024', 'NeurIPS 2024', 'ICML 2024'],
+                    2023: ['ICLR 2023', 'NeurIPS 2023', 'ICML 2023'],
+                    2022: ['ICLR 2022', 'NeurIPS 2022', 'ICML 2022'],
+                    2021: ['ICLR 2021', 'NeurIPS 2021', 'ICML 2021'],
+                    2020: ['ICLR 2020', 'NeurIPS 2020', 'ICML 2020'],
+                    2019: ['ICLR 2019', 'NeurIPS 2019', 'ICML 2019'],
+                    2018: ['ICLR 2018', 'NeurIPS 2018', 'ICML 2018'],
+                    2017: ['ICLR 2017']
+                }
+
+                possible_venues = venues_by_year.get(year, [])
+
+                results = []
+                for venue in possible_venues:
+                    # Search by venue and then filter by title
+                    venue_params = search_params.copy()
+                    venue_params['content.venue'] = venue
+
+                    api_url = f"{self.api_url}/notes"
+                    response = self._respectful_request(api_url, params=venue_params)
+
+                    if response and response.status_code == 200:
+                        try:
+                            data = response.json()
+                            if 'notes' in data and data['notes']:
+                                for note in data['notes']:
+                                    try:
+                                        metadata = self._parse_api_response(note)
+                                        if metadata and self._is_good_match(metadata, title, authors, year):
+                                            results.append(metadata)
+                                            if len(results) >= 5:  # Limit results
+                                                break
+                                    except Exception as e:
+                                        logger.debug(f"Error parsing note: {e}")
+                                        continue
+
+                                if results:
+                                    break  # Found results, no need to search other venues
+
+                        except (json.JSONDecodeError, KeyError) as e:
+                            logger.debug(f"Failed to parse venue search response: {e}")
+                            continue
+                    else:
+                        logger.debug(f"Venue search failed for {venue}: {response.status_code if response else 'No response'}")
+
+                if results:
+                    logger.debug(f"OpenReview API search found {len(results)} matches via venue search")
+                    return results
+
+            # If venue search didn't work, try other approaches
+            # OpenReview API is quite restrictive, so we might need to fall back to web scraping
+            logger.debug("OpenReview API venue search returned no results, trying web search")
+            return []
+
+        except Exception as e:
+            logger.debug(f"OpenReview API search error: {e}")
+            return []
+
+    def _search_via_web(self, title: str, authors: List[str] = None, year: int = None) -> List[Dict[str, Any]]:
+        """
+        Search using OpenReview web interface (fallback)
+
+        Args:
+            title: Clean title to search for
+            authors: List of author names (optional)
+            year: Publication year (optional)
+
+        Returns:
+            List of matching paper dictionaries
+        """
+        try:
+            # Build search URL
+            search_query = title.replace(' ', '+')
+            search_url = f"{self.base_url}/search?term={search_query}"
+
+            response = self._respectful_request(search_url)
+            if not response or response.status_code != 200:
+                return []
+
+            # Parse search results page
+            soup = BeautifulSoup(response.text, 'html.parser')
+
+            # Look for paper links in search results
+            # OpenReview search results typically contain links to forum pages
+            results = []
+
+            # Find links that look like OpenReview paper URLs
+            for link in soup.find_all('a', href=True):
+                href = link.get('href', '')
+                if '/forum?id=' in href:
+                    paper_id = self.extract_paper_id(href)
+                    if paper_id:
+                        # Get full metadata for this paper
+                        metadata = self.get_paper_metadata(paper_id)
+                        if metadata and self._is_good_match(metadata, title, authors, year):
+                            results.append(metadata)
+                            if len(results) >= 5:  # Limit results
+                                break
+
+            logger.debug(f"OpenReview web search found {len(results)} matches")
+            return results
+
+        except Exception as e:
+            logger.debug(f"OpenReview web search error: {e}")
+            return []
+
+    def _is_good_match(self, metadata: Dict[str, Any], search_title: str, authors: List[str] = None, year: int = None) -> bool:
+        """
+        Check if the found paper is a good match for the search criteria
+
+        Args:
+            metadata: Paper metadata from OpenReview
+            search_title: Title we're searching for
+            authors: Authors we're looking for (optional)
+            year: Year we're looking for (optional)
+
+        Returns:
+            True if it's a good match
+        """
+        paper_title = metadata.get('title', '')
+        if not paper_title:
+            return False
+
+        # Check title similarity
+        title_similarity = calculate_title_similarity(search_title, paper_title)
+        if title_similarity < 0.7:  # Require at least 70% similarity
+            return False
+
+        # Check year if provided
+        if year:
+            paper_year = metadata.get('year')
+            if paper_year and abs(int(paper_year) - year) > 1:  # Allow 1 year difference
+                return False
+
+        # Check authors if provided
+        if authors and len(authors) > 0:
+            paper_authors = metadata.get('authors', [])
+            if paper_authors:
+                # Check if at least one author matches
+                author_match = False
+                for search_author in authors[:2]:  # Check first 2 authors
+                    for paper_author in paper_authors[:3]:  # Check first 3 paper authors
+                        if is_name_match(search_author, paper_author):
+                            author_match = True
+                            break
+                    if author_match:
+                        break
+
+                if not author_match:
+                    return False
+
+        return True
+
+    def search_by_title(self, title: str, max_results: int = 5) -> List[Dict[str, Any]]:
+        """
+        Search OpenReview for papers by title using the working search API.
+
+        Args:
+            title: Paper title to search for
+            max_results: Maximum number of results to return
+
+        Returns:
+            List of paper data dictionaries
+        """
+        try:
+            # Use OpenReview's search API with term parameter (this works!)
+            params = {
+                'term': title,
+                'limit': max_results
+            }
+
+            response = self._respectful_request(f"{self.api_url}/notes/search", params=params)
+            if not response or response.status_code != 200:
+                logger.debug(f"OpenReview search API failed with status {response.status_code if response else 'None'}")
+                return []
+
+            data = response.json()
+            papers = []
+
+            for note in data.get('notes', []):
+                # Filter to exact or close title matches
+                note_title = note.get('content', {}).get('title', '')
+                if self._is_title_match(title, note_title):
+                    paper_data = self._parse_api_response(note)
+                    if paper_data:
+                        papers.append(paper_data)
+
+            logger.debug(f"OpenReview search found {len(papers)} matching papers for '{title}'")
+            return papers
+
+        except Exception as e:
+            logger.error(f"Error searching OpenReview by title '{title}': {e}")
+            return []
+
+    def _is_title_match(self, search_title: str, found_title: str, threshold: float = 0.8) -> bool:
+        """
+        Check if two titles match closely enough.
+
+        Args:
+            search_title: Title we're searching for
+            found_title: Title found in search results
+            threshold: Similarity threshold (0.0 to 1.0)
+
+        Returns:
+            True if titles match closely enough
+        """
+        if not search_title or not found_title:
+            return False
+
+        # Exact match
+        if search_title.lower().strip() == found_title.lower().strip():
+            return True
+
+        # Check if one contains the other (for cases where one is longer)
+        search_clean = search_title.lower().strip()
+        found_clean = found_title.lower().strip()
+
+        if search_clean in found_clean or found_clean in search_clean:
+            return True
+
+        # Use similarity calculation from text_utils
+        try:
+            from utils.text_utils import calculate_title_similarity
+            similarity = calculate_title_similarity(search_title, found_title)
+            return similarity >= threshold
+        except ImportError:
+            # Fallback to simple word matching
+            search_words = set(search_clean.split())
+            found_words = set(found_clean.split())
+
+            if not search_words or not found_words:
+                return False
+
+            intersection = search_words.intersection(found_words)
+            union = search_words.union(found_words)
+
+            jaccard_similarity = len(intersection) / len(union) if union else 0
+            return jaccard_similarity >= threshold
+
+    def verify_reference_by_search(self, reference: Dict[str, Any]) -> Tuple[Optional[Dict[str, Any]], List[Dict[str, Any]], Optional[str]]:
+        """
+        Verify a reference by searching OpenReview (for papers without URLs).
+
+        Args:
+            reference: Reference data dictionary
+
+        Returns:
+            Tuple of (verified_data, errors_and_warnings, debug_info)
+        """
+        title = reference.get('title', '').strip()
+        if not title:
+            return None, [], "No title provided for search"
+
+        # Search for the paper
+        search_results = self.search_by_title(title)
+
+        if not search_results:
+            return None, [], f"No papers found on OpenReview for title: {title}"
+
+        # Take the best match (first result, as search is already filtered)
+        best_match = search_results[0]
+
+        # Use the existing verify_reference method with the found URL
+        forum_url = best_match.get('forum_url')
+        if forum_url:
+            # Create a reference with the OpenReview URL for verification
+            reference_with_url = reference.copy()
+            reference_with_url['url'] = forum_url
+
+            return self.verify_reference(reference_with_url)
+
+        # If no URL, return the metadata as verification
+        return best_match, [], f"Found on OpenReview: {best_match.get('title')}"
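The ImportError fallback in _is_title_match is a plain Jaccard similarity over lowercased word sets. The same computation as a self-contained sketch (the helper name is illustrative):

    def jaccard_title_similarity(a: str, b: str) -> float:
        # Compare titles as sets of lowercase words: |A ∩ B| / |A ∪ B|.
        words_a = set(a.lower().strip().split())
        words_b = set(b.lower().strip().split())
        if not words_a or not words_b:
            return 0.0
        return len(words_a & words_b) / len(words_a | words_b)

    # jaccard_title_similarity("Attention Is All You Need",
    #                          "attention is all you need")  -> 1.0

The default threshold of 0.8 here is higher than the 0.7 title-similarity cutoff used in _is_good_match.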
{academic_refchecker-1.2.48 → academic_refchecker-1.2.50}/src/checkers/semantic_scholar.py
RENAMED
@@ -583,8 +583,8 @@ class NonArxivReferenceChecker:

         if not (has_arxiv_url or has_arxiv_doi):
             errors.append({
-                '
-                '
+                'info_type': 'url',
+                'info_details': f"Reference could include arXiv URL: {arxiv_url}",
                 'ref_url_correct': arxiv_url
             })

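This hunk introduces a third, informational severity level alongside the existing error/warning dicts. A sketch of the three shapes as they appear across these diffs (field values are illustrative):

    error   = {'error_type': 'unverified',
               'error_details': 'Could not verify reference using any available API'}
    warning = {'warning_type': 'title',
               'warning_details': 'Cited title differs from the OpenReview record'}
    info    = {'info_type': 'url',
               'info_details': 'Reference could include arXiv URL: <url>',
               'ref_url_correct': '<url>'}

The parallel_processor.py hunks below update the display loop to recognize the new info_type/info_details keys.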
{academic_refchecker-1.2.48 → academic_refchecker-1.2.50}/src/core/parallel_processor.py
RENAMED
@@ -340,7 +340,7 @@ class ParallelReferenceProcessor:
         # Display errors and warnings
         if result.errors:
             # Check if there's an unverified error
-            has_unverified_error = any(e.get('error_type') == 'unverified' or e.get('warning_type') == 'unverified' for e in result.errors)
+            has_unverified_error = any(e.get('error_type') == 'unverified' or e.get('warning_type') == 'unverified' or e.get('info_type') == 'unverified' for e in result.errors)

             if has_unverified_error:
                 # Use the centralized unverified error display function from base checker
@@ -348,9 +348,9 @@ class ParallelReferenceProcessor:

             # Display all non-unverified errors and warnings
             for error in result.errors:
-                if error.get('error_type') != 'unverified' and error.get('warning_type') != 'unverified':
-                    error_type = error.get('error_type') or error.get('warning_type')
-                    error_details = error.get('error_details') or error.get('warning_details', 'Unknown error')
+                if error.get('error_type') != 'unverified' and error.get('warning_type') != 'unverified' and error.get('info_type') != 'unverified':
+                    error_type = error.get('error_type') or error.get('warning_type') or error.get('info_type')
+                    error_details = error.get('error_details') or error.get('warning_details') or error.get('info_details', 'Unknown error')

                     from utils.error_utils import print_labeled_multiline

@@ -359,8 +359,10 @@ class ParallelReferenceProcessor:
                         print(f" ❌ {error_details}")
                     elif 'error_type' in error:
                         print_labeled_multiline("❌ Error", error_details)
-
+                    elif 'warning_type' in error:
                         print_labeled_multiline("⚠️ Warning", error_details)
+                    else:
+                        print_labeled_multiline("ℹ️ Information", error_details)

             # Show timing info for slow references
             if result.processing_time > 5.0:
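The display precedence is error over warning over info, keyed on which *_type field is present. A compact sketch of that triage (the helper name is illustrative, not part of the package):

    def triage(entry: dict) -> tuple[str, str]:
        # Mirror the precedence used above: error > warning > info.
        details = (entry.get('error_details')
                   or entry.get('warning_details')
                   or entry.get('info_details', 'Unknown error'))
        if 'error_type' in entry:
            return '❌ Error', details
        if 'warning_type' in entry:
            return '⚠️ Warning', details
        return 'ℹ️ Information', details

    label, details = triage({'info_type': 'url',
                             'info_details': 'Reference could include arXiv URL'})
    # -> ('ℹ️ Information', 'Reference could include arXiv URL')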