folio-data-import 0.2.8rc5.tar.gz → 0.2.8rc7.tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of folio-data-import might be problematic.
- {folio_data_import-0.2.8rc5 → folio_data_import-0.2.8rc7}/PKG-INFO +1 -1
- {folio_data_import-0.2.8rc5 → folio_data_import-0.2.8rc7}/pyproject.toml +1 -1
- {folio_data_import-0.2.8rc5 → folio_data_import-0.2.8rc7}/src/folio_data_import/MARCDataImport.py +50 -35
- {folio_data_import-0.2.8rc5 → folio_data_import-0.2.8rc7}/src/folio_data_import/marc_preprocessors/_preprocessors.py +1 -1
- {folio_data_import-0.2.8rc5 → folio_data_import-0.2.8rc7}/LICENSE +0 -0
- {folio_data_import-0.2.8rc5 → folio_data_import-0.2.8rc7}/README.md +0 -0
- {folio_data_import-0.2.8rc5 → folio_data_import-0.2.8rc7}/src/folio_data_import/UserImport.py +0 -0
- {folio_data_import-0.2.8rc5 → folio_data_import-0.2.8rc7}/src/folio_data_import/__init__.py +0 -0
- {folio_data_import-0.2.8rc5 → folio_data_import-0.2.8rc7}/src/folio_data_import/__main__.py +0 -0
- {folio_data_import-0.2.8rc5 → folio_data_import-0.2.8rc7}/src/folio_data_import/marc_preprocessors/__init__.py +0 -0
{folio_data_import-0.2.8rc5 → folio_data_import-0.2.8rc7}/pyproject.toml
RENAMED

@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "folio_data_import"
-version = "0.2.8rc5"
+version = "0.2.8rc7"
 description = "A python module to interact with the data importing capabilities of the open-source FOLIO ILS"
 authors = ["Brooks Travis <brooks.travis@gmail.com>"]
 license = "MIT"
{folio_data_import-0.2.8rc5 → folio_data_import-0.2.8rc7}/src/folio_data_import/MARCDataImport.py
RENAMED

@@ -21,6 +21,7 @@ import pymarc
 import tabulate
 from humps import decamelize
 from tqdm import tqdm
+from zmq import has


 try:
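The only change in this hunk is a new import. zmq.has() is pyzmq's capability probe, but the imported name is not used in any hunk shown on this page, suggesting a stray editor auto-import. A sketch of what the imported function actually does:

# pyzmq's capability probe: True if the bundled libzmq was built with the
# named transport/feature. Unrelated to MARC import as far as this diff shows.
from zmq import has

print(has("ipc"))    # IPC transport support
print(has("curve"))  # CURVE encryption support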
@@ -459,34 +460,36 @@ class MARCImportJob:
             await self.get_job_status()
             sleep(1)
             if self.finished:
-                job_summary … [the remaining 27 removed lines are not rendered in this diff view]
+                if job_summary := await self.get_job_summary():
+                    job_summary.pop("jobExecutionId")
+                    job_summary.pop("totalErrors")
+                    columns = ["Summary"] + list(job_summary.keys())
+                    rows = set()
+                    for key in columns[1:]:
+                        rows.update(job_summary[key].keys())
+
+                    table_data = []
+                    for row in rows:
+                        metric_name = decamelize(row).split("_")[1]
+                        table_row = [metric_name]
+                        for col in columns[1:]:
+                            table_row.append(job_summary[col].get(row, "N/A"))
+                        table_data.append(table_row)
+                    table_data.sort(key=lambda x: REPORT_SUMMARY_ORDERING.get(x[0], 99))
+                    columns = columns[:1] + [
+                        " ".join(decamelize(x).split("_")[:-1]) for x in columns[1:]
+                    ]
+                    print(
+                        f"Results for {'file' if len(self.current_file) == 1 else 'files'}: "
+                        f"{', '.join([os.path.basename(x.name) for x in self.current_file])}"
+                    )
+                    print(
+                        tabulate.tabulate(
+                            table_data, headers=columns, tablefmt="fancy_grid"
+                        ),
+                    )
+                else:
+                    print(f"No job summary available for job {self.job_id}.")
             self.last_current = 0
             self.finished = False
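The new block replaces the previous reporting code with a tabulated job summary. A standalone sketch of what it does, run against a hypothetical /metadata-provider/jobSummary payload; the payload shape and the REPORT_SUMMARY_ORDERING map are assumptions, not taken from the package:

import tabulate
from humps import decamelize

# Assumed ordering map: rows sort created/updated/discarded/errors
REPORT_SUMMARY_ORDERING = {"created": 0, "updated": 1, "discarded": 2, "errors": 3}

job_summary = {  # hypothetical response, jobExecutionId/totalErrors already popped
    "sourceRecordSummary": {
        "totalCreatedEntities": 10, "totalUpdatedEntities": 2,
        "totalDiscardedEntities": 0, "totalErrors": 0,
    },
    "instanceSummary": {
        "totalCreatedEntities": 10, "totalUpdatedEntities": 2,
        "totalDiscardedEntities": 0, "totalErrors": 1,
    },
}

columns = ["Summary"] + list(job_summary.keys())
rows = set()
for key in columns[1:]:
    rows.update(job_summary[key].keys())

table_data = []
for row in rows:
    metric_name = decamelize(row).split("_")[1]  # "totalCreatedEntities" -> "created"
    table_data.append(
        [metric_name] + [job_summary[col].get(row, "N/A") for col in columns[1:]]
    )
table_data.sort(key=lambda x: REPORT_SUMMARY_ORDERING.get(x[0], 99))

# "sourceRecordSummary" -> "source record"
columns = columns[:1] + [" ".join(decamelize(x).split("_")[:-1]) for x in columns[1:]]
print(tabulate.tabulate(table_data, headers=columns, tablefmt="fancy_grid"))

Each metric key becomes a row label and each top-level summary key becomes a column header, so the table reads created/updated/discarded/errors per record type.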
@@ -501,18 +504,28 @@ class MARCImportJob:
             self.current_retry_timeout = (
                 self.current_retry_timeout * RETRY_TIMEOUT_RETRY_FACTOR
             ) if self.current_retry_timeout else RETRY_TIMEOUT_START
-            job_summary = self.folio_client.folio_get(
-                f"/metadata-provider/jobSummary/{self.job_id}"
-            )
-            self.current_retry_timeout = None
-        except (httpx.ConnectTimeout, httpx.ReadTimeout):
-            sleep(.25)
             with httpx.Client(
                 timeout=self.current_retry_timeout,
                 verify=self.folio_client.ssl_verify
             ) as temp_client:
                 self.folio_client.httpx_client = temp_client
-                … [removed line not rendered in this diff view]
+                job_summary = self.folio_client.folio_get(
+                    f"/metadata-provider/jobSummary/{self.job_id}"
+                )
+                self.current_retry_timeout = None
+        except (httpx.ConnectTimeout, httpx.ReadTimeout, httpx.HTTPStatusError) as e:
+            if not hasattr(e, "response") or e.response.status_code == 502:
+                sleep(.25)
+                with httpx.Client(
+                    timeout=self.current_retry_timeout,
+                    verify=self.folio_client.ssl_verify
+                ) as temp_client:
+                    self.folio_client.httpx_client = temp_client
+                    return await self.get_job_status()
+            elif hasattr(e, "response") and e.response.status_code == 504:
+                job_summary = {}
+            else:
+                raise e
         return job_summary
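The reworked error handling now also catches httpx.HTTPStatusError: a connect/read timeout or a 502 triggers a retry on a fresh client with an escalating timeout, a 504 yields an empty summary (which the summary-printing code above reports as "No job summary available"), and any other status error is re-raised. A simplified, self-contained sketch of the pattern; fetch_summary, its URL, and the two constants are stand-ins, not the package's API:

from time import sleep
from typing import Optional

import httpx

RETRY_TIMEOUT_START = 10        # assumed starting timeout, in seconds
RETRY_TIMEOUT_RETRY_FACTOR = 2  # assumed growth factor per retry

def fetch_summary(url: str, timeout: Optional[float] = None) -> dict:
    # Grow the timeout on each attempt, mirroring the hunk above
    timeout = (
        timeout * RETRY_TIMEOUT_RETRY_FACTOR
    ) if timeout else RETRY_TIMEOUT_START
    try:
        with httpx.Client(timeout=timeout) as client:
            response = client.get(url)
            response.raise_for_status()  # raises httpx.HTTPStatusError
            return response.json()
    except (httpx.ConnectTimeout, httpx.ReadTimeout, httpx.HTTPStatusError) as e:
        # Timeouts carry no .response attribute; treat them like a 502
        if not hasattr(e, "response") or e.response.status_code == 502:
            sleep(0.25)
            return fetch_summary(url, timeout)  # retry with a longer timeout
        elif e.response.status_code == 504:
            return {}  # gateway timeout: report "no summary available"
        raise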
@@ -595,6 +608,8 @@ async def main() -> None:
     else:
         marc_files = list(Path("./").glob(args.marc_file_path))

+    marc_files.sort()
+
     if len(marc_files) == 0:
         print(f"No files found matching {args.marc_file_path}. Exiting.")
         sys.exit(1)
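Path.glob() yields matches in OS-dependent order, so the added marc_files.sort() gives multi-file runs a stable, lexicographic processing order. A small illustration with hypothetical file names:

from pathlib import Path

# Hypothetical glob results, returned by the filesystem in arbitrary order
marc_files = [Path("batch_10.mrc"), Path("batch_01.mrc"), Path("batch_02.mrc")]
marc_files.sort()  # Path objects compare by their string parts
print([p.name for p in marc_files])  # ['batch_01.mrc', 'batch_02.mrc', 'batch_10.mrc']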