folio-data-import 0.1.0 (py3-none-any.whl)
This diff represents the content of a publicly available package version released to one of the supported registries. It is provided for informational purposes only and reflects the package as it appears in its public registry.
Potentially problematic release: this version of folio-data-import might be problematic.
- folio_data_import-0.1.0.dist-info/LICENSE +21 -0
- folio_data_import-0.1.0.dist-info/METADATA +61 -0
- folio_data_import-0.1.0.dist-info/RECORD +9 -0
- folio_data_import-0.1.0.dist-info/WHEEL +4 -0
- folio_data_import-0.1.0.dist-info/entry_points.txt +5 -0
- src/folio_data_import/MARCDataImport.py +528 -0
- src/folio_data_import/UserImport.py +724 -0
- src/folio_data_import/__init__.py +0 -0
- src/folio_data_import/__main__.py +109 -0

--- /dev/null
+++ folio_data_import-0.1.0.dist-info/LICENSE
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2024 EBSCO Information Services
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.

--- /dev/null
+++ folio_data_import-0.1.0.dist-info/METADATA
@@ -0,0 +1,61 @@
+Metadata-Version: 2.1
+Name: folio_data_import
+Version: 0.1.0
+Summary: A Python module to interact with the data importing capabilities of the open-source FOLIO ILS
+License: MIT
+Author: Brooks Travis
+Author-email: brooks.travis@gmail.com
+Requires-Python: >=3.10,<4.0
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
+Requires-Dist: aiofiles (>=24.1.0,<25.0.0)
+Requires-Dist: folioclient (>=0.60.5,<0.61.0)
+Requires-Dist: httpx (>=0.23.0,<0.24.0)
+Requires-Dist: inquirer (>=3.4.0,<4.0.0)
+Requires-Dist: pydantic (>=2.8.2,<3.0.0)
+Requires-Dist: pyhumps (>=3.8.0,<4.0.0)
+Requires-Dist: pymarc (>=5.2.2,<6.0.0)
+Requires-Dist: tabulate (>=0.9.0,<0.10.0)
+Requires-Dist: tqdm (>=4.66.5,<5.0.0)
+Description-Content-Type: text/markdown
+
+# folio_data_import
+
+## Description
+
+This project is designed to import data into the FOLIO LSP. It provides a simple and efficient way to import data from various sources using FOLIO's REST APIs.
+
+## Features
+
+- Import MARC records using FOLIO's Data Import system
+- Import User records using FOLIO's User APIs
+
+## Installation
+
+To install the project using Poetry, follow these steps:
+
+1. Clone the repository.
+2. Navigate to the project directory: `$ cd /path/to/folio_data_import`.
+3. Install Poetry if you haven't already: `$ pip install poetry`.
+4. Install the project dependencies: `$ poetry install`.
+5. Run the application using Poetry: `$ poetry run python -m folio_data_import --help`.
+
+Alternatively, activate the virtual environment created by Poetry and run the application directly.
+
+## Usage
+
+1. Prepare the data to be imported in the specified format.
+2. Run the application and follow the prompts to import the data.
+3. Monitor the import progress and handle any errors or conflicts that may arise.
+
+## Contributing
+
+Contributions are welcome! If you have any ideas, suggestions, or bug reports, please open an issue or submit a pull request.
+
+## License
+
+This project is licensed under the [MIT License](LICENSE).
+
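
The README's Usage steps describe the interactive CLI flow; the package can also be driven programmatically. What follows is a minimal sketch based on the MARCImportJob and FolioClient constructors shown in the MARCDataImport.py diff below; the gateway URL, tenant, credentials, file name, and job profile name are all placeholders.

import asyncio
from pathlib import Path

import folioclient
from folio_data_import.MARCDataImport import MARCImportJob

async def run_import() -> None:
    # Placeholder connection details; substitute your own FOLIO environment.
    client = folioclient.FolioClient(
        "https://folio-gateway.example.org", "example_tenant", "username", "password"
    )
    job = MARCImportJob(
        client,
        [Path("records.mrc")],         # placeholder MARC file path
        "My MARC Bib import profile",  # placeholder job profile name
        batch_size=10,
        no_progress=True,              # disable progress bars outside a terminal
    )
    await job.do_work()

asyncio.run(run_import())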

--- /dev/null
+++ folio_data_import-0.1.0.dist-info/RECORD
@@ -0,0 +1,9 @@
+src/folio_data_import/MARCDataImport.py,sha256=uTgK6jHckVMh3BJycD5uRUpPNjZuppqSjjIm6rPDpSw,19117
+src/folio_data_import/UserImport.py,sha256=CXdGHJI6NHJuB6yBg9rLZG62NEha2aaK9YKJ6v-4cEw,27390
+src/folio_data_import/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+src/folio_data_import/__main__.py,sha256=b-UeuwXjw89i8-ZPJr1ue5qK4bTHSgTAzB_iNjAv5Ls,3634
+folio_data_import-0.1.0.dist-info/LICENSE,sha256=5kxqhOaS2dDgFtPiP_1f9ljqmu7d7FaUWtto6SjrVxE,1082
+folio_data_import-0.1.0.dist-info/METADATA,sha256=DUIPA17q6YXxk0H3JwZ9kzBLPSM2T8F7UlgJFxcyIcM,2176
+folio_data_import-0.1.0.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
+folio_data_import-0.1.0.dist-info/entry_points.txt,sha256=usF39DtjTTaw8XtBKauL3KwQas66GndsmW1YJ4VfeLo,204
+folio_data_import-0.1.0.dist-info/RECORD,,
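
Each RECORD row has the form path,sha256=<digest>,size, where the digest is an unpadded urlsafe-base64 SHA-256 of the file (per PEP 376 and PEP 627). A small sketch for spot-checking a row against an unpacked copy of the wheel; the unpacked_wheel directory name is a placeholder.

import base64
import hashlib
from pathlib import Path

def check_record_entry(entry: str, wheel_root: Path) -> bool:
    """Return True if the named file matches its RECORD hash and size.

    Note: the RECORD file's own row has empty hash and size fields
    and must be skipped rather than passed to this function.
    """
    path, hash_field, size = entry.rsplit(",", 2)
    data = (wheel_root / path).read_bytes()
    digest = base64.urlsafe_b64encode(hashlib.sha256(data).digest()).rstrip(b"=")
    return hash_field == f"sha256={digest.decode()}" and int(size) == len(data)

# The empty __init__.py entry above is an easy self-check: its digest is
# the urlsafe-base64 SHA-256 of zero bytes.
print(check_record_entry(
    "src/folio_data_import/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0",
    Path("unpacked_wheel"),  # placeholder: directory where the wheel was unzipped
))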

--- /dev/null
+++ src/folio_data_import/MARCDataImport.py
@@ -0,0 +1,528 @@
+import argparse
+import asyncio
+import datetime
+import glob
+import io
+import os
+import uuid
+from contextlib import ExitStack
+from datetime import datetime as dt
+from getpass import getpass
+from pathlib import Path
+from time import sleep
+from typing import List
+
+import folioclient
+import httpx
+import inquirer
+import pymarc
+import tabulate
+from humps import decamelize
+from tqdm import tqdm
+
+
+# datetime.UTC was added in Python 3.11; fall back to timezone.utc on 3.10.
+try:
+    datetime_utc = datetime.UTC
+except AttributeError:
+    datetime_utc = datetime.timezone.utc
+
+
+# The order in which the report summary should be displayed
+REPORT_SUMMARY_ORDERING = {"created": 0, "updated": 1, "discarded": 2, "error": 3}
+
+
+class MARCImportJob:
+    """
+    Class to manage importing MARC data (Bib, Authority) into FOLIO using the Change Manager
+    APIs (https://github.com/folio-org/mod-source-record-manager/tree/master?tab=readme-ov-file#data-import-workflow),
+    rather than file-based Data Import. When executed in an interactive environment, it can provide progress bars
+    for tracking the number of records both uploaded and processed.
+
+    Args:
+        folio_client (FolioClient): An instance of the FolioClient class.
+        marc_files (list): A list of Path objects representing the MARC files to import.
+        import_profile_name (str): The name of the data import job profile to use.
+        batch_size (int): The number of source records to include in a record batch (default=10).
+        batch_delay (float): The number of seconds to wait between record batches (default=0).
+        consolidate (bool): Consolidate files into a single job. Default is one job for each file.
+        no_progress (bool): Disable progress bars (e.g. for running in a CI environment).
+    """
+
+    bad_records_file: io.TextIOWrapper
+    failed_batches_file: io.TextIOWrapper
+    job_id: str
+    job_import_profile: dict
+    pbar_sent: tqdm
+    pbar_imported: tqdm
+    http_client: httpx.Client
+    current_file: List[Path]
+    record_batch: List[bytes] = []
+    error_records: int = 0
+    last_current: int = 0
+    total_records_sent: int = 0
+    finished: bool = False
+
+    def __init__(
+        self,
+        folio_client: folioclient.FolioClient,
+        marc_files: List[Path],
+        import_profile_name: str,
+        batch_size=10,
+        batch_delay=0,
+        consolidate=False,
+        no_progress=False,
+    ) -> None:
+        self.consolidate_files = consolidate
+        self.no_progress = no_progress
+        self.folio_client: folioclient.FolioClient = folio_client
+        self.import_files = marc_files
+        self.import_profile_name = import_profile_name
+        self.batch_size = batch_size
+        self.batch_delay = batch_delay
+
+    async def do_work(self) -> None:
+        """
+        Performs the necessary work for data import.
+
+        This method initializes an HTTP client and files to store records that fail to send,
+        then calls `self.import_marc_file` to import the MARC files. If `consolidate_files` is
+        True, it imports all the files specified in `import_files` as a single job. Otherwise,
+        it imports each file as a separate import job.
+
+        Returns:
+            None
+        """
+        with httpx.Client() as http_client, open(
+            self.import_files[0].parent.joinpath(
+                f"bad_marc_records_{dt.now(tz=datetime_utc).strftime('%Y%m%d%H%M%S')}.mrc"
+            ),
+            "wb+",
+        ) as bad_marc_file, open(
+            self.import_files[0].parent.joinpath(
+                f"failed_batches_{dt.now(tz=datetime_utc).strftime('%Y%m%d%H%M%S')}.mrc"
+            ),
+            "wb+",
+        ) as failed_batches:
+            self.bad_records_file = bad_marc_file
+            print(f"Writing bad records to {self.bad_records_file.name}")
+            self.failed_batches_file = failed_batches
+            print(f"Writing failed batches to {self.failed_batches_file.name}")
+            self.http_client = http_client
+            if self.consolidate_files:
+                self.current_file = self.import_files
+                await self.import_marc_file()
+            else:
+                for file in self.import_files:
+                    self.current_file = [file]
+                    await self.import_marc_file()
+            await self.wrap_up()
+
+    async def wrap_up(self) -> None:
+        """
+        Wraps up the data import process.
+
+        This method is called after the import process is complete.
+        It removes the bad-records and failed-batches files if they are empty.
+
+        Returns:
+            None
+        """
+        self.bad_records_file.seek(0)
+        if not self.bad_records_file.read(1):
+            os.remove(self.bad_records_file.name)
+            print("No bad records found. Removing bad records file.")
+        self.failed_batches_file.seek(0)
+        if not self.failed_batches_file.read(1):
+            os.remove(self.failed_batches_file.name)
+            print("No failed batches. Removing failed batches file.")
+        print("Import complete.")
+        print(f"Total records imported: {self.total_records_sent}")
+
+    async def get_job_status(self) -> None:
+        """
+        Retrieves the status of a job execution and updates the progress bar.
+
+        Returns:
+            None
+
+        Raises:
+            IndexError: If the job execution with the specified ID is not found.
+        """
+        job_status = self.folio_client.folio_get(
+            "/metadata-provider/jobExecutions?statusNot=DISCARDED&uiStatusAny"
+            "=PREPARING_FOR_PREVIEW&uiStatusAny=READY_FOR_PREVIEW&uiStatusAny=RUNNING&limit=50"
+        )
+        try:
+            status = [
+                job for job in job_status["jobExecutions"] if job["id"] == self.job_id
+            ][0]
+            self.pbar_imported.update(status["progress"]["current"] - self.last_current)
+            self.last_current = status["progress"]["current"]
+        except IndexError:
+            # The job is no longer running; look for it among completed jobs.
+            job_status = self.folio_client.folio_get(
+                "/metadata-provider/jobExecutions?limit=100&sortBy=completed_date%2Cdesc&statusAny"
+                "=COMMITTED&statusAny=ERROR&statusAny=CANCELLED"
+            )
+            status = [
+                job for job in job_status["jobExecutions"] if job["id"] == self.job_id
+            ][0]
+            self.pbar_imported.update(status["progress"]["current"] - self.last_current)
+            self.last_current = status["progress"]["current"]
+            self.finished = True
+
+    async def create_folio_import_job(self) -> None:
+        """
+        Creates a job execution for importing data into FOLIO.
+
+        Returns:
+            None
+
+        Raises:
+            HTTPError: If there is an error creating the job.
+        """
+        create_job = self.http_client.post(
+            self.folio_client.okapi_url + "/change-manager/jobExecutions",
+            headers=self.folio_client.okapi_headers,
+            json={"sourceType": "ONLINE", "userId": self.folio_client.current_user},
+        )
+        try:
+            create_job.raise_for_status()
+        except httpx.HTTPError as e:
+            print(
+                "Error creating job: "
+                + str(e)
+                + "\n"
+                + getattr(getattr(e, "response", ""), "text", "")
+            )
+            raise e
+        self.job_id = create_job.json()["parentJobExecutionId"]
+
+    async def get_import_profile(self) -> None:
+        """
+        Retrieves the import profile with the specified name.
+        """
+        import_profiles = self.folio_client.folio_get(
+            "/data-import-profiles/jobProfiles",
+            "jobProfiles",
+            query_params={"limit": "1000"},
+        )
+        profile = [
+            profile
+            for profile in import_profiles
+            if profile["name"] == self.import_profile_name
+        ][0]
+        self.job_import_profile = profile
+
+    async def set_job_profile(self) -> None:
+        """
+        Sets the job profile for the current job execution.
+
+        Returns:
+            None
+        """
+        set_job_profile = self.http_client.put(
+            self.folio_client.okapi_url
+            + "/change-manager/jobExecutions/"
+            + self.job_id
+            + "/jobProfile",
+            headers=self.folio_client.okapi_headers,
+            json={
+                "id": self.job_import_profile["id"],
+                "name": self.job_import_profile["name"],
+                "dataType": "MARC",
+            },
+        )
+        try:
+            set_job_profile.raise_for_status()
+        except httpx.HTTPError as e:
+            print(
+                "Error setting job profile: "
+                + str(e)
+                + "\n"
+                + getattr(getattr(e, "response", ""), "text", "")
+            )
+            raise e
+
+    async def read_total_records(self, files) -> int:
+        """
+        Reads the total number of records from the given files.
+
+        Args:
+            files (list): List of files to read.
+
+        Returns:
+            int: The total number of records found in the files.
+        """
+        total_records = 0
+        for import_file in files:
+            while True:
+                chunk = import_file.read(1024)
+                if not chunk:
+                    break
+                total_records += chunk.count(b"\x1d")  # 0x1D is the MARC record terminator
+            import_file.seek(0)
+        return total_records
+
+    async def process_record_batch(self, batch_payload) -> None:
+        """
+        Posts a record batch to the change manager.
+
+        Args:
+            batch_payload (dict): A records payload containing the current batch of MARC records.
+        """
+        post_batch = self.http_client.post(
+            self.folio_client.okapi_url
+            + f"/change-manager/jobExecutions/{self.job_id}/records",
+            headers=self.folio_client.okapi_headers,
+            json=batch_payload,
+        )
+        try:
+            post_batch.raise_for_status()
+            self.total_records_sent += len(self.record_batch)
+            self.record_batch = []
+            self.pbar_sent.update(len(batch_payload["initialRecords"]))
+        except Exception as e:
+            print("Error posting batch: " + str(e))
+            for record in self.record_batch:
+                self.failed_batches_file.write(record)
+            self.error_records += len(self.record_batch)
+            self.pbar_sent.total = self.pbar_sent.total - len(self.record_batch)
+            self.record_batch = []
+        sleep(self.batch_delay)
+
+    async def process_records(self, files, total_records) -> None:
+        """
+        Process records from the given files.
+
+        Args:
+            files (list): List of files to process.
+            total_records (int): Total number of records to process.
+
+        Returns:
+            None
+        """
+        counter = 0
+        for import_file in files:
+            self.pbar_sent.set_description(
+                f"Sent ({os.path.basename(import_file.name)}): "
+            )
+            reader = pymarc.MARCReader(import_file, hide_utf8_warnings=True)
+            for record in reader:
+                if len(self.record_batch) == self.batch_size:
+                    await self.process_record_batch(
+                        await self.create_batch_payload(counter, total_records, False),
+                    )
+                    await self.get_job_status()
+                    sleep(0.25)
+                if record:
+                    self.record_batch.append(record.as_marc())
+                    counter += 1
+                else:
+                    # Unparseable record: save the raw bytes for later inspection.
+                    self.bad_records_file.write(reader.current_chunk)
+        if self.record_batch:
+            await self.process_record_batch(
+                await self.create_batch_payload(counter, total_records, True),
+            )
+
+    async def create_batch_payload(self, counter, total_records, is_last) -> dict:
+        """
+        Create a batch payload for data import.
+
+        Args:
+            counter (int): The number of records read so far.
+            total_records (int): The total number of records.
+            is_last (bool): Indicates if this is the last batch.
+
+        Returns:
+            dict: The batch payload containing the ID, records metadata, and initial records.
+        """
+        return {
+            "id": str(uuid.uuid4()),
+            "recordsMetadata": {
+                "last": is_last,
+                "counter": counter - self.error_records,
+                "contentType": "MARC_RAW",
+                "total": total_records - self.error_records,
+            },
+            "initialRecords": [{"record": x.decode()} for x in self.record_batch],
+        }
+
+    async def import_marc_file(self) -> None:
+        """
+        Imports MARC file(s) into the system.
+
+        This method performs the following steps:
+        1. Creates a FOLIO import job.
+        2. Retrieves the import profile.
+        3. Sets the job profile.
+        4. Opens the MARC file(s) and reads the total number of records.
+        5. Displays progress bars for imported and sent records.
+        6. Processes the records and updates the progress bars.
+        7. Checks the job status periodically until the import is finished.
+
+        Note: This method assumes that the necessary instance attributes are already set.
+
+        Returns:
+            None
+        """
+        await self.create_folio_import_job()
+        await self.get_import_profile()
+        await self.set_job_profile()
+        with ExitStack() as stack:
+            files = [
+                stack.enter_context(open(file, "rb")) for file in self.current_file
+            ]
+            total_records = await self.read_total_records(files)
+            with tqdm(
+                desc="Imported: ",
+                total=total_records,
+                position=1,
+                disable=self.no_progress,
+            ) as pbar_imported, tqdm(
+                desc="Sent: ()",
+                total=total_records,
+                position=0,
+                disable=self.no_progress,
+            ) as pbar_sent:
+                self.pbar_sent = pbar_sent
+                self.pbar_imported = pbar_imported
+                await self.process_records(files, total_records)
+                while not self.finished:
+                    await self.get_job_status()
+                    sleep(1)
+        if self.finished:
+            job_summary = self.folio_client.folio_get(
+                f"/metadata-provider/jobSummary/{self.job_id}"
+            )
+            job_summary.pop("jobExecutionId")
+            job_summary.pop("totalErrors")
+            columns = ["Summary"] + list(job_summary.keys())
+            rows = set()
+            for key in columns[1:]:
+                rows.update(job_summary[key].keys())
+
+            table_data = []
+            for row in rows:
+                metric_name = decamelize(row).split("_")[1]
+                table_row = [metric_name]
+                for col in columns[1:]:
+                    table_row.append(job_summary[col].get(row, "N/A"))
+                table_data.append(table_row)
+            table_data.sort(key=lambda x: REPORT_SUMMARY_ORDERING.get(x[0], 99))
+            columns = columns[:1] + [
+                " ".join(decamelize(x).split("_")[:-1]) for x in columns[1:]
+            ]
+            print(
+                f"Results for {'file' if len(self.current_file) == 1 else 'files'}: "
+                f"{', '.join([os.path.basename(x.name) for x in self.current_file])}"
+            )
+            print(
+                tabulate.tabulate(
+                    table_data, headers=columns, tablefmt="fancy_grid"
+                ),
+            )
+            self.last_current = 0
+            self.finished = False
+
+
+async def main() -> None:
+    """
+    Main function to run the MARC import job.
+
+    This function parses command line arguments, initializes the FolioClient,
+    and runs the MARCImportJob.
+    """
+    parser = argparse.ArgumentParser()
+    parser.add_argument("--gateway_url", type=str, help="The FOLIO API Gateway URL")
+    parser.add_argument("--tenant_id", type=str, help="The FOLIO tenant ID")
+    parser.add_argument("--username", type=str, help="The FOLIO username")
+    parser.add_argument("--password", type=str, help="The FOLIO password", default="")
+    parser.add_argument(
+        "--marc_file_path",
+        type=str,
+        help="The MARC file (or file glob, using shell globbing syntax) to import",
+    )
+    parser.add_argument(
+        "--import_profile_name",
+        type=str,
+        help="The name of the data import job profile to use",
+        default="",
+    )
+    parser.add_argument(
+        "--batch_size",
+        type=int,
+        help="The number of source records to include in a record batch sent to FOLIO.",
+        default=10,
+    )
+    parser.add_argument(
+        "--batch_delay",
+        type=float,
+        help="The number of seconds to wait between record batches.",
+        default=0.0,
+    )
+    parser.add_argument(
+        "--consolidate",
+        action="store_true",
+        help=(
+            "Consolidate records into a single job. "
+            "Default is to create a new job for each MARC file."
+        ),
+    )
+    parser.add_argument(
+        "--no-progress",
+        action="store_true",
+        help="Disable progress bars (e.g. for running in a CI environment)",
+    )
+    args = parser.parse_args()
+    if not args.password:
+        args.password = getpass("Enter FOLIO password: ")
+    folio_client = folioclient.FolioClient(
+        args.gateway_url, args.tenant_id, args.username, args.password
+    )
+    if not args.import_profile_name:
+        import_profiles = folio_client.folio_get(
+            "/data-import-profiles/jobProfiles",
+            "jobProfiles",
+            query_params={"limit": "1000"},
+        )
+        import_profile_names = [
+            profile["name"]
+            for profile in import_profiles
+            if "marc" in profile["dataType"].lower()
+        ]
+        questions = [
+            inquirer.List(
+                "import_profile_name",
+                message="Select an import profile",
+                choices=import_profile_names,
+            )
+        ]
+        answers = inquirer.prompt(questions)
+        args.import_profile_name = answers["import_profile_name"]
+    marc_files = [Path(x) for x in glob.glob(args.marc_file_path, root_dir="./")]
+    print(marc_files)
+    try:
+        await MARCImportJob(
+            folio_client,
+            marc_files,
+            args.import_profile_name,
+            batch_size=args.batch_size,
+            batch_delay=args.batch_delay,
+            consolidate=bool(args.consolidate),
+            no_progress=bool(args.no_progress),
+        ).do_work()
+    except Exception as e:
+        print("Error importing files: " + str(e))
+        raise
+
+
+def sync_main() -> None:
+    """
+    Synchronous main function to run the MARC import job.
+    """
+    asyncio.run(main())
+
+
+if __name__ == "__main__":
+    asyncio.run(main())
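
For reference, the JSON that create_batch_payload builds and process_record_batch posts to /change-manager/jobExecutions/{jobId}/records looks roughly like the sketch below. The UUID, the counts, and the truncated raw MARC string are hand-written placeholders, not values from an actual run.

# Sketch of one record batch as assembled by create_batch_payload above.
example_batch_payload = {
    "id": "6f3d1c2e-0000-4000-8000-000000000000",  # placeholder uuid4
    "recordsMetadata": {
        "last": False,        # True only on the final batch of the job
        "counter": 10,        # records read so far, net of errored records
        "contentType": "MARC_RAW",
        "total": 250,         # records detected in the file(s), net of errored records
    },
    "initialRecords": [
        {"record": "00714cam a2200205 a 4500..."},  # raw MARC record, truncated here
    ],
}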