folio-migration-tools 1.9.10__py3-none-any.whl → 1.10.0b1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- folio_migration_tools/__init__.py +3 -4
- folio_migration_tools/__main__.py +44 -31
- folio_migration_tools/circulation_helper.py +114 -105
- folio_migration_tools/custom_dict.py +2 -2
- folio_migration_tools/custom_exceptions.py +4 -5
- folio_migration_tools/folder_structure.py +1 -1
- folio_migration_tools/helper.py +1 -1
- folio_migration_tools/library_configuration.py +65 -37
- folio_migration_tools/mapper_base.py +38 -25
- folio_migration_tools/mapping_file_transformation/courses_mapper.py +1 -1
- folio_migration_tools/mapping_file_transformation/holdings_mapper.py +7 -3
- folio_migration_tools/mapping_file_transformation/item_mapper.py +13 -26
- folio_migration_tools/mapping_file_transformation/manual_fee_fines_mapper.py +1 -2
- folio_migration_tools/mapping_file_transformation/mapping_file_mapper_base.py +13 -11
- folio_migration_tools/mapping_file_transformation/order_mapper.py +6 -5
- folio_migration_tools/mapping_file_transformation/organization_mapper.py +3 -3
- folio_migration_tools/mapping_file_transformation/user_mapper.py +43 -28
- folio_migration_tools/marc_rules_transformation/conditions.py +84 -70
- folio_migration_tools/marc_rules_transformation/holdings_statementsparser.py +13 -5
- folio_migration_tools/marc_rules_transformation/hrid_handler.py +3 -2
- folio_migration_tools/marc_rules_transformation/marc_file_processor.py +14 -22
- folio_migration_tools/marc_rules_transformation/rules_mapper_authorities.py +1 -0
- folio_migration_tools/marc_rules_transformation/rules_mapper_base.py +46 -36
- folio_migration_tools/marc_rules_transformation/rules_mapper_bibs.py +25 -15
- folio_migration_tools/marc_rules_transformation/rules_mapper_holdings.py +62 -32
- folio_migration_tools/migration_report.py +1 -1
- folio_migration_tools/migration_tasks/authority_transformer.py +1 -2
- folio_migration_tools/migration_tasks/batch_poster.py +78 -68
- folio_migration_tools/migration_tasks/bibs_transformer.py +12 -7
- folio_migration_tools/migration_tasks/courses_migrator.py +2 -3
- folio_migration_tools/migration_tasks/holdings_csv_transformer.py +14 -15
- folio_migration_tools/migration_tasks/holdings_marc_transformer.py +11 -21
- folio_migration_tools/migration_tasks/items_transformer.py +17 -30
- folio_migration_tools/migration_tasks/loans_migrator.py +53 -131
- folio_migration_tools/migration_tasks/migration_task_base.py +33 -55
- folio_migration_tools/migration_tasks/orders_transformer.py +21 -39
- folio_migration_tools/migration_tasks/organization_transformer.py +9 -18
- folio_migration_tools/migration_tasks/requests_migrator.py +11 -15
- folio_migration_tools/migration_tasks/reserves_migrator.py +1 -1
- folio_migration_tools/migration_tasks/user_transformer.py +10 -15
- folio_migration_tools/task_configuration.py +6 -7
- folio_migration_tools/transaction_migration/legacy_loan.py +15 -27
- folio_migration_tools/transaction_migration/legacy_request.py +1 -1
- {folio_migration_tools-1.9.10.dist-info → folio_migration_tools-1.10.0b1.dist-info}/METADATA +18 -28
- {folio_migration_tools-1.9.10.dist-info → folio_migration_tools-1.10.0b1.dist-info}/RECORD +47 -50
- folio_migration_tools-1.10.0b1.dist-info/WHEEL +4 -0
- folio_migration_tools-1.10.0b1.dist-info/entry_points.txt +3 -0
- folio_migration_tools/test_infrastructure/__init__.py +0 -0
- folio_migration_tools/test_infrastructure/mocked_classes.py +0 -406
- folio_migration_tools-1.9.10.dist-info/WHEEL +0 -4
- folio_migration_tools-1.9.10.dist-info/entry_points.txt +0 -3
- folio_migration_tools-1.9.10.dist-info/licenses/LICENSE +0 -21
@@ -47,15 +47,13 @@ class MigrationTaskBase:
         logging.info("MigrationTaskBase init")
         self.start_datetime = datetime.now(timezone.utc)
         self.task_configuration = task_configuration
-        logging.info(self.task_configuration.
+        logging.info(self.task_configuration.model_dump_json(indent=4))
         self.folio_client: FolioClient = folio_client
         self.ecs_tenant_id = (
             task_configuration.ecs_tenant_id or library_configuration.ecs_tenant_id
         )
-        self.
-
-        )
-        self.folio_client.okapi_headers.update(self.ecs_tenant_header)
+        self.folio_client.tenant_id = self.ecs_tenant_id
+
         self.central_folder_structure: Optional[FolderStructure] = None
         if library_configuration.is_ecs and library_configuration.ecs_central_iteration_identifier:
             self.central_folder_structure = FolderStructure(
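The first hunk above switches the task-configuration logging to pydantic v2's `model_dump_json`; the removed call is truncated in this view, but the added line is the v2 serialization API. A minimal sketch of that call on a hypothetical model (not the package's actual configuration class):

```python
import logging

from pydantic import BaseModel


class ExampleTaskConfiguration(BaseModel):
    name: str = "transform_bibs"
    migration_task_type: str = "BibsTransformer"


task_configuration = ExampleTaskConfiguration()
# Pretty-printed JSON dump of the model, pydantic v2 style
# (pydantic v1 spelled this .json(indent=4)).
logging.info(task_configuration.model_dump_json(indent=4))
```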
@@ -133,11 +131,7 @@ class MigrationTaskBase:
             TransformationProcessError: _description_

         """
-        files = [
-            source_path / f.file_name
-            for f in file_defs
-            if isfile(source_path / f.file_name)
-        ]
+        files = [source_path / f.file_name for f in file_defs if isfile(source_path / f.file_name)]
         ret_str = ", ".join(f.file_name for f in file_defs)

         if files and len(files) < len(file_defs):
@@ -162,12 +156,13 @@ class MigrationTaskBase:
         This is in the base class because multiple tasks need it. It exists because instances in an ECS environment
         are transformed for the central and data tenants separately, but the data tenants need to know about
         the central tenant instance ids. This is a bit of a hack, but it works for now.
-        """
+        """  # noqa: E501
         map_files = []
         instance_id_map = {}
         if self.library_configuration.is_ecs and self.central_folder_structure:
             logging.info(
-                "Loading ECS central tenant instance id map from %s",
+                "Loading ECS central tenant instance id map from %s",
+                self.central_folder_structure.instance_id_map_path,
             )
             instance_id_map = self.load_id_map(
                 self.central_folder_structure.instance_id_map_path,
@@ -176,7 +171,7 @@ class MigrationTaskBase:
             map_files.append(str(self.central_folder_structure.instance_id_map_path))
         logging.info(
             "Loading member tenant isntance id map from %s",
-            self.folder_structure.instance_id_map_path
+            self.folder_structure.instance_id_map_path,
         )
         instance_id_map = self.load_id_map(
             self.folder_structure.instance_id_map_path,
@@ -190,13 +185,11 @@ class MigrationTaskBase:
         return instance_id_map

     @staticmethod
-    def load_id_map(map_path, raise_if_empty=False, existing_id_map=
+    def load_id_map(map_path, raise_if_empty=False, existing_id_map=None):
         if not isfile(map_path):
-            logging.warning(
-                "No legacy id map found at %s. Will build one from scratch", map_path
-            )
+            logging.warning("No legacy id map found at %s. Will build one from scratch", map_path)
             return {}
-        id_map = existing_id_map
+        id_map = existing_id_map or {}
         loaded_rows = len(id_map)
         with open(map_path) as id_map_file:
             for index, json_string in enumerate(id_map_file, start=1):
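The `load_id_map` change above gives `existing_id_map` a `None` default and normalizes it with `or {}`. A minimal sketch (hypothetical function, not from the package) of why a `None` default plus `or {}` is preferred over a mutable `{}` default in Python:

```python
def collect(value, bucket=None):
    # A fresh dict is created on every call; a `bucket={}` default would be
    # shared between calls and keep accumulating earlier values.
    bucket = bucket or {}
    bucket[value] = True
    return bucket


print(collect("a"))  # {'a': True}
print(collect("b"))  # {'b': True} -- no leftover 'a' from the previous call
```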
@@ -247,9 +240,7 @@ class MigrationTaskBase:
         else:
             logger.setLevel(logging.INFO)
             stream_handler.setLevel(logging.INFO)
-            stream_handler.addFilter(
-                ExcludeLevelFilter(30)
-            )  # Exclude warnings from pymarc
+            stream_handler.addFilter(ExcludeLevelFilter(30))  # Exclude warnings from pymarc
         stream_handler.setFormatter(formatter)
         logger.addHandler(stream_handler)

@@ -280,22 +271,16 @@ class MigrationTaskBase:
     def setup_records_map(self, mapping_file_path):
         with open(mapping_file_path) as mapping_file:
             field_map = json.load(mapping_file)
-            logging.info(
-                "%s fields present in record mapping file", len(field_map["data"])
-            )
+            logging.info("%s fields present in record mapping file", len(field_map["data"]))
             mapped_fields = (
                 f
                 for f in field_map["data"]
                 if f["legacy_field"] and f["legacy_field"] != "Not mapped"
             )
-            logging.info(
-                "%s fields mapped in record mapping file", len(list(mapped_fields))
-            )
+            logging.info("%s fields mapped in record mapping file", len(list(mapped_fields)))
             return field_map

-    def log_and_exit_if_too_many_errors(
-        self, error: TransformationRecordFailedError, idx
-    ):
+    def log_and_exit_if_too_many_errors(self, error: TransformationRecordFailedError, idx):
         self.num_exeptions += 1
         error.log_it()
         if self.num_exeptions / (1 + idx) > 0.2 and self.num_exeptions > 5000:
@@ -311,9 +296,7 @@ class MigrationTaskBase:
         if num_processed > 1 and num_processed % 10000 == 0:
             elapsed = num_processed / (time.time() - start_time)
             elapsed_formatted = "{0:.4g}".format(elapsed)
-            logging.info(
-                f"{num_processed:,} records processed. Recs/sec: {elapsed_formatted} "
-            )
+            logging.info(f"{num_processed:,} records processed. Recs/sec: {elapsed_formatted} ")

     def do_work_marc_transformer(
         self,
@@ -322,9 +305,7 @@ class MigrationTaskBase:
         if self.folder_structure.failed_marc_recs_file.is_file():
             os.remove(self.folder_structure.failed_marc_recs_file)
             logging.info("Removed failed marc records file to prevent duplicating data")
-        with open(
-            self.folder_structure.created_objects_path, "w+"
-        ) as created_records_file:
+        with open(self.folder_structure.created_objects_path, "w+") as created_records_file:
             self.processor = MarcFileProcessor(
                 self.mapper, self.folder_structure, created_records_file
             )
@@ -377,7 +358,7 @@ class MigrationTaskBase:

         Returns:
             None
-        """
+        """  # noqa: E501
         current_pos = map_file.tell()
         try:
             map_file.seek(0)
@@ -391,13 +372,12 @@ class MigrationTaskBase:
                     "",
                     (
                         f"Mapping file {map_file.name} has rows with different number "
-                        f"of columns ({'Row' if len(invalid_lines) == 1 else 'Rows'}
+                        f"of columns ({'Row' if len(invalid_lines) == 1 else 'Rows'} "
+                        f"{', '.join(invalid_lines)})"
                     ),
                 )
             if not valid_lines:
-                raise TransformationProcessError(
-                    "", f"Map has no rows: {map_file.name}"
-                )
+                raise TransformationProcessError("", f"Map has no rows: {map_file.name}")
         finally:
             map_file.seek(current_pos)

@@ -418,15 +398,12 @@ class MigrationTaskBase:
             required (bool): Whether the property is required or not
         """
         if (
-
-
-
-
-
-
-            )
-            and map_file_path.is_file()
-        ):
+            folio_property_name in folio_keys
+            or required
+            or folio_property_name.startswith("statisticalCodeIds")
+            or folio_property_name.startswith("locationMap")
+            or folio_property_name.startswith("fundsMap")
+        ) and map_file_path.is_file():
             try:
                 with open(map_file_path) as map_file:
                     # Validate the structure of the mapping file
@@ -486,7 +463,7 @@ class MarcTaskConfigurationBase(task_configuration.AbstractTaskConfiguration):
         deactivate035_from001 (bool):
             Disables the default FOLIO functionality of moving the previous 001 field into a 035 field, prefixed with the value from 003.
             Default is False, meaning the functionality remains active.
-    """
+    """  # noqa: E501

     files: Annotated[
         List[library_configuration.FileDefinition],
@@ -500,8 +477,7 @@ class MarcTaskConfigurationBase(task_configuration.AbstractTaskConfiguration):
         Field(
             title="Create source records",
             description=(
-                "Controls whether or not to retain the MARC records in "
-                "Source Record Storage."
+                "Controls whether or not to retain the MARC records in Source Record Storage."
             ),
         ),
     ] = False
@@ -542,12 +518,14 @@ class MarcTaskConfigurationBase(task_configuration.AbstractTaskConfiguration):
             title="Statistical code mapping fields",
             description=(
                 "List of fields + subfields to be used for mapping statistical codes. "
-
-                "will be treated as unique values. Multiple subfields will be concatenated
+                'Subfields should be delimited by a "$" (eg. 907$a). Single repeating subfields '
+                "will be treated as unique values. Multiple subfields will be concatenated "
+                "together with a space."
             ),
         ),
     ] = []

+

 class ExcludeLevelFilter(logging.Filter):
     def __init__(self, level):
         super().__init__()
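`ExcludeLevelFilter(30)` is attached to the stream handler in an earlier hunk to drop WARNING-level records (level 30) coming from pymarc. A minimal sketch of such a filter, assuming it simply rejects records whose level matches the one passed to the constructor (the package's actual `filter` body may differ):

```python
import logging


class ExcludeLevelFilter(logging.Filter):
    """Drop log records of exactly one level (e.g. 30 == logging.WARNING)."""

    def __init__(self, level):
        super().__init__()
        self.level = level

    def filter(self, record):
        # Returning False suppresses the record on this handler only.
        return record.levelno != self.level


stream_handler = logging.StreamHandler()
stream_handler.addFilter(ExcludeLevelFilter(logging.WARNING))
logging.getLogger().addHandler(stream_handler)
```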
@@ -4,7 +4,6 @@ import json
 import logging
 import sys
 import time
-from os.path import isfile
 from typing import List, Optional, Annotated
 from pydantic import Field

@@ -84,50 +83,35 @@ class OrdersTransformer(MigrationTaskBase):
         Optional[str],
         Field(
             title="Payment Status Map File Name",
-            description=(
-                "File name for payment status mapping. "
-                "By default is empty string."
-            ),
+            description=("File name for payment status mapping. By default is empty string."),
         ),
     ] = ""
     receipt_status_map_file_name: Annotated[
         Optional[str],
         Field(
             title="Receipt Status Map File Name",
-            description=(
-                "File name for receipt status mapping. "
-                "By default is empty string."
-            ),
+            description=("File name for receipt status mapping. By default is empty string."),
         ),
     ] = ""
     workflow_status_map_file_name: Annotated[
         Optional[str],
         Field(
             title="Workflow Status Map File Name",
-            description=(
-                "File name for workflow status mapping. "
-                "By default is empty string."
-            ),
+            description=("File name for workflow status mapping. By default is empty string."),
         ),
     ] = ""
     location_map_file_name: Annotated[
         Optional[str],
         Field(
             title="Location Map File Name",
-            description=(
-                "File name for location mapping. "
-                "By default is empty string."
-            ),
+            description=("File name for location mapping. By default is empty string."),
         ),
     ] = ""
     funds_map_file_name: Annotated[
         Optional[str],
         Field(
             title="Funds Map File Name",
-            description=(
-                "File name for funds mapping. "
-                "By default is empty string."
-            ),
+            description=("File name for funds mapping. By default is empty string."),
         ),
     ] = ""
     funds_expense_class_map_file_name: Annotated[
@@ -135,8 +119,7 @@ class OrdersTransformer(MigrationTaskBase):
         Field(
             title="Funds Expense Class Map File Name",
             description=(
-                "File name for funds expense class mapping. "
-                "By default is empty string."
+                "File name for funds expense class mapping. By default is empty string."
             ),
         ),
     ] = ""
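Most of the churn in the `OrdersTransformer` configuration above is whitespace-only reflowing of fields that follow pydantic's `Annotated[..., Field(...)]` pattern. A minimal standalone sketch of that pattern, with a hypothetical model and field name (pydantic v2 assumed):

```python
from typing import Annotated, Optional

from pydantic import BaseModel, Field


class ExampleTaskConfiguration(BaseModel):
    location_map_file_name: Annotated[
        Optional[str],
        Field(
            title="Location Map File Name",
            description="File name for location mapping. By default is empty string.",
        ),
    ] = ""


ExampleTaskConfiguration()                                        # uses the "" default
ExampleTaskConfiguration(location_map_file_name="locations.tsv")  # explicit value
```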
@@ -216,8 +199,7 @@ class OrdersTransformer(MigrationTaskBase):
             ),
             self.load_ref_data_mapping_file(  # Required if there was is a fund.
                 "fundsMap",
-                self.folder_structure.mapping_files_folder
-                / self.task_config.funds_map_file_name,
+                self.folder_structure.mapping_files_folder / self.task_config.funds_map_file_name,
                 self.folio_keys,
                 True,
             ),
@@ -231,26 +213,26 @@ class OrdersTransformer(MigrationTaskBase):
         )

     def list_source_files(self):
-        files = [
-
-
-
-
-
-
-
-
-
-        )
+        files = []
+        for f in self.task_config.files:
+            file_path = self.folder_structure.data_folder / self.object_type_name / f.file_name
+
+            if not file_path.is_file():
+                print(f"\n\nERROR: File defined in task not found - {f.file_name}")
+                raise TransformationProcessError(
+                    f"\n\nERROR: File defined in task not found - {f.file_name}"
+                )
+            files.append(file_path)
         logging.info("Files to process:")
         for filename in files:
             logging.info("\t%s", filename)
         return files

     def process_single_file(self, filename):
-        with
-
-
+        with (
+            open(filename, encoding="utf-8-sig") as records_file,
+            open(self.folder_structure.created_objects_path, "w+") as results_file,
+        ):
             self.mapper.migration_report.add_general_statistics(
                 i18n.t("Number of files processed")
             )
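The rewritten `process_single_file` above groups its two `open()` calls in parentheses, a context-manager syntax that requires Python 3.10 or later. A minimal self-contained sketch of the syntax (hypothetical file names):

```python
from pathlib import Path

Path("records.tsv").write_text("id\tname\n1\tExample\n", encoding="utf-8")

# Python 3.10+: multiple context managers grouped in parentheses, one per line.
with (
    open("records.tsv", encoding="utf-8-sig") as records_file,
    open("results.txt", "w+") as results_file,
):
    results_file.write(records_file.readline())
```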
@@ -50,27 +50,21 @@ class OrganizationTransformer(MigrationTaskBase):
         str,
         Field(
             title="Migration task type",
-            description=(
-                "The type of migration task you want to perform"
-            ),
+            description=("The type of migration task you want to perform"),
         ),
     ]
     files: Annotated[
         List[FileDefinition],
         Field(
             title="Source files",
-            description=(
-                "List of MARC21 files with holdings records"
-            ),
+            description=("List of MARC21 files with holdings records"),
         ),
     ]
     organization_map_path: Annotated[
         str,
         Field(
             title="Organization map path",
-            description=(
-                "Path to the organization map file"
-            ),
+            description=("Path to the organization map file"),
         ),
     ]
     organization_types_map_path: Annotated[
@@ -95,18 +89,14 @@ class OrganizationTransformer(MigrationTaskBase):
         Optional[str],
         Field(
             title="Email categories map path",
-            description=(
-                "Path to the email categories map file. By default is empty string"
-            ),
+            description=("Path to the email categories map file. By default is empty string"),
         ),
     ] = ""
     phone_categories_map_path: Annotated[
         Optional[str],
         Field(
             title="Phone categories map path",
-            description=(
-                "Path to the phone categories map file. By default is empty string"
-            ),
+            description=("Path to the phone categories map file. By default is empty string"),
         ),
     ] = ""

@@ -201,9 +191,10 @@ class OrganizationTransformer(MigrationTaskBase):
         return files

     def process_single_file(self, filename):
-        with
-
-
+        with (
+            open(filename, encoding="utf-8-sig") as records_file,
+            open(self.folder_structure.created_objects_path, "w+") as results_file,
+        ):
             self.mapper.migration_report.add_general_statistics(
                 i18n.t("Number of files processed")
             )
@@ -32,7 +32,7 @@ class RequestsMigrator(MigrationTaskBase):
             description=(
                 "Name of this migration task. The name is being used to call "
                 "the specific task, and to distinguish tasks of similar types"
-            )
+            ),
         ),
     ]
     migration_task_type: Annotated[
@@ -54,8 +54,7 @@ class RequestsMigrator(MigrationTaskBase):
         Field(
             title="Starting row",
             description=(
-                "Row number to start processing data from. "
-                "Optional, by default is first row"
+                "Row number to start processing data from. Optional, by default is first row"
             ),
         ),
     ] = 1
@@ -64,8 +63,7 @@ class RequestsMigrator(MigrationTaskBase):
         Field(
             title="Item files",
             description=(
-                "List of files containing item data. "
-                "Optional, by default is empty list"
+                "List of files containing item data. Optional, by default is empty list"
             ),
         ),
     ] = []
@@ -74,8 +72,7 @@ class RequestsMigrator(MigrationTaskBase):
         Field(
             title="Patron files",
             description=(
-                "List of files containing patron data. "
-                "Optional, by default is empty list"
+                "List of files containing patron data. Optional, by default is empty list"
             ),
         ),
     ] = []
@@ -88,7 +85,7 @@ class RequestsMigrator(MigrationTaskBase):
         self,
         task_configuration: TaskConfiguration,
         library_config: LibraryConfiguration,
-        folio_client
+        folio_client,
     ):
         csv.register_dialect("tsv", delimiter="\t")
         self.migration_report = MigrationReport()
@@ -135,8 +132,7 @@ class RequestsMigrator(MigrationTaskBase):
             )
         else:
             logging.info(
-                "No item or user files supplied. Not validating
-                "previously migrated objects"
+                "No item or user files supplied. Not validating againstpreviously migrated objects"
             )
             self.valid_legacy_requests = self.semi_valid_legacy_requests

@@ -188,7 +184,7 @@ class RequestsMigrator(MigrationTaskBase):
             legacy_request.instance_id = holding.get("instanceId")
             if item["status"]["name"] in ["Available"]:
                 legacy_request.request_type = "Page"
-                logging.info(f
+                logging.info(f"Setting request to Page, since the status is {item['status']['name']}")
             self.migration_report.add_general_statistics(
                 i18n.t("Valid, prepared requests, ready for posting")
             )
@@ -197,7 +193,7 @@ class RequestsMigrator(MigrationTaskBase):
     def do_work(self):
         logging.info("Starting")
         if self.task_configuration.starting_row > 1:
-            logging.info(f"Skipping {(self.task_configuration.starting_row-1)} records")
+            logging.info(f"Skipping {(self.task_configuration.starting_row - 1)} records")
         for num_requests, legacy_request in enumerate(
             self.valid_legacy_requests[self.task_configuration.starting_row - 1 :],
             start=1,
@@ -281,7 +277,8 @@ class RequestsMigrator(MigrationTaskBase):
             self.migration_report.add(
                 "DiscardedLoans",
                 i18n.t(
-                    "Requests discarded. Had migrated item barcode: %{item_barcode}.\n
+                    "Requests discarded. Had migrated item barcode: %{item_barcode}.\n "
+                    "Had migrated user barcode: %{patron_barcode}",
                     item_barcode=has_item_barcode,
                     patron_barcode=has_patron_barcode,
                 ),
@@ -333,8 +330,7 @@ class RequestsMigrator(MigrationTaskBase):
         except ValueError as ve:
             logging.exception(ve)
         logging.info(
-            f"Done validating {legacy_reques_count} "
-            f"legacy requests with {num_bad} rotten apples"
+            f"Done validating {legacy_reques_count} legacy requests with {num_bad} rotten apples"
         )
         if num_bad > 0 and (num_bad / legacy_reques_count) > 0.5:
             q = num_bad / legacy_reques_count
@@ -59,7 +59,7 @@ class ReservesMigrator(MigrationTaskBase):
         self,
         task_configuration: TaskConfiguration,
         library_config: LibraryConfiguration,
-        folio_client
+        folio_client,
     ):
         csv.register_dialect("tsv", delimiter="\t")
         self.migration_report = MigrationReport()
@@ -49,41 +49,37 @@ class UserTransformer(MigrationTaskBase):
         Field(
             title="Group map path",
             description="Define the path for group mapping",
-        )
+        ),
     ]
     departments_map_path: Annotated[
         Optional[str],
         Field(
             title="Departments map path",
             description=(
-                "Define the path for departments mapping. "
-                "Optional, by dfault is empty string"
+                "Define the path for departments mapping. Optional, by dfault is empty string"
             ),
-        )
+        ),
     ] = ""
     use_group_map: Annotated[
         Optional[bool],
         Field(
             title="Use group map",
-            description=(
-
-                "Optional, by default is True"
-            ),
-        )
+            description=("Specify whether to use group mapping. Optional, by default is True"),
+        ),
     ] = True
     user_mapping_file_name: Annotated[
         str,
         Field(
             title="User mapping file name",
             description="Specify the user mapping file name",
-        )
+        ),
     ]
     user_file: Annotated[
         FileDefinition,
         Field(
             title="User file",
             description="Select the user data file",
-        )
+        ),
     ]
     remove_id_and_request_preferences: Annotated[
         Optional[bool],
@@ -93,7 +89,7 @@ class UserTransformer(MigrationTaskBase):
                 "Specify whether to remove user ID and request preferences. "
                 "Optional, by default is False"
             ),
-        )
+        ),
     ] = False
     remove_request_preferences: Annotated[
         Optional[bool],
@@ -103,7 +99,7 @@ class UserTransformer(MigrationTaskBase):
                 "Specify whether to remove user request preferences. "
                 "Optional, by default is False"
             ),
-        )
+        ),
     ] = False

     @staticmethod
@@ -306,8 +302,7 @@ def find_primary_addresses(addresses):
         if "primaryAddress" not in address:
             address["primaryAddress"] = False
         elif (
-            isinstance(address["primaryAddress"], bool)
-            and address["primaryAddress"] is True
+            isinstance(address["primaryAddress"], bool) and address["primaryAddress"] is True
         ) or (
             isinstance(address["primaryAddress"], str)
             and address["primaryAddress"].lower() == "true"
@@ -1,7 +1,7 @@
 from typing import Annotated

 from humps import camelize
-from pydantic import BaseModel
+from pydantic import BaseModel, ConfigDict
 from pydantic import Field

@@ -25,9 +25,7 @@ class AbstractTaskConfiguration(BaseModel):
         str,
         Field(
             title="Migration task type",
-            description=(
-                "The type of migration task you want to perform."
-            ),
+            description=("The type of migration task you want to perform."),
         ),
     ]
     ecs_tenant_id: Annotated[
@@ -41,6 +39,7 @@ class AbstractTaskConfiguration(BaseModel):
         ),
     ] = ""

-
-    alias_generator
-
+    model_config = ConfigDict(
+        alias_generator=to_camel,
+        populate_by_name=True,
+    )
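The final hunk replaces pydantic v1-style model configuration with v2's `model_config = ConfigDict(...)`, keeping camelCase aliases while still accepting snake_case field names. A minimal sketch of the same pattern; the `to_camel` helper below is a stand-in built from `humps.camelize` (imported a few lines up in the same file), and the package's actual helper may differ:

```python
from humps import camelize
from pydantic import BaseModel, ConfigDict, Field


def to_camel(name: str) -> str:
    # Alias generator: snake_case field names get camelCase aliases.
    return camelize(name)


class ExampleTaskConfiguration(BaseModel):
    model_config = ConfigDict(
        alias_generator=to_camel,
        populate_by_name=True,  # accept the field name as well as its alias
    )

    migration_task_type: str = Field(title="Migration task type")


# Both spellings validate to the same model:
ExampleTaskConfiguration(migration_task_type="BibsTransformer")
ExampleTaskConfiguration(migrationTaskType="BibsTransformer")
```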