folio-migration-tools 1.9.10__py3-none-any.whl → 1.10.0b1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- folio_migration_tools/__init__.py +3 -4
- folio_migration_tools/__main__.py +44 -31
- folio_migration_tools/circulation_helper.py +114 -105
- folio_migration_tools/custom_dict.py +2 -2
- folio_migration_tools/custom_exceptions.py +4 -5
- folio_migration_tools/folder_structure.py +1 -1
- folio_migration_tools/helper.py +1 -1
- folio_migration_tools/library_configuration.py +65 -37
- folio_migration_tools/mapper_base.py +38 -25
- folio_migration_tools/mapping_file_transformation/courses_mapper.py +1 -1
- folio_migration_tools/mapping_file_transformation/holdings_mapper.py +7 -3
- folio_migration_tools/mapping_file_transformation/item_mapper.py +13 -26
- folio_migration_tools/mapping_file_transformation/manual_fee_fines_mapper.py +1 -2
- folio_migration_tools/mapping_file_transformation/mapping_file_mapper_base.py +13 -11
- folio_migration_tools/mapping_file_transformation/order_mapper.py +6 -5
- folio_migration_tools/mapping_file_transformation/organization_mapper.py +3 -3
- folio_migration_tools/mapping_file_transformation/user_mapper.py +43 -28
- folio_migration_tools/marc_rules_transformation/conditions.py +84 -70
- folio_migration_tools/marc_rules_transformation/holdings_statementsparser.py +13 -5
- folio_migration_tools/marc_rules_transformation/hrid_handler.py +3 -2
- folio_migration_tools/marc_rules_transformation/marc_file_processor.py +14 -22
- folio_migration_tools/marc_rules_transformation/rules_mapper_authorities.py +1 -0
- folio_migration_tools/marc_rules_transformation/rules_mapper_base.py +46 -36
- folio_migration_tools/marc_rules_transformation/rules_mapper_bibs.py +25 -15
- folio_migration_tools/marc_rules_transformation/rules_mapper_holdings.py +62 -32
- folio_migration_tools/migration_report.py +1 -1
- folio_migration_tools/migration_tasks/authority_transformer.py +1 -2
- folio_migration_tools/migration_tasks/batch_poster.py +78 -68
- folio_migration_tools/migration_tasks/bibs_transformer.py +12 -7
- folio_migration_tools/migration_tasks/courses_migrator.py +2 -3
- folio_migration_tools/migration_tasks/holdings_csv_transformer.py +14 -15
- folio_migration_tools/migration_tasks/holdings_marc_transformer.py +11 -21
- folio_migration_tools/migration_tasks/items_transformer.py +17 -30
- folio_migration_tools/migration_tasks/loans_migrator.py +53 -131
- folio_migration_tools/migration_tasks/migration_task_base.py +33 -55
- folio_migration_tools/migration_tasks/orders_transformer.py +21 -39
- folio_migration_tools/migration_tasks/organization_transformer.py +9 -18
- folio_migration_tools/migration_tasks/requests_migrator.py +11 -15
- folio_migration_tools/migration_tasks/reserves_migrator.py +1 -1
- folio_migration_tools/migration_tasks/user_transformer.py +10 -15
- folio_migration_tools/task_configuration.py +6 -7
- folio_migration_tools/transaction_migration/legacy_loan.py +15 -27
- folio_migration_tools/transaction_migration/legacy_request.py +1 -1
- {folio_migration_tools-1.9.10.dist-info → folio_migration_tools-1.10.0b1.dist-info}/METADATA +18 -28
- {folio_migration_tools-1.9.10.dist-info → folio_migration_tools-1.10.0b1.dist-info}/RECORD +47 -50
- folio_migration_tools-1.10.0b1.dist-info/WHEEL +4 -0
- folio_migration_tools-1.10.0b1.dist-info/entry_points.txt +3 -0
- folio_migration_tools/test_infrastructure/__init__.py +0 -0
- folio_migration_tools/test_infrastructure/mocked_classes.py +0 -406
- folio_migration_tools-1.9.10.dist-info/WHEEL +0 -4
- folio_migration_tools-1.9.10.dist-info/entry_points.txt +0 -3
- folio_migration_tools-1.9.10.dist-info/licenses/LICENSE +0 -21
folio_migration_tools/marc_rules_transformation/rules_mapper_base.py

@@ -52,7 +52,10 @@ class RulesMapperBase(MapperBase):
         self.mappings: dict = {}
         self.schema_properties = None
         self.create_source_records = all(
-            [
+            [
+                self.task_configuration.create_source_records,
+                (not getattr(self.task_configuration, "data_import_marc", False)),
+            ]
         )
         if hasattr(self.task_configuration, "hrid_handling"):
             self.hrid_handler = HRIDHandler(
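Judging by the added lines, source records are now created only when the task configuration both asks for them and does not route records through Data Import (`data_import_marc`). A minimal sketch of that predicate, using a hypothetical stand-in for the task configuration object:

    from types import SimpleNamespace

    def should_create_source_records(task_configuration) -> bool:
        # Both conditions must hold, mirroring the all([...]) call in the hunk above.
        return all(
            [
                task_configuration.create_source_records,
                not getattr(task_configuration, "data_import_marc", False),
            ]
        )

    cfg = SimpleNamespace(create_source_records=True, data_import_marc=True)
    print(should_create_source_records(cfg))  # False: Data Import takes precedence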
@@ -190,10 +193,10 @@ class RulesMapperBase(MapperBase):
     ):
         values: List[str] = []
         if mapping.get("subfield") and (custom_delimiters := mapping.get("subFieldDelimiter")):
-            delimiter_map =
+            delimiter_map = dict.fromkeys(mapping.get("subfield"), " ")
             for custom_delimiter in custom_delimiters:
                 delimiter_map.update(
-
+                    dict.fromkeys(custom_delimiter["subfields"], custom_delimiter["value"])
                 )
         custom_delimited_strings: List[Tuple[str, List[str]]] = []
         subfields = mapping.get("subfield")
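The replacement lines build the delimiter map with `dict.fromkeys`: every mapped subfield starts out separated by a single space, and any `subFieldDelimiter` entries then override individual subfields. The pattern in isolation, with an invented mapping:

    mapping = {
        "subfield": ["a", "b", "c"],
        "subFieldDelimiter": [{"subfields": ["c"], "value": "; "}],
    }
    delimiter_map = dict.fromkeys(mapping["subfield"], " ")
    for custom_delimiter in mapping["subFieldDelimiter"]:
        delimiter_map.update(
            dict.fromkeys(custom_delimiter["subfields"], custom_delimiter["value"])
        )
    print(delimiter_map)  # {'a': ' ', 'b': ' ', 'c': '; '}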
@@ -505,7 +508,7 @@ class RulesMapperBase(MapperBase):
                 + i18n.t("Check mapping file against the schema.")
                 + " "
                 + i18n.t("Target type")
-                + f": {sch.get(target_string,{}).get('type','')} "
+                + f": {sch.get(target_string, {}).get('type', '')} "
                 + i18n.t("Value")
                 + f": {value}",
                 "",

@@ -529,7 +532,7 @@ class RulesMapperBase(MapperBase):
                 "",
                 (
                     f"Edge! Target string: {target_string} "
-                    f"Target type: {sch.get(target_string,{}).get('type','')} Value: {value}"
+                    f"Target type: {sch.get(target_string, {}).get('type', '')} Value: {value}"
                 ),
             )

@@ -564,8 +567,9 @@ class RulesMapperBase(MapperBase):
             if subfield != "9":
                 Helper.log_data_issue(
                     index_or_legacy_id,
-                    f"authorityId mapping from ${subfield} is not supported. Data Import
-                    "Use only $9 for authority id mapping in MARC-to-Instance
+                    f"authorityId mapping from ${subfield} is not supported. Data Import "
+                    "will fail. Use only $9 for authority id mapping in MARC-to-Instance "
+                    "mapping rules.",
                     marc_field,
                 )
                 entity_mapping["subfield"] = ["9"]

@@ -599,7 +603,8 @@ class RulesMapperBase(MapperBase):
             entity = {}
             Helper.log_data_issue(
                 index_or_legacy_id,
-                f"Missing one or more required property in entity {entity_parent_key}
+                f"Missing one or more required property in entity {entity_parent_key} "
+                f"({missing_required_props})",
                 marc_field,
             )
         return entity

@@ -639,7 +644,8 @@ class RulesMapperBase(MapperBase):
                 )
                 or e_parent in ["electronicAccess", "publication"]
                 or (
-                    e_parent.startswith("holdingsStatements")
+                    e_parent.startswith("holdingsStatements")
+                    and any(v for k, v in entity.items())
                 )
             ):
                 self.add_entity_to_record(entity, e_parent, folio_record, self.schema)

@@ -661,12 +667,12 @@ class RulesMapperBase(MapperBase):
             folio_record["discoverySuppress"] = file_def.discovery_suppressed
             self.migration_report.add(
                 "Suppression",
-                i18n.t("Suppressed from discovery") + f
+                i18n.t("Suppressed from discovery") + f" = {folio_record['discoverySuppress']}",
             )
             if not only_discovery_suppress:
                 folio_record["staffSuppress"] = file_def.staff_suppressed
                 self.migration_report.add(
-                    "Suppression", i18n.t("Staff suppressed") + f
+                    "Suppression", i18n.t("Staff suppressed") + f" = {folio_record['staffSuppress']} "
                 )

     def create_preceding_succeeding_titles(

@@ -825,7 +831,6 @@ class RulesMapperBase(MapperBase):
             )
             data_import_marc_file.write(marc_record.as_marc())

-
     def map_statistical_codes(
         self,
         folio_record: dict,
@@ -854,11 +859,17 @@ class RulesMapperBase(MapperBase):
         for mapping in self.task_configuration.statistical_code_mapping_fields:
             stat_code_marc_fields.append(mapping.split("$"))
         for field_map in stat_code_marc_fields:
-            mapped_codes = self.map_stat_codes_from_marc_field(
-
+            mapped_codes = self.map_stat_codes_from_marc_field(
+                field_map, marc_record, self.library_configuration.multi_field_delimiter
+            )
+            folio_record["statisticalCodeIds"] = (
+                folio_record.get("statisticalCodeIds", []) + mapped_codes
+            )

     @staticmethod
-    def map_stat_codes_from_marc_field(
+    def map_stat_codes_from_marc_field(
+        field_map: List[str], marc_record: Record, multi_field_delimiter: str = "<delimiter>"
+    ) -> List[str]:
         """Map statistical codes from MARC field to FOLIO instance.

         This function extracts statistical codes from a MARC field based on the provided field map.

@@ -871,30 +882,26 @@ class RulesMapperBase(MapperBase):

         Returns:
             str: A string of statistical codes extracted from the MARC field, formatted as "<field>_<subfield>:<value>".
-        """
+        """ # noqa: E501
         field_values = []
         if len(field_map) == 2:
             subfields = []
             for mf in marc_record.get_fields(field_map[0]):
                 subfields.extend(
-                    multi_field_delimiter.join(
-
-                    )
+                    multi_field_delimiter.join(mf.get_subfields(field_map[1])).split(
+                        multi_field_delimiter
+                    )
                 )
-            field_values.extend(
-                [
-                    f"{field_map[0]}_{field_map[1]}:{x}" for
-                    x in subfields
-                ]
-            )
+            field_values.extend([f"{field_map[0]}_{field_map[1]}:{x}" for x in subfields])
         elif len(field_map) > 2:
             for mf in marc_record.get_fields(field_map[0]):
                 for sf in field_map[1:]:
                     field_values.extend(
                         [
-                            f"{field_map[0]}_{sf}:{x}"
-
-
+                            f"{field_map[0]}_{sf}:{x}"
+                            for x in multi_field_delimiter.join(mf.get_subfields(sf)).split(
+                                multi_field_delimiter
+                            )
                         ]
                     )
         elif field_map:
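The reworked `map_stat_codes_from_marc_field` returns a list of `<field>_<subfield>:<value>` strings and honours a configurable multi-value delimiter, so repeated values packed into one subfield are split apart. The join/split pattern in isolation (the sample subfield values stand in for `mf.get_subfields(...)` output and are invented):

    multi_field_delimiter = "<delimiter>"
    subfield_values = ["ARCH<delimiter>MAPS", "RAREBOOK"]  # e.g. repeated 907$a values
    codes = multi_field_delimiter.join(subfield_values).split(multi_field_delimiter)
    print([f"907_a:{x}" for x in codes])
    # ['907_a:ARCH', '907_a:MAPS', '907_a:RAREBOOK']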
@@ -960,11 +967,7 @@ class RulesMapperBase(MapperBase):
         }

         return str(
-            FolioUUID(
-                self.base_string_for_folio_uuid,
-                srs_types.get(record_type),
-                legacy_id
-            )
+            FolioUUID(self.base_string_for_folio_uuid, srs_types.get(record_type), legacy_id)
         )

     @staticmethod

@@ -1075,6 +1078,7 @@ def is_array_of_objects(schema_property):
     sc_prop_type = schema_property.get("type", "string")
     return sc_prop_type == "array" and schema_property["items"]["type"] == "object"

+
 def entity_indicators_match(entity_mapping, marc_field):
     """
     Check if the indicators of the entity mapping match the indicators of the MARC field.

@@ -1095,12 +1099,18 @@ def entity_indicators_match(entity_mapping, marc_field):

     Returns:
         bool: True if the indicators match, False otherwise.
-    """
+    """ # noqa: E501
     if indicator_rule := [x["indicators"] for x in entity_mapping if "indicators" in x]:
         return all(
             [
-                (
-
+                (
+                    marc_field.indicator1 == indicator_rule[0]["ind1"]
+                    or indicator_rule[0]["ind1"] == "*"
+                ),
+                (
+                    marc_field.indicator2 == indicator_rule[0]["ind2"]
+                    or indicator_rule[0]["ind2"] == "*"
+                ),
             ]
         )
     else:
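The expanded condition spells out the two indicator checks: each MARC indicator must equal the rule value, with `*` accepted as a wildcard. A compact stand-alone sketch of the same predicate (the `FakeField` class is invented for illustration):

    class FakeField:
        def __init__(self, indicator1: str, indicator2: str):
            self.indicator1 = indicator1
            self.indicator2 = indicator2

    def indicators_match(rule: dict, field: FakeField) -> bool:
        # "*" in the rule matches any indicator value.
        return all(
            [
                field.indicator1 == rule["ind1"] or rule["ind1"] == "*",
                field.indicator2 == rule["ind2"] or rule["ind2"] == "*",
            ]
        )

    print(indicators_match({"ind1": "1", "ind2": "*"}, FakeField("1", "0")))  # True
    print(indicators_match({"ind1": "1", "ind2": "*"}, FakeField("2", "0")))  # False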
folio_migration_tools/marc_rules_transformation/rules_mapper_bibs.py

@@ -67,7 +67,9 @@ class BibsRulesMapper(RulesMapperBase):
         self.hrid_handler.deactivate035_from001 = True
         self.start = time.time()

-    def perform_initial_preparation(
+    def perform_initial_preparation(
+        self, file_def: FileDefinition, marc_record: Record, legacy_ids: List[str]
+    ):
         folio_instance = {}
         folio_instance["id"] = str(
             FolioUUID(

@@ -145,7 +147,13 @@ class BibsRulesMapper(RulesMapperBase):
         self.report_folio_mapping(clean_folio_instance, self.schema)
         return [clean_folio_instance]

-    def simple_bib_map(
+    def simple_bib_map(
+        self,
+        folio_instance: dict,
+        marc_record: Record,
+        ignored_subsequent_fields: set,
+        legacy_ids: List[str],
+    ):
         """
         This method applies a much simplified MARC-to-instance
         mapping to create a minimal FOLIO Instance record to be

@@ -164,20 +172,24 @@ class BibsRulesMapper(RulesMapperBase):
         if len(main_entry_fields) > 1:
             Helper.log_data_issue(
                 legacy_ids,
-                "Multiple main entry fields in record. Record will fail Data Import.
-
+                "Multiple main entry fields in record. Record will fail Data Import. "
+                "Creating Instance anyway.",
+                [str(field) for field in main_entry_fields],
             )
         if not main_entry_fields:
             main_entry_fields += marc_record.get_fields("700", "710", "711", "730")
             main_entry_fields.sort(key=lambda x: int(x.tag))
         if main_entry_fields:
-            self.process_marc_field(
+            self.process_marc_field(
+                folio_instance, main_entry_fields[0], ignored_subsequent_fields, legacy_ids
+            )
         try:
-            self.process_marc_field(
+            self.process_marc_field(
+                folio_instance, marc_record["245"], ignored_subsequent_fields, legacy_ids
+            )
         except KeyError as ke:
             raise TransformationRecordFailedError(
-                legacy_ids,
-                "No 245 field in MARC record"
+                legacy_ids, "No 245 field in MARC record"
             ) from ke

     def perform_additional_parsing(
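When a record has no 1XX field, the simplified bib map falls back to the lowest-numbered 7XX field, which is what the numeric sort in the hunk above achieves. The sort step in isolation (the `Tagged` helper is invented to stand in for pymarc fields):

    class Tagged:
        def __init__(self, tag: str):
            self.tag = tag

    fields = [Tagged("730"), Tagged("700"), Tagged("711")]
    fields.sort(key=lambda x: int(x.tag))  # lowest tag first, so 700 becomes the main entry
    print([f.tag for f in fields])  # ['700', '711', '730']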
@@ -220,10 +232,8 @@ class BibsRulesMapper(RulesMapperBase):

     def handle_languages(self, folio_instance: Dict, marc_record: Record, legacy_ids: List[str]):
         if "languages" in folio_instance:
-            orig_languages =
-            orig_languages.update(
-                {lang: None for lang in self.get_languages(marc_record, legacy_ids)}
-            )
+            orig_languages = dict.fromkeys(folio_instance["languages"])
+            orig_languages.update(dict.fromkeys(self.get_languages(marc_record, legacy_ids)))
             folio_instance["languages"] = list(orig_languages.keys())
         else:
             folio_instance["languages"] = self.get_languages(marc_record, legacy_ids)
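`handle_languages` now merges already-mapped language codes with the ones derived from the record via `dict.fromkeys`, which removes duplicates while keeping first-seen order. The pattern in isolation (sample codes invented):

    existing = ["eng", "ger"]
    derived = ["ger", "swe", "eng"]
    merged = dict.fromkeys(existing)       # insertion-ordered, values are None
    merged.update(dict.fromkeys(derived))  # adds only the codes not seen yet
    print(list(merged.keys()))  # ['eng', 'ger', 'swe']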
@@ -422,7 +432,7 @@ class BibsRulesMapper(RulesMapperBase):
             return True
         self.migration_report.add(
             "InstanceFormat",
-            ("InstanceFormat not mapped since 338$2 (Source)
+            (f"InstanceFormat not mapped since 338$2 (Source) is set to {field['2']}. "),
         )
         return False

@@ -533,10 +543,10 @@ class BibsRulesMapper(RulesMapperBase):
         return ""

     def get_languages_041(self, marc_record: Record, legacy_id: List[str]) -> Dict[str, None]:
-        languages =
+        languages = {}
         lang_fields = marc_record.get_fields("041")
         if not any(lang_fields):
-            return
+            return {}
         subfields = "abdefghjkmn"
         for lang_tag in lang_fields:
             if "2" in lang_tag:
folio_migration_tools/marc_rules_transformation/rules_mapper_holdings.py

@@ -95,25 +95,21 @@ class RulesMapperHoldings(RulesMapperBase):
         )
         self.mappings["852"] = new_852_mapping

-    def integrate_supplemental_mfhd_mappings(self, new_rules=
+    def integrate_supplemental_mfhd_mappings(self, new_rules=None):
         try:
-            self.mappings.update(new_rules)
+            self.mappings.update(new_rules or {})
             self.fix_853_bug_in_rules()
         except Exception as e:
             raise TransformationProcessError(
                 "",
                 "Failed to integrate supplemental mfhd mappings",
                 str(e),
-            )
+            ) from e

     def prep_852_notes(self, marc_record: Record):
         for field in marc_record.get_fields("852"):
             field.subfields.sort(key=lambda x: x[0])
-            new_952 = Field(
-                tag="952",
-                indicators=["f", "f"],
-                subfields=field.subfields
-            )
+            new_952 = Field(tag="952", indicators=["f", "f"], subfields=field.subfields)
             marc_record.add_ordered_field(new_952)

     def parse_record(
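`integrate_supplemental_mfhd_mappings` now defaults `new_rules` to `None` and merges with `new_rules or {}`, so callers may omit the argument without tripping over a `None` update. The guard in isolation (the rule data below is hypothetical):

    from typing import Optional

    def integrate(mappings: dict, new_rules: Optional[dict] = None) -> dict:
        # None (or any falsy value) is treated as "no supplemental rules".
        mappings.update(new_rules or {})
        return mappings

    print(integrate({"852": ["base rule"]}))                      # unchanged
    print(integrate({"852": ["base rule"]}, {"853": ["extra"]}))  # supplemental rule merged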
@@ -270,7 +266,11 @@ class RulesMapperHoldings(RulesMapperBase):
             ignored_subsequent_fields.add(marc_field.tag)

     def perform_additional_mapping(
-        self,
+        self,
+        marc_record: Record,
+        folio_holding: Dict,
+        legacy_ids: List[str],
+        file_def: FileDefinition,
     ):
         """_summary_

@@ -306,11 +306,13 @@ class RulesMapperHoldings(RulesMapperBase):
             "",
         )
         self.handle_suppression(folio_holding, file_def, True)
-        # First, map statistical codes from MARC fields and FileDefinitions to FOLIO statistical
-        # Then, convert the mapped statistical codes to their corresponding code IDs.
+        # First, map statistical codes from MARC fields and FileDefinitions to FOLIO statistical
+        # codes. Then, convert the mapped statistical codes to their corresponding code IDs.
         self.map_statistical_codes(folio_holding, file_def, marc_record)
         self.map_statistical_code_ids(legacy_ids, folio_holding)
-        self.set_source_id(
+        self.set_source_id(
+            self.create_source_records, folio_holding, self.holdingssources, file_def
+        )

     def pick_first_location_if_many(self, folio_holding: Dict, legacy_ids: List[str]):
         if " " in folio_holding.get("permanentLocationId", ""):

@@ -324,7 +326,12 @@ class RulesMapperHoldings(RulesMapperBase):
         ]

     @staticmethod
-    def set_source_id(
+    def set_source_id(
+        create_source_records: bool,
+        folio_rec: Dict,
+        holdingssources: Dict,
+        file_def: FileDefinition,
+    ):
         if file_def.create_source_records and create_source_records:
             folio_rec["sourceId"] = holdingssources.get("MARC")
         else:
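`set_source_id` now receives the mapper-level `create_source_records` flag explicitly and only assigns the MARC source when both the file definition and the task allow it. A hedged sketch of that branch with simplified parameters (the `FOLIO` fallback and the lookup values are assumptions, since the `else` body is not shown in the hunk above):

    def set_source_id(create_source_records: bool, folio_rec: dict,
                      holdingssources: dict, file_def_creates_srs: bool) -> dict:
        if file_def_creates_srs and create_source_records:
            folio_rec["sourceId"] = holdingssources.get("MARC")
        else:
            folio_rec["sourceId"] = holdingssources.get("FOLIO")  # assumed fallback
        return folio_rec

    sources = {"MARC": "marc-source-uuid", "FOLIO": "folio-source-uuid"}
    print(set_source_id(True, {}, sources, True))   # {'sourceId': 'marc-source-uuid'}
    print(set_source_id(False, {}, sources, True))  # {'sourceId': 'folio-source-uuid'}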
@@ -371,10 +378,14 @@ class RulesMapperHoldings(RulesMapperBase):
         """
         if self.task_configuration.include_mrk_statements:
             mrk_statement_notes = []
-            for field in marc_record.get_fields(
+            for field in marc_record.get_fields(
+                "853", "854", "855", "863", "864", "865", "866", "867", "868"
+            ):
                 mrk_statement_notes.append(str(field))
             if mrk_statement_notes:
-                folio_holding["notes"] = folio_holding.get(
+                folio_holding["notes"] = folio_holding.get(
+                    "notes", []
+                ) + self.add_mrk_statements_note(mrk_statement_notes, legacy_ids)

     def add_mrk_statements_note(self, mrk_statement_notes: List[str], legacy_ids) -> List[Dict]:
         """Creates a note from the MRK statements

@@ -386,7 +397,9 @@ class RulesMapperHoldings(RulesMapperBase):
             List: A list containing the FOLIO holdings note object (Dict)
         """
         holdings_note_type_tuple = self.conditions.get_ref_data_tuple_by_name(
-            self.folio.holding_note_types,
+            self.folio.holding_note_types,
+            "holding_note_types",
+            self.task_configuration.mrk_holdings_note_type,
         )
         try:
             holdings_note_type_id = holdings_note_type_tuple[0]

@@ -394,7 +407,8 @@ class RulesMapperHoldings(RulesMapperBase):
             logging.error(ee)
             raise TransformationRecordFailedError(
                 legacy_ids,
-                f
+                f"Holdings note type mapping error.\tNote type name: "
+                f"{self.task_configuration.mrk_holdings_note_type}\t"
                 f"MFHD holdings statement note type not found in FOLIO.",
                 self.task_configuration.mrk_holdings_note_type,
             ) from ee

@@ -403,7 +417,8 @@ class RulesMapperHoldings(RulesMapperBase):
                 "note": chunk,
                 "holdingsNoteTypeId": holdings_note_type_id,
                 "staffOnly": True,
-            }
+            }
+            for chunk in self.split_mrk_by_max_note_size("\n".join(mrk_statement_notes))
         ]

     @staticmethod

@@ -423,7 +438,9 @@ class RulesMapperHoldings(RulesMapperBase):
             chunks.append(current_chunk)
         return chunks

-    def add_mfhd_as_mrk_note(
+    def add_mfhd_as_mrk_note(
+        self, marc_record: Record, folio_holding: Dict, legacy_ids: List[str]
+    ):
         """Adds the MFHD as a note to the holdings record

         This is done to preserve the information in the MARC record for future reference.

@@ -434,7 +451,9 @@ class RulesMapperHoldings(RulesMapperBase):
         """
         if self.task_configuration.include_mfhd_mrk_as_note:
             holdings_note_type_tuple = self.conditions.get_ref_data_tuple_by_name(
-                self.folio.holding_note_types,
+                self.folio.holding_note_types,
+                "holding_note_types",
+                self.task_configuration.mfhd_mrk_note_type,
             )
             try:
                 holdings_note_type_id = holdings_note_type_tuple[0]

@@ -442,7 +461,8 @@ class RulesMapperHoldings(RulesMapperBase):
                 logging.error(ee)
                 raise TransformationRecordFailedError(
                     legacy_ids,
-                    f
+                    f"Holdings note type mapping error.\tNote type name: "
+                    f"{self.task_configuration.mfhd_mrk_note_type}\t"
                     f"Note type not found in FOLIO.",
                     self.task_configuration.mfhd_mrk_note_type,
                 ) from ee

@@ -451,13 +471,16 @@ class RulesMapperHoldings(RulesMapperBase):
                     "note": chunk,
                     "holdingsNoteTypeId": holdings_note_type_id,
                     "staffOnly": True,
-                }
+                }
+                for chunk in self.split_mrk_by_max_note_size(str(marc_record))
             ]

     @staticmethod
-    def split_mrc_by_max_note_size(
+    def split_mrc_by_max_note_size(
+        data: bytes, sep: bytes = b"\x1e", max_chunk_size: int = 32000
+    ) -> List[bytes]:
         # Split data into segments, each ending with the separator (except possibly the last)
-        pattern = re.compile(b
+        pattern = re.compile(b"(.*?" + re.escape(sep) + b"|.+?$)", re.DOTALL)
         parts = [m.group(0) for m in pattern.finditer(data) if m.group(0)]
         chunks = []
         current_chunk = b""
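The new `split_mrc_by_max_note_size` signature splits raw MARC bytes on the field terminator (0x1E) and packs the segments into chunks below a size limit. A simplified sketch of the same splitting idea; the greedy packing loop is condensed from what the surrounding, unchanged lines suggest, and the sample bytes and tiny chunk size are invented:

    import re

    def split_by_max_size(data: bytes, sep: bytes = b"\x1e", max_chunk_size: int = 10):
        # Each segment ends with the separator, except possibly the last one.
        pattern = re.compile(b"(.*?" + re.escape(sep) + b"|.+?$)", re.DOTALL)
        parts = [m.group(0) for m in pattern.finditer(data) if m.group(0)]
        chunks, current = [], b""
        for part in parts:
            if current and len(current) + len(part) > max_chunk_size:
                chunks.append(current)
                current = b""
            current += part
        if current:
            chunks.append(current)
        return chunks

    print(split_by_max_size(b"aaa\x1ebbbb\x1ecc\x1e"))
    # [b'aaa\x1ebbbb\x1e', b'cc\x1e']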
@@ -471,7 +494,9 @@ class RulesMapperHoldings(RulesMapperBase):
             chunks.append(current_chunk)
         return chunks

-    def add_mfhd_as_mrc_note(
+    def add_mfhd_as_mrc_note(
+        self, marc_record: Record, folio_holding: Dict, legacy_ids: List[str]
+    ):
         """Adds the MFHD as a note to the holdings record

         This is done to preserve the information in the MARC record for future reference.

@@ -482,7 +507,9 @@ class RulesMapperHoldings(RulesMapperBase):
         """
         if self.task_configuration.include_mfhd_mrc_as_note:
             holdings_note_type_tuple = self.conditions.get_ref_data_tuple_by_name(
-                self.folio.holding_note_types,
+                self.folio.holding_note_types,
+                "holding_note_types",
+                self.task_configuration.mfhd_mrc_note_type,
             )
             try:
                 holdings_note_type_id = holdings_note_type_tuple[0]

@@ -490,7 +517,8 @@ class RulesMapperHoldings(RulesMapperBase):
                 logging.error(ee)
                 raise TransformationRecordFailedError(
                     legacy_ids,
-                    f
+                    f"Holdings note type mapping error.\tNote type name: "
+                    f"{self.task_configuration.mfhd_mrc_note_type}\t"
                     f"Note type not found in FOLIO.",
                     self.task_configuration.mfhd_mrc_note_type,
                 ) from ee

@@ -499,7 +527,8 @@ class RulesMapperHoldings(RulesMapperBase):
                     "note": chunk.decode("utf-8"),
                     "holdingsNoteTypeId": holdings_note_type_id,
                     "staffOnly": True,
-                }
+                }
+                for chunk in self.split_mrc_by_max_note_size(marc_record.as_marc())
             ]

     def wrap_up(self):

@@ -642,7 +671,7 @@ class RulesMapperHoldings(RulesMapperBase):
         Raises:
             TransformationProcessError: If MFHD_ID or BIB_ID is missing from the entry or if the instance_uuid is not in the parent_id_map.
             TransformationRecordFailedError: If BIB_ID is not in the instance id map.
-        """
+        """ # noqa: E501
         new_map = {}
         for idx, entry in enumerate(boundwith_relationship_map_list):
             self.verity_boundwith_map_entry(entry)

@@ -663,9 +692,10 @@ class RulesMapperHoldings(RulesMapperBase):
     def get_bw_instance_id_map_tuple(self, entry: Dict):
         try:
             return self.parent_id_map[entry["BIB_ID"]]
-        except KeyError:
+        except KeyError as e:
             raise TransformationRecordFailedError(
                 entry["MFHD_ID"],
-                "Boundwith relationship map contains a BIB_ID id not in the instance id map.
+                "Boundwith relationship map contains a BIB_ID id not in the instance id map. "
+                "No boundwith holdings created for this BIB_ID.",
                 entry["BIB_ID"],
-            )
+            ) from e
folio_migration_tools/migration_report.py

@@ -76,7 +76,7 @@ class MigrationReport:
                 ]
             )
         )
-        logging.info(f"Elapsed time: {time_finished-time_started}")
+        logging.info(f"Elapsed time: {time_finished - time_started}")
        for a in self.report:
            blurb_id = self.report[a].get("blurb_id") or ""
            report_file.write(
folio_migration_tools/migration_tasks/authority_transformer.py

@@ -66,8 +66,7 @@ class AuthorityTransformer(MigrationTaskBase):
            Field(
                title="Create source records",
                description=(
-                    "Controls wheter or not to retain the MARC records in "
-                    "Source Record Storage."
+                    "Controls wheter or not to retain the MARC records in Source Record Storage."
                ),
            ),
        ] = True