folio-migration-tools 1.9.2-py3-none-any.whl → 1.9.3-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- folio_migration_tools/mapping_file_transformation/user_mapper.py +39 -31
- folio_migration_tools/marc_rules_transformation/conditions.py +239 -30
- folio_migration_tools/marc_rules_transformation/holdings_statementsparser.py +99 -65
- folio_migration_tools/marc_rules_transformation/marc_file_processor.py +6 -1
- folio_migration_tools/migration_tasks/batch_poster.py +82 -33
- folio_migration_tools/migration_tasks/holdings_marc_transformer.py +22 -11
- folio_migration_tools/migration_tasks/items_transformer.py +21 -9
- folio_migration_tools/migration_tasks/loans_migrator.py +20 -4
- folio_migration_tools/task_configuration.py +3 -1
- folio_migration_tools/test_infrastructure/mocked_classes.py +5 -0
- folio_migration_tools/transaction_migration/legacy_loan.py +8 -2
- folio_migration_tools/translations/en.json +9 -0
- {folio_migration_tools-1.9.2.dist-info → folio_migration_tools-1.9.3.dist-info}/METADATA +1 -1
- {folio_migration_tools-1.9.2.dist-info → folio_migration_tools-1.9.3.dist-info}/RECORD +17 -17
- {folio_migration_tools-1.9.2.dist-info → folio_migration_tools-1.9.3.dist-info}/LICENSE +0 -0
- {folio_migration_tools-1.9.2.dist-info → folio_migration_tools-1.9.3.dist-info}/WHEEL +0 -0
- {folio_migration_tools-1.9.2.dist-info → folio_migration_tools-1.9.3.dist-info}/entry_points.txt +0 -0

folio_migration_tools/mapping_file_transformation/user_mapper.py
CHANGED

@@ -59,8 +59,8 @@ class UserMapper(MappingFileMapperBase):
             True,
         )
         self.notes_mapper.migration_report = self.migration_report
-        self.setup_departments_mapping(departments_mapping)
-        self.setup_groups_mapping(groups_map)
+        self.departments_mapping = self.setup_departments_mapping(departments_mapping)
+        self.groups_mapping = self.setup_groups_mapping(groups_map)

         for m in self.record_map["data"]:
             if m["folio_field"].startswith("customFields"):
@@ -120,7 +120,8 @@ class UserMapper(MappingFileMapperBase):

         return clean_folio_object

-
+    @staticmethod
+    def get_users(source_file, file_format: str):
         csv.register_dialect("tsv", delimiter="\t")
         if file_format == "tsv":
             reader = csv.DictReader(source_file, dialect="tsv")
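For reference, the `tsv` dialect registration used by `get_users` works like this; a minimal, self-contained sketch with made-up sample data:

```python
import csv
import io

# Registering a named dialect once lets DictReader split rows on tabs.
csv.register_dialect("tsv", delimiter="\t")

source_file = io.StringIO("barcode\tusername\n12345\tjdoe\n")  # sample data
reader = csv.DictReader(source_file, dialect="tsv")
for row in reader:
    print(row["barcode"], row["username"])  # -> 12345 jdoe
```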
@@ -156,12 +157,25 @@ class UserMapper(MappingFileMapperBase):
                     "No Departments mapping set up. Set up a departments mapping file "
                     " or remove the mapping of the Departments field",
                 )
-
-                self.departments_mapping,
-
-
-
-
+            if len(self.departments_mapping.mapped_legacy_keys) == 1 and self.library_configuration.multi_field_delimiter in legacy_user.get(self.departments_mapping.mapped_legacy_keys[0], ""):
+                split_departments = legacy_user.get(self.departments_mapping.mapped_legacy_keys[0], "").split(
+                    self.library_configuration.multi_field_delimiter
+                )
+                return self.library_configuration.multi_field_delimiter.join([
+                    self.get_mapped_name(
+                        self.departments_mapping,
+                        {self.departments_mapping.mapped_legacy_keys[0]: dept},
+                        index_or_id,
+                        True,
+                    ) for dept in split_departments
+                ])
+            else:
+                return self.get_mapped_name(
+                    self.departments_mapping,
+                    legacy_user,
+                    index_or_id,
+                    True,
+                )
         elif folio_prop_name in ["expirationDate", "enrollmentDate", "personal.dateOfBirth"]:
             return self.get_parsed_date(mapped_value, folio_prop_name)
         return mapped_value
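The effect of the new multi-value handling, isolated into a runnable sketch; the delimiter string and the legacy field name are illustrative, and a plain dict stands in for the RefDataMapping lookup:

```python
# Hypothetical legacy-to-FOLIO department lookup standing in for RefDataMapping.
departments_map = {"HIST": "History", "MATH": "Mathematics"}
multi_field_delimiter = "<delimiter>"  # illustrative; comes from LibraryConfiguration

legacy_value = f"HIST{multi_field_delimiter}MATH"
# Each delimited part is mapped on its own, then re-joined with the same
# delimiter so downstream processing can split the result again.
mapped = multi_field_delimiter.join(
    departments_map[dept] for dept in legacy_value.split(multi_field_delimiter)
)
print(mapped)  # -> History<delimiter>Mathematics
```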
@@ -184,27 +198,21 @@ class UserMapper(MappingFileMapperBase):
         return ""

     def setup_groups_mapping(self, groups_map):
-
-            self.
-
-
-
-
-
-
-            )
-        else:
-            self.groups_mapping = None
+        return RefDataMapping(
+            self.folio_client,
+            "/groups",
+            "usergroups",
+            groups_map,
+            "group",
+            "UserGroupMapping",
+        ) if groups_map else None

     def setup_departments_mapping(self, departments_mapping):
-
-            self.
-
-
-
-
-
-
-            )
-        else:
-            self.departments_mapping = None
+        return RefDataMapping(
+            self.folio_client,
+            "/departments",
+            "departments",
+            departments_mapping,
+            "name",
+            "DepartmentsMapping",
+        ) if departments_mapping else None

folio_migration_tools/marc_rules_transformation/conditions.py
CHANGED

@@ -90,6 +90,7 @@ class Conditions:
         logging.info("%s\tholding_note_types", len(self.folio.holding_note_types))  # type: ignore
         logging.info("%s\tcall_number_types", len(self.folio.call_number_types))  # type: ignore
         self.setup_and_validate_holdings_types()
+        self.ill_policies = self.folio.folio_get_all("/ill-policies", "illPolicies")
         # Raise for empty settings
         if not self.folio.holding_note_types:
             raise TransformationProcessError("", "No holding_note_types in FOLIO")

@@ -466,36 +467,6 @@ class Conditions:
     def condition_char_select(self, legacy_id, value, parameter, marc_field: field.Field):
         return value[parameter["from"] : parameter["to"]]

-    def condition_set_receipt_status(self, legacy_id, value, parameter, marc_field: field.Field):
-        if len(value) < 7:
-            self.mapper.migration_report.add(
-                "ReceiptStatusMapping", i18n.t("008 is too short") + f": {value}"
-            )
-            return ""
-        try:
-            status_map = {
-                "0": "Unknown",
-                "1": "Other receipt or acquisition status",
-                "2": "Received and complete or ceased",
-                "3": "On order",
-                "4": "Currently received",
-                "5": "Not currently received",
-                "6": "External access",
-            }
-            mapped_value = status_map[value[6]]
-            self.mapper.migration_report.add(
-                "ReceiptStatusMapping",
-                i18n.t(
-                    "%{value} mapped to %{mapped_value}", value=value[6], mapped_value=mapped_value
-                ),
-            )
-
-            return
-        except Exception:
-            self.mapper.migration_report.add(
-                "ReceiptStatusMapping", i18n.t("%{value} not found in map.", value=value)
-            )
-            return "Unknown"

     def condition_set_identifier_type_id_by_name(
         self, legacy_id, value, parameter, marc_field: field.Field
@@ -915,3 +886,241 @@ class Conditions:
             legacy_id,
             f"Subject source not found for {value} {marc_field}",
         )
+
+    def condition_set_receipt_status(
+        self, legacy_id, value, parameter, marc_field: field.Field
+    ):
+        """
+        This method maps the receipt status based on the 008 field.
+        This condition is not available in FOLIO's MARC mapping engine and
+        will require use of a supplemental mapping rules file in the
+        HoldingsMarcTransformer task definition.
+        """
+        if len(value) < 7:
+            self.mapper.migration_report.add(
+                "ReceiptStatusMapping", i18n.t("008 is too short") + f": {value}"
+            )
+            return ""
+
+        status_map = {
+            "0": "Unknown",
+            "1": "Other receipt or acquisition status",
+            "2": "Received and complete or ceased",
+            "3": "On order",
+            "4": "Currently received",
+            "5": "Not currently received",
+            "6": "External access",
+        }
+
+        try:
+            mapped_value = status_map[value[6]]
+            self.mapper.migration_report.add(
+                "ReceiptStatusMapping",
+                i18n.t(
+                    "%{value} mapped to %{mapped_value}",
+                    value=value[6],
+                    mapped_value=mapped_value,
+                ),
+            )
+            return mapped_value
+        except Exception:
+            self.mapper.migration_report.add(
+                "ReceiptStatusMapping", i18n.t("%{value} not found in map.", value=value[6])
+            )
+            return ""
+
+    def condition_set_acquisition_method(
+        self, legacy_id, value, parameter, marc_field: field.Field
+    ):
+        """
+        This method maps the acquisition method based on the 008 field.
+        This condition is not available in FOLIO's MARC mapping engine and
+        will require use of a supplemental mapping rules file in the
+        HoldingsMarcTransformer task definition.
+        """
+        if len(value) < 8:
+            self.mapper.migration_report.add(
+                "ReceiptStatusMapping", i18n.t("008 is too short") + f": {value}"
+            )
+            return ""
+
+        try:
+            acq_methods = {
+                "c": "Cooperative or consortial purchase",
+                "d": "Deposit",
+                "e": "Exchange",
+                "f": "Free",
+                "g": "Gift",
+                "l": "Legal deposit",
+                "m": "Membership",
+                "n": "Non-library purchase",
+                "p": "Purchase",
+                "q": "Lease",
+                "u": "Unknown",
+                "z": "Other method of acquisition",
+            }
+            mapped_value = acq_methods[value[7]]
+            self.mapper.migration_report.add(
+                "MethodOfAcquisitionMapping",
+                i18n.t(
+                    "%{value} mapped to %{mapped_value}", value=value[7], mapped_value=mapped_value
+                ),
+            )
+            return mapped_value
+        except Exception:
+            self.mapper.migration_report.add(
+                "MethodOfAcquisitionMapping", i18n.t("%{value} not found in map.", value=value[8])
+            )
+            return ""
+
+    def condition_set_retention_policy(
+        self, legacy_id, value, parameter, marc_field: field.Field
+    ):
+        """
+        This method maps the retention policy based on the 008 field.
+        This condition is not available in FOLIO's MARC mapping engine and
+        will require use of a supplemental mapping rules file in the
+        HoldingsMarcTransformer task definition.
+        """
+        if len(value) < 13:
+            self.mapper.migration_report.add(
+                "RetentionPolicyMapping", i18n.t("008 is too short") + f": {value}"
+            )
+            return ""
+        value = value.replace("|", " ").replace("#", " ")  # Replace pipe with space for mapping consistency
+        try:
+            retention_policies = {
+                "0": "Unknown",
+                "1": "Other general retention policy",
+                "2": "Retained except as replaced by updates",
+                "3": "Sample issue retained",
+                "4": "Retained until replaced by microform",
+                "5": "Retained until replaced by cumulation, replacement volume, or revision",
+                "6": "Retained for a limited period",
+                "7": "Not retained",
+                "8": "Permanently retained",
+            }
+            mapped_value = retention_policies[value[12]]
+            self.mapper.migration_report.add(
+                "RetentionPolicyMapping",
+                i18n.t(
+                    "%{value} mapped to %{mapped_value}",
+                    value=value[12],
+                    mapped_value=mapped_value,
+                ),
+            )
+            if value[12] == "6":
+                policy_types = {
+                    "l": "Latest",
+                    "p": "Previous",
+                }
+                unit_types = {
+                    "m": "Day",
+                    "w": "Month",
+                    "y": "Year",
+                    "e": "Edition",
+                    "i": "Issue",
+                    "s": "Supplement"
+                }
+                try:
+                    specific_retention_policy = ""
+                    if value[13].strip() or value[15].strip():
+                        if value[14].strip() and int(value[14]) > 1:
+                            specific_retention_policy = f"{policy_types.get(value[13], '')} {value[14]} {unit_types.get(value[15], '')}s retained".strip()
+                        else:
+                            specific_retention_policy = f"{policy_types.get(value[13], '')} {unit_types.get(value[15], '')} retained".strip()
+                    if specific_retention_policy:
+                        self.mapper.migration_report.add(
+                            "RetentionPolicyMapping",
+                            i18n.t(
+                                "Retention policy 6 indicates a limited period. Specific retention period will be mapped from 008/13-15",
+                            )
+                        )
+                        return specific_retention_policy
+                    else:
+                        raise ValueError(
+                            "Specific retention policy is empty or invalid in 008/13-15"
+                        )
+                except ValueError:
+                    self.mapper.migration_report.add(
+                        "RetentionPolicyMapping",
+                        i18n.t("Invalid specific retention policy in 008/13-15: %{value}", value=value[13:16]),
+                    )
+            return mapped_value
+        except Exception:
+            self.mapper.migration_report.add(
+                "RetentionPolicyMapping", i18n.t("%{value} not found in map.", value=value[12])
+            )
+            return ""
+
+    def condition_set_ill_policy(
+        self, legacy_id, value, parameter, marc_field: field.Field
+    ):
+        """
+        This method maps the ILL policy based on the 008 field.
+        This condition is not available in FOLIO's MARC mapping engine and
+        will require use of a supplemental mapping rules file in the
+        HoldingsMarcTransformer task definition."""
+        if len(value) < 21:
+            self.mapper.migration_report.add(
+                "ILLPolicyMapping", i18n.t("008 is too short") + f": {value}"
+            )
+            return ""
+        try:
+            ill_policies = {
+                "a": "Will lend",
+                "b": "Will not lend",
+                "c": "Will lend hard copy only",
+                "l": "Limited lending policy",
+                "u": "Unknown",
+            }
+            mapped_value = ill_policies[value[20]]
+            self.mapper.migration_report.add(
+                "ILLPolicyMapping",
+                i18n.t("%{value} mapped to %{mapped_value}", value=value[20], mapped_value=mapped_value),
+            )
+            ill_policy_id = self.get_ref_data_tuple_by_name(
+                self.ill_policies, "ill_policies", mapped_value
+            )
+            return ill_policy_id[0] if ill_policy_id else ""
+        except Exception:
+            self.mapper.migration_report.add(
+                "ILLPolicyMapping", i18n.t("%{value} not found in map.", value=value[20])
+            )
+            return ""
+
+    def condition_set_digitization_policy(
+        self, legacy_id, value, parameter, marc_field: field.Field
+    ):
+        """
+        This method maps the digitization policy based on the 008 field.
+        This condition is not available in FOLIO's MARC mapping engine and
+        will require use of a supplemental mapping rules file in the
+        HoldingsMarcTransformer task definition.
+        """
+        if len(value) < 22:
+            self.mapper.migration_report.add(
+                "DigitizationPolicyMapping", i18n.t("008 is too short") + f": {value}"
+            )
+            return ""
+        try:
+            digitization_policies = {
+                "a": "Will reproduce",
+                "b": "Will not reproduce",
+                "u": "Unknown",
+            }
+            mapped_value = digitization_policies[value[21]]
+            self.mapper.migration_report.add(
+                "DigitizationPolicyMapping",
+                i18n.t(
+                    "%{value} mapped to %{mapped_value}",
+                    value=value[21],
+                    mapped_value=mapped_value,
+                ),
+            )
+            return mapped_value
+        except Exception:
+            self.mapper.migration_report.add(
+                "DigitizationPolicyMapping", i18n.t("%{value} not found in map.", value=value[21])
+            )
+            return ""
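Per the docstrings above, these conditions only fire when referenced from a supplemental mapping rules file. A hedged sketch of what such an entry could look like, assuming the general tag-to-target shape of FOLIO MARC mapping rules and that a condition named `set_X` dispatches to `condition_set_X`; the target property names and the exact schema are assumptions, not taken from this diff:

```python
# Hypothetical contents of a supplemental MFHD rules file (shown as a Python
# dict; on disk this would be JSON). Everything here is illustrative.
supplemental_rules = {
    "008": [
        {
            "target": "receiptStatus",
            "description": "Receipt status from 008/06",
            "rules": [{"conditions": [{"type": "set_receipt_status"}]}],
        },
        {
            "target": "acquisitionMethod",
            "description": "Acquisition method from 008/07",
            "rules": [{"conditions": [{"type": "set_acquisition_method"}]}],
        },
        {
            "target": "illPolicyId",
            "description": "ILL policy from 008/20, resolved against /ill-policies",
            "rules": [{"conditions": [{"type": "set_ill_policy"}]}],
        },
    ]
}
```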

folio_migration_tools/marc_rules_transformation/holdings_statementsparser.py
CHANGED

@@ -23,18 +23,18 @@ class HoldingsStatementsParser:
         """_summary_

         Args:
-            marc_record (Record):
-            pattern_tag (str):
-            value_tag (str):
-            field_textual (str):
-            legacy_ids (List[str]):
-            dedupe_results (bool):
+            marc_record (Record): pymarc Record object
+            pattern_tag (str): MARC tag for the pattern field
+            value_tag (str): MARC tag for the value field
+            field_textual (str): MARC tag for the textual holdings field
+            legacy_ids (List[str]): List of legacy IDs associated with the record
+            dedupe_results (bool): Whether to deduplicate the results. Defaults to True.

         Raises:
-            TransformationFieldMappingError:
+            TransformationFieldMappingError: If there is an error in mapping the holdings statements.

         Returns:
-            dict:
+            dict: A dictionary containing parsed holdings statements and related information.
         """

         # Textual holdings statements

@@ -45,21 +45,9 @@ class HoldingsStatementsParser:

         value_fields = marc_record.get_fields(value_tag)
         for pattern_field in marc_record.get_fields(pattern_tag):
-
-
-
-                    i18n.t(
-                        "%{tag} subfield %{subfield} not in field",
-                        tag=pattern_tag,
-                        subfield="8",
-                    ),
-                    pattern_field,
-                )
-            linked_value_fields = [
-                value_field
-                for value_field in value_fields
-                if "8" in value_field and value_field["8"].split(".")[0] == pattern_field["8"]
-            ]
+            linked_value_fields = HoldingsStatementsParser.get_linked_value_fields(
+                pattern_tag, legacy_ids, value_fields, pattern_field
+            )

             if not any(linked_value_fields):
                 return_dict["migration_report"].append(

@@ -75,7 +63,7 @@ class HoldingsStatementsParser:
                     parsed_dict = HoldingsStatementsParser.parse_linked_field(
                         pattern_field, linked_value_field
                     )
-                except KeyError:
+                except KeyError as e:
                     raise TransformationFieldMappingError(
                         legacy_ids,
                         i18n.t(
@@ -84,24 +72,10 @@ class HoldingsStatementsParser:
                             linked_value_tag=linked_value_field,
                         ),
                         pattern_field,
-                    )
-
-                return_dict
-
-                logging.info(
-                    f"HOLDINGS STATEMENT PATTERN\t{'-'.join(legacy_ids)}\t{pattern_field}"
-                    f"\t{linked_value_field}"
-                    f"\t{parsed_dict['statement']['statement']}"
-                    f"\t{parsed_dict['statement']['note']}"
-                    f"\t{parsed_dict['statement']['staffNote']}"
-                )
-                return_dict["migration_report"].append(
-                    (
-                        "Holdings statements",
-                        f"From {pattern_tag}",
-                    )
-                )
-                return_dict["statements"].append(parsed_dict["statement"])
+                    ) from e
+            HoldingsStatementsParser.prepare_return_dict(
+                pattern_tag, legacy_ids, return_dict, pattern_field, linked_value_field, parsed_dict
+            )

         if dedupe_results:
             return_dict["statements"] = HoldingsStatementsParser.dedupe_list_of_dict(
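The `raise ... from e` form used above chains the original `KeyError` onto the new exception; a minimal illustration with a stand-in exception class:

```python
# "raise X from e" records the original error as __cause__, so tracebacks
# show both the KeyError and the higher-level mapping error.
class TransformationFieldMappingError(Exception):
    pass  # stand-in for the real class in folio_migration_tools

try:
    try:
        {}["8"]  # simulate a missing linkage subfield
    except KeyError as e:
        raise TransformationFieldMappingError("subfield 8 not in field") from e
except TransformationFieldMappingError as err:
    print(repr(err.__cause__))  # -> KeyError('8')
```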
@@ -109,6 +83,47 @@ class HoldingsStatementsParser:
             )
         return return_dict

+    @staticmethod
+    def prepare_return_dict(
+        pattern_tag, legacy_ids, return_dict, pattern_field, linked_value_field, parsed_dict
+    ):
+        if parsed_dict["hlm_stmt"]:
+            return_dict["hlm_stmts"].append(parsed_dict["hlm_stmt"])
+        if parsed_dict["statement"]:
+            logging.info(
+                f"HOLDINGS STATEMENT PATTERN\t{'-'.join(legacy_ids)}\t{pattern_field}"
+                f"\t{linked_value_field}"
+                f"\t{parsed_dict['statement']['statement']}"
+                f"\t{parsed_dict['statement']['note']}"
+                f"\t{parsed_dict['statement']['staffNote']}"
+            )
+            return_dict["migration_report"].append(
+                (
+                    "Holdings statements",
+                    f"From {pattern_tag}",
+                )
+            )
+            return_dict["statements"].append(parsed_dict["statement"])
+
+    @staticmethod
+    def get_linked_value_fields(pattern_tag, legacy_ids, value_fields, pattern_field):
+        if "8" not in pattern_field:
+            raise TransformationFieldMappingError(
+                legacy_ids,
+                i18n.t(
+                    "%{tag} subfield %{subfield} not in field",
+                    tag=pattern_tag,
+                    subfield="8",
+                ),
+                pattern_field,
+            )
+        linked_value_fields = [
+            value_field
+            for value_field in value_fields
+            if "8" in value_field and value_field["8"].split(".")[0] == pattern_field["8"]
+        ]
+        return linked_value_fields
+
     @staticmethod
     def parse_linked_field(pattern_field: Field, linked_value_fields: Field):
         break_ind = HoldingsStatementsParser.get_break_indicator(linked_value_fields)
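The subfield `$8` linkage that `get_linked_value_fields` resolves, sketched with pymarc (assuming pymarc 5.x's `Subfield` API): an 863 value field carries `$8 <link>.<sequence>`, and its link part must match the 853 pattern field's `$8`.

```python
from pymarc import Field, Subfield

pattern_field = Field(  # 853: captions and pattern
    tag="853", indicators=["2", "0"],
    subfields=[Subfield("8", "1"), Subfield("a", "v."), Subfield("b", "no.")],
)
value_field = Field(  # 863: enumeration and chronology values
    tag="863", indicators=[" ", " "],
    subfields=[Subfield("8", "1.1"), Subfield("a", "1-2"), Subfield("b", "1-4")],
)

# The comparison used in get_linked_value_fields: "1.1" links to pattern "1".
print(value_field["8"].split(".")[0] == pattern_field["8"])  # -> True
```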
@@ -123,6 +138,20 @@ class HoldingsStatementsParser:
             "hlm_stmt": hlm_stmt,
         }

+        _from, _to = HoldingsStatementsParser.format_from_to(_from, _to, cron_from, cron_to)
+        span = "-" if is_span or is_cron_span else ""
+        stmt = f"{_from}{span}{_to}{break_ind}" if _from else ""
+        stmt = stmt.strip()
+        if "z" in linked_value_fields:
+            return_dict["statement"]["note"] = linked_value_fields["z"]
+        if "x" in linked_value_fields:
+            return_dict["statement"]["staffNote"] = linked_value_fields["x"]
+        stmt = re.sub(" +", " ", stmt)
+        return_dict["statement"]["statement"] = stmt
+        return return_dict
+
+    @staticmethod
+    def format_from_to(_from, _to, cron_from, cron_to):
         if _from and cron_from:
             _from = f"{_from} ({cron_from})"
         if not _from and cron_from:

@@ -137,16 +166,7 @@ class HoldingsStatementsParser:
             _to = f"({cron_to})"
         if _from and _from == cron_from:
             _from = f"({cron_from})"
-
-        stmt = f"{_from}{span}{_to}{break_ind}" if _from else ""
-        stmt = stmt.strip()
-        if "z" in linked_value_fields:
-            return_dict["statement"]["note"] = linked_value_fields["z"]
-        if "x" in linked_value_fields:
-            return_dict["statement"]["staffNote"] = linked_value_fields["x"]
-        stmt = re.sub(" +", " ", stmt)
-        return_dict["statement"]["statement"] = stmt
-        return return_dict
+        return _from, _to

     @staticmethod
     def get_textual_statements(

@@ -276,12 +296,9 @@ class HoldingsStatementsParser:
             elif cron_to.strip() and val:
                 val_rest = val
             if year:
-
-
-
-                    cron_to = f"{cron_to}:{''.join(val_rest)} "
-                elif not cron_to and "".join(val_rest):
-                    cron_to = f"{spill_year}{''.join(val_rest)}"
+                cron_from, cron_to = HoldingsStatementsParser.format_year_cron_from_cron_to(
+                    cron_from, cron_to, hlm_stmt, val, val_rest
+                )

             else:
                 if "season" in desc:

@@ -292,6 +309,16 @@ class HoldingsStatementsParser:
                 cron_to = f"{cron_to} {''.join(val_rest)}".strip()
         return (f"{cron_from.strip()}", cron_to.strip(), hlm_stmt, is_span)

+    @staticmethod
+    def format_year_cron_from_cron_to(cron_from, cron_to, hlm_stmt, val, val_rest):
+        spill_year = f"{hlm_stmt}:" if "-" not in hlm_stmt else ""
+        cron_from = f"{cron_from.strip()}:{val}"
+        if cron_to and "".join(val_rest):
+            cron_to = f"{cron_to}:{''.join(val_rest)}"
+        elif not cron_to and "".join(val_rest):
+            cron_to = f"{spill_year}{''.join(val_rest)}"
+        return cron_from, cron_to
+
     @staticmethod
     def get_from_to(pattern_field: Field, linked_value_field: Field):
         _from = ""
@@ -300,11 +327,18 @@ class HoldingsStatementsParser:
         for enum_level in [el for el in "abcdef" if el in linked_value_field]:
             desc = pattern_field.get(enum_level, "")
             desc = desc.strip() if "(" not in desc else ""
-
-
-
-                _from = f"{_from}{(':' if _from else '')}{desc}{val}"
-                temp_to = "".join(val_rest)
-                if temp_to.strip():
-                    _to = f"{_to}{(':' if _to else '')}{desc}{temp_to}"
+            _from, _to, is_span = HoldingsStatementsParser.format_enum_parts(
+                linked_value_field, _from, _to, is_span, enum_level, desc
+            )
         return (f"{_from.strip()}", _to.strip(), is_span)
+
+    @staticmethod
+    def format_enum_parts(linked_value_field, _from, _to, is_span, enum_level, desc):
+        if linked_value_field.get(enum_level):
+            val, *val_rest = linked_value_field[enum_level].split("-")
+            is_span = "-" in linked_value_field[enum_level] or is_span
+            _from = f"{_from}{(':' if _from else '')}{desc}{val}"
+            temp_to = "".join(val_rest)
+            if temp_to.strip():
+                _to = f"{_to}{(':' if _to else '')}{desc}{temp_to}"
+        return _from, _to, is_span
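A worked run of the enumeration logic in `format_enum_parts`: with an 853 pattern of `$a v. $b no.` and an 863 value of `$a 1-2 $b 1-4`, each level contributes to both ends of the span (plain dicts stand in for the pymarc fields):

```python
_from, _to, is_span = "", "", False
pattern = {"a": "v.", "b": "no."}   # 853 captions
value = {"a": "1-2", "b": "1-4"}    # 863 values

for enum_level in "ab":
    desc = pattern[enum_level]
    val, *val_rest = value[enum_level].split("-")
    is_span = "-" in value[enum_level] or is_span
    _from = f"{_from}{(':' if _from else '')}{desc}{val}"
    temp_to = "".join(val_rest)
    if temp_to.strip():
        _to = f"{_to}{(':' if _to else '')}{desc}{temp_to}"

print(_from, _to, is_span)  # -> v.1:no.1 v.2:no.4 True
# parse_linked_field would then render the statement as "v.1:no.1-v.2:no.4"
```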

folio_migration_tools/marc_rules_transformation/marc_file_processor.py
CHANGED

@@ -65,7 +65,12 @@ class MarcFileProcessor:
         self.records_count += 1
         try:
             # Transform the MARC21 to a FOLIO record
-
+            try:
+                legacy_ids = self.mapper.get_legacy_ids(marc_record, idx)
+            except ValueError as e:
+                raise TransformationRecordFailedError(
+                    f"{idx} in {file_def.file_name}", str(e), idx
+                ) from e
             if not legacy_ids:
                 raise TransformationRecordFailedError(
                     f"Index in file: {idx}", "No legacy id found", idx

folio_migration_tools/migration_tasks/batch_poster.py
CHANGED

@@ -33,9 +33,9 @@ def write_failed_batch_to_file(batch, file):


 class BatchPoster(MigrationTaskBase):
-    """
+    """BatchPoster

-
+    Parents:
         MigrationTaskBase (_type_): _description_

     Raises:

@@ -158,7 +158,8 @@ class BatchPoster(MigrationTaskBase):
         if self.api_info["supports_upsert"]:
             self.query_params["upsert"] = self.task_configuration.upsert
         elif self.task_configuration.upsert and not self.api_info["supports_upsert"]:
-            logging.info(
+            logging.info(
+                "Upsert is not supported for this object type. Query parameter will not be set.")
         self.snapshot_id = str(uuid4())
         self.failed_objects: list = []
         self.batch_size = self.task_configuration.batch_size

@@ -174,11 +175,14 @@ class BatchPoster(MigrationTaskBase):
         self.okapi_headers = self.folio_client.okapi_headers
         self.http_client = None
         self.starting_record_count_in_folio: Optional[int] = None
+        self.finished_record_count_in_folio: Optional[int] = None

     def do_work(self):
         with self.folio_client.get_folio_http_client() as httpx_client:
             self.http_client = httpx_client
-            with open(
+            with open(
+                self.folder_structure.failed_recs_path, "w", encoding='utf-8'
+            ) as failed_recs_file:
                 self.get_starting_record_count()
                 try:
                     batch = []

@@ -249,7 +253,7 @@ class BatchPoster(MigrationTaskBase):
                 self.handle_generic_exception(
                     exception, last_row, batch, self.processed, failed_recs_file
                 )
-        logging.info("Done posting %s records. ",
+        logging.info("Done posting %s records. ", self.processed)
         if self.task_configuration.object_type == "SRS":
             self.commit_snapshot()

@@ -276,7 +280,7 @@ class BatchPoster(MigrationTaskBase):

     async def set_version_async(self, batch, query_api, object_type) -> None:
         """
-        Fetches the current version of the records in the batch
+        Fetches the current version of the records in the batch if the record exists in FOLIO

         Args:
             batch (list): List of records to fetch versions for

@@ -297,7 +301,10 @@ class BatchPoster(MigrationTaskBase):
                     client,
                     query_api,
                     params={
-                        "query":
+                        "query": (
+                            "id==("
+                            f"{' OR '.join([r['id'] for r in batch_slice if 'id' in r])})"
+                        ),
                         "limit": fetch_batch_size
                     },
                 )
@@ -332,11 +339,14 @@ class BatchPoster(MigrationTaskBase):
                 response.text,
             )

-    async def get_with_retry(self, client: httpx.AsyncClient, url: str, params
+    async def get_with_retry(self, client: httpx.AsyncClient, url: str, params=None):
+        if params is None:
+            params = {}
         retries = 3
         for attempt in range(retries):
             try:
-                response = await client.get(
+                response = await client.get(
+                    url, params=params, headers=self.folio_client.okapi_headers)
                 response.raise_for_status()
                 return response
             except httpx.HTTPError as e:
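The `params=None` plus in-body default seen here is the standard Python idiom for avoiding a shared mutable default argument; a minimal demonstration of the pitfall it sidesteps:

```python
def bad(params={}):          # default dict created once, at definition time
    params["n"] = params.get("n", 0) + 1
    return params

print(bad())  # -> {'n': 1}
print(bad())  # -> {'n': 2}  (same dict object; state leaks between calls)

def good(params=None):
    if params is None:
        params = {}          # fresh dict on every call
    params["n"] = params.get("n", 0) + 1
    return params

print(good())  # -> {'n': 1}
print(good())  # -> {'n': 1}
```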
@@ -477,8 +487,8 @@ class BatchPoster(MigrationTaskBase):
         )
         logging.info(last_row)
         logging.info("=========Stack trace==============")
-        traceback.logging.info_exc()
-        logging.info("======================="
+        traceback.logging.info_exc()  # type: ignore
+        logging.info("=======================")

     def post_batch(self, batch, failed_recs_file, num_records, recursion_depth=0):
         if self.query_params.get("upsert", False) and self.api_info.get("query_endpoint", ""):

@@ -514,7 +524,7 @@ class BatchPoster(MigrationTaskBase):
             )
             write_failed_batch_to_file(batch, failed_recs_file)
             if json_report.get("failedUsers", []):
-                logging.error("
+                logging.error("Error message: %s", json_report.get("error", []))
                 for failed_user in json_report.get("failedUsers"):
                     logging.error(
                         "User failed. %s\t%s\t%s",

@@ -581,8 +591,8 @@ class BatchPoster(MigrationTaskBase):
             resp = json.dumps(response, indent=4)
         except TypeError:
             resp = response
-        except Exception:
-            logging.exception("something unexpected happened")
+        except Exception as e:
+            logging.exception(f"something unexpected happened, {e}")
             resp = response
         raise TransformationRecordFailedError(
             "",

@@ -603,17 +613,29 @@ class BatchPoster(MigrationTaskBase):
         payload = {self.api_info["object_name"]: batch}
         if self.http_client and not self.http_client.is_closed:
             return self.http_client.post(
-                url,
+                url,
+                json=payload,
+                headers=self.folio_client.okapi_headers,
+                params=self.query_params
             )
         else:
-            return httpx.post(
+            return httpx.post(
+                url,
+                headers=self.okapi_headers,
+                json=payload,
+                params=self.query_params,
+                timeout=None)

     def get_current_record_count_in_folio(self):
         if "query_endpoint" in self.api_info:
             url = f"{self.folio_client.gateway_url}{self.api_info['query_endpoint']}"
             query_params = {"query": "cql.allRecords=1", "limit": 0}
             if self.http_client and not self.http_client.is_closed:
-                res = self.http_client.get(
+                res = self.http_client.get(
+                    url,
+                    headers=self.folio_client.okapi_headers,
+                    params=query_params
+                )
             else:
                 res = httpx.get(url, headers=self.okapi_headers, params=query_params, timeout=None)
             try:

@@ -623,11 +645,15 @@ class BatchPoster(MigrationTaskBase):
                 logging.error("Failed to get current record count. HTTP %s", res.status_code)
                 return 0
             except KeyError:
-                logging.error(
+                logging.error(
+                    "Failed to get current record count. "
+                    f"No 'totalRecords' in response: {res.json()}"
+                )
                 return 0
         else:
             raise ValueError(
-                "No 'query_endpoint' available for %s. Cannot get current record count.",
+                "No 'query_endpoint' available for %s. Cannot get current record count.",
+                self.task_configuration.object_type
             )

     def get_starting_record_count(self):

@@ -635,14 +661,20 @@ class BatchPoster(MigrationTaskBase):
             logging.info("Getting starting record count in FOLIO")
             self.starting_record_count_in_folio = self.get_current_record_count_in_folio()
         else:
-            logging.info(
+            logging.info(
+                "No query_endpoint available for %s. Cannot get starting record count.",
+                self.task_configuration.object_type
+            )

     def get_finished_record_count(self):
         if "query_endpoint" in self.api_info:
             logging.info("Getting finished record count in FOLIO")
             self.finished_record_count_in_folio = self.get_current_record_count_in_folio()
         else:
-            logging.info(
+            logging.info(
+                "No query_endpoint available for %s. Cannot get ending record count.",
+                self.task_configuration.object_type
+            )

     def wrap_up(self):
         logging.info("Done. Wrapping up")
@@ -663,7 +695,9 @@ class BatchPoster(MigrationTaskBase):
         logging.info("Done posting %s records. %s failed", self.num_posted, self.num_failures)
         if self.starting_record_count_in_folio:
             self.get_finished_record_count()
-            total_on_server =
+            total_on_server = (
+                self.finished_record_count_in_folio - self.starting_record_count_in_folio
+            )
             discrepancy = self.processed - self.num_failures - total_on_server
             if discrepancy != 0:
                 logging.error(
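The record-count sanity check in plain arithmetic, with made-up numbers:

```python
# Illustrative values only.
starting_record_count_in_folio = 1_000   # before posting
finished_record_count_in_folio = 10_850  # after posting
processed = 10_000
num_failures = 150

total_on_server = finished_record_count_in_folio - starting_record_count_in_folio  # 9_850
discrepancy = processed - num_failures - total_on_server  # 10_000 - 150 - 9_850 = 0
print(discrepancy)  # 0 means everything that should have landed in FOLIO did
```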
@@ -712,7 +746,10 @@ class BatchPoster(MigrationTaskBase):
             temp_report = copy.deepcopy(self.migration_report)
             temp_start = self.start_datetime
             self.task_configuration.rerun_failed_records = False
-            self.__init__(
+            self.__init__(
+                self.task_configuration,
+                self.library_configuration,
+                self.folio_client)
             self.performing_rerun = True
             self.migration_report = temp_report
             self.start_datetime = temp_start

@@ -747,9 +784,11 @@ class BatchPoster(MigrationTaskBase):
             res = httpx.post(url, headers=self.okapi_headers, json=snapshot, timeout=None)
             res.raise_for_status()
             logging.info("Posted Snapshot to FOLIO: %s", json.dumps(snapshot, indent=4))
-            get_url =
-
-
+            get_url = (
+                f"{self.folio_client.gateway_url}/source-storage/snapshots/{self.snapshot_id}"
+            )
+            got = False
+            while not got:
                 logging.info("Sleeping while waiting for the snapshot to get created")
                 time.sleep(5)
                 if self.http_client and not self.http_client.is_closed:

@@ -757,11 +796,14 @@ class BatchPoster(MigrationTaskBase):
                 else:
                     res = httpx.get(get_url, headers=self.okapi_headers, timeout=None)
                 if res.status_code == 200:
-
+                    got = True
                 else:
                     logging.info(res.status_code)
-        except
-            logging.exception("
+        except httpx.HTTPStatusError as exc:
+            logging.exception("HTTP error occurred while posting the snapshot: %s", exc)
+            sys.exit(1)
+        except Exception as exc:
+            logging.exception("Could not post the snapshot: %s", exc)
             sys.exit(1)

     def commit_snapshot(self):

@@ -776,11 +818,15 @@ class BatchPoster(MigrationTaskBase):
             res = httpx.put(url, headers=self.okapi_headers, json=snapshot, timeout=None)
             res.raise_for_status()
             logging.info("Posted Committed snapshot to FOLIO: %s", json.dumps(snapshot, indent=4))
-        except
+        except httpx.HTTPStatusError as exc:
+            logging.exception("HTTP error occurred while posting the snapshot: %s", exc)
+            sys.exit(1)
+        except Exception as exc:
             logging.exception(
                 "Could not commit snapshot with id %s. Post this to /source-storage/snapshots/%s:",
                 self.snapshot_id,
                 self.snapshot_id,
+                exc,
             )
             logging.info("%s", json.dumps(snapshot, indent=4))
             sys.exit(1)

@@ -891,8 +937,11 @@ def get_api_info(object_type: str, use_safe: bool = True):
     try:
         return choices[object_type]
     except KeyError:
-        key_string = ",".join(choices.keys())
-        logging.error(
+        key_string = ", ".join(choices.keys())
+        logging.error(
+            f"Wrong type. Only one of {key_string} are allowed, "
+            f"received {object_type=} instead"
+        )
         logging.error("Halting")
         sys.exit(1)

@@ -908,7 +957,7 @@ def chunks(records, number_of_chunks):
         _type_: _description_
     """
     for i in range(0, len(records), number_of_chunks):
-        yield records[i
+        yield records[i: i + number_of_chunks]


 def get_human_readable(size, precision=2):
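Usage of the repaired `chunks()` generator; note that despite the parameter name, the second argument is the size of each slice, not the number of slices:

```python
def chunks(records, number_of_chunks):
    for i in range(0, len(records), number_of_chunks):
        yield records[i: i + number_of_chunks]

print(list(chunks(list(range(7)), 3)))  # -> [[0, 1, 2], [3, 4, 5], [6]]
```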

folio_migration_tools/migration_tasks/holdings_marc_transformer.py
CHANGED

@@ -1,5 +1,3 @@
-'''Main "script."'''
-
 import csv
 import json
 import logging

@@ -19,7 +17,10 @@ from folio_migration_tools.library_configuration import (
 from folio_migration_tools.marc_rules_transformation.rules_mapper_holdings import (
     RulesMapperHoldings,
 )
-from folio_migration_tools.migration_tasks.migration_task_base import
+from folio_migration_tools.migration_tasks.migration_task_base import (
+    MarcTaskConfigurationBase,
+    MigrationTaskBase
+)


 class HoldingsMarcTransformer(MigrationTaskBase):

@@ -37,14 +38,18 @@ class HoldingsMarcTransformer(MigrationTaskBase):
         str,
         Field(
             title="Migration task type",
-            description=(
+            description=(
+                "The type of migration task you want to perform"
+            ),
         ),
     ]
     files: Annotated[
         List[FileDefinition],
         Field(
             title="Source files",
-            description=(
+            description=(
+                "List of MARC21 files with holdings records"
+            ),
         ),
     ]
     hrid_handling: Annotated[

@@ -125,8 +130,8 @@ class HoldingsMarcTransformer(MigrationTaskBase):
     default_call_number_type_name: Annotated[
         str,
         Field(
-            title="Default
-            description="The name of the
+            title="Default call_number type name",
+            description="The name of the call_number type that will be used as fallback",
         ),
     ]
     fallback_holdings_type_id: Annotated[

@@ -140,7 +145,10 @@ class HoldingsMarcTransformer(MigrationTaskBase):
         str,
         Field(
             title="Supplemental MFHD mapping rules file",
-            description=
+            description=(
+                "The name of the file in the mapping_files directory "
+                "containing supplemental MFHD mapping rules"
+            ),
         ),
     ] = ""
     include_mrk_statements: Annotated[

@@ -148,8 +156,10 @@ class HoldingsMarcTransformer(MigrationTaskBase):
         Field(
             title="Include MARC statements (MRK-format) as staff-only Holdings notes",
             description=(
-                "If set to true, the MARC statements
-                "
+                "If set to true, the MARC statements "
+                "will be included in the output as MARC Maker format fields. "
+                "If set to false (default), the MARC statements "
+                "will not be included in the output."
             ),
         ),
     ] = False

@@ -188,7 +198,8 @@ class HoldingsMarcTransformer(MigrationTaskBase):
             title="Include MARC Record (as MARC21 decoded string) as note",
             description=(
                 "If set to true, the MARC record will be included in the output as a "
-                "decoded binary MARC21 record. If set to false (default),
+                "decoded binary MARC21 record. If set to false (default), "
+                "the MARC record will not be "
                 "included in the output."
             ),
         ),

folio_migration_tools/migration_tasks/items_transformer.py
CHANGED

@@ -127,7 +127,8 @@ class ItemsTransformer(MigrationTaskBase):
             title="Statistical code map file name",
             description=(
                 "Path to the file containing the mapping of statistical codes. "
-                "The file should be in TSV format with legacy_stat_code
+                "The file should be in TSV format with legacy_stat_code "
+                "and folio_code columns."
             ),
         ),
     ] = ""

@@ -355,7 +356,7 @@ class ItemsTransformer(MigrationTaskBase):
                     )

                     self.mapper.perform_additional_mappings(legacy_id, folio_rec, file_def)
-                    self.
+                    self.handle_circulation_notes(folio_rec, self.folio_client.current_user)
                     self.handle_notes(folio_rec)
                     if folio_rec["holdingsRecordId"] in self.boundwith_relationship_map:
                         for idx_, instance_id in enumerate(

@@ -373,7 +374,7 @@ class ItemsTransformer(MigrationTaskBase):
                     if idx == 0:
                         logging.info("First FOLIO record:")
                         logging.info(json.dumps(folio_rec, indent=4))
-                    # TODO: turn this into a
+                    # TODO: turn this into a asynchronous task
                     Helper.write_to_file(results_file, folio_rec)
                     self.mapper.migration_report.add_general_statistics(
                         i18n.t("Number of records written to disk")

@@ -388,8 +389,8 @@ class ItemsTransformer(MigrationTaskBase):
             logging.fatal(attribute_error)
             logging.info("Quitting...")
             sys.exit(1)
-        except Exception as
-            self.mapper.handle_generic_exception(idx,
+        except Exception as exception:
+            self.mapper.handle_generic_exception(idx, exception)
         self.mapper.migration_report.add(
             "GeneralStatistics",
             i18n.t("Number of Legacy items in %{container}", container=file_def),

@@ -425,14 +426,14 @@ class ItemsTransformer(MigrationTaskBase):
             del folio_object["notes"]

     @staticmethod
-    def
+    def handle_circulation_notes(folio_rec, current_user_uuid):
         if not folio_rec.get("circulationNotes", []):
             return
         filtered_notes = []
         for circ_note in folio_rec.get("circulationNotes", []):
             if circ_note.get("noteType", "") not in ["Check in", "Check out"]:
                 raise TransformationProcessError(
-                    "", "Circulation Note types are not mapped
+                    "", "Circulation Note types are not mapped correctly"
                 )
             if circ_note.get("note", ""):
                 circ_note["id"] = str(uuid.uuid4())
@@ -455,11 +456,22 @@ class ItemsTransformer(MigrationTaskBase):
                     json.loads(x) for x in boundwith_relationship_file
                 )
                 logging.info(
-
+                    "Rows in Bound with relationship map: %s",
+                    len(self.boundwith_relationship_map)
                 )
         except FileNotFoundError:
             raise TransformationProcessError(
-
+                "",
+                "Boundwith relationship file specified, but relationships file "
+                "from holdings transformation not found.",
+                self.folder_structure.boundwith_relationships_map_path
+            )
+        except ValueError:
+            raise TransformationProcessError(
+                "",
+                "Boundwith relationship file specified, but relationships file "
+                "from holdings transformation is not a valid line JSON.",
+                self.folder_structure.boundwith_relationships_map_path,
             )

     def wrap_up(self):
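The boundwith relationship map is read as line-delimited JSON, which is what the new `ValueError` handler guards; a small sketch (the field names in the sample rows are illustrative):

```python
import json

lines = ['{"item_id": "i1", "instance_ids": ["a", "b"]}', "not json"]
try:
    rows = [json.loads(line) for line in lines]
except ValueError as e:  # json.JSONDecodeError is a ValueError subclass
    print(f"not a valid line JSON: {e}")
    # the task re-raises this as a TransformationProcessError
```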

folio_migration_tools/migration_tasks/loans_migrator.py
CHANGED

@@ -17,6 +17,7 @@ from folio_uuid.folio_namespaces import FOLIONamespaces
 from art import tprint

 from folio_migration_tools.circulation_helper import CirculationHelper
+from folio_migration_tools.custom_exceptions import TransformationRecordFailedError
 from folio_migration_tools.helper import Helper
 from folio_migration_tools.library_configuration import (
     FileDefinition,

@@ -233,7 +234,7 @@ class LoansMigrator(MigrationTaskBase):
             legacy_loan (LegacyLoan): The Legacy loan
         """
         res_checkout = self.circulation_helper.check_out_by_barcode(legacy_loan)
-
+
         if res_checkout.was_successful:
             self.migration_report.add("Details", i18n.t("Checked out on first try"))
             self.migration_report.add_general_statistics(i18n.t("Successfully checked out"))

@@ -302,7 +303,7 @@ class LoansMigrator(MigrationTaskBase):

     def wrap_up(self):
         for k, v in self.failed.items():
-            self.failed_and_not_dupe[k] = [v.to_dict()]
+            self.failed_and_not_dupe[k] = [v if isinstance(v, dict) else v.to_dict()]
         print(f"Wrapping up. Unique loans in failed:{len(self.failed_and_not_dupe)}")

         self.write_failed_loans_to_file()

@@ -404,6 +405,19 @@ class LoansMigrator(MigrationTaskBase):
                 ] = legacy_loan
             else:
                 results.append(legacy_loan)
+        except TransformationRecordFailedError as trfe:
+            num_bad += 1
+            self.migration_report.add_general_statistics(
+                i18n.t("Loans failed pre-validation")
+            )
+            self.migration_report.add(
+                "DiscardedLoans",
+                f"{trfe.message} - see data issues log",
+            )
+            trfe.log_it()
+            self.failed[
+                legacy_loan_dict.get("item_barcode", f"no_barcode_{legacy_loan_count}")
+            ] = legacy_loan_dict
         except ValueError as ve:
             logging.exception(ve)
             logging.info(

@@ -449,7 +463,7 @@ class LoansMigrator(MigrationTaskBase):
             elif folio_checkout.error_message == "Declared lost":
                 return folio_checkout
             elif folio_checkout.error_message.startswith("Cannot check out to inactive user"):
-                return self.
+                return self.checkout_to_inactive_user(legacy_loan)
             else:
                 self.migration_report.add(
                     "Details",

@@ -479,7 +493,7 @@ class LoansMigrator(MigrationTaskBase):
             del self.failed[legacy_loan.item_barcode]
         return TransactionResult(False, False, "", "", "")

-    def
+    def checkout_to_inactive_user(self, legacy_loan) -> TransactionResult:
         logging.info("Cannot check out to inactive user. Activating and trying again")
         user = self.get_user_by_barcode(legacy_loan.patron_barcode)
         expiration_date = user.get("expirationDate", datetime.isoformat(datetime.now()))

@@ -487,6 +501,8 @@ class LoansMigrator(MigrationTaskBase):
             self.activate_user(user)
             logging.debug("Successfully Activated user")
             res = self.circulation_helper.check_out_by_barcode(legacy_loan)  # checkout_and_update
+            if res.should_be_retried:
+                res = self.handle_checkout_failure(legacy_loan, res)
             self.migration_report.add("Details", res.migration_report_message)
             self.deactivate_user(user, expiration_date)
             logging.debug("Successfully Deactivated user again")

folio_migration_tools/task_configuration.py
CHANGED

@@ -25,7 +25,9 @@ class AbstractTaskConfiguration(BaseModel):
         str,
         Field(
             title="Migration task type",
-            description=(
+            description=(
+                "The type of migration task you want to perform."
+            ),
         ),
     ]
     ecs_tenant_id: Annotated[

folio_migration_tools/test_infrastructure/mocked_classes.py
CHANGED

@@ -121,6 +121,11 @@ def folio_get_all_mocked(ref_data_path, array_name, query="", limit=10):
             "name": "FOLIO user department name",
             "code": "fdp",
         },
+        {
+            "id": "12a2ad12-951d-4124-9fb2-58c70f0b7f72",
+            "name": "FOLIO user department name 2",
+            "code": "fdp2",
+        },
         {
             "id": "2f452d21-507d-4b32-a89d-8ea9753cc946",
             "name": "FOLIO fallback user department name",

folio_migration_tools/transaction_migration/legacy_loan.py
CHANGED

@@ -8,7 +8,7 @@ from dateutil import tz
 from dateutil.parser import parse, ParserError

 from folio_migration_tools.migration_report import MigrationReport
-from folio_migration_tools.custom_exceptions import
+from folio_migration_tools.custom_exceptions import TransformationRecordFailedError

 utc = ZoneInfo("UTC")

@@ -124,7 +124,13 @@ class LegacyLoan(object):
         if self.out_date.hour == 0:
             self.out_date = self.out_date.replace(hour=0, minute=1)
         if self.due_date <= self.out_date:
-            raise
+            raise TransformationRecordFailedError(
+                self.row,
+                i18n.t(
+                    "Due date is before out date, or date information is missing from both"
+                ),
+                json.dumps(self.legacy_loan_dict, indent=2)
+            )

     def to_dict(self):
         return {
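The date guard in isolation: out times at midnight are nudged to 00:01 first, so a due date that equals or precedes the adjusted out date is rejected:

```python
from datetime import datetime

out_date = datetime(2024, 5, 1, 0, 0)
if out_date.hour == 0:
    out_date = out_date.replace(hour=0, minute=1)  # midnight -> 00:01

due_date = datetime(2024, 5, 1, 0, 1)
print(due_date <= out_date)  # -> True: this loan would raise
                             # TransformationRecordFailedError
```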

folio_migration_tools/translations/en.json
CHANGED

@@ -446,6 +446,15 @@
    "blurbs.ValueSetInMappingFile.title": "Value set in mapping file",
    "blurbs.ValuesMappedFromLegacyFields.description": "A list fo the values and what they were mapped to",
    "blurbs.ValuesMappedFromLegacyFields.title": "Values mapped from legacy fields",
+    "blurbs.MethodOfAcquisitionMapping.title": "Method of acquisition",
+    "blurbs.MethodOfAcquisitionMapping.description": "Acquisition methods mapped from `008[7]` (LoC documentation)[https://www.loc.gov/marc/holdings/hd008.html]",
+    "blurbs.RetentionPolicyMapping.title": "Retention policy",
+    "blurbs.RetentionPolicyMapping.description": "Retention policies mapped from `008[12-15]` (LoC documentation)[https://www.loc.gov/marc/holdings/hd008.html]",
+    "blurbs.ILLPolicyMapping.title": "ILL policy",
+    "blurbs.ILLPolicyMapping.description": "ILL policies mapped from `008[20]` (LoC documentation)[https://www.loc.gov/marc/holdings/hd008.html]",
+    "blurbs.DigitizationPolicyMapping.title": "Digitization policy",
+    "blurbs.DigitizationPolicyMapping.description": "Digitization policies mapped from `008[21]` (LoC documentation)[https://www.loc.gov/marc/holdings/hd008.html]",
+    "Invalid specific retention policy in 008/13-15: %{value}": "Invalid specific retention policy in 008/13-15: %{value}",
    "created": "created",
    "instance type code (%{code}) not found in FOLIO": "instance type code (%{code}) not found in FOLIO",
    "item barcode": "item barcode",

{folio_migration_tools-1.9.2.dist-info → folio_migration_tools-1.9.3.dist-info}/RECORD
CHANGED

@@ -22,13 +22,13 @@ folio_migration_tools/mapping_file_transformation/notes_mapper.py,sha256=vCmZmjr
 folio_migration_tools/mapping_file_transformation/order_mapper.py,sha256=-JEBEeOntNPE9-NYhWAJ1hpQI03ZzMv-_mkyLzSa9x4,17750
 folio_migration_tools/mapping_file_transformation/organization_mapper.py,sha256=u1Lb6tApn-nVLqbbJV38BuipKL3OK8Y2uQ4ogoyGQaI,14639
 folio_migration_tools/mapping_file_transformation/ref_data_mapping.py,sha256=qFsn_LwKZeKFdOudfEQnNA3DEHOdNQVKzTPdZAlDPX0,8864
-folio_migration_tools/mapping_file_transformation/user_mapper.py,sha256=
+folio_migration_tools/mapping_file_transformation/user_mapper.py,sha256=LJTj2F2dRKqyI37Ww0gY1AHLLT3dqyuKkY_RS_3-rg0,8543
 folio_migration_tools/marc_rules_transformation/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-folio_migration_tools/marc_rules_transformation/conditions.py,sha256=
-folio_migration_tools/marc_rules_transformation/holdings_statementsparser.py,sha256=
+folio_migration_tools/marc_rules_transformation/conditions.py,sha256=F78a70HXcDLnOXDs_vSTdgf4opMWHzXzOjvpWlOh4PM,47719
+folio_migration_tools/marc_rules_transformation/holdings_statementsparser.py,sha256=gM0ETZVVih35cSSpOBXA8wrBFhq2oeYaGsD89tnSNJs,13433
 folio_migration_tools/marc_rules_transformation/hrid_handler.py,sha256=SgnSYeNR0z_qarkizBMWZZWr8tOPZJ4fvlZjlM3nJOU,9999
 folio_migration_tools/marc_rules_transformation/loc_language_codes.xml,sha256=ztn2_yKws6qySL4oSsZh7sOjxq5bCC1PhAnXJdtgmJ0,382912
-folio_migration_tools/marc_rules_transformation/marc_file_processor.py,sha256=
+folio_migration_tools/marc_rules_transformation/marc_file_processor.py,sha256=QhVbJSlsWkGQgUo7ZVmQvlwpEN20Tyon_kzrZOWECoE,12549
 folio_migration_tools/marc_rules_transformation/marc_reader_wrapper.py,sha256=9ATjYMRAjy0QcXtmNZaHVhHLJ5hE1WUgOcF6KMJjbgo,5309
 folio_migration_tools/marc_rules_transformation/rules_mapper_authorities.py,sha256=PGt2w8h2pj8_8sGjQe3L-odFDlquURtKnoNFRWQB3GI,9621
 folio_migration_tools/marc_rules_transformation/rules_mapper_base.py,sha256=loNZ9gEYaAwjkP2_wLlXGedjWvSdHoGF_oJN9g6gI3s,45928

@@ -37,13 +37,13 @@ folio_migration_tools/marc_rules_transformation/rules_mapper_holdings.py,sha256=
 folio_migration_tools/migration_report.py,sha256=BkRspM1hwTBnWeqsHamf7yVEofzLj560Q-9G--O00hw,4258
 folio_migration_tools/migration_tasks/__init__.py,sha256=ZkbY_yGyB84Ke8OMlYUzyyBj4cxxNrhMTwQlu_GbdDs,211
 folio_migration_tools/migration_tasks/authority_transformer.py,sha256=AoXg9s-GLO3yEEDCrQV7hc4YVXxwxsdxDdpj1zhHydE,4251
-folio_migration_tools/migration_tasks/batch_poster.py,sha256=
+folio_migration_tools/migration_tasks/batch_poster.py,sha256=xN1BBZNGW2lZHWPznF6nkYV15XGhwwzcZccCzTbPfA4,40868
 folio_migration_tools/migration_tasks/bibs_transformer.py,sha256=46d44pcDAodFXDYbrTCMRASISbDciXmA0CXYfhP2IaE,6298
 folio_migration_tools/migration_tasks/courses_migrator.py,sha256=CzXnsu-KGP7B4zcINJzLYUqz47D16NuFfzu_DPqRlTQ,7061
 folio_migration_tools/migration_tasks/holdings_csv_transformer.py,sha256=kMhtHE8DJjA4d6kXBcfflueha3R3nwlBQjdec8CaY8c,21926
-folio_migration_tools/migration_tasks/holdings_marc_transformer.py,sha256=
-folio_migration_tools/migration_tasks/items_transformer.py,sha256=
-folio_migration_tools/migration_tasks/loans_migrator.py,sha256=
+folio_migration_tools/migration_tasks/holdings_marc_transformer.py,sha256=c_ruhOgidyJdSnnRwWUs3wwFMiLqbVMPOhhCaYuH_TI,14343
+folio_migration_tools/migration_tasks/items_transformer.py,sha256=HlTzV7K0AiGBHw56VMascupMKXG0Pv8LS65O9EiQ2VU,19637
+folio_migration_tools/migration_tasks/loans_migrator.py,sha256=_7yZH951p5mhLjbyH1r496DG591dD1tg_mmTtHas62o,35316
 folio_migration_tools/migration_tasks/manual_fee_fines_transformer.py,sha256=CnmlTge7nChUJ10EiUkriQtJlVxWqglgfhjgneh2_yM,7247
 folio_migration_tools/migration_tasks/migration_task_base.py,sha256=Q-57h6rmt74bC9LidA9ZoagEcwVd_ytq8IUWelVOm2E,22521
 folio_migration_tools/migration_tasks/orders_transformer.py,sha256=6SnzU_rUTu2B5hQykI2nRA7vI1rg-uxuF9Ncupe0AEY,14302

@@ -51,17 +51,17 @@ folio_migration_tools/migration_tasks/organization_transformer.py,sha256=vcCjhN1
 folio_migration_tools/migration_tasks/requests_migrator.py,sha256=QP9OBezC3FfcKpI78oMmydxcPaUIYAgHyKevyLwC-WQ,14841
 folio_migration_tools/migration_tasks/reserves_migrator.py,sha256=4sSPer6_6yMwiiY1VYJmYZske_Ah1XG4KAM3NDadPhg,9952
 folio_migration_tools/migration_tasks/user_transformer.py,sha256=aylrMC9n47fdStgsNfW4ZbJh2E4FDSPypsaNv52ynKc,12330
-folio_migration_tools/task_configuration.py,sha256=
+folio_migration_tools/task_configuration.py,sha256=73OWc8TX--fwPRptv3eQVEVv0-XmNaZcb3m__1HENSA,1161
 folio_migration_tools/test_infrastructure/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-folio_migration_tools/test_infrastructure/mocked_classes.py,sha256=
+folio_migration_tools/test_infrastructure/mocked_classes.py,sha256=BurU3NGU_Q8as_BGmW98q9O6bujZDkOfFmvKKdVw9t8,15056
 folio_migration_tools/transaction_migration/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-folio_migration_tools/transaction_migration/legacy_loan.py,sha256=
+folio_migration_tools/transaction_migration/legacy_loan.py,sha256=phd9oO6xd91qC4ilRq3podZ-rKIIwQ01SXe0JxbZAbQ,6339
 folio_migration_tools/transaction_migration/legacy_request.py,sha256=1ulyFzPQw_InOjyPzkWpGnNptgXdQ18nmri0J8Nlpkc,6124
 folio_migration_tools/transaction_migration/legacy_reserve.py,sha256=qzw0okg4axAE_ezXopP9gFsQ_e60o0zh7zqRzFBSWHY,1806
 folio_migration_tools/transaction_migration/transaction_result.py,sha256=cTdCN0BnlI9_ZJB2Z3Fdkl9gpymIi-9mGZsRFlQcmDk,656
-folio_migration_tools/translations/en.json,sha256=
-folio_migration_tools-1.9.
-folio_migration_tools-1.9.
-folio_migration_tools-1.9.
-folio_migration_tools-1.9.
-folio_migration_tools-1.9.
+folio_migration_tools/translations/en.json,sha256=TPQRTDdvdkZI2iHczP4hKmFEbd7Hyo5BE37uSo54W_4,40691
+folio_migration_tools-1.9.3.dist-info/LICENSE,sha256=PhIEkitVi3ejgq56tt6sWoJIG_zmv82cjjd_aYPPGdI,1072
+folio_migration_tools-1.9.3.dist-info/METADATA,sha256=dPvDnsZ0qw3K6pPfHatlCPfSCov_7d8Ll7L2pJYSta8,7444
+folio_migration_tools-1.9.3.dist-info/WHEEL,sha256=b4K_helf-jlQoXBBETfwnf4B04YC67LOev0jo4fX5m8,88
+folio_migration_tools-1.9.3.dist-info/entry_points.txt,sha256=Hbe-HjqMcU8FwVshVIkeWyZd9XwgT1CCMNf06EpHQu8,77
+folio_migration_tools-1.9.3.dist-info/RECORD,,

{folio_migration_tools-1.9.2.dist-info → folio_migration_tools-1.9.3.dist-info}/LICENSE
RENAMED
File without changes

{folio_migration_tools-1.9.2.dist-info → folio_migration_tools-1.9.3.dist-info}/WHEEL
RENAMED
File without changes

{folio_migration_tools-1.9.2.dist-info → folio_migration_tools-1.9.3.dist-info}/entry_points.txt
RENAMED
File without changes