folio-migration-tools 1.9.0rc5__tar.gz → 1.9.0rc7__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (67)
  1. {folio_migration_tools-1.9.0rc5 → folio_migration_tools-1.9.0rc7}/PKG-INFO +1 -1
  2. {folio_migration_tools-1.9.0rc5 → folio_migration_tools-1.9.0rc7}/pyproject.toml +1 -1
  3. folio_migration_tools-1.9.0rc7/src/folio_migration_tools/__init__.py +11 -0
  4. {folio_migration_tools-1.9.0rc5 → folio_migration_tools-1.9.0rc7}/src/folio_migration_tools/custom_dict.py +2 -2
  5. {folio_migration_tools-1.9.0rc5 → folio_migration_tools-1.9.0rc7}/src/folio_migration_tools/custom_exceptions.py +14 -11
  6. {folio_migration_tools-1.9.0rc5 → folio_migration_tools-1.9.0rc7}/src/folio_migration_tools/mapper_base.py +23 -3
  7. {folio_migration_tools-1.9.0rc5 → folio_migration_tools-1.9.0rc7}/src/folio_migration_tools/mapping_file_transformation/item_mapper.py +24 -9
  8. {folio_migration_tools-1.9.0rc5 → folio_migration_tools-1.9.0rc7}/src/folio_migration_tools/mapping_file_transformation/mapping_file_mapper_base.py +3 -7
  9. {folio_migration_tools-1.9.0rc5 → folio_migration_tools-1.9.0rc7}/src/folio_migration_tools/marc_rules_transformation/conditions.py +54 -30
  10. {folio_migration_tools-1.9.0rc5 → folio_migration_tools-1.9.0rc7}/src/folio_migration_tools/marc_rules_transformation/rules_mapper_base.py +71 -38
  11. {folio_migration_tools-1.9.0rc5 → folio_migration_tools-1.9.0rc7}/src/folio_migration_tools/marc_rules_transformation/rules_mapper_holdings.py +4 -1
  12. {folio_migration_tools-1.9.0rc5 → folio_migration_tools-1.9.0rc7}/src/folio_migration_tools/migration_tasks/batch_poster.py +38 -14
  13. {folio_migration_tools-1.9.0rc5 → folio_migration_tools-1.9.0rc7}/src/folio_migration_tools/migration_tasks/items_transformer.py +2 -2
  14. {folio_migration_tools-1.9.0rc5 → folio_migration_tools-1.9.0rc7}/src/folio_migration_tools/transaction_migration/legacy_loan.py +25 -27
  15. folio_migration_tools-1.9.0rc5/src/folio_migration_tools/__init__.py +0 -3
  16. {folio_migration_tools-1.9.0rc5 → folio_migration_tools-1.9.0rc7}/LICENSE +0 -0
  17. {folio_migration_tools-1.9.0rc5 → folio_migration_tools-1.9.0rc7}/README.md +0 -0
  18. {folio_migration_tools-1.9.0rc5 → folio_migration_tools-1.9.0rc7}/src/folio_migration_tools/__main__.py +0 -0
  19. {folio_migration_tools-1.9.0rc5 → folio_migration_tools-1.9.0rc7}/src/folio_migration_tools/circulation_helper.py +0 -0
  20. {folio_migration_tools-1.9.0rc5 → folio_migration_tools-1.9.0rc7}/src/folio_migration_tools/colors.py +0 -0
  21. {folio_migration_tools-1.9.0rc5 → folio_migration_tools-1.9.0rc7}/src/folio_migration_tools/config_file_load.py +0 -0
  22. {folio_migration_tools-1.9.0rc5 → folio_migration_tools-1.9.0rc7}/src/folio_migration_tools/extradata_writer.py +0 -0
  23. {folio_migration_tools-1.9.0rc5 → folio_migration_tools-1.9.0rc7}/src/folio_migration_tools/folder_structure.py +0 -0
  24. {folio_migration_tools-1.9.0rc5 → folio_migration_tools-1.9.0rc7}/src/folio_migration_tools/helper.py +0 -0
  25. {folio_migration_tools-1.9.0rc5 → folio_migration_tools-1.9.0rc7}/src/folio_migration_tools/holdings_helper.py +0 -0
  26. {folio_migration_tools-1.9.0rc5 → folio_migration_tools-1.9.0rc7}/src/folio_migration_tools/i18n_config.py +0 -0
  27. {folio_migration_tools-1.9.0rc5 → folio_migration_tools-1.9.0rc7}/src/folio_migration_tools/library_configuration.py +0 -0
  28. {folio_migration_tools-1.9.0rc5 → folio_migration_tools-1.9.0rc7}/src/folio_migration_tools/mapping_file_transformation/__init__.py +0 -0
  29. {folio_migration_tools-1.9.0rc5 → folio_migration_tools-1.9.0rc7}/src/folio_migration_tools/mapping_file_transformation/courses_mapper.py +0 -0
  30. {folio_migration_tools-1.9.0rc5 → folio_migration_tools-1.9.0rc7}/src/folio_migration_tools/mapping_file_transformation/holdings_mapper.py +0 -0
  31. {folio_migration_tools-1.9.0rc5 → folio_migration_tools-1.9.0rc7}/src/folio_migration_tools/mapping_file_transformation/manual_fee_fines_mapper.py +0 -0
  32. {folio_migration_tools-1.9.0rc5 → folio_migration_tools-1.9.0rc7}/src/folio_migration_tools/mapping_file_transformation/notes_mapper.py +0 -0
  33. {folio_migration_tools-1.9.0rc5 → folio_migration_tools-1.9.0rc7}/src/folio_migration_tools/mapping_file_transformation/order_mapper.py +0 -0
  34. {folio_migration_tools-1.9.0rc5 → folio_migration_tools-1.9.0rc7}/src/folio_migration_tools/mapping_file_transformation/organization_mapper.py +0 -0
  35. {folio_migration_tools-1.9.0rc5 → folio_migration_tools-1.9.0rc7}/src/folio_migration_tools/mapping_file_transformation/ref_data_mapping.py +0 -0
  36. {folio_migration_tools-1.9.0rc5 → folio_migration_tools-1.9.0rc7}/src/folio_migration_tools/mapping_file_transformation/user_mapper.py +0 -0
  37. {folio_migration_tools-1.9.0rc5 → folio_migration_tools-1.9.0rc7}/src/folio_migration_tools/marc_rules_transformation/__init__.py +0 -0
  38. {folio_migration_tools-1.9.0rc5 → folio_migration_tools-1.9.0rc7}/src/folio_migration_tools/marc_rules_transformation/holdings_statementsparser.py +0 -0
  39. {folio_migration_tools-1.9.0rc5 → folio_migration_tools-1.9.0rc7}/src/folio_migration_tools/marc_rules_transformation/hrid_handler.py +0 -0
  40. {folio_migration_tools-1.9.0rc5 → folio_migration_tools-1.9.0rc7}/src/folio_migration_tools/marc_rules_transformation/loc_language_codes.xml +0 -0
  41. {folio_migration_tools-1.9.0rc5 → folio_migration_tools-1.9.0rc7}/src/folio_migration_tools/marc_rules_transformation/marc_file_processor.py +0 -0
  42. {folio_migration_tools-1.9.0rc5 → folio_migration_tools-1.9.0rc7}/src/folio_migration_tools/marc_rules_transformation/marc_reader_wrapper.py +0 -0
  43. {folio_migration_tools-1.9.0rc5 → folio_migration_tools-1.9.0rc7}/src/folio_migration_tools/marc_rules_transformation/rules_mapper_authorities.py +0 -0
  44. {folio_migration_tools-1.9.0rc5 → folio_migration_tools-1.9.0rc7}/src/folio_migration_tools/marc_rules_transformation/rules_mapper_bibs.py +0 -0
  45. {folio_migration_tools-1.9.0rc5 → folio_migration_tools-1.9.0rc7}/src/folio_migration_tools/migration_report.py +0 -0
  46. {folio_migration_tools-1.9.0rc5 → folio_migration_tools-1.9.0rc7}/src/folio_migration_tools/migration_tasks/__init__.py +0 -0
  47. {folio_migration_tools-1.9.0rc5 → folio_migration_tools-1.9.0rc7}/src/folio_migration_tools/migration_tasks/authority_transformer.py +0 -0
  48. {folio_migration_tools-1.9.0rc5 → folio_migration_tools-1.9.0rc7}/src/folio_migration_tools/migration_tasks/bibs_transformer.py +0 -0
  49. {folio_migration_tools-1.9.0rc5 → folio_migration_tools-1.9.0rc7}/src/folio_migration_tools/migration_tasks/courses_migrator.py +0 -0
  50. {folio_migration_tools-1.9.0rc5 → folio_migration_tools-1.9.0rc7}/src/folio_migration_tools/migration_tasks/holdings_csv_transformer.py +0 -0
  51. {folio_migration_tools-1.9.0rc5 → folio_migration_tools-1.9.0rc7}/src/folio_migration_tools/migration_tasks/holdings_marc_transformer.py +0 -0
  52. {folio_migration_tools-1.9.0rc5 → folio_migration_tools-1.9.0rc7}/src/folio_migration_tools/migration_tasks/loans_migrator.py +0 -0
  53. {folio_migration_tools-1.9.0rc5 → folio_migration_tools-1.9.0rc7}/src/folio_migration_tools/migration_tasks/manual_fee_fines_transformer.py +0 -0
  54. {folio_migration_tools-1.9.0rc5 → folio_migration_tools-1.9.0rc7}/src/folio_migration_tools/migration_tasks/migration_task_base.py +0 -0
  55. {folio_migration_tools-1.9.0rc5 → folio_migration_tools-1.9.0rc7}/src/folio_migration_tools/migration_tasks/orders_transformer.py +0 -0
  56. {folio_migration_tools-1.9.0rc5 → folio_migration_tools-1.9.0rc7}/src/folio_migration_tools/migration_tasks/organization_transformer.py +0 -0
  57. {folio_migration_tools-1.9.0rc5 → folio_migration_tools-1.9.0rc7}/src/folio_migration_tools/migration_tasks/requests_migrator.py +0 -0
  58. {folio_migration_tools-1.9.0rc5 → folio_migration_tools-1.9.0rc7}/src/folio_migration_tools/migration_tasks/reserves_migrator.py +0 -0
  59. {folio_migration_tools-1.9.0rc5 → folio_migration_tools-1.9.0rc7}/src/folio_migration_tools/migration_tasks/user_transformer.py +0 -0
  60. {folio_migration_tools-1.9.0rc5 → folio_migration_tools-1.9.0rc7}/src/folio_migration_tools/task_configuration.py +0 -0
  61. {folio_migration_tools-1.9.0rc5 → folio_migration_tools-1.9.0rc7}/src/folio_migration_tools/test_infrastructure/__init__.py +0 -0
  62. {folio_migration_tools-1.9.0rc5 → folio_migration_tools-1.9.0rc7}/src/folio_migration_tools/test_infrastructure/mocked_classes.py +0 -0
  63. {folio_migration_tools-1.9.0rc5 → folio_migration_tools-1.9.0rc7}/src/folio_migration_tools/transaction_migration/__init__.py +0 -0
  64. {folio_migration_tools-1.9.0rc5 → folio_migration_tools-1.9.0rc7}/src/folio_migration_tools/transaction_migration/legacy_request.py +0 -0
  65. {folio_migration_tools-1.9.0rc5 → folio_migration_tools-1.9.0rc7}/src/folio_migration_tools/transaction_migration/legacy_reserve.py +0 -0
  66. {folio_migration_tools-1.9.0rc5 → folio_migration_tools-1.9.0rc7}/src/folio_migration_tools/transaction_migration/transaction_result.py +0 -0
  67. {folio_migration_tools-1.9.0rc5 → folio_migration_tools-1.9.0rc7}/src/folio_migration_tools/translations/en.json +0 -0
@@ -1,6 +1,6 @@
  Metadata-Version: 2.3
  Name: folio_migration_tools
- Version: 1.9.0rc5
+ Version: 1.9.0rc7
  Summary: A tool allowing you to migrate data from legacy ILS:s (Library systems) into FOLIO LSP
  License: MIT
  Keywords: FOLIO,ILS,LSP,Library Systems,MARC21,Library data
@@ -1,6 +1,6 @@
  [project]
  name = "folio_migration_tools"
- version = "1.9.0rc5"
+ version = "1.9.0rc7"
  description = "A tool allowing you to migrate data from legacy ILS:s (Library systems) into FOLIO LSP"
  authors = [
  {name = "Theodor Tolstoy", email = "github.teddes@tolstoy.se"},
@@ -0,0 +1,11 @@
+ import importlib.metadata
+ from typing import Protocol
+
+ __version__ = importlib.metadata.version("folio_migration_tools")
+
+ class StrCoercible(Protocol):
+ def __repr__(self) -> str:
+ ...
+
+ def __str__(self) -> str:
+ ...
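For illustration only (this snippet is not part of the package): any object that defines __repr__ and __str__ satisfies the new StrCoercible protocol structurally, so it can be carried as the data_value of the transformation exceptions updated below. The LegacyRecord class here is invented.

    from folio_migration_tools.custom_exceptions import TransformationFieldMappingError

    class LegacyRecord:
        """Hypothetical legacy record; defining __repr__ and __str__ makes it StrCoercible."""

        def __init__(self, barcode: str):
            self.barcode = barcode

        def __repr__(self) -> str:
            return f"LegacyRecord(barcode={self.barcode!r})"

        def __str__(self) -> str:
            return self.barcode

    # data_value may now be a str or any StrCoercible object, not just a str.
    err = TransformationFieldMappingError("row 12", "Missing location code", LegacyRecord("b123"))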
@@ -7,10 +7,10 @@ class InsensitiveDictReader(csv.DictReader):
  # spaces and to lower case.
  @property
  def fieldnames(self):
- return [field.strip().lower() for field in csv.DictReader.fieldnames.fget(self)]
+ return [field.strip().lower() for field in csv.DictReader.fieldnames.fget(self)] # type: ignore

  def next(self):
- return InsensitiveDict(csv.DictReader.next(self))
+ return InsensitiveDict(csv.DictReader.next(self)) # type: ignore


  class InsensitiveDict(dict):
@@ -1,19 +1,22 @@
  import logging
+ from typing import Union
  import i18n

+ from folio_migration_tools import StrCoercible

- class TransfomationError(Exception):
+
+ class TransformationError(Exception):
  pass


- class TransformationFieldMappingError(TransfomationError):
- """Raised when the a field mapping fails, but the error is not critical.
+ class TransformationFieldMappingError(TransformationError):
+ """Raised when the field mapping fails, but the error is not critical.
  The issue should be logged for the library to act upon it"""

- def __init__(self, index_or_id="", message="", data_value=""):
+ def __init__(self, index_or_id="", message="", data_value: Union[str, StrCoercible]=""):
  self.index_or_id = index_or_id or ""
  self.message = message
- self.data_value = data_value
+ self.data_value: Union[str, StrCoercible] = data_value
  super().__init__(self.message)

  def __str__(self):
@@ -32,13 +35,13 @@ class TransformationFieldMappingError(TransfomationError):
  )


- class TransformationRecordFailedError(TransfomationError):
- """Raised when the a field mapping fails, Error is critical and means tranformation fails"""
+ class TransformationRecordFailedError(TransformationError):
+ """Raised when the field mapping fails, Error is critical and means transformation fails"""

  def __init__(self, index_or_id, message="", data_value=""):
  self.index_or_id = index_or_id
  self.message = message
- self.data_value = data_value
+ self.data_value: Union[str, StrCoercible] = data_value
  # logging.log(26, f"RECORD FAILED\t{self.id}\t{self.message}\t{self.data_value}")
  super().__init__(self.message)

@@ -58,8 +61,8 @@ class TransformationRecordFailedError(TransfomationError):
  )


- class TransformationProcessError(TransfomationError):
- """Raised when the transformation fails due to incorrect configuraiton,
+ class TransformationProcessError(TransformationError):
+ """Raised when the transformation fails due to incorrect configuration,
  mapping or reference data. This error should take the process to a halt."""

  def __init__(
@@ -67,7 +70,7 @@ class TransformationProcessError(TransfomationError):
  index_or_id,
  message="Critical Process issue. Transformation failed."
  " Check configuration, mapping files and reference data",
- data_value="",
+ data_value: Union[str, StrCoercible]="",
  ):
  self.index_or_id = index_or_id
  self.message = message
@@ -6,7 +6,7 @@ import sys
  import uuid
  from datetime import datetime, timezone
  from pathlib import Path
- from typing import List
+ from typing import Dict, List

  import i18n
  from folio_uuid.folio_namespaces import FOLIONamespaces
@@ -35,7 +35,7 @@ class MapperBase:
  self,
  library_configuration: LibraryConfiguration,
  folio_client: FolioClient,
- parent_id_map: dict[str, tuple] = None,
+ parent_id_map: dict[str, tuple] = {},
  ):
  logging.info("MapperBase initiating")
  self.parent_id_map: dict[str, tuple] = parent_id_map
@@ -318,7 +318,14 @@ class MapperBase:
  entry["MFHD_ID"],
  )
  )
- new_map[mfhd_uuid] = new_map.get(mfhd_uuid, []) + [instance_uuid]
+ if entry["BIB_ID"] in self.parent_id_map:
+ new_map[mfhd_uuid] = new_map.get(mfhd_uuid, []) + [instance_uuid]
+ else:
+ raise TransformationRecordFailedError(
+ entry["MFHD_ID"],
+ "Boundwith relationship map contains a BIB_ID id not in the instance id map. No boundwith holdings created.",
+ entry["BIB_ID"],
+ )

  return new_map

@@ -469,6 +476,19 @@ class MapperBase:
  )
  )

+ @staticmethod
+ def validate_location_map(location_map: List[Dict], locations: List[Dict]) -> List[Dict]:
+ mapped_codes = [x['folio_code'] for x in location_map]
+ existing_codes = [x['code'] for x in locations]
+ missing_codes = set(mapped_codes) - set(existing_codes)
+ if missing_codes:
+ raise TransformationProcessError(
+ "",
+ f"Location map contains codes not found in locations: {', '.join(missing_codes)}",
+ "",
+ )
+ return location_map
+

  def flatten(my_dict: dict, path=""):
  for k, v in iter(my_dict.items()):
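A minimal sketch of how the new validate_location_map guard behaves; the location codes below are invented for illustration and are not part of the package:

    from folio_migration_tools.custom_exceptions import TransformationProcessError
    from folio_migration_tools.mapper_base import MapperBase

    location_map = [
        {"legacy_code": "MAIN", "folio_code": "main-stacks"},
        {"legacy_code": "REF", "folio_code": "ref-room"},
    ]
    # Locations as returned from the FOLIO tenant; only one of the mapped codes exists here.
    locations = [{"code": "main-stacks"}]

    try:
        MapperBase.validate_location_map(location_map, locations)
    except TransformationProcessError as tpe:
        # "Location map contains codes not found in locations: ref-room"
        print(tpe.message)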
@@ -43,7 +43,7 @@ class ItemMapper(MappingFileMapperBase):
  temporary_location_mapping,
  library_configuration: LibraryConfiguration,
  boundwith_relationship_map,
- task_configuration: AbstractTaskConfiguration
+ task_configuration: AbstractTaskConfiguration,
  ):
  item_schema = folio_client.get_item_schema()
  super().__init__(
@@ -128,17 +128,24 @@ class ItemMapper(MappingFileMapperBase):
  folio_record["discoverySuppress"] = file_def.discovery_suppressed
  self.migration_report.add(
  "Suppression",
- i18n.t("Suppressed from discovery") + f' = {folio_record["discoverySuppress"]}',
+ i18n.t("Suppressed from discovery")
+ + f" = {folio_record['discoverySuppress']}",
  )

  def setup_status_mapping(self, item_statuses_map):
- statuses = self.item_schema["properties"]["status"]["properties"]["name"]["enum"]
+ statuses = self.item_schema["properties"]["status"]["properties"]["name"][
+ "enum"
+ ]
  for mapping in item_statuses_map:
  if "folio_name" not in mapping:
- logging.critical("folio_name is not a column in the status mapping file")
+ logging.critical(
+ "folio_name is not a column in the status mapping file"
+ )
  sys.exit(1)
  elif "legacy_code" not in mapping:
- logging.critical("legacy_code is not a column in the status mapping file")
+ logging.critical(
+ "legacy_code is not a column in the status mapping file"
+ )
  sys.exit(1)
  elif mapping["folio_name"] not in statuses:
  logging.critical(
@@ -153,7 +160,9 @@ class ItemMapper(MappingFileMapperBase):
  )
  sys.exit(1)
  elif not all(mapping.values()):
- logging.critical("empty value in mapping %s. Check mapping file", mapping.values())
+ logging.critical(
+ "empty value in mapping %s. Check mapping file", mapping.values()
+ )
  sys.exit(1)
  else:
  self.status_mapping = {
@@ -206,7 +215,9 @@ class ItemMapper(MappingFileMapperBase):
  index_or_id,
  True,
  )
- self.migration_report.add("TemporaryLoanTypeMapping", f"{folio_prop_name} -> {ltid}")
+ self.migration_report.add(
+ "TemporaryLoanTypeMapping", f"{folio_prop_name} -> {ltid}"
+ )
  return ltid
  elif folio_prop_name == "permanentLoanTypeId":
  return self.get_mapped_ref_data_value(
@@ -232,7 +243,9 @@ class ItemMapper(MappingFileMapperBase):
  normalized_barcode = barcode.strip().lower()
  if normalized_barcode and normalized_barcode in self.unique_barcodes:
  Helper.log_data_issue(index_or_id, "Duplicate barcode", mapped_value)
- self.migration_report.add_general_statistics(i18n.t("Duplicate barcodes"))
+ self.migration_report.add_general_statistics(
+ i18n.t("Duplicate barcodes")
+ )
  return f"{barcode}-{uuid4()}"
  else:
  if normalized_barcode:
@@ -257,7 +270,9 @@ class ItemMapper(MappingFileMapperBase):
  self.migration_report.add("UnmappedProperties", f"{folio_prop_name}")
  return ""

- def get_item_level_call_number_type_id(self, legacy_item, folio_prop_name: str, index_or_id):
+ def get_item_level_call_number_type_id(
+ self, legacy_item, folio_prop_name: str, index_or_id
+ ):
  if self.call_number_mapping:
  return self.get_mapped_ref_data_value(
  self.call_number_mapping, legacy_item, index_or_id, folio_prop_name
@@ -755,12 +755,8 @@ class MappingFileMapperBase(MapperBase):
  for k in data
  if k["folio_field"] == folio_prop_name
  and any(
- [
- is_set_or_bool_or_numeric(k.get("value", "")),
- is_set_or_bool_or_numeric(k.get("legacy_field", "")),
- is_set_or_bool_or_numeric(k.get("fallback_legacy_field", "")),
- is_set_or_bool_or_numeric(k.get("fallback_value", "")),
- ]
+ is_set_or_bool_or_numeric(k.get(key, ""))
+ for key in ("value", "legacy_field", "fallback_legacy_field", "fallback_value")
  )
  )

@@ -972,4 +968,4 @@ def in_deep(dictionary, keys):


  def is_set_or_bool_or_numeric(any_value):
- return any(isinstance(any_value, t) for t in [int, bool, float, complex]) or any_value.strip()
+ return (isinstance(any_value, str) and (any_value.strip() not in empty_vals)) or isinstance(any_value, (int, float, complex))
@@ -1,8 +1,9 @@
  import logging
  import re
+ import traceback
+ from typing import Union

  import i18n
- import pymarc
  from folioclient import FolioClient
  from pymarc import field

@@ -41,8 +42,8 @@ class Conditions:
  self.folio_release: FolioRelease = folio_release
  self.filter_last_chars = r",$"
  self.folio = folio
- self.default_contributor_type = ""
- self.mapper = mapper
+ self.default_contributor_type: dict = {}
+ self.mapper: RulesMapperBase = mapper
  self.ref_data_dicts = {}
  if object_type == "bibs":
  self.setup_reference_data_for_all()
@@ -52,15 +53,16 @@ class Conditions:
  else:
  self.setup_reference_data_for_all()
  self.setup_reference_data_for_items_and_holdings(default_call_number_type_name)
+ self.object_type = object_type
  self.condition_cache: dict = {}

  def setup_reference_data_for_bibs(self):
  logging.info("Setting up reference data for bib transformation")
- logging.info("%s\tcontrib_name_types", len(self.folio.contrib_name_types))
- logging.info("%s\tcontributor_types", len(self.folio.contributor_types))
- logging.info("%s\talt_title_types", len(self.folio.alt_title_types))
- logging.info("%s\tidentifier_types", len(self.folio.identifier_types))
- logging.info("%s\tsubject_types", len(self.folio.subject_types))
+ logging.info("%s\tcontrib_name_types", len(self.folio.contrib_name_types)) # type: ignore
+ logging.info("%s\tcontributor_types", len(self.folio.contributor_types)) # type: ignore
+ logging.info("%s\talt_title_types", len(self.folio.alt_title_types)) # type: ignore
+ logging.info("%s\tidentifier_types", len(self.folio.identifier_types)) # type: ignore
+ logging.info("%s\tsubject_types", len(self.folio.subject_types)) # type: ignore
  # Raise for empty settings
  if not self.folio.contributor_types:
  raise TransformationProcessError("", "No contributor_types in FOLIO")
@@ -75,18 +77,18 @@ class Conditions:

  # Set defaults
  logging.info("Setting defaults")
- self.default_contributor_name_type = self.folio.contrib_name_types[0]["id"]
+ self.default_contributor_name_type: str = self.folio.contrib_name_types[0]["id"] # type: ignore
  logging.info("Contributor name type:\t%s", self.default_contributor_name_type)
  self.default_contributor_type = next(
- ct for ct in self.folio.contributor_types if ct["code"] == "ctb"
+ ct for ct in self.folio.contributor_types if ct["code"] == "ctb" # type: ignore
  )
  logging.info("Contributor type:\t%s", self.default_contributor_type["id"])

  def setup_reference_data_for_items_and_holdings(self, default_call_number_type_name):
- logging.info(f"{len(self.folio.locations)}\tlocations")
+ logging.info(f"{len(self.folio.locations)}\tlocations") # type: ignore
  self.default_call_number_type = {}
- logging.info("%s\tholding_note_types", len(self.folio.holding_note_types))
- logging.info("%s\tcall_number_types", len(self.folio.call_number_types))
+ logging.info("%s\tholding_note_types", len(self.folio.holding_note_types)) # type: ignore
+ logging.info("%s\tcall_number_types", len(self.folio.call_number_types)) # type: ignore
  self.setup_and_validate_holdings_types()
  # Raise for empty settings
  if not self.folio.holding_note_types:
@@ -98,10 +100,10 @@ class Conditions:

  # Set defaults
  logging.info("Defaults")
- self.default_call_number_type = next(
+ self.default_call_number_type: dict = next(
  (
  ct
- for ct in self.folio.call_number_types
+ for ct in self.folio.call_number_types # type: ignore
  if ct["name"] == default_call_number_type_name
  ),
  None,
@@ -124,7 +126,7 @@ class Conditions:
  missing_holdings_types = [
  ht
  for ht in self.holdings_type_map.values()
- if ht not in [ht_ref["name"] for ht_ref in self.holdings_types]
+ if ht not in [ht_ref["name"] for ht_ref in self.holdings_types] # type: ignore
  ]
  if any(missing_holdings_types):
  raise TransformationProcessError(
@@ -132,15 +134,15 @@ class Conditions:
  "Holdings types are missing from the tenant. Please set them up",
  missing_holdings_types,
  )
- logging.info("%s\tholdings types", len(self.holdings_types))
+ logging.info("%s\tholdings types", len(self.holdings_types)) # type: ignore

  def setup_reference_data_for_all(self):
- logging.info(f"{len(self.folio.class_types)}\tclass_types")
+ logging.info(f"{len(self.folio.class_types)}\tclass_types") # type: ignore
  logging.info(
- f"{len(self.folio.electronic_access_relationships)}\telectronic_access_relationships"
+ f"{len(self.folio.electronic_access_relationships)}\telectronic_access_relationships" # type: ignore
  )
  self.statistical_codes = self.folio.statistical_codes
- logging.info(f"{len(self.statistical_codes)} \tstatistical_codes")
+ logging.info(f"{len(self.statistical_codes)} \tstatistical_codes") # type: ignore

  # Raise for empty settings
  if not self.folio.class_types:
@@ -153,13 +155,13 @@ class Conditions:
  )
  )
  logging.info(f"{len(self.authority_note_types)} \tAuthority note types")
- logging.info(f"{len(self.folio.identifier_types)} \tidentifier types")
+ logging.info(f"{len(self.folio.identifier_types)} \tidentifier types") # type: ignore

  def get_condition(
- self, name, legacy_id, value, parameter=None, marc_field: field.Field = None
+ self, name, legacy_id, value, parameter=None, marc_field: Union[None, field.Field] = None
  ):
  try:
- return self.condition_cache.get(name)(legacy_id, value, parameter, marc_field)
+ return self.condition_cache.get(name)(legacy_id, value, parameter, marc_field) # type: ignore
  # Exception should only handle the missing condition from the cache.
  # All other exceptions should propagate up
  except Exception:
@@ -375,10 +377,10 @@ class Conditions:
  "MappedIdentifierTypes", f"{marc_field.tag} -> {t[1]}"
  )
  return t[0]
- identifier_type = next(
+ identifier_type: dict = next(
  (
  f
- for f in self.folio.identifier_types
+ for f in self.folio.identifier_types # type: ignore
  if (
  f["name"] in parameter.get("names", "non existant")
  or f["name"] in parameter.get("name", "non existant")
@@ -617,7 +619,7 @@ class Conditions:
  return self._extracted_from_condition_set_electronic_access_relations_id_2("8", marc_field)

  def condition_set_call_number_type_by_indicator(
- self, legacy_id, value, parameter, marc_field: pymarc.Field
+ self, legacy_id, value, parameter, marc_field: field.Field
  ):
  self.mapper.migration_report.add(
  "Exceptions",
@@ -629,7 +631,7 @@ class Conditions:
  return self.condition_set_call_number_type_id(legacy_id, value, parameter, marc_field)

  def condition_set_call_number_type_id(
- self, legacy_id, value, parameter, marc_field: pymarc.Field
+ self, legacy_id, value, parameter, marc_field: field.Field
  ):
  first_level_map = {
  "0": "Library of Congress classification",
@@ -690,12 +692,13 @@ class Conditions:
  ):
  for subfield in marc_field.get_subfields("4", "e"):
  normalized_subfield = re.sub(r"[^A-Za-z0-9 ]+", "", subfield.strip())
- for cont_type in self.folio.contributor_types:
+ for cont_type in self.folio.contributor_types: # type: ignore
  if normalized_subfield in [cont_type["code"], cont_type["name"]]:
  return cont_type["name"]
  try:
  return value
- except IndexError as ee:
+ except IndexError:
+ logging.debug("Exception occurred: %s", traceback.format_exc())
  return ""

  def condition_set_alternative_title_type_id(self, legacy_id, value, parameter, marc_field):
@@ -728,7 +731,7 @@ class Conditions:
  ):
  if "legacy_locations" not in self.ref_data_dicts:
  try:
- d = {lm["legacy_code"]: lm["folio_code"] for lm in self.mapper.location_map}
+ d = {lm["legacy_code"]: lm["folio_code"] for lm in self.mapper.location_map} # type: ignore
  self.ref_data_dicts["legacy_locations"] = d
  for folio_code in d.values():
  t = self.get_ref_data_tuple_by_code(
@@ -812,6 +815,27 @@ class Conditions:
  def condition_set_electronic_access_relations_id(
  self, legacy_id, value, parameter, marc_field: field.Field
  ):
+ """
+ This method handles the mapping of electronic access relationship IDs.
+ If the record type being mapped is FOLIO holdings, it provides an (optional) alternative
+ mapping based on a provided name parameter, bypassing the FOLIO MARC-to-Holdings mapping
+ engine behavior. This requires use of a supplemental mapping rules file in the
+ HoldingsMarcTransformer task definition containing the name parameter.
+ """
+ if self.object_type == "holdings" and "name" in parameter:
+ try:
+ t = self.get_ref_data_tuple_by_name(
+ self.folio.electronic_access_relationships,
+ "electronic_access_relationships",
+ parameter["name"],
+ )
+ self.mapper.migration_report.add("MappedElectronicRelationshipTypes", t[1])
+ return t[0]
+ except Exception:
+ raise TransformationProcessError(
+ legacy_id,
+ f"Electronic access relationship not found for {parameter['name']} {marc_field}",
+ )
  return self._extracted_from_condition_set_electronic_access_relations_id_2("3", marc_field)

  # TODO Rename this here and in `condition_set_url_relationship` and `condition_set_electronic_access_relations_id`
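To make the docstring above concrete, a supplemental mapping rules entry for the HoldingsMarcTransformer task might carry the name parameter roughly as sketched below. The 856 tag, the subfield list, and the "Resource" relationship name are illustrative assumptions, not taken from the package:

    # Hypothetical fragment of a supplemental MARC-to-Holdings mapping rules file,
    # shown here as a Python dict. The "name" parameter is what the new holdings-specific
    # branch of condition_set_electronic_access_relations_id looks up by name.
    supplemental_rules = {
        "856": [
            {
                "entity": [
                    {
                        "target": "electronicAccess.relationshipId",
                        "subfield": ["u"],
                        "rules": [
                            {
                                "conditions": [
                                    {
                                        "type": "set_electronic_access_relations_id",
                                        "parameter": {"name": "Resource"},
                                    }
                                ]
                            }
                        ],
                    }
                ]
            }
        ]
    }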
@@ -597,46 +597,48 @@ class RulesMapperBase(MapperBase):
  legacy_ids,
  ):
  entity_mapping = mapping["entity"]
- e_parent = entity_mapping[0]["target"].split(".")[0]
- if mapping.get("entityPerRepeatedSubfield", False):
- for temp_field in self.grouped(marc_field):
- entity = self.create_entity(entity_mapping, temp_field, e_parent, legacy_ids)
- if entity and (
- (isinstance(entity, dict) and all(entity.values()))
- or (isinstance(entity, list) and all(entity))
+ if entity_indicators_match(entity_mapping, marc_field):
+ entity_mapping = [x for x in entity_mapping if "indicators" not in x]
+ e_parent = entity_mapping[0]["target"].split(".")[0]
+ if mapping.get("entityPerRepeatedSubfield", False):
+ for temp_field in self.grouped(marc_field):
+ entity = self.create_entity(entity_mapping, temp_field, e_parent, legacy_ids)
+ if entity and (
+ (isinstance(entity, dict) and all(entity.values()))
+ or (isinstance(entity, list) and all(entity))
+ ):
+ self.add_entity_to_record(entity, e_parent, folio_record, self.schema)
+ else:
+ if mapping.get("ignoreSubsequentSubfields", False):
+ marc_field = self.remove_repeated_subfields(marc_field)
+ entity = self.create_entity(entity_mapping, marc_field, e_parent, legacy_ids)
+ if e_parent in ["precedingTitles", "succeedingTitles"]:
+ self.create_preceding_succeeding_titles(
+ entity, e_parent, folio_record["id"], marc_field
+ )
+ elif entity and (
+ all(
+ v
+ for k, v in entity.items()
+ if k not in ["staffOnly", "primary", "isbnValue", "issnValue"]
+ )
+ or e_parent in ["electronicAccess", "publication"]
+ or (
+ e_parent.startswith("holdingsStatements") and any(v for k, v in entity.items())
+ )
  ):
  self.add_entity_to_record(entity, e_parent, folio_record, self.schema)
- else:
- if mapping.get("ignoreSubsequentSubfields", False):
- marc_field = self.remove_repeated_subfields(marc_field)
- entity = self.create_entity(entity_mapping, marc_field, e_parent, legacy_ids)
- if e_parent in ["precedingTitles", "succeedingTitles"]:
- self.create_preceding_succeeding_titles(
- entity, e_parent, folio_record["id"], marc_field
- )
- elif entity and (
- all(
- v
- for k, v in entity.items()
- if k not in ["staffOnly", "primary", "isbnValue", "issnValue"]
- )
- or e_parent in ["electronicAccess", "publication"]
- or (
- e_parent.startswith("holdingsStatements") and any(v for k, v in entity.items())
- )
- ):
- self.add_entity_to_record(entity, e_parent, folio_record, self.schema)
- else:
- sfs = " - ".join(
- f"{f[0]}:{('has_value' if f[1].strip() else 'empty')}" for f in marc_field
- )
- pattern = " - ".join(f"{k}:'{bool(v)}'" for k, v in entity.items())
- self.migration_report.add(
- "IncompleteEntityMapping",
- f"{marc_field.tag} {sfs} ->>-->> {e_parent} {pattern} ",
- )
- # Experimental
- # self.add_entity_to_record(entity, e_parent, rec, self.schema)
+ else:
+ sfs = " - ".join(
+ f"{f[0]}:{('has_value' if f[1].strip() else 'empty')}" for f in marc_field
+ )
+ pattern = " - ".join(f"{k}:'{bool(v)}'" for k, v in entity.items())
+ self.migration_report.add(
+ "IncompleteEntityMapping",
+ f"{marc_field.tag} {sfs} ->>-->> {e_parent} {pattern} ",
+ )
+ # Experimental
+ # self.add_entity_to_record(entity, e_parent, rec, self.schema)

  def handle_suppression(
  self, folio_record, file_def: FileDefinition, only_discovery_suppress: bool = False
@@ -975,3 +977,34 @@ def is_array_of_strings(schema_property):
  def is_array_of_objects(schema_property):
  sc_prop_type = schema_property.get("type", "string")
  return sc_prop_type == "array" and schema_property["items"]["type"] == "object"
+
+ def entity_indicators_match(entity_mapping, marc_field):
+ """
+ Check if the indicators of the entity mapping match the indicators of the MARC field.
+ Entity mappings can limit the fields they are applied to by specifying indicator values that
+ must match the provided MARC field's indicators. If the entity mapping does not specify any
+ indicator values, it is assumed to match all MARC fields. Entity indicator values can be a
+ specific value or a wildcard "*", which matches any value.
+
+ This function compares the indicators of the entity mapping with the indicators of the MARC field.
+ If the entity does not specify any indicator values, the function returns True. If the entity does
+ specify indicator values, the function checks if the MARC field's indicators match the specified
+ values or if the specified values are wildcards. If both indicators match, the function returns True;
+ otherwise, it returns False.
+
+ Args:
+ entity_mapping (dict): _description_
+ marc_field (pymarc.Field): _description_
+
+ Returns:
+ bool: True if the indicators match, False otherwise.
+ """
+ if indicator_rule := [x["indicators"] for x in entity_mapping if "indicators" in x]:
+ return all(
+ [
+ (marc_field.indicator1 == indicator_rule[0]['ind1'] or indicator_rule[0]['ind1'] == "*"),
+ (marc_field.indicator2 == indicator_rule[0]['ind2'] or indicator_rule[0]['ind2'] == "*"),
+ ]
+ )
+ else:
+ return True
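A small usage sketch of the indicator check added above, assuming entity_indicators_match is importable from rules_mapper_base and a pymarc 5.x Field; the mapping fragment and field values are invented for illustration:

    from pymarc import Field, Subfield

    from folio_migration_tools.marc_rules_transformation.rules_mapper_base import (
        entity_indicators_match,
    )

    # One mapping element carries an "indicators" rule: apply this entity only when
    # ind1 is "4"; "*" is a wildcard that matches any second indicator.
    entity_mapping = [
        {"indicators": {"ind1": "4", "ind2": "*"}},
        {"target": "electronicAccess.uri", "subfield": ["u"]},
    ]

    marc_field = Field(
        tag="856",
        indicators=["4", "0"],
        subfields=[Subfield(code="u", value="https://example.org/resource")],
    )

    print(entity_indicators_match(entity_mapping, marc_field))  # True: ind1 matches, ind2 is "*"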
@@ -61,7 +61,10 @@ class RulesMapperHoldings(RulesMapperBase):
  self.boundwith_relationship_map = self.setup_boundwith_relationship_map(
  boundwith_relationship_map
  )
- self.location_map = location_map
+ self.location_map = self.validate_location_map(
+ location_map,
+ self.folio_client.locations,
+ )
  self.holdings_id_map: dict = {}
  self.ref_data_dicts: dict = {}
  self.fallback_holdings_type_id = self.task_configuration.fallback_holdings_type_id
@@ -132,6 +132,8 @@ class BatchPoster(MigrationTaskBase):
  ),
  ] = False

+ task_configuration: TaskConfiguration
+
  @staticmethod
  def get_object_type() -> FOLIONamespaces:
  return FOLIONamespaces.other
@@ -289,35 +291,57 @@ class BatchPoster(MigrationTaskBase):
  for i in range(0, len(batch), fetch_batch_size):
  batch_slice = batch[i:i + fetch_batch_size]
  fetch_tasks.append(
- client.get(
+ self.get_with_retry(
+ client,
  query_api,
  params={
  "query": f"id==({' OR '.join([record['id'] for record in batch_slice if 'id' in record])})",
  "limit": fetch_batch_size
  },
- headers=self.folio_client.okapi_headers
  )
  )
+
  responses = await asyncio.gather(*fetch_tasks)

  for response in responses:
- if response.status_code == 200:
- response_json = response.json()
- for record in response_json[object_type]:
- updates[record["id"]] = {
+ self.update_record_versions(object_type, updates, response)
+ for record in batch:
+ if record["id"] in updates:
+ record.update(updates[record["id"]])
+
+ @staticmethod
+ def update_record_versions(object_type, updates, response):
+ if response.status_code == 200:
+ response_json = response.json()
+ for record in response_json[object_type]:
+ updates[record["id"]] = {
  "_version": record["_version"],
  }
- if "status" in record:
- updates[record["id"]]["status"] = record["status"]
- else:
- logging.error(
+ if "status" in record:
+ updates[record["id"]]["status"] = record["status"]
+ if "lastCheckIn" in record:
+ updates[record["id"]]["lastCheckIn"] = record["lastCheckIn"]
+ else:
+ logging.error(
  "Failed to fetch current records. HTTP %s\t%s",
  response.status_code,
  response.text,
  )
- for record in batch:
- if record["id"] in updates:
- record.update(updates[record["id"]])
+
+ async def get_with_retry(self, client: httpx.AsyncClient, url: str, params: dict = {}):
+ retries = 3
+ for attempt in range(retries):
+ try:
+ response = await client.get(url, params=params, headers=self.folio_client.okapi_headers)
+ response.raise_for_status()
+ return response
+ except httpx.HTTPError as e:
+ if attempt < retries - 1:
+ logging.warning(f"Retrying due to {e}")
+ await asyncio.sleep(2 ** attempt)
+ else:
+ logging.error(f"Failed to connect after {retries} attempts: {e}")
+ raise

  def post_record_batch(self, batch, failed_recs_file, row):
  json_rec = json.loads(row.split("\t")[-1])
@@ -449,7 +473,7 @@ class BatchPoster(MigrationTaskBase):
  )
  logging.info(last_row)
  logging.info("=========Stack trace==============")
- traceback.logging.info_exc()
+ traceback.logging.info_exc() # type: ignore
  logging.info("=======================", flush=True)

  def post_batch(self, batch, failed_recs_file, num_records, recursion_depth=0):
@@ -368,12 +368,12 @@ class ItemsTransformer(MigrationTaskBase):
  self.handle_circiulation_notes(folio_rec, self.folio_client.current_user)
  self.handle_notes(folio_rec)
  if folio_rec["holdingsRecordId"] in self.mapper.boundwith_relationship_map:
- for idx, instance_id in enumerate(
+ for idx_, instance_id in enumerate(
  self.mapper.boundwith_relationship_map.get(
  folio_rec["holdingsRecordId"]
  )
  ):
- if idx == 0:
+ if idx_ == 0:
  bw_id = folio_rec["holdingsRecordId"]
  else:
  bw_id = self.mapper.generate_boundwith_holding_uuid(
@@ -3,9 +3,10 @@ from datetime import datetime
  from zoneinfo import ZoneInfo

  from dateutil import tz
- from dateutil.parser import parse
+ from dateutil.parser import parse, ParserError

  from folio_migration_tools.migration_report import MigrationReport
+ from folio_migration_tools.custom_exceptions import TransformationProcessError

  utc = ZoneInfo("UTC")

@@ -42,46 +43,47 @@ class LegacyLoan(object):

  self.tenant_timezone = tenant_timezone
  self.errors = []
+ self.row = row
  for prop in correct_headers:
  if prop not in legacy_loan_dict and prop not in optional_headers:
- self.errors.append(("Missing properties in legacy data", prop))
+ self.errors.append((f"Missing properties in legacy data {row=}", prop))
  if (
  prop != "next_item_status"
  and not legacy_loan_dict.get(prop, "").strip()
  and prop not in optional_headers
  ):
- self.errors.append(("Empty properties in legacy data", prop))
+ self.errors.append((f"Empty properties in legacy data {row=}", prop))
  try:
  temp_date_due: datetime = parse(legacy_loan_dict["due_date"])
  if temp_date_due.tzinfo != tz.UTC:
  temp_date_due = temp_date_due.replace(tzinfo=self.tenant_timezone)
  self.report(
- f"Provided due_date is not UTC, "
- f"setting tzinfo to tenant timezone ({self.tenant_timezone})"
+ f"Provided due_date is not UTC in {row=}, "
+ f"setting tz-info to tenant timezone ({self.tenant_timezone})"
  )
  if temp_date_due.hour == 0 and temp_date_due.minute == 0:
  temp_date_due = temp_date_due.replace(hour=23, minute=59)
  self.report(
- "Hour and minute not specified for due date. "
+ f"Hour and minute not specified for due date in {row=}. "
  "Assuming end of local calendar day (23:59)..."
  )
- except Exception as ee:
+ except (ParserError, OverflowError) as ee:
  logging.error(ee)
- self.errors.append(("Parse date failure. Setting UTC NOW", "due_date"))
+ self.errors.append((f"Parse date failure in {row=}. Setting UTC NOW", "due_date"))
  temp_date_due = datetime.now(ZoneInfo("UTC"))
  try:
  temp_date_out: datetime = parse(legacy_loan_dict["out_date"])
  if temp_date_out.tzinfo != tz.UTC:
  temp_date_out = temp_date_out.replace(tzinfo=self.tenant_timezone)
  self.report(
- f"Provided out_date is not UTC, "
- f"setting tzinfo to tenant timezone ({self.tenant_timezone})"
+ f"Provided out_date is not UTC in {row=}, "
+ f"setting tz-info to tenant timezone ({self.tenant_timezone})"
  )
- except Exception:
+ except (ParserError, OverflowError):
  temp_date_out = datetime.now(
  ZoneInfo("UTC")
  ) # TODO: Consider moving this assignment block above the temp_date_due
- self.errors.append(("Parse date failure. Setting UTC NOW", "out_date"))
+ self.errors.append((f"Parse date failure in {row=}. Setting UTC NOW", "out_date"))

  # good to go, set properties
  self.item_barcode: str = legacy_loan_dict["item_barcode"].strip()
@@ -94,7 +96,7 @@ class LegacyLoan(object):
  self.renewal_count = self.set_renewal_count(legacy_loan_dict)
  self.next_item_status = legacy_loan_dict.get("next_item_status", "").strip()
  if self.next_item_status not in legal_statuses:
- self.errors.append(("Not an allowed status", self.next_item_status))
+ self.errors.append((f"Not an allowed status {row=}", self.next_item_status))
  self.service_point_id = (
  legacy_loan_dict["service_point_id"]
  if legacy_loan_dict.get("service_point_id", "")
@@ -107,23 +109,19 @@ class LegacyLoan(object):
  try:
  return int(renewal_count)
  except ValueError:
- self.report(
- f"Unresolvable {renewal_count=} was replaced with 0.")
+ self.report(f"Unresolvable {renewal_count=} was replaced with 0.")
  else:
  self.report(f"Missing renewal count was replaced with 0.")
  return 0

  def correct_for_1_day_loans(self):
- try:
- if self.due_date.date() <= self.out_date.date():
- if self.due_date.hour == 0:
- self.due_date = self.due_date.replace(hour=23, minute=59)
- if self.out_date.hour == 0:
- self.out_date = self.out_date.replace(hour=0, minute=1)
- if self.due_date <= self.out_date:
- raise ValueError("Due date is before out date")
- except Exception:
- self.errors.append(("Time alignment issues", "both dates"))
+ if self.due_date.date() <= self.out_date.date():
+ if self.due_date.hour == 0:
+ self.due_date = self.due_date.replace(hour=23, minute=59)
+ if self.out_date.hour == 0:
+ self.out_date = self.out_date.replace(hour=0, minute=1)
+ if self.due_date <= self.out_date:
+ raise TransformationProcessError(self.row, "Due date is before out date")

  def to_dict(self):
  return {
@@ -140,8 +138,8 @@ class LegacyLoan(object):
  if self.tenant_timezone != ZoneInfo("UTC"):
  self.due_date = self.due_date.astimezone(ZoneInfo("UTC"))
  self.out_date = self.out_date.astimezone(ZoneInfo("UTC"))
- except Exception:
- self.errors.append(("UTC correction issues", "both dates"))
+ except TypeError:
+ self.errors.append((f"UTC correction issues {self.row}", "both dates"))

  def report(self, what_to_report: str):
  self.migration_report.add("Details", what_to_report)
@@ -1,3 +0,0 @@
- import importlib.metadata
-
- __version__ = importlib.metadata.version("folio_migration_tools")