folio-migration-tools 1.9.10__py3-none-any.whl → 1.10.0b1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (52)
  1. folio_migration_tools/__init__.py +3 -4
  2. folio_migration_tools/__main__.py +44 -31
  3. folio_migration_tools/circulation_helper.py +114 -105
  4. folio_migration_tools/custom_dict.py +2 -2
  5. folio_migration_tools/custom_exceptions.py +4 -5
  6. folio_migration_tools/folder_structure.py +1 -1
  7. folio_migration_tools/helper.py +1 -1
  8. folio_migration_tools/library_configuration.py +65 -37
  9. folio_migration_tools/mapper_base.py +38 -25
  10. folio_migration_tools/mapping_file_transformation/courses_mapper.py +1 -1
  11. folio_migration_tools/mapping_file_transformation/holdings_mapper.py +7 -3
  12. folio_migration_tools/mapping_file_transformation/item_mapper.py +13 -26
  13. folio_migration_tools/mapping_file_transformation/manual_fee_fines_mapper.py +1 -2
  14. folio_migration_tools/mapping_file_transformation/mapping_file_mapper_base.py +13 -11
  15. folio_migration_tools/mapping_file_transformation/order_mapper.py +6 -5
  16. folio_migration_tools/mapping_file_transformation/organization_mapper.py +3 -3
  17. folio_migration_tools/mapping_file_transformation/user_mapper.py +43 -28
  18. folio_migration_tools/marc_rules_transformation/conditions.py +84 -70
  19. folio_migration_tools/marc_rules_transformation/holdings_statementsparser.py +13 -5
  20. folio_migration_tools/marc_rules_transformation/hrid_handler.py +3 -2
  21. folio_migration_tools/marc_rules_transformation/marc_file_processor.py +14 -22
  22. folio_migration_tools/marc_rules_transformation/rules_mapper_authorities.py +1 -0
  23. folio_migration_tools/marc_rules_transformation/rules_mapper_base.py +46 -36
  24. folio_migration_tools/marc_rules_transformation/rules_mapper_bibs.py +25 -15
  25. folio_migration_tools/marc_rules_transformation/rules_mapper_holdings.py +62 -32
  26. folio_migration_tools/migration_report.py +1 -1
  27. folio_migration_tools/migration_tasks/authority_transformer.py +1 -2
  28. folio_migration_tools/migration_tasks/batch_poster.py +78 -68
  29. folio_migration_tools/migration_tasks/bibs_transformer.py +12 -7
  30. folio_migration_tools/migration_tasks/courses_migrator.py +2 -3
  31. folio_migration_tools/migration_tasks/holdings_csv_transformer.py +14 -15
  32. folio_migration_tools/migration_tasks/holdings_marc_transformer.py +11 -21
  33. folio_migration_tools/migration_tasks/items_transformer.py +17 -30
  34. folio_migration_tools/migration_tasks/loans_migrator.py +53 -131
  35. folio_migration_tools/migration_tasks/migration_task_base.py +33 -55
  36. folio_migration_tools/migration_tasks/orders_transformer.py +21 -39
  37. folio_migration_tools/migration_tasks/organization_transformer.py +9 -18
  38. folio_migration_tools/migration_tasks/requests_migrator.py +11 -15
  39. folio_migration_tools/migration_tasks/reserves_migrator.py +1 -1
  40. folio_migration_tools/migration_tasks/user_transformer.py +10 -15
  41. folio_migration_tools/task_configuration.py +6 -7
  42. folio_migration_tools/transaction_migration/legacy_loan.py +15 -27
  43. folio_migration_tools/transaction_migration/legacy_request.py +1 -1
  44. {folio_migration_tools-1.9.10.dist-info → folio_migration_tools-1.10.0b1.dist-info}/METADATA +18 -28
  45. {folio_migration_tools-1.9.10.dist-info → folio_migration_tools-1.10.0b1.dist-info}/RECORD +47 -50
  46. folio_migration_tools-1.10.0b1.dist-info/WHEEL +4 -0
  47. folio_migration_tools-1.10.0b1.dist-info/entry_points.txt +3 -0
  48. folio_migration_tools/test_infrastructure/__init__.py +0 -0
  49. folio_migration_tools/test_infrastructure/mocked_classes.py +0 -406
  50. folio_migration_tools-1.9.10.dist-info/WHEEL +0 -4
  51. folio_migration_tools-1.9.10.dist-info/entry_points.txt +0 -3
  52. folio_migration_tools-1.9.10.dist-info/licenses/LICENSE +0 -21
folio_migration_tools/migration_tasks/batch_poster.py
@@ -182,19 +182,27 @@ class BatchPoster(MigrationTaskBase):
                 ),
             ),
         ] = True
-        patch_existing_records: Annotated[bool, Field(
-            title="Patch existing records",
-            description=(
-                "Toggles whether or not to patch existing records "
-                "during the upsert process. Defaults to False"
+        patch_existing_records: Annotated[
+            bool,
+            Field(
+                title="Patch existing records",
+                description=(
+                    "Toggles whether or not to patch existing records "
+                    "during the upsert process. Defaults to False"
+                ),
             ),
-        )] = False
-        patch_paths: Annotated[List[str], Field(
-            title="Patch paths",
-            description=(
-                "A list of fields in JSON Path notation to patch during the upsert process (leave off the $). If empty, all fields will be patched. Examples: ['statisticalCodeIds', 'administrativeNotes', 'instanceStatusId']"
+        ] = False
+        patch_paths: Annotated[
+            List[str],
+            Field(
+                title="Patch paths",
+                description=(
+                    "A list of fields in JSON Path notation to patch during the upsert process "
+                    "(leave off the $). If empty, all fields will be patched. Examples: "
+                    "['statisticalCodeIds', 'administrativeNotes', 'instanceStatusId']"
+                ),
             ),
-        )] = []
+        ] = []

     task_configuration: TaskConfiguration

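For orientation: the two new options ride along with the existing upsert toggle. A hypothetical BatchPoster task configuration using them might look like the sketch below; the camelCase key names mirror the project's usual task configuration style, and every key other than upsert, patchExistingRecords, and patchPaths is an assumption, not something this diff confirms.

    task_configuration = {
        "name": "post_instances",
        "migrationTaskType": "BatchPoster",
        "objectType": "Instances",
        "batchSize": 250,
        "upsert": True,
        "patchExistingRecords": True,
        "patchPaths": ["statisticalCodeIds", "administrativeNotes", "instanceStatusId"],
    }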
@@ -223,7 +231,8 @@ class BatchPoster(MigrationTaskBase):
             self.query_params["upsert"] = self.task_configuration.upsert
         elif self.task_configuration.upsert and not self.api_info["supports_upsert"]:
             logging.info(
-                "Upsert is not supported for this object type. Query parameter will not be set.")
+                "Upsert is not supported for this object type. Query parameter will not be set."
+            )
         self.snapshot_id = str(uuid4())
         self.failed_objects: list = []
         self.batch_size = self.task_configuration.batch_size
@@ -241,18 +250,18 @@ class BatchPoster(MigrationTaskBase):
         self.starting_record_count_in_folio: Optional[int] = None
         self.finished_record_count_in_folio: Optional[int] = None

-    def do_work(self):
+    def do_work(self):  # noqa: C901
         with self.folio_client.get_folio_http_client() as httpx_client:
             self.http_client = httpx_client
             with open(
-                self.folder_structure.failed_recs_path, "w", encoding='utf-8'
+                self.folder_structure.failed_recs_path, "w", encoding="utf-8"
             ) as failed_recs_file:
                 self.get_starting_record_count()
                 try:
                     batch = []
                     if self.task_configuration.object_type == "SRS":
                         self.create_snapshot()
-                    for idx, file_def in enumerate(self.task_configuration.files):
+                    for idx, file_def in enumerate(self.task_configuration.files):  # noqa: B007
                         path = self.folder_structure.results_folder / file_def.file_name
                         with open(path) as rows:
                             logging.info("Running %s", path)
@@ -323,10 +332,10 @@ class BatchPoster(MigrationTaskBase):

     @staticmethod
     def set_consortium_source(json_rec):
-        if json_rec['source'] == 'MARC':
-            json_rec['source'] = 'CONSORTIUM-MARC'
-        elif json_rec['source'] == 'FOLIO':
-            json_rec['source'] = 'CONSORTIUM-FOLIO'
+        if json_rec["source"] == "MARC":
+            json_rec["source"] = "CONSORTIUM-MARC"
+        elif json_rec["source"] == "FOLIO":
+            json_rec["source"] = "CONSORTIUM-FOLIO"

     def set_version(self, batch, query_api, object_type) -> None:
         """
@@ -359,17 +368,16 @@ class BatchPoster(MigrationTaskBase):
         existing_records = {}
         async with httpx.AsyncClient(base_url=self.folio_client.gateway_url) as client:
             for i in range(0, len(batch), fetch_batch_size):
-                batch_slice = batch[i:i + fetch_batch_size]
+                batch_slice = batch[i : i + fetch_batch_size]
                 fetch_tasks.append(
                     self.get_with_retry(
                         client,
                         query_api,
                         params={
                             "query": (
-                                "id==("
-                                f"{' OR '.join([r['id'] for r in batch_slice if 'id' in r])})"
+                                f"id==({' OR '.join([r['id'] for r in batch_slice if 'id' in r])})"
                             ),
-                            "limit": fetch_batch_size
+                            "limit": fetch_batch_size,
                         },
                     )
                 )
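The version fetch above builds one CQL query per slice of the batch, ORing the record ids together and capping the page size at the slice length. A standalone sketch of just the query construction, with made-up ids:

    batch = [{"id": "a1"}, {"id": "b2"}, {"version_only": True}, {"id": "c3"}]
    fetch_batch_size = 2

    for i in range(0, len(batch), fetch_batch_size):
        batch_slice = batch[i : i + fetch_batch_size]
        # Records without an id are skipped, as in set_version above.
        query = f"id==({' OR '.join([r['id'] for r in batch_slice if 'id' in r])})"
        print(query)
    # id==(a1 OR b2)
    # id==(c3)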
@@ -390,7 +398,7 @@ class BatchPoster(MigrationTaskBase):
             new_record (dict): The new record to be updated.
             existing_record (dict): The existing record to patch from.
             patch_paths (List[str]): List of fields in JSON Path notation (e.g., ['statisticalCodeIds', 'administrativeNotes', 'instanceStatusId']) to patch during the upsert process. If empty, all fields will be patched.
-        """
+        """  # noqa: E501
         updates = {}
         updates.update(existing_record)
         keep_existing = {}
@@ -412,7 +420,9 @@ class BatchPoster(MigrationTaskBase):
         new_record.update(updates)

     @staticmethod
-    def collect_existing_records_for_upsert(object_type: str, response: httpx.Response, existing_records: dict):
+    def collect_existing_records_for_upsert(
+        object_type: str, response: httpx.Response, existing_records: dict
+    ):
         if response.status_code == 200:
             response_json = response.json()
             for record in response_json[object_type]:
@@ -458,12 +468,23 @@ class BatchPoster(MigrationTaskBase):

     def prepare_record_for_upsert(self, new_record: dict, existing_record: dict):
         if "source" in existing_record and "MARC" in existing_record["source"]:
-            if self.task_configuration.patch_paths:
+            patch_paths = [
+                x
+                for x in self.task_configuration.patch_paths
+                if ("suppress" in x.lower() or x.lower() == "deleted")
+            ]
+            if patch_paths:
+                logging.debug(
+                    "Record %s is a MARC record, only suppression related fields will be patched",
+                    existing_record["id"],
+                )
+            else:
                 logging.debug(
                     "Record %s is a MARC record, patch_paths will be ignored",
                     existing_record["id"],
                 )
-            self.patch_record(new_record, existing_record, ["statisticalCodeIds", "administrativeNotes", "instanceStatusId"])
+            patch_paths.extend(["statisticalCodeIds", "administrativeNotes", "instanceStatusId"])
+            self.patch_record(new_record, existing_record, patch_paths)
         elif self.task_configuration.patch_existing_records:
             self.patch_record(new_record, existing_record, self.task_configuration.patch_paths)
         else:
@@ -471,7 +492,11 @@ class BatchPoster(MigrationTaskBase):
                 "_version": existing_record["_version"],
             }
             self.keep_existing_fields(updates, existing_record)
-            keep_new = {k: v for k, v in new_record.items() if k in ["statisticalCodeIds", "administrativeNotes"]}
+            keep_new = {
+                k: v
+                for k, v in new_record.items()
+                if k in ["statisticalCodeIds", "administrativeNotes"]
+            }
             keep_existing = {}
             self.handle_upsert_for_statistical_codes(existing_record, keep_existing)
             self.handle_upsert_for_administrative_notes(existing_record, keep_existing)
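In short: for records whose source contains MARC, the configured patch paths are narrowed to suppression-related ones (or the literal "deleted"), and three hard-coded fields are always appended. The same filter as a standalone function, reimplemented from the hunk above for illustration:

    def narrow_patch_paths_for_marc(configured_paths):
        # Keep only suppression-related paths, or the literal "deleted"...
        kept = [p for p in configured_paths if "suppress" in p.lower() or p.lower() == "deleted"]
        # ...then always patch these three fields on MARC-sourced records.
        kept.extend(["statisticalCodeIds", "administrativeNotes", "instanceStatusId"])
        return kept

    print(narrow_patch_paths_for_marc(["discoverySuppress", "title", "deleted"]))
    # ['discoverySuppress', 'deleted', 'statisticalCodeIds',
    #  'administrativeNotes', 'instanceStatusId']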
@@ -492,13 +517,14 @@ class BatchPoster(MigrationTaskBase):
         for attempt in range(retries):
             try:
                 response = await client.get(
-                    url, params=params, headers=self.folio_client.okapi_headers)
+                    url, params=params, headers=self.folio_client.okapi_headers
+                )
                 response.raise_for_status()
                 return response
             except httpx.HTTPError as e:
                 if attempt < retries - 1:
                     logging.warning(f"Retrying due to {e}")
-                    await asyncio.sleep(2 ** attempt)
+                    await asyncio.sleep(2**attempt)
                 else:
                     logging.error(f"Failed to connect after {retries} attempts: {e}")
                     raise
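The retry helper above sleeps 2**attempt seconds between attempts (1s, 2s, 4s, ...) and re-raises once they are exhausted. The same pattern as a self-contained coroutine; the helper name and the broad Exception catch are illustrative, where the diff catches httpx.HTTPError specifically:

    import asyncio
    import logging

    async def with_retry(make_request, retries=3):
        """Run an async callable, retrying with exponential backoff."""
        for attempt in range(retries):
            try:
                return await make_request()
            except Exception as e:
                if attempt < retries - 1:
                    logging.warning(f"Retrying due to {e}")
                    await asyncio.sleep(2**attempt)  # 1s, 2s, 4s, ...
                else:
                    logging.error(f"Failed after {retries} attempts: {e}")
                    raise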
@@ -563,11 +589,11 @@ class BatchPoster(MigrationTaskBase):
         object_types.update(task_configuration.extradata_endpoints)
         if object_name == "instructor":
             instructor = json.loads(string_object)
-            return f'coursereserves/courselistings/{instructor["courseListingId"]}/instructors'
+            return f"coursereserves/courselistings/{instructor['courseListingId']}/instructors"

         if object_name == "interfaceCredential":
             credential = json.loads(string_object)
-            return f'organizations-storage/interfaces/{credential["interfaceId"]}/credentials'
+            return f"organizations-storage/interfaces/{credential['interfaceId']}/credentials"

         return object_types[object_name]

@@ -638,7 +664,7 @@ class BatchPoster(MigrationTaskBase):

     def post_batch(self, batch, failed_recs_file, num_records, recursion_depth=0):
         if self.query_params.get("upsert", False) and self.api_info.get("query_endpoint", ""):
-            self.set_version(batch, self.api_info['query_endpoint'], self.api_info['object_name'])
+            self.set_version(batch, self.api_info["query_endpoint"], self.api_info["object_name"])
         response = self.do_post(batch)
         if response.status_code == 401:
             logging.error("Authorization failed (%s). Fetching new auth token...", response.text)
@@ -762,7 +788,7 @@ class BatchPoster(MigrationTaskBase):
                 url,
                 json=payload,
                 headers=self.folio_client.okapi_headers,
-                params=self.query_params
+                params=self.query_params,
             )
         else:
             return httpx.post(
@@ -770,7 +796,8 @@ class BatchPoster(MigrationTaskBase):
                 headers=self.okapi_headers,
                 json=payload,
                 params=self.query_params,
-                timeout=None)
+                timeout=None,
+            )

     def get_current_record_count_in_folio(self):
         if "query_endpoint" in self.api_info:
@@ -778,9 +805,7 @@ class BatchPoster(MigrationTaskBase):
             query_params = {"query": "cql.allRecords=1", "limit": 0}
             if self.http_client and not self.http_client.is_closed:
                 res = self.http_client.get(
-                    url,
-                    headers=self.folio_client.okapi_headers,
-                    params=query_params
+                    url, headers=self.folio_client.okapi_headers, params=query_params
                 )
             else:
                 res = httpx.get(url, headers=self.okapi_headers, params=query_params, timeout=None)
@@ -799,7 +824,7 @@ class BatchPoster(MigrationTaskBase):
         else:
             raise ValueError(
                 "No 'query_endpoint' available for %s. Cannot get current record count.",
-                self.task_configuration.object_type
+                self.task_configuration.object_type,
             )

     def get_starting_record_count(self):
@@ -809,7 +834,7 @@ class BatchPoster(MigrationTaskBase):
         else:
             logging.info(
                 "No query_endpoint available for %s. Cannot get starting record count.",
-                self.task_configuration.object_type
+                self.task_configuration.object_type,
             )

     def get_finished_record_count(self):
@@ -819,7 +844,7 @@ class BatchPoster(MigrationTaskBase):
         else:
             logging.info(
                 "No query_endpoint available for %s. Cannot get ending record count.",
-                self.task_configuration.object_type
+                self.task_configuration.object_type,
             )

     def wrap_up(self):
@@ -842,7 +867,7 @@ class BatchPoster(MigrationTaskBase):
         if self.starting_record_count_in_folio:
             self.get_finished_record_count()
             total_on_server = (
-                self.finished_record_count_in_folio - self.starting_record_count_in_folio
+                self.finished_record_count_in_folio - self.starting_record_count_in_folio
             )
             discrepancy = self.processed - self.num_failures - total_on_server
             if discrepancy != 0:
@@ -893,9 +918,8 @@ class BatchPoster(MigrationTaskBase):
             temp_start = self.start_datetime
             self.task_configuration.rerun_failed_records = False
             self.__init__(
-                self.task_configuration,
-                self.library_configuration,
-                self.folio_client)
+                self.task_configuration, self.library_configuration, self.folio_client
+            )
             self.performing_rerun = True
             self.migration_report = temp_report
             self.start_datetime = temp_start
@@ -1085,27 +1109,12 @@ def get_api_info(object_type: str, use_safe: bool = True):
     except KeyError:
         key_string = ", ".join(choices.keys())
         logging.error(
-            f"Wrong type. Only one of {key_string} are allowed, "
-            f"received {object_type=} instead"
+            f"Wrong type. Only one of {key_string} are allowed, received {object_type=} instead"
         )
         logging.error("Halting")
         sys.exit(1)


-def chunks(records, number_of_chunks):
-    """Yield successive n-sized chunks from lst.
-
-    Args:
-        records (_type_): _description_
-        number_of_chunks (_type_): _description_
-
-    Yields:
-        _type_: _description_
-    """
-    for i in range(0, len(records), number_of_chunks):
-        yield records[i: i + number_of_chunks]
-
-
 def get_human_readable(size, precision=2):
     suffixes = ["B", "KB", "MB", "GB", "TB"]
     suffix_index = 0
@@ -1122,15 +1131,16 @@ def get_req_size(response: httpx.Response):
     size += response.request.content.decode("utf-8") or ""
     return get_human_readable(len(size.encode("utf-8")))

+
 def parse_path(path):
     """
     Parses a path like 'foo.bar[0].baz' into ['foo', 'bar', 0, 'baz']
     """
     tokens = []
     # Split by dot, then extract indices
-    for part in path.split('.'):
+    for part in path.split("."):
         # Find all [index] parts
-        matches = re.findall(r'([^\[\]]+)|\[(\d+)\]', part)
+        matches = re.findall(r"([^\[\]]+)|\[(\d+)\]", part)
         for name, idx in matches:
             if name:
                 tokens.append(name)
@@ -1138,12 +1148,14 @@ def parse_path(path):
                 tokens.append(int(idx))
     return tokens

+
 def get_by_path(data, path):
     keys = parse_path(path)
     for key in keys:
         data = data[key]
     return data

+
 def set_by_path(data, path, value):
     keys = parse_path(path)
     for i, key in enumerate(keys[:-1]):
@@ -1164,6 +1176,7 @@ def set_by_path(data, path, value):
         else:
             data[last_key] = value

+
 def extract_paths(data, paths):
     result = {}
     for path in paths:
@@ -1174,6 +1187,7 @@ def set_by_path(data, path, value):
             continue
     return result

+
 def deep_update(target, patch):
     """
     Recursively update target dict/list with values from patch dict/list.
@@ -1181,11 +1195,7 @@ def deep_update(target, patch):
     """
     if isinstance(patch, dict):
         for k, v in patch.items():
-            if (
-                k in target
-                and isinstance(target[k], (dict, list))
-                and isinstance(v, (dict, list))
-            ):
+            if k in target and isinstance(target[k], (dict, list)) and isinstance(v, (dict, list)):
                 deep_update(target[k], v)
             else:
                 target[k] = v
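Taken together, parse_path, get_by_path, set_by_path, extract_paths, and deep_update give batch_poster a small JSON-path-style toolkit behind the patch_paths feature. A usage sketch, assuming the functions above are in scope and that the elided middle of set_by_path walks already-existing containers:

    data = {"foo": {"bar": [{"baz": 1}]}}

    print(parse_path("foo.bar[0].baz"))         # ['foo', 'bar', 0, 'baz']
    print(get_by_path(data, "foo.bar[0].baz"))  # 1

    set_by_path(data, "foo.bar[0].qux", 2)
    print(data["foo"]["bar"][0])                # {'baz': 1, 'qux': 2}

    deep_update(data, {"foo": {"hello": "world"}})
    print(data["foo"]["hello"])                 # world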
folio_migration_tools/migration_tasks/bibs_transformer.py
@@ -16,7 +16,10 @@ from folio_migration_tools.marc_rules_transformation.marc_file_processor import
 from folio_migration_tools.marc_rules_transformation.rules_mapper_bibs import (
     BibsRulesMapper,
 )
-from folio_migration_tools.migration_tasks.migration_task_base import MarcTaskConfigurationBase, MigrationTaskBase
+from folio_migration_tools.migration_tasks.migration_task_base import (
+    MarcTaskConfigurationBase,
+    MigrationTaskBase,
+)


 class BibsTransformer(MigrationTaskBase):
@@ -26,7 +29,7 @@ class BibsTransformer(MigrationTaskBase):
             Field(
                 title="ILS flavour",
                 description="The type of ILS you are migrating records from.",
-                alias="ils_flavor"
+                alias="ils_flavor",
             ),
         ]
         custom_bib_id_field: Annotated[
@@ -68,11 +71,11 @@ class BibsTransformer(MigrationTaskBase):
             Field(
                 title="Generate a MARC file for data import overlay of instances",
                 description=(
-                    "If set to true, the process will generate a file of binary MARC records that can"
-                    "be imported into FOLIO using the Data Import APIs. If set to false, only a file"
-                    "of FOLIO instance records (and optional SRS records) will be generated."
+                    "If set to true, the process will generate a file of binary MARC records that "
+                    "can be imported into FOLIO using the Data Import APIs. If set to false, only "
+                    "a file of FOLIO instance records (and optional SRS records) will be generated."  # noqa: E501
                 ),
-            )
+            ),
         ] = True
         parse_cataloged_date: Annotated[
             bool,
@@ -130,7 +133,9 @@ class BibsTransformer(MigrationTaskBase):
         self.check_source_files(
             self.folder_structure.legacy_records_folder, self.task_configuration.files
         )
-        self.mapper = BibsRulesMapper(self.folio_client, library_config, self.task_configuration, statcode_mapping)
+        self.mapper = BibsRulesMapper(
+            self.folio_client, library_config, self.task_configuration, statcode_mapping
+        )
         self.bib_ids: set = set()
         if (
             self.task_configuration.reset_hrid_settings

folio_migration_tools/migration_tasks/courses_migrator.py
@@ -77,8 +77,7 @@ class CoursesMigrator(MigrationTaskBase):
             Field(
                 title="Look up instructor",
                 description=(
-                    "Flag to indicate whether to look up instructors. "
-                    "By default is False."
+                    "Flag to indicate whether to look up instructors. By default is False."
                 ),
             ),
         ] = False
@@ -91,7 +90,7 @@ class CoursesMigrator(MigrationTaskBase):
         self,
         task_configuration: TaskConfiguration,
         library_config: LibraryConfiguration,
-        folio_client
+        folio_client,
     ):
         csv.register_dialect("tsv", delimiter="\t")
         self.task_configuration = task_configuration

folio_migration_tools/migration_tasks/holdings_csv_transformer.py
@@ -57,10 +57,7 @@ class HoldingsCsvTransformer(MigrationTaskBase):
             HridHandling,
             Field(
                 title="HRID handling",
-                description=(
-                    "Determining how the HRID generation "
-                    "should be handled."
-                ),
+                description=("Determining how the HRID generation should be handled."),
             ),
         ]
         files: Annotated[
@@ -96,8 +93,7 @@ class HoldingsCsvTransformer(MigrationTaskBase):
             Field(
                 title="Previously generated holdings files",
                 description=(
-                    "List of previously generated holdings files. "
-                    "By default is empty list."
+                    "List of previously generated holdings files. By default is empty list."
                 ),
             ),
         ] = []
@@ -145,8 +141,7 @@ class HoldingsCsvTransformer(MigrationTaskBase):
             Field(
                 title="Reset HRID settings",
                 description=(
-                    "At the end of the run reset "
-                    "FOLIO with the HRID settings. Default is FALSE."
+                    "At the end of the run reset FOLIO with the HRID settings. Default is FALSE."
                 ),
             ),
         ] = False
@@ -155,8 +150,7 @@ class HoldingsCsvTransformer(MigrationTaskBase):
             Field(
                 title="Update HRID settings",
                 description=(
-                    "At the end of the run update "
-                    "FOLIO with the HRID settings. Default is TRUE."
+                    "At the end of the run update FOLIO with the HRID settings. Default is TRUE."
                 ),
            ),
        ] = True
@@ -166,7 +160,7 @@ class HoldingsCsvTransformer(MigrationTaskBase):
                 title="Statistical code map file name",
                 description=(
                     "Path to the file containing the mapping of statistical codes. "
-                    "The file should be in TSV format with legacy_stat_code and folio_code columns."
+                    "The file should be in TSV format with legacy_stat_code and folio_code columns."  # noqa: E501
                 ),
             ),
         ] = ""
@@ -290,7 +284,8 @@ class HoldingsCsvTransformer(MigrationTaskBase):

     def load_location_map(self):
         with open(
-            self.folder_structure.mapping_files_folder / self.task_configuration.location_map_file_name
+            self.folder_structure.mapping_files_folder
+            / self.task_configuration.location_map_file_name
         ) as location_map_f:
             return self.load_ref_data_map_from_file(
                 location_map_f, "Found %s rows in location map"
@@ -304,7 +299,8 @@ class HoldingsCsvTransformer(MigrationTaskBase):

     def load_mapped_fields(self):
         with open(
-            self.folder_structure.mapping_files_folder / self.task_configuration.holdings_map_file_name
+            self.folder_structure.mapping_files_folder
+            / self.task_configuration.holdings_map_file_name  # noqa: E501
         ) as holdings_mapper_f:
             holdings_map = json.load(holdings_mapper_f)
             logging.info("%s fields in holdings mapping file map", len(holdings_map["data"]))
@@ -332,7 +328,8 @@ class HoldingsCsvTransformer(MigrationTaskBase):
             print(f"\n{error_str}\nHalting")
             sys.exit(1)
         logging.info(
-            f"processed {self.total_records:,} records in {len(self.task_configuration.files)} files"
+            f"processed {self.total_records:,} records in "
+            f"{len(self.task_configuration.files)} files"
         )

     def wrap_up(self):
@@ -379,7 +376,9 @@ class HoldingsCsvTransformer(MigrationTaskBase):
         properties = holdings_schema["properties"].keys()
         logging.info(properties)
         logging.info(self.task_configuration.holdings_merge_criteria)
-        res = [mc for mc in self.task_configuration.holdings_merge_criteria if mc not in properties]
+        res = [
+            mc for mc in self.task_configuration.holdings_merge_criteria if mc not in properties
+        ]
         if any(res):
             logging.critical(
                 (

folio_migration_tools/migration_tasks/holdings_marc_transformer.py
@@ -19,7 +19,7 @@ from folio_migration_tools.marc_rules_transformation.rules_mapper_holdings impor
 )
 from folio_migration_tools.migration_tasks.migration_task_base import (
     MarcTaskConfigurationBase,
-    MigrationTaskBase
+    MigrationTaskBase,
 )


@@ -38,18 +38,14 @@ class HoldingsMarcTransformer(MigrationTaskBase):
             str,
             Field(
                 title="Migration task type",
-                description=(
-                    "The type of migration task you want to perform"
-                ),
+                description=("The type of migration task you want to perform"),
             ),
         ]
         files: Annotated[
             List[FileDefinition],
             Field(
                 title="Source files",
-                description=(
-                    "List of MARC21 files with holdings records"
-                ),
+                description=("List of MARC21 files with holdings records"),
             ),
         ]
         hrid_handling: Annotated[
@@ -167,9 +163,7 @@ class HoldingsMarcTransformer(MigrationTaskBase):
             str,
             Field(
                 title="MARC Holdings Note type",
-                description=(
-                    "The name of the note type to use for MARC (MRK) statements. "
-                ),
+                description=("The name of the note type to use for MARC (MRK) statements. "),
             ),
         ] = "Original MARC holdings statements"
         include_mfhd_mrk_as_note: Annotated[
@@ -187,9 +181,7 @@ class HoldingsMarcTransformer(MigrationTaskBase):
             str,
             Field(
                 title="MARC Record (as MARC Maker Representation) note type",
-                description=(
-                    "The name of the note type to use for MFHD (MRK) note. "
-                ),
+                description=("The name of the note type to use for MFHD (MRK) note. "),
             ),
         ] = "Original MFHD Record"
         include_mfhd_mrc_as_note: Annotated[
@@ -208,9 +200,7 @@ class HoldingsMarcTransformer(MigrationTaskBase):
             str,
             Field(
                 title="MARC Record (as MARC21 decoded string) note type",
-                description=(
-                    "The name of the note type to use for MFHD (MRC) note. "
-                ),
+                description=("The name of the note type to use for MFHD (MRC) note. "),
             ),
         ] = "Original MFHD (MARC21)"

@@ -276,12 +266,12 @@ class HoldingsMarcTransformer(MigrationTaskBase):
                 "Rows in Bound with relationship map: %s",
                 len(self.boundwith_relationship_map_rows),
             )
-        except FileNotFoundError:
+        except FileNotFoundError as fnfe:
             raise TransformationProcessError(
                 "",
                 i18n.t("Provided boundwith relationship file not found"),
                 self.task_configuration.boundwith_relationship_file_path,
-            )
+            ) from fnfe

         location_map_path = (
             self.folder_structure.mapping_files_folder
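Both FileNotFoundError handlers in this file (the hunk above and the supplemental-rules hunk below) now bind the caught error and re-raise with `from fnfe`, which records the original exception as __cause__ so the traceback reads "The above exception was the direct cause of the following exception" rather than the misleading "During handling of the above exception, another exception occurred". A minimal illustration, with a stub standing in for the project's TransformationProcessError:

    class TransformationProcessError(Exception):
        """Stub for the project's exception class."""

    def load_rules(path):
        try:
            with open(path) as f:
                return f.read()
        except FileNotFoundError as fnfe:
            # "from fnfe" chains the original error as __cause__.
            raise TransformationProcessError(f"Provided file not found: {path}") from fnfe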
@@ -302,7 +292,7 @@ class HoldingsMarcTransformer(MigrationTaskBase):
             self.library_configuration,
             self.instance_id_map,
             self.boundwith_relationship_map_rows,
-            statcode_mapping
+            statcode_mapping,
         )
         self.add_supplemental_mfhd_mappings()
         if (
@@ -330,12 +320,12 @@ class HoldingsMarcTransformer(MigrationTaskBase):
                 "Supplemental MFHD mapping rules file must contain a dictionary",
                 json.dumps(new_rules),
             )
-        except FileNotFoundError:
+        except FileNotFoundError as fnfe:
             raise TransformationProcessError(
                 "",
                 "Provided supplemental MFHD mapping rules file not found",
                 self.task_configuration.supplemental_mfhd_mapping_rules_file,
-            )
+            ) from fnfe
         else:
             new_rules = {}
         self.mapper.integrate_supplemental_mfhd_mappings(new_rules)