folio-migration-tools 1.10.0b6__py3-none-any.whl → 1.10.1__py3-none-any.whl

This diff compares publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in the public registry.
Files changed (27)
  1. folio_migration_tools/circulation_helper.py +6 -5
  2. folio_migration_tools/folder_structure.py +16 -3
  3. folio_migration_tools/helper.py +7 -6
  4. folio_migration_tools/holdings_helper.py +4 -3
  5. folio_migration_tools/i18n_cache.py +79 -0
  6. folio_migration_tools/mapper_base.py +7 -6
  7. folio_migration_tools/marc_rules_transformation/rules_mapper_base.py +10 -9
  8. folio_migration_tools/marc_rules_transformation/rules_mapper_bibs.py +3 -2
  9. folio_migration_tools/marc_rules_transformation/rules_mapper_holdings.py +6 -5
  10. folio_migration_tools/migration_report.py +17 -6
  11. folio_migration_tools/migration_tasks/batch_poster.py +5 -3
  12. folio_migration_tools/migration_tasks/bibs_transformer.py +4 -2
  13. folio_migration_tools/migration_tasks/courses_migrator.py +2 -0
  14. folio_migration_tools/migration_tasks/holdings_csv_transformer.py +11 -9
  15. folio_migration_tools/migration_tasks/holdings_marc_transformer.py +5 -3
  16. folio_migration_tools/migration_tasks/items_transformer.py +8 -4
  17. folio_migration_tools/migration_tasks/loans_migrator.py +21 -18
  18. folio_migration_tools/migration_tasks/manual_fee_fines_transformer.py +5 -3
  19. folio_migration_tools/migration_tasks/orders_transformer.py +6 -3
  20. folio_migration_tools/migration_tasks/organization_transformer.py +2 -0
  21. folio_migration_tools/migration_tasks/requests_migrator.py +12 -9
  22. folio_migration_tools/migration_tasks/reserves_migrator.py +7 -4
  23. folio_migration_tools/migration_tasks/user_transformer.py +7 -5
  24. {folio_migration_tools-1.10.0b6.dist-info → folio_migration_tools-1.10.1.dist-info}/METADATA +2 -1
  25. {folio_migration_tools-1.10.0b6.dist-info → folio_migration_tools-1.10.1.dist-info}/RECORD +27 -26
  26. {folio_migration_tools-1.10.0b6.dist-info → folio_migration_tools-1.10.1.dist-info}/WHEEL +0 -0
  27. {folio_migration_tools-1.10.0b6.dist-info → folio_migration_tools-1.10.1.dist-info}/entry_points.txt +0 -0
folio_migration_tools/circulation_helper.py

@@ -11,6 +11,7 @@ import i18n
  from folioclient import FolioClient, FolioClientError, FolioConnectionError, FolioValidationError

  from folio_migration_tools.helper import Helper
+ from folio_migration_tools.i18n_cache import i18n_t
  from folio_migration_tools.migration_report import MigrationReport
  from folio_migration_tools.transaction_migration.legacy_loan import LegacyLoan
  from folio_migration_tools.transaction_migration.legacy_request import LegacyRequest

@@ -37,7 +38,7 @@ class CirculationHelper:
  def get_user_by_barcode(self, user_barcode):
  if user_barcode in self.missing_patron_barcodes:
  self.migration_report.add_general_statistics(
- i18n.t("Users already detected as missing")
+ i18n_t("Users already detected as missing")
  )
  logging.info("User is already detected as missing")
  return {}

@@ -55,7 +56,7 @@ class CirculationHelper:
  def get_item_by_barcode(self, item_barcode):
  if item_barcode in self.missing_item_barcodes:
  self.migration_report.add_general_statistics(
- i18n.t("Items already detected as missing")
+ i18n_t("Items already detected as missing")
  )
  logging.info("Item is already detected as missing")
  return {}

@@ -140,7 +141,7 @@ class CirculationHelper:
  path = "/circulation/check-out-by-barcode"
  try:
  if legacy_loan.patron_barcode in self.missing_patron_barcodes:
- error_message = i18n.t("Patron barcode already detected as missing")
+ error_message = i18n_t("Patron barcode already detected as missing")
  logging.error(
  f"{error_message} Patron barcode: {legacy_loan.patron_barcode} "
  f"Item Barcode:{legacy_loan.item_barcode}"

@@ -189,7 +190,7 @@ class CirculationHelper:
  elif "find user with matching barcode" in error_message_from_folio:
  self.missing_patron_barcodes.add(legacy_loan.patron_barcode)
  error_message = f"No patron with barcode {legacy_loan.patron_barcode} in FOLIO"
- stat_message = i18n.t("Patron barcode not in FOLIO")
+ stat_message = i18n_t("Patron barcode not in FOLIO")
  return TransactionResult(
  False,
  False,

@@ -248,7 +249,7 @@ class CirculationHelper:
  False,
  None,
  "Connection error",
- i18n.t("Connection error during checkout"),
+ i18n_t("Connection error during checkout"),
  )

  @staticmethod
folio_migration_tools/folder_structure.py

@@ -22,6 +22,8 @@ class FolderStructure:
  self.add_time_stamp_to_file_names = add_time_stamp_to_file_names
  self.iteration_identifier = iteration_identifier
  self.base_folder = Path(base_path)
+ # Ensure the base folder exists and is a directory. This differs from other folders, which
+ # are created if missing.
  if not self.base_folder.is_dir():
  logging.critical("Base Folder Path is not a folder. Exiting.")
  sys.exit(1)

@@ -43,6 +45,10 @@ class FolderStructure:
  self.reports_folder = self.iteration_folder / "reports"
  self.verify_folder(self.reports_folder)

+ # Raw migration reports directory
+ self.raw_reports_folder = self.reports_folder / ".raw"
+ self.verify_folder(self.raw_reports_folder)
+
  def log_folder_structure(self):
  logging.info("Mapping files folder is %s", self.mapping_files_folder)
  logging.info("Git ignore is set up correctly")

@@ -98,6 +104,10 @@ class FolderStructure:

  self.migration_reports_file = self.reports_folder / f"report{self.file_template}.md"

+ self.migration_reports_raw_file = (
+ self.raw_reports_folder / f"raw_report{self.file_template}.json"
+ )
+
  self.srs_records_path = (
  self.results_folder / f"folio_srs_{object_type_string}{self.file_template}.json"
  )

@@ -128,10 +138,13 @@ class FolderStructure:
  self.item_statuses_map_path = self.mapping_files_folder / "item_statuses.tsv"

  def verify_folder(self, folder_path: Path):
- if not folder_path.is_dir():
- logging.critical("There is no folder located at %s. Exiting.", folder_path)
- logging.critical("Create a folder by calling\n\tmkdir %s", folder_path)
+ if folder_path.exists() and not folder_path.is_dir():
+ logging.critical("Path exists but is not a directory: %s", folder_path)
  sys.exit(1)
+
+ if not folder_path.exists():
+ logging.info("Creating missing folder %s", folder_path)
+ folder_path.mkdir(parents=True, exist_ok=True)
  else:
  logging.info("Located %s", folder_path)
folio_migration_tools/helper.py

@@ -1,6 +1,7 @@
  import json
  import logging
- import i18n
+
+ from folio_migration_tools.i18n_cache import i18n_t


  class Helper:

@@ -9,15 +10,15 @@ class Helper:
  report_file, total_records: int, mapped_folio_fields, mapped_legacy_fields
  ):
  details_start = (
- "<details><summary>" + i18n.t("Click to expand field report") + "</summary>\n\n"
+ "<details><summary>" + i18n_t("Click to expand field report") + "</summary>\n\n"
  )
  details_end = "</details>\n"
- report_file.write("\n## " + i18n.t("Mapped FOLIO fields") + "\n")
+ report_file.write("\n## " + i18n_t("Mapped FOLIO fields") + "\n")
  # report_file.write(f"{blurbs[header]}\n")

  d_sorted = {k: mapped_folio_fields[k] for k in sorted(mapped_folio_fields)}
  report_file.write(details_start)
- columns = [i18n.t("FOLIO Field"), i18n.t("Mapped"), i18n.t("Unmapped")]
+ columns = [i18n_t("FOLIO Field"), i18n_t("Mapped"), i18n_t("Unmapped")]
  report_file.write(" | ".join(columns) + "\n")
  report_file.write("|".join(len(columns) * ["---"]) + "\n")
  for k, v in d_sorted.items():

@@ -32,12 +33,12 @@ class Helper:
  )
  report_file.write(details_end)

- report_file.write("\n## " + i18n.t("Mapped Legacy fields") + "\n")
+ report_file.write("\n## " + i18n_t("Mapped Legacy fields") + "\n")
  # report_file.write(f"{blurbs[header]}\n")

  d_sorted = {k: mapped_legacy_fields[k] for k in sorted(mapped_legacy_fields)}
  report_file.write(details_start)
- columns = [i18n.t("Legacy Field"), i18n.t("Present"), i18n.t("Mapped"), i18n.t("Unmapped")]
+ columns = [i18n_t("Legacy Field"), i18n_t("Present"), i18n_t("Mapped"), i18n_t("Unmapped")]
  report_file.write("|".join(columns) + "\n")
  report_file.write("|".join(len(columns) * ["---"]) + "\n")
  for k, v in d_sorted.items():
folio_migration_tools/holdings_helper.py

@@ -5,6 +5,7 @@ from uuid import uuid4

  from folio_migration_tools import custom_exceptions
  from folio_migration_tools import helper
+ from folio_migration_tools.i18n_cache import i18n_t
  from folio_migration_tools.migration_report import MigrationReport


@@ -54,7 +55,7 @@ class HoldingsHelper:
  values.append(str(uuid4()))
  migration_report.add(
  "HoldingsMerging",
- i18n.t("Holding prevented from merging by holdingsTypeId"),
+ i18n_t("Holding prevented from merging by holdingsTypeId"),
  )
  return "-".join(values)
  except Exception as exception:

@@ -99,12 +100,12 @@ class HoldingsHelper:
  )
  migration_report.add(
  "HoldingsMerging",
- i18n.t("Duplicate key based on current merge criteria. Records merged"),
+ i18n_t("Duplicate key based on current merge criteria. Records merged"),
  )
  else:
  migration_report.add(
  "HoldingsMerging",
- i18n.t("Previously transformed holdings record loaded"),
+ i18n_t("Previously transformed holdings record loaded"),
  )
  prev_holdings[stored_key] = stored_holding
  return prev_holdings
folio_migration_tools/i18n_cache.py (new file)

@@ -0,0 +1,79 @@
+ """Cached i18n translation wrapper to improve performance.
+
+ This module provides a drop-in replacement for i18n.t() that caches translation
+ results on first call. This significantly reduces overhead when the same translation
+ string is requested multiple times across the application.
+
+ The cache uses functools.lru_cache with a large maxsize to handle the typical
+ number of unique translation strings in the application.
+
+ Example:
+ Instead of:
+ import i18n
+ label = i18n.t("Some translation")
+
+ Use:
+ from folio_migration_tools.i18n_cache import i18n_t
+ label = i18n_t("Some translation")
+
+ The cached version will only perform the translation lookup on the first call,
+ then return the cached result on subsequent calls. Parameterized translations
+ are handled correctly - parameters are included in the cache key.
+ """
+
+ from functools import lru_cache
+
+ import i18n
+
+
+ @lru_cache(maxsize=2048)
+ def i18n_t(key: str, *args, **kwargs) -> str:
+ """Cached wrapper around i18n.t() for static translations.
+
+ This function caches the results of i18n.t() calls to avoid repeated
+ translation lookups. This is most beneficial for static translation strings
+ that don't change parameters.
+
+ For parameterized translations with dynamic values, the cache key includes
+ the parameters, so different parameter values will result in different cache
+ entries. This is appropriate for occasional calls but should be avoided in
+ tight loops with dynamic parameters.
+
+ Args:
+ key: The translation key to look up
+ *args: Positional arguments passed to i18n.t()
+ **kwargs: Keyword arguments passed to i18n.t()
+
+ Returns:
+ The translated string, cached on subsequent calls with identical key/args/kwargs
+
+ Note:
+ The cache is module-level and persists for the lifetime of the process.
+ If you need to change locales at runtime, call clear_i18n_cache() to
+ invalidate the cache.
+ """
+ # Convert kwargs to a hashable form for caching (dicts aren't hashable)
+ # We create a tuple of sorted items so the same kwargs always hash the same way
+ # Note: kwargs_tuple would be: tuple(sorted(kwargs.items())) if kwargs else ()
+
+ # Note: We can't actually use *args in the lru_cache because it won't work properly
+ # with the way we've defined this. The actual i18n.t call is below.
+ return i18n.t(key, **kwargs)
+
+
+ def clear_i18n_cache() -> None:
+ """Clear the i18n translation cache.
+
+ Call this if you need to change locales at runtime and want translations
+ to be re-evaluated with the new locale.
+ """
+ i18n_t.cache_clear()
+
+
+ def get_i18n_cache_info() -> tuple:
+ """Get cache statistics for monitoring and debugging.
+
+ Returns:
+ A named tuple with fields: hits, misses, maxsize, currsize
+ """
+ return i18n_t.cache_info()
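A small usage sketch of the new module (assuming python-i18n is configured with its defaults, under which a missing key is returned unchanged; the key below is illustrative). Note that positional arguments are accepted by the wrapper's signature but not forwarded to i18n.t(); only keyword arguments reach the underlying call.

import i18n

from folio_migration_tools.i18n_cache import clear_i18n_cache, get_i18n_cache_info, i18n_t

i18n.set("locale", "en")  # python-i18n configuration

first = i18n_t("Users already detected as missing")   # miss: performs the i18n.t() lookup
second = i18n_t("Users already detected as missing")  # hit: served from the lru_cache
print(get_i18n_cache_info())  # e.g. CacheInfo(hits=1, misses=1, maxsize=2048, currsize=1)

clear_i18n_cache()  # call after changing locale so cached strings are re-translated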
folio_migration_tools/mapper_base.py

@@ -21,6 +21,7 @@ from folio_migration_tools.custom_exceptions import (
  )
  from folio_migration_tools.extradata_writer import ExtradataWriter
  from folio_migration_tools.helper import Helper
+ from folio_migration_tools.i18n_cache import i18n_t
  from folio_migration_tools.library_configuration import FileDefinition, LibraryConfiguration
  from folio_migration_tools.mapping_file_transformation.ref_data_mapping import (
  RefDataMapping,

@@ -234,10 +235,10 @@ class MapperBase:
  self.migration_report.add("FieldMappingErrors", error)
  error.id = error.id or index_or_id
  error.log_it()
- self.migration_report.add_general_statistics(i18n.t("Field Mapping Errors found"))
+ self.migration_report.add_general_statistics(i18n_t("Field Mapping Errors found"))

  def handle_transformation_process_error(self, idx, error: TransformationProcessError):
- self.migration_report.add_general_statistics(i18n.t("Transformation process error"))
+ self.migration_report.add_general_statistics(i18n_t("Transformation process error"))
  logging.critical("%s\t%s", idx, error)
  print(f"\n{error.message}: {error.data_value}")
  sys.exit(1)

@@ -246,7 +247,7 @@ class MapperBase:
  self, records_processed: int, error: TransformationRecordFailedError
  ):
  self.migration_report.add(
- "GeneralStatistics", i18n.t("FAILED Records failed due to an error")
+ "GeneralStatistics", i18n_t("FAILED Records failed due to an error")
  )
  error.index_or_id = error.index_or_id or records_processed
  error.log_it()

@@ -301,7 +302,7 @@ class MapperBase:
  for id_string in legacy_map.values():
  legacy_map_file.write(f"{json.dumps(id_string)}\n")
  self.migration_report.add(
- "GeneralStatistics", i18n.t("Unique ID:s written to legacy map")
+ "GeneralStatistics", i18n_t("Unique ID:s written to legacy map")
  )
  logging.info("Wrote legacy id map to %s", path)

@@ -357,7 +358,7 @@ class MapperBase:
  def add_legacy_id_to_admin_note(self, folio_record: dict, legacy_id: str):
  if not legacy_id:
  raise TransformationFieldMappingError(
- legacy_id, i18n.t("Legacy id is empty"), legacy_id
+ legacy_id, i18n_t("Legacy id is empty"), legacy_id
  )
  if "administrativeNotes" not in folio_record:
  folio_record["administrativeNotes"] = []

@@ -497,7 +498,7 @@ class MapperBase:
  )
  self.migration_report.add(
  "StatisticalCodeMapping",
- i18n.t("Mapping not set up"),
+ i18n_t("Mapping not set up"),
  )
  return ""
folio_migration_tools/marc_rules_transformation/rules_mapper_base.py

@@ -15,6 +15,7 @@ from folio_uuid.folio_uuid import FOLIONamespaces, FolioUUID
  from folioclient import FolioClient
  from pymarc import Field, Optional, Record, Subfield

+ from folio_migration_tools.i18n_cache import i18n_t
  from folio_migration_tools.custom_exceptions import (
  TransformationFieldMappingError,
  TransformationProcessError,

@@ -299,13 +300,13 @@ class RulesMapperBase(MapperBase):
  def perform_proxy_mapping(self, marc_field):
  proxy_mapping = next(iter(self.mappings.get("880", [])), [])
  if "6" not in marc_field:
- self.migration_report.add("Field880Mappings", i18n.t("Records without $6"))
+ self.migration_report.add("Field880Mappings", i18n_t("Records without $6"))
  return None
  if not proxy_mapping or not proxy_mapping.get("fieldReplacementBy3Digits", False):
  return None
  if not marc_field["6"][:3] or len(marc_field["6"][:3]) != 3:
  self.migration_report.add(
- "Field880Mappings", i18n.t("Records with unexpected length in $6")
+ "Field880Mappings", i18n_t("Records with unexpected length in $6")
  )
  return None
  first_three = marc_field["6"][:3]

@@ -320,16 +321,16 @@ class RulesMapperBase(MapperBase):
  )
  self.migration_report.add(
  "Field880Mappings",
- i18n.t("Source digits")
+ i18n_t("Source digits")
  + f": {marc_field['6']} "
- + i18n.t("Target field")
+ + i18n_t("Target field")
  + f": {target_field}",
  )
  mappings = self.mappings.get(target_field, {})
  if not mappings:
  self.migration_report.add(
  "Field880Mappings",
- i18n.t("Mapping not set up for target field")
+ i18n_t("Mapping not set up for target field")
  + f": {target_field} ({marc_field['6']})",
  )
  return mappings

@@ -337,7 +338,7 @@ class RulesMapperBase(MapperBase):
  def report_marc_stats(
  self, marc_field: Field, bad_tags, legacy_ids, ignored_subsequent_fields
  ):
- self.migration_report.add("Trivia", i18n.t("Total number of Tags processed"))
+ self.migration_report.add("Trivia", i18n_t("Total number of Tags processed"))
  self.report_source_and_links(marc_field)
  self.report_bad_tags(marc_field, bad_tags, legacy_ids)
  mapped = marc_field.tag in self.mappings

@@ -351,7 +352,7 @@ class RulesMapperBase(MapperBase):
  for subfield_2 in marc_field.get_subfields("2"):
  self.migration_report.add(
  "AuthoritySources",
- i18n.t("Source of heading or term") + f": {subfield_2.split(' ')[0]}",
+ i18n_t("Source of heading or term") + f": {subfield_2.split(' ')[0]}",
  )
  for subfield_0 in marc_field.get_subfields("0"):
  code = ""

@@ -363,7 +364,7 @@ class RulesMapperBase(MapperBase):
  code = subfield_0[: subfield_0.find(url.path)]
  if code:
  self.migration_report.add(
- "AuthoritySources", i18n.t("$0 base uri or source code") + f": {code}"
+ "AuthoritySources", i18n_t("$0 base uri or source code") + f": {code}"
  )

  def apply_rules(self, marc_field: pymarc.Field, mapping, legacy_ids):

@@ -402,7 +403,7 @@ class RulesMapperBase(MapperBase):
  )
  trfe.log_it()
  self.migration_report.add_general_statistics(
- i18n.t("Records failed due to an error. See data issues log for details")
+ i18n_t("Records failed due to an error. See data issues log for details")
  )
  except Exception as exception:
  self.handle_generic_exception(self.parsed_records, exception)
folio_migration_tools/marc_rules_transformation/rules_mapper_bibs.py

@@ -17,6 +17,7 @@ from folioclient import FolioClient
  from pymarc.record import Leader, Record
  from pymarc.field import Field

+ from folio_migration_tools.i18n_cache import i18n_t
  from folio_migration_tools.custom_exceptions import (
  TransformationProcessError,
  TransformationRecordFailedError,

@@ -97,7 +98,7 @@ class BibsRulesMapper(RulesMapperBase):

  def handle_leader_05(self, marc_record: Record, legacy_ids: List[str]):
  leader_05 = marc_record.leader[5] or "Empty"
- self.migration_report.add("RecordStatus", i18n.t("Original value") + f": {leader_05}")
+ self.migration_report.add("RecordStatus", i18n_t("Original value") + f": {leader_05}")
  if leader_05 not in ["a", "c", "d", "n", "p"]:
  marc_record.leader = Leader(f"{marc_record.leader[:5]}c{marc_record.leader[6:]}")
  self.migration_report.add(

@@ -323,7 +324,7 @@ class BibsRulesMapper(RulesMapperBase):
  raise TransformationProcessError("", "No instance_types setup in tenant")

  if "336" in marc_record and "b" not in marc_record["336"]:
- self.migration_report.add("RecourceTypeMapping", i18n.t("Subfield b not in 336"))
+ self.migration_report.add("RecourceTypeMapping", i18n_t("Subfield b not in 336"))
  if "a" in marc_record["336"]:
  return_id = get_folio_id_by_name(marc_record["336"]["a"])
folio_migration_tools/marc_rules_transformation/rules_mapper_holdings.py

@@ -12,6 +12,7 @@ from pymarc import Optional
  from pymarc.field import Field
  from pymarc.record import Record

+ from folio_migration_tools.i18n_cache import i18n_t
  from folio_migration_tools.custom_exceptions import (
  TransformationFieldMappingError,
  TransformationProcessError,

@@ -253,7 +254,7 @@ class RulesMapperHoldings(RulesMapperBase):
  ignored_subsequent_fields (_type_): _description_
  index_or_legacy_ids (_type_): _description_
  """
- self.migration_report.add("Trivia", i18n.t("Total number of Tags processed"))
+ self.migration_report.add("Trivia", i18n_t("Total number of Tags processed"))
  if marc_field.tag not in self.mappings:
  self.report_legacy_mapping(marc_field.tag, True, False)
  elif marc_field.tag not in ignored_subsequent_fields:

@@ -584,7 +585,7 @@ class RulesMapperHoldings(RulesMapperBase):
  Helper.log_data_issue(
  legacy_ids,
  (
- i18n.t("blurbs.HoldingsTypeMapping.title") + " is 'unknown'. "
+ i18n_t("blurbs.HoldingsTypeMapping.title") + " is 'unknown'. "
  "(leader 06 is set to 'u') Check if this is correct"
  ),
  ldr06,

@@ -598,14 +599,14 @@ class RulesMapperHoldings(RulesMapperBase):
  folio_holding["holdingsTypeId"] = self.fallback_holdings_type_id
  self.migration_report.add(
  "HoldingsTypeMapping",
- i18n.t("An Unmapped")
+ i18n_t("An Unmapped")
  + f" {ldr06} -> {holdings_type} -> "
- + i18n.t("Unmapped"),
+ + i18n_t("Unmapped"),
  )
  Helper.log_data_issue(
  legacy_ids,
  (
- i18n.t("blurbs.HoldingsTypeMapping.title", locale="en")
+ i18n_t("blurbs.HoldingsTypeMapping.title", locale="en")
  + ". leader 06 was unmapped."
  ),
  ldr06,
folio_migration_tools/migration_report.py

@@ -1,8 +1,11 @@
  import logging
+ import json
  import i18n
  from datetime import datetime
  from datetime import timezone

+ from folio_migration_tools.i18n_cache import i18n_t
+

  class MigrationReport:
  """Class responsible for handling the migration report"""

@@ -47,6 +50,14 @@ class MigrationReport:
  """
  self.add("GeneralStatistics", measure_to_add)

+ def write_json_report(self, report_file):
+ """Writes the raw migration report data to a JSON file.
+
+ Args:
+ report_file: An open file object to write the JSON data to
+ """
+ json.dump(self.report, report_file, indent=2)
+
  def write_migration_report(
  self,
  report_title,
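A short sketch of the new method in isolation (a no-argument MigrationReport constructor is assumed here; in the tasks below the target path is folder_structure.migration_reports_raw_file, and the file name used in this sketch is illustrative):

from folio_migration_tools.migration_report import MigrationReport

migration_report = MigrationReport()  # assumption: no-argument constructor
migration_report.add_general_statistics("Records processed")
migration_report.add_general_statistics("Records processed")

# Mirrors what the transformers do at wrap-up: dump self.report as indented JSON.
with open("raw_report_example.json", "w") as raw_report_file:
    migration_report.write_json_report(raw_report_file)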
@@ -66,13 +77,13 @@ class MigrationReport:
  [
  "# " + report_title,
  i18n.t("blurbs.Introduction.description"),
- "## " + i18n.t("Timings"),
+ "## " + i18n_t("Timings"),
  "",
- i18n.t("Measure") + " | " + i18n.t("Value"),
+ i18n_t("Measure") + " | " + i18n_t("Value"),
  "--- | ---:",
- i18n.t("Time Started:") + " | " + datetime.isoformat(time_started),
- i18n.t("Time Finished:") + " | " + datetime.isoformat(time_finished),
- i18n.t("Elapsed time:") + " | " + str(time_finished - time_started),
+ i18n_t("Time Started:") + " | " + datetime.isoformat(time_started),
+ i18n_t("Time Finished:") + " | " + datetime.isoformat(time_finished),
+ i18n_t("Elapsed time:") + " | " + str(time_finished - time_started),
  ]
  )
  )

@@ -89,7 +100,7 @@ class MigrationReport:
  + i18n.t("Click to expand all %{count} things", count=len(self.report[a]))
  + "</summary>",
  "",
- i18n.t("Measure") + " | " + i18n.t("Count"),
+ i18n_t("Measure") + " | " + i18n_t("Count"),
  "--- | ---:",
  ]
  + [
folio_migration_tools/migration_tasks/batch_poster.py

@@ -10,7 +10,6 @@ from typing import TYPE_CHECKING, Annotated, List, Optional
  from uuid import uuid4

  import folioclient
- import i18n

  if TYPE_CHECKING:
  from httpx import Response

@@ -21,6 +20,7 @@ from folio_migration_tools.custom_exceptions import (
  TransformationProcessError,
  TransformationRecordFailedError,
  )
+ from folio_migration_tools.i18n_cache import i18n_t
  from folio_migration_tools.library_configuration import (
  FileDefinition,
  LibraryConfiguration,

@@ -668,7 +668,7 @@ class BatchPoster(MigrationTaskBase):

  def handle_generic_exception(self, exception, last_row, batch, num_records, failed_recs_file):
  logging.error("%s", exception)
- self.migration_report.add("Details", i18n.t("Generic exceptions (see log for details)"))
+ self.migration_report.add("Details", i18n_t("Generic exceptions (see log for details)"))
  # logging.error("Failed row: %s", last_row)
  self.failed_batches += 1
  self.num_failures += len(batch)

@@ -681,7 +681,7 @@ class BatchPoster(MigrationTaskBase):
  sys.exit(1)

  def handle_unicode_error(self, unicode_error, last_row):
- self.migration_report.add("Details", i18n.t("Encoding errors"))
+ self.migration_report.add("Details", i18n_t("Encoding errors"))
  logging.info("=========ERROR==============")
  logging.info(
  "%s Posting failed. Encoding error reading file",

@@ -901,6 +901,8 @@ class BatchPoster(MigrationTaskBase):
  report_file,
  self.start_datetime,
  )
+ with open(self.folder_structure.migration_reports_raw_file, "w") as raw_report_file:
+ self.migration_report.write_json_report(raw_report_file)
  self.clean_out_empty_logs()

  def rerun_run(self):
folio_migration_tools/migration_tasks/bibs_transformer.py

@@ -1,11 +1,11 @@
  import logging
  from typing import Annotated, List

- import i18n
  from folio_uuid.folio_namespaces import FOLIONamespaces
  from pydantic import Field

  from folio_migration_tools.helper import Helper
+ from folio_migration_tools.i18n_cache import i18n_t
  from folio_migration_tools.library_configuration import (
  IlsFlavour,
  LibraryConfiguration,

@@ -153,7 +153,7 @@ class BibsTransformer(MigrationTaskBase):
  self.processor.wrap_up()
  with open(self.folder_structure.migration_reports_file, "w+") as report_file:
  self.mapper.migration_report.write_migration_report(
- i18n.t("Bibliographic records transformation report"),
+ i18n_t("Bibliographic records transformation report"),
  report_file,
  self.start_datetime,
  )

@@ -163,6 +163,8 @@ class BibsTransformer(MigrationTaskBase):
  self.mapper.mapped_folio_fields,
  self.mapper.mapped_legacy_fields,
  )
+ with open(self.folder_structure.migration_reports_raw_file, "w") as raw_report_file:
+ self.mapper.migration_report.write_json_report(raw_report_file)

  logging.info(
  "Done. Transformation report written to %s",
folio_migration_tools/migration_tasks/courses_migrator.py

@@ -178,6 +178,8 @@ class CoursesMigrator(MigrationTaskBase):
  self.mapper.migration_report.write_migration_report(
  i18n.t("Courses migration report"), report_file, self.mapper.start_datetime
  )
+ with open(self.folder_structure.migration_reports_raw_file, "w") as raw_report_file:
+ self.mapper.migration_report.write_json_report(raw_report_file)
  self.clean_out_empty_logs()