folio-migration-tools 1.9.3__py3-none-any.whl → 1.9.5__py3-none-any.whl

This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the package versions as they appear in their respective public registries.
@@ -198,6 +198,14 @@ class CirculationHelper:
198
198
  error_message_from_folio,
199
199
  error_message_from_folio,
200
200
  )
201
+ elif "Item is already checked out" in error_message_from_folio:
202
+ return TransactionResult(
203
+ False,
204
+ True,
205
+ None,
206
+ error_message_from_folio,
207
+ error_message_from_folio,
208
+ )
201
209
  logging.error(
202
210
  f"{error_message} "
203
211
  f"Patron barcode: {legacy_loan.patron_barcode} "
@@ -56,6 +56,10 @@ class Helper:
56
56
  @staticmethod
57
57
  def log_data_issue(index_or_id, message, legacy_value):
58
58
  logging.log(26, "DATA ISSUE\t%s\t%s\t%s", index_or_id, message, legacy_value)
59
+
60
+ @staticmethod
61
+ def log_data_issue_failed(index_or_id, message, legacy_value):
62
+ logging.log(26, "RECORD FAILED\t%s\t%s\t%s", index_or_id, message, legacy_value)
59
63
 
60
64
  @staticmethod
61
65
  def write_to_file(file, folio_record):
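The new log_data_issue_failed writes to the same custom level 26 channel as log_data_issue, but with a "RECORD FAILED" prefix so rejected records can be told apart from merely flagged ones. A self-contained sketch of how such level-26 lines could be routed to a data-issues file; the handler setup and file name here are illustrative, not the tools' actual logging configuration.

```python
import logging

DATA_ISSUE_LVL_NUM = 26  # assumed constant name; the tools log data issues at level 26

logging.addLevelName(DATA_ISSUE_LVL_NUM, "DATA_ISSUES")
handler = logging.FileHandler("data_issues.tsv")
handler.addFilter(lambda rec: rec.levelno == DATA_ISSUE_LVL_NUM)  # only level-26 records
logging.getLogger().addHandler(handler)
logging.getLogger().setLevel(DATA_ISSUE_LVL_NUM)


def log_data_issue(index_or_id, message, legacy_value):
    logging.log(DATA_ISSUE_LVL_NUM, "DATA ISSUE\t%s\t%s\t%s", index_or_id, message, legacy_value)


def log_data_issue_failed(index_or_id, message, legacy_value):
    # Same channel, but "RECORD FAILED" marks issues that caused the record to be rejected.
    logging.log(DATA_ISSUE_LVL_NUM, "RECORD FAILED\t%s\t%s\t%s", index_or_id, message, legacy_value)


log_data_issue("row 12", "Loan without matched patron barcode", "{...}")
log_data_issue_failed("row 13", "Loan without matched item barcode", "{...}")
```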
@@ -174,11 +174,11 @@ class BibsRulesMapper(RulesMapperBase):
174
174
  self.process_marc_field(folio_instance, main_entry_fields[0], ignored_subsequent_fields, legacy_ids)
175
175
  try:
176
176
  self.process_marc_field(folio_instance, marc_record['245'], ignored_subsequent_fields, legacy_ids)
177
- except KeyError:
177
+ except KeyError as ke:
178
178
  raise TransformationRecordFailedError(
179
179
  legacy_ids,
180
180
  "No 245 field in MARC record"
181
- )
181
+ ) from ke
182
182
 
183
183
  def perform_additional_parsing(
184
184
  self,
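The only functional change in this hunk is exception chaining: re-raising with `from ke` keeps the original KeyError as `__cause__`, so the traceback shows both the missing 245 lookup and the domain-specific transformation error. A toy illustration with a stand-in exception class (the real one, taking the same arguments, is in folio_migration_tools.custom_exceptions):

```python
class TransformationRecordFailedError(Exception):
    pass


def require_245(marc_record: dict, legacy_ids: list):
    try:
        return marc_record["245"]
    except KeyError as ke:
        # Chain the KeyError so the original cause is preserved in the traceback.
        raise TransformationRecordFailedError(
            legacy_ids, "No 245 field in MARC record"
        ) from ke


try:
    require_245({}, ["legacy-id-1"])
except TransformationRecordFailedError as err:
    assert isinstance(err.__cause__, KeyError)  # the chained original error
```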
@@ -196,9 +196,15 @@ class RulesMapperHoldings(RulesMapperBase):
196
196
  if "004" not in marc_record:
197
197
  raise TransformationProcessError(
198
198
  "",
199
- ("No 004 in record. The tools only support bib-mfhd linking throuh 004"),
199
+ ("No 004 in record. The tools only support bib-mfhd linking through 004"),
200
200
  legacy_ids,
201
201
  )
202
+ if len(marc_record.get_fields("004")) > 1:
203
+ Helper.log_data_issue(
204
+ legacy_ids,
205
+ "More than one linked bib (004) found in record. Using the first one",
206
+ [str(x) for x in marc_record.get_fields("004")],
207
+ )
202
208
  legacy_instance_id = marc_record["004"].data.strip()
203
209
  folio_holding["formerIds"].append(f"{self.bib_id_template}{legacy_instance_id}")
204
210
  if legacy_instance_id in self.parent_id_map:
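Besides the spelling fix, the hunk now logs a data issue when a holdings record carries more than one 004 and then links via the first one, which is what `marc_record["004"]` returns. A small pymarc sketch of that behavior (values invented):

```python
from pymarc import Field, Record

record = Record()
record.add_field(Field(tag="004", data="bib0001"))
record.add_field(Field(tag="004", data="bib0002"))  # duplicate linking field

linked_bibs = record.get_fields("004")
if len(linked_bibs) > 1:
    print(
        "More than one linked bib (004) found in record. Using the first one",
        [str(x) for x in linked_bibs],
    )

legacy_instance_id = record["004"].data.strip()  # indexing returns the first 004
print(legacy_instance_id)  # -> "bib0001"
```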
@@ -131,6 +131,56 @@ class BatchPoster(MigrationTaskBase):
131
131
  ),
132
132
  ),
133
133
  ] = False
134
+ preserve_statistical_codes: Annotated[
135
+ bool,
136
+ Field(
137
+ title="Preserve statistical codes",
138
+ description=(
139
+ "Toggles whether or not to preserve statistical codes "
140
+ "during the upsert process. Defaults to False"
141
+ ),
142
+ ),
143
+ ] = False
144
+ preserve_administrative_notes: Annotated[
145
+ bool,
146
+ Field(
147
+ title="Preserve administrative notes",
148
+ description=(
149
+ "Toggles whether or not to preserve administrative notes "
150
+ "during the upsert process. Defaults to False"
151
+ ),
152
+ ),
153
+ ] = False
154
+ preserve_temporary_locations: Annotated[
155
+ bool,
156
+ Field(
157
+ title="Preserve temporary locations",
158
+ description=(
159
+ "Toggles whether or not to preserve temporary locations "
160
+ "on items during the upsert process. Defaults to False"
161
+ ),
162
+ ),
163
+ ] = False
164
+ preserve_temporary_loan_types: Annotated[
165
+ bool,
166
+ Field(
167
+ title="Preserve temporary loan types",
168
+ description=(
169
+ "Toggles whether or not to preserve temporary loan types "
170
+ "on items during the upsert process. Defaults to False"
171
+ ),
172
+ ),
173
+ ] = False
174
+ preserve_item_status: Annotated[
175
+ bool,
176
+ Field(
177
+ title="Preserve item status",
178
+ description=(
179
+ "Toggles whether or not to preserve item status "
180
+ "on items during the upsert process. Defaults to False"
181
+ ),
182
+ ),
183
+ ] = True
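The five toggles above control what the upsert keeps from the records already in FOLIO; note that only preserve_item_status defaults to True. A minimal sketch of the same Annotated/Field pattern in isolation, with the flag names and defaults taken from this hunk and the surrounding model purely illustrative:

```python
from typing import Annotated

from pydantic import BaseModel, Field


class UpsertToggles(BaseModel):
    preserve_statistical_codes: Annotated[bool, Field(
        description="Keep existing statisticalCodeIds during upsert")] = False
    preserve_administrative_notes: Annotated[bool, Field(
        description="Keep existing administrativeNotes during upsert")] = False
    preserve_temporary_locations: Annotated[bool, Field(
        description="Keep existing temporaryLocationId on items")] = False
    preserve_temporary_loan_types: Annotated[bool, Field(
        description="Keep existing temporaryLoanTypeId on items")] = False
    preserve_item_status: Annotated[bool, Field(
        description="Keep the existing item status")] = True  # the only default-True flag


toggles = UpsertToggles()
print(toggles.preserve_item_status, toggles.preserve_statistical_codes)  # True False
```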
134
184
 
135
185
  task_configuration: TaskConfiguration
136
186
 
@@ -292,7 +342,7 @@ class BatchPoster(MigrationTaskBase):
292
342
  """
293
343
  fetch_batch_size = 90
294
344
  fetch_tasks = []
295
- updates = {}
345
+ existing_records = {}
296
346
  async with httpx.AsyncClient(base_url=self.folio_client.gateway_url) as client:
297
347
  for i in range(0, len(batch), fetch_batch_size):
298
348
  batch_slice = batch[i:i + fetch_batch_size]
@@ -313,31 +363,82 @@ class BatchPoster(MigrationTaskBase):
313
363
  responses = await asyncio.gather(*fetch_tasks)
314
364
 
315
365
  for response in responses:
316
- self.update_record_versions(object_type, updates, response)
366
+ self.collect_existing_records_for_upsert(object_type, response, existing_records)
317
367
  for record in batch:
318
- if record["id"] in updates:
319
- record.update(updates[record["id"]])
368
+ if record["id"] in existing_records:
369
+ self.prepare_record_for_upsert(record, existing_records[record["id"]])
370
+
371
+ def handle_source_marc(self, new_record: dict, existing_record: dict):
372
+ updates = {}
373
+ updates.update(existing_record)
374
+ self.handle_upsert_for_administrative_notes(updates)
375
+ self.handle_upsert_for_statistical_codes(updates)
376
+ keep_new = {k: v for k, v in new_record.items() if k in ["statisticalCodeIds", "administrativeNotes"]}
377
+ if "instanceStatusId" in new_record:
378
+ updates["instanceStatusId"] = new_record["instanceStatusId"]
379
+ for k, v in keep_new.items():
380
+ updates[k] = list(dict.fromkeys(updates.get(k, []) + v))
381
+ new_record.update(updates)
320
382
 
321
383
  @staticmethod
322
- def update_record_versions(object_type, updates, response):
384
+ def collect_existing_records_for_upsert(object_type: str, response: httpx.Response, existing_records: dict):
323
385
  if response.status_code == 200:
324
386
  response_json = response.json()
325
387
  for record in response_json[object_type]:
326
- updates[record["id"]] = {
327
- "_version": record["_version"],
328
- }
329
- if "hrid" in record:
330
- updates[record["id"]]["hrid"] = record["hrid"]
331
- if "status" in record:
332
- updates[record["id"]]["status"] = record["status"]
333
- if "lastCheckIn" in record:
334
- updates[record["id"]]["lastCheckIn"] = record["lastCheckIn"]
388
+ existing_records[record["id"]] = record
335
389
  else:
336
390
  logging.error(
337
- "Failed to fetch current records. HTTP %s\t%s",
338
- response.status_code,
339
- response.text,
340
- )
391
+ "Failed to fetch current records. HTTP %s\t%s",
392
+ response.status_code,
393
+ response.text,
394
+ )
395
+
396
+ def handle_upsert_for_statistical_codes(self, updates: dict):
397
+ if not self.task_configuration.preserve_statistical_codes:
398
+ updates.pop("statisticalCodeIds", None)
399
+
400
+ def handle_upsert_for_administrative_notes(self, updates: dict):
401
+ if not self.task_configuration.preserve_administrative_notes:
402
+ updates.pop("administrativeNotes", None)
403
+
404
+ def handle_upsert_for_temporary_locations(self, updates: dict):
405
+ if not self.task_configuration.preserve_temporary_locations:
406
+ updates.pop("temporaryLocationId", None)
407
+
408
+ def handle_upsert_for_temporary_loan_types(self, updates: dict):
409
+ if not self.task_configuration.preserve_temporary_loan_types:
410
+ updates.pop("temporaryLoanTypeId", None)
411
+
412
+ def keep_existing_fields(self, updates: dict, existing_record: dict):
413
+ keep_existing_fields = ["hrid", "lastCheckIn"]
414
+ if self.task_configuration.preserve_item_status:
415
+ keep_existing_fields.append("status")
416
+ for key in keep_existing_fields:
417
+ if key in existing_record:
418
+ updates[key] = existing_record[key]
419
+
420
+ def prepare_record_for_upsert(self, new_record: dict, existing_record: dict):
421
+ if "source" in existing_record and "MARC" in existing_record["source"]:
422
+ self.handle_source_marc(new_record, existing_record)
423
+ else:
424
+ updates = {
425
+ "_version": existing_record["_version"],
426
+ }
427
+ self.keep_existing_fields(updates, existing_record)
428
+ keep_new = {k: v for k, v in new_record.items() if k in ["statisticalCodeIds", "administrativeNotes"]}
429
+ self.handle_upsert_for_statistical_codes(existing_record)
430
+ self.handle_upsert_for_administrative_notes(existing_record)
431
+ self.handle_upsert_for_temporary_locations(existing_record)
432
+ self.handle_upsert_for_temporary_loan_types(existing_record)
433
+ for k, v in keep_new.items():
434
+ updates[k] = list(dict.fromkeys(existing_record.get(k, []) + v))
435
+ for key in [
436
+ "temporaryLocationId",
437
+ "temporaryLoanTypeId",
438
+ ]:
439
+ if key in existing_record:
440
+ updates[key] = existing_record[key]
441
+ new_record.update(updates)
341
442
 
342
443
  async def get_with_retry(self, client: httpx.AsyncClient, url: str, params=None):
343
444
  if params is None:
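For records whose source is not MARC, prepare_record_for_upsert keeps the stored _version, hrid, lastCheckIn (and, by default, status) from the existing FOLIO record and unions the list-valued fields without duplicates. A simplified, stand-alone sketch of that merge; it ignores the preserve_* toggles for the list fields and the temporary location/loan type handling shown above.

```python
def merge_for_upsert(new_record: dict, existing_record: dict,
                     preserve_item_status: bool = True) -> dict:
    merged = dict(new_record)
    merged["_version"] = existing_record["_version"]  # optimistic-locking token

    keep_existing = ["hrid", "lastCheckIn"]
    if preserve_item_status:
        keep_existing.append("status")
    for key in keep_existing:
        if key in existing_record:
            merged[key] = existing_record[key]

    # Union list-valued fields, keeping order and dropping duplicates.
    for key in ["statisticalCodeIds", "administrativeNotes"]:
        combined = existing_record.get(key, []) + new_record.get(key, [])
        merged[key] = list(dict.fromkeys(combined))
    return merged


existing = {"_version": 4, "hrid": "it00000001", "status": {"name": "Checked out"},
            "statisticalCodeIds": ["a"]}
incoming = {"id": "f3e8...", "statisticalCodeIds": ["a", "b"]}
print(merge_for_upsert(incoming, existing))  # keeps _version/hrid/status, dedupes codes
```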
@@ -218,7 +218,10 @@ class ItemsTransformer(MigrationTaskBase):
218
218
  self.folio_keys = MappingFileMapperBase.get_mapped_folio_properties_from_map(
219
219
  self.items_map
220
220
  )
221
- if any(k for k in self.folio_keys if k.startswith("statisticalCodeIds")):
221
+ if (
222
+ any(k for k in self.folio_keys if k.startswith("statisticalCodeIds"))
223
+ or any(getattr(k, "statistical_code", "") for k in self.task_configuration.files)
224
+ ):
222
225
  statcode_mapping = self.load_ref_data_mapping_file(
223
226
  "statisticalCodeIds",
224
227
  self.folder_structure.mapping_files_folder
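The broadened condition loads the statistical code mapping not only when the mapping file maps statisticalCodeIds, but also when any input file definition sets a statistical_code of its own. A small illustration; the FileDef class and values here are hypothetical stand-ins for the task configuration's file definitions.

```python
from dataclasses import dataclass


@dataclass
class FileDef:
    file_name: str
    statistical_code: str = ""


folio_keys = ["barcode", "status.name"]  # no statisticalCodeIds mapping in the map file
files = [FileDef("items_01.tsv", statistical_code="migrated_2024")]

needs_statcode_mapping = (
    any(k.startswith("statisticalCodeIds") for k in folio_keys)
    or any(getattr(f, "statistical_code", "") for f in files)
)
print(needs_statcode_mapping)  # True: triggered by the file-level statistical code
```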
@@ -6,7 +6,7 @@ import sys
6
6
  import time
7
7
  import traceback
8
8
  from datetime import datetime, timedelta
9
- from typing import Annotated, Optional
9
+ from typing import Annotated, List, Literal, Optional
10
10
  from urllib.error import HTTPError
11
11
  from zoneinfo import ZoneInfo
12
12
  from pydantic import Field
@@ -55,7 +55,7 @@ class LoansMigrator(MigrationTaskBase):
55
55
  Optional[list[FileDefinition]],
56
56
  Field(
57
57
  title="Open loans files",
58
- description="List of files containing open loan data."
58
+ description="List of files containing open loan data.",
59
59
  ),
60
60
  ]
61
61
  fallback_service_point_id: Annotated[
@@ -69,10 +69,7 @@ class LoansMigrator(MigrationTaskBase):
69
69
  Optional[int],
70
70
  Field(
71
71
  title="Starting row",
72
- description=(
73
- "The starting row for data processing. "
74
- "By default is 1."
75
- ),
72
+ description=("The starting row for data processing. By default is 1."),
76
73
  ),
77
74
  ] = 1
78
75
  item_files: Annotated[
@@ -80,8 +77,7 @@ class LoansMigrator(MigrationTaskBase):
80
77
  Field(
81
78
  title="Item files",
82
79
  description=(
83
- "List of files containing item data. "
84
- "By default is empty list."
80
+ "List of files containing item data. By default is empty list."
85
81
  ),
86
82
  ),
87
83
  ] = []
@@ -90,8 +86,7 @@ class LoansMigrator(MigrationTaskBase):
90
86
  Field(
91
87
  title="Patron files",
92
88
  description=(
93
- "List of files containing patron data. "
94
- "By default is empty list."
89
+ "List of files containing patron data. By default is empty list."
95
90
  ),
96
91
  ),
97
92
  ] = []
@@ -104,7 +99,7 @@ class LoansMigrator(MigrationTaskBase):
104
99
  self,
105
100
  task_configuration: TaskConfiguration,
106
101
  library_config: LibraryConfiguration,
107
- folio_client
102
+ folio_client,
108
103
  ):
109
104
  csv.register_dialect("tsv", delimiter="\t")
110
105
  self.patron_item_combos: set = set()
@@ -115,7 +110,7 @@ class LoansMigrator(MigrationTaskBase):
115
110
  self.failed: dict = {}
116
111
  self.failed_and_not_dupe: dict = {}
117
112
  self.migration_report = MigrationReport()
118
- self.valid_legacy_loans = []
113
+ self.valid_legacy_loans: List[LegacyLoan] = []
119
114
  super().__init__(library_config, task_configuration, folio_client)
120
115
  self.circulation_helper = CirculationHelper(
121
116
  self.folio_client,
@@ -129,7 +124,9 @@ class LoansMigrator(MigrationTaskBase):
129
124
  my_path = "/configurations/entries?query=(module==ORG%20and%20configName==localeSettings)"
130
125
  try:
131
126
  self.tenant_timezone_str = json.loads(
132
- self.folio_client.folio_get_single_object(my_path)["configs"][0]["value"]
127
+ self.folio_client.folio_get_single_object(my_path)["configs"][0][
128
+ "value"
129
+ ]
133
130
  )["timezone"]
134
131
  logging.info("Tenant timezone is: %s", self.tenant_timezone_str)
135
132
  except Exception:
@@ -138,10 +135,14 @@ class LoansMigrator(MigrationTaskBase):
138
135
  self.tenant_timezone = ZoneInfo(self.tenant_timezone_str)
139
136
  self.semi_valid_legacy_loans = []
140
137
  for file_def in task_configuration.open_loans_files:
141
- loans_file_path = self.folder_structure.legacy_records_folder / file_def.file_name
138
+ loans_file_path = (
139
+ self.folder_structure.legacy_records_folder / file_def.file_name
140
+ )
142
141
  with open(loans_file_path, "r", encoding="utf-8") as loans_file:
143
- total_rows, empty_rows, reader = MappingFileMapperBase._get_delimited_file_reader(
144
- loans_file, loans_file_path
142
+ total_rows, empty_rows, reader = (
143
+ MappingFileMapperBase._get_delimited_file_reader(
144
+ loans_file, loans_file_path
145
+ )
145
146
  )
146
147
  logging.info("Source data file contains %d rows", total_rows)
147
148
  logging.info("Source data file contains %d empty rows", empty_rows)
@@ -158,7 +159,8 @@ class LoansMigrator(MigrationTaskBase):
158
159
  self.semi_valid_legacy_loans.extend(
159
160
  self.load_and_validate_legacy_loans(
160
161
  reader,
161
- file_def.service_point_id or task_configuration.fallback_service_point_id,
162
+ file_def.service_point_id
163
+ or task_configuration.fallback_service_point_id,
162
164
  )
163
165
  )
164
166
 
@@ -167,8 +169,12 @@ class LoansMigrator(MigrationTaskBase):
167
169
  len(self.semi_valid_legacy_loans),
168
170
  file_def.file_name,
169
171
  )
170
- logging.info("Loaded and validated %s loans in total", len(self.semi_valid_legacy_loans))
171
- if any(self.task_configuration.item_files) or any(self.task_configuration.patron_files):
172
+ logging.info(
173
+ "Loaded and validated %s loans in total", len(self.semi_valid_legacy_loans)
174
+ )
175
+ if any(self.task_configuration.item_files) or any(
176
+ self.task_configuration.patron_files
177
+ ):
172
178
  self.valid_legacy_loans = list(self.check_barcodes())
173
179
  logging.info(
174
180
  "Loaded and validated %s loans against barcodes",
@@ -185,9 +191,9 @@ class LoansMigrator(MigrationTaskBase):
185
191
 
186
192
  def check_smtp_config(self):
187
193
  try:
188
- smtp_config = self.folio_client.folio_get_single_object("/smtp-configuration")[
189
- "smtpConfigurations"
190
- ][0]
194
+ smtp_config = self.folio_client.folio_get_single_object(
195
+ "/smtp-configuration"
196
+ )["smtpConfigurations"][0]
191
197
  smtp_config_disabled = "disabled" in smtp_config["host"].lower()
192
198
  except IndexError:
193
199
  smtp_config_disabled = True
@@ -195,7 +201,9 @@ class LoansMigrator(MigrationTaskBase):
195
201
  if not smtp_config_disabled:
196
202
  logging.warn("SMTP connection not disabled...")
197
203
  for i in range(10, 0, -1):
198
- sys.stdout.write("Pausing for {:02d} seconds. Press Ctrl+C to exit...\r".format(i))
204
+ sys.stdout.write(
205
+ "Pausing for {:02d} seconds. Press Ctrl+C to exit...\r".format(i)
206
+ )
199
207
  time.sleep(1)
200
208
  else:
201
209
  logging.info("SMTP connection is disabled...")
@@ -219,13 +227,21 @@ class LoansMigrator(MigrationTaskBase):
219
227
  )
220
228
  try:
221
229
  self.checkout_single_loan(legacy_loan)
230
+ except TransformationRecordFailedError as ee:
231
+ logging.error(
232
+ f"Transformation failed in row {num_loans} Item barcode: {legacy_loan.item_barcode} "
233
+ f"Patron barcode: {legacy_loan.patron_barcode}"
234
+ )
235
+ ee.log_it()
222
236
  except Exception as ee:
223
237
  logging.exception(
224
238
  f"Error in row {num_loans} Item barcode: {legacy_loan.item_barcode} "
225
239
  f"Patron barcode: {legacy_loan.patron_barcode} {ee}"
226
240
  )
227
241
  if num_loans % 25 == 0:
228
- logging.info(f"{timings(self.t0, t0_migration, num_loans)} {num_loans}")
242
+ logging.info(
243
+ f"{timings(self.t0, t0_migration, num_loans)} {num_loans}"
244
+ )
229
245
 
230
246
  def checkout_single_loan(self, legacy_loan: LegacyLoan):
231
247
  """Checks a legacy loan out. Retries once if it fails.
@@ -234,17 +250,23 @@ class LoansMigrator(MigrationTaskBase):
234
250
  legacy_loan (LegacyLoan): The Legacy loan
235
251
  """
236
252
  res_checkout = self.circulation_helper.check_out_by_barcode(legacy_loan)
237
-
253
+
238
254
  if res_checkout.was_successful:
239
255
  self.migration_report.add("Details", i18n.t("Checked out on first try"))
240
- self.migration_report.add_general_statistics(i18n.t("Successfully checked out"))
256
+ self.migration_report.add_general_statistics(
257
+ i18n.t("Successfully checked out")
258
+ )
241
259
  self.set_renewal_count(legacy_loan, res_checkout)
242
260
  self.set_new_status(legacy_loan, res_checkout)
243
261
  elif res_checkout.should_be_retried:
244
262
  res_checkout2 = self.handle_checkout_failure(legacy_loan, res_checkout)
245
263
  if res_checkout2.was_successful and res_checkout2.folio_loan:
246
- self.migration_report.add("Details", i18n.t("Checked out on second try"))
247
- self.migration_report.add_general_statistics(i18n.t("Successfully checked out"))
264
+ self.migration_report.add(
265
+ "Details", i18n.t("Checked out on second try")
266
+ )
267
+ self.migration_report.add_general_statistics(
268
+ i18n.t("Successfully checked out")
269
+ )
248
270
  logging.info("Checked out on second try")
249
271
  self.set_renewal_count(legacy_loan, res_checkout2)
250
272
  self.set_new_status(legacy_loan, res_checkout2)
@@ -252,7 +274,8 @@ class LoansMigrator(MigrationTaskBase):
252
274
  if res_checkout2.error_message == "Aged to lost and checked out":
253
275
  self.migration_report.add(
254
276
  "Details",
255
- i18n.t("Second failure") + f": {res_checkout2.migration_report_message}",
277
+ i18n.t("Second failure")
278
+ + f": {res_checkout2.migration_report_message}",
256
279
  )
257
280
  logging.error(
258
281
  f"{res_checkout2.error_message}. Item barcode: {legacy_loan.item_barcode}"
@@ -260,16 +283,23 @@ class LoansMigrator(MigrationTaskBase):
260
283
  else:
261
284
  self.failed[legacy_loan.item_barcode] = legacy_loan
262
285
  self.migration_report.add_general_statistics(i18n.t("Failed loans"))
263
- Helper.log_data_issue(
264
- "", "Loans failing during checkout", json.dumps(legacy_loan.to_dict())
286
+ logging.error(
287
+ "Failed on second try: %s", res_checkout2.error_message
265
288
  )
266
- logging.error("Failed on second try: %s", res_checkout2.error_message)
267
289
  self.migration_report.add(
268
290
  "Details",
269
- i18n.t("Second failure") + f": {res_checkout2.migration_report_message}",
291
+ i18n.t("Second failure")
292
+ + f": {res_checkout2.migration_report_message}",
293
+ )
294
+ raise TransformationRecordFailedError(
295
+ f"Row {legacy_loan.row}",
296
+ i18n.t("Loans failing during checkout, second try"),
297
+ json.dumps(legacy_loan.to_dict()),
270
298
  )
271
299
  elif not res_checkout.should_be_retried:
272
- logging.error("Failed first time. No retries: %s", res_checkout.error_message)
300
+ logging.error(
301
+ "Failed first time. No retries: %s", res_checkout.error_message
302
+ )
273
303
  self.migration_report.add_general_statistics(i18n.t("Failed loans"))
274
304
  self.migration_report.add(
275
305
  "Details",
@@ -277,8 +307,10 @@ class LoansMigrator(MigrationTaskBase):
277
307
  + f": {res_checkout.migration_report_message}",
278
308
  )
279
309
  self.failed[legacy_loan.item_barcode] = legacy_loan
280
- Helper.log_data_issue(
281
- "", "Loans failing during checkout", json.dumps(legacy_loan.to_dict())
310
+ raise TransformationRecordFailedError(
311
+ f"Row {legacy_loan.row}",
312
+ i18n.t("Loans failing during checkout"),
313
+ json.dumps(legacy_loan.to_dict()),
282
314
  )
283
315
 
284
316
  def set_new_status(self, legacy_loan: LegacyLoan, res_checkout: TransactionResult):
@@ -296,10 +328,14 @@ class LoansMigrator(MigrationTaskBase):
296
328
  elif legacy_loan.next_item_status not in ["Available", "", "Checked out"]:
297
329
  self.set_item_status(legacy_loan)
298
330
 
299
- def set_renewal_count(self, legacy_loan: LegacyLoan, res_checkout: TransactionResult):
331
+ def set_renewal_count(
332
+ self, legacy_loan: LegacyLoan, res_checkout: TransactionResult
333
+ ):
300
334
  if legacy_loan.renewal_count > 0:
301
335
  self.update_open_loan(res_checkout.folio_loan, legacy_loan)
302
- self.migration_report.add_general_statistics(i18n.t("Updated renewal count for loan"))
336
+ self.migration_report.add_general_statistics(
337
+ i18n.t("Updated renewal count for loan")
338
+ )
303
339
 
304
340
  def wrap_up(self):
305
341
  for k, v in self.failed.items():
@@ -316,15 +352,19 @@ class LoansMigrator(MigrationTaskBase):
316
352
 
317
353
  def write_failed_loans_to_file(self):
318
354
  csv_columns = [
319
- "due_date",
355
+ "patron_barcode",
356
+ "proxy_patron_barcode",
320
357
  "item_barcode",
321
- "next_item_status",
358
+ "due_date",
322
359
  "out_date",
323
- "patron_barcode",
360
+ "next_item_status",
324
361
  "renewal_count",
362
+ "service_point_id",
325
363
  ]
326
364
  with open(self.folder_structure.failed_recs_path, "w+") as failed_loans_file:
327
- writer = csv.DictWriter(failed_loans_file, fieldnames=csv_columns, dialect="tsv")
365
+ writer = csv.DictWriter(
366
+ failed_loans_file, fieldnames=csv_columns, dialect="tsv"
367
+ )
328
368
  writer.writeheader()
329
369
  for _k, failed_loan in self.failed_and_not_dupe.items():
330
370
  writer.writerow(failed_loan[0])
@@ -339,12 +379,16 @@ class LoansMigrator(MigrationTaskBase):
339
379
  user_barcodes, self.task_configuration.patron_files, self.folder_structure
340
380
  )
341
381
  for loan in self.semi_valid_legacy_loans:
342
- has_item_barcode = loan.item_barcode in item_barcodes or not any(item_barcodes)
343
- has_patron_barcode = loan.patron_barcode in user_barcodes or not any(user_barcodes)
382
+ has_item_barcode = loan.item_barcode in item_barcodes or not any(
383
+ item_barcodes
384
+ )
385
+ has_patron_barcode = loan.patron_barcode in user_barcodes or not any(
386
+ user_barcodes
387
+ )
344
388
  has_proxy_barcode = True
345
389
  if loan.proxy_patron_barcode:
346
- has_proxy_barcode = loan.proxy_patron_barcode in user_barcodes or not any(
347
- user_barcodes
390
+ has_proxy_barcode = (
391
+ loan.proxy_patron_barcode in user_barcodes or not any(user_barcodes)
348
392
  )
349
393
  if has_item_barcode and has_patron_barcode and has_proxy_barcode:
350
394
  self.migration_report.add_general_statistics(
@@ -364,7 +408,7 @@ class LoansMigrator(MigrationTaskBase):
364
408
  + f": {has_proxy_barcode}",
365
409
  )
366
410
  if not has_item_barcode:
367
- Helper.log_data_issue(
411
+ Helper.log_data_issue_failed(
368
412
  "", "Loan without matched item barcode", json.dumps(loan.to_dict())
369
413
  )
370
414
  if not has_patron_barcode:
@@ -374,11 +418,15 @@ class LoansMigrator(MigrationTaskBase):
374
418
  json.dumps(loan.to_dict()),
375
419
  )
376
420
  if not has_proxy_barcode:
377
- Helper.log_data_issue(
378
- "", "Loan without matched proxy patron barcode", json.dumps(loan.to_dict())
421
+ Helper.log_data_issue_failed(
422
+ "",
423
+ "Loan without matched proxy patron barcode",
424
+ json.dumps(loan.to_dict()),
379
425
  )
380
426
 
381
- def load_and_validate_legacy_loans(self, loans_reader, service_point_id: str) -> list:
427
+ def load_and_validate_legacy_loans(
428
+ self, loans_reader, service_point_id: str
429
+ ) -> list:
382
430
  results = []
383
431
  num_bad = 0
384
432
  logging.info("Validating legacy loans in file...")
@@ -398,7 +446,9 @@ class LoansMigrator(MigrationTaskBase):
398
446
  )
399
447
  self.migration_report.add_general_statistics(i18n.t("Failed loans"))
400
448
  for error in legacy_loan.errors:
401
- self.migration_report.add("DiscardedLoans", f"{error[0]} - {error[1]}")
449
+ self.migration_report.add(
450
+ "DiscardedLoans", f"{error[0]} - {error[1]}"
451
+ )
402
452
  # Add this loan to failed loans for later correction and re-run.
403
453
  self.failed[
404
454
  legacy_loan.item_barcode or f"no_barcode_{legacy_loan_count}"
@@ -416,7 +466,9 @@ class LoansMigrator(MigrationTaskBase):
416
466
  )
417
467
  trfe.log_it()
418
468
  self.failed[
419
- legacy_loan_dict.get("item_barcode", f"no_barcode_{legacy_loan_count}")
469
+ legacy_loan_dict.get(
470
+ "item_barcode", f"no_barcode_{legacy_loan_count}"
471
+ )
420
472
  ] = legacy_loan_dict
421
473
  except ValueError as ve:
422
474
  logging.exception(ve)
@@ -457,13 +509,22 @@ class LoansMigrator(MigrationTaskBase):
457
509
  elif folio_checkout.error_message.startswith(
458
510
  "Cannot check out item that already has an open loan"
459
511
  ):
460
- return folio_checkout
512
+ return self.handle_checked_out_item(legacy_loan)
513
+ elif "Item is already checked out" in folio_checkout.error_message:
514
+ return self.handle_checked_out_item(legacy_loan)
461
515
  elif "Aged to lost" in folio_checkout.error_message:
462
- return self.handle_aged_to_lost_item(legacy_loan)
516
+ return self.handle_lost_item(legacy_loan, "Aged to lost")
463
517
  elif folio_checkout.error_message == "Declared lost":
464
- return folio_checkout
465
- elif folio_checkout.error_message.startswith("Cannot check out to inactive user"):
518
+ return self.handle_lost_item(legacy_loan, "Declared lost")
519
+ elif folio_checkout.error_message.startswith(
520
+ "Cannot check out to inactive user"
521
+ ):
466
522
  return self.checkout_to_inactive_user(legacy_loan)
523
+ elif (
524
+ "has the item status Claimed returned and cannot be checked out"
525
+ in folio_checkout.error_message
526
+ ):
527
+ return self.handle_claimed_returned_item(legacy_loan)
467
528
  else:
468
529
  self.migration_report.add(
469
530
  "Details",
@@ -489,7 +550,9 @@ class LoansMigrator(MigrationTaskBase):
489
550
  f"Duplicate loans (or failed twice) Item barcode: "
490
551
  f"{legacy_loan.item_barcode} Patron barcode: {legacy_loan.patron_barcode}"
491
552
  )
492
- self.migration_report.add("Details", i18n.t("Duplicate loans (or failed twice)"))
553
+ self.migration_report.add(
554
+ "Details", i18n.t("Duplicate loans (or failed twice)")
555
+ )
493
556
  del self.failed[legacy_loan.item_barcode]
494
557
  return TransactionResult(False, False, "", "", "")
495
558
 
@@ -500,7 +563,9 @@ class LoansMigrator(MigrationTaskBase):
500
563
  user["expirationDate"] = datetime.isoformat(datetime.now() + timedelta(days=1))
501
564
  self.activate_user(user)
502
565
  logging.debug("Successfully Activated user")
503
- res = self.circulation_helper.check_out_by_barcode(legacy_loan) # checkout_and_update
566
+ res = self.circulation_helper.check_out_by_barcode(
567
+ legacy_loan
568
+ ) # checkout_and_update
504
569
  if res.should_be_retried:
505
570
  res = self.handle_checkout_failure(legacy_loan, res)
506
571
  self.migration_report.add("Details", res.migration_report_message)
@@ -509,28 +574,92 @@ class LoansMigrator(MigrationTaskBase):
509
574
  self.migration_report.add("Details", i18n.t("Handled inactive users"))
510
575
  return res
511
576
 
512
- def handle_aged_to_lost_item(self, legacy_loan: LegacyLoan) -> TransactionResult:
577
+ def handle_checked_out_item(self, legacy_loan: LegacyLoan) -> TransactionResult:
513
578
  if self.circulation_helper.is_checked_out(legacy_loan):
514
579
  return TransactionResult(
515
580
  False,
516
581
  False,
517
582
  legacy_loan,
518
- i18n.t("Aged to lost and checked out"),
519
- i18n.t("Aged to lost and checked out"),
583
+ i18n.t(
584
+ "Loan already exists for %{item_barcode}",
585
+ item_barcode=legacy_loan.item_barcode,
586
+ ),
587
+ i18n.t(
588
+ "Loan already exists for %{item_barcode}",
589
+ item_barcode=legacy_loan.item_barcode,
590
+ ),
520
591
  )
521
-
522
592
  else:
523
- logging.debug("Setting Available")
593
+ logging.debug(
594
+ i18n.t(
595
+ 'Setting item %{item_barcode} to status "Available"',
596
+ item_barcode=legacy_loan.item_barcode,
597
+ )
598
+ )
524
599
  legacy_loan.next_item_status = "Available"
525
600
  self.set_item_status(legacy_loan)
526
601
  res_checkout = self.circulation_helper.check_out_by_barcode(legacy_loan)
527
- legacy_loan.next_item_status = "Aged to lost"
602
+ legacy_loan.next_item_status = "Checked out"
603
+ return res_checkout
604
+
605
+ def handle_lost_item(
606
+ self,
607
+ legacy_loan: LegacyLoan,
608
+ lost_type: Literal["Aged to lost", "Declared lost"],
609
+ ) -> TransactionResult:
610
+ if self.circulation_helper.is_checked_out(legacy_loan):
611
+ return TransactionResult(
612
+ False,
613
+ False,
614
+ legacy_loan,
615
+ i18n.t("%{lost_type} and checked out", lost_type=lost_type),
616
+ i18n.t("%{lost_type} and checked out", lost_type=lost_type),
617
+ )
618
+
619
+ else:
620
+ logging.debug(
621
+ 'Setting item %{item_barcode} to status "Available"',
622
+ item_barcode=legacy_loan.item_barcode,
623
+ )
624
+ legacy_loan.next_item_status = "Available"
528
625
  self.set_item_status(legacy_loan)
529
- s = "Successfully Checked out Aged to lost item and put the status back"
626
+ res_checkout = self.circulation_helper.check_out_by_barcode(legacy_loan)
627
+ legacy_loan.next_item_status = lost_type
628
+ if lost_type == "Aged to lost":
629
+ self.set_item_status(legacy_loan)
630
+ s = i18n.t(
631
+ "Successfully Checked out %{lost_type} item and put the status back",
632
+ lost_type=lost_type,
633
+ )
634
+ else:
635
+ s = i18n.t(
636
+ "Successfully Checked out %{lost_type} item. Item will be declared lost.",
637
+ lost_type=lost_type,
638
+ )
530
639
  logging.info(s)
531
640
  self.migration_report.add("Details", s)
532
641
  return res_checkout
533
642
 
643
+ def handle_claimed_returned_item(self, legacy_loan: LegacyLoan):
644
+ if self.circulation_helper.is_checked_out(legacy_loan):
645
+ return TransactionResult(
646
+ False,
647
+ False,
648
+ legacy_loan,
649
+ i18n.t("Claimed returned and checked out"),
650
+ i18n.t("Claimed returned and checked out"),
651
+ )
652
+ else:
653
+ logging.debug(
654
+ 'Setting item %{item_barcode} to status "Available"',
655
+ item_barcode=legacy_loan.item_barcode,
656
+ )
657
+ legacy_loan.next_item_status = "Available"
658
+ self.set_item_status(legacy_loan)
659
+ res_checkout = self.circulation_helper.check_out_by_barcode(legacy_loan)
660
+ legacy_loan.next_item_status = "Claimed returned"
661
+ return res_checkout
662
+
534
663
  def update_open_loan(self, folio_loan: dict, legacy_loan: LegacyLoan):
535
664
  due_date = du_parser.isoparse(str(legacy_loan.due_date))
536
665
  out_date = du_parser.isoparse(str(legacy_loan.out_date))
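The new handlers build their report messages with %{name} placeholders, matching the keys added to translations/en.json further down in this diff. A self-contained stand-in that mimics the interpolation i18n.t performs here (the real code uses the i18n package; this regex version is only for illustration):

```python
import re


def t(key: str, **kwargs) -> str:
    # Assume the key doubles as the English template, as in en.json.
    return re.sub(r"%\{(\w+)\}", lambda m: str(kwargs[m.group(1)]), key)


print(t("%{lost_type} and checked out", lost_type="Declared lost"))
print(t("Loan already exists for %{item_barcode}", item_barcode="31234000056789"))
```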
@@ -541,7 +670,9 @@ class LoansMigrator(MigrationTaskBase):
541
670
  loan_to_put["dueDate"] = due_date.isoformat()
542
671
  loan_to_put["loanDate"] = out_date.isoformat()
543
672
  loan_to_put["renewalCount"] = renewal_count
544
- url = f"{self.folio_client.gateway_url}/circulation/loans/{loan_to_put['id']}"
673
+ url = (
674
+ f"{self.folio_client.gateway_url}/circulation/loans/{loan_to_put['id']}"
675
+ )
545
676
  req = self.http_client.put(
546
677
  url,
547
678
  headers=self.folio_client.okapi_headers,
@@ -562,7 +693,8 @@ class LoansMigrator(MigrationTaskBase):
562
693
  else:
563
694
  self.migration_report.add(
564
695
  "Details",
565
- i18n.t("Update open loan error http status") + f": {req.status_code}",
696
+ i18n.t("Update open loan error http status")
697
+ + f": {req.status_code}",
566
698
  )
567
699
  req.raise_for_status()
568
700
  logging.debug("Updating open loan was successful")
@@ -592,40 +724,59 @@ class LoansMigrator(MigrationTaskBase):
592
724
  "servicePointId": str(self.task_configuration.fallback_service_point_id),
593
725
  }
594
726
  logging.debug(f"Declare lost data: {json.dumps(data, indent=4)}")
595
- if self.folio_put_post(declare_lost_url, data, "POST", i18n.t("Declare item as lost")):
596
- self.migration_report.add("Details", i18n.t("Successfully declared loan as lost"))
727
+ if self.folio_put_post(
728
+ declare_lost_url, data, "POST", i18n.t("Declare item as lost")
729
+ ):
730
+ self.migration_report.add(
731
+ "Details", i18n.t("Successfully declared loan as lost")
732
+ )
597
733
  else:
598
734
  logging.error(f"Unsuccessfully declared loan {folio_loan} as lost")
599
- self.migration_report.add("Details", i18n.t("Unsuccessfully declared loan as lost"))
735
+ self.migration_report.add(
736
+ "Details", i18n.t("Unsuccessfully declared loan as lost")
737
+ )
600
738
 
601
739
  def claim_returned(self, folio_loan):
602
- claim_returned_url = f"/circulation/loans/{folio_loan['id']}/claim-item-returned"
740
+ claim_returned_url = (
741
+ f"/circulation/loans/{folio_loan['id']}/claim-item-returned"
742
+ )
603
743
  logging.debug(f"Claim returned url:{claim_returned_url}")
604
744
  due_date = du_parser.isoparse(folio_loan["dueDate"])
605
745
  data = {
606
- "itemClaimedReturnedDateTime": datetime.isoformat(due_date + timedelta(days=1)),
746
+ "itemClaimedReturnedDateTime": datetime.isoformat(
747
+ due_date + timedelta(days=1)
748
+ ),
607
749
  "comment": "Created at migration. Date is due date + 1 day",
608
750
  }
609
751
  logging.debug(f"Claim returned data:\t{json.dumps(data)}")
610
- if self.folio_put_post(claim_returned_url, data, "POST", i18n.t("Declare item as lost")):
752
+ if self.folio_put_post(
753
+ claim_returned_url, data, "POST", i18n.t("Claim item returned")
754
+ ):
611
755
  self.migration_report.add(
612
756
  "Details", i18n.t("Successfully declared loan as Claimed returned")
613
757
  )
614
758
  else:
615
- logging.error(f"Unsuccessfully declared loan {folio_loan} as Claimed returned")
759
+ logging.error(
760
+ f"Unsuccessfully declared loan {folio_loan} as Claimed returned"
761
+ )
616
762
  self.migration_report.add(
617
763
  "Details",
618
764
  i18n.t(
619
- "Unsuccessfully declared loan %{loan} as Claimed returned", loan=folio_loan
765
+ "Unsuccessfully declared loan %{loan} as Claimed returned",
766
+ loan=folio_loan,
620
767
  ),
621
768
  )
622
769
 
623
770
  def set_item_status(self, legacy_loan: LegacyLoan):
624
771
  try:
625
772
  # Get Item by barcode, update status.
626
- item_path = f'item-storage/items?query=(barcode=="{legacy_loan.item_barcode}")'
773
+ item_path = (
774
+ f'item-storage/items?query=(barcode=="{legacy_loan.item_barcode}")'
775
+ )
627
776
  item_url = f"{self.folio_client.gateway_url}/{item_path}"
628
- resp = self.http_client.get(item_url, headers=self.folio_client.okapi_headers)
777
+ resp = self.http_client.get(
778
+ item_url, headers=self.folio_client.okapi_headers
779
+ )
629
780
  resp.raise_for_status()
630
781
  data = resp.json()
631
782
  folio_item = data["items"][0]
@@ -675,11 +826,11 @@ class LoansMigrator(MigrationTaskBase):
675
826
  self.migration_report.add("Details", i18n.t("Successfully deactivated user"))
676
827
 
677
828
  def update_item(self, item):
678
- url = f'/item-storage/items/{item["id"]}'
829
+ url = f"/item-storage/items/{item['id']}"
679
830
  return self.folio_put_post(url, item, "PUT", i18n.t("Update item"))
680
831
 
681
832
  def update_user(self, user):
682
- url = f'/users/{user["id"]}'
833
+ url = f"/users/{user['id']}"
683
834
  self.folio_put_post(url, user, "PUT", i18n.t("Update user"))
684
835
 
685
836
  def get_user_by_barcode(self, barcode):
@@ -746,7 +897,9 @@ class LoansMigrator(MigrationTaskBase):
746
897
  try:
747
898
  api_path = f"{folio_loan['id']}/change-due-date"
748
899
  api_url = f"{self.folio_client.gateway_url}/circulation/loans/{api_path}"
749
- body = {"dueDate": du_parser.isoparse(str(legacy_loan.due_date)).isoformat()}
900
+ body = {
901
+ "dueDate": du_parser.isoparse(str(legacy_loan.due_date)).isoformat()
902
+ }
750
903
  req = self.http_client.post(
751
904
  api_url, headers=self.folio_client.okapi_headers, json=body
752
905
  )
@@ -762,12 +915,14 @@ class LoansMigrator(MigrationTaskBase):
762
915
  return False
763
916
  elif req.status_code == 201:
764
917
  self.migration_report.add(
765
- "Details", i18n.t("Successfully changed due date") + f" ({req.status_code})"
918
+ "Details",
919
+ i18n.t("Successfully changed due date") + f" ({req.status_code})",
766
920
  )
767
921
  return True, json.loads(req.text), None
768
922
  elif req.status_code == 204:
769
923
  self.migration_report.add(
770
- "Details", i18n.t("Successfully changed due date") + f" ({req.status_code})"
924
+ "Details",
925
+ i18n.t("Successfully changed due date") + f" ({req.status_code})",
771
926
  )
772
927
  return True, None, None
773
928
  else:
@@ -148,6 +148,7 @@ class OrganizationTransformer(MigrationTaskBase):
148
148
  self.mapper = OrganizationMapper(
149
149
  self.folio_client,
150
150
  self.library_configuration,
151
+ self.task_configuration,
151
152
  self.organization_map,
152
153
  self.load_ref_data_mapping_file(
153
154
  "organizationTypes",
@@ -7,6 +7,7 @@ from zoneinfo import ZoneInfo
7
7
  from dateutil import tz
8
8
  from dateutil.parser import parse, ParserError
9
9
 
10
+ from folio_migration_tools.helper import Helper
10
11
  from folio_migration_tools.migration_report import MigrationReport
11
12
  from folio_migration_tools.custom_exceptions import TransformationRecordFailedError
12
13
 
@@ -29,11 +30,13 @@ class LegacyLoan(object):
29
30
  "patron_barcode",
30
31
  "due_date",
31
32
  "out_date",
33
+ ]
34
+ optional_headers = [
35
+ "service_point_id",
36
+ "proxy_patron_barcode",
32
37
  "renewal_count",
33
38
  "next_item_status",
34
- "service_point_id",
35
39
  ]
36
- optional_headers = ["service_point_id", "proxy_patron_barcode"]
37
40
  legal_statuses = [
38
41
  "",
39
42
  "Aged to lost",
@@ -60,44 +63,67 @@ class LegacyLoan(object):
60
63
  temp_date_due: datetime = parse(self.legacy_loan_dict["due_date"])
61
64
  if temp_date_due.tzinfo != tz.UTC:
62
65
  temp_date_due = temp_date_due.replace(tzinfo=self.tenant_timezone)
63
- self.report(
66
+ Helper.log_data_issue(
67
+ self.row,
64
68
  f"Provided due_date is not UTC in {row=}, "
65
- f"setting tz-info to tenant timezone ({self.tenant_timezone})"
69
+ f"setting tz-info to tenant timezone ({self.tenant_timezone})",
70
+ json.dumps(self.legacy_loan_dict)
71
+ )
72
+ self.report(
73
+ f"Provided due_date is not UTC, setting tz-info to tenant timezone ({self.tenant_timezone})"
66
74
  )
67
75
  if temp_date_due.hour == 0 and temp_date_due.minute == 0:
68
76
  temp_date_due = temp_date_due.replace(hour=23, minute=59)
69
- self.report(
77
+ Helper.log_data_issue(
78
+ self.row,
70
79
  f"Hour and minute not specified for due date in {row=}. "
71
- "Assuming end of local calendar day (23:59)..."
80
+ "Assuming end of local calendar day (23:59)...",
81
+ json.dumps(self.legacy_loan_dict)
82
+ )
83
+ self.report(
84
+ "Hour and minute not specified for due date"
72
85
  )
73
86
  except (ParserError, OverflowError) as ee:
74
87
  logging.error(ee)
75
- self.errors.append((f"Parse date failure in {row=}. Setting UTC NOW", "due_date"))
88
+ self.errors.append(
89
+ (f"Parse date failure in {row=}. Setting UTC NOW", "due_date")
90
+ )
76
91
  temp_date_due = datetime.now(ZoneInfo("UTC"))
77
92
  try:
78
93
  temp_date_out: datetime = parse(self.legacy_loan_dict["out_date"])
79
94
  if temp_date_out.tzinfo != tz.UTC:
80
95
  temp_date_out = temp_date_out.replace(tzinfo=self.tenant_timezone)
81
- self.report(
96
+ Helper.log_data_issue(
97
+ self.row,
82
98
  f"Provided out_date is not UTC in {row=}, "
83
- f"setting tz-info to tenant timezone ({self.tenant_timezone})"
99
+ f"setting tz-info to tenant timezone ({self.tenant_timezone})",
100
+ json.dumps(self.legacy_loan_dict)
101
+ )
102
+ self.report(
103
+ f"Provided out_date is not UTC, setting tz-info to tenant timezone ({self.tenant_timezone})"
84
104
  )
85
105
  except (ParserError, OverflowError):
86
106
  temp_date_out = datetime.now(
87
107
  ZoneInfo("UTC")
88
108
  ) # TODO: Consider moving this assignment block above the temp_date_due
89
- self.errors.append((f"Parse date failure in {row=}. Setting UTC NOW", "out_date"))
109
+ self.errors.append(
110
+ (f"Parse date failure in {row=}. Setting UTC NOW", "out_date")
111
+ )
90
112
 
91
113
  # good to go, set properties
92
114
  self.item_barcode: str = self.legacy_loan_dict["item_barcode"].strip()
93
115
  self.patron_barcode: str = self.legacy_loan_dict["patron_barcode"].strip()
94
- self.proxy_patron_barcode: str = self.legacy_loan_dict.get("proxy_patron_barcode", "")
116
+ self.proxy_patron_barcode: str = self.legacy_loan_dict.get(
117
+ "proxy_patron_barcode", ""
118
+ )
95
119
  self.due_date: datetime = temp_date_due
96
120
  self.out_date: datetime = temp_date_out
97
121
  self.correct_for_1_day_loans()
98
122
  self.make_utc()
99
123
  self.renewal_count = self.set_renewal_count(self.legacy_loan_dict)
100
- self.next_item_status = self.legacy_loan_dict.get("next_item_status", "").strip()
124
+ self.next_item_status = self.legacy_loan_dict.get(
125
+ "next_item_status", ""
126
+ ).strip()
101
127
  if self.next_item_status not in legal_statuses:
102
128
  self.errors.append((f"Not an allowed status {row=}", self.next_item_status))
103
129
  self.service_point_id = (
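The date handling above normalizes legacy due/out dates: non-UTC or naive timestamps get the tenant timezone attached, a bare date is pushed to the end of the local day (23:59), and the resulting data issues are now also written to the data-issue log. A stand-alone sketch of that normalization, with an invented tenant timezone and the final UTC conversion folded in (the real class does it in make_utc):

```python
from datetime import datetime
from zoneinfo import ZoneInfo

from dateutil import tz
from dateutil.parser import parse

tenant_timezone = ZoneInfo("America/Chicago")  # illustrative tenant setting


def normalize_due_date(raw: str) -> datetime:
    due = parse(raw)
    if due.tzinfo != tz.UTC:
        due = due.replace(tzinfo=tenant_timezone)  # keep wall-clock time, set tz
    if due.hour == 0 and due.minute == 0:
        due = due.replace(hour=23, minute=59)      # assume end of local calendar day
    return due.astimezone(tz.UTC)


print(normalize_due_date("2024-05-01"))            # naive date -> 23:59 local -> UTC
print(normalize_due_date("2024-05-01T08:00:00Z"))  # already UTC -> unchanged
```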
@@ -112,9 +138,17 @@ class LegacyLoan(object):
112
138
  try:
113
139
  return int(renewal_count)
114
140
  except ValueError:
115
- self.report(f"Unresolvable {renewal_count=} was replaced with 0.")
141
+ Helper.log_data_issue(
142
+ self.row,
143
+ i18n.t("Unresolvable %{renewal_count=} was replaced with 0."),
144
+ json.dumps(loan)
145
+ )
116
146
  else:
117
- self.report(f"Missing renewal count was replaced with 0.")
147
+ Helper.log_data_issue(
148
+ self.row,
149
+ i18n.t("Missing renewal count was replaced with 0."),
150
+ json.dumps(loan)
151
+ )
118
152
  return 0
119
153
 
120
154
  def correct_for_1_day_loans(self):
@@ -129,17 +163,19 @@ class LegacyLoan(object):
129
163
  i18n.t(
130
164
  "Due date is before out date, or date information is missing from both"
131
165
  ),
132
- json.dumps(self.legacy_loan_dict, indent=2)
166
+ json.dumps(self.legacy_loan_dict, indent=2),
133
167
  )
134
168
 
135
169
  def to_dict(self):
136
170
  return {
137
171
  "item_barcode": self.item_barcode,
138
172
  "patron_barcode": self.patron_barcode,
173
+ "proxy_patron_barcode": self.proxy_patron_barcode,
139
174
  "due_date": self.due_date.isoformat(),
140
175
  "out_date": self.out_date.isoformat(),
141
176
  "renewal_count": self.renewal_count,
142
177
  "next_item_status": self.next_item_status,
178
+ "service_point_id": self.service_point_id,
143
179
  }
144
180
 
145
181
  def make_utc(self):
@@ -6,6 +6,7 @@
6
6
  "%{field} a, x and z are missing or empty": "%{field} a, x and z are missing or empty",
7
7
  "%{field} subfields a, x, and z missing from field": "%{field} subfields a, x, and z missing from field",
8
8
  "%{fro} mapped from %{record}": "%{fro} mapped from %{record}",
9
+ "%{lost_type} and checked out": "%{lost_type} and checked out",
9
10
  "%{props} were concatenated": "%{props} were concatenated",
10
11
  "%{schema_value} added to %{prop_name}": "%{schema_value} added to %{prop_name}",
11
12
  "%{tag} subfield %{subfield} not in field": "%{tag} subfield %{subfield} not in field",
@@ -36,6 +37,8 @@
36
37
  "Check mapping file against the schema.": "Check mapping file against the schema.",
37
38
  "Checked out on first try": "Checked out on first try",
38
39
  "Checked out on second try": "Checked out on second try",
40
+ "Claim item returned": "Claim item returned",
41
+ "Claimed returned and checked out": "Claimed returned and checked out",
39
42
  "Click to expand all %{count} things": {
40
43
  "many": "Click to expand all %{count} things",
41
44
  "one": "Click to expand one thing"
@@ -57,6 +60,7 @@
57
60
  "DATA ISSUE Users not in FOLIO": "DATA ISSUE Users not in FOLIO",
58
61
  "Data issue. Consider fixing the record. ": "Data issue. Consider fixing the record. ",
59
62
  "Declare item as lost": "Declare item as lost",
63
+ "Declared lost and checked out": "Declared lost and checked out",
60
64
  "Discarded reserves": "Discarded reserves",
61
65
  "Due date is before out date, or date information is missing from both": "Due date is before out date, or date information is missing from both",
62
66
  "Duplicate 001. Creating HRID instead.\n Previous 001 will be stored in a new 035 field": "Duplicate 001. Creating HRID instead.\n Previous 001 will be stored in a new 035 field",
@@ -88,6 +92,7 @@
88
92
  "Instances HRID starting number": "Instances HRID starting number",
89
93
  "Instances linked using instances_id_map": "Instances linked using instances_id_map",
90
94
  "Interfaces": "Interfaces",
95
+ "Invalid specific retention policy in 008/13-15: %{value}": "Invalid specific retention policy in 008/13-15: %{value}",
91
96
  "Inventory records written to disk": "Inventory records written to disk",
92
97
  "Item lookups performed": "Item lookups performed",
93
98
  "Item transformation report": "Item transformation report",
@@ -95,6 +100,7 @@
95
100
  "Legacy Field": "Legacy Field",
96
101
  "Legacy bib records without 001": "Legacy bib records without 001",
97
102
  "Legacy id is empty": "Legacy id is empty",
103
+ "Loan already exists for %{item_barcode}": "Loan already exists for %{item_barcode}",
98
104
  "Loan already in failed.": "Loan already in failed.",
99
105
  "Loans discarded. Had migrated item barcode": "Loans discarded. Had migrated item barcode",
100
106
  "Loans failed pre-validation": "Loans failed pre-validation",
@@ -176,6 +182,7 @@
176
182
  "Reserve discarded. Could not find migrated barcode": "Reserve discarded. Could not find migrated barcode",
177
183
  "Reserve verified against migrated item": "Reserve verified against migrated item",
178
184
  "Reserves migration report": "Reserves migration report",
185
+ "Retention policy 6 indicates a limited period. Specific retention period will be mapped from 008/13-15": "Retention policy 6 indicates a limited period. Specific retention period will be mapped from 008/13-15",
179
186
  "Rows merged to create Purchase Orders": "Rows merged to create Purchase Orders",
180
187
  "SRS records written to disk": "SRS records written to disk",
181
188
  "Second failure": "Second failure",
@@ -186,6 +193,7 @@
186
193
  "Set leader 11 (Subfield code count) from %{record} to 2": "Set leader 11 (Subfield code count) from %{record} to 2",
187
194
  "Set leader 20-23 from %{field} to 4500": "Set leader 20-23 from %{field} to 4500",
188
195
  "Set up statistical code id mapping...": "Set up statistical code id mapping...",
196
+ "Setting item %{item_barcode} to status \"Available\"": "Setting item %{item_barcode} to status \"Available\"",
189
197
  "Source digits": "Source digits",
190
198
  "Source of heading or term": "Source of heading or term",
191
199
  "Staff suppressed": "Staff suppressed",
@@ -201,6 +209,8 @@
201
209
  "Successful matching on %{criteria}": "Successful matching on %{criteria}",
202
210
  "Successful user transformations": "Successful user transformations",
203
211
  "Successfully %{action}": "Successfully %{action}",
212
+ "Successfully Checked out %{lost_type} item and put the status back": "Successfully Checked out %{lost_type} item and put the status back",
213
+ "Successfully Checked out %{lost_type} item. Item will be declared lost.": "Successfully Checked out %{lost_type} item. Item will be declared lost.",
204
214
  "Successfully activated user": "Successfully activated user",
205
215
  "Successfully changed due date": "Successfully changed due date",
206
216
  "Successfully checked out": "Successfully checked out",
@@ -280,6 +290,8 @@
280
290
  "blurbs.Details.title": "Details",
281
291
  "blurbs.DiffsBetweenOrders.description": "This is a technical report that helps you to identify differences in the mapped order fields. ",
282
292
  "blurbs.DiffsBetweenOrders.title": "Differences between generated orders with same Legacy Identifier",
293
+ "blurbs.DigitizationPolicyMapping.description": "Digitization policies mapped from `008[21]` (LoC documentation)[https://www.loc.gov/marc/holdings/hd008.html]",
294
+ "blurbs.DigitizationPolicyMapping.title": "Digitization policy",
283
295
  "blurbs.DiscardedLoans.description": "List of loans discarded for various resons",
284
296
  "blurbs.DiscardedLoans.title": "Discarded loans",
285
297
  "blurbs.DiscardedRequests.description": "List of requests discarded for various resons",
@@ -320,6 +332,8 @@
320
332
  "blurbs.HoldingsTypeMapping.title": "Holdings type mapping",
321
333
  "blurbs.HridHandling.description": "There are two ways of handling HRIDs. The default behaviour is to take the current 001 and move that to a new 035. This will also emerge as an Identifier on the Inventory Instances. The 001 and Instance HRID will be generated from the HRID settings in FOLIO. The second option is to maintain the 001s in the records, and also add this as the Instance HRID",
322
334
  "blurbs.HridHandling.title": "HRID and 001/035 handling",
335
+ "blurbs.ILLPolicyMapping.description": "ILL policies mapped from `008[20]` (LoC documentation)[https://www.loc.gov/marc/holdings/hd008.html]",
336
+ "blurbs.ILLPolicyMapping.title": "ILL policy",
323
337
  "blurbs.IncompleteEntityMapping.description": "**NO ACTION REQUIRED** <br/>This is a coding anomaly that FSE will look into. <br/>Usually, the library does not have to do anything about it.<br/> One thing to look at is if there are many repeated subfields or unexpected patterns of subfields in the table.",
324
338
  "blurbs.IncompleteEntityMapping.title": "Incomplete entity mapping adding entity",
325
339
  "blurbs.IncompleteSubPropertyRemoved.description": "Add the missing required information to the record in your current ILS to ensure that it can be migrated over.",
@@ -376,6 +390,8 @@
376
390
  "blurbs.MatchedModesOfIssuanceCode.title": "Matched Modes of issuance code",
377
391
  "blurbs.MaterialTypeMapping.description": "",
378
392
  "blurbs.MaterialTypeMapping.title": "Mapped Material Types",
393
+ "blurbs.MethodOfAcquisitionMapping.description": "Acquisition methods mapped from `008[7]` (LoC documentation)[https://www.loc.gov/marc/holdings/hd008.html]",
394
+ "blurbs.MethodOfAcquisitionMapping.title": "Method of acquisition",
379
395
  "blurbs.MissingInstanceTypeIds.description": "**IC ACTION REQUIRED** These reords should get an instance type ID mapped from 336, or a default of Undefined, or they will not be transformed.",
380
396
  "blurbs.MissingInstanceTypeIds.title": "Records without Instance Type Ids",
381
397
  "blurbs.MissingRequiredProperties.description": "",
@@ -408,6 +424,8 @@
408
424
  "blurbs.RecourceTypeMapping.title": "Resource Type Mapping (336)",
409
425
  "blurbs.ReferenceDataMapping.description": "",
410
426
  "blurbs.ReferenceDataMapping.title": "Reference Data Mapping",
427
+ "blurbs.RetentionPolicyMapping.description": "Retention policies mapped from `008[12-15]` (LoC documentation)[https://www.loc.gov/marc/holdings/hd008.html]",
428
+ "blurbs.RetentionPolicyMapping.title": "Retention policy",
411
429
  "blurbs.Section1.description": "This entries below seem to be related to instances",
412
430
  "blurbs.Section1.title": "__Section 1: instances",
413
431
  "blurbs.Section2.description": "The entries below seem to be related to holdings",
@@ -446,15 +464,6 @@
446
464
  "blurbs.ValueSetInMappingFile.title": "Value set in mapping file",
447
465
  "blurbs.ValuesMappedFromLegacyFields.description": "A list fo the values and what they were mapped to",
448
466
  "blurbs.ValuesMappedFromLegacyFields.title": "Values mapped from legacy fields",
449
- "blurbs.MethodOfAcquisitionMapping.title": "Method of acquisition",
450
- "blurbs.MethodOfAcquisitionMapping.description": "Acquisition methods mapped from `008[7]` (LoC documentation)[https://www.loc.gov/marc/holdings/hd008.html]",
451
- "blurbs.RetentionPolicyMapping.title": "Retention policy",
452
- "blurbs.RetentionPolicyMapping.description": "Retention policies mapped from `008[12-15]` (LoC documentation)[https://www.loc.gov/marc/holdings/hd008.html]",
453
- "blurbs.ILLPolicyMapping.title": "ILL policy",
454
- "blurbs.ILLPolicyMapping.description": "ILL policies mapped from `008[20]` (LoC documentation)[https://www.loc.gov/marc/holdings/hd008.html]",
455
- "blurbs.DigitizationPolicyMapping.title": "Digitization policy",
456
- "blurbs.DigitizationPolicyMapping.description": "Digitization policies mapped from `008[21]` (LoC documentation)[https://www.loc.gov/marc/holdings/hd008.html]",
457
- "Invalid specific retention policy in 008/13-15: %{value}": "Invalid specific retention policy in 008/13-15: %{value}",
458
467
  "created": "created",
459
468
  "instance type code (%{code}) not found in FOLIO": "instance type code (%{code}) not found in FOLIO",
460
469
  "item barcode": "item barcode",
@@ -462,4 +471,4 @@
462
471
  "naturalId mapped from %{fro}": "naturalId mapped from %{fro}",
463
472
  "no matching identifier_types in %{names}": "no matching identifier_types in %{names}",
464
473
  "subfield present in %{linked_value_tag} but not in %{pattern_field}": "subfield present in %{linked_value_tag} but not in %{pattern_field}"
465
- }
474
+ }
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.3
2
2
  Name: folio_migration_tools
3
- Version: 1.9.3
3
+ Version: 1.9.5
4
4
  Summary: A tool allowing you to migrate data from legacy ILS:s (Library systems) into FOLIO LSP
5
5
  License: MIT
6
6
  Keywords: FOLIO,ILS,LSP,Library Systems,MARC21,Library data
@@ -19,6 +19,7 @@ Requires-Dist: argparse-prompt (>=0.0.5,<0.0.6)
19
19
  Requires-Dist: art (>=6.5,<7.0)
20
20
  Requires-Dist: deepdiff (>=6.2.3,<7.0.0)
21
21
  Requires-Dist: defusedxml (>=0.7.1,<0.8.0)
22
+ Requires-Dist: folio-data-import (>=0.3.2,<0.4.0)
22
23
  Requires-Dist: folio-uuid (>=0.2.8,<0.3.0)
23
24
  Requires-Dist: folioclient (>=0.70.1,<0.71.0)
24
25
  Requires-Dist: pyaml (>=21.10.1,<22.0.0)
@@ -1,13 +1,13 @@
1
1
  folio_migration_tools/__init__.py,sha256=DXvzUKFSpSZjflFWaNm0L8yhFk0u7RVIvQMskwMmKFc,238
2
2
  folio_migration_tools/__main__.py,sha256=kfo4iKf3GJD7deh4RvIizKnC4zvIgCpNo-Bs7HBM34s,8453
3
- folio_migration_tools/circulation_helper.py,sha256=V2VM30i2OigOKb64B4FFKTeHu9NTkhptalaOfziPhTo,14199
3
+ folio_migration_tools/circulation_helper.py,sha256=iib2p0WGzOcAnwaJppDYiONc-1Jdna66dPAtBmAIuCE,14538
4
4
  folio_migration_tools/colors.py,sha256=GP0wdI_GZ2WD5SjrbPN-S3u8vvN_u6rGQIBBcWv_0ZM,227
5
5
  folio_migration_tools/config_file_load.py,sha256=zHHa6NDkN6EJiQE4DgjrFQPVKsd70POsfbGkB8308jg,2822
6
6
  folio_migration_tools/custom_dict.py,sha256=-FUnhKp90Dg8EHlY6twx-PYQxBUWEO7FgxL2b7pf-xk,678
7
7
  folio_migration_tools/custom_exceptions.py,sha256=1zgOKy3NBUVGG6i9YxK6w2Hntlea8MHmm7mdnjBtzvQ,2687
8
8
  folio_migration_tools/extradata_writer.py,sha256=fuchNcMc6BYb9IyfAcvXg7X4J2TfX6YiROfT2hr0JMw,1678
9
9
  folio_migration_tools/folder_structure.py,sha256=bZlmKGtxdytWcqjnM2lE4Vpx4nHyYRk7CNL1tZhLtXY,6917
10
- folio_migration_tools/helper.py,sha256=KkOkNAGO_fuYqxdLrsbLzCJLQHUrFZG1NzD4RmpQ-KM,2804
10
+ folio_migration_tools/helper.py,sha256=iWQhdcuzbGzVpEAiHQczO4hdhIH_iciLEp1SCGDynMI,2983
11
11
  folio_migration_tools/holdings_helper.py,sha256=yJpz6aJrKRBiJ1MtT5bs2vXAc88uJuGh2_KDuCySOKc,7559
12
12
  folio_migration_tools/i18n_config.py,sha256=3AH_2b9zTsxE4XTe4isM_zYtPJSlK0ix6eBmV7kAYUM,228
13
13
  folio_migration_tools/library_configuration.py,sha256=LzICsZQdOkXwIqdDfh59x0-Cx77Lb18qVnWroNqekS8,7046
@@ -32,22 +32,22 @@ folio_migration_tools/marc_rules_transformation/marc_file_processor.py,sha256=Qh
32
32
  folio_migration_tools/marc_rules_transformation/marc_reader_wrapper.py,sha256=9ATjYMRAjy0QcXtmNZaHVhHLJ5hE1WUgOcF6KMJjbgo,5309
33
33
  folio_migration_tools/marc_rules_transformation/rules_mapper_authorities.py,sha256=PGt2w8h2pj8_8sGjQe3L-odFDlquURtKnoNFRWQB3GI,9621
34
34
  folio_migration_tools/marc_rules_transformation/rules_mapper_base.py,sha256=loNZ9gEYaAwjkP2_wLlXGedjWvSdHoGF_oJN9g6gI3s,45928
35
- folio_migration_tools/marc_rules_transformation/rules_mapper_bibs.py,sha256=GYZmVrEKcHkOEH4U3027-vQjS6mfMbk84GJTqiVrD4E,30350
36
- folio_migration_tools/marc_rules_transformation/rules_mapper_holdings.py,sha256=wT9HDodIRYeGbjutVHDHpSBVWrXsuA2LO8e_MmBMmzE,28498
35
+ folio_migration_tools/marc_rules_transformation/rules_mapper_bibs.py,sha256=RD46EzS0NQArn5LCGbrxDm9vbbW9PO_6iNUQwJBAbSg,30364
36
+ folio_migration_tools/marc_rules_transformation/rules_mapper_holdings.py,sha256=ZgyDxmNE7LwW8Cd55wRIEE-u6iyMKCRRXdq2ZRjm2nc,28779
37
37
  folio_migration_tools/migration_report.py,sha256=BkRspM1hwTBnWeqsHamf7yVEofzLj560Q-9G--O00hw,4258
38
38
  folio_migration_tools/migration_tasks/__init__.py,sha256=ZkbY_yGyB84Ke8OMlYUzyyBj4cxxNrhMTwQlu_GbdDs,211
39
39
  folio_migration_tools/migration_tasks/authority_transformer.py,sha256=AoXg9s-GLO3yEEDCrQV7hc4YVXxwxsdxDdpj1zhHydE,4251
40
- folio_migration_tools/migration_tasks/batch_poster.py,sha256=xN1BBZNGW2lZHWPznF6nkYV15XGhwwzcZccCzTbPfA4,40868
40
+ folio_migration_tools/migration_tasks/batch_poster.py,sha256=7gH9KSdtTSbPIS3eXK6_JBi0OshUAupV8AEew9QBSoU,45327
41
41
  folio_migration_tools/migration_tasks/bibs_transformer.py,sha256=46d44pcDAodFXDYbrTCMRASISbDciXmA0CXYfhP2IaE,6298
42
42
  folio_migration_tools/migration_tasks/courses_migrator.py,sha256=CzXnsu-KGP7B4zcINJzLYUqz47D16NuFfzu_DPqRlTQ,7061
43
43
  folio_migration_tools/migration_tasks/holdings_csv_transformer.py,sha256=kMhtHE8DJjA4d6kXBcfflueha3R3nwlBQjdec8CaY8c,21926
44
44
  folio_migration_tools/migration_tasks/holdings_marc_transformer.py,sha256=c_ruhOgidyJdSnnRwWUs3wwFMiLqbVMPOhhCaYuH_TI,14343
45
- folio_migration_tools/migration_tasks/items_transformer.py,sha256=HlTzV7K0AiGBHw56VMascupMKXG0Pv8LS65O9EiQ2VU,19637
46
- folio_migration_tools/migration_tasks/loans_migrator.py,sha256=_7yZH951p5mhLjbyH1r496DG591dD1tg_mmTtHas62o,35316
45
+ folio_migration_tools/migration_tasks/items_transformer.py,sha256=oTbFX2saF7-ZCb1mO3baLvODnBSEbbN5F_GtSth3iG4,19755
46
+ folio_migration_tools/migration_tasks/loans_migrator.py,sha256=PF8DNpGKsppSDr7wX96Ao13UTFu6dl9cz2URLqSsOzE,40052
47
47
  folio_migration_tools/migration_tasks/manual_fee_fines_transformer.py,sha256=CnmlTge7nChUJ10EiUkriQtJlVxWqglgfhjgneh2_yM,7247
48
48
  folio_migration_tools/migration_tasks/migration_task_base.py,sha256=Q-57h6rmt74bC9LidA9ZoagEcwVd_ytq8IUWelVOm2E,22521
49
49
  folio_migration_tools/migration_tasks/orders_transformer.py,sha256=6SnzU_rUTu2B5hQykI2nRA7vI1rg-uxuF9Ncupe0AEY,14302
50
- folio_migration_tools/migration_tasks/organization_transformer.py,sha256=vcCjhN1sS55c_a0LXi1Yw1eq3zpDn5E4BGbm2zDQ_Z4,16885
50
+ folio_migration_tools/migration_tasks/organization_transformer.py,sha256=Kuxkh1sKyUVBqm5qAK1Jrq-4xcyNz2JPZvvFRqfwI8s,16922
51
51
  folio_migration_tools/migration_tasks/requests_migrator.py,sha256=QP9OBezC3FfcKpI78oMmydxcPaUIYAgHyKevyLwC-WQ,14841
52
52
  folio_migration_tools/migration_tasks/reserves_migrator.py,sha256=4sSPer6_6yMwiiY1VYJmYZske_Ah1XG4KAM3NDadPhg,9952
53
53
  folio_migration_tools/migration_tasks/user_transformer.py,sha256=aylrMC9n47fdStgsNfW4ZbJh2E4FDSPypsaNv52ynKc,12330
@@ -55,13 +55,13 @@ folio_migration_tools/task_configuration.py,sha256=73OWc8TX--fwPRptv3eQVEVv0-XmN
55
55
  folio_migration_tools/test_infrastructure/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
56
56
  folio_migration_tools/test_infrastructure/mocked_classes.py,sha256=BurU3NGU_Q8as_BGmW98q9O6bujZDkOfFmvKKdVw9t8,15056
57
57
  folio_migration_tools/transaction_migration/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
58
- folio_migration_tools/transaction_migration/legacy_loan.py,sha256=phd9oO6xd91qC4ilRq3podZ-rKIIwQ01SXe0JxbZAbQ,6339
58
+ folio_migration_tools/transaction_migration/legacy_loan.py,sha256=sLw2fCBao1VWZykOx-BWpMndLHjJNj-HJZFYqbpVV5A,7561
59
59
  folio_migration_tools/transaction_migration/legacy_request.py,sha256=1ulyFzPQw_InOjyPzkWpGnNptgXdQ18nmri0J8Nlpkc,6124
60
60
  folio_migration_tools/transaction_migration/legacy_reserve.py,sha256=qzw0okg4axAE_ezXopP9gFsQ_e60o0zh7zqRzFBSWHY,1806
61
61
  folio_migration_tools/transaction_migration/transaction_result.py,sha256=cTdCN0BnlI9_ZJB2Z3Fdkl9gpymIi-9mGZsRFlQcmDk,656
62
- folio_migration_tools/translations/en.json,sha256=TPQRTDdvdkZI2iHczP4hKmFEbd7Hyo5BE37uSo54W_4,40691
63
- folio_migration_tools-1.9.3.dist-info/LICENSE,sha256=PhIEkitVi3ejgq56tt6sWoJIG_zmv82cjjd_aYPPGdI,1072
64
- folio_migration_tools-1.9.3.dist-info/METADATA,sha256=dPvDnsZ0qw3K6pPfHatlCPfSCov_7d8Ll7L2pJYSta8,7444
65
- folio_migration_tools-1.9.3.dist-info/WHEEL,sha256=b4K_helf-jlQoXBBETfwnf4B04YC67LOev0jo4fX5m8,88
66
- folio_migration_tools-1.9.3.dist-info/entry_points.txt,sha256=Hbe-HjqMcU8FwVshVIkeWyZd9XwgT1CCMNf06EpHQu8,77
67
- folio_migration_tools-1.9.3.dist-info/RECORD,,
62
+ folio_migration_tools/translations/en.json,sha256=6IpYYNFCtQoXACndPM0d1Oa25GYuaF-G-b4YpzTjQH0,41656
63
+ folio_migration_tools-1.9.5.dist-info/LICENSE,sha256=PhIEkitVi3ejgq56tt6sWoJIG_zmv82cjjd_aYPPGdI,1072
64
+ folio_migration_tools-1.9.5.dist-info/METADATA,sha256=nkA-xhhnIR5-fawBRyzv7cLMqEycu7IqM00H5QqQpKs,7494
65
+ folio_migration_tools-1.9.5.dist-info/WHEEL,sha256=b4K_helf-jlQoXBBETfwnf4B04YC67LOev0jo4fX5m8,88
66
+ folio_migration_tools-1.9.5.dist-info/entry_points.txt,sha256=Hbe-HjqMcU8FwVshVIkeWyZd9XwgT1CCMNf06EpHQu8,77
67
+ folio_migration_tools-1.9.5.dist-info/RECORD,,