folio-migration-tools 1.9.0rc13-py3-none-any.whl → 1.9.2-py3-none-any.whl
This diff compares the contents of two publicly released versions of the package as published to their public registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear there.
- folio_migration_tools/mapping_file_transformation/holdings_mapper.py +18 -1
- folio_migration_tools/mapping_file_transformation/item_mapper.py +3 -3
- folio_migration_tools/mapping_file_transformation/mapping_file_mapper_base.py +29 -8
- folio_migration_tools/marc_rules_transformation/rules_mapper_base.py +10 -0
- folio_migration_tools/marc_rules_transformation/rules_mapper_bibs.py +1 -1
- folio_migration_tools/marc_rules_transformation/rules_mapper_holdings.py +41 -6
- folio_migration_tools/migration_tasks/batch_poster.py +65 -2
- folio_migration_tools/migration_tasks/holdings_csv_transformer.py +0 -17
- folio_migration_tools/migration_tasks/items_transformer.py +1 -1
- {folio_migration_tools-1.9.0rc13.dist-info → folio_migration_tools-1.9.2.dist-info}/METADATA +1 -1
- {folio_migration_tools-1.9.0rc13.dist-info → folio_migration_tools-1.9.2.dist-info}/RECORD +14 -14
- {folio_migration_tools-1.9.0rc13.dist-info → folio_migration_tools-1.9.2.dist-info}/LICENSE +0 -0
- {folio_migration_tools-1.9.0rc13.dist-info → folio_migration_tools-1.9.2.dist-info}/WHEEL +0 -0
- {folio_migration_tools-1.9.0rc13.dist-info → folio_migration_tools-1.9.2.dist-info}/entry_points.txt +0 -0
folio_migration_tools/mapping_file_transformation/holdings_mapper.py

@@ -1,10 +1,12 @@
 import ast
+import json
+import logging
 
 import i18n
 from folio_uuid.folio_uuid import FOLIONamespaces
 from folioclient import FolioClient
 
-from folio_migration_tools.custom_exceptions import TransformationRecordFailedError
+from folio_migration_tools.custom_exceptions import TransformationProcessError, TransformationRecordFailedError
 from folio_migration_tools.library_configuration import (
     FileDefinition,
     LibraryConfiguration,
@@ -59,6 +61,21 @@ class HoldingsMapper(MappingFileMapperBase):
             "name",
             "CallNumberTypeMapping",
         )
+        self.holdings_sources = self.get_holdings_sources()
+
+    def get_holdings_sources(self):
+        res = {}
+        holdings_sources = list(
+            self.folio_client.folio_get_all("/holdings-sources", "holdingsRecordsSources")
+        )
+        logging.info("Fetched %s holdingsRecordsSources from tenant", len(holdings_sources))
+        res = {n["name"].upper(): n["id"] for n in holdings_sources}
+        if "FOLIO" not in res:
+            raise TransformationProcessError("", "No holdings source with name FOLIO in tenant")
+        if "MARC" not in res:
+            raise TransformationProcessError("", "No holdings source with name MARC in tenant")
+        logging.info(json.dumps(res, indent=4))
+        return res
 
     def perform_additional_mappings(self, legacy_ids, folio_rec, file_def):
         self.handle_suppression(folio_rec, file_def)
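For context, the new `get_holdings_sources` helper turns the tenant's `/holdings-sources` reference data into a name-to-UUID lookup and fails fast when either required source is missing. A minimal sketch of that transformation, using a fabricated payload (the UUIDs below are placeholders, not tenant data):

# Fabricated response shaped like FOLIO's holdingsRecordsSources list.
holdings_sources = [
    {"id": "11111111-1111-1111-1111-111111111111", "name": "FOLIO"},
    {"id": "22222222-2222-2222-2222-222222222222", "name": "MARC"},
]

# Same keying strategy as the diff: upper-cased source name -> record id.
res = {n["name"].upper(): n["id"] for n in holdings_sources}
for required in ("FOLIO", "MARC"):
    if required not in res:
        raise RuntimeError(f"No holdings source with name {required} in tenant")
print(res["FOLIO"])  # the id later stamped onto holdings records as sourceId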
folio_migration_tools/mapping_file_transformation/item_mapper.py

@@ -2,7 +2,7 @@ import json
 import logging
 import sys
 from datetime import datetime, timezone
-from typing import Set
+from typing import Dict, List, Set, Union
 from uuid import uuid4
 
 import i18n
@@ -117,12 +117,12 @@ class ItemMapper(MappingFileMapperBase):
             "LocationMapping",
         )
 
-    def perform_additional_mappings(self, legacy_ids, folio_rec, file_def):
+    def perform_additional_mappings(self, legacy_ids: Union[str, List[str]], folio_rec: Dict, file_def: FileDefinition):
         self.handle_suppression(folio_rec, file_def)
         self.map_statistical_codes(folio_rec, file_def)
         self.map_statistical_code_ids(legacy_ids, folio_rec)
 
-    def handle_suppression(self, folio_record, file_def: FileDefinition):
+    def handle_suppression(self, folio_record: Dict, file_def: FileDefinition):
         folio_record["discoverySuppress"] = file_def.discovery_suppressed
         self.migration_report.add(
             "Suppression",
folio_migration_tools/mapping_file_transformation/mapping_file_mapper_base.py

@@ -171,14 +171,15 @@ class MappingFileMapperBase(MapperBase):
         object_type: FOLIONamespaces,
         accept_duplicate_ids: bool = False,
     ):
+        folio_object = {}
         if self.ignore_legacy_identifier:
-            return (
+            folio_object.update(
                 {
                     "id": str(uuid.uuid4()),
                     "type": "object",
-                },
-                index_or_id,
+                }
             )
+            return folio_object, index_or_id
 
         if not (
             legacy_id := " ".join(
@@ -205,13 +206,21 @@ class MappingFileMapperBase(MapperBase):
             )
         else:
             self.unique_record_ids.add(generated_id)
-        return (
+        folio_object.update(
             {
                 "id": generated_id,
                 "type": "object",
-            },
-            legacy_id,
+            }
         )
+        if object_type == FOLIONamespaces.holdings and hasattr(self, "holdings_sources"):
+            folio_object['sourceId'] = self.holdings_sources.get("FOLIO")
+        elif object_type == FOLIONamespaces.holdings and not hasattr(self, "holdings_sources"):
+            raise TransformationProcessError(
+                index_or_id,
+                "Holdings source not set in the mapper",
+                None
+            )
+        return folio_object, legacy_id
 
     def get_statistical_code(self, legacy_item: dict, folio_prop_name: str, index_or_id):
         if self.statistical_codes_mapping:
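Taken together with the HoldingsMapper change above, this moves the `sourceId` defaulting out of the CSV task (removed further down) and into the shared object factory: every holdings object now leaves construction carrying the tenant's FOLIO source id, and a mapper built without the lookup aborts the record instead of emitting source-less holdings. A stand-alone sketch of the guard, with a stand-in enum in place of `folio_uuid`'s `FOLIONamespaces`:

from enum import Enum, auto


class Namespaces(Enum):  # stand-in for folio_uuid.folio_uuid.FOLIONamespaces
    holdings = auto()
    items = auto()


def stamp_source_id(folio_object: dict, object_type, holdings_sources=None) -> dict:
    # Holdings must carry a sourceId; any other object type passes through untouched.
    if object_type is Namespaces.holdings:
        if holdings_sources is None:
            raise RuntimeError("Holdings source not set in the mapper")
        folio_object["sourceId"] = holdings_sources.get("FOLIO")
    return folio_object


print(stamp_source_id({}, Namespaces.holdings, {"FOLIO": "11111111-1111-1111-1111-111111111111"}))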
@@ -531,9 +540,21 @@ class MappingFileMapperBase(MapperBase):
                 isinstance(res, str)
                 and self.library_configuration.multi_field_delimiter in res
             ):
+                for delim_value in res.split(
+                    self.library_configuration.multi_field_delimiter
+                ):
+                    if delim_value not in empty_vals:
+                        self.validate_enums(
+                            delim_value,
+                            sub_prop,
+                            sub_prop_name,
+                            index_or_id,
+                            required,
+                        )
                 multi_field_props.append(sub_prop_name)
+            else:
+                self.validate_enums(res, sub_prop, sub_prop_name, index_or_id, required)
 
-            self.validate_enums(res, sub_prop, sub_prop_name, index_or_id, required)
             if res or isinstance(res, bool):
                 temp_object[sub_prop_name] = res
 
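Before this change, a delimited multi-value string was validated as one opaque value, so a concatenation of individually valid enum values could be flagged as forbidden. A small illustration of the new per-part check, with an invented delimiter and enum list:

multi_field_delimiter = "<delimiter>"      # invented for the example
allowed_enum = ["Electronic", "Physical"]  # stand-in for a schema enum
empty_vals = ["", None]                    # mirrors the mapper's notion of empty


def validate_enum(value):
    if value not in allowed_enum:
        raise ValueError(f"Forbidden enum value found: {value}")


res = "Electronic<delimiter>Physical"
if multi_field_delimiter in res:
    for delim_value in res.split(multi_field_delimiter):
        if delim_value not in empty_vals:
            validate_enum(delim_value)  # each part is checked on its own
else:
    validate_enum(res)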
@@ -809,7 +830,7 @@ class MappingFileMapperBase(MapperBase):
         ):
             raise TransformationRecordFailedError(
                 index_or_id,
-                f"Allowed values for {mapped_schema_property_name}"
+                f"Allowed values for {mapped_schema_property_name} "
                 f"are {mapped_schema_property['enum']} "
                 f"Forbidden enum value found: ",
                 mapped_value,
folio_migration_tools/marc_rules_transformation/rules_mapper_base.py

@@ -559,6 +559,16 @@ class RulesMapperBase(MapperBase):
         if k == "authorityId" and (legacy_subfield_9 := marc_field.get("9")):
             marc_field.add_subfield("0", legacy_subfield_9)
             marc_field.delete_subfield("9")
+        if k == "authorityId" and (entity_subfields := entity_mapping.get("subfield", [])):
+            for subfield in entity_subfields:
+                if subfield != "9":
+                    Helper.log_data_issue(
+                        index_or_legacy_id,
+                        f"authorityId mapping from ${subfield} is not supported. Data Import will fail. "
+                        "Use only $9 for authority id mapping in MARC-to-Instance mapping rules.",
+                        marc_field,
+                    )
+            entity_mapping["subfield"] = ["9"]
         if my_values := [
             v
             for v in self.apply_rules(marc_field, entity_mapping, index_or_legacy_id)
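For readers unfamiliar with the convention: the mapper expects legacy authority ids in subfield $9 only, warns about any other configured subfield, and forces the mapping back to `["9"]`. A sketch of the $9 to $0 move performed just above, assuming pymarc 5's `Field`/`Subfield` API (the tag and values are invented):

from pymarc import Field, Subfield

field = Field(
    tag="100",
    indicators=["1", " "],
    subfields=[
        Subfield("a", "Austen, Jane,"),
        Subfield("9", "11111111-2222-3333-4444-555555555555"),  # legacy authority id
    ],
)

# The same moves as the mapper: copy $9 into $0, then drop $9.
if legacy_subfield_9 := field.get("9"):
    field.add_subfield("0", legacy_subfield_9)
    field.delete_subfield("9")
print(field)  # the field now carries $0 instead of $9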
folio_migration_tools/marc_rules_transformation/rules_mapper_bibs.py

@@ -165,7 +165,7 @@ class BibsRulesMapper(RulesMapperBase):
         Helper.log_data_issue(
             legacy_ids,
             "Multiple main entry fields in record. Record will fail Data Import. Creating Instance anyway.",
-            main_entry_fields
+            [str(field) for field in main_entry_fields]
         )
         if not main_entry_fields:
             main_entry_fields += marc_record.get_fields("700", "710", "711", "730")
folio_migration_tools/marc_rules_transformation/rules_mapper_holdings.py

@@ -1,6 +1,7 @@
 import copy
 import json
 import logging
+import re
 from typing import Dict, List, Set
 
 import i18n
@@ -393,12 +394,29 @@ class RulesMapperHoldings(RulesMapperBase):
         ) from ee
         return [
             {
-                "note": "\n".join(mrk_statement_notes),
+                "note": chunk,
                 "holdingsNoteTypeId": holdings_note_type_id,
                 "staffOnly": True,
-            }
+            } for chunk in self.split_mrk_by_max_note_size("\n".join(mrk_statement_notes))
         ]
 
+    @staticmethod
+    def split_mrk_by_max_note_size(s: str, max_chunk_size: int = 32000) -> List[str]:
+        lines = s.splitlines(keepends=True)
+        chunks = []
+        current_chunk = ""
+        for line in lines:
+            # If adding this line would exceed the limit, start a new chunk
+            if len(current_chunk) + len(line) > max_chunk_size:
+                if current_chunk:
+                    chunks.append(current_chunk)
+                current_chunk = line
+            else:
+                current_chunk += line
+        if current_chunk:
+            chunks.append(current_chunk)
+        return chunks
+
     def add_mfhd_as_mrk_note(self, marc_record: Record, folio_holding: Dict, legacy_ids: List[str]):
         """Adds the MFHD as a note to the holdings record
 
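The motivation for the splitter: a whole MFHD serialized as MRK text can exceed what a single FOLIO note comfortably holds (the 32,000-character default presumably stays under the practical note-size limit), so an oversized note now becomes several staff-only notes, each cut on a line boundary. A usage sketch with an artificially small limit so the chunking is visible (the MRK lines are invented):

def split_by_max_note_size(s: str, max_chunk_size: int = 32000) -> list[str]:
    # Same line-preserving strategy as split_mrk_by_max_note_size above.
    chunks, current_chunk = [], ""
    for line in s.splitlines(keepends=True):
        if len(current_chunk) + len(line) > max_chunk_size:
            if current_chunk:
                chunks.append(current_chunk)
            current_chunk = line
        else:
            current_chunk += line
    if current_chunk:
        chunks.append(current_chunk)
    return chunks


mrk = "=852 01$bMAIN\n=866 41$av.1-10\n=866 41$av.11-20\n"
for chunk in split_by_max_note_size(mrk, max_chunk_size=20):
    print(repr(chunk))  # every chunk ends on a full MRK line

Note that a single line longer than max_chunk_size still becomes its own oversized chunk; the splitter never cuts inside a line.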
@@ -424,12 +442,29 @@ class RulesMapperHoldings(RulesMapperBase):
         ) from ee
         folio_holding["notes"] = folio_holding.get("notes", []) + [
             {
-                "note": str(marc_record),
+                "note": chunk,
                 "holdingsNoteTypeId": holdings_note_type_id,
                 "staffOnly": True,
-            }
+            } for chunk in self.split_mrk_by_max_note_size(str(marc_record))
         ]
 
+    @staticmethod
+    def split_mrc_by_max_note_size(data: bytes, sep: bytes = b"\x1e", max_chunk_size: int = 32000) -> List[bytes]:
+        # Split data into segments, each ending with the separator (except possibly the last)
+        pattern = re.compile(b'(.*?' + re.escape(sep) + b'|.+?$)', re.DOTALL)
+        parts = [m.group(0) for m in pattern.finditer(data) if m.group(0)]
+        chunks = []
+        current_chunk = b""
+        for part in parts:
+            if len(current_chunk) + len(part) > max_chunk_size and current_chunk:
+                chunks.append(current_chunk)
+                current_chunk = part
+            else:
+                current_chunk += part
+        if current_chunk:
+            chunks.append(current_chunk)
+        return chunks
+
     def add_mfhd_as_mrc_note(self, marc_record: Record, folio_holding: Dict, legacy_ids: List[str]):
         """Adds the MFHD as a note to the holdings record
 
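`split_mrc_by_max_note_size` does the same job for the binary MARC serialization, using 0x1E (the MARC field terminator) as the seam so no field is cut in half. A small demonstration of the segmentation regex on a fabricated byte string:

import re

sep = b"\x1e"  # MARC field terminator
data = b"001 1234" + sep + b"852 0 $bMAIN" + sep + b"866 41$av.1-10" + sep

# Same pattern as the diff: each segment keeps its trailing separator,
# with a fallback alternative for a final segment that lacks one.
pattern = re.compile(b"(.*?" + re.escape(sep) + b"|.+?$)", re.DOTALL)
parts = [m.group(0) for m in pattern.finditer(data) if m.group(0)]
print(parts)
# [b'001 1234\x1e', b'852 0 $bMAIN\x1e', b'866 41$av.1-10\x1e']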
@@ -455,10 +490,10 @@ class RulesMapperHoldings(RulesMapperBase):
         ) from ee
         folio_holding["notes"] = folio_holding.get("notes", []) + [
             {
-                "note": marc_record.as_marc().decode("utf-8"),
+                "note": chunk.decode("utf-8"),
                 "holdingsNoteTypeId": holdings_note_type_id,
                 "staffOnly": True,
-            }
+            } for chunk in self.split_mrc_by_max_note_size(marc_record.as_marc())
         ]
 
     def wrap_up(self):
folio_migration_tools/migration_tasks/batch_poster.py

@@ -6,7 +6,7 @@ import sys
 import time
 import traceback
 from datetime import datetime
-from typing import Annotated, List
+from typing import Annotated, List, Optional
 from uuid import uuid4
 
 import httpx
@@ -173,11 +173,13 @@ class BatchPoster(MigrationTaskBase):
         self.num_posted = 0
         self.okapi_headers = self.folio_client.okapi_headers
         self.http_client = None
+        self.starting_record_count_in_folio: Optional[int] = None
 
     def do_work(self):
         with self.folio_client.get_folio_http_client() as httpx_client:
             self.http_client = httpx_client
             with open(self.folder_structure.failed_recs_path, "w", encoding='utf-8') as failed_recs_file:
+                self.get_starting_record_count()
                 try:
                     batch = []
                     if self.task_configuration.object_type == "SRS":
@@ -317,6 +319,8 @@ class BatchPoster(MigrationTaskBase):
             updates[record["id"]] = {
                 "_version": record["_version"],
             }
+            if "hrid" in record:
+                updates[record["id"]]["hrid"] = record["hrid"]
             if "status" in record:
                 updates[record["id"]]["status"] = record["status"]
             if "lastCheckIn" in record:
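For context, this block appears to collect server-maintained fields from records already in FOLIO so they survive re-posting: `_version` is what FOLIO's optimistic locking checks, and with this change the `hrid` is carried over as well. A toy illustration with invented values:

# Invented server-side record, shaped like a FOLIO storage response.
existing = {
    "id": "rec-1",
    "_version": 3,                # optimistic-locking counter
    "hrid": "ho00000001",         # human-readable id, now preserved too
    "status": {"name": "Available"},
}

updates = {}
updates[existing["id"]] = {"_version": existing["_version"]}
if "hrid" in existing:
    updates[existing["id"]]["hrid"] = existing["hrid"]
if "status" in existing:
    updates[existing["id"]]["status"] = existing["status"]
print(updates["rec-1"])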
@@ -604,6 +608,42 @@ class BatchPoster(MigrationTaskBase):
         else:
             return httpx.post(url, headers=self.okapi_headers, json=payload, params=self.query_params, timeout=None)
 
+    def get_current_record_count_in_folio(self):
+        if "query_endpoint" in self.api_info:
+            url = f"{self.folio_client.gateway_url}{self.api_info['query_endpoint']}"
+            query_params = {"query": "cql.allRecords=1", "limit": 0}
+            if self.http_client and not self.http_client.is_closed:
+                res = self.http_client.get(url, headers=self.folio_client.okapi_headers, params=query_params)
+            else:
+                res = httpx.get(url, headers=self.okapi_headers, params=query_params, timeout=None)
+            try:
+                res.raise_for_status()
+                return res.json()["totalRecords"]
+            except httpx.HTTPStatusError:
+                logging.error("Failed to get current record count. HTTP %s", res.status_code)
+                return 0
+            except KeyError:
+                logging.error(f"Failed to get current record count. No 'totalRecords' in response: {res.json()}")
+                return 0
+        else:
+            raise ValueError(
+                "No 'query_endpoint' available for %s. Cannot get current record count.", self.task_configuration.object_type
+            )
+
+    def get_starting_record_count(self):
+        if "query_endpoint" in self.api_info and not self.starting_record_count_in_folio:
+            logging.info("Getting starting record count in FOLIO")
+            self.starting_record_count_in_folio = self.get_current_record_count_in_folio()
+        else:
+            logging.info("No query_endpoint available for %s. Cannot get starting record count.", self.task_configuration.object_type)
+
+    def get_finished_record_count(self):
+        if "query_endpoint" in self.api_info:
+            logging.info("Getting finished record count in FOLIO")
+            self.finished_record_count_in_folio = self.get_current_record_count_in_folio()
+        else:
+            logging.info("No query_endpoint available for %s. Cannot get ending record count.", self.task_configuration.object_type)
+
     def wrap_up(self):
         logging.info("Done. Wrapping up")
         self.extradata_writer.flush()
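The counting trick here is a common FOLIO pattern: issue a CQL query with `limit=0` and read `totalRecords` from the response metadata instead of fetching any records. A hedged sketch of the same call (gateway URL, tenant, token, and endpoint are placeholders):

import httpx

# Placeholders: substitute a real gateway URL, auth headers, and storage endpoint.
gateway_url = "https://folio-gateway.example.org"
headers = {"x-okapi-tenant": "diku", "x-okapi-token": "<token>"}
query_endpoint = "/holdings-storage/holdings"

res = httpx.get(
    f"{gateway_url}{query_endpoint}",
    headers=headers,
    # limit=0 returns no records, only the matching-count metadata.
    params={"query": "cql.allRecords=1", "limit": 0},
    timeout=None,
)
res.raise_for_status()
print(res.json()["totalRecords"])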
@@ -621,11 +661,34 @@ class BatchPoster(MigrationTaskBase):
             )
         else:
             logging.info("Done posting %s records. %s failed", self.num_posted, self.num_failures)
-
+        if self.starting_record_count_in_folio:
+            self.get_finished_record_count()
+            total_on_server = self.finished_record_count_in_folio - self.starting_record_count_in_folio
+            discrepancy = self.processed - self.num_failures - total_on_server
+            if discrepancy != 0:
+                logging.error(
+                    (
+                        "Discrepancy in record count. "
+                        "Starting record count: %s. Finished record count: %s. "
+                        "Records posted: %s. Discrepancy: %s"
+                    ),
+                    self.starting_record_count_in_folio,
+                    self.finished_record_count_in_folio,
+                    self.num_posted - self.num_failures,
+                    discrepancy,
+                )
+        else:
+            discrepancy = 0
         run = "second time" if self.performing_rerun else "first time"
         self.migration_report.set("GeneralStatistics", f"Records processed {run}", self.processed)
         self.migration_report.set("GeneralStatistics", f"Records posted {run}", self.num_posted)
         self.migration_report.set("GeneralStatistics", f"Failed to post {run}", self.num_failures)
+        if discrepancy:
+            self.migration_report.set(
+                "GeneralStatistics",
+                f"Discrepancy in record count {run}",
+                discrepancy,
+            )
         self.rerun_run()
         with open(self.folder_structure.migration_reports_file, "w+") as report_file:
             self.migration_report.write_migration_report(
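The discrepancy check is simple arithmetic: records that survived transformation and posting should equal the growth in the server-side count. A worked example with invented numbers:

# Invented numbers for illustration.
starting_record_count = 100_000   # count in FOLIO before posting
finished_record_count = 149_950   # count in FOLIO after posting
processed = 50_000                # records read from the transformed files
num_failures = 20                 # records that failed to post

total_on_server = finished_record_count - starting_record_count  # 49_950
discrepancy = processed - num_failures - total_on_server         # 30
# A nonzero discrepancy means some records never landed on the server
# (or were deduplicated/overwritten there) and is logged as an error.
print(discrepancy)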
folio_migration_tools/migration_tasks/holdings_csv_transformer.py

@@ -209,7 +209,6 @@ class HoldingsCsvTransformer(MigrationTaskBase):
         self.holdings = {}
         self.total_records = 0
         self.holdings_id_map = self.load_id_map(self.folder_structure.holdings_id_map_path)
-        self.holdings_sources = self.get_holdings_sources()
         self.results_path = self.folder_structure.created_objects_path
         self.holdings_types = list(
             self.folio_client.folio_get_all("/holdings-types", "holdingsTypes")
@@ -432,8 +431,6 @@ class HoldingsCsvTransformer(MigrationTaskBase):
         if not folio_rec.get("holdingsTypeId", ""):
             folio_rec["holdingsTypeId"] = self.fallback_holdings_type["id"]
 
-        folio_rec["sourceId"] = self.holdings_sources.get("FOLIO")
-
         holdings_from_row = []
         all_instance_ids = folio_rec.get("instanceId", [])
         if len(all_instance_ids) == 1:
@@ -521,20 +518,6 @@ class HoldingsCsvTransformer(MigrationTaskBase):
                 self.holdings[holdings_key], new_holdings_record
             )
 
-    def get_holdings_sources(self):
-        res = {}
-        holdings_sources = list(
-            self.mapper.folio_client.folio_get_all("/holdings-sources", "holdingsRecordsSources")
-        )
-        logging.info("Fetched %s holdingsRecordsSources from tenant", len(holdings_sources))
-        res = {n["name"].upper(): n["id"] for n in holdings_sources}
-        if "FOLIO" not in res:
-            raise TransformationProcessError("", "No holdings source with name FOLIO in tenant")
-        if "MARC" not in res:
-            raise TransformationProcessError("", "No holdings source with name MARC in tenant")
-        logging.info(json.dumps(res, indent=4))
-        return res
-
 
     def explode_former_ids(folio_holding: dict):
         temp_ids = []
folio_migration_tools/migration_tasks/items_transformer.py

@@ -354,7 +354,7 @@ class ItemsTransformer(MigrationTaskBase):
                 record, f"row {idx}", FOLIONamespaces.items
             )
 
-            self.mapper.perform_additional_mappings(folio_rec, file_def)
+            self.mapper.perform_additional_mappings(legacy_id, folio_rec, file_def)
             self.handle_circiulation_notes(folio_rec, self.folio_client.current_user)
             self.handle_notes(folio_rec)
             if folio_rec["holdingsRecordId"] in self.boundwith_relationship_map:
{folio_migration_tools-1.9.0rc13.dist-info → folio_migration_tools-1.9.2.dist-info}/RECORD

@@ -14,10 +14,10 @@ folio_migration_tools/library_configuration.py,sha256=LzICsZQdOkXwIqdDfh59x0-Cx7
 folio_migration_tools/mapper_base.py,sha256=IYER8Dq-4qLq3qiAvUpnzc33usUbfZtNKzyZJD6DNds,23567
 folio_migration_tools/mapping_file_transformation/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 folio_migration_tools/mapping_file_transformation/courses_mapper.py,sha256=yX8yCCw6B54r7JUn5IPhMYKBgImiagUzfwBxNVRf5MQ,8091
-folio_migration_tools/mapping_file_transformation/holdings_mapper.py,sha256=
-folio_migration_tools/mapping_file_transformation/item_mapper.py,sha256=
+folio_migration_tools/mapping_file_transformation/holdings_mapper.py,sha256=qT8LMWZytSaOyUC9OzfJeekVIkLkNim4OzfoGlAh75g,8290
+folio_migration_tools/mapping_file_transformation/item_mapper.py,sha256=ZmPc64X_l3hq8qc0WDhZ020TdjvucZoQciIVIpPzwiA,10472
 folio_migration_tools/mapping_file_transformation/manual_fee_fines_mapper.py,sha256=2-W2Z8hwAhWT77zfDWuwWqm20j4w1mfzeAXWiyssQ8I,13434
-folio_migration_tools/mapping_file_transformation/mapping_file_mapper_base.py,sha256=
+folio_migration_tools/mapping_file_transformation/mapping_file_mapper_base.py,sha256=LqxpULlickaQhIv92iBA7SwSXy1PN7l-9swLHQdnx6g,38408
 folio_migration_tools/mapping_file_transformation/notes_mapper.py,sha256=vCmZmjrjyYtXeFCyVqvWfnP8y1jGGu15RXzXIHh12xY,3530
 folio_migration_tools/mapping_file_transformation/order_mapper.py,sha256=-JEBEeOntNPE9-NYhWAJ1hpQI03ZzMv-_mkyLzSa9x4,17750
 folio_migration_tools/mapping_file_transformation/organization_mapper.py,sha256=u1Lb6tApn-nVLqbbJV38BuipKL3OK8Y2uQ4ogoyGQaI,14639
@@ -31,18 +31,18 @@ folio_migration_tools/marc_rules_transformation/loc_language_codes.xml,sha256=zt
 folio_migration_tools/marc_rules_transformation/marc_file_processor.py,sha256=M-PHduzMYmZnrMwOSlwnWQ5bT-566gVRFSMo-JgS2d4,12346
 folio_migration_tools/marc_rules_transformation/marc_reader_wrapper.py,sha256=9ATjYMRAjy0QcXtmNZaHVhHLJ5hE1WUgOcF6KMJjbgo,5309
 folio_migration_tools/marc_rules_transformation/rules_mapper_authorities.py,sha256=PGt2w8h2pj8_8sGjQe3L-odFDlquURtKnoNFRWQB3GI,9621
-folio_migration_tools/marc_rules_transformation/rules_mapper_base.py,sha256=
-folio_migration_tools/marc_rules_transformation/rules_mapper_bibs.py,sha256=
-folio_migration_tools/marc_rules_transformation/rules_mapper_holdings.py,sha256=
+folio_migration_tools/marc_rules_transformation/rules_mapper_base.py,sha256=loNZ9gEYaAwjkP2_wLlXGedjWvSdHoGF_oJN9g6gI3s,45928
+folio_migration_tools/marc_rules_transformation/rules_mapper_bibs.py,sha256=GYZmVrEKcHkOEH4U3027-vQjS6mfMbk84GJTqiVrD4E,30350
+folio_migration_tools/marc_rules_transformation/rules_mapper_holdings.py,sha256=wT9HDodIRYeGbjutVHDHpSBVWrXsuA2LO8e_MmBMmzE,28498
 folio_migration_tools/migration_report.py,sha256=BkRspM1hwTBnWeqsHamf7yVEofzLj560Q-9G--O00hw,4258
 folio_migration_tools/migration_tasks/__init__.py,sha256=ZkbY_yGyB84Ke8OMlYUzyyBj4cxxNrhMTwQlu_GbdDs,211
 folio_migration_tools/migration_tasks/authority_transformer.py,sha256=AoXg9s-GLO3yEEDCrQV7hc4YVXxwxsdxDdpj1zhHydE,4251
-folio_migration_tools/migration_tasks/batch_poster.py,sha256=
+folio_migration_tools/migration_tasks/batch_poster.py,sha256=uXxvmxlLSTEhvdBH9FgxhIHHXB4ezLt35LSN6lKprYE,39673
 folio_migration_tools/migration_tasks/bibs_transformer.py,sha256=46d44pcDAodFXDYbrTCMRASISbDciXmA0CXYfhP2IaE,6298
 folio_migration_tools/migration_tasks/courses_migrator.py,sha256=CzXnsu-KGP7B4zcINJzLYUqz47D16NuFfzu_DPqRlTQ,7061
-folio_migration_tools/migration_tasks/holdings_csv_transformer.py,sha256=
+folio_migration_tools/migration_tasks/holdings_csv_transformer.py,sha256=kMhtHE8DJjA4d6kXBcfflueha3R3nwlBQjdec8CaY8c,21926
 folio_migration_tools/migration_tasks/holdings_marc_transformer.py,sha256=DVYdSNUPmdTv6GfLJdyT806dZV6UQHH_T8gkqtPgXaU,14143
-folio_migration_tools/migration_tasks/items_transformer.py,sha256=
+folio_migration_tools/migration_tasks/items_transformer.py,sha256=hkrllccEkKajp24BFSS85uiCpaEJinfkfvdjXGxNgCM,19223
 folio_migration_tools/migration_tasks/loans_migrator.py,sha256=CPsin9XLzHwNrpKHPMHAvgRvpoH8QvAfYZYr1FSxAN4,34520
 folio_migration_tools/migration_tasks/manual_fee_fines_transformer.py,sha256=CnmlTge7nChUJ10EiUkriQtJlVxWqglgfhjgneh2_yM,7247
 folio_migration_tools/migration_tasks/migration_task_base.py,sha256=Q-57h6rmt74bC9LidA9ZoagEcwVd_ytq8IUWelVOm2E,22521
@@ -60,8 +60,8 @@ folio_migration_tools/transaction_migration/legacy_request.py,sha256=1ulyFzPQw_I
 folio_migration_tools/transaction_migration/legacy_reserve.py,sha256=qzw0okg4axAE_ezXopP9gFsQ_e60o0zh7zqRzFBSWHY,1806
 folio_migration_tools/transaction_migration/transaction_result.py,sha256=cTdCN0BnlI9_ZJB2Z3Fdkl9gpymIi-9mGZsRFlQcmDk,656
 folio_migration_tools/translations/en.json,sha256=FeoaN3INfim4_-l3DSamHo2hn1SUJr5DsgDgsV4XUek,39693
-folio_migration_tools-1.9.
-folio_migration_tools-1.9.
-folio_migration_tools-1.9.
-folio_migration_tools-1.9.
-folio_migration_tools-1.9.
+folio_migration_tools-1.9.2.dist-info/LICENSE,sha256=PhIEkitVi3ejgq56tt6sWoJIG_zmv82cjjd_aYPPGdI,1072
+folio_migration_tools-1.9.2.dist-info/METADATA,sha256=Cqu9bvuBBESPXOo3GoNF9GgijMciQSPtMPFnl94q4a4,7444
+folio_migration_tools-1.9.2.dist-info/WHEEL,sha256=b4K_helf-jlQoXBBETfwnf4B04YC67LOev0jo4fX5m8,88
+folio_migration_tools-1.9.2.dist-info/entry_points.txt,sha256=Hbe-HjqMcU8FwVshVIkeWyZd9XwgT1CCMNf06EpHQu8,77
+folio_migration_tools-1.9.2.dist-info/RECORD,,
File without changes
|
|
File without changes
|
{folio_migration_tools-1.9.0rc13.dist-info → folio_migration_tools-1.9.2.dist-info}/entry_points.txt
RENAMED
|
File without changes
|