folio-migration-tools 1.9.0rc5__py3-none-any.whl → 1.9.0rc6__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- folio_migration_tools/__init__.py +8 -0
- folio_migration_tools/custom_dict.py +2 -2
- folio_migration_tools/custom_exceptions.py +7 -4
- folio_migration_tools/mapper_base.py +23 -3
- folio_migration_tools/mapping_file_transformation/item_mapper.py +28 -11
- folio_migration_tools/marc_rules_transformation/conditions.py +54 -30
- folio_migration_tools/marc_rules_transformation/rules_mapper_base.py +71 -38
- folio_migration_tools/migration_tasks/batch_poster.py +38 -14
- folio_migration_tools/migration_tasks/items_transformer.py +2 -2
- {folio_migration_tools-1.9.0rc5.dist-info → folio_migration_tools-1.9.0rc6.dist-info}/METADATA +1 -1
- {folio_migration_tools-1.9.0rc5.dist-info → folio_migration_tools-1.9.0rc6.dist-info}/RECORD +14 -14
- {folio_migration_tools-1.9.0rc5.dist-info → folio_migration_tools-1.9.0rc6.dist-info}/LICENSE +0 -0
- {folio_migration_tools-1.9.0rc5.dist-info → folio_migration_tools-1.9.0rc6.dist-info}/WHEEL +0 -0
- {folio_migration_tools-1.9.0rc5.dist-info → folio_migration_tools-1.9.0rc6.dist-info}/entry_points.txt +0 -0
folio_migration_tools/custom_dict.py

```diff
@@ -7,10 +7,10 @@ class InsensitiveDictReader(csv.DictReader):
     # spaces and to lower case.
     @property
     def fieldnames(self):
-        return [field.strip().lower() for field in csv.DictReader.fieldnames.fget(self)]
+        return [field.strip().lower() for field in csv.DictReader.fieldnames.fget(self)]  # type: ignore

     def next(self):
-        return InsensitiveDict(csv.DictReader.next(self))
+        return InsensitiveDict(csv.DictReader.next(self))  # type: ignore


 class InsensitiveDict(dict):
```
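The `# type: ignore` comments only quiet the type checker; the reader-facing behavior of `InsensitiveDictReader` is unchanged: header fields are stripped and lower-cased before rows are built. A minimal sketch of that behavior (the sample data and delimiter are illustrative, not taken from the package):

```python
import io

from folio_migration_tools.custom_dict import InsensitiveDictReader

# Headers with odd casing and padding, as they often appear in legacy exports.
tsv = io.StringIO("Barcode\t LOCATION \n36600123456\tmain-stacks\n")

reader = InsensitiveDictReader(tsv, dialect="excel-tab")
for row in reader:
    # Keys come back stripped and lower-cased, however the file spells them.
    print(row["barcode"], row["location"])
```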
folio_migration_tools/custom_exceptions.py

```diff
@@ -1,6 +1,9 @@
 import logging
+from typing import Union
 import i18n

+from folio_migration_tools import StrCoercible
+

 class TransfomationError(Exception):
     pass
@@ -10,10 +13,10 @@ class TransformationFieldMappingError(TransfomationError):
     """Raised when the a field mapping fails, but the error is not critical.
     The issue should be logged for the library to act upon it"""

-    def __init__(self, index_or_id="", message="", data_value=""):
+    def __init__(self, index_or_id="", message="", data_value: Union[str, StrCoercible] = ""):
         self.index_or_id = index_or_id or ""
         self.message = message
-        self.data_value = data_value
+        self.data_value: Union[str, StrCoercible] = data_value
         super().__init__(self.message)

     def __str__(self):
@@ -38,7 +41,7 @@ class TransformationRecordFailedError(TransfomationError):
     def __init__(self, index_or_id, message="", data_value=""):
         self.index_or_id = index_or_id
         self.message = message
-        self.data_value = data_value
+        self.data_value: Union[str, StrCoercible] = data_value
         # logging.log(26, f"RECORD FAILED\t{self.id}\t{self.message}\t{self.data_value}")
         super().__init__(self.message)

@@ -67,7 +70,7 @@ class TransformationProcessError(TransfomationError):
         index_or_id,
         message="Critical Process issue. Transformation failed."
         " Check configuration, mapping files and reference data",
-        data_value="",
+        data_value: Union[str, StrCoercible] = "",
     ):
         self.index_or_id = index_or_id
         self.message = message
```
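The functional change here is the widened `data_value` parameter: besides plain strings, anything string-coercible (the new `StrCoercible` type added to `folio_migration_tools/__init__.py` in this release; its definition is not shown in this diff) can now be attached to an error and is only turned into text when the error is reported. A small hedged sketch, using a stand-in object rather than a real pymarc field:

```python
from folio_migration_tools.custom_exceptions import TransformationFieldMappingError


class LegacyField:
    """Stand-in for any str-coercible value, e.g. a pymarc Field."""

    def __init__(self, raw: str):
        self.raw = raw

    def __str__(self) -> str:
        return self.raw


try:
    raise TransformationFieldMappingError(
        "legacy-001", "Could not map field", LegacyField("245 10 $aSome title")
    )
except TransformationFieldMappingError as error:
    # data_value is kept as-is; str() is only applied when the issue is reported.
    print(error.index_or_id, error.message, str(error.data_value))
```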
folio_migration_tools/mapper_base.py

```diff
@@ -6,7 +6,7 @@ import sys
 import uuid
 from datetime import datetime, timezone
 from pathlib import Path
-from typing import List
+from typing import Dict, List

 import i18n
 from folio_uuid.folio_namespaces import FOLIONamespaces
@@ -35,7 +35,7 @@ class MapperBase:
         self,
         library_configuration: LibraryConfiguration,
         folio_client: FolioClient,
-        parent_id_map: dict[str, tuple] =
+        parent_id_map: dict[str, tuple] = {},
     ):
         logging.info("MapperBase initiating")
         self.parent_id_map: dict[str, tuple] = parent_id_map
@@ -318,7 +318,14 @@ class MapperBase:
                     entry["MFHD_ID"],
                 )
             )
-
+            if entry["BIB_ID"] in self.parent_id_map:
+                new_map[mfhd_uuid] = new_map.get(mfhd_uuid, []) + [instance_uuid]
+            else:
+                raise TransformationRecordFailedError(
+                    entry["MFHD_ID"],
+                    "Boundwith relationship map contains a BIB_ID id not in the instance id map. No boundwith holdings created.",
+                    entry["BIB_ID"],
+                )

         return new_map

@@ -469,6 +476,19 @@ class MapperBase:
             )
         )

+    @staticmethod
+    def validate_location_map(location_map: List[Dict], locations: List[Dict]) -> List[Dict]:
+        mapped_codes = [x['folio_code'] for x in location_map]
+        existing_codes = [x['code'] for x in locations]
+        missing_codes = set(mapped_codes) - set(existing_codes)
+        if missing_codes:
+            raise TransformationProcessError(
+                "",
+                f"Location map contains codes not found in locations: {', '.join(missing_codes)}",
+                "",
+            )
+        return location_map
+

 def flatten(my_dict: dict, path=""):
     for k, v in iter(my_dict.items()):
```
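The new `validate_location_map` static method fails fast when a mapping file points at a FOLIO location code that does not exist in the tenant, instead of letting individual records fail later. A minimal sketch of that behavior; the codes below are made up for illustration:

```python
from folio_migration_tools.custom_exceptions import TransformationProcessError
from folio_migration_tools.mapper_base import MapperBase

location_map = [
    {"legacy_code": "MAIN", "folio_code": "main-stacks"},
    {"legacy_code": "ANNEX", "folio_code": "annex"},
]
tenant_locations = [{"code": "main-stacks"}]  # "annex" does not exist in the tenant

try:
    MapperBase.validate_location_map(location_map, tenant_locations)
except TransformationProcessError as tpe:
    # Raised before any records are transformed; the message lists the missing codes.
    print(tpe)
```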
folio_migration_tools/mapping_file_transformation/item_mapper.py

```diff
@@ -43,7 +43,7 @@ class ItemMapper(MappingFileMapperBase):
         temporary_location_mapping,
         library_configuration: LibraryConfiguration,
         boundwith_relationship_map,
-        task_configuration: AbstractTaskConfiguration
+        task_configuration: AbstractTaskConfiguration,
     ):
         item_schema = folio_client.get_item_schema()
         super().__init__(
@@ -75,7 +75,9 @@ class ItemMapper(MappingFileMapperBase):
                 self.folio_client,
                 "/locations",
                 "locations",
-
+                self.validate_location_map(
+                    temporary_location_mapping, self.folio_client.locations
+                ),
                 "code",
                 "TemporaryLocationMapping",
             )
@@ -116,7 +118,7 @@ class ItemMapper(MappingFileMapperBase):
             self.folio_client,
             "/locations",
             "locations",
-            location_map,
+            self.validate_location_map(location_map, self.folio_client.locations),
             "code",
             "LocationMapping",
         )
@@ -128,17 +130,24 @@ class ItemMapper(MappingFileMapperBase):
         folio_record["discoverySuppress"] = file_def.discovery_suppressed
         self.migration_report.add(
             "Suppression",
-            i18n.t("Suppressed from discovery")
+            i18n.t("Suppressed from discovery")
+            + f" = {folio_record['discoverySuppress']}",
         )

     def setup_status_mapping(self, item_statuses_map):
-        statuses = self.item_schema["properties"]["status"]["properties"]["name"][
+        statuses = self.item_schema["properties"]["status"]["properties"]["name"][
+            "enum"
+        ]
         for mapping in item_statuses_map:
             if "folio_name" not in mapping:
-                logging.critical(
+                logging.critical(
+                    "folio_name is not a column in the status mapping file"
+                )
                 sys.exit(1)
             elif "legacy_code" not in mapping:
-                logging.critical(
+                logging.critical(
+                    "legacy_code is not a column in the status mapping file"
+                )
                 sys.exit(1)
             elif mapping["folio_name"] not in statuses:
                 logging.critical(
@@ -153,7 +162,9 @@ class ItemMapper(MappingFileMapperBase):
                 )
                 sys.exit(1)
             elif not all(mapping.values()):
-                logging.critical(
+                logging.critical(
+                    "empty value in mapping %s. Check mapping file", mapping.values()
+                )
                 sys.exit(1)
             else:
                 self.status_mapping = {
@@ -206,7 +217,9 @@ class ItemMapper(MappingFileMapperBase):
                 index_or_id,
                 True,
             )
-            self.migration_report.add(
+            self.migration_report.add(
+                "TemporaryLoanTypeMapping", f"{folio_prop_name} -> {ltid}"
+            )
             return ltid
         elif folio_prop_name == "permanentLoanTypeId":
             return self.get_mapped_ref_data_value(
@@ -232,7 +245,9 @@ class ItemMapper(MappingFileMapperBase):
         normalized_barcode = barcode.strip().lower()
         if normalized_barcode and normalized_barcode in self.unique_barcodes:
             Helper.log_data_issue(index_or_id, "Duplicate barcode", mapped_value)
-            self.migration_report.add_general_statistics(
+            self.migration_report.add_general_statistics(
+                i18n.t("Duplicate barcodes")
+            )
             return f"{barcode}-{uuid4()}"
         else:
             if normalized_barcode:
@@ -257,7 +272,9 @@ class ItemMapper(MappingFileMapperBase):
         self.migration_report.add("UnmappedProperties", f"{folio_prop_name}")
         return ""

-    def get_item_level_call_number_type_id(
+    def get_item_level_call_number_type_id(
+        self, legacy_item, folio_prop_name: str, index_or_id
+    ):
         if self.call_number_mapping:
             return self.get_mapped_ref_data_value(
                 self.call_number_mapping, legacy_item, index_or_id, folio_prop_name
```
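Most of the changes above are formatting, but they sit next to checks worth knowing about when preparing mapping files: `setup_status_mapping` still requires `legacy_code` and `folio_name` columns, every `folio_name` must be one of the item schema's status names, and no cell may be empty, otherwise the task exits. A hedged sketch of what it expects (the column names are the ones the code checks for; the rows and the allowed-status list are illustrative):

```python
# Rows as they would come out of the item status mapping file (TSV -> list of dicts).
item_statuses_map = [
    {"legacy_code": "c", "folio_name": "Checked out"},
    {"legacy_code": "m", "folio_name": "Missing"},
]

# The allowed names come from the item schema:
# item_schema["properties"]["status"]["properties"]["name"]["enum"]
allowed_statuses = {"Available", "Checked out", "Missing", "In process"}

for row in item_statuses_map:
    # setup_status_mapping calls sys.exit(1) if any of these fail.
    assert "folio_name" in row and "legacy_code" in row
    assert row["folio_name"] in allowed_statuses
    assert all(row.values())
```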
folio_migration_tools/marc_rules_transformation/conditions.py

```diff
@@ -1,8 +1,9 @@
 import logging
 import re
+import traceback
+from typing import Union

 import i18n
-import pymarc
 from folioclient import FolioClient
 from pymarc import field

@@ -41,8 +42,8 @@ class Conditions:
         self.folio_release: FolioRelease = folio_release
         self.filter_last_chars = r",$"
         self.folio = folio
-        self.default_contributor_type =
-        self.mapper = mapper
+        self.default_contributor_type: dict = {}
+        self.mapper: RulesMapperBase = mapper
         self.ref_data_dicts = {}
         if object_type == "bibs":
             self.setup_reference_data_for_all()
@@ -52,15 +53,16 @@ class Conditions:
         else:
             self.setup_reference_data_for_all()
             self.setup_reference_data_for_items_and_holdings(default_call_number_type_name)
+        self.object_type = object_type
         self.condition_cache: dict = {}

     def setup_reference_data_for_bibs(self):
         logging.info("Setting up reference data for bib transformation")
-        logging.info("%s\tcontrib_name_types", len(self.folio.contrib_name_types))
-        logging.info("%s\tcontributor_types", len(self.folio.contributor_types))
-        logging.info("%s\talt_title_types", len(self.folio.alt_title_types))
-        logging.info("%s\tidentifier_types", len(self.folio.identifier_types))
-        logging.info("%s\tsubject_types", len(self.folio.subject_types))
+        logging.info("%s\tcontrib_name_types", len(self.folio.contrib_name_types))  # type: ignore
+        logging.info("%s\tcontributor_types", len(self.folio.contributor_types))  # type: ignore
+        logging.info("%s\talt_title_types", len(self.folio.alt_title_types))  # type: ignore
+        logging.info("%s\tidentifier_types", len(self.folio.identifier_types))  # type: ignore
+        logging.info("%s\tsubject_types", len(self.folio.subject_types))  # type: ignore
         # Raise for empty settings
         if not self.folio.contributor_types:
             raise TransformationProcessError("", "No contributor_types in FOLIO")
@@ -75,18 +77,18 @@ class Conditions:

         # Set defaults
         logging.info("Setting defaults")
-        self.default_contributor_name_type = self.folio.contrib_name_types[0]["id"]
+        self.default_contributor_name_type: str = self.folio.contrib_name_types[0]["id"]  # type: ignore
         logging.info("Contributor name type:\t%s", self.default_contributor_name_type)
         self.default_contributor_type = next(
-            ct for ct in self.folio.contributor_types if ct["code"] == "ctb"
+            ct for ct in self.folio.contributor_types if ct["code"] == "ctb"  # type: ignore
         )
         logging.info("Contributor type:\t%s", self.default_contributor_type["id"])

     def setup_reference_data_for_items_and_holdings(self, default_call_number_type_name):
-        logging.info(f"{len(self.folio.locations)}\tlocations")
+        logging.info(f"{len(self.folio.locations)}\tlocations")  # type: ignore
         self.default_call_number_type = {}
-        logging.info("%s\tholding_note_types", len(self.folio.holding_note_types))
-        logging.info("%s\tcall_number_types", len(self.folio.call_number_types))
+        logging.info("%s\tholding_note_types", len(self.folio.holding_note_types))  # type: ignore
+        logging.info("%s\tcall_number_types", len(self.folio.call_number_types))  # type: ignore
         self.setup_and_validate_holdings_types()
         # Raise for empty settings
         if not self.folio.holding_note_types:
@@ -98,10 +100,10 @@ class Conditions:

         # Set defaults
         logging.info("Defaults")
-        self.default_call_number_type = next(
+        self.default_call_number_type: dict = next(
             (
                 ct
-                for ct in self.folio.call_number_types
+                for ct in self.folio.call_number_types  # type: ignore
                 if ct["name"] == default_call_number_type_name
             ),
             None,
@@ -124,7 +126,7 @@ class Conditions:
         missing_holdings_types = [
             ht
             for ht in self.holdings_type_map.values()
-            if ht not in [ht_ref["name"] for ht_ref in self.holdings_types]
+            if ht not in [ht_ref["name"] for ht_ref in self.holdings_types]  # type: ignore
         ]
         if any(missing_holdings_types):
             raise TransformationProcessError(
@@ -132,15 +134,15 @@ class Conditions:
                 "Holdings types are missing from the tenant. Please set them up",
                 missing_holdings_types,
             )
-        logging.info("%s\tholdings types", len(self.holdings_types))
+        logging.info("%s\tholdings types", len(self.holdings_types))  # type: ignore

     def setup_reference_data_for_all(self):
-        logging.info(f"{len(self.folio.class_types)}\tclass_types")
+        logging.info(f"{len(self.folio.class_types)}\tclass_types")  # type: ignore
         logging.info(
-            f"{len(self.folio.electronic_access_relationships)}\telectronic_access_relationships"
+            f"{len(self.folio.electronic_access_relationships)}\telectronic_access_relationships"  # type: ignore
         )
         self.statistical_codes = self.folio.statistical_codes
-        logging.info(f"{len(self.statistical_codes)} \tstatistical_codes")
+        logging.info(f"{len(self.statistical_codes)} \tstatistical_codes")  # type: ignore

         # Raise for empty settings
         if not self.folio.class_types:
@@ -153,13 +155,13 @@ class Conditions:
             )
         )
         logging.info(f"{len(self.authority_note_types)} \tAuthority note types")
-        logging.info(f"{len(self.folio.identifier_types)} \tidentifier types")
+        logging.info(f"{len(self.folio.identifier_types)} \tidentifier types")  # type: ignore

     def get_condition(
-        self, name, legacy_id, value, parameter=None, marc_field: field.Field = None
+        self, name, legacy_id, value, parameter=None, marc_field: Union[None, field.Field] = None
     ):
         try:
-            return self.condition_cache.get(name)(legacy_id, value, parameter, marc_field)
+            return self.condition_cache.get(name)(legacy_id, value, parameter, marc_field)  # type: ignore
         # Exception should only handle the missing condition from the cache.
         # All other exceptions should propagate up
         except Exception:
@@ -375,10 +377,10 @@ class Conditions:
                 "MappedIdentifierTypes", f"{marc_field.tag} -> {t[1]}"
             )
             return t[0]
-        identifier_type = next(
+        identifier_type: dict = next(
            (
                f
-                for f in self.folio.identifier_types
+                for f in self.folio.identifier_types  # type: ignore
                if (
                    f["name"] in parameter.get("names", "non existant")
                    or f["name"] in parameter.get("name", "non existant")
@@ -617,7 +619,7 @@ class Conditions:
         return self._extracted_from_condition_set_electronic_access_relations_id_2("8", marc_field)

     def condition_set_call_number_type_by_indicator(
-        self, legacy_id, value, parameter, marc_field:
+        self, legacy_id, value, parameter, marc_field: field.Field
     ):
         self.mapper.migration_report.add(
             "Exceptions",
@@ -629,7 +631,7 @@ class Conditions:
         return self.condition_set_call_number_type_id(legacy_id, value, parameter, marc_field)

     def condition_set_call_number_type_id(
-        self, legacy_id, value, parameter, marc_field:
+        self, legacy_id, value, parameter, marc_field: field.Field
     ):
         first_level_map = {
             "0": "Library of Congress classification",
@@ -690,12 +692,13 @@ class Conditions:
     ):
         for subfield in marc_field.get_subfields("4", "e"):
             normalized_subfield = re.sub(r"[^A-Za-z0-9 ]+", "", subfield.strip())
-            for cont_type in self.folio.contributor_types:
+            for cont_type in self.folio.contributor_types:  # type: ignore
                 if normalized_subfield in [cont_type["code"], cont_type["name"]]:
                     return cont_type["name"]
         try:
             return value
-        except IndexError
+        except IndexError:
+            logging.debug("Exception occurred: %s", traceback.format_exc())
             return ""

     def condition_set_alternative_title_type_id(self, legacy_id, value, parameter, marc_field):
@@ -728,7 +731,7 @@ class Conditions:
     ):
         if "legacy_locations" not in self.ref_data_dicts:
             try:
-                d = {lm["legacy_code"]: lm["folio_code"] for lm in self.mapper.location_map}
+                d = {lm["legacy_code"]: lm["folio_code"] for lm in self.mapper.location_map}  # type: ignore
                 self.ref_data_dicts["legacy_locations"] = d
                 for folio_code in d.values():
                     t = self.get_ref_data_tuple_by_code(
@@ -812,6 +815,27 @@ class Conditions:
     def condition_set_electronic_access_relations_id(
         self, legacy_id, value, parameter, marc_field: field.Field
     ):
+        """
+        This method handles the mapping of electronic access relationship IDs.
+        If the record type being mapped is FOLIO holdings, it provides an (optional) alternative
+        mapping baseed on a provided name parameter, bypassing the FOLIO MARC-to-Holdings mapping
+        engine behavior. This requires use of a supplemental mapping rules file in the
+        HoldingsMarcTransformer task definition containing the name parameter.
+        """
+        if self.object_type == "holdings" and "name" in parameter:
+            try:
+                t = self.get_ref_data_tuple_by_name(
+                    self.folio.electronic_access_relationships,
+                    "electronic_access_relationships",
+                    parameter["name"],
+                )
+                self.mapper.migration_report.add("MappedElectronicRelationshipTypes", t[1])
+                return t[0]
+            except Exception:
+                raise TransformationProcessError(
+                    legacy_id,
+                    f"Electronic access relationship not found for {parameter['name']} {marc_field}",
+                )
         return self._extracted_from_condition_set_electronic_access_relations_id_2("3", marc_field)

     # TODO Rename this here and in `condition_set_url_relationship` and `condition_set_electronic_access_relations_id`
```
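The new holdings-only branch in `condition_set_electronic_access_relations_id` lets a supplemental mapping rules file pin the electronic access relationship by name instead of falling back to the indicator-based default. The exact layout of such a supplemental rules file is not shown in this diff; as a hedged illustration, a rule fragment for an 856 field could carry the parameter like this (written as a Python dict for readability; "Resource" is just an example relationship name):

```python
# Hypothetical fragment of a supplemental mapping rule for a HoldingsMarcTransformer task.
supplemental_rule_856 = {
    "856": [
        {
            "entity": [
                {
                    "target": "electronicAccess.relationshipId",
                    "subfield": ["u"],
                    "rules": [
                        {
                            "conditions": [
                                {
                                    "type": "set_electronic_access_relations_id",
                                    "parameter": {"name": "Resource"},
                                }
                            ]
                        }
                    ],
                }
            ]
        }
    ]
}
```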
folio_migration_tools/marc_rules_transformation/rules_mapper_base.py

```diff
@@ -597,46 +597,48 @@ class RulesMapperBase(MapperBase):
         legacy_ids,
     ):
         entity_mapping = mapping["entity"]
-
-
-
-
-
-        (
-
+        if entity_indicators_match(entity_mapping, marc_field):
+            entity_mapping = [x for x in entity_mapping if "indicators" not in x]
+            e_parent = entity_mapping[0]["target"].split(".")[0]
+            if mapping.get("entityPerRepeatedSubfield", False):
+                for temp_field in self.grouped(marc_field):
+                    entity = self.create_entity(entity_mapping, temp_field, e_parent, legacy_ids)
+                    if entity and (
+                        (isinstance(entity, dict) and all(entity.values()))
+                        or (isinstance(entity, list) and all(entity))
+                    ):
+                        self.add_entity_to_record(entity, e_parent, folio_record, self.schema)
+            else:
+                if mapping.get("ignoreSubsequentSubfields", False):
+                    marc_field = self.remove_repeated_subfields(marc_field)
+                entity = self.create_entity(entity_mapping, marc_field, e_parent, legacy_ids)
+                if e_parent in ["precedingTitles", "succeedingTitles"]:
+                    self.create_preceding_succeeding_titles(
+                        entity, e_parent, folio_record["id"], marc_field
+                    )
+                elif entity and (
+                    all(
+                        v
+                        for k, v in entity.items()
+                        if k not in ["staffOnly", "primary", "isbnValue", "issnValue"]
+                    )
+                    or e_parent in ["electronicAccess", "publication"]
+                    or (
+                        e_parent.startswith("holdingsStatements") and any(v for k, v in entity.items())
+                    )
                 ):
                     self.add_entity_to_record(entity, e_parent, folio_record, self.schema)
-
-
-
-
-
-
-
-
-
-
-
-            for k, v in entity.items()
-            if k not in ["staffOnly", "primary", "isbnValue", "issnValue"]
-        )
-        or e_parent in ["electronicAccess", "publication"]
-        or (
-            e_parent.startswith("holdingsStatements") and any(v for k, v in entity.items())
-        )
-    ):
-        self.add_entity_to_record(entity, e_parent, folio_record, self.schema)
-    else:
-        sfs = " - ".join(
-            f"{f[0]}:{('has_value' if f[1].strip() else 'empty')}" for f in marc_field
-        )
-        pattern = " - ".join(f"{k}:'{bool(v)}'" for k, v in entity.items())
-        self.migration_report.add(
-            "IncompleteEntityMapping",
-            f"{marc_field.tag} {sfs} ->>-->> {e_parent} {pattern} ",
-        )
-        # Experimental
-        # self.add_entity_to_record(entity, e_parent, rec, self.schema)
+                else:
+                    sfs = " - ".join(
+                        f"{f[0]}:{('has_value' if f[1].strip() else 'empty')}" for f in marc_field
+                    )
+                    pattern = " - ".join(f"{k}:'{bool(v)}'" for k, v in entity.items())
+                    self.migration_report.add(
+                        "IncompleteEntityMapping",
+                        f"{marc_field.tag} {sfs} ->>-->> {e_parent} {pattern} ",
+                    )
+                    # Experimental
+                    # self.add_entity_to_record(entity, e_parent, rec, self.schema)

     def handle_suppression(
         self, folio_record, file_def: FileDefinition, only_discovery_suppress: bool = False
@@ -975,3 +977,34 @@ def is_array_of_strings(schema_property):
 def is_array_of_objects(schema_property):
     sc_prop_type = schema_property.get("type", "string")
     return sc_prop_type == "array" and schema_property["items"]["type"] == "object"
+
+def entity_indicators_match(entity_mapping, marc_field):
+    """
+    Check if the indicators of the entity mapping match the indicators of the MARC field.
+    Entity mappings can limit the fields they are applied to by specifying indicator values that
+    must match the provided MARC field's indicators. If the entity mapping does not specify any
+    indicator values, it is assumed to match all MARC fields. Entity indicator values can be a
+    specific value or a wildcard "*", which matches any value.
+
+    This function compares the indicators of the entity mapping with the indicators of the MARC field.
+    If the entity does not specify any indicator values, the function returns True. If the entity does
+    specify indicator values, the function checks if the MARC field's indicators match the specified
+    values or if the specified values are wildcards. If both indicators match, the function returns True;
+    otherwise, it returns False.
+
+    Args:
+        entity_mapping (dict): _description_
+        marc_field (pymarc.Field): _description_
+
+    Returns:
+        bool: True if the indicators match, False otherwise.
+    """
+    if indicator_rule := [x["indicators"] for x in entity_mapping if "indicators" in x]:
+        return all(
+            [
+                (marc_field.indicator1 == indicator_rule[0]['ind1'] or indicator_rule[0]['ind1'] == "*"),
+                (marc_field.indicator2 == indicator_rule[0]['ind2'] or indicator_rule[0]['ind2'] == "*"),
+            ]
+        )
+    else:
+        return True
```
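The docstring above already spells out the matching rule; a tiny self-contained sketch of the same logic, using a stand-in object instead of a real `pymarc.Field` so it runs without any MARC data (the entity shapes are illustrative):

```python
from types import SimpleNamespace

from folio_migration_tools.marc_rules_transformation.rules_mapper_base import (
    entity_indicators_match,
)

# Only indicator1/indicator2 are consulted, so a stand-in is enough here.
field_856_40 = SimpleNamespace(indicator1="4", indicator2="0")

with_wildcard = [
    {"indicators": {"ind1": "4", "ind2": "*"}},
    {"target": "electronicAccess.uri", "subfield": ["u"]},
]
wrong_ind1 = [{"indicators": {"ind1": "1", "ind2": "*"}}]
no_indicator_rule = [{"target": "electronicAccess.uri", "subfield": ["u"]}]

print(entity_indicators_match(with_wildcard, field_856_40))      # True
print(entity_indicators_match(wrong_ind1, field_856_40))         # False
print(entity_indicators_match(no_indicator_rule, field_856_40))  # True
```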
folio_migration_tools/migration_tasks/batch_poster.py

```diff
@@ -132,6 +132,8 @@ class BatchPoster(MigrationTaskBase):
             ),
         ] = False

+    task_configuration: TaskConfiguration
+
     @staticmethod
     def get_object_type() -> FOLIONamespaces:
         return FOLIONamespaces.other
@@ -289,35 +291,57 @@
             for i in range(0, len(batch), fetch_batch_size):
                 batch_slice = batch[i:i + fetch_batch_size]
                 fetch_tasks.append(
-
+                    self.get_with_retry(
+                        client,
                         query_api,
                         params={
                             "query": f"id==({' OR '.join([record['id'] for record in batch_slice if 'id' in record])})",
                             "limit": fetch_batch_size
                         },
-                        headers=self.folio_client.okapi_headers
                     )
                 )
+
             responses = await asyncio.gather(*fetch_tasks)

             for response in responses:
-
-
-
-
+                self.update_record_versions(object_type, updates, response)
+            for record in batch:
+                if record["id"] in updates:
+                    record.update(updates[record["id"]])
+
+    @staticmethod
+    def update_record_versions(object_type, updates, response):
+        if response.status_code == 200:
+            response_json = response.json()
+            for record in response_json[object_type]:
+                updates[record["id"]] = {
                     "_version": record["_version"],
                 }
-
-
-
-
+                if "status" in record:
+                    updates[record["id"]]["status"] = record["status"]
+                if "lastCheckIn" in record:
+                    updates[record["id"]]["lastCheckIn"] = record["lastCheckIn"]
+        else:
+            logging.error(
                 "Failed to fetch current records. HTTP %s\t%s",
                 response.status_code,
                 response.text,
            )
-
-
-
+
+    async def get_with_retry(self, client: httpx.AsyncClient, url: str, params: dict = {}):
+        retries = 3
+        for attempt in range(retries):
+            try:
+                response = await client.get(url, params=params, headers=self.folio_client.okapi_headers)
+                response.raise_for_status()
+                return response
+            except httpx.HTTPError as e:
+                if attempt < retries - 1:
+                    logging.warning(f"Retrying due to {e}")
+                    await asyncio.sleep(2 ** attempt)
+                else:
+                    logging.error(f"Failed to connect after {retries} attempts: {e}")
+                    raise

     def post_record_batch(self, batch, failed_recs_file, row):
         json_rec = json.loads(row.split("\t")[-1])
@@ -449,7 +473,7 @@
         )
         logging.info(last_row)
         logging.info("=========Stack trace==============")
-        traceback.logging.info_exc()
+        traceback.logging.info_exc()  # type: ignore
         logging.info("=======================", flush=True)

     def post_batch(self, batch, failed_recs_file, num_records, recursion_depth=0):
```
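`get_with_retry` makes three attempts with exponential backoff (sleeping 1 s, then 2 s) before re-raising, and the fetched `_version` values are merged back into the batch so FOLIO's optimistic locking accepts the re-post. A hedged sketch of driving the same retry pattern from a plain asyncio script; the URL and query are placeholders, not taken from this codebase:

```python
import asyncio

import httpx


async def main():
    async with httpx.AsyncClient() as client:
        for attempt in range(3):
            try:
                # Placeholder endpoint; BatchPoster targets the FOLIO query API
                # with a CQL query such as id==(uuid1 OR uuid2 ...).
                response = await client.get(
                    "https://example.org/records",
                    params={"query": "id==(abc OR def)", "limit": 10},
                )
                response.raise_for_status()
                break
            except httpx.HTTPError:
                if attempt == 2:
                    raise
                await asyncio.sleep(2 ** attempt)
        print(response.status_code)


if __name__ == "__main__":
    asyncio.run(main())
```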
folio_migration_tools/migration_tasks/items_transformer.py

```diff
@@ -368,12 +368,12 @@ class ItemsTransformer(MigrationTaskBase):
         self.handle_circiulation_notes(folio_rec, self.folio_client.current_user)
         self.handle_notes(folio_rec)
         if folio_rec["holdingsRecordId"] in self.mapper.boundwith_relationship_map:
-            for
+            for idx_, instance_id in enumerate(
                 self.mapper.boundwith_relationship_map.get(
                     folio_rec["holdingsRecordId"]
                 )
             ):
-                if
+                if idx_ == 0:
                     bw_id = folio_rec["holdingsRecordId"]
                 else:
                     bw_id = self.mapper.generate_boundwith_holding_uuid(
```
{folio_migration_tools-1.9.0rc5.dist-info → folio_migration_tools-1.9.0rc6.dist-info}/RECORD
RENAMED
```diff
@@ -1,21 +1,21 @@
-folio_migration_tools/__init__.py,sha256=
+folio_migration_tools/__init__.py,sha256=DXvzUKFSpSZjflFWaNm0L8yhFk0u7RVIvQMskwMmKFc,238
 folio_migration_tools/__main__.py,sha256=0rbCmTq4HTxj8M3UjvX3rOEkq6-YqvaSGEMmuCORbho,7282
 folio_migration_tools/circulation_helper.py,sha256=2kAkLM6caPiep0ZtBkMICbRDh53KdfdH21oEX1eMRDI,14193
 folio_migration_tools/colors.py,sha256=GP0wdI_GZ2WD5SjrbPN-S3u8vvN_u6rGQIBBcWv_0ZM,227
 folio_migration_tools/config_file_load.py,sha256=zHHa6NDkN6EJiQE4DgjrFQPVKsd70POsfbGkB8308jg,2822
-folio_migration_tools/custom_dict.py,sha256
-folio_migration_tools/custom_exceptions.py,sha256=
+folio_migration_tools/custom_dict.py,sha256=-FUnhKp90Dg8EHlY6twx-PYQxBUWEO7FgxL2b7pf-xk,678
+folio_migration_tools/custom_exceptions.py,sha256=fRdMt5AwH_XddZ-bts5ByslN5gYthkLdh7o22JroQeE,2686
 folio_migration_tools/extradata_writer.py,sha256=fuchNcMc6BYb9IyfAcvXg7X4J2TfX6YiROfT2hr0JMw,1678
 folio_migration_tools/folder_structure.py,sha256=yyVvbkM9PbczSHNI8vK0Ru7i0x4nbYGzrRriXrnIh38,6715
 folio_migration_tools/helper.py,sha256=KkOkNAGO_fuYqxdLrsbLzCJLQHUrFZG1NzD4RmpQ-KM,2804
 folio_migration_tools/holdings_helper.py,sha256=yJpz6aJrKRBiJ1MtT5bs2vXAc88uJuGh2_KDuCySOKc,7559
 folio_migration_tools/i18n_config.py,sha256=3AH_2b9zTsxE4XTe4isM_zYtPJSlK0ix6eBmV7kAYUM,228
 folio_migration_tools/library_configuration.py,sha256=JE23VSUKDeCxzj_fOeQwkXF4fwd_y9s_hNy91YtEy7A,3514
-folio_migration_tools/mapper_base.py,sha256=
+folio_migration_tools/mapper_base.py,sha256=ftYQJ6UDsP96EvCr5t3yMJemsmSNXsSH5zChDLu4Pp8,21224
 folio_migration_tools/mapping_file_transformation/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 folio_migration_tools/mapping_file_transformation/courses_mapper.py,sha256=mJQxxeTn1bCYb2zwFYyXJ6EGZpJ0DsmwOY3nED7D_gQ,8091
 folio_migration_tools/mapping_file_transformation/holdings_mapper.py,sha256=nJS-xx1LszvbYfw0qdTUHX9xXHlxS7wP5mYmixFMh8A,7221
-folio_migration_tools/mapping_file_transformation/item_mapper.py,sha256=
+folio_migration_tools/mapping_file_transformation/item_mapper.py,sha256=CkpxQhwn-ZUpZu4nvybAyrQhdRVX2-wtCgT35-8JLh0,11005
 folio_migration_tools/mapping_file_transformation/manual_fee_fines_mapper.py,sha256=nCkqbxaDHKxMuqQHh_afxQp48YrVD-SeCZ0L1iGvnkk,13402
 folio_migration_tools/mapping_file_transformation/mapping_file_mapper_base.py,sha256=RacwSOP6r6i28EOywaepq5K5FimD8Ld5MlBo89FYO7c,37963
 folio_migration_tools/mapping_file_transformation/notes_mapper.py,sha256=auLQZqa4rSJo_MIV4Lc5-LG8RcBpp2bnKH243qNYq_0,3470
@@ -24,25 +24,25 @@ folio_migration_tools/mapping_file_transformation/organization_mapper.py,sha256=
 folio_migration_tools/mapping_file_transformation/ref_data_mapping.py,sha256=qFsn_LwKZeKFdOudfEQnNA3DEHOdNQVKzTPdZAlDPX0,8864
 folio_migration_tools/mapping_file_transformation/user_mapper.py,sha256=oWuIPRQL0anF_qTVFibHtc1oOaqyKCBH4O1hX5rQAZQ,7806
 folio_migration_tools/marc_rules_transformation/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-folio_migration_tools/marc_rules_transformation/conditions.py,sha256=
+folio_migration_tools/marc_rules_transformation/conditions.py,sha256=am_VQ5P8vx-AFbVNBVeu0WkIbIh4cQJBLROxNTj_yUE,39143
 folio_migration_tools/marc_rules_transformation/holdings_statementsparser.py,sha256=lTb5QWEAgwyFHy5vdSK6oDl1Q5v2GnzuV04xWV3p4rc,12401
 folio_migration_tools/marc_rules_transformation/hrid_handler.py,sha256=Ihdv0_1q7gL_pZ3HWU3GcfV_jjpIfOLithWk9z_uH3Y,9997
 folio_migration_tools/marc_rules_transformation/loc_language_codes.xml,sha256=ztn2_yKws6qySL4oSsZh7sOjxq5bCC1PhAnXJdtgmJ0,382912
 folio_migration_tools/marc_rules_transformation/marc_file_processor.py,sha256=WkOQRDi7f4PZ5qmVH3Q-1_zdGEKYSvOGC6jixDwDp98,12349
 folio_migration_tools/marc_rules_transformation/marc_reader_wrapper.py,sha256=9ATjYMRAjy0QcXtmNZaHVhHLJ5hE1WUgOcF6KMJjbgo,5309
 folio_migration_tools/marc_rules_transformation/rules_mapper_authorities.py,sha256=GFw8j9UtCxnUdLShmPzJa1MpCK8a0NkQIN5C3jyouRs,9604
-folio_migration_tools/marc_rules_transformation/rules_mapper_base.py,sha256
+folio_migration_tools/marc_rules_transformation/rules_mapper_base.py,sha256=-vJDMNZe-7JbARgybVj6lLtZM79PgWU9V_k23330uLM,41195
 folio_migration_tools/marc_rules_transformation/rules_mapper_bibs.py,sha256=ckVeysbpW9s19pmHvogdRFOCouzz17Y6oKJD0_QfQAk,28924
 folio_migration_tools/marc_rules_transformation/rules_mapper_holdings.py,sha256=gPJaWcvt-CKIJxrEzheRzohnc3mFEnsznuZIXLPhyZM,18954
 folio_migration_tools/migration_report.py,sha256=BkRspM1hwTBnWeqsHamf7yVEofzLj560Q-9G--O00hw,4258
 folio_migration_tools/migration_tasks/__init__.py,sha256=ZkbY_yGyB84Ke8OMlYUzyyBj4cxxNrhMTwQlu_GbdDs,211
 folio_migration_tools/migration_tasks/authority_transformer.py,sha256=AoXg9s-GLO3yEEDCrQV7hc4YVXxwxsdxDdpj1zhHydE,4251
-folio_migration_tools/migration_tasks/batch_poster.py,sha256=
+folio_migration_tools/migration_tasks/batch_poster.py,sha256=wI4lCXU5BQDbKErF6pQxT6srq_Wf_nfFAJc4f1sRCoo,36388
 folio_migration_tools/migration_tasks/bibs_transformer.py,sha256=XzlPo-0uuugJA4SM80xOlOj5nDK6OMDXFnAYg80hOBc,7791
 folio_migration_tools/migration_tasks/courses_migrator.py,sha256=CzXnsu-KGP7B4zcINJzLYUqz47D16NuFfzu_DPqRlTQ,7061
 folio_migration_tools/migration_tasks/holdings_csv_transformer.py,sha256=WT-RlDRm2ILr2-2shfG3TZ3nlSfqxEXT3TklZSqtJCM,22311
 folio_migration_tools/migration_tasks/holdings_marc_transformer.py,sha256=8dtrhxyA9hbISISzpvMJGYaMaDbtZ1MOZeoJJF5lk24,11164
-folio_migration_tools/migration_tasks/items_transformer.py,sha256=
+folio_migration_tools/migration_tasks/items_transformer.py,sha256=NNG7AdS_iuWurdySnWZ0o8kw3KNWPAvkRaHpW-18nV0,18779
 folio_migration_tools/migration_tasks/loans_migrator.py,sha256=4n7zbwljX_jgj9ltnxZAegjN3e8QJMjr6JJa5XfVueY,34771
 folio_migration_tools/migration_tasks/manual_fee_fines_transformer.py,sha256=CnmlTge7nChUJ10EiUkriQtJlVxWqglgfhjgneh2_yM,7247
 folio_migration_tools/migration_tasks/migration_task_base.py,sha256=8enHPNrgOHZs5sDGsz0yMPXap0HBprz8-1HVr9udvf0,16704
@@ -60,8 +60,8 @@ folio_migration_tools/transaction_migration/legacy_request.py,sha256=1ulyFzPQw_I
 folio_migration_tools/transaction_migration/legacy_reserve.py,sha256=d0qbh2fWpwlVSYRL6wZyZG20__NAYNxh7sPSsB-LAes,1804
 folio_migration_tools/transaction_migration/transaction_result.py,sha256=cTdCN0BnlI9_ZJB2Z3Fdkl9gpymIi-9mGZsRFlQcmDk,656
 folio_migration_tools/translations/en.json,sha256=HOVpkb_T-SN_x0NpDp8gyvV1hMLCui3SsG7ByyIv0OU,38669
-folio_migration_tools-1.9.
-folio_migration_tools-1.9.
-folio_migration_tools-1.9.
-folio_migration_tools-1.9.
-folio_migration_tools-1.9.
+folio_migration_tools-1.9.0rc6.dist-info/LICENSE,sha256=PhIEkitVi3ejgq56tt6sWoJIG_zmv82cjjd_aYPPGdI,1072
+folio_migration_tools-1.9.0rc6.dist-info/METADATA,sha256=EL9MObYfZSyJ1otUDkUQEL0T2P8yxU5E77qZ4HhKFrk,7415
+folio_migration_tools-1.9.0rc6.dist-info/WHEEL,sha256=fGIA9gx4Qxk2KDKeNJCbOEwSrmLtjWCwzBz351GyrPQ,88
+folio_migration_tools-1.9.0rc6.dist-info/entry_points.txt,sha256=Hbe-HjqMcU8FwVshVIkeWyZd9XwgT1CCMNf06EpHQu8,77
+folio_migration_tools-1.9.0rc6.dist-info/RECORD,,
```
{folio_migration_tools-1.9.0rc5.dist-info → folio_migration_tools-1.9.0rc6.dist-info}/LICENSE
RENAMED
File without changes

{folio_migration_tools-1.9.0rc5.dist-info → folio_migration_tools-1.9.0rc6.dist-info}/WHEEL
RENAMED
File without changes

{folio_migration_tools-1.9.0rc5.dist-info → folio_migration_tools-1.9.0rc6.dist-info}/entry_points.txt
RENAMED
File without changes