folio-migration-tools 1.9.10__py3-none-any.whl → 1.10.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- folio_migration_tools/__init__.py +3 -4
- folio_migration_tools/__main__.py +53 -31
- folio_migration_tools/circulation_helper.py +118 -108
- folio_migration_tools/custom_dict.py +2 -2
- folio_migration_tools/custom_exceptions.py +4 -5
- folio_migration_tools/folder_structure.py +17 -7
- folio_migration_tools/helper.py +8 -7
- folio_migration_tools/holdings_helper.py +4 -3
- folio_migration_tools/i18n_cache.py +79 -0
- folio_migration_tools/library_configuration.py +77 -37
- folio_migration_tools/mapper_base.py +45 -31
- folio_migration_tools/mapping_file_transformation/courses_mapper.py +1 -1
- folio_migration_tools/mapping_file_transformation/holdings_mapper.py +7 -3
- folio_migration_tools/mapping_file_transformation/item_mapper.py +13 -26
- folio_migration_tools/mapping_file_transformation/manual_fee_fines_mapper.py +1 -2
- folio_migration_tools/mapping_file_transformation/mapping_file_mapper_base.py +13 -11
- folio_migration_tools/mapping_file_transformation/order_mapper.py +6 -5
- folio_migration_tools/mapping_file_transformation/organization_mapper.py +3 -3
- folio_migration_tools/mapping_file_transformation/user_mapper.py +47 -28
- folio_migration_tools/marc_rules_transformation/conditions.py +82 -97
- folio_migration_tools/marc_rules_transformation/holdings_statementsparser.py +13 -5
- folio_migration_tools/marc_rules_transformation/hrid_handler.py +3 -2
- folio_migration_tools/marc_rules_transformation/marc_file_processor.py +26 -24
- folio_migration_tools/marc_rules_transformation/rules_mapper_base.py +56 -51
- folio_migration_tools/marc_rules_transformation/rules_mapper_bibs.py +28 -17
- folio_migration_tools/marc_rules_transformation/rules_mapper_holdings.py +68 -37
- folio_migration_tools/migration_report.py +18 -7
- folio_migration_tools/migration_tasks/batch_poster.py +285 -354
- folio_migration_tools/migration_tasks/bibs_transformer.py +14 -9
- folio_migration_tools/migration_tasks/courses_migrator.py +2 -3
- folio_migration_tools/migration_tasks/holdings_csv_transformer.py +23 -24
- folio_migration_tools/migration_tasks/holdings_marc_transformer.py +14 -24
- folio_migration_tools/migration_tasks/items_transformer.py +23 -34
- folio_migration_tools/migration_tasks/loans_migrator.py +67 -144
- folio_migration_tools/migration_tasks/manual_fee_fines_transformer.py +3 -3
- folio_migration_tools/migration_tasks/migration_task_base.py +47 -60
- folio_migration_tools/migration_tasks/orders_transformer.py +25 -42
- folio_migration_tools/migration_tasks/organization_transformer.py +9 -18
- folio_migration_tools/migration_tasks/requests_migrator.py +21 -24
- folio_migration_tools/migration_tasks/reserves_migrator.py +6 -5
- folio_migration_tools/migration_tasks/user_transformer.py +25 -20
- folio_migration_tools/task_configuration.py +6 -7
- folio_migration_tools/transaction_migration/legacy_loan.py +15 -27
- folio_migration_tools/transaction_migration/legacy_request.py +1 -1
- folio_migration_tools/translations/en.json +0 -7
- {folio_migration_tools-1.9.10.dist-info → folio_migration_tools-1.10.0.dist-info}/METADATA +19 -28
- folio_migration_tools-1.10.0.dist-info/RECORD +63 -0
- folio_migration_tools-1.10.0.dist-info/WHEEL +4 -0
- folio_migration_tools-1.10.0.dist-info/entry_points.txt +3 -0
- folio_migration_tools/marc_rules_transformation/rules_mapper_authorities.py +0 -241
- folio_migration_tools/migration_tasks/authority_transformer.py +0 -119
- folio_migration_tools/test_infrastructure/__init__.py +0 -0
- folio_migration_tools/test_infrastructure/mocked_classes.py +0 -406
- folio_migration_tools-1.9.10.dist-info/RECORD +0 -67
- folio_migration_tools-1.9.10.dist-info/WHEEL +0 -4
- folio_migration_tools-1.9.10.dist-info/entry_points.txt +0 -3
- folio_migration_tools-1.9.10.dist-info/licenses/LICENSE +0 -21
folio_migration_tools/holdings_helper.py

@@ -5,6 +5,7 @@ from uuid import uuid4
 
 from folio_migration_tools import custom_exceptions
 from folio_migration_tools import helper
+from folio_migration_tools.i18n_cache import i18n_t
 from folio_migration_tools.migration_report import MigrationReport
 
 
@@ -54,7 +55,7 @@ class HoldingsHelper:
                 values.append(str(uuid4()))
                 migration_report.add(
                     "HoldingsMerging",
-
+                    i18n_t("Holding prevented from merging by holdingsTypeId"),
                 )
             return "-".join(values)
         except Exception as exception:
@@ -99,12 +100,12 @@ class HoldingsHelper:
                 )
                 migration_report.add(
                     "HoldingsMerging",
-
+                    i18n_t("Duplicate key based on current merge criteria. Records merged"),
                 )
             else:
                 migration_report.add(
                     "HoldingsMerging",
-
+                    i18n_t("Previously transformed holdings record loaded"),
                 )
                 prev_holdings[stored_key] = stored_holding
         return prev_holdings

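The hunks above show the merge-key mechanism in HoldingsHelper: holdings that produce the same "-"-joined key are merged, and appending a uuid4 component when the record's holdingsTypeId is the excluded type forces a unique key, so that record never merges. A standalone sketch of the idea, using an illustrative merge_key helper and field names rather than the class's real signature:

from uuid import uuid4

# Illustrative only: not HoldingsHelper's real signature or field list.
def merge_key(holding: dict, criteria: list[str], excluded_type_id: str) -> str:
    values = [str(holding.get(field, "")) for field in criteria]
    # When the holdings type is excluded from merging, a random component
    # makes the key unique, so this record can never collide with another.
    if holding.get("holdingsTypeId") == excluded_type_id:
        values.append(str(uuid4()))
    return "-".join(values)

criteria = ["instanceId", "permanentLocationId"]
plain = {"instanceId": "i-1", "permanentLocationId": "loc-1"}
excluded = dict(plain, holdingsTypeId="excluded-type-uuid")

print(merge_key(plain, criteria, "excluded-type-uuid"))     # i-1-loc-1 (stable key, can merge)
print(merge_key(excluded, criteria, "excluded-type-uuid"))  # i-1-loc-1-<uuid4> (always unique)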
folio_migration_tools/i18n_cache.py (new file)

@@ -0,0 +1,79 @@
+"""Cached i18n translation wrapper to improve performance.
+
+This module provides a drop-in replacement for i18n.t() that caches translation
+results on first call. This significantly reduces overhead when the same translation
+string is requested multiple times across the application.
+
+The cache uses functools.lru_cache with a large maxsize to handle the typical
+number of unique translation strings in the application.
+
+Example:
+    Instead of:
+        import i18n
+        label = i18n.t("Some translation")
+
+    Use:
+        from folio_migration_tools.i18n_cache import i18n_t
+        label = i18n_t("Some translation")
+
+The cached version will only perform the translation lookup on the first call,
+then return the cached result on subsequent calls. Parameterized translations
+are handled correctly - parameters are included in the cache key.
+"""
+
+from functools import lru_cache
+
+import i18n
+
+
+@lru_cache(maxsize=2048)
+def i18n_t(key: str, *args, **kwargs) -> str:
+    """Cached wrapper around i18n.t() for static translations.
+
+    This function caches the results of i18n.t() calls to avoid repeated
+    translation lookups. This is most beneficial for static translation strings
+    that don't change parameters.
+
+    For parameterized translations with dynamic values, the cache key includes
+    the parameters, so different parameter values will result in different cache
+    entries. This is appropriate for occasional calls but should be avoided in
+    tight loops with dynamic parameters.
+
+    Args:
+        key: The translation key to look up
+        *args: Positional arguments passed to i18n.t()
+        **kwargs: Keyword arguments passed to i18n.t()
+
+    Returns:
+        The translated string, cached on subsequent calls with identical key/args/kwargs
+
+    Note:
+        The cache is module-level and persists for the lifetime of the process.
+        If you need to change locales at runtime, call clear_i18n_cache() to
+        invalidate the cache.
+    """
+    # Convert kwargs to a hashable form for caching (dicts aren't hashable)
+    # We create a tuple of sorted items so the same kwargs always hash the same way
+    # Note: kwargs_tuple would be: tuple(sorted(kwargs.items())) if kwargs else ()
+
+    # Note: We can't actually use *args in the lru_cache because it won't work properly
+    # with the way we've defined this. The actual i18n.t call is below.
+    return i18n.t(key, **kwargs)
+
+
+def clear_i18n_cache() -> None:
+    """Clear the i18n translation cache.
+
+    Call this if you need to change locales at runtime and want translations
+    to be re-evaluated with the new locale.
+    """
+    i18n_t.cache_clear()
+
+
+def get_i18n_cache_info() -> tuple:
+    """Get cache statistics for monitoring and debugging.
+
+    Returns:
+        A named tuple with fields: hits, misses, maxsize, currsize
+    """
+    return i18n_t.cache_info()

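A minimal sketch of how the new cached wrapper is meant to be used, assuming i18n is configured the way the migration tools configure it (the English source string doubles as the translation key); the loop and the printed numbers are only illustrative:

from folio_migration_tools.i18n_cache import (
    clear_i18n_cache,
    get_i18n_cache_info,
    i18n_t,
)

# The first call for a given key misses the cache and goes through i18n.t();
# every later call with the same key and kwargs is served from the lru_cache.
for _ in range(1000):
    label = i18n_t("Holding prevented from merging by holdingsTypeId")

print(get_i18n_cache_info())  # e.g. CacheInfo(hits=999, misses=1, maxsize=2048, currsize=1)

# Parameterized calls are cached per distinct kwargs combination.
msg = i18n_t("Duplicate key based on current merge criteria. Records merged")

# If the locale changes at runtime, the cached strings are stale, so callers
# are expected to invalidate the cache explicitly.
clear_i18n_cache()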
folio_migration_tools/library_configuration.py

@@ -1,7 +1,7 @@
 from enum import Enum
 from typing import Annotated
 
-from pydantic import BaseModel, Field
+from pydantic import BaseModel, Field, model_validator
 from pydantic.types import DirectoryPath
 
 
@@ -25,8 +25,7 @@ class FileDefinition(BaseModel):
         Field(
             title="File name",
             description=(
-                "Name of the file to be processed. "
-                "The location of the file depends on the context"
+                "Name of the file to be processed. The location of the file depends on the context"
             ),
         ),
     ] = ""
@@ -37,10 +36,9 @@ class FileDefinition(BaseModel):
         Field(
             title="Service point ID",
             description=(
-                "Service point to be used for "
-                "transactions created from this file (Loans-only)."
+                "Service point to be used for transactions created from this file (Loans-only)."
             ),
-        )
+        ),
     ] = ""
     statistical_code: Annotated[
         str,
@@ -51,7 +49,7 @@ class FileDefinition(BaseModel):
                 "this file (Instances, Holdings, Items). Specify multiple codes using "
                 "multi_field_delimiter."
             ),
-        )
+        ),
     ] = ""
     create_source_records: Annotated[
         bool,
@@ -64,6 +62,18 @@ class FileDefinition(BaseModel):
             ),
         ),
     ] = True
+    data_import_marc: Annotated[
+        bool,
+        Field(
+            title="Data import MARC",
+            description=(
+                "If set to true, successfully processed MARC records from this file will "
+                "be included in the MARC file for data import generated by the parent task for "
+                "inventory records created from this file. Only applied for MARC-based "
+                "transformations."
+            ),
+        ),
+    ] = True
 
 
 class IlsFlavour(str, Enum):

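A sketch of how a task's file definitions might use the new data_import_marc flag alongside the existing statistical_code field. The field name file_name behind the "File name" title and the delimiter literal are assumptions; the delimiter must match the library configuration's multi_field_delimiter value:

from folio_migration_tools.library_configuration import FileDefinition

# First file: include its records in the generated data import MARC file and
# tag everything created from it with two statistical codes.
main_bibs = FileDefinition(
    file_name="bibs_main.mrc",  # assumed field name behind the "File name" title
    statistical_code="ebook<delimiter>purchased2024",
    create_source_records=True,
    data_import_marc=True,
)

# Second file: transform it, but keep its records out of the data import file.
legacy_bibs = FileDefinition(
    file_name="bibs_legacy.mrc",
    data_import_marc=False,
)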
@@ -85,6 +95,7 @@ class FolioRelease(str, Enum):
     ramsons = "ramsons"
     sunflower = "sunflower"
     trillium = "trillium"
+    umbrellaleaf = "umbrellaleaf"
 
 
 class LibraryConfiguration(BaseModel):
@@ -96,7 +107,6 @@ class LibraryConfiguration(BaseModel):
                 "The URL of the FOLIO API gateway instance. "
                 "You can find this in Settings > Software versions > API gateway services."
             ),
-            alias="okapi_url"
         ),
     ]
     tenant_id: Annotated[
@@ -106,7 +116,8 @@ class LibraryConfiguration(BaseModel):
             description=(
                 "The ID of the FOLIO tenant instance. "
                 "You can find this in Settings > Software versions > API gateway services. "
-                "In an ECS environment, this is the ID of the central tenant, for all
+                "In an ECS environment, this is the ID of the central tenant, for all "
+                "configurations."
             ),
         ),
     ]
@@ -128,18 +139,14 @@ class LibraryConfiguration(BaseModel):
                 "The username for the FOLIO user account performing the migration. "
                 "User should have a full admin permissions/roles in FOLIO. "
             ),
-            alias="okapi_username"
         ),
     ]
     folio_password: Annotated[
         str,
         Field(
             title="FOLIO API Gateway password",
-            description=(
-
-            ),
-            alias="okapi_password"
-            )
+            description=("The password for the FOLIO user account performing the migration. "),
+        ),
     ]
     base_folder: DirectoryPath = Field(
         description=(
@@ -153,7 +160,8 @@ class LibraryConfiguration(BaseModel):
             title="Multi field delimiter",
             description=(
                 "The delimiter used to separate multiple values in a single field. "
-                "This is used for delimited text (CSV/TSV) fields with multiple sub-delimited
+                "This is used for delimited text (CSV/TSV) fields with multiple sub-delimited "
+                "values."
             ),
         ),
     ] = "<delimiter>"
@@ -163,36 +171,42 @@ class LibraryConfiguration(BaseModel):
     ] = 5000
     failed_percentage_threshold: Annotated[
         int,
-        Field(
-            description=("Percentage of failed records until the process shuts down")
-        ),
+        Field(description=("Percentage of failed records until the process shuts down")),
     ] = 20
     generic_exception_threshold: Annotated[
         int,
+        Field(description=("Number of generic exceptions until the process shuts down")),
+    ] = 50
+    library_name: Annotated[str, Field(description="Name of the library being migrated")]
+    log_level_debug: Annotated[bool, Field(description="Enable debug level logging")] = False
+    folio_release: Annotated[
+        FolioRelease,
         Field(
-            description=(
+            description=(
+                "The Flavour of the ILS you are migrating from. This choice is "
+                "maninly tied to the handling of legacy identifiers and thereby the "
+                "deterministic UUIDs generated from them."
+            )
         ),
-    ]
-
-
-
-    "
-
-
-
+    ]
+    iteration_identifier: Annotated[
+        str,
+        Field(
+            description="The name of the current directory under base_folder/iterations/ to be "
+            "used for this migration."
+        ),
+    ]
+    add_time_stamp_to_file_names: Annotated[bool, Field(title="Add time stamp to file names")] = (
+        False
     )
-    iteration_identifier: str
-    add_time_stamp_to_file_names: Annotated[
-        bool, Field(title="Add time stamp to file names")
-    ] = False
     use_gateway_url_for_uuids: Annotated[
         bool,
         Field(
             title="Use gateway URL for UUIDs",
             description=(
-                "If set to true, folio_uuid will use the gateway URL when generating deterministic
-                "If set to false (default), the UUIDs will be generated
+                "If set to true, folio_uuid will use the gateway URL when generating deterministic"
+                " UUIDs for FOLIO records. If set to false (default), the UUIDs will be generated"
+                " using the tenant_id (or ecs_tenant_id)."
            ),
         ),
     ] = False
@@ -212,8 +226,34 @@ class LibraryConfiguration(BaseModel):
         Field(
             title="ECS central iteration identifier",
             description=(
-                "The iteration_identifier value from the central tenant configuration that
-                "to this configuration's iteration_identifier. Used to access the
+                "The iteration_identifier value from the central tenant configuration that "
+                "corresponds to this configuration's iteration_identifier. Used to access the "
+                "central instances_id_map."
             ),
         ),
     ] = ""
+
+    @model_validator(mode="before")
+    @classmethod
+    def handle_legacy_field_names(cls, values):
+        """Handle backward compatibility for legacy okapi field names."""
+        # Handle folio_password / okapi_password backward compatibility
+        if "folio_password" not in values and "okapi_password" in values:
+            values["folio_password"] = values["okapi_password"]
+        if "gateway_url" not in values and "okapi_url" in values:
+            values["gateway_url"] = values["okapi_url"]
+        if "folio_username" not in values and "okapi_username" in values:
+            values["folio_username"] = values["okapi_username"]
+        return values
+
+    @model_validator(mode="before")
+    @classmethod
+    def set_error_thresholds_for_debug(cls, values):
+        """If log_level_debug is true, set error thresholds to very high values to avoid
+        process shutdown during debugging.
+        """
+        if values.get("log_level_debug", False):
+            values["failed_records_threshold"] = 10_000_000
+            values["failed_percentage_threshold"] = 100
+            values["generic_exception_threshold"] = 10_000_000
+        return values

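A rough sketch of what the new before-mode validators allow: a configuration written with the pre-1.10 okapi_* key names still validates, and enabling debug logging relaxes the error thresholds. It assumes the model accepts exactly the fields visible in this diff (the real model may require or reject others not shown here) and uses placeholder values throughout:

import tempfile

from folio_migration_tools.library_configuration import FolioRelease, LibraryConfiguration

with tempfile.TemporaryDirectory() as base_folder:
    config = LibraryConfiguration(
        okapi_url="https://folio-gateway.example.edu",  # legacy key, mapped to gateway_url
        okapi_username="migration_admin",               # legacy key, mapped to folio_username
        okapi_password="secret",                        # legacy key, mapped to folio_password
        tenant_id="example_tenant",
        base_folder=base_folder,
        library_name="Example Library",
        folio_release=FolioRelease.sunflower,
        iteration_identifier="test_iteration",
        log_level_debug=True,
    )

    # handle_legacy_field_names copied the okapi_* values into the renamed fields,
    # and set_error_thresholds_for_debug raised the thresholds because
    # log_level_debug is true.
    print(config.gateway_url, config.folio_username)
    print(config.generic_exception_threshold)  # expected: 10000000 instead of the default 50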
folio_migration_tools/mapper_base.py

@@ -6,7 +6,7 @@ import sys
 import uuid
 from datetime import datetime, timezone
 from pathlib import Path
-from typing import Dict, List,
+from typing import Dict, List, Tuple
 
 import i18n
 from folio_uuid.folio_namespaces import FOLIONamespaces
@@ -21,6 +21,7 @@ from folio_migration_tools.custom_exceptions import (
 )
 from folio_migration_tools.extradata_writer import ExtradataWriter
 from folio_migration_tools.helper import Helper
+from folio_migration_tools.i18n_cache import i18n_t
 from folio_migration_tools.library_configuration import FileDefinition, LibraryConfiguration
 from folio_migration_tools.mapping_file_transformation.ref_data_mapping import (
     RefDataMapping,
@@ -38,10 +39,10 @@ class MapperBase:
         library_configuration: LibraryConfiguration,
         task_configuration: AbstractTaskConfiguration,
         folio_client: FolioClient,
-        parent_id_map: Dict[str, Tuple] =
+        parent_id_map: Dict[str, Tuple] | None = None,
     ):
         logging.info("MapperBase initiating")
-        self.parent_id_map: dict[str, tuple] = parent_id_map
+        self.parent_id_map: dict[str, tuple] = parent_id_map or {}
         self.extradata_writer: ExtradataWriter = ExtradataWriter(Path(""))
         self.start_datetime = datetime.now(timezone.utc)
         self.folio_client: FolioClient = folio_client
@@ -119,8 +120,8 @@ class MapperBase:
             self.migration_report.add(
                 ref_data_mapping.blurb_id,
                 (
-                    f'
-                    f
+                    f"{' - '.join(fieldvalues)} "
+                    f"-> {right_mapping[f'folio_{ref_data_mapping.key_type}']}"
                 ),
             )
             return next(v for k, v in right_mapping.items() if k.startswith("folio_"))
@@ -129,14 +130,14 @@ class MapperBase:
         if prevent_default:
             self.migration_report.add(
                 ref_data_mapping.blurb_id,
-                (f
+                (f'Not to be mapped. (No default) -- {" - ".join(fieldvalues)} -> ""'),
             )
             return ""
         self.migration_report.add(
             ref_data_mapping.blurb_id,
             (
                 f"Unmapped (Default value was set) -- "
-                f'
+                f"{' - '.join(fieldvalues)} -> {ref_data_mapping.default_name}"
             ),
         )
         return ref_data_mapping.default_name
@@ -192,8 +193,8 @@ class MapperBase:
             self.migration_report.add(
                 ref_data_mapping.blurb_id,
                 (
-                    f'
-                    f
+                    f"{' - '.join(fieldvalues)} "
+                    f"-> {right_mapping[f'folio_{ref_data_mapping.key_type}']}"
                 ),
             )
             return right_mapping["folio_id"]
@@ -201,14 +202,14 @@ class MapperBase:
         if prevent_default:
             self.migration_report.add(
                 ref_data_mapping.blurb_id,
-                (f
+                (f'Not to be mapped. (No default) -- {" - ".join(fieldvalues)} -> ""'),
             )
             return ""
         self.migration_report.add(
             ref_data_mapping.blurb_id,
             (
                 f"Unmapped (Default value was set) -- "
-                f'
+                f"{' - '.join(fieldvalues)} -> {ref_data_mapping.default_name}"
             ),
         )
         return ref_data_mapping.default_id
@@ -234,10 +235,10 @@ class MapperBase:
         self.migration_report.add("FieldMappingErrors", error)
         error.id = error.id or index_or_id
         error.log_it()
-        self.migration_report.add_general_statistics(
+        self.migration_report.add_general_statistics(i18n_t("Field Mapping Errors found"))
 
     def handle_transformation_process_error(self, idx, error: TransformationProcessError):
-        self.migration_report.add_general_statistics(
+        self.migration_report.add_general_statistics(i18n_t("Transformation process error"))
         logging.critical("%s\t%s", idx, error)
         print(f"\n{error.message}: {error.data_value}")
         sys.exit(1)
@@ -246,7 +247,7 @@ class MapperBase:
         self, records_processed: int, error: TransformationRecordFailedError
     ):
         self.migration_report.add(
-            "GeneralStatistics",
+            "GeneralStatistics", i18n_t("FAILED Records failed due to an error")
         )
         error.index_or_id = error.index_or_id or records_processed
         error.log_it()
@@ -301,7 +302,7 @@ class MapperBase:
             for id_string in legacy_map.values():
                 legacy_map_file.write(f"{json.dumps(id_string)}\n")
             self.migration_report.add(
-                "GeneralStatistics",
+                "GeneralStatistics", i18n_t("Unique ID:s written to legacy map")
             )
         logging.info("Wrote legacy id map to %s", path)
 
@@ -357,7 +358,7 @@ class MapperBase:
     def add_legacy_id_to_admin_note(self, folio_record: dict, legacy_id: str):
         if not legacy_id:
             raise TransformationFieldMappingError(
-                legacy_id,
+                legacy_id, i18n_t("Legacy id is empty"), legacy_id
             )
         if "administrativeNotes" not in folio_record:
             folio_record["administrativeNotes"] = []
@@ -430,7 +431,7 @@ class MapperBase:
                 folio_holding["id"], instance_uuid
             )
             if bound_with_holding.get("hrid", ""):
-                bound_with_holding["hrid"] = f
+                bound_with_holding["hrid"] = f"{bound_with_holding['hrid']}_bw_{bwidx}"
             self.migration_report.add_general_statistics(i18n.t("Bound-with holdings created"))
             yield bound_with_holding
 
@@ -443,7 +444,12 @@ class MapperBase:
             )
         )
 
-    def map_statistical_codes(
+    def map_statistical_codes(
+        self,
+        folio_record: dict,
+        file_def: FileDefinition,
+        legacy_record: dict | Record | None = None,
+    ):
         """Map statistical codes to the folio record.
 
         This method checks if the file definition contains statistical codes and
@@ -454,13 +460,15 @@ class MapperBase:
         Args:
             folio_record (dict): The FOLIO record to which the statistical codes will be added.
             file_def (FileDefinition): The file definition containing the statistical codes.
-            legacy_record (
-        """
+            legacy_record (dict | Record | None): The legacy record from which the statistical codes are derived.
+        """  # noqa: E501
         if file_def.statistical_code:
             code_strings = file_def.statistical_code.split(
                 self.library_configuration.multi_field_delimiter
             )
-            folio_record["statisticalCodeIds"] =
+            folio_record["statisticalCodeIds"] = (
+                folio_record.get("statisticalCodeIds", []) + code_strings
+            )
 
     def setup_statistical_codes_map(self, statistical_codes_map):
         if statistical_codes_map:
@@ -472,7 +480,9 @@ class MapperBase:
                 "code",
                 "StatisticalCodeMapping",
             )
-            logging.info(
+            logging.info(
+                f"Statistical codes mapping set up {self.statistical_codes_mapping.mapped_legacy_keys}"  # noqa: E501
+            )
         else:
             self.statistical_codes_mapping = None
             logging.info("Statistical codes map is not set up")
@@ -488,17 +498,17 @@ class MapperBase:
             )
             self.migration_report.add(
                 "StatisticalCodeMapping",
-
+                i18n_t("Mapping not set up"),
             )
             return ""
 
-    def map_statistical_code_ids(
-
-    ):
-        if stat_codes := {x: None for x in folio_record.pop("statisticalCodeIds", [])}:
+    def map_statistical_code_ids(self, legacy_ids, folio_record: dict):
+        if stat_codes := dict.fromkeys(folio_record.pop("statisticalCodeIds", [])):
             folio_code_ids = set()
             for stat_code in stat_codes:
-                if stat_code_id := self.get_statistical_code(
+                if stat_code_id := self.get_statistical_code(
+                    {"legacy_stat_code": stat_code}, "statisticalCodeId", legacy_ids
+                ):
                     folio_code_ids.add(stat_code_id)
                 else:
                     Helper.log_data_issue(
@@ -513,7 +523,10 @@ class MapperBase:
 
     @property
     def base_string_for_folio_uuid(self):
-        if
+        if (
+            self.library_configuration.use_gateway_url_for_uuids
+            and not self.library_configuration.is_ecs
+        ):
             return str(self.folio_client.gateway_url)
         elif self.library_configuration.ecs_tenant_id:
             return str(self.library_configuration.ecs_tenant_id)
@@ -522,8 +535,8 @@ class MapperBase:
 
     @staticmethod
     def validate_location_map(location_map: List[Dict], locations: List[Dict]) -> List[Dict]:
-        mapped_codes = [x[
-        existing_codes = [x[
+        mapped_codes = [x["folio_code"] for x in location_map]
+        existing_codes = [x["code"] for x in locations]
         missing_codes = set(mapped_codes) - set(existing_codes)
         if missing_codes:
             raise TransformationProcessError(
@@ -537,6 +550,7 @@ class MapperBase:
     def get_object_type() -> FOLIONamespaces:
         raise NotImplementedError("This method should be overridden in subclasses")
 
+
 def flatten(my_dict: dict, path=""):
     for k, v in iter(my_dict.items()):
         if not path:

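One small change above replaces the dict comprehension {x: None for x in ...} with dict.fromkeys(...) in map_statistical_code_ids. Both deduplicate the statistical code list while keeping first-seen order, which a plain set would not guarantee; a standalone sketch with illustrative values:

codes = ["music", "ebook", "music", "serial", "ebook"]

# dict.fromkeys keeps one entry per distinct value, in first-seen order,
# so iterating over it behaves like an order-preserving set.
deduped = dict.fromkeys(codes)
print(list(deduped))  # ['music', 'ebook', 'serial']

# Equivalent to the dict comprehension the previous code used:
assert deduped == {x: None for x in codes}

# A plain set also deduplicates, but does not preserve the original order.
print(set(codes))     # e.g. {'ebook', 'serial', 'music'} in arbitrary order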
folio_migration_tools/mapping_file_transformation/holdings_mapper.py

@@ -6,7 +6,10 @@ import i18n
 from folio_uuid.folio_uuid import FOLIONamespaces
 from folioclient import FolioClient
 
-from folio_migration_tools.custom_exceptions import
+from folio_migration_tools.custom_exceptions import (
+    TransformationProcessError,
+    TransformationRecordFailedError,
+)
 from folio_migration_tools.library_configuration import (
     FileDefinition,
     LibraryConfiguration,
@@ -19,6 +22,7 @@ from folio_migration_tools.mapping_file_transformation.ref_data_mapping import (
 )
 from folio_migration_tools.task_configuration import AbstractTaskConfiguration
 
+
 class HoldingsMapper(MappingFileMapperBase):
     def __init__(
         self,
@@ -40,7 +44,7 @@ class HoldingsMapper(MappingFileMapperBase):
             statistical_codes_map,
             FOLIONamespaces.holdings,
             library_configuration,
-            task_config
+            task_config,
         )
         self.holdings_map = holdings_map
 
@@ -86,7 +90,7 @@ class HoldingsMapper(MappingFileMapperBase):
             folio_record["discoverySuppress"] = file_def.discovery_suppressed
             self.migration_report.add(
                 "Suppression",
-                i18n.t("Suppressed from discovery") + f
+                i18n.t("Suppressed from discovery") + f" = {folio_record['discoverySuppress']}",
             )
 
     def get_prop(self, legacy_item, folio_prop_name, index_or_id, schema_default_value):