folio-migration-tools 1.9.0rc10__tar.gz → 1.9.0rc12__tar.gz
This diff compares two publicly released versions of the package as published to a supported registry. It is provided for informational purposes only and reflects the package contents exactly as they appear in the public registry.
- {folio_migration_tools-1.9.0rc10 → folio_migration_tools-1.9.0rc12}/PKG-INFO +2 -2
- {folio_migration_tools-1.9.0rc10 → folio_migration_tools-1.9.0rc12}/pyproject.toml +2 -2
- {folio_migration_tools-1.9.0rc10 → folio_migration_tools-1.9.0rc12}/src/folio_migration_tools/__main__.py +24 -7
- {folio_migration_tools-1.9.0rc10 → folio_migration_tools-1.9.0rc12}/src/folio_migration_tools/circulation_helper.py +3 -3
- {folio_migration_tools-1.9.0rc10 → folio_migration_tools-1.9.0rc12}/src/folio_migration_tools/library_configuration.py +54 -6
- {folio_migration_tools-1.9.0rc10 → folio_migration_tools-1.9.0rc12}/src/folio_migration_tools/mapper_base.py +1 -1
- {folio_migration_tools-1.9.0rc10 → folio_migration_tools-1.9.0rc12}/src/folio_migration_tools/mapping_file_transformation/organization_mapper.py +4 -4
- {folio_migration_tools-1.9.0rc10 → folio_migration_tools-1.9.0rc12}/src/folio_migration_tools/marc_rules_transformation/holdings_statementsparser.py +7 -14
- {folio_migration_tools-1.9.0rc10 → folio_migration_tools-1.9.0rc12}/src/folio_migration_tools/marc_rules_transformation/hrid_handler.py +1 -1
- {folio_migration_tools-1.9.0rc10 → folio_migration_tools-1.9.0rc12}/src/folio_migration_tools/marc_rules_transformation/rules_mapper_bibs.py +17 -4
- {folio_migration_tools-1.9.0rc10 → folio_migration_tools-1.9.0rc12}/src/folio_migration_tools/marc_rules_transformation/rules_mapper_holdings.py +117 -4
- {folio_migration_tools-1.9.0rc10 → folio_migration_tools-1.9.0rc12}/src/folio_migration_tools/migration_tasks/batch_poster.py +7 -7
- {folio_migration_tools-1.9.0rc10 → folio_migration_tools-1.9.0rc12}/src/folio_migration_tools/migration_tasks/bibs_transformer.py +3 -1
- {folio_migration_tools-1.9.0rc10 → folio_migration_tools-1.9.0rc12}/src/folio_migration_tools/migration_tasks/holdings_marc_transformer.py +78 -12
- {folio_migration_tools-1.9.0rc10 → folio_migration_tools-1.9.0rc12}/src/folio_migration_tools/migration_tasks/loans_migrator.py +5 -5
- {folio_migration_tools-1.9.0rc10 → folio_migration_tools-1.9.0rc12}/src/folio_migration_tools/migration_tasks/reserves_migrator.py +1 -1
- {folio_migration_tools-1.9.0rc10 → folio_migration_tools-1.9.0rc12}/src/folio_migration_tools/test_infrastructure/mocked_classes.py +94 -0
- {folio_migration_tools-1.9.0rc10 → folio_migration_tools-1.9.0rc12}/src/folio_migration_tools/transaction_migration/legacy_loan.py +15 -12
- {folio_migration_tools-1.9.0rc10 → folio_migration_tools-1.9.0rc12}/src/folio_migration_tools/transaction_migration/legacy_reserve.py +1 -1
- {folio_migration_tools-1.9.0rc10 → folio_migration_tools-1.9.0rc12}/src/folio_migration_tools/translations/en.json +4 -2
- {folio_migration_tools-1.9.0rc10 → folio_migration_tools-1.9.0rc12}/LICENSE +0 -0
- {folio_migration_tools-1.9.0rc10 → folio_migration_tools-1.9.0rc12}/README.md +0 -0
- {folio_migration_tools-1.9.0rc10 → folio_migration_tools-1.9.0rc12}/src/folio_migration_tools/__init__.py +0 -0
- {folio_migration_tools-1.9.0rc10 → folio_migration_tools-1.9.0rc12}/src/folio_migration_tools/colors.py +0 -0
- {folio_migration_tools-1.9.0rc10 → folio_migration_tools-1.9.0rc12}/src/folio_migration_tools/config_file_load.py +0 -0
- {folio_migration_tools-1.9.0rc10 → folio_migration_tools-1.9.0rc12}/src/folio_migration_tools/custom_dict.py +0 -0
- {folio_migration_tools-1.9.0rc10 → folio_migration_tools-1.9.0rc12}/src/folio_migration_tools/custom_exceptions.py +0 -0
- {folio_migration_tools-1.9.0rc10 → folio_migration_tools-1.9.0rc12}/src/folio_migration_tools/extradata_writer.py +0 -0
- {folio_migration_tools-1.9.0rc10 → folio_migration_tools-1.9.0rc12}/src/folio_migration_tools/folder_structure.py +0 -0
- {folio_migration_tools-1.9.0rc10 → folio_migration_tools-1.9.0rc12}/src/folio_migration_tools/helper.py +0 -0
- {folio_migration_tools-1.9.0rc10 → folio_migration_tools-1.9.0rc12}/src/folio_migration_tools/holdings_helper.py +0 -0
- {folio_migration_tools-1.9.0rc10 → folio_migration_tools-1.9.0rc12}/src/folio_migration_tools/i18n_config.py +0 -0
- {folio_migration_tools-1.9.0rc10 → folio_migration_tools-1.9.0rc12}/src/folio_migration_tools/mapping_file_transformation/__init__.py +0 -0
- {folio_migration_tools-1.9.0rc10 → folio_migration_tools-1.9.0rc12}/src/folio_migration_tools/mapping_file_transformation/courses_mapper.py +0 -0
- {folio_migration_tools-1.9.0rc10 → folio_migration_tools-1.9.0rc12}/src/folio_migration_tools/mapping_file_transformation/holdings_mapper.py +0 -0
- {folio_migration_tools-1.9.0rc10 → folio_migration_tools-1.9.0rc12}/src/folio_migration_tools/mapping_file_transformation/item_mapper.py +0 -0
- {folio_migration_tools-1.9.0rc10 → folio_migration_tools-1.9.0rc12}/src/folio_migration_tools/mapping_file_transformation/manual_fee_fines_mapper.py +0 -0
- {folio_migration_tools-1.9.0rc10 → folio_migration_tools-1.9.0rc12}/src/folio_migration_tools/mapping_file_transformation/mapping_file_mapper_base.py +0 -0
- {folio_migration_tools-1.9.0rc10 → folio_migration_tools-1.9.0rc12}/src/folio_migration_tools/mapping_file_transformation/notes_mapper.py +0 -0
- {folio_migration_tools-1.9.0rc10 → folio_migration_tools-1.9.0rc12}/src/folio_migration_tools/mapping_file_transformation/order_mapper.py +0 -0
- {folio_migration_tools-1.9.0rc10 → folio_migration_tools-1.9.0rc12}/src/folio_migration_tools/mapping_file_transformation/ref_data_mapping.py +0 -0
- {folio_migration_tools-1.9.0rc10 → folio_migration_tools-1.9.0rc12}/src/folio_migration_tools/mapping_file_transformation/user_mapper.py +0 -0
- {folio_migration_tools-1.9.0rc10 → folio_migration_tools-1.9.0rc12}/src/folio_migration_tools/marc_rules_transformation/__init__.py +0 -0
- {folio_migration_tools-1.9.0rc10 → folio_migration_tools-1.9.0rc12}/src/folio_migration_tools/marc_rules_transformation/conditions.py +0 -0
- {folio_migration_tools-1.9.0rc10 → folio_migration_tools-1.9.0rc12}/src/folio_migration_tools/marc_rules_transformation/loc_language_codes.xml +0 -0
- {folio_migration_tools-1.9.0rc10 → folio_migration_tools-1.9.0rc12}/src/folio_migration_tools/marc_rules_transformation/marc_file_processor.py +0 -0
- {folio_migration_tools-1.9.0rc10 → folio_migration_tools-1.9.0rc12}/src/folio_migration_tools/marc_rules_transformation/marc_reader_wrapper.py +0 -0
- {folio_migration_tools-1.9.0rc10 → folio_migration_tools-1.9.0rc12}/src/folio_migration_tools/marc_rules_transformation/rules_mapper_authorities.py +0 -0
- {folio_migration_tools-1.9.0rc10 → folio_migration_tools-1.9.0rc12}/src/folio_migration_tools/marc_rules_transformation/rules_mapper_base.py +0 -0
- {folio_migration_tools-1.9.0rc10 → folio_migration_tools-1.9.0rc12}/src/folio_migration_tools/migration_report.py +0 -0
- {folio_migration_tools-1.9.0rc10 → folio_migration_tools-1.9.0rc12}/src/folio_migration_tools/migration_tasks/__init__.py +0 -0
- {folio_migration_tools-1.9.0rc10 → folio_migration_tools-1.9.0rc12}/src/folio_migration_tools/migration_tasks/authority_transformer.py +0 -0
- {folio_migration_tools-1.9.0rc10 → folio_migration_tools-1.9.0rc12}/src/folio_migration_tools/migration_tasks/courses_migrator.py +0 -0
- {folio_migration_tools-1.9.0rc10 → folio_migration_tools-1.9.0rc12}/src/folio_migration_tools/migration_tasks/holdings_csv_transformer.py +0 -0
- {folio_migration_tools-1.9.0rc10 → folio_migration_tools-1.9.0rc12}/src/folio_migration_tools/migration_tasks/items_transformer.py +0 -0
- {folio_migration_tools-1.9.0rc10 → folio_migration_tools-1.9.0rc12}/src/folio_migration_tools/migration_tasks/manual_fee_fines_transformer.py +0 -0
- {folio_migration_tools-1.9.0rc10 → folio_migration_tools-1.9.0rc12}/src/folio_migration_tools/migration_tasks/migration_task_base.py +0 -0
- {folio_migration_tools-1.9.0rc10 → folio_migration_tools-1.9.0rc12}/src/folio_migration_tools/migration_tasks/orders_transformer.py +0 -0
- {folio_migration_tools-1.9.0rc10 → folio_migration_tools-1.9.0rc12}/src/folio_migration_tools/migration_tasks/organization_transformer.py +0 -0
- {folio_migration_tools-1.9.0rc10 → folio_migration_tools-1.9.0rc12}/src/folio_migration_tools/migration_tasks/requests_migrator.py +0 -0
- {folio_migration_tools-1.9.0rc10 → folio_migration_tools-1.9.0rc12}/src/folio_migration_tools/migration_tasks/user_transformer.py +0 -0
- {folio_migration_tools-1.9.0rc10 → folio_migration_tools-1.9.0rc12}/src/folio_migration_tools/task_configuration.py +0 -0
- {folio_migration_tools-1.9.0rc10 → folio_migration_tools-1.9.0rc12}/src/folio_migration_tools/test_infrastructure/__init__.py +0 -0
- {folio_migration_tools-1.9.0rc10 → folio_migration_tools-1.9.0rc12}/src/folio_migration_tools/transaction_migration/__init__.py +0 -0
- {folio_migration_tools-1.9.0rc10 → folio_migration_tools-1.9.0rc12}/src/folio_migration_tools/transaction_migration/legacy_request.py +0 -0
- {folio_migration_tools-1.9.0rc10 → folio_migration_tools-1.9.0rc12}/src/folio_migration_tools/transaction_migration/transaction_result.py +0 -0
PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.3
 Name: folio_migration_tools
-Version: 1.9.0rc10
+Version: 1.9.0rc12
 Summary: A tool allowing you to migrate data from legacy ILS:s (Library systems) into FOLIO LSP
 License: MIT
 Keywords: FOLIO,ILS,LSP,Library Systems,MARC21,Library data
@@ -20,7 +20,7 @@ Requires-Dist: art (>=6.5,<7.0)
 Requires-Dist: deepdiff (>=6.2.3,<7.0.0)
 Requires-Dist: defusedxml (>=0.7.1,<0.8.0)
 Requires-Dist: folio-uuid (>=0.2.8,<0.3.0)
-Requires-Dist: folioclient (>=0.
+Requires-Dist: folioclient (>=0.70.1,<0.71.0)
 Requires-Dist: pyaml (>=21.10.1,<22.0.0)
 Requires-Dist: pydantic (>=1.10.2,<2.0.0)
 Requires-Dist: pyhumps (>=3.7.3,<4.0.0)
pyproject.toml

@@ -1,6 +1,6 @@
 [project]
 name = "folio_migration_tools"
-version = "1.9.0rc10"
+version = "1.9.0rc12"
 description = "A tool allowing you to migrate data from legacy ILS:s (Library systems) into FOLIO LSP"
 authors = [
     {name = "Theodor Tolstoy", email = "github.teddes@tolstoy.se"},
@@ -44,7 +44,7 @@ profile = "black"

 [tool.poetry.dependencies]
 python = "^3.9"
-folioclient = "^0.
+folioclient = "^0.70.1"
 pyhumps = "^3.7.3"
 defusedxml = "^0.7.1"
 python-dateutil = "^2.8.2"
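For readers comparing the two dependency spellings above: Poetry's caret constraint in pyproject.toml is what expands into the explicit range recorded in PKG-INFO. The check below is only an illustration and uses the third-party `packaging` library, which is not a dependency of this project.

```python
# Illustration only: confirm that "^0.70.1" (Poetry caret syntax) corresponds to the
# ">=0.70.1,<0.71.0" marker seen in PKG-INFO. `packaging` is assumed to be installed.
from packaging.specifiers import SpecifierSet
from packaging.version import Version

spec = SpecifierSet(">=0.70.1,<0.71.0")
print(Version("0.70.5") in spec)  # True: allowed by the caret range
print(Version("0.71.0") in spec)  # False: outside the caret range
```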
src/folio_migration_tools/__main__.py

@@ -1,3 +1,4 @@
+from importlib import metadata
 import json
 import logging
 import sys
@@ -38,7 +39,7 @@ def parse_args(args):
         default=environ.get("FOLIO_MIGRATION_TOOLS_TASK_NAME"),
     )
     parser.add_argument(
-        "--okapi_password",
+        "--folio_password", "--okapi_password",
         help="password for the tenant in the configuration file",
         prompt="FOLIO_MIGRATION_TOOLS_OKAPI_PASSWORD" not in environ,
         default=environ.get("FOLIO_MIGRATION_TOOLS_OKAPI_PASSWORD"),
@@ -60,11 +61,17 @@ def parse_args(args):
         default=environ.get("FOLIO_MIGRATION_TOOLS_REPORT_LANGUAGE", "en"),
         prompt=False,
     )
+    parser.add_argument(
+        "--version", "-V",
+        help="Show the version of the FOLIO Migration Tools",
+        action="store_true",
+        prompt=False,
+    )
     return parser.parse_args(args)


 def prep_library_config(args):
     config_file_humped = merge_load(args.configuration_path)
-    config_file_humped["libraryInformation"]["okapiPassword"] = args.
+    config_file_humped["libraryInformation"]["okapiPassword"] = args.folio_password
     config_file_humped["libraryInformation"]["baseFolder"] = args.base_folder_path
     config_file = humps.decamelize(config_file_humped)
     library_config = LibraryConfiguration(**config_file["library_information"])
@@ -78,11 +85,22 @@ def prep_library_config(args):
         sys.exit("ECS Central Iteration Identifier Not Found")
     return config_file, library_config

+def print_version(args):
+    if "-V" in args or "--version" in args:
+        print(
+            f"FOLIO Migration Tools: {metadata.version('folio_migration_tools')}"
+        )
+        sys.exit(0)
+    return None
+

 def main():
     try:
         task_classes = list(inheritors(migration_task_base.MigrationTaskBase))
+        # Check if the script is run with the --version or -V flag
+        print_version(sys.argv)

+        # Parse command line arguments
         args = parse_args(sys.argv[1:])
         try:
             i18n.load_config(
@@ -124,10 +142,10 @@ def main():
         try:
             logging.getLogger("httpx").setLevel(logging.WARNING)  # Exclude info messages from httpx
             with FolioClient(
-                library_config.
+                library_config.gateway_url,
                 library_config.tenant_id,
-                library_config.
-                library_config.
+                library_config.folio_username,
+                library_config.folio_password,
             ) as folio_client:
                 task_config = task_class.TaskConfiguration(**migration_task_config)
                 task_obj = task_class(task_config, library_config, folio_client)
@@ -139,7 +157,6 @@ def main():
                 print("Task failure. Halting.")
                 sys.exit(1)
             logging.info("Work done. Shutting down")
-            sys.exit(0)
         except json.decoder.JSONDecodeError as json_error:
             logging.critical(json_error)
             print(json_error.doc)
@@ -180,7 +197,7 @@ def main():
         logging.exception("Unhandled exception")
         print(f"\n{ee}")
         sys.exit(ee.__class__.__name__)
-
+    sys.exit(0)

 def inheritors(base_class):
     subclasses = set()
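The password option above now lists two long flags. The project's parser is argparse-based (extended with a prompt= keyword), and plain argparse derives the attribute name from the first long option string, which is why prep_library_config can read args.folio_password while --okapi_password keeps working on the command line. A minimal, stand-alone sketch of that behaviour:

```python
# Stand-alone argparse sketch (without the project's prompt extension): the first
# long option determines the attribute name, later options are accepted aliases.
import argparse

parser = argparse.ArgumentParser()
parser.add_argument("--folio_password", "--okapi_password")

args = parser.parse_args(["--okapi_password", "s3cret"])
print(args.folio_password)  # -> s3cret
```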
src/folio_migration_tools/circulation_helper.py

@@ -138,7 +138,7 @@ class CirculationHelper:
         if legacy_loan.proxy_patron_barcode:
             data.update({"proxyUserBarcode": legacy_loan.proxy_patron_barcode})
         path = "/circulation/check-out-by-barcode"
-        url = f"{self.folio_client.
+        url = f"{self.folio_client.gateway_url}{path}"
         try:
             if legacy_loan.patron_barcode in self.missing_patron_barcodes:
                 error_message = i18n.t("Patron barcode already detected as missing")
@@ -249,7 +249,7 @@ class CirculationHelper:
     ):
         try:
             path = "/circulation/requests"
-            url = f"{folio_client.
+            url = f"{folio_client.gateway_url}{path}"
             data = legacy_request.serialize()
             data["requestProcessingParameters"] = {
                 "overrideBlocks": {
@@ -313,7 +313,7 @@ class CirculationHelper:
         del loan_to_put["metadata"]
         loan_to_put["dueDate"] = extension_due_date.isoformat()
         loan_to_put["loanDate"] = extend_out_date.isoformat()
-        url = f"{folio_client.
+        url = f"{folio_client.gateway_url}/circulation/loans/{loan_to_put['id']}"

         req = httpx.put(
             url, headers=folio_client.okapi_headers, json=loan_to_put, timeout=None
src/folio_migration_tools/library_configuration.py

@@ -1,5 +1,5 @@
 from enum import Enum
-from typing import Annotated
+from typing import Annotated

 from pydantic import BaseModel, Field
 from pydantic.types import DirectoryPath
@@ -68,8 +68,28 @@ class FolioRelease(str, Enum):


 class LibraryConfiguration(BaseModel):
-
-
+    gateway_url: Annotated[
+        str,
+        Field(
+            title="FOLIO API Gateway URL",
+            description=(
+                "The URL of the FOLIO API gateway instance. "
+                "You can find this in Settings > Software versions > API gateway services."
+            ),
+            alias="okapi_url"
+        ),
+    ]
+    tenant_id: Annotated[
+        str,
+        Field(
+            title="FOLIO tenant ID",
+            description=(
+                "The ID of the FOLIO tenant instance. "
+                "You can find this in Settings > Software versions > API gateway services. "
+                "In an ECS environment, this is the ID of the central tenant, for all configurations."
+            ),
+        ),
+    ]
     ecs_tenant_id: Annotated[
         str,
         Field(
@@ -80,15 +100,43 @@ class LibraryConfiguration(BaseModel):
             ),
         ),
     ] = ""
-
-
+    folio_username: Annotated[
+        str,
+        Field(
+            title="FOLIO API Gateway username",
+            description=(
+                "The username for the FOLIO user account performing the migration. "
+                "User should have a full admin permissions/roles in FOLIO. "
+            ),
+            alias="okapi_username"
+        ),
+    ]
+    folio_password: Annotated[
+        str,
+        Field(
+            title="FOLIO API Gateway password",
+            description=(
+                "The password for the FOLIO user account performing the migration. "
+            ),
+            alias="okapi_password"
+        )
+    ]
     base_folder: DirectoryPath = Field(
         description=(
             "The base folder for migration. "
             "Should ideally be a github clone of the migration_repo_template"
         )
     )
-    multi_field_delimiter:
+    multi_field_delimiter: Annotated[
+        str,
+        Field(
+            title="Multi field delimiter",
+            description=(
+                "The delimiter used to separate multiple values in a single field. "
+                "This is used for delimited text (CSV/TSV) fields with multiple sub-delimited values."
+            ),
+        ),
+    ] = "<delimiter>"
     failed_records_threshold: Annotated[
         int,
         Field(description=("Number of failed records until the process shuts down")),
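The renamed LibraryConfiguration fields keep their old configuration keys through Field aliases, so existing configuration files that still say okapiUrl/okapiUsername/okapiPassword continue to load. A minimal sketch (not the real model) of how that works under pydantic v1, which this release pins:

```python
# Minimal sketch, not the actual LibraryConfiguration: with an alias set, pydantic
# populates the renamed field from the old key found in the (decamelized) config.
from pydantic import BaseModel, Field

class LibraryConfigSketch(BaseModel):
    gateway_url: str = Field(alias="okapi_url")
    folio_username: str = Field(alias="okapi_username")
    folio_password: str = Field(alias="okapi_password")

cfg = LibraryConfigSketch(
    okapi_url="https://folio.example.org",  # placeholder URL
    okapi_username="migration_admin",       # placeholder credentials
    okapi_password="s3cret",
)
print(cfg.gateway_url, cfg.folio_username)
```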
src/folio_migration_tools/mapper_base.py

@@ -443,7 +443,7 @@ class MapperBase:
     @property
     def base_string_for_folio_uuid(self):
         if self.library_configuration.use_gateway_url_for_uuids and not self.library_configuration.is_ecs:
-            return str(self.folio_client.
+            return str(self.folio_client.gateway_url)
         elif self.library_configuration.ecs_tenant_id:
             return str(self.library_configuration.ecs_tenant_id)
         else:
src/folio_migration_tools/mapping_file_transformation/organization_mapper.py

@@ -79,28 +79,28 @@ class OrganizationMapper(MappingFileMapperBase):
                 False,
             )

-        elif re.compile("addresses\[(\d+)\]\.categories\[(\d+)\]").fullmatch(folio_prop_name):
+        elif re.compile(r"addresses\[(\d+)\]\.categories\[(\d+)\]").fullmatch(folio_prop_name):
             return self.get_mapped_ref_data_value(
                 self.address_categories_map,
                 *value_tuple,
                 False,
             )

-        elif re.compile("emails\[(\d+)\]\.categories\[(\d+)\]").fullmatch(folio_prop_name):
+        elif re.compile(r"emails\[(\d+)\]\.categories\[(\d+)\]").fullmatch(folio_prop_name):
             return self.get_mapped_ref_data_value(
                 self.email_categories_map,
                 *value_tuple,
                 False,
             )

-        elif re.compile("phoneNumbers\[(\d+)\]\.categories\[(\d+)\]").fullmatch(folio_prop_name):
+        elif re.compile(r"phoneNumbers\[(\d+)\]\.categories\[(\d+)\]").fullmatch(folio_prop_name):
             return self.get_mapped_ref_data_value(
                 self.phone_categories_map,
                 *value_tuple,
                 False,
             )

-        elif re.compile("interfaces\[(\d+)\]\.interfaceCredential.interfaceId").fullmatch(
+        elif re.compile(r"interfaces\[(\d+)\]\.interfaceCredential.interfaceId").fullmatch(
             folio_prop_name
         ):
             return "replace_with_interface_id"
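The only change in these mapper branches is the r prefix on the regular-expression literals. Sequences such as \[ and \d are invalid string escapes, which newer Python versions flag with warnings; the raw-string form compiles to exactly the same pattern:

```python
# The compiled pattern and its matches are unchanged; the r prefix just keeps
# Python from interpreting \[ and \d as (invalid) string escape sequences.
import re

pattern = re.compile(r"addresses\[(\d+)\]\.categories\[(\d+)\]")
match = pattern.fullmatch("addresses[0].categories[3]")
print(match.groups())  # -> ('0', '3')
```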
src/folio_migration_tools/marc_rules_transformation/holdings_statementsparser.py

@@ -165,23 +165,16 @@ class HoldingsStatementsParser:
             TransformationFieldMappingError: _description_
         """
         for f in marc_record.get_fields(field_textual):
-
-
-
-
-
-
-                ),
-                f,
-            )
-            if not (
-                len(f.get_subfields("a")) == 0
-                or len(f.get_subfields("z")) == 0
-                or len(f.get_subfields("x")) == 0
+            if all(
+                [
+                    len("".join(f.get_subfields("a")).strip()) == 0,
+                    len("".join(f.get_subfields("z")).strip()) == 0,
+                    len("".join(f.get_subfields("x")).strip()) == 0,
+                ]
             ):
                 raise TransformationFieldMappingError(
                     legacy_ids,
-                    i18n.t("%{field} a,x and z are
+                    i18n.t("%{field} a, x and z are missing or empty", field=field_textual),
                     f,
                 )
             return_dict["statements"].append(
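The rewritten guard raises the mapping error only when subfields $a, $x and $z are all missing or whitespace-only, instead of the earlier presence-based test. A stand-alone sketch of the same emptiness rule, using plain lists in place of pymarc subfield lookups:

```python
# Sketch of the new rule: a textual holdings statement field is rejected only when
# $a, $x and $z are all absent or contain nothing but whitespace.
def statement_field_is_empty(subfields_a, subfields_x, subfields_z):
    return all(
        len("".join(values).strip()) == 0
        for values in (subfields_a, subfields_x, subfields_z)
    )

print(statement_field_is_empty([], ["  "], []))                   # True  -> would raise
print(statement_field_is_empty(["v.1-10 (1990-1999)"], [], []))   # False -> statement is parsed
```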
src/folio_migration_tools/marc_rules_transformation/hrid_handler.py

@@ -160,7 +160,7 @@ class HRIDHandler:
         self.hrid_settings["instances"]["startNumber"] = self.instance_hrid_counter
         self.hrid_settings["holdings"]["startNumber"] = self.holdings_hrid_counter
         self.hrid_settings["items"]["startNumber"] = self.items_hrid_counter
-        url = self.folio_client.
+        url = self.folio_client.gateway_url + self.hrid_path
         resp = httpx.put(
             url,
             json=self.hrid_settings,
src/folio_migration_tools/marc_rules_transformation/rules_mapper_bibs.py

@@ -10,7 +10,6 @@ from pathlib import Path
 from typing import Dict, Generator, List

 import i18n
-import pymarc
 from defusedxml.ElementTree import fromstring
 from folio_uuid.folio_namespaces import FOLIONamespaces
 from folio_uuid.folio_uuid import FolioUUID
@@ -144,7 +143,7 @@ class BibsRulesMapper(RulesMapperBase):
         self.report_folio_mapping(clean_folio_instance, self.schema)
         return [clean_folio_instance]

-    def simple_bib_map(self,
+    def simple_bib_map(self, folio_instance: dict, marc_record: Record, ignored_subsequent_fields: set, legacy_ids: List[str]):
         """
         This method applies a much simplified MARC-to-instance
         mapping to create a minimal FOLIO Instance record to be
@@ -152,13 +151,27 @@ class BibsRulesMapper(RulesMapperBase):
         than creating SRS records during transformation.

         Args:
-
+            folio_instance (dict): _description_
             marc_record (Record): _description_
             legacy_ids (List[str]): _description_
             file_def (FileDefinition): _description_
         """
+        main_entry_field_tags = ["100", "110", "111", "130"]
+        main_entry_fields = marc_record.get_fields(*main_entry_field_tags)
+        main_entry_fields.sort(key=lambda x: int(x.tag))
+        if len(main_entry_fields) > 1:
+            Helper.log_data_issue(
+                legacy_ids,
+                "Multiple main entry fields in record. Record will fail Data Import. Creating Instance anyway.",
+                main_entry_fields
+            )
+        if not main_entry_fields:
+            main_entry_fields += marc_record.get_fields("700", "710", "711", "730")
+            main_entry_fields.sort(key=lambda x: int(x.tag))
+        if main_entry_fields:
+            self.process_marc_field(folio_instance, main_entry_fields[0], ignored_subsequent_fields, legacy_ids)
         try:
-            self.process_marc_field(
+            self.process_marc_field(folio_instance, marc_record['245'], ignored_subsequent_fields, legacy_ids)
         except KeyError:
             raise TransformationRecordFailedError(
                 legacy_ids,
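simple_bib_map now maps one main entry field explicitly: it prefers the lowest-numbered 1XX field (100/110/111/130), logs a data issue when a record carries more than one, and only falls back to the added-entry 7XX fields (700/710/711/730) when no 1XX is present. The tag-selection order, reduced to a stand-alone sketch:

```python
# Stand-alone sketch of the selection order; the real code passes pymarc Field
# objects to process_marc_field rather than bare tag strings.
def pick_main_entry_tag(tags_in_record):
    main = sorted(t for t in tags_in_record if t in {"100", "110", "111", "130"})
    if not main:
        main = sorted(t for t in tags_in_record if t in {"700", "710", "711", "730"})
    return main[0] if main else None

print(pick_main_entry_tag(["245", "710", "110"]))  # -> '110' (1XX wins)
print(pick_main_entry_tag(["245", "730", "700"]))  # -> '700' (fallback to 7XX)
```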
src/folio_migration_tools/marc_rules_transformation/rules_mapper_holdings.py

@@ -34,12 +34,12 @@ from folio_migration_tools.marc_rules_transformation.rules_mapper_base import (
 class RulesMapperHoldings(RulesMapperBase):
     def __init__(
         self,
-        folio_client,
+        folio_client: FolioClient,
         location_map,
         task_configuration,
         library_configuration: LibraryConfiguration,
         parent_id_map: dict,
-
+        boundwith_relationship_map_rows: List[Dict],
     ):
         self.task_configuration = task_configuration
         self.conditions = Conditions(
@@ -58,8 +58,8 @@ class RulesMapperHoldings(RulesMapperBase):
             self.conditions,
             parent_id_map,
         )
-        self.boundwith_relationship_map = self.setup_boundwith_relationship_map(
-
+        self.boundwith_relationship_map: Dict = self.setup_boundwith_relationship_map(
+            boundwith_relationship_map_rows
         )
         self.location_map = self.validate_location_map(
             location_map,
@@ -278,6 +278,8 @@ class RulesMapperHoldings(RulesMapperBase):
         self.set_default_call_number_type_if_empty(folio_holding)
         self.pick_first_location_if_many(folio_holding, legacy_ids)
         self.parse_coded_holdings_statements(marc_record, folio_holding, legacy_ids)
+        self.add_mfhd_as_mrk_note(marc_record, folio_holding, legacy_ids)
+        self.add_mfhd_as_mrc_note(marc_record, folio_holding, legacy_ids)
         HoldingsHelper.handle_notes(folio_holding)
         if (
             all([file_def.create_source_records, self.create_source_records])
@@ -341,6 +343,117 @@ class RulesMapperHoldings(RulesMapperBase):
         except TransformationFieldMappingError as tfme:
             Helper.log_data_issue(tfme.index_or_id, tfme.message, tfme.data_value)
             self.migration_report.add("FieldMappingErrors", tfme.message)
+        self.collect_mrk_statement_notes(marc_record, folio_holding, legacy_ids)
+
+    def collect_mrk_statement_notes(self, marc_record, folio_holding, legacy_ids):
+        """Collects MFHD holdings statements as MARC Maker field strings in a FOLIO holdings note
+        and adds them to the FOLIO holdings record.
+
+        This is done to preserve the information in the MARC record for future reference.
+
+        Args:
+            marc_record (Record): PyMARC record
+            folio_holding (Dict): FOLIO holdings record
+
+        """
+        if self.task_configuration.include_mrk_statements:
+            mrk_statement_notes = []
+            for field in marc_record.get_fields("853", "854", "855", "863", "864", "865", "866", "867", "868"):
+                mrk_statement_notes.append(str(field))
+            if mrk_statement_notes:
+                folio_holding["notes"] = folio_holding.get("notes", []) + self.add_mrk_statements_note(mrk_statement_notes, legacy_ids)
+
+    def add_mrk_statements_note(self, mrk_statement_notes: List[str], legacy_ids) -> List[Dict]:
+        """Creates a note from the MRK statements
+
+        Args:
+            mrk_statement_notes (List[str]): A list of MFHD holdings statements as MRK strings
+
+        Returns:
+            List: A list containing the FOLIO holdings note object (Dict)
+        """
+        holdings_note_type_tuple = self.conditions.get_ref_data_tuple_by_name(
+            self.folio.holding_note_types, "holding_note_types", self.task_configuration.mrk_holdings_note_type
+        )
+        try:
+            holdings_note_type_id = holdings_note_type_tuple[0]
+        except Exception as ee:
+            logging.error(ee)
+            raise TransformationRecordFailedError(
+                legacy_ids,
+                f'Holdings note type mapping error.\tNote type name: {self.task_configuration.mrk_holdings_note_type}\t'
+                f"MFHD holdings statement note type not found in FOLIO.",
+                self.task_configuration.mrk_holdings_note_type,
+            ) from ee
+        return [
+            {
+                "note": "\n".join(mrk_statement_notes),
+                "holdingsNoteTypeId": holdings_note_type_id,
+                "staffOnly": True,
+            }
+        ]
+
+    def add_mfhd_as_mrk_note(self, marc_record: Record, folio_holding: Dict, legacy_ids: List[str]):
+        """Adds the MFHD as a note to the holdings record
+
+        This is done to preserve the information in the MARC record for future reference.
+
+        Args:
+            marc_record (Record): PyMARC record
+            folio_holding (Dict): FOLIO holdings record
+        """
+        if self.task_configuration.include_mfhd_mrk_as_note:
+            holdings_note_type_tuple = self.conditions.get_ref_data_tuple_by_name(
+                self.folio.holding_note_types, "holding_note_types", self.task_configuration.mfhd_mrk_note_type
+            )
+            try:
+                holdings_note_type_id = holdings_note_type_tuple[0]
+            except Exception as ee:
+                logging.error(ee)
+                raise TransformationRecordFailedError(
+                    legacy_ids,
+                    f'Holdings note type mapping error.\tNote type name: {self.task_configuration.mfhd_mrk_note_type}\t'
+                    f"Note type not found in FOLIO.",
+                    self.task_configuration.mfhd_mrk_note_type,
+                ) from ee
+            folio_holding["notes"] = folio_holding.get("notes", []) + [
+                {
+                    "note": str(marc_record),
+                    "holdingsNoteTypeId": holdings_note_type_id,
+                    "staffOnly": True,
+                }
+            ]
+
+    def add_mfhd_as_mrc_note(self, marc_record: Record, folio_holding: Dict, legacy_ids: List[str]):
+        """Adds the MFHD as a note to the holdings record
+
+        This is done to preserve the information in the MARC record for future reference.
+
+        Args:
+            marc_record (Record): PyMARC record
+            folio_holding (Dict): FOLIO holdings record
+        """
+        if self.task_configuration.include_mfhd_mrc_as_note:
+            holdings_note_type_tuple = self.conditions.get_ref_data_tuple_by_name(
+                self.folio.holding_note_types, "holding_note_types", self.task_configuration.mfhd_mrc_note_type
+            )
+            try:
+                holdings_note_type_id = holdings_note_type_tuple[0]
+            except Exception as ee:
+                logging.error(ee)
+                raise TransformationRecordFailedError(
+                    legacy_ids,
+                    f'Holdings note type mapping error.\tNote type name: {self.task_configuration.mfhd_mrc_note_type}\t'
+                    f"Note type not found in FOLIO.",
+                    self.task_configuration.mfhd_mrc_note_type,
+                ) from ee
+            folio_holding["notes"] = folio_holding.get("notes", []) + [
+                {
+                    "note": marc_record.as_marc().decode("utf-8"),
+                    "holdingsNoteTypeId": holdings_note_type_id,
+                    "staffOnly": True,
+                }
+            ]

     def wrap_up(self):
         logging.info("Mapper wrapping up")
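All three new note helpers follow the same pattern: resolve a holdings note type by name, then extend folio_holding["notes"] rather than overwrite it, with staffOnly set to true. A small sketch of that append step; the note text and the UUIDs are only illustrative (the UUIDs match the mocked reference data further down in this diff):

```python
# Sketch of how the helpers extend the notes array; the note text is an invented
# MRK-style example, and the UUIDs come from the test fixtures in mocked_classes.py.
folio_holding = {
    "notes": [
        {"note": "existing note", "holdingsNoteTypeId": "b160f13a-ddba-4053-b9c4-60ec5ea45d56", "staffOnly": False}
    ]
}

mrk_note = {
    "note": "=866  41$81$av.1-v.10 (1990-1999)",
    "holdingsNoteTypeId": "841d1873-015b-4bfb-a69f-6cbb41d925ba",  # "Original MARC holdings statements"
    "staffOnly": True,
}
folio_holding["notes"] = folio_holding.get("notes", []) + [mrk_note]
print(len(folio_holding["notes"]))  # -> 2
```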
src/folio_migration_tools/migration_tasks/batch_poster.py

@@ -287,7 +287,7 @@ class BatchPoster(MigrationTaskBase):
         fetch_batch_size = 90
         fetch_tasks = []
         updates = {}
-        async with httpx.AsyncClient(base_url=self.folio_client.
+        async with httpx.AsyncClient(base_url=self.folio_client.gateway_url) as client:
             for i in range(0, len(batch), fetch_batch_size):
                 batch_slice = batch[i:i + fetch_batch_size]
                 fetch_tasks.append(
@@ -360,7 +360,7 @@
     def post_extra_data(self, row: str, num_records: int, failed_recs_file):
         (object_name, data) = row.split("\t")
         endpoint = self.get_extradata_endpoint(self.task_configuration, object_name, data)
-        url = f"{self.folio_client.
+        url = f"{self.folio_client.gateway_url}/{endpoint}"
         body = data
         response = self.post_objects(url, body)
         if response.status_code == 201:
@@ -415,7 +415,7 @@
         if self.api_info["is_batch"]:
             raise TypeError("This record type supports batch processing, use post_batch method")
         api_endpoint = self.api_info.get("api_endpoint")
-        url = f"{self.folio_client.
+        url = f"{self.folio_client.gateway_url}{api_endpoint}"
         response = self.post_objects(url, row)
         if response.status_code == 201:
             self.num_posted += 1
@@ -590,7 +590,7 @@

     def do_post(self, batch):
         path = self.api_info["api_endpoint"]
-        url = self.folio_client.
+        url = self.folio_client.gateway_url + path
         if self.api_info["object_name"] == "users":
             payload = {self.api_info["object_name"]: list(batch), "totalRecords": len(batch)}
         elif self.api_info["total_records"]:
@@ -675,7 +675,7 @@
             "processingStartedDate": datetime.utcnow().isoformat(timespec="milliseconds"),
         }
         try:
-            url = f"{self.folio_client.
+            url = f"{self.folio_client.gateway_url}/source-storage/snapshots"
            if self.http_client and not self.http_client.is_closed:
                 res = self.http_client.post(
                     url, json=snapshot, headers=self.folio_client.okapi_headers
@@ -684,7 +684,7 @@
                 res = httpx.post(url, headers=self.okapi_headers, json=snapshot, timeout=None)
             res.raise_for_status()
             logging.info("Posted Snapshot to FOLIO: %s", json.dumps(snapshot, indent=4))
-            get_url = f"{self.folio_client.
+            get_url = f"{self.folio_client.gateway_url}/source-storage/snapshots/{self.snapshot_id}"
             getted = False
             while not getted:
                 logging.info("Sleeping while waiting for the snapshot to get created")
@@ -704,7 +704,7 @@
     def commit_snapshot(self):
         snapshot = {"jobExecutionId": self.snapshot_id, "status": "COMMITTED"}
         try:
-            url = f"{self.folio_client.
+            url = f"{self.folio_client.gateway_url}/source-storage/snapshots/{self.snapshot_id}"
             if self.http_client and not self.http_client.is_closed:
                 res = self.http_client.put(
                     url, json=snapshot, headers=self.folio_client.okapi_headers
src/folio_migration_tools/migration_tasks/bibs_transformer.py

@@ -26,7 +26,9 @@ class BibsTransformer(MigrationTaskBase):
         ils_flavour: Annotated[
             IlsFlavour,
             Field(
-                title="ILS flavour",
+                title="ILS flavour",
+                description="The type of ILS you are migrating records from.",
+                alias="ils_flavor"
             ),
         ]
         custom_bib_id_field: Annotated[
src/folio_migration_tools/migration_tasks/holdings_marc_transformer.py

@@ -143,6 +143,65 @@ class HoldingsMarcTransformer(MigrationTaskBase):
                 description="The name of the file in the mapping_files directory containing supplemental MFHD mapping rules",
             ),
         ] = ""
+        include_mrk_statements: Annotated[
+            bool,
+            Field(
+                title="Include MARC statements (MRK-format) as staff-only Holdings notes",
+                description=(
+                    "If set to true, the MARC statements will be included in the output as MARC Maker format fields. "
+                    "If set to false (default), the MARC statements will not be included in the output."
+                ),
+            ),
+        ] = False
+        mrk_holdings_note_type: Annotated[
+            str,
+            Field(
+                title="MARC Holdings Note type",
+                description=(
+                    "The name of the note type to use for MARC (MRK) statements. "
+                ),
+            ),
+        ] = "Original MARC holdings statements"
+        include_mfhd_mrk_as_note: Annotated[
+            bool,
+            Field(
+                title="Include MARC Record (as MARC Maker Representation) as note",
+                description=(
+                    "If set to true, the MARC statements will be included in the output as a "
+                    "(MRK) note. If set to false (default), the MARC statements will not be "
+                    "included in the output."
+                ),
+            ),
+        ] = False
+        mfhd_mrk_note_type: Annotated[
+            str,
+            Field(
+                title="MARC Record (as MARC Maker Representation) note type",
+                description=(
+                    "The name of the note type to use for MFHD (MRK) note. "
+                ),
+            ),
+        ] = "Original MFHD Record"
+        include_mfhd_mrc_as_note: Annotated[
+            bool,
+            Field(
+                title="Include MARC Record (as MARC21 decoded string) as note",
+                description=(
+                    "If set to true, the MARC record will be included in the output as a "
+                    "decoded binary MARC21 record. If set to false (default), the MARC record will not be "
+                    "included in the output."
+                ),
+            ),
+        ] = False
+        mfhd_mrc_note_type: Annotated[
+            str,
+            Field(
+                title="MARC Record (as MARC21 decoded string) note type",
+                description=(
+                    "The name of the note type to use for MFHD (MRC) note. "
+                ),
+            ),
+        ] = "Original MFHD (MARC21)"

     @staticmethod
     def get_object_type() -> FOLIONamespaces:
@@ -183,19 +242,26 @@
         )

         # Load Boundwith relationship map
-        self.
+        self.boundwith_relationship_map_rows = []
         if self.task_config.boundwith_relationship_file_path:
-
-
-
-
-
-
+            try:
+                with open(
+                    self.folder_structure.legacy_records_folder
+                    / self.task_config.boundwith_relationship_file_path
+                ) as boundwith_relationship_file:
+                    self.boundwith_relationship_map_rows = list(
+                        csv.DictReader(boundwith_relationship_file, dialect="tsv")
+                    )
+                    logging.info(
+                        "Rows in Bound with relationship map: %s",
+                        len(self.boundwith_relationship_map_rows),
+                    )
+            except FileNotFoundError:
+                raise TransformationProcessError(
+                    "",
+                    i18n.t("Provided boundwith relationship file not found"),
+                    self.task_config.boundwith_relationship_file_path,
                 )
-            logging.info(
-                "Rows in Bound with relationship map: %s",
-                len(self.boundwith_relationship_map),
-            )

         location_map_path = (
             self.folder_structure.mapping_files_folder
@@ -215,7 +281,7 @@
             self.task_config,
             self.library_configuration,
             self.instance_id_map,
-            self.
+            self.boundwith_relationship_map_rows,
         )
         self.add_supplemental_mfhd_mappings()
         if (
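The boundwith loader above reads the relationship file with csv.DictReader(..., dialect="tsv"). The standard library does not ship a dialect named "tsv", so the tool presumably registers one elsewhere; the sketch below registers a minimal tab-separated dialect and uses invented column names purely for illustration:

```python
# Minimal sketch: register a "tsv" dialect and read tab-separated rows the way the
# boundwith loader does. MFHD_ID/BIB_ID are placeholder column names, not the
# columns the tool actually expects.
import csv
import io

csv.register_dialect("tsv", delimiter="\t")

sample = io.StringIO("MFHD_ID\tBIB_ID\nmfhd-1\tbib-1\nmfhd-1\tbib-2\n")
rows = list(csv.DictReader(sample, dialect="tsv"))
print(rows[0])  # -> {'MFHD_ID': 'mfhd-1', 'BIB_ID': 'bib-1'}
```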
src/folio_migration_tools/migration_tasks/loans_migrator.py

@@ -525,7 +525,7 @@ class LoansMigrator(MigrationTaskBase):
         loan_to_put["dueDate"] = due_date.isoformat()
         loan_to_put["loanDate"] = out_date.isoformat()
         loan_to_put["renewalCount"] = renewal_count
-        url = f"{self.folio_client.
+        url = f"{self.folio_client.gateway_url}/circulation/loans/{loan_to_put['id']}"
         req = self.http_client.put(
             url,
             headers=self.folio_client.okapi_headers,
@@ -608,7 +608,7 @@
         try:
             # Get Item by barcode, update status.
             item_path = f'item-storage/items?query=(barcode=="{legacy_loan.item_barcode}")'
-            item_url = f"{self.folio_client.
+            item_url = f"{self.folio_client.gateway_url}/{item_path}"
             resp = self.http_client.get(item_url, headers=self.folio_client.okapi_headers)
             resp.raise_for_status()
             data = resp.json()
@@ -667,14 +667,14 @@
         self.folio_put_post(url, user, "PUT", i18n.t("Update user"))

     def get_user_by_barcode(self, barcode):
-        url = f'{self.folio_client.
+        url = f'{self.folio_client.gateway_url}/users?query=(barcode=="{barcode}")'
         resp = self.http_client.get(url, headers=self.folio_client.okapi_headers)
         resp.raise_for_status()
         data = resp.json()
         return data["users"][0]

     def folio_put_post(self, url, data_dict, verb, action_description=""):
-        full_url = f"{self.folio_client.
+        full_url = f"{self.folio_client.gateway_url}{url}"
         try:
             if verb == "PUT":
                 resp = self.http_client.put(
@@ -729,7 +729,7 @@
     def change_due_date(self, folio_loan, legacy_loan):
         try:
             api_path = f"{folio_loan['id']}/change-due-date"
-            api_url = f"{self.folio_client.
+            api_url = f"{self.folio_client.gateway_url}/circulation/loans/{api_path}"
             body = {"dueDate": du_parser.isoparse(str(legacy_loan.due_date)).isoformat()}
             req = self.http_client.post(
                 api_url, headers=self.folio_client.okapi_headers, json=body
src/folio_migration_tools/migration_tasks/reserves_migrator.py

@@ -190,7 +190,7 @@ class ReservesMigrator(MigrationTaskBase):
             sys.exit(1)

     def folio_put_post(self, url, data_dict, verb, action_description=""):
-        full_url = f"{self.folio_client.
+        full_url = f"{self.folio_client.gateway_url}{url}"
         try:
             if verb == "PUT":
                 resp = httpx.put(
src/folio_migration_tools/test_infrastructure/mocked_classes.py

@@ -212,6 +212,100 @@ def folio_get_all_mocked(ref_data_path, array_name, query="", limit=10):
             }
         ]

+    elif ref_data_path == "/holdings-note-types":
+        yield from [
+            {
+                "id": "88914775-f677-4759-b57b-1a33b90b24e0",
+                "name": "Electronic bookplate",
+                "source": "folio",
+                "metadata": {
+                    "createdDate": "2024-09-04T01:54:20.719+00:00",
+                    "updatedDate": "2024-09-04T01:54:20.719+00:00"
+                }
+            },
+            {
+                "id": "c4407cc7-d79f-4609-95bd-1cefb2e2b5c5",
+                "name": "Copy note",
+                "source": "folio",
+                "metadata": {
+                    "createdDate": "2024-09-04T01:54:20.722+00:00",
+                    "updatedDate": "2024-09-04T01:54:20.722+00:00"
+                }
+            },
+            {
+                "id": "d6510242-5ec3-42ed-b593-3585d2e48fd6",
+                "name": "Action note",
+                "source": "folio",
+                "metadata": {
+                    "createdDate": "2024-09-04T01:54:20.723+00:00",
+                    "updatedDate": "2024-09-04T01:54:20.723+00:00"
+                }
+            },
+            {
+                "id": "e19eabab-a85c-4aef-a7b2-33bd9acef24e",
+                "name": "Binding",
+                "source": "folio",
+                "metadata": {
+                    "createdDate": "2024-09-04T01:54:20.724+00:00",
+                    "updatedDate": "2024-09-04T01:54:20.724+00:00"
+                }
+            },
+            {
+                "id": "db9b4787-95f0-4e78-becf-26748ce6bdeb",
+                "name": "Provenance",
+                "source": "folio",
+                "metadata": {
+                    "createdDate": "2024-09-04T01:54:20.725+00:00",
+                    "updatedDate": "2024-09-04T01:54:20.725+00:00"
+                }
+            },
+            {
+                "id": "6a41b714-8574-4084-8d64-a9373c3fbb59",
+                "name": "Reproduction",
+                "source": "folio",
+                "metadata": {
+                    "createdDate": "2024-09-04T01:54:20.728+00:00",
+                    "updatedDate": "2024-09-04T01:54:20.728+00:00"
+                }
+            },
+            {
+                "id": "b160f13a-ddba-4053-b9c4-60ec5ea45d56",
+                "name": "Note",
+                "source": "folio",
+                "metadata": {
+                    "createdDate": "2024-09-04T01:54:20.728+00:00",
+                    "updatedDate": "2024-09-04T01:54:20.728+00:00"
+                }
+            },
+            {
+                "id": "841d1873-015b-4bfb-a69f-6cbb41d925ba",
+                "name": "Original MARC holdings statements",
+                "source": "local",
+                "metadata": {
+                    "createdDate": "2025-05-02T01:54:20.728+00:00",
+                    "updatedDate": "2025-05-02T01:54:20.728+00:00"
+                }
+            },
+            {
+                "id": "09c1e5c9-6f11-432e-bcbe-b9e733ccce57",
+                "name": "Original MFHD Record",
+                "source": "local",
+                "metadata": {
+                    "createdDate": "2025-05-02T01:54:20.728+00:00",
+                    "updatedDate": "2025-05-02T01:54:20.728+00:00"
+                }
+            },
+            {
+                "id": "474120b0-d64e-4a6f-9c9c-e7d3e76f3cf5",
+                "name": "Original MFHD (MARC21)",
+                "source": "local",
+                "metadata": {
+                    "createdDate": "2025-05-02T01:54:20.728+00:00",
+                    "updatedDate": "2025-05-02T01:54:20.728+00:00"
+                }
+            }
+        ]
+
     elif ref_data_path in super_schema:
         yield from super_schema.get(ref_data_path)
     else:
src/folio_migration_tools/transaction_migration/legacy_loan.py

@@ -1,4 +1,6 @@
+import json
 import logging
+import i18n
 from datetime import datetime
 from zoneinfo import ZoneInfo

@@ -41,20 +43,21 @@ class LegacyLoan(object):
             "Lost and paid",
         ]

+        self.legacy_loan_dict = legacy_loan_dict
         self.tenant_timezone = tenant_timezone
         self.errors = []
         self.row = row
         for prop in correct_headers:
-            if prop not in legacy_loan_dict and prop not in optional_headers:
+            if prop not in self.legacy_loan_dict and prop not in optional_headers:
                 self.errors.append((f"Missing properties in legacy data {row=}", prop))
             if (
                 prop != "next_item_status"
-                and not legacy_loan_dict.get(prop, "").strip()
+                and not self.legacy_loan_dict.get(prop, "").strip()
                 and prop not in optional_headers
             ):
                 self.errors.append((f"Empty properties in legacy data {row=}", prop))
         try:
-            temp_date_due: datetime = parse(legacy_loan_dict["due_date"])
+            temp_date_due: datetime = parse(self.legacy_loan_dict["due_date"])
             if temp_date_due.tzinfo != tz.UTC:
                 temp_date_due = temp_date_due.replace(tzinfo=self.tenant_timezone)
                 self.report(
@@ -72,7 +75,7 @@ class LegacyLoan(object):
             self.errors.append((f"Parse date failure in {row=}. Setting UTC NOW", "due_date"))
             temp_date_due = datetime.now(ZoneInfo("UTC"))
         try:
-            temp_date_out: datetime = parse(legacy_loan_dict["out_date"])
+            temp_date_out: datetime = parse(self.legacy_loan_dict["out_date"])
             if temp_date_out.tzinfo != tz.UTC:
                 temp_date_out = temp_date_out.replace(tzinfo=self.tenant_timezone)
                 self.report(
@@ -86,20 +89,20 @@ class LegacyLoan(object):
             self.errors.append((f"Parse date failure in {row=}. Setting UTC NOW", "out_date"))

         # good to go, set properties
-        self.item_barcode: str = legacy_loan_dict["item_barcode"].strip()
-        self.patron_barcode: str = legacy_loan_dict["patron_barcode"].strip()
-        self.proxy_patron_barcode: str = legacy_loan_dict.get("proxy_patron_barcode", "")
+        self.item_barcode: str = self.legacy_loan_dict["item_barcode"].strip()
+        self.patron_barcode: str = self.legacy_loan_dict["patron_barcode"].strip()
+        self.proxy_patron_barcode: str = self.legacy_loan_dict.get("proxy_patron_barcode", "")
         self.due_date: datetime = temp_date_due
         self.out_date: datetime = temp_date_out
         self.correct_for_1_day_loans()
         self.make_utc()
-        self.renewal_count = self.set_renewal_count(legacy_loan_dict)
-        self.next_item_status = legacy_loan_dict.get("next_item_status", "").strip()
+        self.renewal_count = self.set_renewal_count(self.legacy_loan_dict)
+        self.next_item_status = self.legacy_loan_dict.get("next_item_status", "").strip()
         if self.next_item_status not in legal_statuses:
             self.errors.append((f"Not an allowed status {row=}", self.next_item_status))
         self.service_point_id = (
-            legacy_loan_dict["service_point_id"]
-            if legacy_loan_dict.get("service_point_id", "")
+            self.legacy_loan_dict["service_point_id"]
+            if self.legacy_loan_dict.get("service_point_id", "")
             else fallback_service_point_id
         )

@@ -121,7 +124,7 @@ class LegacyLoan(object):
         if self.out_date.hour == 0:
             self.out_date = self.out_date.replace(hour=0, minute=1)
         if self.due_date <= self.out_date:
-            raise TransformationProcessError(self.row, "Due date is before out date")
+            raise TransformationProcessError(self.row, i18n.t("Due date is before out date, or date information is missing from both"), json.dumps(self.legacy_loan_dict, indent=2))

     def to_dict(self):
         return {
src/folio_migration_tools/transaction_migration/legacy_reserve.py

@@ -33,7 +33,7 @@ class LegacyReserve(object):
             self.errors.append(("Missing data.", "legacy_identifier"))
         self.course_listing_id: str = str(
             FolioUUID(
-                folio_client.
+                folio_client.gateway_url,
                 FOLIONamespaces.course_listing,
                 legacy_request_dict["legacy_identifier"],
             )
src/folio_migration_tools/translations/en.json

@@ -3,7 +3,7 @@
     "%{action} error. http status: %{status}": "%{action} error. http status: %{status}",
     "%{action} error: %{message}": "%{action} error: %{message}",
     "%{fields_criteria} empty or not set": "%{fields_criteria} empty or not set",
-    "%{field} a,x and z are
+    "%{field} a, x and z are missing or empty": "%{field} a, x and z are missing or empty",
     "%{field} subfields a, x, and z missing from field": "%{field} subfields a, x, and z missing from field",
     "%{fro} mapped from %{record}": "%{fro} mapped from %{record}",
     "%{props} were concatenated": "%{props} were concatenated",
@@ -441,5 +441,7 @@
     "legacy id from %{fro}": "legacy id from %{fro}",
     "naturalId mapped from %{fro}": "naturalId mapped from %{fro}",
     "no matching identifier_types in %{names}": "no matching identifier_types in %{names}",
-    "subfield present in %{linked_value_tag} but not in %{pattern_field}": "subfield present in %{linked_value_tag} but not in %{pattern_field}"
+    "subfield present in %{linked_value_tag} but not in %{pattern_field}": "subfield present in %{linked_value_tag} but not in %{pattern_field}",
+    "Provided boundwith relationship file not found": "Provided boundwith relationship file not found",
+    "Due date is before out date, or date information is missing from both": "Due date is before out date, or date information is missing from both"
 }
The remaining files listed above with +0 -0 are unchanged between 1.9.0rc10 and 1.9.0rc12.