folio-migration-tools 1.10.1-py3-none-any.whl → 1.10.3-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- folio_migration_tools/__init__.py +10 -2
- folio_migration_tools/__main__.py +7 -0
- folio_migration_tools/circulation_helper.py +23 -8
- folio_migration_tools/colors.py +7 -0
- folio_migration_tools/config_file_load.py +7 -0
- folio_migration_tools/custom_dict.py +17 -0
- folio_migration_tools/custom_exceptions.py +40 -4
- folio_migration_tools/extradata_writer.py +12 -0
- folio_migration_tools/folder_structure.py +16 -0
- folio_migration_tools/helper.py +7 -0
- folio_migration_tools/holdings_helper.py +11 -5
- folio_migration_tools/i18n_config.py +6 -0
- folio_migration_tools/library_configuration.py +19 -5
- folio_migration_tools/mapper_base.py +15 -0
- folio_migration_tools/mapping_file_transformation/__init__.py +1 -0
- folio_migration_tools/mapping_file_transformation/courses_mapper.py +17 -0
- folio_migration_tools/mapping_file_transformation/holdings_mapper.py +19 -0
- folio_migration_tools/mapping_file_transformation/item_mapper.py +24 -0
- folio_migration_tools/mapping_file_transformation/manual_fee_fines_mapper.py +18 -0
- folio_migration_tools/mapping_file_transformation/mapping_file_mapper_base.py +26 -9
- folio_migration_tools/mapping_file_transformation/notes_mapper.py +16 -0
- folio_migration_tools/mapping_file_transformation/order_mapper.py +40 -27
- folio_migration_tools/mapping_file_transformation/organization_mapper.py +40 -33
- folio_migration_tools/mapping_file_transformation/ref_data_mapping.py +17 -0
- folio_migration_tools/mapping_file_transformation/user_mapper.py +16 -0
- folio_migration_tools/marc_rules_transformation/__init__.py +1 -0
- folio_migration_tools/marc_rules_transformation/conditions.py +49 -36
- folio_migration_tools/marc_rules_transformation/holdings_statementsparser.py +9 -3
- folio_migration_tools/marc_rules_transformation/hrid_handler.py +16 -1
- folio_migration_tools/marc_rules_transformation/marc_file_processor.py +15 -1
- folio_migration_tools/marc_rules_transformation/marc_reader_wrapper.py +7 -0
- folio_migration_tools/marc_rules_transformation/rules_mapper_base.py +35 -29
- folio_migration_tools/marc_rules_transformation/rules_mapper_bibs.py +23 -18
- folio_migration_tools/marc_rules_transformation/rules_mapper_holdings.py +46 -27
- folio_migration_tools/migration_report.py +14 -6
- folio_migration_tools/migration_tasks/__init__.py +2 -0
- folio_migration_tools/migration_tasks/batch_poster.py +41 -19
- folio_migration_tools/migration_tasks/bibs_transformer.py +16 -0
- folio_migration_tools/migration_tasks/courses_migrator.py +15 -0
- folio_migration_tools/migration_tasks/holdings_csv_transformer.py +18 -3
- folio_migration_tools/migration_tasks/holdings_marc_transformer.py +17 -0
- folio_migration_tools/migration_tasks/inventory_batch_poster.py +424 -0
- folio_migration_tools/migration_tasks/items_transformer.py +16 -0
- folio_migration_tools/migration_tasks/loans_migrator.py +17 -2
- folio_migration_tools/migration_tasks/manual_fee_fines_transformer.py +16 -0
- folio_migration_tools/migration_tasks/marc_import.py +407 -0
- folio_migration_tools/migration_tasks/migration_task_base.py +49 -17
- folio_migration_tools/migration_tasks/orders_transformer.py +16 -0
- folio_migration_tools/migration_tasks/organization_transformer.py +17 -2
- folio_migration_tools/migration_tasks/requests_migrator.py +15 -0
- folio_migration_tools/migration_tasks/reserves_migrator.py +15 -0
- folio_migration_tools/migration_tasks/user_importer.py +347 -0
- folio_migration_tools/migration_tasks/user_transformer.py +16 -0
- folio_migration_tools/task_configuration.py +7 -0
- folio_migration_tools/transaction_migration/__init__.py +1 -0
- folio_migration_tools/transaction_migration/legacy_loan.py +16 -0
- folio_migration_tools/transaction_migration/legacy_request.py +14 -0
- folio_migration_tools/transaction_migration/legacy_reserve.py +14 -0
- folio_migration_tools/transaction_migration/transaction_result.py +16 -0
- {folio_migration_tools-1.10.1.dist-info → folio_migration_tools-1.10.3.dist-info}/METADATA +1 -1
- folio_migration_tools-1.10.3.dist-info/RECORD +66 -0
- folio_migration_tools-1.10.1.dist-info/RECORD +0 -63
- {folio_migration_tools-1.10.1.dist-info → folio_migration_tools-1.10.3.dist-info}/WHEEL +0 -0
- {folio_migration_tools-1.10.1.dist-info → folio_migration_tools-1.10.3.dist-info}/entry_points.txt +0 -0
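A comparable wheel-to-wheel diff can be reproduced locally if you want to verify the listing above. The following Python sketch is illustrative only (it is not the tool that generated this page) and assumes both .whl files have already been downloaded, for example with "pip download folio-migration-tools==1.10.1 --no-deps" and the same for 1.10.3.

# Minimal sketch: diff every text member of two wheel (zip) archives.
import difflib
import zipfile


def diff_wheels(old_whl: str, new_whl: str) -> str:
    """Return a unified diff covering every file present in either wheel."""
    lines = []
    with zipfile.ZipFile(old_whl) as old, zipfile.ZipFile(new_whl) as new:
        old_names, new_names = set(old.namelist()), set(new.namelist())
        for name in sorted(old_names | new_names):
            # Missing members are treated as empty so added/removed files still show up.
            before = old.read(name).decode("utf-8", "replace").splitlines() if name in old_names else []
            after = new.read(name).decode("utf-8", "replace").splitlines() if name in new_names else []
            lines.extend(
                difflib.unified_diff(before, after, fromfile=f"a/{name}", tofile=f"b/{name}", lineterm="")
            )
    return "\n".join(lines)


if __name__ == "__main__":
    print(
        diff_wheels(
            "folio_migration_tools-1.10.1-py3-none-any.whl",
            "folio_migration_tools-1.10.3-py3-none-any.whl",
        )
    )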
folio_migration_tools/migration_tasks/holdings_csv_transformer.py

@@ -1,3 +1,10 @@
+"""Holdings records transformation from CSV files.
+
+Transforms holdings records from CSV/TSV files to FOLIO Holdings records using
+mapping files. Handles bound-with relationships, location mapping, and statistical
+code assignments.
+"""
+
 import csv
 import ctypes
 import json
@@ -39,6 +46,8 @@ csv.register_dialect("tsv", delimiter="\t")
 
 class HoldingsCsvTransformer(MigrationTaskBase):
     class TaskConfiguration(AbstractTaskConfiguration):
+        """Task configuration for HoldingsCsvTransformer."""
+
         name: Annotated[
             str,
             Field(
@@ -176,6 +185,14 @@ class HoldingsCsvTransformer(MigrationTaskBase):
         folio_client,
         use_logging: bool = True,
     ):
+        """Initialize HoldingsCsvTransformer for CSV holdings transformations.
+
+        Args:
+            task_config (TaskConfiguration): Holdings CSV transformation configuration.
+            library_config (LibraryConfiguration): Library configuration.
+            folio_client: FOLIO API client.
+            use_logging (bool): Whether to set up task logging.
+        """
         super().__init__(library_config, task_config, folio_client, use_logging)
         self.fallback_holdings_type = None
         self.folio_keys, self.holdings_field_map = self.load_mapped_fields()
@@ -463,9 +480,7 @@
     def merge_holding_in(
         self, incoming_holding: dict, instance_ids: list[str], legacy_item_id: str
     ) -> None:
-        """
-        previously created ones. When that is done, it generates the correct boundwith
-        parts needed.
+        """Merge newly generated holdings with existing ones and create boundwith parts.
 
         Args:
             incoming_holding (dict): The newly created FOLIO Holding
folio_migration_tools/migration_tasks/holdings_marc_transformer.py

@@ -1,3 +1,10 @@
+"""Holdings records transformation from MARC21 holdings (MFHD).
+
+Transforms MARC21 holdings records to FOLIO Holdings using a rules-based mapping system similar to
+that implemented by FOLIO. Supports holdings statements, location mapping, and bound-with
+relationships.
+"""
+
 import csv
 import json
 import logging
@@ -25,6 +32,8 @@ from folio_migration_tools.migration_tasks.migration_task_base import (
 
 class HoldingsMarcTransformer(MigrationTaskBase):
     class TaskConfiguration(MarcTaskConfigurationBase):
+        """Task configuration for HoldingsMarcTransformer."""
+
         name: Annotated[
             str,
             Field(
@@ -215,6 +224,14 @@ class HoldingsMarcTransformer(MigrationTaskBase):
         folio_client,
         use_logging: bool = True,
     ):
+        """Initialize HoldingsMarcTransformer for MARC holdings transformations.
+
+        Args:
+            task_config (TaskConfiguration): Holdings MARC transformation configuration.
+            library_config (LibraryConfiguration): Library configuration.
+            folio_client: FOLIO API client.
+            use_logging (bool): Whether to set up task logging.
+        """
         csv.register_dialect("tsv", delimiter="\t")
         super().__init__(library_config, task_config, folio_client, use_logging)
         if self.task_configuration.statistical_codes_map_file_name:
folio_migration_tools/migration_tasks/inventory_batch_poster.py

@@ -0,0 +1,424 @@
+"""InventoryBatchPoster module for FOLIO inventory batch operations.
+
+This module provides an adapter that wraps folio_data_import.BatchPoster
+to conform to the folio_migration_tools MigrationTaskBase interface.
+It supports posting Instances, Holdings, Items, and ShadowInstances
+to FOLIO's inventory storage endpoints with support for upsert operations.
+
+This is intended to eventually replace the existing BatchPoster implementation.
+"""
+
+import asyncio
+import logging
+from typing import Annotated, List, Literal
+
+from folio_uuid.folio_namespaces import FOLIONamespaces
+from pydantic import Field
+
+from folio_data_import.BatchPoster import BatchPoster as FDIBatchPoster
+from folio_data_import.BatchPoster import BatchPosterStats
+
+from folio_migration_tools.library_configuration import (
+    FileDefinition,
+    LibraryConfiguration,
+)
+from folio_migration_tools.migration_report import MigrationReport
+from folio_migration_tools.migration_tasks.migration_task_base import MigrationTaskBase
+from folio_migration_tools.task_configuration import AbstractTaskConfiguration
+from folio_data_import._progress import RichProgressReporter
+
+
+class InventoryBatchPoster(MigrationTaskBase):
+    """InventoryBatchPoster.
+
+    An adapter that wraps folio_data_import.BatchPoster to provide batch posting
+    functionality for Instances, Holdings, Items, and ShadowInstances while
+    conforming to the MigrationTaskBase interface.
+
+    This implementation uses async operations internally and provides improved
+    error handling, progress reporting, and upsert capabilities.
+
+    Parents:
+        MigrationTaskBase: Base class for all migration tasks
+
+    Raises:
+        TransformationProcessError: When a critical error occurs during processing
+        TransformationRecordFailedError: When individual records fail to post
+    """
+
+    class TaskConfiguration(AbstractTaskConfiguration):
+        """Task configuration for InventoryBatchPoster."""
+
+        name: Annotated[
+            str,
+            Field(
+                title="Task name",
+                description="The name of the task",
+            ),
+        ]
+        migration_task_type: Annotated[
+            str,
+            Field(
+                title="Migration task type",
+                description="The type of migration task",
+            ),
+        ]
+        object_type: Annotated[
+            Literal["Instances", "Holdings", "Items", "ShadowInstances"],
+            Field(
+                title="Object type",
+                description=(
+                    "The type of inventory object to post: Instances, Holdings, Items, "
+                    "or ShadowInstances (for consortium shadow copies)"
+                ),
+            ),
+        ]
+        files: Annotated[
+            List[FileDefinition],
+            Field(
+                title="List of files",
+                description="List of files to be processed from the results folder",
+            ),
+        ]
+        batch_size: Annotated[
+            int,
+            Field(
+                title="Batch size",
+                description="Number of records to include in each batch (1-1000)",
+                ge=1,
+                le=1000,
+            ),
+        ] = 100
+        upsert: Annotated[
+            bool,
+            Field(
+                title="Upsert",
+                description=(
+                    "Enable upsert mode to create new records or update existing ones. "
+                    "When enabled, records with matching IDs will be updated instead "
+                    "of causing errors."
+                ),
+            ),
+        ] = False
+        preserve_statistical_codes: Annotated[
+            bool,
+            Field(
+                title="Preserve statistical codes",
+                description=(
+                    "Preserve existing statistical codes during upsert. "
+                    "When enabled, statistical codes from existing records will be retained "
+                    "and merged with new codes."
+                ),
+            ),
+        ] = False
+        preserve_administrative_notes: Annotated[
+            bool,
+            Field(
+                title="Preserve administrative notes",
+                description=(
+                    "Preserve existing administrative notes during upsert. "
+                    "When enabled, administrative notes from existing records will be retained "
+                    "and merged with new notes."
+                ),
+            ),
+        ] = False
+        preserve_temporary_locations: Annotated[
+            bool,
+            Field(
+                title="Preserve temporary locations",
+                description=(
+                    "Preserve temporary location assignments on items during upsert. "
+                    "Only applicable when object_type is 'Items'."
+                ),
+            ),
+        ] = False
+        preserve_temporary_loan_types: Annotated[
+            bool,
+            Field(
+                title="Preserve temporary loan types",
+                description=(
+                    "Preserve temporary loan type assignments on items during upsert. "
+                    "Only applicable when object_type is 'Items'."
+                ),
+            ),
+        ] = False
+        preserve_item_status: Annotated[
+            bool,
+            Field(
+                title="Preserve item status",
+                description=(
+                    "Preserve item status during upsert. When enabled, the status "
+                    "field from existing records will be retained. Only applicable "
+                    "when object_type is 'Items'."
+                ),
+            ),
+        ] = True
+        patch_existing_records: Annotated[
+            bool,
+            Field(
+                title="Patch existing records",
+                description=(
+                    "Enable selective field patching during upsert. When enabled, only fields "
+                    "specified in patch_paths will be updated, preserving all other fields."
+                ),
+            ),
+        ] = False
+        patch_paths: Annotated[
+            List[str],
+            Field(
+                title="Patch paths",
+                description=(
+                    "List of field paths to patch during upsert "
+                    "(e.g., ['barcode', 'status']). "
+                    "If empty and patch_existing_records is True, all fields "
+                    "will be patched. Use this to selectively update only "
+                    "specific fields while preserving others."
+                ),
+            ),
+        ] = []
+        rerun_failed_records: Annotated[
+            bool,
+            Field(
+                title="Rerun failed records",
+                description=(
+                    "After the main run, reprocess any failed records one at a time. "
+                    "This gives each record a second chance with individual error handling."
+                ),
+            ),
+        ] = True
+        no_progress: Annotated[
+            bool,
+            Field(
+                title="No progress",
+                description="Disable progress reporting in the console output.",
+            ),
+        ] = False
+
+    task_configuration: TaskConfiguration
+
+    @staticmethod
+    def get_object_type() -> FOLIONamespaces:
+        return FOLIONamespaces.other
+
+    def __init__(
+        self,
+        task_config: TaskConfiguration,
+        library_config: LibraryConfiguration,
+        folio_client,
+        use_logging: bool = True,
+    ):
+        """Initialize InventoryBatchPoster for posting inventory records to FOLIO.
+
+        Args:
+            task_config (TaskConfiguration): Inventory batch posting configuration.
+            library_config (LibraryConfiguration): Library configuration.
+            folio_client: FOLIO API client.
+            use_logging (bool): Whether to set up task logging.
+        """
+        super().__init__(library_config, task_config, folio_client, use_logging)
+        self.migration_report = MigrationReport()
+        self.stats: BatchPosterStats = BatchPosterStats()
+        self.batch_errors: List[str] = []
+
+        logging.info("InventoryBatchPoster initialized")
+        logging.info("Object type: %s", self.task_configuration.object_type)
+        logging.info("Batch size: %s", self.task_configuration.batch_size)
+        logging.info("Upsert mode: %s", "On" if self.task_configuration.upsert else "Off")
+
+    def _create_fdi_config(self) -> FDIBatchPoster.Config:
+        """Create a folio_data_import.BatchPoster.Config from our TaskConfiguration.
+
+        Returns:
+            FDIBatchPoster.Config: Configuration for the underlying BatchPoster
+        """
+        return FDIBatchPoster.Config(
+            object_type=self.task_configuration.object_type,
+            batch_size=self.task_configuration.batch_size,
+            upsert=self.task_configuration.upsert,
+            preserve_statistical_codes=self.task_configuration.preserve_statistical_codes,
+            preserve_administrative_notes=self.task_configuration.preserve_administrative_notes,
+            preserve_temporary_locations=self.task_configuration.preserve_temporary_locations,
+            preserve_temporary_loan_types=self.task_configuration.preserve_temporary_loan_types,
+            preserve_item_status=self.task_configuration.preserve_item_status,
+            patch_existing_records=self.task_configuration.patch_existing_records,
+            patch_paths=self.task_configuration.patch_paths or None,
+            rerun_failed_records=self.task_configuration.rerun_failed_records,
+        )
+
+    def _on_batch_error(self, batch: list, error_message: str) -> None:
+        """Callback for batch errors to capture in migration report.
+
+        Args:
+            batch: The batch of records that failed
+            error_message: The error message
+        """
+        self.batch_errors.append(error_message)
+        self.migration_report.add("Details", error_message)
+
+    async def _do_work_async(self) -> None:
+        """Async implementation of the work logic."""
+        # Build list of file paths
+        file_paths = []
+        for file_def in self.task_configuration.files:
+            path = self.folder_structure.results_folder / file_def.file_name
+            if not path.exists():
+                logging.error("File not found: %s", path)
+                raise FileNotFoundError(f"File not found: {path}")
+            file_paths.append(path)
+            logging.info("Will process file: %s", path)
+
+        # Create the folio_data_import BatchPoster config
+        fdi_config = self._create_fdi_config()
+
+        # Create the Progress Reporter
+        if self.task_configuration.no_progress:
+            from folio_data_import._progress import NoOpProgressReporter
+
+            reporter = NoOpProgressReporter()
+        else:
+            reporter = RichProgressReporter(enabled=True)
+
+        # Create the poster with our failed records path
+        failed_records_path = self.folder_structure.failed_recs_path
+
+        async with self.folio_client:
+            poster = FDIBatchPoster(
+                folio_client=self.folio_client,
+                config=fdi_config,
+                failed_records_file=failed_records_path,
+                reporter=reporter,
+            )
+
+            async with poster:
+                # Process all files
+                self.stats = await poster.do_work(file_paths)
+
+                # If rerun is enabled and there are failures, reprocess them
+                if self.task_configuration.rerun_failed_records and self.stats.records_failed > 0:
+                    logging.info(
+                        "Rerunning %s failed records one at a time",
+                        self.stats.records_failed,
+                    )
+                    await poster.rerun_failed_records_one_by_one()
+                    # Update stats after rerun
+                    self.stats = poster.get_stats()
+
+    def do_work(self) -> None:
+        """Main work method that processes files and posts records to FOLIO.
+
+        This method reads records from the configured files and posts them
+        to FOLIO in batches using the folio_data_import.BatchPoster.
+        """
+        logging.info("Starting InventoryBatchPoster work...")
+
+        try:
+            # Run the async work in an event loop
+            asyncio.run(self._do_work_async())
+        except FileNotFoundError as e:
+            logging.error("File not found: %s", e)
+            raise
+        except Exception as e:
+            logging.error("Error during batch posting: %s", e)
+            raise
+
+        logging.info("InventoryBatchPoster work complete")
+
+    def _translate_stats_to_migration_report(self) -> None:
+        """Translate BatchPosterStats to MigrationReport format."""
+        # General statistics
+        self.migration_report.set(
+            "GeneralStatistics",
+            "Records processed",
+            self.stats.records_processed,
+        )
+        self.migration_report.set(
+            "GeneralStatistics",
+            "Records posted successfully",
+            self.stats.records_posted,
+        )
+        self.migration_report.set(
+            "GeneralStatistics",
+            "Records created",
+            self.stats.records_created,
+        )
+        self.migration_report.set(
+            "GeneralStatistics",
+            "Records updated",
+            self.stats.records_updated,
+        )
+        self.migration_report.set(
+            "GeneralStatistics",
+            "Records failed",
+            self.stats.records_failed,
+        )
+        self.migration_report.set(
+            "GeneralStatistics",
+            "Batches posted",
+            self.stats.batches_posted,
+        )
+        self.migration_report.set(
+            "GeneralStatistics",
+            "Batches failed",
+            self.stats.batches_failed,
+        )
+
+        # Rerun statistics if applicable
+        if self.task_configuration.rerun_failed_records:
+            self.migration_report.set(
+                "GeneralStatistics",
+                "Rerun succeeded",
+                self.stats.rerun_succeeded,
+            )
+            self.migration_report.set(
+                "GeneralStatistics",
+                "Rerun still failed",
+                self.stats.rerun_still_failed,
+            )
+
+        # Add file information
+        for file_def in self.task_configuration.files:
+            self.migration_report.add("FilesProcessed", file_def.file_name)
+
+    def wrap_up(self) -> None:
+        """Finalize the migration task and write reports.
+
+        This method translates statistics from the underlying BatchPoster
+        to the MigrationReport format and writes both markdown and JSON reports.
+        """
+        logging.info("Done. Wrapping up InventoryBatchPoster")
+
+        # Translate stats to migration report
+        self._translate_stats_to_migration_report()
+
+        # Log summary
+        logging.info("=" * 60)
+        logging.info("InventoryBatchPoster Summary")
+        logging.info("=" * 60)
+        logging.info("Records processed: %d", self.stats.records_processed)
+        logging.info("Records posted: %d", self.stats.records_posted)
+        logging.info("Records created: %d", self.stats.records_created)
+        logging.info("Records updated: %d", self.stats.records_updated)
+        logging.info("Records failed: %d", self.stats.records_failed)
+        if self.task_configuration.rerun_failed_records:
+            logging.info("Rerun succeeded: %d", self.stats.rerun_succeeded)
+            logging.info("Rerun still failed: %d", self.stats.rerun_still_failed)
+        if self.stats.records_failed > 0:
+            logging.info(
+                "Failed records written to: %s",
+                self.folder_structure.failed_recs_path,
+            )
+
+        # Write markdown report
+        with open(self.folder_structure.migration_reports_file, "w+") as report_file:
+            self.migration_report.write_migration_report(
+                f"{self.task_configuration.object_type} loading report",
+                report_file,
+                self.start_datetime,
+            )
+
+        # Write raw JSON report
+        with open(self.folder_structure.migration_reports_raw_file, "w") as raw_report_file:
+            self.migration_report.write_json_report(raw_report_file)
+
+        # Clean up empty log files
+        self.clean_out_empty_logs()
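As a rough orientation to the new task above: it is configured entirely through its nested TaskConfiguration and then driven like any other migration task (do_work followed by wrap_up). The sketch below is an illustration, not an excerpt from the package's documentation; the configuration values, the FileDefinition contents, and the pre-existing library_config and folio_client objects are all hypothetical, and in practice tasks are normally launched from a JSON task configuration file.

# Hypothetical wiring of InventoryBatchPoster. Field names are taken verbatim
# from the TaskConfiguration shown in the diff above; every value is an example.
from folio_migration_tools.library_configuration import FileDefinition
from folio_migration_tools.migration_tasks.inventory_batch_poster import InventoryBatchPoster

task_config = InventoryBatchPoster.TaskConfiguration(
    name="post_folio_holdings",
    migration_task_type="InventoryBatchPoster",
    object_type="Holdings",
    files=[FileDefinition(file_name="folio_holdings.json")],  # assumed file in the results folder
    batch_size=250,
    upsert=True,
    preserve_statistical_codes=True,
)

# library_config (LibraryConfiguration) and folio_client are assumed to have been
# created elsewhere, as for any other migration task in this package.
poster = InventoryBatchPoster(task_config, library_config, folio_client)
poster.do_work()   # runs the async batch posting via asyncio.run()
poster.wrap_up()   # writes the markdown and JSON migration reports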
folio_migration_tools/migration_tasks/items_transformer.py

@@ -1,3 +1,9 @@
+"""Item records transformation task.
+
+Transforms item records from CSV/TSV files to FOLIO Item records using mapping
+files. Handles material types, loan types, location mapping, and statistical codes.
+"""
+
 import csv
 import ctypes
 import json
@@ -37,6 +43,8 @@ csv.field_size_limit(int(ctypes.c_ulong(-1).value // 2))
 
 class ItemsTransformer(MigrationTaskBase):
     class TaskConfiguration(AbstractTaskConfiguration):
+        """Task configuration for ItemsTransformer."""
+
         name: Annotated[
             str,
             Field(
@@ -196,6 +204,14 @@ class ItemsTransformer(MigrationTaskBase):
         folio_client,
         use_logging: bool = True,
     ):
+        """Initialize ItemsTransformer for transforming item records.
+
+        Args:
+            task_config (TaskConfiguration): Items transformation configuration.
+            library_config (LibraryConfiguration): Library configuration.
+            folio_client: FOLIO API client.
+            use_logging (bool): Whether to set up task logging.
+        """
         csv.register_dialect("tsv", delimiter="\t")
         super().__init__(library_config, task_config, folio_client, use_logging)
         self.task_config = task_config
folio_migration_tools/migration_tasks/loans_migrator.py

@@ -1,3 +1,9 @@
+"""Open loans migration task.
+
+Migrates open/active circulation loans from legacy ILS to FOLIO. Validates patron
+and item barcodes, handles loan policies, and maintains due dates and renewal counts.
+"""
+
 import copy
 import csv
 import json
@@ -38,6 +44,8 @@ from folio_migration_tools.transaction_migration.transaction_result import (
 
 class LoansMigrator(MigrationTaskBase):
     class TaskConfiguration(AbstractTaskConfiguration):
+        """Task configuration for LoansMigrator."""
+
         name: Annotated[
             str,
             Field(
@@ -98,6 +106,13 @@ class LoansMigrator(MigrationTaskBase):
         library_config: LibraryConfiguration,
         folio_client,
     ):
+        """Initialize LoansMigrator for migrating circulation loans.
+
+        Args:
+            task_configuration (TaskConfiguration): Loans migration configuration.
+            library_config (LibraryConfiguration): Library configuration.
+            folio_client: FOLIO API client.
+        """
         csv.register_dialect("tsv", delimiter="\t")
         self.patron_item_combos: set = set()
         self.t0 = time.time()
@@ -284,7 +299,7 @@
         )
 
     def set_new_status(self, legacy_loan: LegacyLoan, res_checkout: TransactionResult):
-        """Updates checkout loans with their destination statuses
+        """Updates checkout loans with their destination statuses.
 
         Args:
             legacy_loan (LegacyLoan): _description_
@@ -441,7 +456,7 @@
     def handle_checkout_failure(
        self, legacy_loan, folio_checkout: TransactionResult
    ) -> TransactionResult:
-        """Determines what can be done about a previously failed transaction
+        """Determines what can be done about a previously failed transaction.
 
        Args:
            legacy_loan (_type_): The legacy loan
folio_migration_tools/migration_tasks/manual_fee_fines_transformer.py

@@ -1,3 +1,9 @@
+"""Manual fee/fine records transformation task.
+
+Transforms manual fee and fine data from CSV files to FOLIO Accounts (fees/fines).
+Handles fee/fine types, owners, and amounts with proper validation.
+"""
+
 import csv
 import json
 import logging
@@ -31,6 +37,8 @@ from folio_migration_tools.task_configuration import AbstractTaskConfiguration
 
 class ManualFeeFinesTransformer(MigrationTaskBase):
     class TaskConfiguration(AbstractTaskConfiguration):
+        """Task configuration for ManualFeeFinesTransformer."""
+
         name: str
         feefines_map: str
         migration_task_type: str
@@ -50,6 +58,14 @@ class ManualFeeFinesTransformer(MigrationTaskBase):
         folio_client,
         use_logging: bool = True,
     ):
+        """Initialize ManualFeeFinesTransformer for fee/fine transformations.
+
+        Args:
+            task_configuration (TaskConfiguration): Manual fee fines transformation config.
+            library_config (LibraryConfiguration): Library configuration.
+            folio_client: FOLIO API client.
+            use_logging (bool): Whether to set up task logging.
+        """
         csv.register_dialect("tsv", delimiter="\t")
 
         super().__init__(library_config, task_configuration, folio_client, use_logging)