folio-migration-tools 1.2.1__py3-none-any.whl → 1.9.10__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (73)
  1. folio_migration_tools/__init__.py +11 -0
  2. folio_migration_tools/__main__.py +169 -85
  3. folio_migration_tools/circulation_helper.py +96 -59
  4. folio_migration_tools/config_file_load.py +66 -0
  5. folio_migration_tools/custom_dict.py +6 -4
  6. folio_migration_tools/custom_exceptions.py +21 -19
  7. folio_migration_tools/extradata_writer.py +46 -0
  8. folio_migration_tools/folder_structure.py +63 -66
  9. folio_migration_tools/helper.py +29 -21
  10. folio_migration_tools/holdings_helper.py +57 -34
  11. folio_migration_tools/i18n_config.py +9 -0
  12. folio_migration_tools/library_configuration.py +173 -13
  13. folio_migration_tools/mapper_base.py +317 -106
  14. folio_migration_tools/mapping_file_transformation/courses_mapper.py +203 -0
  15. folio_migration_tools/mapping_file_transformation/holdings_mapper.py +83 -69
  16. folio_migration_tools/mapping_file_transformation/item_mapper.py +98 -94
  17. folio_migration_tools/mapping_file_transformation/manual_fee_fines_mapper.py +352 -0
  18. folio_migration_tools/mapping_file_transformation/mapping_file_mapper_base.py +702 -223
  19. folio_migration_tools/mapping_file_transformation/notes_mapper.py +90 -0
  20. folio_migration_tools/mapping_file_transformation/order_mapper.py +492 -0
  21. folio_migration_tools/mapping_file_transformation/organization_mapper.py +389 -0
  22. folio_migration_tools/mapping_file_transformation/ref_data_mapping.py +38 -27
  23. folio_migration_tools/mapping_file_transformation/user_mapper.py +149 -361
  24. folio_migration_tools/marc_rules_transformation/conditions.py +650 -246
  25. folio_migration_tools/marc_rules_transformation/holdings_statementsparser.py +292 -130
  26. folio_migration_tools/marc_rules_transformation/hrid_handler.py +244 -0
  27. folio_migration_tools/marc_rules_transformation/loc_language_codes.xml +20846 -0
  28. folio_migration_tools/marc_rules_transformation/marc_file_processor.py +300 -0
  29. folio_migration_tools/marc_rules_transformation/marc_reader_wrapper.py +136 -0
  30. folio_migration_tools/marc_rules_transformation/rules_mapper_authorities.py +241 -0
  31. folio_migration_tools/marc_rules_transformation/rules_mapper_base.py +681 -201
  32. folio_migration_tools/marc_rules_transformation/rules_mapper_bibs.py +395 -429
  33. folio_migration_tools/marc_rules_transformation/rules_mapper_holdings.py +531 -100
  34. folio_migration_tools/migration_report.py +85 -38
  35. folio_migration_tools/migration_tasks/__init__.py +1 -3
  36. folio_migration_tools/migration_tasks/authority_transformer.py +119 -0
  37. folio_migration_tools/migration_tasks/batch_poster.py +911 -198
  38. folio_migration_tools/migration_tasks/bibs_transformer.py +121 -116
  39. folio_migration_tools/migration_tasks/courses_migrator.py +192 -0
  40. folio_migration_tools/migration_tasks/holdings_csv_transformer.py +252 -247
  41. folio_migration_tools/migration_tasks/holdings_marc_transformer.py +321 -115
  42. folio_migration_tools/migration_tasks/items_transformer.py +264 -84
  43. folio_migration_tools/migration_tasks/loans_migrator.py +506 -195
  44. folio_migration_tools/migration_tasks/manual_fee_fines_transformer.py +187 -0
  45. folio_migration_tools/migration_tasks/migration_task_base.py +364 -74
  46. folio_migration_tools/migration_tasks/orders_transformer.py +373 -0
  47. folio_migration_tools/migration_tasks/organization_transformer.py +451 -0
  48. folio_migration_tools/migration_tasks/requests_migrator.py +130 -62
  49. folio_migration_tools/migration_tasks/reserves_migrator.py +253 -0
  50. folio_migration_tools/migration_tasks/user_transformer.py +180 -139
  51. folio_migration_tools/task_configuration.py +46 -0
  52. folio_migration_tools/test_infrastructure/__init__.py +0 -0
  53. folio_migration_tools/test_infrastructure/mocked_classes.py +406 -0
  54. folio_migration_tools/transaction_migration/legacy_loan.py +148 -34
  55. folio_migration_tools/transaction_migration/legacy_request.py +65 -25
  56. folio_migration_tools/transaction_migration/legacy_reserve.py +47 -0
  57. folio_migration_tools/transaction_migration/transaction_result.py +12 -1
  58. folio_migration_tools/translations/en.json +476 -0
  59. folio_migration_tools-1.9.10.dist-info/METADATA +169 -0
  60. folio_migration_tools-1.9.10.dist-info/RECORD +67 -0
  61. {folio_migration_tools-1.2.1.dist-info → folio_migration_tools-1.9.10.dist-info}/WHEEL +1 -2
  62. folio_migration_tools-1.9.10.dist-info/entry_points.txt +3 -0
  63. folio_migration_tools/generate_schemas.py +0 -46
  64. folio_migration_tools/mapping_file_transformation/mapping_file_mapping_base_impl.py +0 -44
  65. folio_migration_tools/mapping_file_transformation/user_mapper_base.py +0 -212
  66. folio_migration_tools/marc_rules_transformation/bibs_processor.py +0 -163
  67. folio_migration_tools/marc_rules_transformation/holdings_processor.py +0 -284
  68. folio_migration_tools/report_blurbs.py +0 -219
  69. folio_migration_tools/transaction_migration/legacy_fee_fine.py +0 -36
  70. folio_migration_tools-1.2.1.dist-info/METADATA +0 -134
  71. folio_migration_tools-1.2.1.dist-info/RECORD +0 -50
  72. folio_migration_tools-1.2.1.dist-info/top_level.txt +0 -1
  73. {folio_migration_tools-1.2.1.dist-info → folio_migration_tools-1.9.10.dist-info/licenses}/LICENSE +0 -0
folio_migration_tools/migration_tasks/requests_migrator.py
@@ -1,37 +1,84 @@
 import csv
-from datetime import datetime
 import json
-import sys
 import logging
+import sys
 import time
-from datetime import timezone
-from pydantic import BaseModel
-from folio_migration_tools.helper import Helper
+from typing import Optional, Annotated
+from pydantic import Field
+
+import i18n
 from folio_uuid.folio_namespaces import FOLIONamespaces
+from zoneinfo import ZoneInfo
+
 from folio_migration_tools.circulation_helper import CirculationHelper
 from folio_migration_tools.custom_dict import InsensitiveDictReader
+from folio_migration_tools.helper import Helper
 from folio_migration_tools.library_configuration import (
     FileDefinition,
     LibraryConfiguration,
 )
 from folio_migration_tools.migration_report import MigrationReport
 from folio_migration_tools.migration_tasks.migration_task_base import MigrationTaskBase
-
-from typing import Dict, List, Optional
-from folio_migration_tools.report_blurbs import Blurbs
-
+from folio_migration_tools.task_configuration import AbstractTaskConfiguration
 from folio_migration_tools.transaction_migration.legacy_request import LegacyRequest


 class RequestsMigrator(MigrationTaskBase):
-    class TaskConfiguration(BaseModel):
-        name: str
-        utc_difference: int
-        migration_task_type: str
-        open_requests_file: FileDefinition
-        starting_row: Optional[int] = 1
-        item_files: Optional[list[FileDefinition]] = []
-        patron_files: Optional[list[FileDefinition]] = []
+    class TaskConfiguration(AbstractTaskConfiguration):
+        name: Annotated[
+            str,
+            Field(
+                title="Task name",
+                description=(
+                    "Name of this migration task. The name is being used to call "
+                    "the specific task, and to distinguish tasks of similar types"
+                )
+            ),
+        ]
+        migration_task_type: Annotated[
+            str,
+            Field(
+                title="Migration task type",
+                description="The type of migration task you want to perform",
+            ),
+        ]
+        open_requests_file: Annotated[
+            FileDefinition,
+            Field(
+                title="Open requests file",
+                description="File with list of open requests",
+            ),
+        ]
+        starting_row: Annotated[
+            Optional[int],
+            Field(
+                title="Starting row",
+                description=(
+                    "Row number to start processing data from. "
+                    "Optional, by default is first row"
+                ),
+            ),
+        ] = 1
+        item_files: Annotated[
+            Optional[list[FileDefinition]],
+            Field(
+                title="Item files",
+                description=(
+                    "List of files containing item data. "
+                    "Optional, by default is empty list"
+                ),
+            ),
+        ] = []
+        patron_files: Annotated[
+            Optional[list[FileDefinition]],
+            Field(
+                title="Patron files",
+                description=(
+                    "List of files containing patron data. "
+                    "Optional, by default is empty list"
+                ),
+            ),
+        ] = []

     @staticmethod
     def get_object_type() -> FOLIONamespaces:
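The move from plain BaseModel attributes to Annotated[..., Field(...)] metadata means every task setting now carries a title and description that can surface in generated JSON schemas and documentation. A minimal standalone sketch of the pattern (DemoTaskConfiguration and its fields are illustrative, not part of the package):

    # Minimal standalone sketch of the Annotated/Field pattern shown above.
    # DemoTaskConfiguration and its fields are illustrative, not folio_migration_tools code.
    from typing import Annotated, Optional
    from pydantic import BaseModel, Field

    class DemoTaskConfiguration(BaseModel):
        name: Annotated[str, Field(title="Task name", description="Name of this migration task")]
        starting_row: Annotated[
            Optional[int],
            Field(title="Starting row", description="Row number to start processing data from"),
        ] = 1

    config = DemoTaskConfiguration(name="transform_requests")
    print(config.starting_row)  # -> 1, the declared default
    schema = DemoTaskConfiguration.schema()  # model_json_schema() on pydantic v2
    print(schema["properties"]["starting_row"]["description"])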
@@ -41,16 +88,30 @@ class RequestsMigrator(MigrationTaskBase):
         self,
         task_configuration: TaskConfiguration,
         library_config: LibraryConfiguration,
+        folio_client
     ):
         csv.register_dialect("tsv", delimiter="\t")
         self.migration_report = MigrationReport()
         self.valid_legacy_requests = []
-        super().__init__(library_config, task_configuration)
+        super().__init__(library_config, task_configuration, folio_client)
         self.circulation_helper = CirculationHelper(
             self.folio_client,
             "",
             self.migration_report,
         )
+        try:
+            logging.info("Attempting to retrieve tenant timezone configuration...")
+            my_path = (
+                "/configurations/entries?query=(module==ORG%20and%20configName==localeSettings)"
+            )
+            self.tenant_timezone_str = json.loads(
+                self.folio_client.folio_get_single_object(my_path)["configs"][0]["value"]
+            )["timezone"]
+            logging.info("Tenant timezone is: %s", self.tenant_timezone_str)
+        except Exception:
+            logging.info('Tenant locale settings not available. Using "UTC".')
+            self.tenant_timezone_str = "UTC"
+        self.tenant_timezone = ZoneInfo(self.tenant_timezone_str)
         with open(
             self.folder_structure.legacy_records_folder
             / task_configuration.open_requests_file.file_name,
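This replaces the old utc_difference integer offset: the tenant's IANA timezone name is read from the mod-configuration locale settings and turned into a ZoneInfo that the request transformation can use. Roughly, applying such a zone to a naive legacy timestamp looks like the following standard-library illustration (not code from the package; the timezone name and timestamp are invented):

    # Illustration only: converting a naive legacy timestamp using a tenant ZoneInfo.
    from datetime import datetime, timezone
    from zoneinfo import ZoneInfo

    tenant_timezone = ZoneInfo("America/Chicago")  # value parsed from localeSettings
    legacy_local = datetime.fromisoformat("2023-05-04 14:30:00")  # naive wall-clock time
    as_utc = legacy_local.replace(tzinfo=tenant_timezone).astimezone(timezone.utc)
    print(as_utc.isoformat())  # 2023-05-04T19:30:00+00:00 (CDT is UTC-5)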
@@ -66,9 +127,7 @@ class RequestsMigrator(MigrationTaskBase):
                 "Loaded and validated %s requests in file",
                 len(self.semi_valid_legacy_requests),
             )
-            if any(self.task_configuration.item_files) or any(
-                self.task_configuration.patron_files
-            ):
+            if any(self.task_configuration.item_files) or any(self.task_configuration.patron_files):
                 self.valid_legacy_requests = list(self.check_barcodes())
                 logging.info(
                     "Loaded and validated %s requests against barcodes",
@@ -80,8 +139,10 @@ class RequestsMigrator(MigrationTaskBase):
                     "previously migrated objects"
                 )
                 self.valid_legacy_requests = self.semi_valid_legacy_requests
+
         self.valid_legacy_requests.sort(key=lambda x: x.request_date)
         logging.info("Sorted the list of requests by request date")
+
         self.t0 = time.time()
         self.skipped_since_already_added = 0
         self.failed_requests = set()
@@ -89,29 +150,30 @@ class RequestsMigrator(MigrationTaskBase):
         logging.info("Init completed")

     def prepare_legacy_request(self, legacy_request: LegacyRequest):
-        patron = self.circulation_helper.get_user_by_barcode(
-            legacy_request.patron_barcode
-        )
+        patron = self.circulation_helper.get_user_by_barcode(legacy_request.patron_barcode)
+        self.migration_report.add_general_statistics(i18n.t("Patron lookups performed"))
+
         if not patron:
-            logging.error(
-                f"No user with barcode {legacy_request.patron_barcode} found in FOLIO"
-            )
+            logging.error(f"No user with barcode {legacy_request.patron_barcode} found in FOLIO")
             Helper.log_data_issue(
                 f"{legacy_request.patron_barcode}",
-                "No user with barcode",
+                "No user with barcode.",
                 f"{legacy_request.patron_barcode}",
             )
-            self.migration_report.add_general_statistics("No user with barcode")
+            self.migration_report.add_general_statistics(
+                i18n.t("No user with barcode found in FOLIO")
+            )
             self.failed_requests.add(legacy_request)
             return False, legacy_request
         legacy_request.patron_id = patron.get("id")

         item = self.circulation_helper.get_item_by_barcode(legacy_request.item_barcode)
+        self.migration_report.add_general_statistics(i18n.t("Item lookups performed"))
         if not item:
-            logging.error(
-                f"No item with barcode {legacy_request.item_barcode} found in FOLIO"
+            logging.error(f"No item with barcode {legacy_request.item_barcode} found in FOLIO")
+            self.migration_report.add_general_statistics(
+                i18n.t("No item with barcode found in FOLIO")
             )
-            self.migration_report.add_general_statistics("No item with barcode")
             Helper.log_data_issue(
                 f"{legacy_request.item_barcode}",
                 "No item with barcode",
@@ -119,12 +181,17 @@ class RequestsMigrator(MigrationTaskBase):
             )
             self.failed_requests.add(legacy_request)
             return False, legacy_request
+        holding = self.circulation_helper.get_holding_by_uuid(item.get("holdingsRecordId"))
+        self.migration_report.add_general_statistics(i18n.t("Holdings lookups performed"))
         legacy_request.item_id = item.get("id")
-        if item["status"]["name"] in ["Available", "Aged to lost", "Missing"]:
+        legacy_request.holdings_record_id = item.get("holdingsRecordId")
+        legacy_request.instance_id = holding.get("instanceId")
+        if item["status"]["name"] in ["Available"]:
             legacy_request.request_type = "Page"
-            logging.info(
-                f'Setting request to Page, since the status is {item["status"]["name"]}'
-            )
+            logging.info(f'Setting request to Page, since the status is {item["status"]["name"]}')
+        self.migration_report.add_general_statistics(
+            i18n.t("Valid, prepared requests, ready for posting")
+        )
         return True, legacy_request

     def do_work(self):
@@ -135,9 +202,7 @@ class RequestsMigrator(MigrationTaskBase):
             self.valid_legacy_requests[self.task_configuration.starting_row - 1 :],
             start=1,
         ):
-
             t0_migration = time.time()
-            self.migration_report.add_general_statistics("Processed requests")
             try:
                 res, legacy_request = self.prepare_legacy_request(legacy_request)
                 if res:
@@ -145,11 +210,11 @@ class RequestsMigrator(MigrationTaskBase):
                         self.folio_client, legacy_request, self.migration_report
                     ):
                         self.migration_report.add_general_statistics(
-                            "Successfully processed requests"
+                            i18n.t("Successfully migrated requests")
                         )
                     else:
                         self.migration_report.add_general_statistics(
-                            "Unsuccessfully processed requests"
+                            i18n.t("Unsuccessfully migrated requests")
                         )
                         self.failed_requests.add(legacy_request)
                 if num_requests == 1:
@@ -163,19 +228,18 @@ class RequestsMigrator(MigrationTaskBase):
                 )
                 sys.exit(1)
             if num_requests % 10 == 0:
-                logging.info(
-                    f"{timings(self.t0, t0_migration, num_requests)} {num_requests}"
-                )
+                logging.info(f"{timings(self.t0, t0_migration, num_requests)} {num_requests}")
+        logging.info(f"{timings(self.t0, t0_migration, num_requests)} {num_requests}")

     def wrap_up(self):
+        self.extradata_writer.flush()
         self.write_failed_request_to_file()

         with open(self.folder_structure.migration_reports_file, "w+") as report_file:
-            report_file.write("# Requests migration results \n")
-            report_file.write(
-                f"Time Finished: {datetime.isoformat(datetime.now(timezone.utc))}\n"
+            self.migration_report.write_migration_report(
+                i18n.t("Requests migration report"), report_file, self.start_datetime
             )
-            self.migration_report.write_migration_report(report_file)
+        self.clean_out_empty_logs()

     def write_failed_request_to_file(self):
         csv_columns = [
@@ -188,9 +252,7 @@ class RequestsMigrator(MigrationTaskBase):
             "pickup_servicepoint_id",
         ]
         with open(self.folder_structure.failed_recs_path, "w+") as failed_requests_file:
-            writer = csv.DictWriter(
-                failed_requests_file, fieldnames=csv_columns, dialect="tsv"
-            )
+            writer = csv.DictWriter(failed_requests_file, fieldnames=csv_columns, dialect="tsv")
             writer.writeheader()
             failed: LegacyRequest
             for failed in self.failed_requests:
@@ -212,14 +274,20 @@ class RequestsMigrator(MigrationTaskBase):
             has_patron_barcode = request.patron_barcode in user_barcodes
             if has_item_barcode and has_patron_barcode:
                 self.migration_report.add_general_statistics(
-                    "Requests verified against migrated user and item"
+                    i18n.t("Requests successfully verified against migrated users and items")
                 )
                 yield request
             else:
                 self.migration_report.add(
-                    Blurbs.DiscardedLoans,
-                    f"Requests discarded. Had migrated item barcode: {has_item_barcode}. "
-                    f"Had migrated user barcode: {has_patron_barcode}",
+                    "DiscardedLoans",
+                    i18n.t(
+                        "Requests discarded. Had migrated item barcode: %{item_barcode}.\n Had migrated user barcode: %{patron_barcode}",
+                        item_barcode=has_item_barcode,
+                        patron_barcode=has_patron_barcode,
+                    ),
+                )
+                self.migration_report.add_general_statistics(
+                    i18n.t("Requests that failed verification against migrated users and items")
                 )
                 if not has_item_barcode:
                     Helper.log_data_issue(
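The hard-coded Blurbs constants and f-strings give way to python-i18n keys, with %{...} placeholders interpolated by i18n.t from the new translations/en.json bundle. A small sketch of that interpolation behaviour, registering the translation inline here instead of loading the bundled JSON file (assumed python-i18n behaviour, not package code):

    # Sketch of python-i18n placeholder interpolation as used in the report strings above.
    import i18n

    key = "Requests discarded. Had migrated item barcode: %{item_barcode}."
    i18n.add_translation(key, key)  # the package itself ships translations in translations/en.json
    print(i18n.t(key, item_barcode=False))
    # -> Requests discarded. Had migrated item barcode: False.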
@@ -237,30 +305,30 @@ class RequestsMigrator(MigrationTaskBase):
     def load_and_validate_legacy_requests(self, requests_reader):
         num_bad = 0
         logging.info("Validating legacy requests in file...")
-        for legacy_reques_count, legacy_request_dict in enumerate(
-            requests_reader, start=1
-        ):
+        for legacy_reques_count, legacy_request_dict in enumerate(requests_reader, start=1):
+            self.migration_report.add_general_statistics(i18n.t("Requests in file"))
             try:
                 legacy_request = LegacyRequest(
                     legacy_request_dict,
-                    self.task_configuration.utc_difference,
+                    self.tenant_timezone,
                     legacy_reques_count,
                 )
                 if any(legacy_request.errors):
                     num_bad += 1
                     self.migration_report.add_general_statistics(
-                        "Requests with valueErrors"
+                        i18n.t("Requests with valueErrors")
                     )
                     for error in legacy_request.errors:
-                        self.migration_report.add(
-                            Blurbs.DiscardedRequests, f"{error[0]} - {error[1]}"
-                        )
+                        self.migration_report.add("DiscardedRequests", f"{error[0]} - {error[1]}")
                         Helper.log_data_issue(
                             legacy_request.item_barcode,
                             f"{error[0]} - {error[1]}",
                             json.dumps(legacy_request.to_source_dict()),
                         )
                 else:
+                    self.migration_report.add_general_statistics(
+                        i18n.t("Requests with valid source data")
+                    )
                     yield legacy_request
             except ValueError as ve:
                 logging.exception(ve)
folio_migration_tools/migration_tasks/reserves_migrator.py (new file)
@@ -0,0 +1,253 @@
+import csv
+import json
+import logging
+import sys
+import time
+import traceback
+from typing import Dict, Annotated
+from urllib.error import HTTPError
+from pydantic import Field
+
+import httpx
+import i18n
+from folio_uuid.folio_namespaces import FOLIONamespaces
+
+from folio_migration_tools.custom_dict import InsensitiveDictReader
+from folio_migration_tools.custom_exceptions import TransformationProcessError
+from folio_migration_tools.library_configuration import (
+    FileDefinition,
+    LibraryConfiguration,
+)
+from folio_migration_tools.migration_report import MigrationReport
+from folio_migration_tools.migration_tasks.migration_task_base import MigrationTaskBase
+from folio_migration_tools.task_configuration import AbstractTaskConfiguration
+from folio_migration_tools.transaction_migration.legacy_reserve import LegacyReserve
+
+
+class ReservesMigrator(MigrationTaskBase):
+    class TaskConfiguration(AbstractTaskConfiguration):
+        name: Annotated[
+            str,
+            Field(
+                title="Migration task name",
+                description=(
+                    "Name of this migration task. The name is being used to call the specific "
+                    "task, and to distinguish tasks of similar types"
+                ),
+            ),
+        ]
+        migration_task_type: Annotated[
+            str,
+            Field(
+                title="Migration task type",
+                description="The type of migration task you want to perform",
+            ),
+        ]
+        course_reserve_file_path: Annotated[
+            FileDefinition,
+            Field(
+                title="Course reserve file path",
+                description="Path to the file with course reserves",
+            ),
+        ]
+
+    @staticmethod
+    def get_object_type() -> FOLIONamespaces:
+        return FOLIONamespaces.reserve
+
+    def __init__(
+        self,
+        task_configuration: TaskConfiguration,
+        library_config: LibraryConfiguration,
+        folio_client
+    ):
+        csv.register_dialect("tsv", delimiter="\t")
+        self.migration_report = MigrationReport()
+        self.valid_reserves = []
+        super().__init__(library_config, task_configuration, folio_client)
+        with open(
+            self.folder_structure.legacy_records_folder
+            / task_configuration.course_reserve_file_path.file_name,
+            "r",
+            encoding="utf-8",
+        ) as reserves_file:
+            self.semi_valid_reserves = list(
+                self.load_and_validate_legacy_reserves(
+                    InsensitiveDictReader(reserves_file, dialect="tsv")
+                )
+            )
+            logging.info(
+                "Loaded and validated %s reserves in file",
+                len(self.semi_valid_reserves),
+            )
+
+        self.valid_reserves = self.semi_valid_reserves
+        self.t0 = time.time()
+        self.failed: Dict = {}
+        logging.info("Init completed")
+
+    def do_work(self):
+        logging.info("Starting")
+        for num_reserves, legacy_reserve in enumerate(self.valid_reserves, start=1):
+            t0_migration = time.time()
+            self.migration_report.add_general_statistics(i18n.t("Processed reserves"))
+            try:
+                self.post_single_reserve(legacy_reserve)
+            except Exception as ee:
+                logging.exception(
+                    f"Error in row {num_reserves} Reserve: {json.dumps(legacy_reserve)} {ee}"
+                )
+            if num_reserves % 50 == 0:
+                logging.info(f"{timings(self.t0, t0_migration, num_reserves)} {num_reserves}")
+
+    def post_single_reserve(self, legacy_reserve: LegacyReserve):
+        try:
+            path = f"/coursereserves/courselistings/{legacy_reserve.course_listing_id}/reserves"
+            if self.folio_put_post(
+                path, legacy_reserve.to_dict(), "POST", i18n.t("Posted reserves")
+            ):
+                self.migration_report.add_general_statistics(
+                    i18n.t("Successfully posted reserves")
+                )
+            else:
+                self.migration_report.add_general_statistics(i18n.t("Failure to post reserve"))
+        except Exception as ee:
+            logging.error(ee)
+
+    def wrap_up(self):
+        self.extradata_writer.flush()
+        for k, v in self.failed.items():
+            self.failed_and_not_dupe[k] = [v.to_dict()]
+        self.migration_report.set("GeneralStatistics", "Failed loans", len(self.failed))
+        self.write_failed_reserves_to_file()
+
+        with open(self.folder_structure.migration_reports_file, "w+") as report_file:
+            self.migration_report.write_migration_report(
+                i18n.t("Reserves migration report"), report_file, self.start_datetime
+            )
+        self.clean_out_empty_logs()
+
+    def write_failed_reserves_to_file(self):
+        # POST /coursereserves/courselistings/40a085bd-b44b-42b3-b92f-61894a75e3ce/reserves
+        # Match on legacy course number ()
+
+        csv_columns = ["legacy_identifier", "barcode"]
+        with open(self.folder_structure.failed_recs_path, "w+") as failed_reserves_file:
+            writer = csv.DictWriter(failed_reserves_file, fieldnames=csv_columns, dialect="tsv")
+            writer.writeheader()
+            for _k, failed_reserve in self.failed.items():
+                writer.writerow(failed_reserve[0])
+
+    def check_barcodes(self):
+        """Stub for extension.
+
+        Yields:
+            _type_: _description_
+        """
+        item_barcodes = set()
+        self.circulation_helper.load_migrated_item_barcodes(
+            item_barcodes, self.task_configuration.item_files, self.folder_structure
+        )
+        for loan in self.semi_valid_legacy_loans:
+            has_item_barcode = loan.item_barcode in item_barcodes or not any(item_barcodes)
+            if has_item_barcode:
+                self.migration_report.add_general_statistics(
+                    i18n.t("Reserve verified against migrated item")
+                )
+                yield loan
+            else:
+                self.migration_report.add(
+                    "DiscardedLoans", i18n.t("Reserve discarded. Could not find migrated barcode")
+                )
+
+    def load_and_validate_legacy_reserves(self, reserves_reader):
+        num_bad = 0
+        logging.info("Validating legacy loans in file...")
+        for legacy_reserve_count, legacy_reserve_dict in enumerate(reserves_reader):
+            try:
+                legacy_reserve = LegacyReserve(
+                    legacy_reserve_dict,
+                    self.folio_client,
+                    legacy_reserve_count,
+                )
+                if any(legacy_reserve.errors):
+                    num_bad += 1
+                    self.migration_report.add_general_statistics(i18n.t("Discarded reserves"))
+                    for error in legacy_reserve.errors:
+                        self.migration_report.add("DiscardedReserves", f"{error[0]} - {error[1]}")
+                else:
+                    yield legacy_reserve
+            except ValueError as ve:
+                logging.exception(ve)
+        logging.info(
+            f"Done validating {legacy_reserve_count} legacy reserves with {num_bad} rotten apples"
+        )
+        if num_bad / legacy_reserve_count > 0.5:
+            q = num_bad / legacy_reserve_count
+            logging.error("%s percent of reserves failed to validate.", (q * 100))
+            self.migration_report.log_me()
+            logging.critical("Halting...")
+            sys.exit(1)
+
+    def folio_put_post(self, url, data_dict, verb, action_description=""):
+        full_url = f"{self.folio_client.gateway_url}{url}"
+        try:
+            if verb == "PUT":
+                resp = httpx.put(
+                    full_url,
+                    headers=self.folio_client.okapi_headers,
+                    json=data_dict,
+                )
+            elif verb == "POST":
+                resp = httpx.post(
+                    full_url,
+                    headers=self.folio_client.okapi_headers,
+                    json=data_dict,
+                )
+            else:
+                raise TransformationProcessError("Bad verb supplied. This is a code issue.")
+            if resp.status_code == 422:
+                error_message = json.loads(resp.text)["errors"][0]["message"]
+                logging.error(error_message)
+                self.migration_report.add(
+                    "Details",
+                    i18n.t(
+                        "%{action} error: %{message}",
+                        action=action_description,
+                        message=error_message,
+                    ),
+                )
+                resp.raise_for_status()
+            elif resp.status_code in [201, 204]:
+                self.migration_report.add(
+                    "Details",
+                    i18n.t("Successfully %{action}", action=action_description)
+                    + f" ({resp.status_code})",
+                )
+            else:
+                self.migration_report.add(
+                    "Details",
+                    i18n.t(
+                        "%{action} error. http status: %{status}",
+                        action=action_description,
+                        status=resp.status_code,
+                    ),
+                )
+                logging.error(json.dumps(data_dict))
+                resp.raise_for_status()
+            return True
+        except HTTPError as exception:
+            logging.error(f"{resp.status_code}. {verb} FAILED for {url}")
+            traceback.print_exc()
+            logging.info(exception)
+            return False
+
+
+def timings(t0, t0func, num_objects):
+    avg = num_objects / (time.time() - t0)
+    elapsed = time.time() - t0
+    elapsed_func = time.time() - t0func
+    return (
+        f"Total objects: {num_objects}\tTotal elapsed: {elapsed:.2f}\t"
+        f"Average per object: {avg:.2f}\tElapsed this time: {elapsed_func:.2f}"
+    )
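For reference, the module-level timings() helper at the end of the new file returns a tab-separated throughput string. With invented numbers, calling it looks roughly like this (the import path follows the file location shown above):

    # Invented numbers, just to show the shape of the timings() output.
    import time
    from folio_migration_tools.migration_tasks.reserves_migrator import timings

    t0 = time.time() - 120.0     # pretend the run started two minutes ago
    t0func = time.time() - 0.4   # and the current reserve took 0.4 seconds
    print(timings(t0, t0func, 600))
    # Total objects: 600   Total elapsed: 120.00   Average per object: 5.00   Elapsed this time: 0.40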