folio-data-import 0.2.7__py3-none-any.whl → 0.2.8__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of folio-data-import might be problematic.

folio_data_import/UserImport.py

@@ -5,6 +5,7 @@ import getpass
 import json
 import os
 import time
+import uuid
 from datetime import datetime as dt
 from pathlib import Path
 from typing import Tuple
@@ -94,6 +95,23 @@ class UserImporter: # noqa: R0902
         """
         return {x[name]: x["id"] for x in folio_client.folio_get_all(endpoint, key)}
 
+    @staticmethod
+    def validate_uuid(uuid_string: str) -> bool:
+        """
+        Validate a UUID string.
+
+        Args:
+            uuid_string (str): The UUID string to validate.
+
+        Returns:
+            bool: True if the UUID is valid, otherwise False.
+        """
+        try:
+            uuid.UUID(uuid_string)
+            return True
+        except ValueError:
+            return False
+
     async def do_import(self) -> None:
         """
         Main method to import users.
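
The new `validate_uuid` helper is a thin wrapper around the standard library's parser, so anything `uuid.UUID` can parse counts as valid. A minimal sketch with hypothetical inputs:

    from folio_data_import.UserImport import UserImporter

    UserImporter.validate_uuid("0f7e2f3f-4b3a-4c8e-9d2a-1b2c3d4e5f6a")  # True
    UserImporter.validate_uuid("Staff")  # False: a reference-data name, not a UUID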
@@ -119,7 +137,7 @@ class UserImporter: # noqa: R0902
         match_key = "id" if ("id" in user_obj) else self.match_key
         try:
             existing_user = await self.http_client.get(
-                self.folio_client.okapi_url + "/users",
+                self.folio_client.gateway_url + "/users",
                 headers=self.folio_client.okapi_headers,
                 params={"query": f"{match_key}=={user_obj[match_key]}"},
             )
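
The `okapi_url` → `gateway_url` renames that run through this file track the `folioclient` bump recorded in the wheel metadata below (now pinned to 0.70.1), which exposes the FOLIO gateway's base URL under the new attribute name.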
@@ -143,7 +161,7 @@ class UserImporter: # noqa: R0902
         """
         try:
             existing_rp = await self.http_client.get(
-                self.folio_client.okapi_url
+                self.folio_client.gateway_url
                 + "/request-preference-storage/request-preference",
                 headers=self.folio_client.okapi_headers,
                 params={
@@ -170,7 +188,7 @@ class UserImporter: # noqa: R0902
         """
         try:
             existing_pu = await self.http_client.get(
-                self.folio_client.okapi_url + "/perms/users",
+                self.folio_client.gateway_url + "/perms/users",
                 headers=self.folio_client.okapi_headers,
                 params={
                     "query": f"userId=={existing_user.get('id', user_obj.get('id', ''))}"
@@ -203,10 +221,20 @@ class UserImporter: # noqa: R0902
         mapped_addresses = []
         for address in addresses:
             try:
-                address["addressTypeId"] = self.address_type_map[
-                    address["addressTypeId"]
-                ]
-                mapped_addresses.append(address)
+                if (
+                    self.validate_uuid(address["addressTypeId"])
+                    and address["addressTypeId"] in self.address_type_map.values()
+                ):
+                    await self.logfile.write(
+                        f"Row {line_number}: Address type {address['addressTypeId']} is a UUID, "
+                        f"skipping mapping\n"
+                    )
+                    mapped_addresses.append(address)
+                else:
+                    address["addressTypeId"] = self.address_type_map[
+                        address["addressTypeId"]
+                    ]
+                    mapped_addresses.append(address)
             except KeyError:
                 if address["addressTypeId"] not in self.address_type_map.values():
                     print(
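
The same pass-through pattern repeats below for patron groups, departments, and service points: a value that is already a UUID present in the reference-data map is kept as-is (and logged), otherwise it is treated as a human-readable name and mapped. A standalone sketch of the logic with a hypothetical map:

    import uuid

    address_type_map = {"Home": "93d3d88d-499b-45d0-9bc7-ac73c3a19880"}

    def validate_uuid(value: str) -> bool:
        try:
            uuid.UUID(value)
            return True
        except ValueError:
            return False

    def resolve(value: str) -> str:
        # Already a known UUID: skip the name-to-id lookup.
        if validate_uuid(value) and value in address_type_map.values():
            return value
        # Otherwise treat it as a name; an unknown name raises KeyError,
        # which the importer catches and reports per row.
        return address_type_map[value]

    resolve("Home")                                  # -> the mapped UUID
    resolve("93d3d88d-499b-45d0-9bc7-ac73c3a19880")  # -> returned unchanged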
@@ -232,7 +260,16 @@ class UserImporter: # noqa: R0902
             None
         """
         try:
-            user_obj["patronGroup"] = self.patron_group_map[user_obj["patronGroup"]]
+            if (
+                self.validate_uuid(user_obj["patronGroup"])
+                and user_obj["patronGroup"] in self.patron_group_map.values()
+            ):
+                await self.logfile.write(
+                    f"Row {line_number}: Patron group {user_obj['patronGroup']} is a UUID, "
+                    f"skipping mapping\n"
+                )
+            else:
+                user_obj["patronGroup"] = self.patron_group_map[user_obj["patronGroup"]]
         except KeyError:
             if user_obj["patronGroup"] not in self.patron_group_map.values():
                 print(
@@ -259,7 +296,16 @@ class UserImporter: # noqa: R0902
         mapped_departments = []
         for department in user_obj.pop("departments", []):
             try:
-                mapped_departments.append(self.department_map[department])
+                if (
+                    self.validate_uuid(department)
+                    and department in self.department_map.values()
+                ):
+                    await self.logfile.write(
+                        f"Row {line_number}: Department {department} is a UUID, skipping mapping\n"
+                    )
+                    mapped_departments.append(department)
+                else:
+                    mapped_departments.append(self.department_map[department])
             except KeyError:
                 print(
                     f'Row {line_number}: Department "{department}" not found, ' # noqa: B907
@@ -323,7 +369,7 @@ class UserImporter: # noqa: R0902
             else:
                 existing_user[key] = value
         create_update_user = await self.http_client.put(
-            self.folio_client.okapi_url + f"/users/{existing_user['id']}",
+            self.folio_client.gateway_url + f"/users/{existing_user['id']}",
             headers=self.folio_client.okapi_headers,
             json=existing_user,
         )
@@ -343,7 +389,7 @@ class UserImporter: # noqa: R0902
            HTTPError: If the HTTP request to create the user fails.
        """
        response = await self.http_client.post(
-           self.folio_client.okapi_url + "/users",
+           self.folio_client.gateway_url + "/users",
            headers=self.folio_client.okapi_headers,
            json=user_obj,
        )
@@ -493,7 +539,7 @@ class UserImporter: # noqa: R0902
         and the existing PU object (existing_pu).
         """
         rp_obj = user_obj.pop("requestPreference", {})
-        spu_obj = user_obj.pop("servicePointsUser")
+        spu_obj = user_obj.pop("servicePointsUser", {})
         existing_user = await self.get_existing_user(user_obj)
         if existing_user:
             existing_rp = await self.get_existing_rp(user_obj, existing_user)
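
Giving `pop` a default here fixes a crash on user records that carry no `servicePointsUser` object at all:

    user_obj = {"username": "jdoe"}  # hypothetical record without the key
    # user_obj.pop("servicePointsUser")              # 0.2.7: raises KeyError
    spu_obj = user_obj.pop("servicePointsUser", {})  # 0.2.8: returns {}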
@@ -543,7 +589,7 @@ class UserImporter: # noqa: R0902
             rp_obj["userId"] = new_user_obj["id"]
             # print(rp_obj)
             response = await self.http_client.post(
-                self.folio_client.okapi_url
+                self.folio_client.gateway_url
                 + "/request-preference-storage/request-preference",
                 headers=self.folio_client.okapi_headers,
                 json=rp_obj,
@@ -567,7 +613,7 @@ class UserImporter: # noqa: R0902
             existing_rp.update(rp_obj)
             # print(existing_rp)
             response = await self.http_client.put(
-                self.folio_client.okapi_url
+                self.folio_client.gateway_url
                 + f"/request-preference-storage/request-preference/{existing_rp['id']}",
                 headers=self.folio_client.okapi_headers,
                 json=existing_rp,
@@ -589,7 +635,7 @@ class UserImporter: # noqa: R0902
         """
         perms_user_obj = {"userId": new_user_obj["id"], "permissions": []}
         response = await self.http_client.post(
-            self.folio_client.okapi_url + "/perms/users",
+            self.folio_client.gateway_url + "/perms/users",
             headers=self.folio_client.okapi_headers,
             json=perms_user_obj,
         )
@@ -677,7 +723,13 @@ class UserImporter: # noqa: R0902
         mapped_service_points = []
         for sp in spu_obj.pop("servicePointsIds", []):
             try:
-                mapped_service_points.append(self.service_point_map[sp])
+                if self.validate_uuid(sp) and sp in self.service_point_map.values():
+                    await self.logfile.write(
+                        f"Service point {sp} is a UUID, skipping mapping\n"
+                    )
+                    mapped_service_points.append(sp)
+                else:
+                    mapped_service_points.append(self.service_point_map[sp])
             except KeyError:
                 print(
                     f'Service point "{sp}" not found, excluding service point from user: '
@@ -688,7 +740,13 @@ class UserImporter: # noqa: R0902
         if "defaultServicePointId" in spu_obj:
             sp_code = spu_obj.pop('defaultServicePointId', '')
             try:
-                mapped_sp_id = self.service_point_map[sp_code]
+                if self.validate_uuid(sp_code) and sp_code in self.service_point_map.values():
+                    await self.logfile.write(
+                        f"Default service point {sp_code} is a UUID, skipping mapping\n"
+                    )
+                    mapped_sp_id = sp_code
+                else:
+                    mapped_sp_id = self.service_point_map[sp_code]
                 if mapped_sp_id not in spu_obj.get('servicePointsIds', []):
                     print(
                         f'Default service point "{sp_code}" not found in assigned service points, '
@@ -711,7 +769,7 @@ class UserImporter: # noqa: R0902
             existing_spu (dict): The existing service-points-user object, if it exists.
             existing_user (dict): The existing user object associated with the spu_obj.
         """
-        if spu_obj is not None:
+        if spu_obj:
             await self.map_service_points(spu_obj, existing_user)
             if existing_spu:
                 await self.update_existing_spu(spu_obj, existing_spu)
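
This check works together with the `{}` default introduced above: `is not None` would treat the empty fallback dict as a real service-points-user object, while a truthiness test skips both `None` and `{}`:

    spu_obj = {}
    spu_obj is not None  # True  -- the 0.2.7 condition would still process it
    bool(spu_obj)        # False -- the 0.2.8 condition skips it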
@@ -730,7 +788,7 @@ class UserImporter: # noqa: R0902
         """
         try:
             existing_spu = await self.http_client.get(
-                self.folio_client.okapi_url + "/service-points-users",
+                self.folio_client.gateway_url + "/service-points-users",
                 headers=self.folio_client.okapi_headers,
                 params={"query": f"userId=={existing_user['id']}"},
             )
@@ -754,7 +812,7 @@ class UserImporter: # noqa: R0902
         """
         spu_obj["userId"] = existing_user["id"]
         response = await self.http_client.post(
-            self.folio_client.okapi_url + "/service-points-users",
+            self.folio_client.gateway_url + "/service-points-users",
             headers=self.folio_client.okapi_headers,
             json=spu_obj,
         )
@@ -773,7 +831,7 @@ class UserImporter: # noqa: R0902
         """
         existing_spu.update(spu_obj)
         response = await self.http_client.put(
-            self.folio_client.okapi_url + f"/service-points-users/{existing_spu['id']}",
+            self.folio_client.gateway_url + f"/service-points-users/{existing_spu['id']}",
             headers=self.folio_client.okapi_headers,
             json=existing_spu,
         )
folio_data_import/marc_preprocessors/__init__.py

@@ -1 +1 @@
-from ._preprocessors import prepend_ppn_prefix_001, strip_999_ff_fields
+from ._preprocessors import *
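
With the wildcard import, any public function added to `_preprocessors.py` (including the new ones below) is exposed from `folio_data_import.marc_preprocessors` without further edits to this `__init__`.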
folio_data_import/marc_preprocessors/_preprocessors.py

@@ -1,4 +1,23 @@
 import pymarc
+import logging
+
+logger = logging.getLogger("folio_data_import.MARCDataImport")
+
+
+def prepend_prefix_001(record: pymarc.Record, prefix: str) -> pymarc.Record:
+    """
+    Prepend a prefix to the record's 001 field.
+
+    Args:
+        record (pymarc.Record): The MARC record to preprocess.
+        prefix (str): The prefix to prepend to the 001 field.
+
+    Returns:
+        pymarc.Record: The preprocessed MARC record.
+    """
+    record["001"].data = f"({prefix})" + record["001"].data
+    return record
+
 
 def prepend_ppn_prefix_001(record: pymarc.Record) -> pymarc.Record:
     """
@@ -11,8 +30,22 @@ def prepend_ppn_prefix_001(record: pymarc.Record) -> pymarc.Record:
     Returns:
         pymarc.Record: The preprocessed MARC record.
     """
-    record['001'].data = '(PPN)' + record['001'].data
-    return record
+    return prepend_prefix_001(record, "PPN")
+
+
+def prepend_abes_prefix_001(record: pymarc.Record) -> pymarc.Record:
+    """
+    Prepend the ABES prefix to the record's 001 field. Useful when
+    importing records from the ABES SUDOC catalog.
+
+    Args:
+        record (pymarc.Record): The MARC record to preprocess.
+
+    Returns:
+        pymarc.Record: The preprocessed MARC record.
+    """
+    return prepend_prefix_001(record, "ABES")
+
 
 def strip_999_ff_fields(record: pymarc.Record) -> pymarc.Record:
     """
@@ -25,7 +58,276 @@ def strip_999_ff_fields(record: pymarc.Record) -> pymarc.Record:
     Returns:
         pymarc.Record: The preprocessed MARC record.
     """
-    for field in record.get_fields('999'):
-        if field.indicators == pymarc.Indicators(*['f', 'f']):
+    for field in record.get_fields("999"):
+        if field.indicators == pymarc.Indicators(*["f", "f"]):
             record.remove_field(field)
     return record
+
+def clean_999_fields(record: pymarc.Record) -> pymarc.Record:
+    """
+    The presence of 999 fields, with or without ff indicators, can cause
+    issues with data import mapping in FOLIO. This function calls strip_999_ff_fields
+    to remove 999 fields with ff indicators and then copies the remaining 999 fields
+    to 945 fields.
+
+    Args:
+        record (pymarc.Record): The MARC record to preprocess.
+
+    Returns:
+        pymarc.Record: The preprocessed MARC record.
+    """
+    record = strip_999_ff_fields(record)
+    for field in record.get_fields("999"):
+        _945 = pymarc.Field(
+            tag="945",
+            indicators=field.indicators,
+            subfields=field.subfields,
+        )
+        record.add_ordered_field(_945)
+        record.remove_field(field)
+    return record
+
+def sudoc_supercede_prep(record: pymarc.Record) -> pymarc.Record:
+    """
+    Preprocesses a record from the ABES SUDOC catalog to copy 035 fields
+    with a $9 subfield value of 'sudoc' to 935 fields with a $a subfield
+    prefixed with "(ABES)". This is useful when importing newly-merged records
+    from the SUDOC catalog when you want the new record to replace the old one
+    in FOLIO. This also applies the prepend_abes_prefix_001 function to the record.
+
+    Args:
+        record (pymarc.Record): The MARC record to preprocess.
+
+    Returns:
+        pymarc.Record: The preprocessed MARC record.
+    """
+    record = prepend_abes_prefix_001(record)
+    for field in record.get_fields("035"):
+        if "a" in field and "9" in field and field["9"] == "sudoc":
+            _935 = pymarc.Field(
+                tag="935",
+                indicators=["f", "f"],
+                subfields=[pymarc.field.Subfield("a", "(ABES)" + field["a"])],
+            )
+            record.add_ordered_field(_935)
+    return record
+
+
+def clean_empty_fields(record: pymarc.Record) -> pymarc.Record:
+    """
+    Remove empty fields and subfields from the record. These can cause
+    data import mapping issues in FOLIO. Removals are logged at custom
+    log level 26, which is used by folio_migration_tools to populate the
+    data issues report.
+
+    Args:
+        record (pymarc.Record): The MARC record to preprocess.
+
+    Returns:
+        pymarc.Record: The preprocessed MARC record.
+    """
+    MAPPED_FIELDS = {
+        "010": ["a", "z"],
+        "020": ["a", "y", "z"],
+        "035": ["a", "z"],
+        "040": ["a", "b", "c", "d", "e", "f", "g", "h", "k", "m", "n", "p", "r", "s"],
+        "050": ["a", "b"],
+        "082": ["a", "b"],
+        "100": ["a", "b", "c", "d", "q"],
+        "110": ["a", "b", "c"],
+        "111": ["a", "c", "d"],
+        "130": ["a", "d", "f", "k", "l", "m", "n", "o", "p", "r", "s", "t", "x", "y", "z"],
+        "180": ["x", "y", "z"],
+        "210": ["a", "c"],
+        "240": ["a", "f", "k", "l", "m", "n", "o", "p", "r", "s", "t", "x", "y", "z"],
+        "245": ["a", "b", "c", "f", "g", "h", "k", "n", "p", "s"],
+        "246": ["a", "f", "g", "n", "p", "s"],
+        "250": ["a", "b"],
+        "260": ["a", "b", "c", "e", "f", "g"],
+        "300": ["a", "b", "c", "e", "f", "g"],
+        "440": ["a", "n", "p", "v", "x", "y", "z"],
+        "490": ["a", "v", "x", "y", "z"],
+        "500": ["a", "c", "d", "n", "p", "v", "x", "y", "z"],
+        "505": ["a", "g", "r", "t", "u"],
+        "520": ["a", "b", "c", "u"],
+        "600": ["a", "b", "c", "d", "q", "t", "v", "x", "y", "z"],
+        "610": ["a", "b", "c", "d", "t", "v", "x", "y", "z"],
+        "611": ["a", "c", "d", "t", "v", "x", "y", "z"],
+        "630": ["a", "d", "f", "k", "l", "m", "n", "o", "p", "r", "s", "t", "x", "y", "z"],
+        "650": ["a", "d", "v", "x", "y", "z"],
+        "651": ["a", "v", "x", "y", "z"],
+        "655": ["a", "v", "x", "y", "z"],
+        "700": ["a", "b", "c", "d", "q", "t", "v", "x", "y", "z"],
+        "710": ["a", "b", "c", "d", "t", "v", "x", "y", "z"],
+        "711": ["a", "c", "d", "t", "v", "x", "y", "z"],
+        "730": ["a", "d", "f", "k", "l", "m", "n", "o", "p", "r", "s", "t", "x", "y", "z"],
+        "740": ["a", "n", "p", "v", "x", "y", "z"],
+        "800": ["a", "b", "c", "d", "q", "t", "v", "x", "y", "z"],
+        "810": ["a", "b", "c", "d", "t", "v", "x", "y", "z"],
+        "811": ["a", "c", "d", "t", "v", "x", "y", "z"],
+        "830": ["a", "d", "f", "k", "l", "m", "n", "o", "p", "r", "s", "t", "x", "y", "z"],
+        "856": ["u", "y", "z"],
+    }
+
+    for field in list(record.get_fields()):
+        len_subs = len(field.subfields)
+        subfield_value = bool(field.subfields[0].value) if len_subs > 0 else False
+        if not int(field.tag) >= 900 and field.tag in MAPPED_FIELDS:
+            if int(field.tag) > 9 and len_subs == 0:
+                logger.log(
+                    26,
+                    "DATA ISSUE\t%s\t%s\t%s",
+                    record["001"].value(),
+                    f"{field.tag} is empty, removing field",
+                    field,
+                )
+                record.remove_field(field)
+            elif len_subs == 1 and not subfield_value:
+                logger.log(
+                    26,
+                    "DATA ISSUE\t%s\t%s\t%s",
+                    record["001"].value(),
+                    f"{field.tag}${field.subfields[0].code} is empty, no other subfields present, removing field",
+                    field,
+                )
+                record.remove_field(field)
+            else:
+                if len_subs > 1 and "a" in field and not field["a"].strip():
+                    logger.log(
+                        26,
+                        "DATA ISSUE\t%s\t%s\t%s",
+                        record["001"].value(),
+                        f"{field.tag}$a is empty, removing subfield",
+                        field,
+                    )
+                    field.delete_subfield("a")
+                for idx, subfield in enumerate(list(field.subfields), start=1):
+                    if (
+                        subfield.code in MAPPED_FIELDS.get(field.tag, [])
+                        and not subfield.value
+                    ):
+                        logger.log(
+                            26,
+                            "DATA ISSUE\t%s\t%s\t%s",
+                            record["001"].value(),
+                            f"{field.tag}${subfield.code} ({ordinal(idx)} subfield) is empty, but other subfields have values, removing subfield",
+                            field,
+                        )
+                        field.delete_subfield(subfield.code)
+                if len(field.subfields) == 0:
+                    logger.log(
+                        26,
+                        "DATA ISSUE\t%s\t%s\t%s",
+                        record["001"].value(),
+                        f"{field.tag} has no non-empty subfields after cleaning, removing field",
+                        field,
+                    )
+                    record.remove_field(field)
+    return record
+
+
+def fix_leader(record: pymarc.Record) -> pymarc.Record:
+    """
+    Fixes the leader of the record by setting the record status to 'c' (modified
+    record) and the type of record to 'a' (language material) when the existing
+    values are not valid codes.
+
+    Args:
+        record (pymarc.Record): The MARC record to preprocess.
+
+    Returns:
+        pymarc.Record: The preprocessed MARC record.
+    """
+    VALID_STATUSES = ["a", "c", "d", "n", "p"]
+    VALID_TYPES = ["a", "c", "d", "e", "f", "g", "i", "j", "k", "m", "o", "p", "r", "t"]
+    if record.leader[5] not in VALID_STATUSES:
+        logger.log(
+            26,
+            "DATA ISSUE\t%s\t%s\t%s",
+            record["001"].value(),
+            f"Invalid record status: {record.leader[5]}, setting to 'c'",
+            record,
+        )
+        record.leader = pymarc.Leader(record.leader[:5] + "c" + record.leader[6:])
+    if record.leader[6] not in VALID_TYPES:
+        logger.log(
+            26,
+            "DATA ISSUE\t%s\t%s\t%s",
+            record["001"].value(),
+            f"Invalid record type: {record.leader[6]}, setting to 'a'",
+            record,
+        )
+        record.leader = pymarc.Leader(record.leader[:6] + "a" + record.leader[7:])
+    return record
+
+
+def ordinal(n):
+    s = ("th", "st", "nd", "rd") + ("th",) * 10
+    v = n % 100
+    if v > 13:
+        return f"{n}{s[v % 10]}"
+    else:
+        return f"{n}{s[v]}"
folio_data_import-0.2.8.dist-info/METADATA

@@ -1,6 +1,6 @@
-Metadata-Version: 2.1
+Metadata-Version: 2.3
 Name: folio_data_import
-Version: 0.2.7
+Version: 0.2.8
 Summary: A python module to interact with the data importing capabilities of the open-source FOLIO ILS
 License: MIT
 Author: Brooks Travis
@@ -19,8 +19,7 @@ Requires-Dist: flake8-black (>=0.3.6,<0.4.0)
 Requires-Dist: flake8-bugbear (>=24.8.19,<25.0.0)
 Requires-Dist: flake8-docstrings (>=1.7.0,<2.0.0)
 Requires-Dist: flake8-isort (>=6.1.1,<7.0.0)
-Requires-Dist: folioclient (>=0.61.0,<0.62.0)
-Requires-Dist: httpx (>=0.27.2,<0.28.0)
+Requires-Dist: folioclient (==0.70.1)
 Requires-Dist: inquirer (>=3.4.0,<4.0.0)
 Requires-Dist: pyhumps (>=3.8.0,<4.0.0)
 Requires-Dist: pymarc (>=5.2.2,<6.0.0)
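
`httpx` also disappears as a direct requirement; it presumably still arrives transitively (recent `folioclient` releases are built on it), so scripts that import `httpx` themselves should declare it explicitly.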
@@ -108,11 +107,11 @@ Unlike mod-user-import, this importer does not require `externalSystemId` as the
 
 #### Preferred Contact Type Mapping
 
-Another point of departure from the behavior of `mod-user-import` is the handling of `preferredContactTypeId`. This importer will accept either the `"001", "002", "003"...` values stored by the FOLIO, or the human-friendly strings used by `mod-user-import` (`"mail", "email", "text", "phone", "mobile"`). It will also __*set a customizable default for all users that do not otherwise have a valid value specified*__ (using `--default_preferred_contact_type`), unless a (valid) value is already present in the user record being updated.
+Another point of departure from the behavior of `mod-user-import` is the handling of `preferredContactTypeId`. This importer will accept either the `"001", "002", "003"...` values stored by FOLIO, or the human-friendly strings used by `mod-user-import` (`"mail", "email", "text", "phone", "mobile"`). It will also __*set a customizable default for all users that do not otherwise have a valid value specified*__ (using `--default_preferred_contact_type`), unless a (valid) value is already present in the user record being updated.
 
 #### Field Protection (*experimental*)
 
-This script offers a rudimentary field protection implementation using custom fields. To enable this functionality, create a text custom field that has the field name `protectedFields`. In this field, you ca specify a comma-separated list of User schema field names, using dot-notation for nested fields. This protection should support all standard fields except addresses within `personal.addresses`. If you include `personal.addresses` in a user record, any existing addresses will be replaced by the new values.
+This script offers a rudimentary field protection implementation using custom fields. To enable this functionality, create a text custom field that has the field name `protectedFields`. In this field, you can specify a comma-separated list of User schema field names, using dot-notation for nested fields. This protection should support all standard fields except addresses within `personal.addresses`. If you include `personal.addresses` in a user record, any existing addresses will be replaced by the new values.
 
 ##### Example
 
folio_data_import-0.2.8.dist-info/RECORD

@@ -0,0 +1,11 @@
+folio_data_import/MARCDataImport.py,sha256=LfysPxdcgRLLRKAhwcTr9RwNLPZdCWGY6veFvkpqoLg,37884
+folio_data_import/UserImport.py,sha256=ZulGaGJhI_N5vmR69YF_qbzbGeVyzcthXklSjDpZCyA,40998
+folio_data_import/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+folio_data_import/__main__.py,sha256=kav_uUsnrIjGjVxQkk3exLKrc1mah9t2x3G6bGS-5I0,3710
+folio_data_import/marc_preprocessors/__init__.py,sha256=urExfNTQoZsDCtDPcUY9EEC5OFcUihxhYEQkQFVzbMY,30
+folio_data_import/marc_preprocessors/_preprocessors.py,sha256=4i1_lEnptzZDx3DojX9sfvJ_hmehwFJUC3aZsUADcwA,10851
+folio_data_import-0.2.8.dist-info/LICENSE,sha256=qJX7wxMC7ky9Kq4v3zij8MjGEiC5wsB7pYeOhLj5TDk,1083
+folio_data_import-0.2.8.dist-info/METADATA,sha256=FeZqPsdp8-rgLHaq7WP_KiTUX2zQv6EGJcmeiuY1VIQ,6061
+folio_data_import-0.2.8.dist-info/WHEEL,sha256=fGIA9gx4Qxk2KDKeNJCbOEwSrmLtjWCwzBz351GyrPQ,88
+folio_data_import-0.2.8.dist-info/entry_points.txt,sha256=498SxWVXeEMRNw3PUf-eoReZvKewmYwPBtZhIUPr_Jg,192
+folio_data_import-0.2.8.dist-info/RECORD,,
folio_data_import-0.2.8.dist-info/WHEEL

@@ -1,4 +1,4 @@
 Wheel-Version: 1.0
-Generator: poetry-core 1.9.1
+Generator: poetry-core 2.1.2
 Root-Is-Purelib: true
 Tag: py3-none-any
folio_data_import-0.2.7.dist-info/RECORD

@@ -1,11 +0,0 @@
-folio_data_import/MARCDataImport.py,sha256=gFBq6DwghC3hXPkkM-c0XlPjtoZwITVAeEhH8joPIQo,23450
-folio_data_import/UserImport.py,sha256=DPZz6yG2SGWlDvOthohjybOVs7_r494mtNOwv6q66m0,38588
-folio_data_import/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-folio_data_import/__main__.py,sha256=kav_uUsnrIjGjVxQkk3exLKrc1mah9t2x3G6bGS-5I0,3710
-folio_data_import/marc_preprocessors/__init__.py,sha256=Wt-TKkMhUyZWFS-WhAmbShKQLPjXmHKPb2vL6kvkqVA,72
-folio_data_import/marc_preprocessors/_preprocessors.py,sha256=srx36pgY0cwl6_0z6CVOyM_Uzr_g2RObo1jJJjSEZJs,944
-folio_data_import-0.2.7.dist-info/LICENSE,sha256=qJX7wxMC7ky9Kq4v3zij8MjGEiC5wsB7pYeOhLj5TDk,1083
-folio_data_import-0.2.7.dist-info/METADATA,sha256=YR-xCFmHuQvwIpMGZu4VC_VVlUd2US2m7ANJ6GGvto8,6112
-folio_data_import-0.2.7.dist-info/WHEEL,sha256=Nq82e9rUAnEjt98J6MlVmMCZb-t9cYE2Ir1kpBmnWfs,88
-folio_data_import-0.2.7.dist-info/entry_points.txt,sha256=498SxWVXeEMRNw3PUf-eoReZvKewmYwPBtZhIUPr_Jg,192
-folio_data_import-0.2.7.dist-info/RECORD,,