dsp-tools 17.0.0.post29__py3-none-any.whl → 18.0.0.post3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of dsp-tools might be problematic. Click here for more details.

Files changed (50) hide show
  1. dsp_tools/cli/args.py +13 -0
  2. dsp_tools/cli/call_action.py +34 -330
  3. dsp_tools/cli/call_action_files_only.py +74 -0
  4. dsp_tools/cli/call_action_with_network.py +202 -0
  5. dsp_tools/cli/create_parsers.py +53 -14
  6. dsp_tools/cli/utils.py +87 -0
  7. dsp_tools/clients/list_client.py +49 -0
  8. dsp_tools/clients/list_client_live.py +166 -0
  9. dsp_tools/clients/{ontology_client.py → ontology_clients.py} +17 -2
  10. dsp_tools/clients/{ontology_client_live.py → ontology_create_client_live.py} +21 -40
  11. dsp_tools/clients/ontology_get_client_live.py +66 -0
  12. dsp_tools/clients/project_client.py +10 -0
  13. dsp_tools/clients/project_client_live.py +36 -0
  14. dsp_tools/commands/create/create_on_server/cardinalities.py +14 -8
  15. dsp_tools/commands/create/create_on_server/lists.py +163 -0
  16. dsp_tools/commands/create/lists_only.py +45 -0
  17. dsp_tools/commands/create/models/input_problems.py +13 -0
  18. dsp_tools/commands/create/models/parsed_project.py +14 -1
  19. dsp_tools/commands/create/models/rdf_ontology.py +0 -7
  20. dsp_tools/commands/create/models/server_project_info.py +17 -3
  21. dsp_tools/commands/create/parsing/parse_lists.py +45 -0
  22. dsp_tools/commands/create/parsing/parse_project.py +23 -4
  23. dsp_tools/commands/ingest_xmlupload/create_resources/upload_xml.py +4 -4
  24. dsp_tools/commands/project/create/project_create_all.py +17 -13
  25. dsp_tools/commands/project/create/project_create_default_permissions.py +8 -6
  26. dsp_tools/commands/project/create/project_create_ontologies.py +30 -18
  27. dsp_tools/commands/project/legacy_models/listnode.py +0 -30
  28. dsp_tools/commands/validate_data/models/api_responses.py +2 -16
  29. dsp_tools/commands/validate_data/prepare_data/prepare_data.py +11 -10
  30. dsp_tools/commands/validate_data/shacl_cli_validator.py +3 -1
  31. dsp_tools/commands/validate_data/sparql/value_shacl.py +1 -1
  32. dsp_tools/commands/validate_data/validate_data.py +3 -3
  33. dsp_tools/commands/validate_data/validation/get_validation_report.py +1 -1
  34. dsp_tools/commands/validate_data/validation/validate_ontology.py +1 -1
  35. dsp_tools/commands/xmlupload/models/input_problems.py +1 -1
  36. dsp_tools/commands/xmlupload/upload_config.py +1 -1
  37. dsp_tools/commands/xmlupload/xmlupload.py +2 -2
  38. dsp_tools/error/custom_warnings.py +7 -0
  39. dsp_tools/error/exceptions.py +25 -2
  40. dsp_tools/resources/start-stack/docker-compose.yml +23 -23
  41. dsp_tools/utils/ansi_colors.py +2 -0
  42. dsp_tools/utils/fuseki_bloating.py +4 -2
  43. dsp_tools/utils/request_utils.py +31 -0
  44. dsp_tools/xmllib/models/res.py +2 -0
  45. {dsp_tools-17.0.0.post29.dist-info → dsp_tools-18.0.0.post3.dist-info}/METADATA +1 -1
  46. {dsp_tools-17.0.0.post29.dist-info → dsp_tools-18.0.0.post3.dist-info}/RECORD +48 -39
  47. {dsp_tools-17.0.0.post29.dist-info → dsp_tools-18.0.0.post3.dist-info}/WHEEL +1 -1
  48. dsp_tools/commands/project/create/project_create_lists.py +0 -200
  49. dsp_tools/commands/validate_data/api_clients.py +0 -124
  50. {dsp_tools-17.0.0.post29.dist-info → dsp_tools-18.0.0.post3.dist-info}/entry_points.txt +0 -0
@@ -0,0 +1,202 @@
1
+ import argparse
2
+ from pathlib import Path
3
+
4
+ from dsp_tools.cli.args import NetworkRequirements
5
+ from dsp_tools.cli.args import PathDependencies
6
+ from dsp_tools.cli.args import ValidationSeverity
7
+ from dsp_tools.cli.utils import check_docker_health
8
+ from dsp_tools.cli.utils import check_input_dependencies
9
+ from dsp_tools.cli.utils import get_creds
10
+ from dsp_tools.commands.create.lists_only import create_lists_only
11
+ from dsp_tools.commands.excel2json.old_lists import validate_lists_section_with_schema
12
+ from dsp_tools.commands.ingest_xmlupload.create_resources.upload_xml import ingest_xmlupload
13
+ from dsp_tools.commands.ingest_xmlupload.ingest_files.ingest_files import ingest_files
14
+ from dsp_tools.commands.ingest_xmlupload.upload_files.upload_files import upload_files
15
+ from dsp_tools.commands.project.create.project_create_all import create_project
16
+ from dsp_tools.commands.project.create.project_validate import validate_project
17
+ from dsp_tools.commands.project.get.get import get_project
18
+ from dsp_tools.commands.resume_xmlupload.resume_xmlupload import resume_xmlupload
19
+ from dsp_tools.commands.start_stack import StackConfiguration
20
+ from dsp_tools.commands.start_stack import StackHandler
21
+ from dsp_tools.commands.validate_data.validate_data import validate_data
22
+ from dsp_tools.commands.xmlupload.upload_config import UploadConfig
23
+ from dsp_tools.commands.xmlupload.xmlupload import xmlupload
24
+ from dsp_tools.error.exceptions import InputError
25
+ from dsp_tools.utils.xml_parsing.parse_clean_validate_xml import parse_and_validate_xml_file
26
+
27
+
28
def call_start_stack(args: argparse.Namespace) -> bool:
    """Verify that Docker is reachable, then start a local DSP stack configured from the CLI arguments."""
    check_docker_health()
    config = StackConfiguration(
        max_file_size=args.max_file_size,
        enforce_docker_system_prune=args.prune,
        suppress_docker_system_prune=args.no_prune,
        latest_dev_version=args.latest,
        upload_test_data=args.with_test_data,
        custom_host=args.custom_host,
    )
    return StackHandler(config).start_stack()
41
+
42
+
43
def call_stop_stack() -> bool:
    """Verify that Docker is reachable, then stop the local DSP stack with a default configuration."""
    check_docker_health()
    handler = StackHandler(StackConfiguration())
    return handler.stop_stack()
47
+
48
+
49
def call_upload_files(args: argparse.Namespace) -> bool:
    """Check file and network preconditions, then upload the multimedia files referenced in the XML."""
    xml_file = Path(args.xml_file)
    img_dir = Path(args.imgdir)
    check_input_dependencies(
        PathDependencies([xml_file], required_directories=[img_dir]),
        NetworkRequirements(api_url=args.server),
    )
    return upload_files(
        xml_file=xml_file,
        creds=get_creds(args),
        imgdir=img_dir,
    )
61
+
62
+
63
def call_ingest_files(args: argparse.Namespace) -> bool:
    """Check that the DSP server is reachable, then trigger the ingest for the given project shortcode."""
    network = NetworkRequirements(api_url=args.server)
    check_input_dependencies(network_dependencies=network)
    return ingest_files(creds=get_creds(args), shortcode=args.shortcode)
66
+
67
+
68
def call_ingest_xmlupload(args: argparse.Namespace) -> bool:
    """Check preconditions, then create the resources of a pre-ingested XML file on the server."""
    xml_file = Path(args.xml_file)
    id2iri_file = args.id2iri_file
    files = [xml_file]
    if id2iri_file:
        files.append(Path(id2iri_file))
    check_input_dependencies(
        PathDependencies(files),
        NetworkRequirements(args.server, always_requires_docker=True),
    )

    # a non-positive --interrupt-after means "never interrupt"
    interrupt_after = None if args.interrupt_after <= 0 else args.interrupt_after
    return ingest_xmlupload(
        xml_file=xml_file,
        creds=get_creds(args),
        interrupt_after=interrupt_after,
        skip_validation=args.skip_validation,
        skip_ontology_validation=args.skip_ontology_validation,
        id2iri_file=id2iri_file,
        do_not_request_resource_metadata_from_db=args.do_not_request_resource_metadata_from_db,
    )
88
+
89
+
90
+ def call_xmlupload(args: argparse.Namespace) -> bool:
91
+ xml_path = Path(args.xmlfile)
92
+ required_files = [xml_path]
93
+ id2iri_file = args.id2iri_file
94
+ if id2iri_file:
95
+ required_files.append(Path(id2iri_file))
96
+ network_requirements = NetworkRequirements(args.server, always_requires_docker=True)
97
+ path_deps = PathDependencies(required_files, [Path(args.imgdir)])
98
+ check_input_dependencies(path_deps, network_requirements)
99
+
100
+ if args.validate_only:
101
+ success = parse_and_validate_xml_file(xml_path)
102
+ print("The XML file is syntactically correct.")
103
+ return success
104
+ else:
105
+ interrupt_after = args.interrupt_after if args.interrupt_after > 0 else None
106
+ match args.validation_severity:
107
+ case "info":
108
+ severity = ValidationSeverity.INFO
109
+ case "warning":
110
+ severity = ValidationSeverity.WARNING
111
+ case "error":
112
+ severity = ValidationSeverity.ERROR
113
+ case _:
114
+ raise InputError(
115
+ f"The entered validation severity '{args.validation_severity}' "
116
+ f"is not part of the allowed values: info, warning, error."
117
+ )
118
+ return xmlupload(
119
+ input_file=xml_path,
120
+ creds=get_creds(args),
121
+ imgdir=args.imgdir,
122
+ config=UploadConfig(
123
+ interrupt_after=interrupt_after,
124
+ skip_iiif_validation=args.no_iiif_uri_validation,
125
+ skip_validation=args.skip_validation,
126
+ ignore_duplicate_files_warning=args.ignore_duplicate_files_warning,
127
+ validation_severity=severity,
128
+ skip_ontology_validation=args.skip_ontology_validation,
129
+ do_not_request_resource_metadata_from_db=args.do_not_request_resource_metadata_from_db,
130
+ id2iri_file=id2iri_file,
131
+ ),
132
+ )
133
+
134
+
135
def call_validate_data(args: argparse.Namespace) -> bool:
    """Check preconditions, then run the SHACL validation of the XML data without uploading it."""
    xml_file = Path(args.xmlfile)
    id2iri_file = args.id2iri_file
    files = [xml_file]
    if id2iri_file:
        files.append(Path(id2iri_file))
    check_input_dependencies(
        PathDependencies(files),
        NetworkRequirements(args.server, always_requires_docker=True),
    )

    return validate_data(
        filepath=xml_file,
        creds=get_creds(args),
        save_graphs=args.save_graphs,
        ignore_duplicate_files_warning=args.ignore_duplicate_files_warning,
        skip_ontology_validation=args.skip_ontology_validation,
        id2iri_file=id2iri_file,
        do_not_request_resource_metadata_from_db=args.do_not_request_resource_metadata_from_db,
    )
154
+
155
+
156
def call_resume_xmlupload(args: argparse.Namespace) -> bool:
    """Resume a previously interrupted xmlupload.

    No `always_requires_docker`: resuming runs no validation, so Docker is only
    checked when the target is a localhost stack.
    """
    check_input_dependencies(network_dependencies=NetworkRequirements(args.server))
    return resume_xmlupload(
        creds=get_creds(args),
        skip_first_resource=args.skip_first_resource,
    )
163
+
164
+
165
def call_get(args: argparse.Namespace) -> bool:
    """Check preconditions, then download a project definition from the server into a JSON file."""
    out_path = Path(args.project_definition)
    check_input_dependencies(
        # only the parent directory must exist; the output file itself is created by get_project
        PathDependencies(required_directories=[out_path.parent]),
        NetworkRequirements(args.server),
    )

    return get_project(
        project_identifier=args.project,
        outfile_path=args.project_definition,
        creds=get_creds(args),
        verbose=args.verbose,
    )
176
+
177
+
178
+ def call_create(args: argparse.Namespace) -> bool:
179
+ network_dependencies = NetworkRequirements(args.server)
180
+ path_dependencies = PathDependencies([Path(args.project_definition)])
181
+ check_input_dependencies(path_dependencies, network_dependencies)
182
+
183
+ success = False
184
+ match args.lists_only, args.validate_only:
185
+ case True, True:
186
+ success = validate_lists_section_with_schema(args.project_definition)
187
+ print("'Lists' section of the JSON project file is syntactically correct and passed validation.")
188
+ case True, False:
189
+ success = create_lists_only(
190
+ project_file_as_path_or_parsed=args.project_definition,
191
+ creds=get_creds(args),
192
+ )
193
+ case False, True:
194
+ success = validate_project(args.project_definition)
195
+ print("JSON project file is syntactically correct and passed validation.")
196
+ case False, False:
197
+ success = create_project(
198
+ project_file_as_path_or_parsed=args.project_definition,
199
+ creds=get_creds(args),
200
+ verbose=args.verbose,
201
+ )
202
+ return success
@@ -96,7 +96,7 @@ def _add_start_stack(subparsers: _SubParsersAction[ArgumentParser]) -> None:
96
96
  max_file_size_text = "max. multimedia file size allowed for ingest, in MB (default: 2000, max: 100'000)"
97
97
  no_prune_text = "don't execute 'docker system prune' (and don't ask)"
98
98
  with_test_data_text = "initialise the database with built-in test data"
99
- custom_host = "set host to use stack on a server"
99
+ custom_host = "set a host to an IP or a domain to run the instance on a server"
100
100
  subparser = subparsers.add_parser(name="start-stack", help="Run a local instance of DSP-API and DSP-APP")
101
101
  subparser.set_defaults(action="start-stack")
102
102
  subparser.add_argument("--max_file_size", type=int, help=max_file_size_text)
@@ -237,10 +237,20 @@ def _add_ingest_xmlupload(
237
237
  subparser.add_argument("--interrupt-after", type=int, default=-1, help="interrupt after this number of resources")
238
238
  subparser.add_argument("xml_file", help="path to XML file containing the data")
239
239
  subparser.add_argument("--skip-validation", action="store_true", help="Skip the SHACL schema validation")
240
- subparser.add_argument("--skip-ontology-validation", action="store_true", help="skip the ontology validation")
241
240
  subparser.add_argument(
242
- "--id2iri-replacement-with-file",
243
- help="replaces internal IDs of an XML file by IRIs provided in this mapping file",
241
+ "--skip-ontology-validation",
242
+ action="store_true",
243
+ help=(
244
+ "don't validate the ontology itself, only the data. "
245
+ "This is intended for projects that are already on the production server"
246
+ ),
247
+ )
248
+ subparser.add_argument(
249
+ "--id2iri-file",
250
+ help=(
251
+ "replaces internal IDs of an XML file (links and stand-off links inside richtext) "
252
+ "by IRIs provided in this mapping file"
253
+ ),
244
254
  )
245
255
  subparser.add_argument(
246
256
  "--do-not-request-resource-metadata-from-db",
@@ -268,13 +278,24 @@ def _add_xmlupload(
268
278
  "-i", "--imgdir", default=".", help="folder from where the paths in the <bitstream> tags are evaluated"
269
279
  )
270
280
  subparser.add_argument(
271
- "-V", "--validate-only", action="store_true", help="validate the XML file without uploading it"
281
+ "-V", "--validate-only", action="store_true", help="run the XML Schema validation without uploading the XML"
272
282
  )
273
283
  subparser.add_argument("--skip-validation", action="store_true", help="Skip the SHACL schema validation")
274
- subparser.add_argument("--skip-ontology-validation", action="store_true", help="skip the ontology validation")
284
+ subparser.add_argument(
285
+ "--skip-ontology-validation",
286
+ action="store_true",
287
+ help=(
288
+ "don't validate the ontology itself, only the data. "
289
+ "This is intended for projects that are already on the production server"
290
+ ),
291
+ )
275
292
  subparser.add_argument("--interrupt-after", type=int, default=-1, help="interrupt after this number of resources")
276
293
  subparser.add_argument("xmlfile", help="path to the XML file containing the data")
277
- subparser.add_argument("--no-iiif-uri-validation", action="store_true", help="skip the IIIF URI validation")
294
+ subparser.add_argument(
295
+ "--no-iiif-uri-validation",
296
+ action="store_true",
297
+ help="don't check if the IIIF links are valid URLs that can be reached online",
298
+ )
278
299
  subparser.add_argument(
279
300
  "--ignore-duplicate-files-warning",
280
301
  action="store_true",
@@ -283,7 +304,10 @@ def _add_xmlupload(
283
304
  subparser.add_argument(
284
305
  "--validation-severity",
285
306
  choices=["error", "warning", "info"],
286
- help="Which severity level of validation message should be printed out",
307
+ help=(
308
+ "Which severity level of validation message should be printed out. "
309
+ "Each level of severity includes the higher levels."
310
+ ),
287
311
  default="info",
288
312
  )
289
313
  subparser.add_argument(
@@ -294,8 +318,11 @@ def _add_xmlupload(
294
318
  ),
295
319
  )
296
320
  subparser.add_argument(
297
- "--id2iri-replacement-with-file",
298
- help="replaces internal IDs of an XML file by IRIs provided in this mapping file",
321
+ "--id2iri-file",
322
+ help=(
323
+ "replaces internal IDs of an XML file (links and stand-off links inside richtext) "
324
+ "by IRIs provided in this mapping file"
325
+ ),
299
326
  )
300
327
 
301
328
 
@@ -315,13 +342,23 @@ def _add_validate_data(
315
342
  action="store_true",
316
343
  help="don't check if multimedia files are referenced more than once",
317
344
  )
318
- subparser.add_argument("--skip-ontology-validation", action="store_true", help="skip the ontology validation")
345
+ subparser.add_argument(
346
+ "--skip-ontology-validation",
347
+ action="store_true",
348
+ help=(
349
+ "don't validate the ontology itself, only the data. "
350
+ "This is intended for projects that are already on the production server"
351
+ ),
352
+ )
319
353
  subparser.add_argument(
320
354
  "-s", "--server", default=default_dsp_api_url, help="URL of the DSP server where DSP-TOOLS sends the data to"
321
355
  )
322
356
  subparser.add_argument(
323
- "--id2iri-replacement-with-file",
324
- help="replaces internal IDs of an XML file by IRIs provided in this mapping file",
357
+ "--id2iri-file",
358
+ help=(
359
+ "replaces internal IDs of an XML file (links and stand-off links inside richtext) "
360
+ "by IRIs provided in this mapping file"
361
+ ),
325
362
  )
326
363
  subparser.add_argument(
327
364
  "--do-not-request-resource-metadata-from-db",
@@ -331,7 +368,9 @@ def _add_validate_data(
331
368
  ),
332
369
  )
333
370
  subparser.add_argument(
334
- "--save-graphs", action="store_true", help="Save the data, onto and shacl graph as ttl files."
371
+ "--save-graphs",
372
+ action="store_true",
373
+ help="Save the data, onto and shacl graph as ttl files. This is primarily intended for development use.",
335
374
  )
336
375
 
337
376
 
dsp_tools/cli/utils.py ADDED
@@ -0,0 +1,87 @@
1
+ import argparse
2
+ import subprocess
3
+ from pathlib import Path
4
+
5
+ import requests
6
+ from loguru import logger
7
+
8
+ from dsp_tools.cli.args import NetworkRequirements
9
+ from dsp_tools.cli.args import PathDependencies
10
+ from dsp_tools.cli.args import ServerCredentials
11
+ from dsp_tools.error.exceptions import DockerNotReachableError
12
+ from dsp_tools.error.exceptions import DspApiNotReachableError
13
+ from dsp_tools.error.exceptions import UserDirectoryNotFoundError
14
+ from dsp_tools.error.exceptions import UserFilepathNotFoundError
15
+
16
+ LOCALHOST_API = "http://0.0.0.0:3333"
17
+
18
+
19
def get_creds(args: argparse.Namespace) -> ServerCredentials:
    """Bundle the connection options from the parsed CLI arguments into a ServerCredentials object."""
    return ServerCredentials(
        server=args.server,
        user=args.user,
        password=args.password,
        dsp_ingest_url=args.dsp_ingest_url,
    )
26
+
27
+
28
def check_input_dependencies(
    paths: PathDependencies | None = None, network_dependencies: NetworkRequirements | None = None
) -> None:
    """Run the pre-flight checks for whichever dependency sets were supplied.

    Either argument may be omitted; only the provided checks are executed.
    """
    if paths:
        check_path_dependencies(paths)
    if network_dependencies:
        _check_network_health(network_dependencies)
35
+
36
+
37
def check_path_dependencies(paths: PathDependencies) -> None:
    """Raise a user-facing error for the first required file or directory that does not exist."""
    for required_file in paths.required_files:
        _check_filepath_exists(required_file)
    for required_dir in paths.required_directories:
        _check_directory_exists(required_dir)
42
+
43
+
44
def _check_filepath_exists(file_path: Path) -> None:
    """Raise UserFilepathNotFoundError unless the path exists on disk."""
    if file_path.exists():
        return
    raise UserFilepathNotFoundError(file_path)
47
+
48
+
49
def _check_directory_exists(dir_path: Path) -> None:
    """Raise UserDirectoryNotFoundError unless the path exists and is a directory."""
    if dir_path.is_dir():
        return
    raise UserDirectoryNotFoundError(dir_path)
52
+
53
+
54
def _check_network_health(network_requirements: NetworkRequirements) -> None:
    """Check Docker (when the target needs it) and the DSP-API health endpoint."""
    needs_docker = (
        network_requirements.always_requires_docker or network_requirements.api_url == LOCALHOST_API
    )
    if needs_docker:
        check_docker_health()
    _check_api_health(network_requirements.api_url)
58
+
59
+
60
def check_docker_health() -> None:
    """Raise DockerNotReachableError if the Docker daemon does not answer a lightweight command."""
    # 'docker stats --no-stream' exits non-zero when the daemon is down or unreachable
    result = subprocess.run(["docker", "stats", "--no-stream"], check=False, capture_output=True)
    if result.returncode != 0:
        raise DockerNotReachableError()
63
+
64
+
65
def _check_api_health(api_url: str) -> None:
    """Query the DSP-API health endpoint and raise DspApiNotReachableError if it is not healthy.

    For non-localhost servers the message points to the DaSCH engineering team,
    since the user cannot restart a remote stack themselves.
    """
    health_url = f"{api_url}/health"
    # default message assumes a local stack the user can (re)start on their own
    msg = (
        "The DSP-API could not be reached. Please check if your stack is healthy "
        "or start a stack with 'dsp-tools start-stack' if none is running."
    )
    try:
        response = requests.get(health_url, timeout=2)
        if not response.ok:
            if api_url != LOCALHOST_API:
                msg = (
                    f"The DSP-API could not be reached (returned status {response.status_code}). "
                    f"Please contact the DaSCH engineering team for help."
                )
            logger.error(msg)
            raise DspApiNotReachableError(msg)
        logger.debug(f"DSP API health check passed: {health_url}")
    except requests.exceptions.RequestException as e:
        logger.error(e)
        if api_url != LOCALHOST_API:
            msg = "The DSP-API responded with a request exception. Please contact the DaSCH engineering team for help."
            logger.error(msg)
        raise DspApiNotReachableError(msg) from None
@@ -0,0 +1,49 @@
1
+ from __future__ import annotations
2
+
3
+ from dataclasses import dataclass
4
+ from typing import Any
5
+ from typing import Protocol
6
+
7
+ from dsp_tools.clients.authentication_client import AuthenticationClient
8
+
9
+
10
@dataclass
class OneList:
    """One project list: its IRI, its name, and a flat collection of all its nodes."""

    list_iri: str
    list_name: str
    nodes: list[OneNode]

    def hlist(self) -> str:
        """Return the quoted 'hlist' attribute string referencing this list by IRI."""
        return f'"hlist=<{self.list_iri}>"'
18
+
19
+
20
@dataclass
class OneNode:
    """A single node of a list: its name and its IRI."""

    name: str
    iri: str
24
+
25
+
26
@dataclass
class ListGetClient(Protocol):
    """Protocol for clients that retrieve and reshape the lists of one project."""

    api_url: str
    shortcode: str

    def get_all_lists_and_nodes(self) -> list[OneList]:
        """Return every list of the project, each with all of its nodes."""

    def get_all_list_iris_and_names(self) -> dict[str, str]:
        """Return the names and IRIs of all lists of the project."""
38
+
39
+
40
@dataclass
class ListCreateClient(Protocol):
    """Protocol for clients that create lists and list nodes on the server."""

    api_url: str
    auth: AuthenticationClient

    def create_new_list(self, list_info: dict[str, Any]) -> str | None:
        """Create a new list; return its IRI, or None if creation failed."""

    def add_list_node(self, node_info: dict[str, Any], parent_iri: str) -> str | None:
        """Add a node to an existing list; return its IRI, or None if it failed."""
@@ -0,0 +1,166 @@
1
+ from dataclasses import dataclass
2
+ from http import HTTPStatus
3
+ from typing import Any
4
+ from typing import cast
5
+ from urllib.parse import quote_plus
6
+
7
+ import requests
8
+ from requests import RequestException
9
+ from requests import Response
10
+
11
+ from dsp_tools.clients.authentication_client import AuthenticationClient
12
+ from dsp_tools.clients.list_client import ListCreateClient
13
+ from dsp_tools.clients.list_client import ListGetClient
14
+ from dsp_tools.clients.list_client import OneList
15
+ from dsp_tools.clients.list_client import OneNode
16
+ from dsp_tools.error.exceptions import BadCredentialsError
17
+ from dsp_tools.error.exceptions import FatalNonOkApiResponseCode
18
+ from dsp_tools.utils.request_utils import RequestParameters
19
+ from dsp_tools.utils.request_utils import log_and_raise_request_exception
20
+ from dsp_tools.utils.request_utils import log_and_warn_unexpected_non_ok_response
21
+ from dsp_tools.utils.request_utils import log_request
22
+ from dsp_tools.utils.request_utils import log_response
23
+
24
+ TIMEOUT = 60
25
+
26
+
27
@dataclass
class ListGetClientLive(ListGetClient):
    """Live implementation that fetches a project's lists from the DSP-API admin endpoint."""

    api_url: str
    shortcode: str

    def get_all_lists_and_nodes(self) -> list[OneList]:
        """Fetch every list of the project and flatten each one's node tree."""
        overview = self._get_all_list_iris()
        return [
            self._reformat_one_list(self._get_one_list(iri))
            for iri in self._extract_list_iris(overview)
        ]

    def get_all_list_iris_and_names(self) -> dict[str, str]:
        """Return a mapping of list name to list IRI for the project."""
        overview = self._get_all_list_iris()
        iris = self._extract_list_iris(overview)
        names = [entry["name"] for entry in overview["lists"]]
        return dict(zip(names, iris))

    def _get_all_list_iris(self) -> dict[str, Any]:
        """GET the overview of all lists of the project; raise on network errors or non-OK responses."""
        url = f"{self.api_url}/admin/lists?projectShortcode={self.shortcode}"
        timeout = 10
        log_request(RequestParameters("GET", url, timeout))
        try:
            response = requests.get(url=url, timeout=timeout)
        except RequestException as err:
            log_and_raise_request_exception(err)

        log_response(response)
        if not response.ok:
            raise FatalNonOkApiResponseCode(url, response.status_code, response.text)
        return cast(dict[str, Any], response.json())

    def _extract_list_iris(self, response_json: dict[str, Any]) -> list[str]:
        """Pull the list IRIs out of the overview response."""
        return [entry["id"] for entry in response_json["lists"]]

    def _get_one_list(self, list_iri: str) -> dict[str, Any]:
        """GET one complete list (including its node tree) by IRI."""
        url = f"{self.api_url}/admin/lists/{quote_plus(list_iri)}"
        timeout = 30
        log_request(RequestParameters("GET", url, timeout))
        try:
            response = requests.get(url=url, timeout=timeout)
        except RequestException as err:
            log_and_raise_request_exception(err)

        # a full list payload can be large, so don't log the response body
        log_response(response, include_response_content=False)
        if not response.ok:
            raise FatalNonOkApiResponseCode(url, response.status_code, response.text)
        return cast(dict[str, Any], response.json())

    def _reformat_one_list(self, response_json: dict[str, Any]) -> OneList:
        """Turn the API payload of one list into an OneList with a flattened node collection."""
        listinfo = response_json["list"]["listinfo"]
        all_nodes: list[OneNode] = []
        # pre-order traversal: a node is recorded before its own children
        self._reformat_children(response_json["list"]["children"], all_nodes)
        return OneList(list_iri=listinfo["id"], list_name=listinfo["name"], nodes=all_nodes)

    def _reformat_children(self, list_child: list[dict[str, Any]], current_nodes: list[OneNode]) -> None:
        """Recursively collect all (grand-)child nodes into current_nodes."""
        for child in list_child:
            current_nodes.append(OneNode(child["name"], child["id"]))
            if grand_child := child.get("children"):
                self._reformat_children(grand_child, current_nodes)
96
+
97
+
98
@dataclass
class ListCreateClientLive(ListCreateClient):
    """Live implementation that creates lists and list nodes through the DSP-API admin endpoint."""

    api_url: str
    project_iri: str
    auth: AuthenticationClient

    def create_new_list(self, list_info: dict[str, Any]) -> str | None:
        """POST a new list; return its IRI, or None on an unexpected non-OK response.

        Raises BadCredentialsError when the response is 403 FORBIDDEN.
        """
        url = f"{self.api_url}/admin/lists"
        try:
            response = _post_and_log_request(url, list_info, self._get_request_header())
        except RequestException as err:
            log_and_raise_request_exception(err)

        if response.ok:
            return cast(str, response.json()["list"]["listinfo"]["id"])
        if response.status_code == HTTPStatus.FORBIDDEN:
            raise BadCredentialsError(
                "Only a project or system administrator can create lists. "
                "Your permissions are insufficient for this action."
            )
        log_and_warn_unexpected_non_ok_response(response.status_code, response.text)
        return None

    def add_list_node(self, node_info: dict[str, Any], parent_iri: str) -> str | None:
        """POST a new node under parent_iri; return its IRI, or None on an unexpected non-OK response.

        Raises BadCredentialsError when the response is 403 FORBIDDEN.
        """
        url = f"{self.api_url}/admin/lists/{quote_plus(parent_iri)}"
        try:
            response = _post_and_log_request(url, node_info, self._get_request_header())
        except RequestException as err:
            log_and_raise_request_exception(err)

        if response.ok:
            return cast(str, response.json()["nodeinfo"]["id"])
        if response.status_code == HTTPStatus.FORBIDDEN:
            raise BadCredentialsError(
                "Only a project or system administrator can add nodes to lists. "
                "Your permissions are insufficient for this action."
            )
        log_and_warn_unexpected_non_ok_response(response.status_code, response.text)
        return None

    def _get_request_header(self) -> dict[str, str]:
        """Build JSON content-type headers carrying a fresh bearer token."""
        return {
            "Content-Type": "application/json",
            "Authorization": f"Bearer {self.auth.get_token()}",
        }
150
+
151
+
152
def _post_and_log_request(
    url: str,
    data: dict[str, Any],
    headers: dict[str, str] | None = None,
) -> Response:
    """Log the outgoing POST, send it with the module-level TIMEOUT, log and return the response."""
    params = RequestParameters("POST", url, TIMEOUT, data, headers)
    log_request(params)
    response = requests.post(
        url=params.url,
        headers=params.headers,
        data=params.data_serialized,
        timeout=params.timeout,
    )
    log_response(response)
    return response
@@ -6,9 +6,9 @@ from rdflib import Literal
6
6
  from dsp_tools.clients.authentication_client import AuthenticationClient
7
7
 
8
8
 
9
- class OntologyClient(Protocol):
9
+ class OntologyCreateClient(Protocol):
10
10
  """
11
- Protocol class/interface for the ontology endpoint in the API.
11
+ Protocol class/interface to create / update the ontology through the API.
12
12
  """
13
13
 
14
14
  server: str
@@ -19,3 +19,18 @@ class OntologyClient(Protocol):
19
19
 
20
20
  def post_resource_cardinalities(self, cardinality_graph: dict[str, Any]) -> Literal | None:
21
21
  """Add cardinalities to an existing resource class."""
22
+
23
+
24
class OntologyGetClient(Protocol):
    """Protocol class/interface to retrieve ontologies from the API."""

    api_url: str
    shortcode: str

    def get_knora_api(self) -> str:
        """Return the knora-api ontology."""

    def get_ontologies(self) -> tuple[list[str], list[str]]:
        """Return all ontologies of the project."""