dsp-tools 17.0.0.post29__py3-none-any.whl → 18.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


Files changed (38)
  1. dsp_tools/cli/args.py +13 -0
  2. dsp_tools/cli/call_action.py +34 -330
  3. dsp_tools/cli/call_action_files_only.py +74 -0
  4. dsp_tools/cli/call_action_with_network.py +203 -0
  5. dsp_tools/cli/create_parsers.py +50 -11
  6. dsp_tools/cli/utils.py +87 -0
  7. dsp_tools/clients/list_client.py +49 -0
  8. dsp_tools/clients/list_client_live.py +157 -0
  9. dsp_tools/clients/{ontology_client.py → ontology_clients.py} +17 -2
  10. dsp_tools/clients/{ontology_client_live.py → ontology_create_client_live.py} +2 -2
  11. dsp_tools/clients/ontology_get_client_live.py +65 -0
  12. dsp_tools/clients/project_client.py +10 -0
  13. dsp_tools/clients/project_client_live.py +30 -0
  14. dsp_tools/commands/create/create_on_server/cardinalities.py +14 -8
  15. dsp_tools/commands/create/create_on_server/lists.py +150 -0
  16. dsp_tools/commands/create/lists_only.py +45 -0
  17. dsp_tools/commands/create/models/input_problems.py +13 -0
  18. dsp_tools/commands/create/models/parsed_project.py +14 -1
  19. dsp_tools/commands/create/models/rdf_ontology.py +0 -7
  20. dsp_tools/commands/create/models/server_project_info.py +17 -3
  21. dsp_tools/commands/create/parsing/parse_lists.py +45 -0
  22. dsp_tools/commands/create/parsing/parse_project.py +23 -4
  23. dsp_tools/commands/project/create/project_create_all.py +17 -13
  24. dsp_tools/commands/project/create/project_create_default_permissions.py +8 -6
  25. dsp_tools/commands/project/create/project_create_ontologies.py +30 -18
  26. dsp_tools/commands/project/legacy_models/listnode.py +0 -30
  27. dsp_tools/commands/validate_data/models/api_responses.py +2 -16
  28. dsp_tools/commands/validate_data/prepare_data/prepare_data.py +8 -7
  29. dsp_tools/commands/validate_data/sparql/value_shacl.py +1 -1
  30. dsp_tools/error/exceptions.py +8 -0
  31. dsp_tools/resources/start-stack/docker-compose.yml +23 -23
  32. dsp_tools/utils/ansi_colors.py +2 -0
  33. {dsp_tools-17.0.0.post29.dist-info → dsp_tools-18.0.0.dist-info}/METADATA +1 -1
  34. {dsp_tools-17.0.0.post29.dist-info → dsp_tools-18.0.0.dist-info}/RECORD +36 -27
  35. {dsp_tools-17.0.0.post29.dist-info → dsp_tools-18.0.0.dist-info}/WHEEL +1 -1
  36. dsp_tools/commands/project/create/project_create_lists.py +0 -200
  37. dsp_tools/commands/validate_data/api_clients.py +0 -124
  38. {dsp_tools-17.0.0.post29.dist-info → dsp_tools-18.0.0.dist-info}/entry_points.txt +0 -0
dsp_tools/cli/call_action_with_network.py ADDED
@@ -0,0 +1,203 @@
+ import argparse
+ from pathlib import Path
+
+ from dsp_tools.cli.args import NetworkRequirements
+ from dsp_tools.cli.args import PathDependencies
+ from dsp_tools.cli.args import ValidationSeverity
+ from dsp_tools.cli.utils import check_docker_health
+ from dsp_tools.cli.utils import check_input_dependencies
+ from dsp_tools.cli.utils import check_network_health
+ from dsp_tools.cli.utils import get_creds
+ from dsp_tools.commands.create.lists_only import create_lists_only
+ from dsp_tools.commands.excel2json.old_lists import validate_lists_section_with_schema
+ from dsp_tools.commands.ingest_xmlupload.create_resources.upload_xml import ingest_xmlupload
+ from dsp_tools.commands.ingest_xmlupload.ingest_files.ingest_files import ingest_files
+ from dsp_tools.commands.ingest_xmlupload.upload_files.upload_files import upload_files
+ from dsp_tools.commands.project.create.project_create_all import create_project
+ from dsp_tools.commands.project.create.project_validate import validate_project
+ from dsp_tools.commands.project.get.get import get_project
+ from dsp_tools.commands.resume_xmlupload.resume_xmlupload import resume_xmlupload
+ from dsp_tools.commands.start_stack import StackConfiguration
+ from dsp_tools.commands.start_stack import StackHandler
+ from dsp_tools.commands.validate_data.validate_data import validate_data
+ from dsp_tools.commands.xmlupload.upload_config import UploadConfig
+ from dsp_tools.commands.xmlupload.xmlupload import xmlupload
+ from dsp_tools.error.exceptions import InputError
+ from dsp_tools.utils.xml_parsing.parse_clean_validate_xml import parse_and_validate_xml_file
+
+
+ def call_start_stack(args: argparse.Namespace) -> bool:
+     check_docker_health()
+     stack_handler = StackHandler(
+         StackConfiguration(
+             max_file_size=args.max_file_size,
+             enforce_docker_system_prune=args.prune,
+             suppress_docker_system_prune=args.no_prune,
+             latest_dev_version=args.latest,
+             upload_test_data=args.with_test_data,
+             custom_host=args.custom_host,
+         )
+     )
+     return stack_handler.start_stack()
+
+
+ def call_stop_stack() -> bool:
+     check_docker_health()
+     stack_handler = StackHandler(StackConfiguration())
+     return stack_handler.stop_stack()
+
+
+ def call_upload_files(args: argparse.Namespace) -> bool:
+     xml_path = Path(args.xml_file)
+     image_dir = Path(args.imgdir)
+     network_requirements = NetworkRequirements(api_url=args.server)
+     path_requirements = PathDependencies([xml_path], required_directories=[image_dir])
+     check_input_dependencies(path_requirements, network_requirements)
+
+     return upload_files(
+         xml_file=xml_path,
+         creds=get_creds(args),
+         imgdir=image_dir,
+     )
+
+
+ def call_ingest_files(args: argparse.Namespace) -> bool:
+     check_network_health(NetworkRequirements(api_url=args.server))
+     return ingest_files(creds=get_creds(args), shortcode=args.shortcode)
+
+
+ def call_ingest_xmlupload(args: argparse.Namespace) -> bool:
+     xml_path = Path(args.xml_file)
+     required_files = [xml_path]
+     id2iri_file = args.id2iri_replacement_with_file
+     if id2iri_file:
+         required_files.append(Path(id2iri_file))
+     network_requirements = NetworkRequirements(args.server, always_requires_docker=True)
+     path_deps = PathDependencies(required_files)
+     check_input_dependencies(path_deps, network_requirements)
+
+     interrupt_after = args.interrupt_after if args.interrupt_after > 0 else None
+     return ingest_xmlupload(
+         xml_file=xml_path,
+         creds=get_creds(args),
+         interrupt_after=interrupt_after,
+         skip_validation=args.skip_validation,
+         skip_ontology_validation=args.skip_ontology_validation,
+         id2iri_replacement_file=id2iri_file,
+         do_not_request_resource_metadata_from_db=args.do_not_request_resource_metadata_from_db,
+     )
+
+
+ def call_xmlupload(args: argparse.Namespace) -> bool:
+     xml_path = Path(args.xmlfile)
+     required_files = [xml_path]
+     id2iri_file = args.id2iri_replacement_with_file
+     if id2iri_file:
+         required_files.append(Path(id2iri_file))
+     network_requirements = NetworkRequirements(args.server, always_requires_docker=True)
+     path_deps = PathDependencies(required_files, [Path(args.imgdir)])
+     check_input_dependencies(path_deps, network_requirements)
+
+     if args.validate_only:
+         success = parse_and_validate_xml_file(xml_path)
+         print("The XML file is syntactically correct.")
+         return success
+     else:
+         interrupt_after = args.interrupt_after if args.interrupt_after > 0 else None
+         match args.validation_severity:
+             case "info":
+                 severity = ValidationSeverity.INFO
+             case "warning":
+                 severity = ValidationSeverity.WARNING
+             case "error":
+                 severity = ValidationSeverity.ERROR
+             case _:
+                 raise InputError(
+                     f"The entered validation severity '{args.validation_severity}' "
+                     f"is not part of the allowed values: info, warning, error."
+                 )
+         return xmlupload(
+             input_file=xml_path,
+             creds=get_creds(args),
+             imgdir=args.imgdir,
+             config=UploadConfig(
+                 interrupt_after=interrupt_after,
+                 skip_iiif_validation=args.no_iiif_uri_validation,
+                 skip_validation=args.skip_validation,
+                 ignore_duplicate_files_warning=args.ignore_duplicate_files_warning,
+                 validation_severity=severity,
+                 skip_ontology_validation=args.skip_ontology_validation,
+                 do_not_request_resource_metadata_from_db=args.do_not_request_resource_metadata_from_db,
+                 id2iri_replacement_file=id2iri_file,
+             ),
+         )
+
+
+ def call_validate_data(args: argparse.Namespace) -> bool:
+     xml_path = Path(args.xmlfile)
+     required_files = [xml_path]
+     id2iri_file = args.id2iri_replacement_with_file
+     if id2iri_file:
+         required_files.append(Path(id2iri_file))
+     network_requirements = NetworkRequirements(args.server, always_requires_docker=True)
+     path_deps = PathDependencies(required_files)
+     check_input_dependencies(path_deps, network_requirements)
+
+     return validate_data(
+         filepath=xml_path,
+         creds=get_creds(args),
+         save_graphs=args.save_graphs,
+         ignore_duplicate_files_warning=args.ignore_duplicate_files_warning,
+         skip_ontology_validation=args.skip_ontology_validation,
+         id2iri_replacement_file=id2iri_file,
+         do_not_request_resource_metadata_from_db=args.do_not_request_resource_metadata_from_db,
+     )
+
+
+ def call_resume_xmlupload(args: argparse.Namespace) -> bool:
+     # this does not need docker if not on localhost, as does not need to validate
+     check_network_health(NetworkRequirements(args.server))
+     return resume_xmlupload(
+         creds=get_creds(args),
+         skip_first_resource=args.skip_first_resource,
+     )
+
+
+ def call_get(args: argparse.Namespace) -> bool:
+     network_dependencies = NetworkRequirements(args.server)
+     path_dependencies = PathDependencies(required_directories=[Path(args.project_definition).parent])
+     check_input_dependencies(path_dependencies, network_dependencies)
+
+     return get_project(
+         project_identifier=args.project,
+         outfile_path=args.project_definition,
+         creds=get_creds(args),
+         verbose=args.verbose,
+     )
+
+
+ def call_create(args: argparse.Namespace) -> bool:
+     network_dependencies = NetworkRequirements(args.server)
+     path_dependencies = PathDependencies([Path(args.project_definition)])
+     check_input_dependencies(path_dependencies, network_dependencies)
+
+     success = False
+     match args.lists_only, args.validate_only:
+         case True, True:
+             success = validate_lists_section_with_schema(args.project_definition)
+             print("'Lists' section of the JSON project file is syntactically correct and passed validation.")
+         case True, False:
+             success = create_lists_only(
+                 project_file_as_path_or_parsed=args.project_definition,
+                 creds=get_creds(args),
+             )
+         case False, True:
+             success = validate_project(args.project_definition)
+             print("JSON project file is syntactically correct and passed validation.")
+         case False, False:
+             success = create_project(
+                 project_file_as_path_or_parsed=args.project_definition,
+                 creds=get_creds(args),
+                 verbose=args.verbose,
+             )
+     return success
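
Each subcommand parser sets an `action` default (see the `create_parsers.py` changes below), and the CLI entry point routes it to one of the `call_*` handlers above. A minimal sketch of that wiring, assuming a dispatch table keyed on `args.action`; the table and the `run()` helper are illustrative, not the actual dsp-tools entry point, and only the "start-stack" action name is confirmed by this diff:

import argparse

from dsp_tools.cli import call_action_with_network as actions

# Hypothetical dispatch table: subcommand name -> handler.
DISPATCH = {
    "start-stack": actions.call_start_stack,
    "xmlupload": actions.call_xmlupload,
    "validate-data": actions.call_validate_data,
    "get": actions.call_get,
    "create": actions.call_create,
}


def run(parser: argparse.ArgumentParser, argv: list[str]) -> bool:
    args = parser.parse_args(argv)
    handler = DISPATCH[args.action]  # "action" is set via subparser.set_defaults(action=...)
    return handler(args)  # every handler returns True on success
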
dsp_tools/cli/create_parsers.py CHANGED
@@ -96,7 +96,7 @@ def _add_start_stack(subparsers: _SubParsersAction[ArgumentParser]) -> None:
      max_file_size_text = "max. multimedia file size allowed for ingest, in MB (default: 2000, max: 100'000)"
      no_prune_text = "don't execute 'docker system prune' (and don't ask)"
      with_test_data_text = "initialise the database with built-in test data"
-     custom_host = "set host to use stack on a server"
+     custom_host = "set a host to an IP or a domain to run the instance on a server"
      subparser = subparsers.add_parser(name="start-stack", help="Run a local instance of DSP-API and DSP-APP")
      subparser.set_defaults(action="start-stack")
      subparser.add_argument("--max_file_size", type=int, help=max_file_size_text)
@@ -237,10 +237,20 @@ def _add_ingest_xmlupload(
      subparser.add_argument("--interrupt-after", type=int, default=-1, help="interrupt after this number of resources")
      subparser.add_argument("xml_file", help="path to XML file containing the data")
      subparser.add_argument("--skip-validation", action="store_true", help="Skip the SHACL schema validation")
-     subparser.add_argument("--skip-ontology-validation", action="store_true", help="skip the ontology validation")
+     subparser.add_argument(
+         "--skip-ontology-validation",
+         action="store_true",
+         help=(
+             "don't validate the ontology itself, only the data. "
+             "This is intended for projects that are already on the production server"
+         ),
+     )
      subparser.add_argument(
          "--id2iri-replacement-with-file",
-         help="replaces internal IDs of an XML file by IRIs provided in this mapping file",
+         help=(
+             "replaces internal IDs of an XML file (links and stand-off links inside richtext) "
+             "by IRIs provided in this mapping file"
+         ),
      )
      subparser.add_argument(
          "--do-not-request-resource-metadata-from-db",
@@ -268,13 +278,24 @@ def _add_xmlupload(
          "-i", "--imgdir", default=".", help="folder from where the paths in the <bitstream> tags are evaluated"
      )
      subparser.add_argument(
-         "-V", "--validate-only", action="store_true", help="validate the XML file without uploading it"
+         "-V", "--validate-only", action="store_true", help="run the XML Schema validation without uploading the XML"
      )
      subparser.add_argument("--skip-validation", action="store_true", help="Skip the SHACL schema validation")
-     subparser.add_argument("--skip-ontology-validation", action="store_true", help="skip the ontology validation")
+     subparser.add_argument(
+         "--skip-ontology-validation",
+         action="store_true",
+         help=(
+             "don't validate the ontology itself, only the data. "
+             "This is intended for projects that are already on the production server"
+         ),
+     )
      subparser.add_argument("--interrupt-after", type=int, default=-1, help="interrupt after this number of resources")
      subparser.add_argument("xmlfile", help="path to the XML file containing the data")
-     subparser.add_argument("--no-iiif-uri-validation", action="store_true", help="skip the IIIF URI validation")
+     subparser.add_argument(
+         "--no-iiif-uri-validation",
+         action="store_true",
+         help="don't check if the IIIF links are valid URLs that can be reached online",
+     )
      subparser.add_argument(
          "--ignore-duplicate-files-warning",
          action="store_true",
@@ -283,7 +304,10 @@ def _add_xmlupload(
      subparser.add_argument(
          "--validation-severity",
          choices=["error", "warning", "info"],
-         help="Which severity level of validation message should be printed out",
+         help=(
+             "Which severity level of validation message should be printed out. "
+             "Each level of severity includes the higher levels."
+         ),
          default="info",
      )
      subparser.add_argument(
@@ -295,7 +319,10 @@ def _add_xmlupload(
      )
      subparser.add_argument(
          "--id2iri-replacement-with-file",
-         help="replaces internal IDs of an XML file by IRIs provided in this mapping file",
+         help=(
+             "replaces internal IDs of an XML file (links and stand-off links inside richtext) "
+             "by IRIs provided in this mapping file"
+         ),
      )


@@ -315,13 +342,23 @@ def _add_validate_data(
          action="store_true",
          help="don't check if multimedia files are referenced more than once",
      )
-     subparser.add_argument("--skip-ontology-validation", action="store_true", help="skip the ontology validation")
+     subparser.add_argument(
+         "--skip-ontology-validation",
+         action="store_true",
+         help=(
+             "don't validate the ontology itself, only the data. "
+             "This is intended for projects that are already on the production server"
+         ),
+     )
      subparser.add_argument(
          "-s", "--server", default=default_dsp_api_url, help="URL of the DSP server where DSP-TOOLS sends the data to"
      )
      subparser.add_argument(
          "--id2iri-replacement-with-file",
-         help="replaces internal IDs of an XML file by IRIs provided in this mapping file",
+         help=(
+             "replaces internal IDs of an XML file (links and stand-off links inside richtext) "
+             "by IRIs provided in this mapping file"
+         ),
      )
      subparser.add_argument(
          "--do-not-request-resource-metadata-from-db",
@@ -331,7 +368,9 @@ def _add_validate_data(
          ),
      )
      subparser.add_argument(
-         "--save-graphs", action="store_true", help="Save the data, onto and shacl graph as ttl files."
+         "--save-graphs",
+         action="store_true",
+         help="Save the data, onto and shacl graph as ttl files. This is primarily intended for development use.",
      )

dsp_tools/cli/utils.py ADDED
@@ -0,0 +1,87 @@
+ import argparse
+ import subprocess
+ from pathlib import Path
+
+ import requests
+ from loguru import logger
+
+ from dsp_tools.cli.args import NetworkRequirements
+ from dsp_tools.cli.args import PathDependencies
+ from dsp_tools.cli.args import ServerCredentials
+ from dsp_tools.error.exceptions import DockerNotReachableError
+ from dsp_tools.error.exceptions import DspApiNotReachableError
+ from dsp_tools.error.exceptions import UserDirectoryNotFoundError
+ from dsp_tools.error.exceptions import UserFilepathNotFoundError
+
+ LOCALHOST_API = "http://0.0.0.0:3333"
+
+
+ def get_creds(args: argparse.Namespace) -> ServerCredentials:
+     return ServerCredentials(
+         server=args.server,
+         user=args.user,
+         password=args.password,
+         dsp_ingest_url=args.dsp_ingest_url,
+     )
+
+
+ def check_input_dependencies(
+     paths: PathDependencies | None = None, network_dependencies: NetworkRequirements | None = None
+ ) -> None:
+     if paths:
+         check_path_dependencies(paths)
+     if network_dependencies:
+         check_network_health(network_dependencies)
+
+
+ def check_path_dependencies(paths: PathDependencies) -> None:
+     for f_path in paths.required_files:
+         _check_filepath_exists(f_path)
+     for dir_path in paths.required_directories:
+         _check_directory_exists(dir_path)
+
+
+ def _check_filepath_exists(file_path: Path) -> None:
+     if not file_path.exists():
+         raise UserFilepathNotFoundError(file_path)
+
+
+ def _check_directory_exists(dir_path: Path) -> None:
+     if not dir_path.is_dir():
+         raise UserDirectoryNotFoundError(dir_path)
+
+
+ def check_network_health(network_requirements: NetworkRequirements) -> None:
+     if network_requirements.api_url == LOCALHOST_API or network_requirements.always_requires_docker:
+         check_docker_health()
+     _check_api_health(network_requirements.api_url)
+
+
+ def check_docker_health() -> None:
+     if subprocess.run("docker stats --no-stream".split(), check=False, capture_output=True).returncode != 0:
+         raise DockerNotReachableError()
+
+
+ def _check_api_health(api_url: str) -> None:
+     health_url = f"{api_url}/health"
+     msg = (
+         "The DSP-API could not be reached. Please check if your stack is healthy "
+         "or start a stack with 'dsp-tools start-stack' if none is running."
+     )
+     try:
+         response = requests.get(health_url, timeout=2)
+         if not response.ok:
+             if api_url != LOCALHOST_API:
+                 msg = (
+                     f"The DSP-API could not be reached (returned status {response.status_code}). "
+                     f"Please contact the DaSCH engineering team for help."
+                 )
+             logger.error(msg)
+             raise DspApiNotReachableError(msg)
+         logger.debug(f"DSP API health check passed: {health_url}")
+     except requests.exceptions.RequestException as e:
+         logger.error(e)
+         if api_url != LOCALHOST_API:
+             msg = "The DSP-API responded with a request exception. Please contact the DaSCH engineering team for help."
+         logger.error(msg)
+         raise DspApiNotReachableError(msg) from None
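
Taken together, `PathDependencies` and `NetworkRequirements` let every command fail fast before any work starts. A short usage sketch, assuming the dataclasses in `dsp_tools/cli/args.py` accept the fields used throughout `call_action_with_network.py` above; the file and folder names are placeholders:

from pathlib import Path

from dsp_tools.cli.args import NetworkRequirements
from dsp_tools.cli.args import PathDependencies
from dsp_tools.cli.utils import check_input_dependencies

# Raises UserFilepathNotFoundError / UserDirectoryNotFoundError if the inputs are missing,
# and checks Docker plus the /health endpoint when the target is the local stack.
check_input_dependencies(
    paths=PathDependencies([Path("data.xml")], required_directories=[Path("images")]),
    network_dependencies=NetworkRequirements(api_url="http://0.0.0.0:3333"),
)
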
dsp_tools/clients/list_client.py ADDED
@@ -0,0 +1,49 @@
+ from __future__ import annotations
+
+ from dataclasses import dataclass
+ from typing import Any
+ from typing import Protocol
+
+ from dsp_tools.clients.authentication_client import AuthenticationClient
+
+
+ @dataclass
+ class OneList:
+     list_iri: str
+     list_name: str
+     nodes: list[OneNode]
+
+     def hlist(self) -> str:
+         return f'"hlist=<{self.list_iri}>"'
+
+
+ @dataclass
+ class OneNode:
+     name: str
+     iri: str
+
+
+ @dataclass
+ class ListGetClient(Protocol):
+     """Client to request and reformat the lists of a project."""
+
+     api_url: str
+     shortcode: str
+
+     def get_all_lists_and_nodes(self) -> list[OneList]:
+         """Get all lists and its nodes from a project."""
+
+     def get_all_list_iris_and_names(self) -> dict[str, str]:
+         """Get all list names and IRIs"""
+
+
+ @dataclass
+ class ListCreateClient(Protocol):
+     api_url: str
+     auth: AuthenticationClient
+
+     def create_new_list(self, list_info: dict[str, Any]) -> str | None:
+         """Create a new list."""
+
+     def add_list_node(self, node_info: dict[str, Any], parent_iri: str) -> str | None:
+         """Add a list node to an existing list."""
dsp_tools/clients/list_client_live.py ADDED
@@ -0,0 +1,157 @@
+ from dataclasses import dataclass
+ from http import HTTPStatus
+ from typing import Any
+ from typing import cast
+ from urllib.parse import quote_plus
+
+ import requests
+ from loguru import logger
+ from requests import RequestException
+ from requests import Response
+
+ from dsp_tools.clients.authentication_client import AuthenticationClient
+ from dsp_tools.clients.list_client import ListCreateClient
+ from dsp_tools.clients.list_client import ListGetClient
+ from dsp_tools.clients.list_client import OneList
+ from dsp_tools.clients.list_client import OneNode
+ from dsp_tools.error.exceptions import BadCredentialsError
+ from dsp_tools.error.exceptions import InternalError
+ from dsp_tools.utils.request_utils import RequestParameters
+ from dsp_tools.utils.request_utils import log_request
+ from dsp_tools.utils.request_utils import log_response
+
+ TIMEOUT = 60
+
+
+ @dataclass
+ class ListGetClientLive(ListGetClient):
+     """Client to request and reformat the lists of a project."""
+
+     api_url: str
+     shortcode: str
+
+     def get_all_lists_and_nodes(self) -> list[OneList]:
+         list_json = self._get_all_list_iris()
+         all_iris = self._extract_list_iris(list_json)
+         all_lists = [self._get_one_list(iri) for iri in all_iris]
+         return [self._reformat_one_list(lst) for lst in all_lists]
+
+     def get_all_list_iris_and_names(self) -> dict[str, str]:
+         response_json = self._get_all_list_iris()
+         iris = self._extract_list_iris(response_json)
+         names = [x["name"] for x in response_json["lists"]]
+         return dict(zip(names, iris))
+
+     def _get_all_list_iris(self) -> dict[str, Any]:
+         url = f"{self.api_url}/admin/lists?projectShortcode={self.shortcode}"
+         timeout = 10
+         log_request(RequestParameters("GET", url, timeout))
+         response = requests.get(url=url, timeout=timeout)
+         log_response(response)
+         if not response.ok:
+             raise InternalError(f"Failed Request: {response.status_code} {response.text}")
+         json_response = cast(dict[str, Any], response.json())
+         return json_response
+
+     def _extract_list_iris(self, response_json: dict[str, Any]) -> list[str]:
+         return [x["id"] for x in response_json["lists"]]
+
+     def _get_one_list(self, list_iri: str) -> dict[str, Any]:
+         encoded_list_iri = quote_plus(list_iri)
+         url = f"{self.api_url}/admin/lists/{encoded_list_iri}"
+         timeout = 30
+         log_request(RequestParameters("GET", url, timeout))
+         response = requests.get(url=url, timeout=timeout)
+         log_response(response, include_response_content=False)
+         if not response.ok:
+             raise InternalError(f"Failed Request: {response.status_code} {response.text}")
+         response_json = cast(dict[str, Any], response.json())
+         return response_json
+
+     def _reformat_one_list(self, response_json: dict[str, Any]) -> OneList:
+         list_name = response_json["list"]["listinfo"]["name"]
+         list_id = response_json["list"]["listinfo"]["id"]
+         nodes = response_json["list"]["children"]
+         all_nodes = []
+         for child in nodes:
+             all_nodes.append(OneNode(child["name"], child["id"]))
+             if node_child := child.get("children"):
+                 self._reformat_children(node_child, all_nodes)
+         return OneList(list_iri=list_id, list_name=list_name, nodes=all_nodes)
+
+     def _reformat_children(self, list_child: list[dict[str, Any]], current_nodes: list[OneNode]) -> None:
+         for child in list_child:
+             current_nodes.append(OneNode(child["name"], child["id"]))
+             if grand_child := child.get("children"):
+                 self._reformat_children(grand_child, current_nodes)
+
+
+ @dataclass
+ class ListCreateClientLive(ListCreateClient):
+     api_url: str
+     project_iri: str
+     auth: AuthenticationClient
+
+     def create_new_list(self, list_info: dict[str, Any]) -> str | None:
+         url = f"{self.api_url}/admin/lists"
+         try:
+             headers = self._get_request_header()
+             response = _post_and_log_request(url, list_info, headers)
+         except RequestException as err:
+             logger.exception(err)
+             return None
+         if response.ok:
+             result = response.json()
+             list_iri = cast(str, result["list"]["listinfo"]["id"])
+             return list_iri
+         if response.status_code == HTTPStatus.FORBIDDEN:
+             raise BadCredentialsError(
+                 "Only a project or system administrator can create lists. "
+                 "Your permissions are insufficient for this action."
+             )
+         logger.exception(f"Failed to create list: '{list_info['name']}'")
+         return None
+
+     def add_list_node(self, node_info: dict[str, Any], parent_iri: str) -> str | None:
+         encoded_parent_iri = quote_plus(parent_iri)
+         url = f"{self.api_url}/admin/lists/{encoded_parent_iri}"
+         try:
+             headers = self._get_request_header()
+             response = _post_and_log_request(url, node_info, headers)
+         except RequestException as err:
+             logger.error(err)
+             return None
+         if response.ok:
+             result = response.json()
+             node_iri = cast(str, result["nodeinfo"]["id"])
+             return node_iri
+         if response.status_code == HTTPStatus.FORBIDDEN:
+             raise BadCredentialsError(
+                 "Only a project or system administrator can add nodes to lists. "
+                 "Your permissions are insufficient for this action."
+             )
+         logger.error(f"Failed to add node: '{node_info['name']}'")
+         return None
+
+     def _get_request_header(self) -> dict[str, str]:
+         return {
+             "Content-Type": "application/json",
+             "Authorization": f"Bearer {self.auth.get_token()}",
+         }
+
+
+ def _post_and_log_request(
+     url: str,
+     data: dict[str, Any],
+     headers: dict[str, str] | None = None,
+ ) -> Response:
+     params = RequestParameters("POST", url, TIMEOUT, data, headers)
+     log_request(params)
+     response = requests.post(
+         url=params.url,
+         headers=params.headers,
+         data=params.data_serialized,
+         timeout=params.timeout,
+     )
+     log_response(response)
+     return response
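
A short usage sketch for the live GET client; it performs real HTTP requests, so it needs a reachable DSP-API, and the URL and shortcode below are placeholders:

from dsp_tools.clients.list_client_live import ListGetClientLive

client = ListGetClientLive(api_url="http://0.0.0.0:3333", shortcode="0001")

for lst in client.get_all_lists_and_nodes():
    # OneList.hlist() renders the '"hlist=<...>"' gui-attribute string for this list.
    print(lst.list_name, lst.hlist(), len(lst.nodes))
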
dsp_tools/clients/{ontology_client.py → ontology_clients.py} RENAMED
@@ -6,9 +6,9 @@ from rdflib import Literal
  from dsp_tools.clients.authentication_client import AuthenticationClient


- class OntologyClient(Protocol):
+ class OntologyCreateClient(Protocol):
      """
-     Protocol class/interface for the ontology endpoint in the API.
+     Protocol class/interface to create / update the ontology through the API.
      """

      server: str
@@ -19,3 +19,18 @@ class OntologyClient(Protocol):

      def post_resource_cardinalities(self, cardinality_graph: dict[str, Any]) -> Literal | None:
          """Add cardinalities to an existing resource class."""
+
+
+ class OntologyGetClient(Protocol):
+     """
+     Protocol class/interface to get ontologies from the API.
+     """
+
+     api_url: str
+     shortcode: str
+
+     def get_knora_api(self) -> str:
+         """Get the knora-api ontology."""
+
+     def get_ontologies(self) -> tuple[list[str], list[str]]:
+         """Get all project ontologies."""
dsp_tools/clients/{ontology_client_live.py → ontology_create_client_live.py} RENAMED
@@ -11,7 +11,7 @@ from requests import ReadTimeout
  from requests import Response

  from dsp_tools.clients.authentication_client import AuthenticationClient
- from dsp_tools.clients.ontology_client import OntologyClient
+ from dsp_tools.clients.ontology_clients import OntologyCreateClient
  from dsp_tools.error.exceptions import BadCredentialsError
  from dsp_tools.error.exceptions import UnexpectedApiResponseError
  from dsp_tools.utils.rdflib_constants import KNORA_API
@@ -24,7 +24,7 @@ TIMEOUT = 60


  @dataclass
- class OntologyClientLive(OntologyClient):
+ class OntologyCreateClientLive(OntologyCreateClient):
      """
      Client for the ontology endpoint in the API.
      """