dsp-tools 9.1.0.post11__py3-none-any.whl → 18.3.0.post13__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- dsp_tools/__init__.py +4 -0
- dsp_tools/cli/args.py +36 -0
- dsp_tools/cli/call_action.py +51 -231
- dsp_tools/cli/call_action_files_only.py +101 -0
- dsp_tools/cli/call_action_with_network.py +207 -0
- dsp_tools/cli/create_parsers.py +156 -58
- dsp_tools/cli/entry_point.py +56 -26
- dsp_tools/cli/utils.py +87 -0
- dsp_tools/clients/CLAUDE.md +420 -0
- dsp_tools/clients/authentication_client.py +14 -0
- dsp_tools/clients/authentication_client_live.py +66 -0
- dsp_tools/{utils → clients}/connection.py +2 -18
- dsp_tools/clients/connection_live.py +233 -0
- dsp_tools/clients/fuseki_metrics.py +60 -0
- dsp_tools/clients/group_user_clients.py +35 -0
- dsp_tools/clients/group_user_clients_live.py +181 -0
- dsp_tools/clients/legal_info_client.py +23 -0
- dsp_tools/clients/legal_info_client_live.py +132 -0
- dsp_tools/clients/list_client.py +49 -0
- dsp_tools/clients/list_client_live.py +166 -0
- dsp_tools/clients/metadata_client.py +24 -0
- dsp_tools/clients/metadata_client_live.py +47 -0
- dsp_tools/clients/ontology_clients.py +49 -0
- dsp_tools/clients/ontology_create_client_live.py +166 -0
- dsp_tools/clients/ontology_get_client_live.py +80 -0
- dsp_tools/clients/permissions_client.py +68 -0
- dsp_tools/clients/project_client.py +16 -0
- dsp_tools/clients/project_client_live.py +66 -0
- dsp_tools/commands/create/communicate_problems.py +24 -0
- dsp_tools/commands/create/create.py +134 -0
- dsp_tools/commands/create/create_on_server/cardinalities.py +111 -0
- dsp_tools/commands/create/create_on_server/classes.py +99 -0
- dsp_tools/commands/create/create_on_server/complete_ontologies.py +116 -0
- dsp_tools/commands/create/create_on_server/default_permissions.py +134 -0
- dsp_tools/commands/create/create_on_server/group_users.py +165 -0
- dsp_tools/commands/create/create_on_server/lists.py +163 -0
- dsp_tools/commands/create/create_on_server/mappers.py +12 -0
- dsp_tools/commands/create/create_on_server/onto_utils.py +74 -0
- dsp_tools/commands/create/create_on_server/ontology.py +52 -0
- dsp_tools/commands/create/create_on_server/project.py +68 -0
- dsp_tools/commands/create/create_on_server/properties.py +119 -0
- dsp_tools/commands/create/exceptions.py +29 -0
- dsp_tools/commands/create/lists_only.py +66 -0
- dsp_tools/commands/create/models/create_problems.py +87 -0
- dsp_tools/commands/create/models/parsed_ontology.py +88 -0
- dsp_tools/commands/create/models/parsed_project.py +81 -0
- dsp_tools/commands/create/models/rdf_ontology.py +12 -0
- dsp_tools/commands/create/models/server_project_info.py +100 -0
- dsp_tools/commands/create/parsing/parse_lists.py +45 -0
- dsp_tools/commands/create/parsing/parse_ontology.py +243 -0
- dsp_tools/commands/create/parsing/parse_project.py +149 -0
- dsp_tools/commands/create/parsing/parsing_utils.py +40 -0
- dsp_tools/commands/create/project_validate.py +595 -0
- dsp_tools/commands/create/serialisation/ontology.py +119 -0
- dsp_tools/commands/create/serialisation/project.py +44 -0
- dsp_tools/commands/excel2json/CLAUDE.md +101 -0
- dsp_tools/commands/excel2json/json_header.py +57 -23
- dsp_tools/commands/excel2json/{new_lists → lists}/compliance_checks.py +26 -26
- dsp_tools/commands/excel2json/{new_lists/make_new_lists.py → lists/make_lists.py} +19 -18
- dsp_tools/commands/excel2json/{new_lists → lists}/models/input_error.py +1 -12
- dsp_tools/commands/excel2json/{new_lists → lists}/models/serialise.py +9 -5
- dsp_tools/commands/excel2json/{new_lists → lists}/utils.py +4 -4
- dsp_tools/commands/excel2json/models/input_error.py +31 -11
- dsp_tools/commands/excel2json/models/json_header.py +53 -15
- dsp_tools/commands/excel2json/models/ontology.py +4 -3
- dsp_tools/commands/excel2json/{lists.py → old_lists.py} +26 -112
- dsp_tools/commands/excel2json/project.py +78 -34
- dsp_tools/commands/excel2json/properties.py +57 -36
- dsp_tools/commands/excel2json/resources.py +32 -12
- dsp_tools/commands/excel2json/utils.py +20 -1
- dsp_tools/commands/excel2xml/__init__.py +2 -2
- dsp_tools/commands/excel2xml/excel2xml_cli.py +7 -15
- dsp_tools/commands/excel2xml/excel2xml_lib.py +138 -493
- dsp_tools/commands/excel2xml/propertyelement.py +5 -5
- dsp_tools/commands/{project → get}/get.py +29 -13
- dsp_tools/commands/get/get_permissions.py +257 -0
- dsp_tools/commands/get/get_permissions_legacy.py +89 -0
- dsp_tools/commands/{project/models → get/legacy_models}/context.py +6 -6
- dsp_tools/commands/{project/models → get/legacy_models}/group.py +5 -10
- dsp_tools/commands/{project/models → get/legacy_models}/listnode.py +5 -35
- dsp_tools/commands/{project/models → get/legacy_models}/model.py +1 -1
- dsp_tools/commands/{project/models → get/legacy_models}/ontology.py +9 -14
- dsp_tools/commands/{project/models → get/legacy_models}/project.py +13 -6
- dsp_tools/commands/{project/models → get/legacy_models}/propertyclass.py +9 -16
- dsp_tools/commands/{project/models → get/legacy_models}/resourceclass.py +8 -46
- dsp_tools/commands/{project/models → get/legacy_models}/user.py +19 -60
- dsp_tools/commands/get/models/permissions_models.py +10 -0
- dsp_tools/commands/id2iri.py +20 -10
- dsp_tools/commands/ingest_xmlupload/bulk_ingest_client.py +81 -56
- dsp_tools/commands/ingest_xmlupload/create_resources/apply_ingest_id.py +4 -10
- dsp_tools/commands/ingest_xmlupload/create_resources/upload_xml.py +97 -37
- dsp_tools/commands/ingest_xmlupload/create_resources/user_information.py +2 -2
- dsp_tools/commands/ingest_xmlupload/ingest_files/ingest_files.py +9 -10
- dsp_tools/commands/ingest_xmlupload/upload_files/filechecker.py +3 -3
- dsp_tools/commands/ingest_xmlupload/upload_files/input_error.py +2 -10
- dsp_tools/commands/ingest_xmlupload/upload_files/upload_failures.py +12 -2
- dsp_tools/commands/ingest_xmlupload/upload_files/upload_files.py +8 -9
- dsp_tools/commands/resume_xmlupload/resume_xmlupload.py +18 -18
- dsp_tools/commands/start_stack.py +126 -77
- dsp_tools/commands/update_legal/CLAUDE.md +344 -0
- dsp_tools/commands/update_legal/__init__.py +0 -0
- dsp_tools/commands/update_legal/core.py +182 -0
- dsp_tools/commands/update_legal/csv_operations.py +135 -0
- dsp_tools/commands/update_legal/models.py +87 -0
- dsp_tools/commands/update_legal/xml_operations.py +247 -0
- dsp_tools/commands/validate_data/CLAUDE.md +159 -0
- dsp_tools/commands/validate_data/__init__.py +0 -0
- dsp_tools/commands/validate_data/constants.py +59 -0
- dsp_tools/commands/validate_data/mappers.py +143 -0
- dsp_tools/commands/validate_data/models/__init__.py +0 -0
- dsp_tools/commands/validate_data/models/api_responses.py +45 -0
- dsp_tools/commands/validate_data/models/input_problems.py +119 -0
- dsp_tools/commands/validate_data/models/rdf_like_data.py +117 -0
- dsp_tools/commands/validate_data/models/validation.py +106 -0
- dsp_tools/commands/validate_data/prepare_data/__init__.py +0 -0
- dsp_tools/commands/validate_data/prepare_data/get_rdf_like_data.py +296 -0
- dsp_tools/commands/validate_data/prepare_data/make_data_graph.py +91 -0
- dsp_tools/commands/validate_data/prepare_data/prepare_data.py +184 -0
- dsp_tools/commands/validate_data/process_validation_report/__init__.py +0 -0
- dsp_tools/commands/validate_data/process_validation_report/get_user_validation_message.py +358 -0
- dsp_tools/commands/validate_data/process_validation_report/query_validation_result.py +507 -0
- dsp_tools/commands/validate_data/process_validation_report/reformat_validation_results.py +150 -0
- dsp_tools/commands/validate_data/shacl_cli_validator.py +70 -0
- dsp_tools/commands/validate_data/sparql/__init__.py +0 -0
- dsp_tools/commands/{xml_validate/sparql/resource_shacl.py → validate_data/sparql/cardinality_shacl.py} +45 -47
- dsp_tools/commands/validate_data/sparql/construct_shacl.py +92 -0
- dsp_tools/commands/validate_data/sparql/legal_info_shacl.py +36 -0
- dsp_tools/commands/validate_data/sparql/value_shacl.py +357 -0
- dsp_tools/commands/validate_data/utils.py +59 -0
- dsp_tools/commands/validate_data/validate_data.py +283 -0
- dsp_tools/commands/validate_data/validation/__init__.py +0 -0
- dsp_tools/commands/validate_data/validation/check_duplicate_files.py +55 -0
- dsp_tools/commands/validate_data/validation/check_for_unknown_classes.py +67 -0
- dsp_tools/commands/validate_data/validation/get_validation_report.py +94 -0
- dsp_tools/commands/validate_data/validation/validate_ontology.py +107 -0
- dsp_tools/commands/xmlupload/CLAUDE.md +292 -0
- dsp_tools/commands/xmlupload/make_rdf_graph/__init__.py +0 -0
- dsp_tools/commands/xmlupload/make_rdf_graph/constants.py +63 -0
- dsp_tools/commands/xmlupload/make_rdf_graph/jsonld_utils.py +44 -0
- dsp_tools/commands/xmlupload/make_rdf_graph/make_file_value.py +77 -0
- dsp_tools/commands/xmlupload/make_rdf_graph/make_resource_and_values.py +114 -0
- dsp_tools/commands/xmlupload/make_rdf_graph/make_values.py +262 -0
- dsp_tools/commands/xmlupload/models/bitstream_info.py +18 -0
- dsp_tools/commands/xmlupload/models/formatted_text_value.py +0 -25
- dsp_tools/commands/xmlupload/models/ingest.py +56 -70
- dsp_tools/commands/xmlupload/models/input_problems.py +6 -14
- dsp_tools/commands/xmlupload/models/lookup_models.py +21 -0
- dsp_tools/commands/xmlupload/models/permission.py +0 -39
- dsp_tools/commands/xmlupload/models/{deserialise/xmlpermission.py → permissions_parsed.py} +2 -2
- dsp_tools/commands/xmlupload/models/processed/__init__.py +0 -0
- dsp_tools/commands/xmlupload/models/processed/file_values.py +29 -0
- dsp_tools/commands/xmlupload/models/processed/res.py +27 -0
- dsp_tools/commands/xmlupload/models/processed/values.py +101 -0
- dsp_tools/commands/xmlupload/models/rdf_models.py +26 -0
- dsp_tools/commands/xmlupload/models/upload_clients.py +3 -3
- dsp_tools/commands/xmlupload/models/upload_state.py +2 -4
- dsp_tools/commands/xmlupload/prepare_xml_input/__init__.py +0 -0
- dsp_tools/commands/xmlupload/{ark2iri.py → prepare_xml_input/ark2iri.py} +1 -1
- dsp_tools/commands/xmlupload/prepare_xml_input/get_processed_resources.py +252 -0
- dsp_tools/commands/xmlupload/{iiif_uri_validator.py → prepare_xml_input/iiif_uri_validator.py} +2 -14
- dsp_tools/commands/xmlupload/{list_client.py → prepare_xml_input/list_client.py} +15 -10
- dsp_tools/commands/xmlupload/prepare_xml_input/prepare_xml_input.py +67 -0
- dsp_tools/commands/xmlupload/prepare_xml_input/read_validate_xml_file.py +58 -0
- dsp_tools/commands/xmlupload/prepare_xml_input/transform_input_values.py +118 -0
- dsp_tools/commands/xmlupload/resource_create_client.py +7 -468
- dsp_tools/commands/xmlupload/richtext_id2iri.py +37 -0
- dsp_tools/commands/xmlupload/stash/{construct_and_analyze_graph.py → analyse_circular_reference_graph.py} +64 -157
- dsp_tools/commands/xmlupload/stash/create_info_for_graph.py +53 -0
- dsp_tools/commands/xmlupload/stash/graph_models.py +13 -8
- dsp_tools/commands/xmlupload/stash/stash_circular_references.py +48 -115
- dsp_tools/commands/xmlupload/stash/stash_models.py +4 -9
- dsp_tools/commands/xmlupload/stash/upload_stashed_resptr_props.py +34 -40
- dsp_tools/commands/xmlupload/stash/upload_stashed_xml_texts.py +98 -108
- dsp_tools/commands/xmlupload/upload_config.py +8 -0
- dsp_tools/commands/xmlupload/write_diagnostic_info.py +14 -9
- dsp_tools/commands/xmlupload/xmlupload.py +214 -192
- dsp_tools/config/__init__.py +0 -0
- dsp_tools/config/logger_config.py +69 -0
- dsp_tools/{utils → config}/warnings_config.py +4 -1
- dsp_tools/error/__init__.py +0 -0
- dsp_tools/error/custom_warnings.py +39 -0
- dsp_tools/error/exceptions.py +204 -0
- dsp_tools/error/problems.py +10 -0
- dsp_tools/error/xmllib_errors.py +20 -0
- dsp_tools/error/xmllib_warnings.py +54 -0
- dsp_tools/error/xmllib_warnings_util.py +159 -0
- dsp_tools/error/xsd_validation_error_msg.py +19 -0
- dsp_tools/legacy_models/__init__.py +0 -0
- dsp_tools/{models → legacy_models}/datetimestamp.py +7 -7
- dsp_tools/{models → legacy_models}/langstring.py +1 -1
- dsp_tools/{models → legacy_models}/projectContext.py +4 -4
- dsp_tools/resources/schema/data.xsd +108 -83
- dsp_tools/resources/schema/lists-only.json +4 -23
- dsp_tools/resources/schema/project.json +80 -35
- dsp_tools/resources/schema/properties-only.json +1 -4
- dsp_tools/resources/start-stack/docker-compose.override-host.j2 +11 -0
- dsp_tools/resources/start-stack/docker-compose.yml +34 -30
- dsp_tools/resources/start-stack/dsp-app-config.json +45 -0
- dsp_tools/resources/start-stack/dsp-app-config.override-host.j2 +26 -0
- dsp_tools/resources/validate_data/api-shapes-resource-cardinalities.ttl +191 -0
- dsp_tools/resources/validate_data/api-shapes.ttl +804 -0
- dsp_tools/resources/validate_data/shacl-cli-image.yml +4 -0
- dsp_tools/resources/validate_data/validate-ontology.ttl +99 -0
- dsp_tools/utils/ansi_colors.py +32 -0
- dsp_tools/utils/data_formats/__init__.py +0 -0
- dsp_tools/utils/{date_util.py → data_formats/date_util.py} +13 -1
- dsp_tools/utils/data_formats/iri_util.py +30 -0
- dsp_tools/utils/{shared.py → data_formats/shared.py} +1 -35
- dsp_tools/utils/{uri_util.py → data_formats/uri_util.py} +12 -2
- dsp_tools/utils/fuseki_bloating.py +63 -0
- dsp_tools/utils/json_parsing.py +22 -0
- dsp_tools/utils/rdf_constants.py +42 -0
- dsp_tools/utils/rdflib_utils.py +10 -0
- dsp_tools/utils/replace_id_with_iri.py +66 -0
- dsp_tools/utils/request_utils.py +238 -0
- dsp_tools/utils/xml_parsing/__init__.py +0 -0
- dsp_tools/utils/xml_parsing/get_lookups.py +32 -0
- dsp_tools/utils/xml_parsing/get_parsed_resources.py +325 -0
- dsp_tools/utils/xml_parsing/models/__init__.py +0 -0
- dsp_tools/utils/xml_parsing/models/parsed_resource.py +76 -0
- dsp_tools/utils/xml_parsing/parse_clean_validate_xml.py +137 -0
- dsp_tools/xmllib/CLAUDE.md +302 -0
- dsp_tools/xmllib/__init__.py +49 -0
- dsp_tools/xmllib/general_functions.py +877 -0
- dsp_tools/xmllib/internal/__init__.py +0 -0
- dsp_tools/xmllib/internal/checkers.py +162 -0
- dsp_tools/xmllib/internal/circumvent_circular_imports.py +36 -0
- dsp_tools/xmllib/internal/constants.py +46 -0
- dsp_tools/xmllib/internal/input_converters.py +155 -0
- dsp_tools/xmllib/internal/serialise_file_value.py +57 -0
- dsp_tools/xmllib/internal/serialise_resource.py +177 -0
- dsp_tools/xmllib/internal/serialise_values.py +152 -0
- dsp_tools/xmllib/internal/type_aliases.py +11 -0
- dsp_tools/xmllib/models/config_options.py +28 -0
- dsp_tools/xmllib/models/date_formats.py +48 -0
- dsp_tools/xmllib/models/dsp_base_resources.py +1380 -400
- dsp_tools/xmllib/models/internal/__init__.py +0 -0
- dsp_tools/xmllib/models/internal/file_values.py +172 -0
- dsp_tools/xmllib/models/internal/geometry.py +162 -0
- dsp_tools/xmllib/models/{migration_metadata.py → internal/migration_metadata.py} +14 -10
- dsp_tools/xmllib/models/internal/serialise_permissions.py +66 -0
- dsp_tools/xmllib/models/internal/values.py +342 -0
- dsp_tools/xmllib/models/licenses/__init__.py +0 -0
- dsp_tools/xmllib/models/licenses/other.py +59 -0
- dsp_tools/xmllib/models/licenses/recommended.py +107 -0
- dsp_tools/xmllib/models/permissions.py +41 -0
- dsp_tools/xmllib/models/res.py +1782 -0
- dsp_tools/xmllib/models/root.py +313 -26
- dsp_tools/xmllib/value_checkers.py +310 -47
- dsp_tools/xmllib/value_converters.py +765 -8
- dsp_tools-18.3.0.post13.dist-info/METADATA +90 -0
- dsp_tools-18.3.0.post13.dist-info/RECORD +286 -0
- dsp_tools-18.3.0.post13.dist-info/WHEEL +4 -0
- {dsp_tools-9.1.0.post11.dist-info → dsp_tools-18.3.0.post13.dist-info}/entry_points.txt +1 -0
- dsp_tools/commands/project/create/project_create.py +0 -1107
- dsp_tools/commands/project/create/project_create_lists.py +0 -204
- dsp_tools/commands/project/create/project_validate.py +0 -453
- dsp_tools/commands/project/models/project_definition.py +0 -12
- dsp_tools/commands/rosetta.py +0 -124
- dsp_tools/commands/template.py +0 -30
- dsp_tools/commands/xml_validate/api_connection.py +0 -122
- dsp_tools/commands/xml_validate/deserialise_input.py +0 -135
- dsp_tools/commands/xml_validate/make_data_rdf.py +0 -193
- dsp_tools/commands/xml_validate/models/data_deserialised.py +0 -108
- dsp_tools/commands/xml_validate/models/data_rdf.py +0 -214
- dsp_tools/commands/xml_validate/models/input_problems.py +0 -191
- dsp_tools/commands/xml_validate/models/validation.py +0 -29
- dsp_tools/commands/xml_validate/reformat_validaton_result.py +0 -89
- dsp_tools/commands/xml_validate/sparql/construct_shapes.py +0 -16
- dsp_tools/commands/xml_validate/xml_validate.py +0 -151
- dsp_tools/commands/xmlupload/check_consistency_with_ontology.py +0 -253
- dsp_tools/commands/xmlupload/models/deserialise/deserialise_value.py +0 -236
- dsp_tools/commands/xmlupload/models/deserialise/xmlresource.py +0 -171
- dsp_tools/commands/xmlupload/models/namespace_context.py +0 -39
- dsp_tools/commands/xmlupload/models/ontology_lookup_models.py +0 -161
- dsp_tools/commands/xmlupload/models/ontology_problem_models.py +0 -178
- dsp_tools/commands/xmlupload/models/serialise/jsonld_serialiser.py +0 -40
- dsp_tools/commands/xmlupload/models/serialise/serialise_value.py +0 -51
- dsp_tools/commands/xmlupload/ontology_client.py +0 -92
- dsp_tools/commands/xmlupload/project_client.py +0 -91
- dsp_tools/commands/xmlupload/read_validate_xml_file.py +0 -99
- dsp_tools/models/custom_warnings.py +0 -31
- dsp_tools/models/exceptions.py +0 -90
- dsp_tools/resources/0100-template-repo/template.json +0 -45
- dsp_tools/resources/0100-template-repo/template.xml +0 -27
- dsp_tools/resources/start-stack/docker-compose-validation.yml +0 -5
- dsp_tools/resources/start-stack/start-stack-config.yml +0 -4
- dsp_tools/resources/xml_validate/api-shapes.ttl +0 -411
- dsp_tools/resources/xml_validate/replace_namespace.xslt +0 -61
- dsp_tools/utils/connection_live.py +0 -383
- dsp_tools/utils/iri_util.py +0 -14
- dsp_tools/utils/logger_config.py +0 -41
- dsp_tools/utils/set_encoder.py +0 -20
- dsp_tools/utils/xml_utils.py +0 -145
- dsp_tools/utils/xml_validation.py +0 -197
- dsp_tools/utils/xml_validation_models.py +0 -68
- dsp_tools/xmllib/models/file_values.py +0 -78
- dsp_tools/xmllib/models/resource.py +0 -415
- dsp_tools/xmllib/models/values.py +0 -428
- dsp_tools-9.1.0.post11.dist-info/METADATA +0 -130
- dsp_tools-9.1.0.post11.dist-info/RECORD +0 -167
- dsp_tools-9.1.0.post11.dist-info/WHEEL +0 -4
- dsp_tools-9.1.0.post11.dist-info/licenses/LICENSE +0 -674
- /dsp_tools/{commands/excel2json/new_lists → clients}/__init__.py +0 -0
- /dsp_tools/commands/{excel2json/new_lists/models → create}/__init__.py +0 -0
- /dsp_tools/commands/{project → create/create_on_server}/__init__.py +0 -0
- /dsp_tools/commands/{project/create → create/models}/__init__.py +0 -0
- /dsp_tools/commands/{project/models → create/parsing}/__init__.py +0 -0
- /dsp_tools/commands/{xml_validate → create/serialisation}/__init__.py +0 -0
- /dsp_tools/commands/{xml_validate/models → excel2json/lists}/__init__.py +0 -0
- /dsp_tools/commands/{xml_validate/sparql → excel2json/lists/models}/__init__.py +0 -0
- /dsp_tools/commands/excel2json/{new_lists → lists}/models/deserialise.py +0 -0
- /dsp_tools/commands/{xmlupload/models/deserialise → get}/__init__.py +0 -0
- /dsp_tools/commands/{xmlupload/models/serialise → get/legacy_models}/__init__.py +0 -0
- /dsp_tools/commands/{project/models → get/legacy_models}/helpers.py +0 -0
- /dsp_tools/{models → commands/get/models}/__init__.py +0 -0
|
@@ -1,1107 +0,0 @@
|
|
|
1
|
-
"""This module handles the ontology creation, update and upload to a DSP server. This includes the creation and update
|
|
2
|
-
of the project, the creation of groups, users, lists, resource classes, properties and cardinalities."""
|
|
3
|
-
|
|
4
|
-
from pathlib import Path
|
|
5
|
-
from typing import Any
|
|
6
|
-
from typing import Optional
|
|
7
|
-
from typing import cast
|
|
8
|
-
|
|
9
|
-
import regex
|
|
10
|
-
from loguru import logger
|
|
11
|
-
|
|
12
|
-
from dsp_tools.cli.args import ServerCredentials
|
|
13
|
-
from dsp_tools.commands.excel2json.lists import expand_lists_from_excel
|
|
14
|
-
from dsp_tools.commands.project.create.project_create_lists import create_lists_on_server
|
|
15
|
-
from dsp_tools.commands.project.create.project_validate import validate_project
|
|
16
|
-
from dsp_tools.commands.project.models.context import Context
|
|
17
|
-
from dsp_tools.commands.project.models.group import Group
|
|
18
|
-
from dsp_tools.commands.project.models.helpers import Cardinality
|
|
19
|
-
from dsp_tools.commands.project.models.ontology import Ontology
|
|
20
|
-
from dsp_tools.commands.project.models.project import Project
|
|
21
|
-
from dsp_tools.commands.project.models.project_definition import ProjectDefinition
|
|
22
|
-
from dsp_tools.commands.project.models.propertyclass import PropertyClass
|
|
23
|
-
from dsp_tools.commands.project.models.resourceclass import ResourceClass
|
|
24
|
-
from dsp_tools.commands.project.models.user import User
|
|
25
|
-
from dsp_tools.models.datetimestamp import DateTimeStamp
|
|
26
|
-
from dsp_tools.models.exceptions import BaseError
|
|
27
|
-
from dsp_tools.models.exceptions import UserError
|
|
28
|
-
from dsp_tools.models.langstring import LangString
|
|
29
|
-
from dsp_tools.utils.connection import Connection
|
|
30
|
-
from dsp_tools.utils.connection_live import ConnectionLive
|
|
31
|
-
from dsp_tools.utils.shared import parse_json_input
|
|
32
|
-
|
|
33
|
-
|
|
34
|
-
def _create_project_on_server(
|
|
35
|
-
project_definition: ProjectDefinition,
|
|
36
|
-
con: Connection,
|
|
37
|
-
) -> tuple[Project, bool]:
|
|
38
|
-
"""
|
|
39
|
-
Create the project on the DSP server.
|
|
40
|
-
If it already exists: update its longname, description, and keywords.
|
|
41
|
-
|
|
42
|
-
Args:
|
|
43
|
-
project_definition: object with information about the project
|
|
44
|
-
con: connection to the DSP server
|
|
45
|
-
|
|
46
|
-
Raises:
|
|
47
|
-
UserError: if the project cannot be created on the DSP server
|
|
48
|
-
|
|
49
|
-
Returns:
|
|
50
|
-
a tuple of the remote project and the success status (True if everything went smoothly, False otherwise)
|
|
51
|
-
"""
|
|
52
|
-
all_projects = Project.getAllProjects(con=con)
|
|
53
|
-
if project_definition.shortcode in [proj.shortcode for proj in all_projects]:
|
|
54
|
-
msg = (
|
|
55
|
-
f"The project with the shortcode '{project_definition.shortcode}' already exists on the server.\n"
|
|
56
|
-
f"No changes were made to the project metadata.\n"
|
|
57
|
-
f"Continue with the upload of lists and ontologies ..."
|
|
58
|
-
)
|
|
59
|
-
print(f"WARNING: {msg}")
|
|
60
|
-
logger.warning(msg)
|
|
61
|
-
|
|
62
|
-
success = True
|
|
63
|
-
project_local = Project(
|
|
64
|
-
con=con,
|
|
65
|
-
shortcode=project_definition.shortcode,
|
|
66
|
-
shortname=project_definition.shortname,
|
|
67
|
-
longname=project_definition.longname,
|
|
68
|
-
description=LangString(project_definition.descriptions), # type: ignore[arg-type]
|
|
69
|
-
keywords=set(project_definition.keywords) if project_definition.keywords else None,
|
|
70
|
-
selfjoin=False,
|
|
71
|
-
status=True,
|
|
72
|
-
)
|
|
73
|
-
try:
|
|
74
|
-
project_remote = project_local.create()
|
|
75
|
-
except BaseError:
|
|
76
|
-
err_msg = (
|
|
77
|
-
f"Cannot create project '{project_definition.shortname}' "
|
|
78
|
-
f"({project_definition.shortcode}) on DSP server."
|
|
79
|
-
)
|
|
80
|
-
logger.opt(exception=True).error(err_msg)
|
|
81
|
-
raise UserError(err_msg) from None
|
|
82
|
-
print(f" Created project '{project_remote.shortname}' ({project_remote.shortcode}).")
|
|
83
|
-
logger.info(f"Created project '{project_remote.shortname}' ({project_remote.shortcode}).")
|
|
84
|
-
return project_remote, success
|
|
85
|
-
|
|
86
|
-
|
|
87
|
-
def _create_groups(
|
|
88
|
-
con: Connection,
|
|
89
|
-
groups: list[dict[str, str]],
|
|
90
|
-
project: Project,
|
|
91
|
-
) -> tuple[dict[str, Group], bool]:
|
|
92
|
-
"""
|
|
93
|
-
Creates groups on a DSP server from the "groups" section of a JSON project file. If a group cannot be created, it is
|
|
94
|
-
skipped and a warning is printed, but such a group will still be part of the returned dict.
|
|
95
|
-
Returns a tuple consisting of a dict and a bool. The dict contains the groups that have successfully been created
|
|
96
|
-
(or already exist). The bool indicates if everything went smoothly during the process. If a warning or error
|
|
97
|
-
occurred, it is False.
|
|
98
|
-
|
|
99
|
-
Args:
|
|
100
|
-
con: connection instance to connect to the DSP server
|
|
101
|
-
groups: "groups" section of a parsed JSON project file
|
|
102
|
-
project: Project the group(s) should be added to (must exist on DSP server)
|
|
103
|
-
|
|
104
|
-
Returns:
|
|
105
|
-
A tuple consisting of a dict and the success status.
|
|
106
|
-
The dict has the form ``{group name: group object}``
|
|
107
|
-
for all groups that have successfully been created (or already exist).
|
|
108
|
-
The dict is empty if no group was created.
|
|
109
|
-
"""
|
|
110
|
-
overall_success = True
|
|
111
|
-
current_project_groups: dict[str, Group] = {}
|
|
112
|
-
try:
|
|
113
|
-
remote_groups = Group.getAllGroupsForProject(con=con, proj_iri=str(project.iri))
|
|
114
|
-
except BaseError:
|
|
115
|
-
err_msg = (
|
|
116
|
-
"Unable to check if group names are already existing on DSP server, because it is "
|
|
117
|
-
"not possible to retrieve the remote groups from the DSP server."
|
|
118
|
-
)
|
|
119
|
-
print(f"WARNING: {err_msg}")
|
|
120
|
-
logger.opt(exception=True).warning(err_msg)
|
|
121
|
-
remote_groups = []
|
|
122
|
-
overall_success = False
|
|
123
|
-
|
|
124
|
-
for group in groups:
|
|
125
|
-
group_name = group["name"]
|
|
126
|
-
|
|
127
|
-
# if the group already exists, add it to "current_project_groups" (for later usage), then skip it
|
|
128
|
-
if remotely_existing_group := [g for g in remote_groups if g.name == group_name]:
|
|
129
|
-
current_project_groups[group_name] = remotely_existing_group[0]
|
|
130
|
-
err_msg = f"Group name '{group_name}' already exists on the DSP server. Skipping..."
|
|
131
|
-
print(f" WARNING: {err_msg}")
|
|
132
|
-
logger.opt(exception=True).warning(err_msg)
|
|
133
|
-
overall_success = False
|
|
134
|
-
continue
|
|
135
|
-
|
|
136
|
-
# create the group
|
|
137
|
-
group_local = Group(
|
|
138
|
-
con=con,
|
|
139
|
-
name=group_name,
|
|
140
|
-
descriptions=LangString(group["descriptions"]),
|
|
141
|
-
project=project,
|
|
142
|
-
status=bool(group.get("status", True)),
|
|
143
|
-
selfjoin=bool(group.get("selfjoin", False)),
|
|
144
|
-
)
|
|
145
|
-
try:
|
|
146
|
-
group_remote: Group = group_local.create()
|
|
147
|
-
except BaseError:
|
|
148
|
-
err_msg = "Unable to create group '{group_name}'."
|
|
149
|
-
print(f" WARNING: {err_msg}")
|
|
150
|
-
logger.opt(exception=True).warning(err_msg)
|
|
151
|
-
overall_success = False
|
|
152
|
-
continue
|
|
153
|
-
|
|
154
|
-
current_project_groups[str(group_remote.name)] = group_remote
|
|
155
|
-
print(f" Created group '{group_name}'.")
|
|
156
|
-
logger.info(f"Created group '{group_name}'.")
|
|
157
|
-
|
|
158
|
-
return current_project_groups, overall_success
|
|
159
|
-
|
|
160
|
-
|
|
161
|
-
def _get_group_iris_for_user(
|
|
162
|
-
json_user_definition: dict[str, str],
|
|
163
|
-
current_project: Project,
|
|
164
|
-
current_project_groups: dict[str, Group],
|
|
165
|
-
con: Connection,
|
|
166
|
-
verbose: bool,
|
|
167
|
-
) -> tuple[set[str], bool, bool]:
|
|
168
|
-
"""
|
|
169
|
-
Retrieve the IRIs of the groups that the user belongs to.
|
|
170
|
-
|
|
171
|
-
Args:
|
|
172
|
-
json_user_definition: the section of the JSON file that defines a user
|
|
173
|
-
current_project: the Project object
|
|
174
|
-
current_project_groups: dict of the form ``{group name: group object}``
|
|
175
|
-
with the groups that exist on the DSP server
|
|
176
|
-
con: connection to the DSP server
|
|
177
|
-
verbose: verbose switch
|
|
178
|
-
|
|
179
|
-
Returns:
|
|
180
|
-
a tuple consisting of the group IRIs,
|
|
181
|
-
the system admin status (True if the user is sysadmin, False otherwise),
|
|
182
|
-
and the success status (True if everything went well)
|
|
183
|
-
|
|
184
|
-
Raises:
|
|
185
|
-
BaseError: if no groups can be retrieved from the DSP server, or if the retrieved group has no IRI
|
|
186
|
-
"""
|
|
187
|
-
success = True
|
|
188
|
-
username = json_user_definition["username"]
|
|
189
|
-
group_iris: set[str] = set()
|
|
190
|
-
sysadmin = False
|
|
191
|
-
remote_groups: list[Group] = []
|
|
192
|
-
for full_group_name in json_user_definition.get("groups", []):
|
|
193
|
-
# full_group_name has the form '[project_shortname]:group_name' or 'SystemAdmin'
|
|
194
|
-
inexisting_group_msg = (
|
|
195
|
-
f"User {username} cannot be added to group {full_group_name}, because such a group doesn't exist."
|
|
196
|
-
)
|
|
197
|
-
if ":" not in full_group_name and full_group_name != "SystemAdmin":
|
|
198
|
-
print(f" WARNING: {inexisting_group_msg}")
|
|
199
|
-
logger.opt(exception=True).warning(inexisting_group_msg)
|
|
200
|
-
success = False
|
|
201
|
-
continue
|
|
202
|
-
|
|
203
|
-
if full_group_name == "SystemAdmin":
|
|
204
|
-
sysadmin = True
|
|
205
|
-
if verbose:
|
|
206
|
-
print(f" Added user '{username}' to group 'SystemAdmin'.")
|
|
207
|
-
logger.info(f"Added user '{username}' to group 'SystemAdmin'.")
|
|
208
|
-
continue
|
|
209
|
-
|
|
210
|
-
# all other cases (":" in full_group_name)
|
|
211
|
-
project_shortname, group_name = full_group_name.split(":")
|
|
212
|
-
if not project_shortname:
|
|
213
|
-
# full_group_name refers to a group inside the same project
|
|
214
|
-
if group_name not in current_project_groups:
|
|
215
|
-
print(f" WARNING: {inexisting_group_msg}")
|
|
216
|
-
logger.opt(exception=True).warning(inexisting_group_msg)
|
|
217
|
-
success = False
|
|
218
|
-
continue
|
|
219
|
-
group = current_project_groups[group_name]
|
|
220
|
-
else:
|
|
221
|
-
# full_group_name refers to an already existing group on DSP
|
|
222
|
-
try:
|
|
223
|
-
# "remote_groups" might be available from a previous loop cycle
|
|
224
|
-
remote_groups = remote_groups or Group.getAllGroups(con=con)
|
|
225
|
-
except BaseError:
|
|
226
|
-
err_msg = (
|
|
227
|
-
f"User '{username}' is referring to the group {full_group_name} that "
|
|
228
|
-
f"exists on the DSP server, but no groups could be retrieved from the DSP server."
|
|
229
|
-
)
|
|
230
|
-
print(f" WARNING: {err_msg}")
|
|
231
|
-
logger.opt(exception=True).warning(err_msg)
|
|
232
|
-
success = False
|
|
233
|
-
continue
|
|
234
|
-
existing_group = [g for g in remote_groups if g.project == current_project.iri and g.name == group_name]
|
|
235
|
-
if not existing_group:
|
|
236
|
-
print(f" WARNING: {inexisting_group_msg}")
|
|
237
|
-
logger.opt(exception=True).warning(inexisting_group_msg)
|
|
238
|
-
success = False
|
|
239
|
-
continue
|
|
240
|
-
group = existing_group[0]
|
|
241
|
-
|
|
242
|
-
if not group.iri:
|
|
243
|
-
raise BaseError(f"Group '{group}' has no IRI.")
|
|
244
|
-
group_iris.add(group.iri)
|
|
245
|
-
if verbose:
|
|
246
|
-
print(f" Added user '{username}' to group '{full_group_name}'.")
|
|
247
|
-
logger.info(f"Added user '{username}' to group '{full_group_name}'.")
|
|
248
|
-
|
|
249
|
-
return group_iris, sysadmin, success
|
|
250
|
-
|
|
251
|
-
|
|
252
|
-
def _get_projects_where_user_is_admin(
|
|
253
|
-
json_user_definition: dict[str, str],
|
|
254
|
-
current_project: Project,
|
|
255
|
-
con: Connection,
|
|
256
|
-
verbose: bool,
|
|
257
|
-
) -> tuple[dict[str, bool], bool]:
|
|
258
|
-
"""
|
|
259
|
-
Create a dict that tells for every project if the user is administrator in that project or not.
|
|
260
|
-
|
|
261
|
-
Args:
|
|
262
|
-
json_user_definition: the section of the JSON file that defines a user
|
|
263
|
-
current_project: the Project object
|
|
264
|
-
con: connection to the DSP server
|
|
265
|
-
verbose: verbose switch
|
|
266
|
-
|
|
267
|
-
Returns:
|
|
268
|
-
a tuple consisting of a dict in the form {project IRI: isAdmin}, and the success status
|
|
269
|
-
"""
|
|
270
|
-
success = True
|
|
271
|
-
username = json_user_definition["username"]
|
|
272
|
-
project_info: dict[str, bool] = {}
|
|
273
|
-
remote_projects: list[Project] = []
|
|
274
|
-
for full_project_name in json_user_definition.get("projects", []):
|
|
275
|
-
# full_project_name has the form '[project_name]:member' or '[project_name]:admin'
|
|
276
|
-
if ":" not in full_project_name:
|
|
277
|
-
err_msg = "Provided project '{full_project_name}' for user '{username}' is not valid. Skipping..."
|
|
278
|
-
print(f" WARNING: {err_msg}")
|
|
279
|
-
logger.opt(exception=True).warning(err_msg)
|
|
280
|
-
success = False
|
|
281
|
-
continue
|
|
282
|
-
|
|
283
|
-
project_name, project_role = full_project_name.split(":")
|
|
284
|
-
if not project_name:
|
|
285
|
-
# full_project_name refers to the current project
|
|
286
|
-
in_project = current_project
|
|
287
|
-
else:
|
|
288
|
-
# full_project_name refers to an already existing project on DSP
|
|
289
|
-
try:
|
|
290
|
-
# "remote_projects" might be available from a previous loop cycle
|
|
291
|
-
remote_projects = remote_projects or current_project.getAllProjects(con=con)
|
|
292
|
-
except BaseError:
|
|
293
|
-
err_msg = (
|
|
294
|
-
f"User '{username}' cannot be added to the projects {json_user_definition['projects']} "
|
|
295
|
-
f"because the projects cannot be retrieved from the DSP server."
|
|
296
|
-
)
|
|
297
|
-
print(f" WARNING: {err_msg}")
|
|
298
|
-
logger.opt(exception=True).warning(err_msg)
|
|
299
|
-
success = False
|
|
300
|
-
continue
|
|
301
|
-
in_project_list = [p for p in remote_projects if p.shortname == project_name]
|
|
302
|
-
if not in_project_list:
|
|
303
|
-
msg = f"Provided project '{full_project_name}' for user '{username}' is not valid. Skipping..."
|
|
304
|
-
print(f" WARNING: {msg}")
|
|
305
|
-
logger.opt(exception=True).warning(msg)
|
|
306
|
-
success = False
|
|
307
|
-
continue
|
|
308
|
-
in_project = in_project_list[0]
|
|
309
|
-
|
|
310
|
-
is_admin = project_role == "admin"
|
|
311
|
-
project_info[str(in_project.iri)] = is_admin
|
|
312
|
-
if verbose:
|
|
313
|
-
print(f" Added user '{username}' as {project_role} to project '{in_project.shortname}'.")
|
|
314
|
-
logger.info(f"Added user '{username}' as {project_role} to project '{in_project.shortname}'.")
|
|
315
|
-
|
|
316
|
-
return project_info, success
|
|
317
|
-
|
|
318
|
-
|
|
319
|
-
def _create_users(
|
|
320
|
-
con: Connection,
|
|
321
|
-
users_section: list[dict[str, str]],
|
|
322
|
-
current_project_groups: dict[str, Group],
|
|
323
|
-
current_project: Project,
|
|
324
|
-
verbose: bool,
|
|
325
|
-
) -> bool:
|
|
326
|
-
"""
|
|
327
|
-
Creates users on a DSP server from the "users" section of a JSON project file.
|
|
328
|
-
If a user cannot be created, a warning is printed and the user is skipped.
|
|
329
|
-
|
|
330
|
-
Args:
|
|
331
|
-
con: connection instance to connect to the DSP server
|
|
332
|
-
users_section: "users" section of a parsed JSON project file
|
|
333
|
-
current_project_groups: groups defined in the current project, in the form ``{group name: group object}``
|
|
334
|
-
(must exist on DSP server)
|
|
335
|
-
current_project: "project" object of the current project (must exist on DSP server)
|
|
336
|
-
verbose: Prints more information if set to True
|
|
337
|
-
|
|
338
|
-
Returns:
|
|
339
|
-
True if all users could be created without any problems. False if a warning/error occurred.
|
|
340
|
-
"""
|
|
341
|
-
overall_success = True
|
|
342
|
-
for json_user_definition in users_section:
|
|
343
|
-
username = json_user_definition["username"]
|
|
344
|
-
|
|
345
|
-
# skip the user if he already exists
|
|
346
|
-
all_users = User.getAllUsers(con)
|
|
347
|
-
if json_user_definition["email"] in [user.email for user in all_users]:
|
|
348
|
-
err_msg = (
|
|
349
|
-
f"User '{username}' already exists on the DSP server.\n"
|
|
350
|
-
f"Please manually add this user to the project in DSP-APP."
|
|
351
|
-
)
|
|
352
|
-
print(f" WARNING: {err_msg}")
|
|
353
|
-
logger.opt(exception=True).warning(err_msg)
|
|
354
|
-
overall_success = False
|
|
355
|
-
continue
|
|
356
|
-
# add user to the group(s)
|
|
357
|
-
group_iris, sysadmin, success = _get_group_iris_for_user(
|
|
358
|
-
json_user_definition=json_user_definition,
|
|
359
|
-
current_project=current_project,
|
|
360
|
-
current_project_groups=current_project_groups,
|
|
361
|
-
con=con,
|
|
362
|
-
verbose=verbose,
|
|
363
|
-
)
|
|
364
|
-
if not success:
|
|
365
|
-
overall_success = False
|
|
366
|
-
|
|
367
|
-
# add user to the project(s)
|
|
368
|
-
project_info, success = _get_projects_where_user_is_admin(
|
|
369
|
-
json_user_definition=json_user_definition,
|
|
370
|
-
current_project=current_project,
|
|
371
|
-
con=con,
|
|
372
|
-
verbose=verbose,
|
|
373
|
-
)
|
|
374
|
-
if not success:
|
|
375
|
-
overall_success = False
|
|
376
|
-
|
|
377
|
-
# create the user
|
|
378
|
-
user_local = User(
|
|
379
|
-
con=con,
|
|
380
|
-
username=json_user_definition["username"],
|
|
381
|
-
email=json_user_definition["email"],
|
|
382
|
-
givenName=json_user_definition["givenName"],
|
|
383
|
-
familyName=json_user_definition["familyName"],
|
|
384
|
-
password=json_user_definition["password"],
|
|
385
|
-
status=bool(json_user_definition.get("status", True)),
|
|
386
|
-
lang=json_user_definition.get("lang", "en"),
|
|
387
|
-
sysadmin=sysadmin,
|
|
388
|
-
in_projects=project_info,
|
|
389
|
-
in_groups=group_iris,
|
|
390
|
-
)
|
|
391
|
-
try:
|
|
392
|
-
user_local.create()
|
|
393
|
-
except BaseError:
|
|
394
|
-
print(f" WARNING: Unable to create user '{username}'.")
|
|
395
|
-
logger.opt(exception=True).warning(f"Unable to create user '{username}'.")
|
|
396
|
-
overall_success = False
|
|
397
|
-
continue
|
|
398
|
-
print(f" Created user '{username}'.")
|
|
399
|
-
logger.info(f"Created user '{username}'.")
|
|
400
|
-
|
|
401
|
-
return overall_success
|
|
402
|
-
|
|
403
|
-
|
|
404
|
-
def _sort_resources(
|
|
405
|
-
unsorted_resources: list[dict[str, Any]],
|
|
406
|
-
onto_name: str,
|
|
407
|
-
) -> list[dict[str, Any]]:
|
|
408
|
-
"""
|
|
409
|
-
This method sorts the resource classes in an ontology according to their inheritance order (parent classes first).
|
|
410
|
-
|
|
411
|
-
Args:
|
|
412
|
-
unsorted_resources: list of resources from a parsed JSON project file
|
|
413
|
-
onto_name: name of the onto
|
|
414
|
-
|
|
415
|
-
Returns:
|
|
416
|
-
sorted list of resource classes
|
|
417
|
-
"""
|
|
418
|
-
|
|
419
|
-
# do not modify the original unsorted_resources, which points to the original JSON project file
|
|
420
|
-
resources_to_sort = unsorted_resources.copy()
|
|
421
|
-
sorted_resources: list[dict[str, Any]] = []
|
|
422
|
-
ok_resource_names: list[str] = []
|
|
423
|
-
while resources_to_sort:
|
|
424
|
-
# inside the for loop, resources_to_sort is modified, so a copy must be made to iterate over
|
|
425
|
-
for res in resources_to_sort.copy():
|
|
426
|
-
parent_classes = res["super"]
|
|
427
|
-
if isinstance(parent_classes, str):
|
|
428
|
-
parent_classes = [parent_classes]
|
|
429
|
-
parent_classes = [regex.sub(r"^:([^:]+)$", f"{onto_name}:\\1", elem) for elem in parent_classes]
|
|
430
|
-
parent_classes_ok = [not p.startswith(onto_name) or p in ok_resource_names for p in parent_classes]
|
|
431
|
-
if all(parent_classes_ok):
|
|
432
|
-
sorted_resources.append(res)
|
|
433
|
-
res_name = f'{onto_name}:{res["name"]}'
|
|
434
|
-
ok_resource_names.append(res_name)
|
|
435
|
-
resources_to_sort.remove(res)
|
|
436
|
-
return sorted_resources
|
|
437
|
-
|
|
438
|
-
|
|
439
|
-
def _sort_prop_classes(
|
|
440
|
-
unsorted_prop_classes: list[dict[str, Any]],
|
|
441
|
-
onto_name: str,
|
|
442
|
-
) -> list[dict[str, Any]]:
|
|
443
|
-
"""
|
|
444
|
-
In case of inheritance, parent properties must be uploaded before their children. This method sorts the
|
|
445
|
-
properties.
|
|
446
|
-
|
|
447
|
-
Args:
|
|
448
|
-
unsorted_prop_classes: list of properties from a parsed JSON project file
|
|
449
|
-
onto_name: name of the onto
|
|
450
|
-
|
|
451
|
-
Returns:
|
|
452
|
-
sorted list of properties
|
|
453
|
-
"""
|
|
454
|
-
|
|
455
|
-
# do not modify the original unsorted_prop_classes, which points to the original JSON project file
|
|
456
|
-
prop_classes_to_sort = unsorted_prop_classes.copy()
|
|
457
|
-
sorted_prop_classes: list[dict[str, Any]] = []
|
|
458
|
-
ok_propclass_names: list[str] = []
|
|
459
|
-
while prop_classes_to_sort:
|
|
460
|
-
# inside the for loop, resources_to_sort is modified, so a copy must be made to iterate over
|
|
461
|
-
for prop in prop_classes_to_sort.copy():
|
|
462
|
-
prop_name = f'{onto_name}:{prop["name"]}'
|
|
463
|
-
parent_classes = prop.get("super", "hasValue")
|
|
464
|
-
if isinstance(parent_classes, str):
|
|
465
|
-
parent_classes = [parent_classes]
|
|
466
|
-
parent_classes = [regex.sub(r"^:([^:]+)$", f"{onto_name}:\\1", elem) for elem in parent_classes]
|
|
467
|
-
parent_classes_ok = [not p.startswith(onto_name) or p in ok_propclass_names for p in parent_classes]
|
|
468
|
-
if all(parent_classes_ok):
|
|
469
|
-
sorted_prop_classes.append(prop)
|
|
470
|
-
ok_propclass_names.append(prop_name)
|
|
471
|
-
prop_classes_to_sort.remove(prop)
|
|
472
|
-
return sorted_prop_classes
|
|
473
|
-
|
|
474
|
-
|
|
475
|
-
def _create_ontology(
|
|
476
|
-
onto_name: str,
|
|
477
|
-
onto_label: str,
|
|
478
|
-
onto_comment: Optional[str],
|
|
479
|
-
project_ontologies: list[Ontology],
|
|
480
|
-
con: Connection,
|
|
481
|
-
project_remote: Project,
|
|
482
|
-
context: Context,
|
|
483
|
-
verbose: bool,
|
|
484
|
-
) -> Optional[Ontology]:
|
|
485
|
-
"""
|
|
486
|
-
Create an ontology on the DSP server,
|
|
487
|
-
and add the prefixes defined in the JSON file to its context.
|
|
488
|
-
If the ontology already exists on the DSP server, it is skipped.
|
|
489
|
-
|
|
490
|
-
Args:
|
|
491
|
-
onto_name: name of the ontology
|
|
492
|
-
onto_label: label of the ontology
|
|
493
|
-
onto_comment: comment of the ontology
|
|
494
|
-
project_ontologies: ontologies existing on the DSP server
|
|
495
|
-
con: Connection to the DSP server
|
|
496
|
-
project_remote: representation of the project on the DSP server
|
|
497
|
-
context: prefixes and the ontology IRIs they stand for
|
|
498
|
-
verbose: verbose switch
|
|
499
|
-
|
|
500
|
-
Raises:
|
|
501
|
-
UserError: if the ontology cannot be created on the DSP server
|
|
502
|
-
|
|
503
|
-
Returns:
|
|
504
|
-
representation of the created ontology on the DSP server, or None if it already existed
|
|
505
|
-
"""
|
|
506
|
-
# skip if it already exists on the DSP server
|
|
507
|
-
if onto_name in [onto.name for onto in project_ontologies]:
|
|
508
|
-
err_msg = f"Ontology '{onto_name}' already exists on the DSP server. Skipping..."
|
|
509
|
-
print(f" WARNING: {err_msg}")
|
|
510
|
-
logger.opt(exception=True).warning(err_msg)
|
|
511
|
-
return None
|
|
512
|
-
|
|
513
|
-
print(f"Create ontology '{onto_name}'...")
|
|
514
|
-
logger.info(f"Create ontology '{onto_name}'...")
|
|
515
|
-
ontology_local = Ontology(
|
|
516
|
-
con=con,
|
|
517
|
-
project=project_remote,
|
|
518
|
-
label=onto_label,
|
|
519
|
-
name=onto_name,
|
|
520
|
-
comment=onto_comment,
|
|
521
|
-
)
|
|
522
|
-
try:
|
|
523
|
-
ontology_remote = ontology_local.create()
|
|
524
|
-
except BaseError:
|
|
525
|
-
# if ontology cannot be created, let the error escalate
|
|
526
|
-
logger.opt(exception=True).error(f"ERROR while trying to create ontology '{onto_name}'.")
|
|
527
|
-
raise UserError(f"ERROR while trying to create ontology '{onto_name}'.") from None
|
|
528
|
-
|
|
529
|
-
if verbose:
|
|
530
|
-
print(f" Created ontology '{onto_name}'.")
|
|
531
|
-
logger.info(f"Created ontology '{onto_name}'.")
|
|
532
|
-
|
|
533
|
-
context.add_context(
|
|
534
|
-
ontology_remote.name,
|
|
535
|
-
ontology_remote.iri + ("" if ontology_remote.iri.endswith("#") else "#"),
|
|
536
|
-
)
|
|
537
|
-
|
|
538
|
-
# add the prefixes defined in the JSON file
|
|
539
|
-
for onto_prefix, onto_info in context:
|
|
540
|
-
if onto_info and str(onto_prefix) not in ontology_remote.context:
|
|
541
|
-
onto_iri = onto_info.iri + ("#" if onto_info.hashtag else "")
|
|
542
|
-
ontology_remote.context.add_context(prefix=str(onto_prefix), iri=onto_iri)
|
|
543
|
-
|
|
544
|
-
return ontology_remote
|
|
545
|
-
|
|
546
|
-
|
|
547
|
-
def _create_ontologies(
|
|
548
|
-
con: Connection,
|
|
549
|
-
context: Context,
|
|
550
|
-
knora_api_prefix: str,
|
|
551
|
-
names_and_iris_of_list_nodes: dict[str, Any],
|
|
552
|
-
ontology_definitions: list[dict[str, Any]],
|
|
553
|
-
project_remote: Project,
|
|
554
|
-
verbose: bool,
|
|
555
|
-
) -> bool:
|
|
556
|
-
"""
|
|
557
|
-
Iterates over the ontologies in a JSON project file and creates the ontologies that don't exist on the DSP server
|
|
558
|
-
yet. For every ontology, it first creates the resource classes, then the properties, and then adds the cardinalities
|
|
559
|
-
to the resource classes.
|
|
560
|
-
|
|
561
|
-
Args:
|
|
562
|
-
con: Connection to the DSP server
|
|
563
|
-
context: prefixes and the ontology IRIs they stand for
|
|
564
|
-
knora_api_prefix: the prefix that stands for the knora-api ontology
|
|
565
|
-
names_and_iris_of_list_nodes: IRIs of list nodes that were already created and are available on the DSP server
|
|
566
|
-
ontology_definitions: the "ontologies" section of the parsed JSON project file
|
|
567
|
-
project_remote: representation of the project on the DSP server
|
|
568
|
-
verbose: verbose switch
|
|
569
|
-
|
|
570
|
-
Raises:
|
|
571
|
-
UserError: if an error occurs during the creation of an ontology.
|
|
572
|
-
All other errors are printed, the process continues, but the success status will be false.
|
|
573
|
-
|
|
574
|
-
Returns:
|
|
575
|
-
True if everything went smoothly, False otherwise
|
|
576
|
-
"""
|
|
577
|
-
|
|
578
|
-
overall_success = True
|
|
579
|
-
|
|
580
|
-
print("Create ontologies...")
|
|
581
|
-
logger.info("Create ontologies...")
|
|
582
|
-
try:
|
|
583
|
-
project_ontologies = Ontology.getProjectOntologies(con=con, project_id=str(project_remote.iri))
|
|
584
|
-
except BaseError:
|
|
585
|
-
err_msg = "Unable to retrieve remote ontologies. Cannot check if your ontology already exists."
|
|
586
|
-
print("WARNING: {err_msg}")
|
|
587
|
-
logger.opt(exception=True).warning(err_msg)
|
|
588
|
-
project_ontologies = []
|
|
589
|
-
|
|
590
|
-
for ontology_definition in ontology_definitions:
|
|
591
|
-
ontology_remote = _create_ontology(
|
|
592
|
-
onto_name=ontology_definition["name"],
|
|
593
|
-
onto_label=ontology_definition["label"],
|
|
594
|
-
onto_comment=ontology_definition.get("comment"),
|
|
595
|
-
project_ontologies=project_ontologies,
|
|
596
|
-
con=con,
|
|
597
|
-
project_remote=project_remote,
|
|
598
|
-
context=context,
|
|
599
|
-
verbose=verbose,
|
|
600
|
-
)
|
|
601
|
-
if not ontology_remote:
|
|
602
|
-
overall_success = False
|
|
603
|
-
continue
|
|
604
|
-
|
|
605
|
-
# add the empty resource classes to the remote ontology
|
|
606
|
-
last_modification_date, remote_res_classes, success = _add_resource_classes_to_remote_ontology(
|
|
607
|
-
onto_name=ontology_definition["name"],
|
|
608
|
-
resclass_definitions=ontology_definition.get("resources", []),
|
|
609
|
-
ontology_remote=ontology_remote,
|
|
610
|
-
con=con,
|
|
611
|
-
last_modification_date=ontology_remote.lastModificationDate,
|
|
612
|
-
verbose=verbose,
|
|
613
|
-
)
|
|
614
|
-
if not success:
|
|
615
|
-
overall_success = False
|
|
616
|
-
|
|
617
|
-
# add the property classes to the remote ontology
|
|
618
|
-
last_modification_date, success = _add_property_classes_to_remote_ontology(
|
|
619
|
-
onto_name=ontology_definition["name"],
|
|
620
|
-
property_definitions=ontology_definition.get("properties", []),
|
|
621
|
-
ontology_remote=ontology_remote,
|
|
622
|
-
names_and_iris_of_list_nodes=names_and_iris_of_list_nodes,
|
|
623
|
-
con=con,
|
|
624
|
-
last_modification_date=last_modification_date,
|
|
625
|
-
knora_api_prefix=knora_api_prefix,
|
|
626
|
-
verbose=verbose,
|
|
627
|
-
)
|
|
628
|
-
if not success:
|
|
629
|
-
overall_success = False
|
|
630
|
-
|
|
631
|
-
# Add cardinalities to class
|
|
632
|
-
success = _add_cardinalities_to_resource_classes(
|
|
633
|
-
resclass_definitions=ontology_definition.get("resources", []),
|
|
634
|
-
ontology_remote=ontology_remote,
|
|
635
|
-
remote_res_classes=remote_res_classes,
|
|
636
|
-
last_modification_date=last_modification_date,
|
|
637
|
-
knora_api_prefix=knora_api_prefix,
|
|
638
|
-
verbose=verbose,
|
|
639
|
-
)
|
|
640
|
-
if not success:
|
|
641
|
-
overall_success = False
|
|
642
|
-
|
|
643
|
-
return overall_success
|
|
644
|
-
|
|
645
|
-
|
|
646
|
-
def _add_resource_classes_to_remote_ontology(
|
|
647
|
-
onto_name: str,
|
|
648
|
-
resclass_definitions: list[dict[str, Any]],
|
|
649
|
-
ontology_remote: Ontology,
|
|
650
|
-
con: Connection,
|
|
651
|
-
last_modification_date: DateTimeStamp,
|
|
652
|
-
verbose: bool,
|
|
653
|
-
) -> tuple[DateTimeStamp, dict[str, ResourceClass], bool]:
|
|
654
|
-
"""
|
|
655
|
-
Creates the resource classes (without cardinalities) defined in the "resources" section of an ontology. The
|
|
656
|
-
containing project and the containing ontology must already be existing on the DSP server.
|
|
657
|
-
If an error occurs during creation of a resource class, it is printed out, the process continues, but the success
|
|
658
|
-
status will be false.
|
|
659
|
-
|
|
660
|
-
Args:
|
|
661
|
-
onto_name: name of the current ontology
|
|
662
|
-
resclass_definitions: the part of the parsed JSON project file that contains the resources of the current onto
|
|
663
|
-
ontology_remote: representation of the current ontology on the DSP server
|
|
664
|
-
con: connection to the DSP server
|
|
665
|
-
last_modification_date: last modification date of the ontology on the DSP server
|
|
666
|
-
verbose: verbose switch
|
|
667
|
-
|
|
668
|
-
Returns:
|
|
669
|
-
last modification date of the ontology,
|
|
670
|
-
new resource classes,
|
|
671
|
-
success status
|
|
672
|
-
"""
|
|
673
|
-
|
|
674
|
-
overall_success = True
|
|
675
|
-
print(" Create resource classes...")
|
|
676
|
-
logger.info("Create resource classes...")
|
|
677
|
-
new_res_classes: dict[str, ResourceClass] = {}
|
|
678
|
-
sorted_resources = _sort_resources(resclass_definitions, onto_name)
|
|
679
|
-
for res_class in sorted_resources:
|
|
680
|
-
super_classes = res_class["super"]
|
|
681
|
-
if isinstance(super_classes, str):
|
|
682
|
-
super_classes = [super_classes]
|
|
683
|
-
res_class_local = ResourceClass(
|
|
684
|
-
con=con,
|
|
685
|
-
context=ontology_remote.context,
|
|
686
|
-
ontology_id=ontology_remote.iri,
|
|
687
|
-
name=res_class["name"],
|
|
688
|
-
superclasses=super_classes,
|
|
689
|
-
label=LangString(res_class.get("labels")),
|
|
690
|
-
comment=LangString(res_class.get("comments")) if res_class.get("comments") else None,
|
|
691
|
-
)
|
|
692
|
-
try:
|
|
693
|
-
last_modification_date, res_class_remote = res_class_local.create(last_modification_date)
|
|
694
|
-
new_res_classes[str(res_class_remote.iri)] = res_class_remote
|
|
695
|
-
ontology_remote.lastModificationDate = last_modification_date
|
|
696
|
-
if verbose:
|
|
697
|
-
print(f" Created resource class '{res_class['name']}'")
|
|
698
|
-
logger.info(f"Created resource class '{res_class['name']}'")
|
|
699
|
-
except BaseError:
|
|
700
|
-
err_msg = f"Unable to create resource class '{res_class['name']}'."
|
|
701
|
-
print(f"WARNING: {err_msg}")
|
|
702
|
-
logger.opt(exception=True).warning(err_msg)
|
|
703
|
-
overall_success = False
|
|
704
|
-
|
|
705
|
-
return last_modification_date, new_res_classes, overall_success
|
|
706
|
-
|
|
707
|
-
|
|
708
|
-
def _add_property_classes_to_remote_ontology(
|
|
709
|
-
onto_name: str,
|
|
710
|
-
property_definitions: list[dict[str, Any]],
|
|
711
|
-
ontology_remote: Ontology,
|
|
712
|
-
names_and_iris_of_list_nodes: dict[str, Any],
|
|
713
|
-
con: Connection,
|
|
714
|
-
last_modification_date: DateTimeStamp,
|
|
715
|
-
knora_api_prefix: str,
|
|
716
|
-
verbose: bool,
|
|
717
|
-
) -> tuple[DateTimeStamp, bool]:
|
|
718
|
-
"""
|
|
719
|
-
Creates the property classes defined in the "properties" section of an ontology. The
|
|
720
|
-
containing project and the containing ontology must already be existing on the DSP server.
|
|
721
|
-
If an error occurs during creation of a property class, it is printed out, the process continues, but the success
|
|
722
|
-
status will be false.
|
|
723
|
-
|
|
724
|
-
Args:
|
|
725
|
-
onto_name: name of the current ontology
|
|
726
|
-
property_definitions: the part of the parsed JSON project file that contains the properties of the current onto
|
|
727
|
-
ontology_remote: representation of the current ontology on the DSP server
|
|
728
|
-
-        names_and_iris_of_list_nodes: IRIs of list nodes that were already created and are available on the DSP server
-        con: connection to the DSP server
-        last_modification_date: last modification date of the ontology on the DSP server
-        knora_api_prefix: the prefix that stands for the knora-api ontology
-        verbose: verbose switch
-
-    Returns:
-        a tuple consisting of the last modification date of the ontology, and the success status
-    """
-    overall_success = True
-    print(" Create property classes...")
-    logger.info("Create property classes...")
-    sorted_prop_classes = _sort_prop_classes(property_definitions, onto_name)
-    for prop_class in sorted_prop_classes:
-        # get the super-property/ies, valid forms are:
-        # - "prefix:super-property" : fully qualified name of property in another ontology. The prefix has to be
-        #   defined in the prefixes part.
-        # - ":super-property" : super-property defined in current ontology
-        # - "super-property" : super-property defined in the knora-api ontology
-        # - if omitted, "knora-api:hasValue" is assumed
-        if prop_class.get("super"):
-            super_props = []
-            for super_class in prop_class["super"]:
-                if ":" in super_class:
-                    prefix, _class = super_class.split(":")
-                    super_props.append(super_class if prefix else f"{ontology_remote.name}:{_class}")
-                else:
-                    super_props.append(knora_api_prefix + super_class)
-        else:
-            super_props = ["knora-api:hasValue"]
-
-        # get the "object", valid forms are:
-        # - "prefix:object_name" : fully qualified object. The prefix has to be defined in the prefixes part.
-        # - ":object_name" : The object is defined in the current ontology.
-        # - "object_name" : The object is defined in "knora-api"
-        if ":" in prop_class["object"]:
-            prefix, _object = prop_class["object"].split(":")
-            prop_object = f"{prefix}:{_object}" if prefix else f"{ontology_remote.name}:{_object}"
-        else:
-            prop_object = knora_api_prefix + prop_class["object"]
-
-        # get the gui_attributes
-        gui_attributes = prop_class.get("gui_attributes")
-        if gui_attributes and gui_attributes.get("hlist"):
-            list_iri = names_and_iris_of_list_nodes[gui_attributes["hlist"]]["id"]
-            gui_attributes["hlist"] = f"<{list_iri}>"
-
-        # create the property class
-        prop_class_local = PropertyClass(
-            con=con,
-            context=ontology_remote.context,
-            label=LangString(prop_class.get("labels")),
-            name=prop_class["name"],
-            ontology_id=ontology_remote.iri,
-            superproperties=super_props,
-            rdf_object=prop_object,
-            rdf_subject=prop_class.get("subject"),
-            gui_element="salsah-gui:" + prop_class["gui_element"],
-            gui_attributes=gui_attributes,
-            comment=LangString(prop_class["comments"]) if prop_class.get("comments") else None,
-        )
-        try:
-            last_modification_date, _ = prop_class_local.create(last_modification_date)
-            ontology_remote.lastModificationDate = last_modification_date
-            if verbose:
-                print(f" Created property class '{prop_class['name']}'")
-            logger.info(f"Created property class '{prop_class['name']}'")
-        except BaseError:
-            err_msg = f"Unable to create property class '{prop_class['name']}'."
-            print(f"WARNING: {err_msg}")
-            logger.opt(exception=True).warning(f"Unable to create property class '{prop_class['name']}'.")
-            overall_success = False
-
-    return last_modification_date, overall_success
-
-
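The comment block in the removed code above defines three accepted spellings for `super` and `object` references. As a minimal, self-contained sketch of those resolution rules (the helper `resolve_name` is hypothetical and not part of dsp-tools):

```python
# Hypothetical helper illustrating the resolution rules for "super" and "object"
# entries described in the removed code above; not part of dsp-tools.
def resolve_name(name: str, onto_name: str, knora_api_prefix: str = "knora-api:") -> str:
    if ":" in name:
        prefix, local = name.split(":")
        # "prefix:name" is kept as-is; ":name" refers to the current ontology
        return name if prefix else f"{onto_name}:{local}"
    # a bare name is assumed to live in the knora-api ontology
    return knora_api_prefix + name

assert resolve_name("dcterms:title", "myonto") == "dcterms:title"
assert resolve_name(":hasAuthor", "myonto") == "myonto:hasAuthor"
assert resolve_name("hasComment", "myonto") == "knora-api:hasComment"
```

If an entry omits `super` altogether, the removed code falls back to `knora-api:hasValue`.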
-def _add_cardinalities_to_resource_classes(
-    resclass_definitions: list[dict[str, Any]],
-    ontology_remote: Ontology,
-    remote_res_classes: dict[str, ResourceClass],
-    last_modification_date: DateTimeStamp,
-    knora_api_prefix: str,
-    verbose: bool,
-) -> bool:
-    """
-    Iterates over the resource classes of an ontology of a JSON project definition, and adds the cardinalities to each
-    resource class. The resource classes and the properties must already exist on the DSP server.
-    If an error occurs during creation of a cardinality, it is printed out, the process continues, but the success
-    status will be false.
-
-    Args:
-        resclass_definitions: the part of the parsed JSON project file that contains the resources of the current onto
-        ontology_remote: representation of the current ontology on the DSP server
-        remote_res_classes: representations of the resource classes on the DSP server
-        last_modification_date: last modification date of the ontology on the DSP server
-        knora_api_prefix: the prefix that stands for the knora-api ontology
-        verbose: verbose switch
-
-    Returns:
-        success status
-    """
-    overall_success = True
-    print(" Add cardinalities to resource classes...")
-    logger.info("Add cardinalities to resource classes...")
-    switcher = {
-        "1": Cardinality.C_1,
-        "0-1": Cardinality.C_0_1,
-        "0-n": Cardinality.C_0_n,
-        "1-n": Cardinality.C_1_n,
-    }
-    for res_class in resclass_definitions:
-        res_class_remote = remote_res_classes.get(f"{ontology_remote.iri}#{res_class['name']}")
-        if not res_class_remote:
-            msg = (
-                f"Unable to add cardinalities to resource class '{res_class['name']}': "
-                f"This class doesn't exist on the DSP server."
-            )
-            print(f"WARNING: {msg}")
-            logger.opt(exception=True).warning(msg)
-            overall_success = False
-            continue
-        for card_info in res_class.get("cardinalities", []):
-            if ":" in card_info["propname"]:
-                prefix, prop = card_info["propname"].split(":")
-                qualified_propname = card_info["propname"] if prefix else f"{ontology_remote.name}:{prop}"
-            else:
-                qualified_propname = knora_api_prefix + card_info["propname"]
-
-            try:
-                last_modification_date = res_class_remote.addProperty(
-                    property_id=qualified_propname,
-                    cardinality=switcher[card_info["cardinality"]],
-                    gui_order=card_info.get("gui_order"),
-                    last_modification_date=last_modification_date,
-                )
-                if verbose:
-                    print(f" Added cardinality '{card_info['propname']}' to resource class '{res_class['name']}'")
-                logger.info(f"Added cardinality '{card_info['propname']}' to resource class '{res_class['name']}'")
-            except BaseError:
-                err_msg = f"Unable to add cardinality '{qualified_propname}' to resource class {res_class['name']}."
-                print(f"WARNING: {err_msg}")
-                logger.opt(exception=True).warning(err_msg)
-                overall_success = False
-
-    ontology_remote.lastModificationDate = last_modification_date
-
-    return overall_success
-
-
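The `switcher` dict above is the entire mapping from the JSON cardinality strings to the internal `Cardinality` enum. A small sketch of that lookup (the enum below is a stand-in for dsp-tools' internal model; its values are illustrative only):

```python
from enum import Enum

# Stand-in for dsp-tools' internal Cardinality enum; member names mirror the
# removed code above, the values are illustrative only.
class Cardinality(Enum):
    C_1 = "1"
    C_0_1 = "0-1"
    C_0_n = "0-n"
    C_1_n = "1-n"

switcher = {
    "1": Cardinality.C_1,
    "0-1": Cardinality.C_0_1,
    "0-n": Cardinality.C_0_n,
    "1-n": Cardinality.C_1_n,
}

card_info = {"propname": ":hasAuthor", "cardinality": "0-n", "gui_order": 1}
print(switcher[card_info["cardinality"]])  # Cardinality.C_0_n
```

A cardinality string outside these four would raise a plain `KeyError` rather than being caught by the `except BaseError` branch; in practice the JSON schema validation is expected to restrict the field to exactly these values.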
-def _rectify_hlist_of_properties(
-    lists: list[dict[str, Any]],
-    properties: list[dict[str, Any]],
-) -> list[dict[str, Any]]:
-    """
-    Check the "hlist" of the "gui_attributes" of the properties.
-    If they don't refer to an existing list name,
-    check if there is a label of a list that corresponds to the "hlist".
-    If so, rectify the "hlist" to refer to the name of the list instead of the label.
-
-    Args:
-        lists: "lists" section of the JSON project definition
-        properties: "properties" section of one of the ontologies of the JSON project definition
-
-    Raises:
-        UserError: if the "hlist" refers to no existing list name or label
-
-    Returns:
-        the rectified "properties" section
-    """
-
-    if not lists or not properties:
-        return properties
-
-    existing_list_names = [lst["name"] for lst in lists]
-
-    for prop in properties:
-        if not prop.get("gui_attributes"):
-            continue
-        if not prop["gui_attributes"].get("hlist"):
-            continue
-        list_name = prop["gui_attributes"]["hlist"] if prop["gui_attributes"]["hlist"] in existing_list_names else None
-        if list_name:
-            continue
-
-        deduced_list_name = None
-        for root_node in lists:
-            if prop["gui_attributes"]["hlist"] in root_node["labels"].values():
-                deduced_list_name = cast(str, root_node["name"])
-        if deduced_list_name:
-            msg = (
-                f"INFO: Property '{prop['name']}' references the list '{prop['gui_attributes']['hlist']}' "
-                f"which is not a valid list name. "
-                f"Assuming that you meant '{deduced_list_name}' instead."
-            )
-            logger.opt(exception=True).warning(msg)
-            print(msg)
-        else:
-            msg = f"Property '{prop['name']}' references an unknown list: '{prop['gui_attributes']['hlist']}'"
-            logger.error(msg)
-            raise UserError(f"ERROR: {msg}")
-        prop["gui_attributes"]["hlist"] = deduced_list_name
-
-    return properties
-
-
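To make the rectification concrete, here is a hypothetical "lists"/"properties" snippet and the outcome the removed function above would produce (all names and labels are invented for illustration):

```python
# Hypothetical project data; "Seasons" is an English label, not a list name.
lists = [{"name": "seasons", "labels": {"en": "Seasons", "de": "Jahreszeiten"}, "nodes": []}]
properties = [
    {
        "name": "hasSeason",
        "super": ["hasValue"],
        "object": "ListValue",
        "gui_element": "List",
        "gui_attributes": {"hlist": "Seasons"},
    }
]
# _rectify_hlist_of_properties(lists, properties) would print an INFO message and
# rewrite the property to {"gui_attributes": {"hlist": "seasons"}, ...};
# an "hlist" matching neither a list name nor a label raises UserError.
```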
-def create_project(
-    project_file_as_path_or_parsed: str | Path | dict[str, Any],
-    creds: ServerCredentials,
-    verbose: bool = False,
-) -> bool:
-    """
-    Creates a project from a JSON project file on a DSP server.
-    A project must contain at least one ontology,
-    and it may contain lists, users, and groups.
-    Severe errors lead to a BaseError,
-    while other errors are printed without interrupting the process.
-
-    Args:
-        project_file_as_path_or_parsed: path to the JSON project definition, or parsed JSON object
-        creds: credentials to connect to the DSP server
-        verbose: prints more information if set to True
-
-    Raises:
-        UserError:
-            - if the project cannot be created
-            - if the login fails
-            - if an ontology cannot be created
-
-        BaseError:
-            - if the input is invalid
-            - if an Excel file referenced in the "lists" section cannot be expanded
-            - if the validation doesn't pass
-
-    Returns:
-        True if everything went smoothly, False if a warning or error occurred
-    """
-
-    knora_api_prefix = "knora-api:"
-    overall_success = True
-
-    project_json = parse_json_input(project_file_as_path_or_parsed=project_file_as_path_or_parsed)
-
-    context = Context(project_json.get("prefixes", {}))
-
-    project_definition = _prepare_and_validate_project(project_json)
-
-    all_lists = _get_all_lists(project_json)
-
-    all_ontos = _get_all_ontos(project_json, all_lists)
-
-    # establish connection to DSP server
-    con = ConnectionLive(creds.server)
-    con.login(creds.user, creds.password)
-
-    # create project on DSP server
-    info_str = f"Create project '{project_definition.shortname}' ({project_definition.shortcode})..."
-    print(info_str)
-    logger.info(info_str)
-    project_remote, success = _create_project_on_server(
-        project_definition=project_definition,
-        con=con,
-    )
-    if not success:
-        overall_success = False
-
-    # create the lists
-    names_and_iris_of_list_nodes: dict[str, Any] = {}
-    if all_lists:
-        print("Create lists...")
-        logger.info("Create lists...")
-        names_and_iris_of_list_nodes, success = create_lists_on_server(
-            lists_to_create=all_lists,
-            con=con,
-            project_remote=project_remote,
-        )
-        if not success:
-            overall_success = False
-
-    # create the groups
-    current_project_groups: dict[str, Group] = {}
-    if project_definition.groups:
-        print("Create groups...")
-        logger.info("Create groups...")
-        current_project_groups, success = _create_groups(
-            con=con,
-            groups=project_definition.groups,
-            project=project_remote,
-        )
-        if not success:
-            overall_success = False
-
-    # create or update the users
-    if project_definition.users:
-        print("Create users...")
-        logger.info("Create users...")
-        success = _create_users(
-            con=con,
-            users_section=project_definition.users,
-            current_project_groups=current_project_groups,
-            current_project=project_remote,
-            verbose=verbose,
-        )
-        if not success:
-            overall_success = False
-
-    # create the ontologies
-    success = _create_ontologies(
-        con=con,
-        context=context,
-        knora_api_prefix=knora_api_prefix,
-        names_and_iris_of_list_nodes=names_and_iris_of_list_nodes,
-        ontology_definitions=all_ontos,
-        project_remote=project_remote,
-        verbose=verbose,
-    )
-    if not success:
-        overall_success = False
-
-    # final steps
-    if overall_success:
-        msg = (
-            f"Successfully created project '{project_definition.shortname}' "
-            f"({project_definition.shortcode}) with all its ontologies. "
-            f"There were no problems during the creation process."
-        )
-        print(f"========================================================\n{msg}")
-        logger.info(msg)
-    else:
-        msg = (
-            f"The project '{project_definition.shortname}' ({project_definition.shortcode}) "
-            f"with its ontologies could be created, "
-            f"but during the creation process, some problems occurred. Please carefully check the console output."
-        )
-        print(f"========================================================\nWARNING: {msg}")
-        logger.opt(exception=True).warning(msg)
-
-    return overall_success
-
-
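For context, a hedged sketch of how the removed `create_project()` entry point was typically invoked. The import paths reflect the old module layout and are assumptions, as are the credentials:

```python
# Assumed import paths of the pre-refactoring layout; adjust to your installed version.
from dsp_tools.cli.args import ServerCredentials
from dsp_tools.commands.project.create.project_create import create_project

creds = ServerCredentials(
    user="root@example.com",       # placeholder credentials
    password="test",
    server="http://0.0.0.0:3333",  # local DSP stack
)
ok = create_project("project_definition.json", creds, verbose=True)
print("all steps succeeded:", ok)
```

The boolean return distinguishes a clean run from one where individual steps (lists, groups, users, ontologies) logged warnings but the process kept going.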
-def _prepare_and_validate_project(
-    project_json: dict[str, Any],
-) -> ProjectDefinition:
-    project_def = ProjectDefinition(
-        shortcode=project_json["project"]["shortcode"],
-        shortname=project_json["project"]["shortname"],
-        longname=project_json["project"]["longname"],
-        keywords=project_json["project"].get("keywords"),
-        descriptions=project_json["project"].get("descriptions"),
-        groups=project_json["project"].get("groups"),
-        users=project_json["project"].get("users"),
-    )
-
-    # validate against JSON schema
-    validate_project(project_json, expand_lists=False)
-    print(" JSON project file is syntactically correct and passed validation.")
-    logger.info("JSON project file is syntactically correct and passed validation.")
-
-    return project_def
-
-
-def _get_all_lists(project_json: dict[str, Any]) -> list[dict[str, Any]] | None:
-    # expand the Excel files referenced in the "lists" section of the project, if any
-    if all_lists := expand_lists_from_excel(project_json.get("project", {}).get("lists", [])):
-        return all_lists
-    new_lists: list[dict[str, Any]] | None = project_json["project"].get("lists")
-    return new_lists
-
-
-def _get_all_ontos(project_json: dict[str, Any], all_lists: list[dict[str, Any]] | None) -> list[dict[str, Any]]:
-    all_ontos: list[dict[str, Any]] = project_json["project"]["ontologies"]
-    if all_lists is None:
-        return all_ontos
-    # rectify the "hlist" of the "gui_attributes" of the properties
-    for onto in all_ontos:
-        if onto.get("properties"):
-            onto["properties"] = _rectify_hlist_of_properties(
-                lists=all_lists,
-                properties=onto["properties"],
-            )
-    return all_ontos
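The `names_and_iris_of_list_nodes` dict returned by `create_lists_on_server()` is what later lets the property-creation step at the top of this section turn an `hlist` name into an IRI reference. A sketch of the expected shape (IRIs and nesting are illustrative, not values from a real server):

```python
# Illustrative shape only; the actual IRIs come from the DSP server.
names_and_iris_of_list_nodes = {
    "seasons": {
        "id": "http://rdfh.ch/lists/0001/abcdef",
        "nodes": {"spring": {"id": "http://rdfh.ch/lists/0001/ghijkl"}},
    }
}

gui_attributes = {"hlist": "seasons"}
list_iri = names_and_iris_of_list_nodes[gui_attributes["hlist"]]["id"]
gui_attributes["hlist"] = f"<{list_iri}>"  # "<http://rdfh.ch/lists/0001/abcdef>"
```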