das-cli 1.0.15.tar.gz → 1.1.0.tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of das-cli might be problematic.
- {das_cli-1.0.15/das_cli.egg-info → das_cli-1.1.0}/PKG-INFO +8 -1
- {das_cli-1.0.15 → das_cli-1.1.0}/README.md +7 -0
- {das_cli-1.0.15 → das_cli-1.1.0}/das/cli.py +35 -0
- das_cli-1.1.0/das/managers/digital_objects_manager.py +84 -0
- {das_cli-1.0.15 → das_cli-1.1.0}/das/managers/search_manager.py +0 -1
- das_cli-1.1.0/das/services/digital_objects.py +142 -0
- {das_cli-1.0.15 → das_cli-1.1.0/das_cli.egg-info}/PKG-INFO +8 -1
- {das_cli-1.0.15 → das_cli-1.1.0}/pyproject.toml +1 -1
- das_cli-1.0.15/das/managers/digital_objects_manager.py +0 -46
- das_cli-1.0.15/das/services/digital_objects.py +0 -44
- {das_cli-1.0.15 → das_cli-1.1.0}/LICENSE +0 -0
- {das_cli-1.0.15 → das_cli-1.1.0}/MANIFEST.in +0 -0
- {das_cli-1.0.15 → das_cli-1.1.0}/das/__init__.py +0 -0
- {das_cli-1.0.15 → das_cli-1.1.0}/das/ai/plugins/dasai.py +0 -0
- {das_cli-1.0.15 → das_cli-1.1.0}/das/ai/plugins/entries/entries_plugin.py +0 -0
- {das_cli-1.0.15 → das_cli-1.1.0}/das/app.py +0 -0
- {das_cli-1.0.15 → das_cli-1.1.0}/das/authentication/auth.py +0 -0
- {das_cli-1.0.15 → das_cli-1.1.0}/das/authentication/secure_input.py +0 -0
- {das_cli-1.0.15 → das_cli-1.1.0}/das/common/api.py +0 -0
- {das_cli-1.0.15 → das_cli-1.1.0}/das/common/config.py +0 -0
- {das_cli-1.0.15 → das_cli-1.1.0}/das/common/entry_fields_constants.py +0 -0
- {das_cli-1.0.15 → das_cli-1.1.0}/das/common/enums.py +0 -0
- {das_cli-1.0.15 → das_cli-1.1.0}/das/common/file_utils.py +0 -0
- {das_cli-1.0.15 → das_cli-1.1.0}/das/managers/__init__.py +0 -0
- {das_cli-1.0.15 → das_cli-1.1.0}/das/managers/download_manager.py +0 -0
- {das_cli-1.0.15 → das_cli-1.1.0}/das/managers/entries_manager.py +0 -0
- {das_cli-1.0.15 → das_cli-1.1.0}/das/services/attributes.py +0 -0
- {das_cli-1.0.15 → das_cli-1.1.0}/das/services/cache.py +0 -0
- {das_cli-1.0.15 → das_cli-1.1.0}/das/services/downloads.py +0 -0
- {das_cli-1.0.15 → das_cli-1.1.0}/das/services/entries.py +0 -0
- {das_cli-1.0.15 → das_cli-1.1.0}/das/services/entry_fields.py +0 -0
- {das_cli-1.0.15 → das_cli-1.1.0}/das/services/hangfire.py +0 -0
- {das_cli-1.0.15 → das_cli-1.1.0}/das/services/search.py +0 -0
- {das_cli-1.0.15 → das_cli-1.1.0}/das/services/users.py +0 -0
- {das_cli-1.0.15 → das_cli-1.1.0}/das_cli.egg-info/SOURCES.txt +0 -0
- {das_cli-1.0.15 → das_cli-1.1.0}/das_cli.egg-info/dependency_links.txt +0 -0
- {das_cli-1.0.15 → das_cli-1.1.0}/das_cli.egg-info/entry_points.txt +0 -0
- {das_cli-1.0.15 → das_cli-1.1.0}/das_cli.egg-info/requires.txt +0 -0
- {das_cli-1.0.15 → das_cli-1.1.0}/das_cli.egg-info/top_level.txt +0 -0
- {das_cli-1.0.15 → das_cli-1.1.0}/setup.cfg +0 -0
{das_cli-1.0.15/das_cli.egg-info → das_cli-1.1.0}/PKG-INFO

````diff
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: das-cli
-Version: 1.0
+Version: 1.1.0
 Summary: DAS api client.
 Author: Royal Netherlands Institute for Sea Research
 License-Expression: MIT
@@ -166,6 +166,13 @@ das entry update --attribute <AttributeName> [--code CODE] <file_path>
 # das entry update --attribute core --data [{ 'Code': 'ENT001' }, { 'Code': 'ENT002' }]
 ```
 
+#### Upload and link a digital object
+
+```bash
+# Upload a file as a digital object and link it to an entry
+das entry upload-digital-object --entry-code ENT001 --type Dataset --description "CTD raw" c:\data\ctd.zip
+```
+
 #### Link or unlink digital objects
 
 ```bash
````
{das_cli-1.0.15 → das_cli-1.1.0}/README.md

````diff
@@ -143,6 +143,13 @@ das entry update --attribute <AttributeName> [--code CODE] <file_path>
 # das entry update --attribute core --data [{ 'Code': 'ENT001' }, { 'Code': 'ENT002' }]
 ```
 
+#### Upload and link a digital object
+
+```bash
+# Upload a file as a digital object and link it to an entry
+das entry upload-digital-object --entry-code ENT001 --type Dataset --description "CTD raw" c:\data\ctd.zip
+```
+
 #### Link or unlink digital objects
 
 ```bash
````
{das_cli-1.0.15 → das_cli-1.1.0}/das/cli.py

````diff
@@ -551,6 +551,41 @@ def create_entry(das_ctx, attribute, file_path=None, data=None):
     except Exception as e:
         click.secho(f"Error: {e}", fg="red")
 
+@entry.command("upload-digital-object")
+@click.option('--entry-code', required=True, help='Entry code to attach the digital object to')
+@click.option('--type', 'digital_object_type', required=True, help='Digital object type name (e.g., Dataset, File, Image)')
+@click.option('--description', 'file_description', default='', help='Description for the uploaded file')
+@click.argument('file_path', required=True)
+@pass_das_context
+def upload_digital_object(das_ctx, entry_code, digital_object_type, file_description, file_path):
+    """Upload a file as a digital object and link it to an entry.
+
+    Examples:
+
+    \b
+    # Upload a dataset file and link to an entry
+    das entry upload-digital-object --entry-code ENT001 --type Dataset --description "CTD raw" c:\\data\\ctd.zip
+    """
+    try:
+        # Ensure services are initialized
+        das_ctx.get_client()
+
+        # Perform upload and link
+        digital_object_id = das_ctx.digital_objects_manager.upload_digital_object(
+            entry_code=entry_code,
+            file_description=file_description,
+            digital_object_type=digital_object_type,
+            file_path=file_path,
+        )
+
+        if digital_object_id:
+            click.secho("✓ Digital object uploaded and linked successfully!", fg="green")
+            click.echo(f"Digital Object ID: {digital_object_id}")
+        else:
+            click.secho("Upload completed but no ID was returned.", fg="yellow")
+    except Exception as e:
+        click.secho(f"Error: {e}", fg="red")
+
 @entry.command("get")
 @click.option('--code', default=None, help='Entry code')
 @click.option('--id', type=int, default=None, help='Entry ID')
````
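The new `upload-digital-object` subcommand can also be exercised programmatically with click's test runner. The sketch below assumes the top-level click group is importable as `das.cli.cli` (the exact attribute name is an assumption, not confirmed by this diff); the entry code, type, and file path are placeholders.

```python
# Minimal sketch: driving the new subcommand via click's CliRunner.
from click.testing import CliRunner

from das.cli import cli  # assumed name of the top-level click group

runner = CliRunner()
result = runner.invoke(
    cli,
    [
        "entry", "upload-digital-object",
        "--entry-code", "ENT001",      # placeholder entry code
        "--type", "Dataset",           # digital object type name
        "--description", "CTD raw",
        "ctd.zip",                     # placeholder path to the file to upload
    ],
)
print(result.output)
```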
das_cli-1.1.0/das/managers/digital_objects_manager.py (new file)

````diff
@@ -0,0 +1,84 @@
+import os
+import sys
+from das.common.config import load_api_url
+from das.services.search import SearchService
+from das.services.entries import EntriesService
+from das.services.digital_objects import DigitalObjectsService
+
+sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), "..")))
+
+class DigitalObjectsManager:
+    """Manager for digital objects."""
+
+    def __init__(self):
+        base_url = load_api_url()
+        if base_url is None or base_url == "":
+            raise ValueError(f"Base URL is required - {self.__class__.__name__}")
+
+        self.__attribute_id_digital_object_type = 5;
+        self.digital_objects_service = DigitalObjectsService(base_url)
+        self.entry_service = EntriesService(base_url)
+        self.search_service = SearchService(base_url)
+
+    def link_existing_digital_objects(
+        self, entry_code: str, digital_object_code_list: list[str], is_unlink: bool = False
+    ) -> bool:
+        """Attach or detach (unlink) digital objects to an entry using codes."""
+        entry_response = self.entry_service.get_entry(entry_code)
+
+        if entry_response is None:
+            raise ValueError(f"Entry with code '{entry_code}' not found")
+
+        entry_payload = entry_response.get("entry")
+        if entry_payload is None:
+            raise ValueError(f"Entry with code '{entry_code}' not found")
+
+        digital_object_id_list: list[str] = []
+
+        for code in digital_object_code_list:
+            do_response = self.entry_service.get_entry(code)
+            do_entry = do_response.get("entry") if do_response else None
+            if do_entry is None:
+                raise ValueError(f"Digital object with code '{code}' not found")
+            digital_object_id_list.append(do_entry.get("id"))
+
+        result = self.digital_objects_service.link_existing_digital_objects(
+            attribute_id=entry_response.get("attributeId"),
+            entry_id=entry_payload.get("id"),
+            digital_object_id_list=digital_object_id_list,
+            is_unlink=is_unlink,
+        )
+
+        return result
+
+    def upload_digital_object(self, entry_code: str, file_description: str, digital_object_type: str, file_path: str):
+        """Upload a digital object to the digital object service."""
+        response = self.search_service.search_entries(
+            queryString=f"displayname({digital_object_type})",
+            attributeId=self.__attribute_id_digital_object_type,
+            maxResultCount=1,
+            skipCount=0
+        )
+
+        entry_response = self.entry_service.get_entry(entry_code)
+        if entry_response is None:
+            raise ValueError(f"Entry with code '{entry_code}' not found")
+
+        if response.get('totalCount', 0) == 0:
+            raise ValueError(f"Digital object type '{digital_object_type}' not found")
+
+        digital_object_type_id = response.get('items', [])[0].get('entry').get('id')
+        digital_object_id = self.digital_objects_service.upload_digital_object(file_description, digital_object_type_id, file_path)
+
+        self.digital_objects_service.link_existing_digital_objects(
+            attribute_id=entry_response.get('attributeId'),
+            entry_id=entry_response.get('entry').get('id'),
+            digital_object_id_list=[digital_object_id]
+        )
+
+        return digital_object_id
+
+
+if __name__ == "__main__":
+    digital_objects_manager = DigitalObjectsManager()
+    digital_objects_manager.upload_digital_object(entry_code="zb.b.f7", file_description="test", digital_object_type="Dataset", file_path="my_new_file.txt")
````
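The manager ties the new pieces together: it resolves the type name to a type entry via the search service, uploads the file through `DigitalObjectsService`, then links the returned ID to the target entry. A minimal usage sketch, assuming the package is installed and an API URL and token are already configured through the CLI; the entry code and file path are placeholders.

```python
# Usage sketch for the new manager (placeholders for entry code and file path).
from das.managers.digital_objects_manager import DigitalObjectsManager

manager = DigitalObjectsManager()  # reads the API URL from the saved configuration
digital_object_id = manager.upload_digital_object(
    entry_code="ENT001",            # placeholder entry code
    file_description="CTD raw",
    digital_object_type="Dataset",  # must match an existing digital object type entry
    file_path="ctd.zip",            # placeholder file to upload
)
print(digital_object_id)
```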
das_cli-1.1.0/das/services/digital_objects.py (new file)

````diff
@@ -0,0 +1,142 @@
+import os
+import sys
+from math import ceil
+from os.path import exists
+import json
+from base64 import b64encode
+from das.common.api import post_data
+from das.common.config import load_token, load_verify_ssl
+from pathlib import Path
+import math
+import uuid
+import requests
+
+CHUNK_SIZE = 1000000 # 1MB
+class DigitalObjectsService:
+    def __init__(self, base_url):
+        self.base_url = f"{base_url}/api/services/app/DigitalObject"
+        # Common possible upload endpoints observed across deployments
+        self.upload_digital_object_url = f"{base_url}/File/UploadDigitalObject"
+
+    def link_existing_digital_objects(self, attribute_id: int, entry_id: str, digital_object_id_list: list[str], is_unlink: bool = False):
+        """Link existing digital objects to an entry."""
+        token = load_token()
+
+        if token is None or token == "":
+            raise ValueError("Authorization token is required")
+
+        headers = {
+            "Authorization": f"Bearer {token}",
+            "Content-Type": "application/json",
+        }
+
+        payload = {
+            "attributeId": attribute_id,
+            "attributeValueId": entry_id,
+            "digitalObjects": [],
+        }
+
+        for digital_object_id in digital_object_id_list:
+            payload["digitalObjects"].append(
+                {
+                    "attributeId": attribute_id,
+                    "attributeValueId": entry_id,
+                    "digitalObjectId": digital_object_id,
+                    "isDeleted": is_unlink,
+                }
+            )
+
+        response = post_data(
+            f"{self.base_url}/LinkExistingDigitalObject", data=payload, headers=headers
+        )
+
+        return response.get("success")
+
+    # This is our chunk reader. This is what gets the next chunk of data ready to send.
+    def __read_in_chunks(self, file_object, chunk_size):
+        while True:
+            data = file_object.read(chunk_size)
+            if not data:
+                break
+            yield data
+
+
+    def upload_digital_object(self, file_description: str, digital_object_type_id: str, file_path: str):
+
+        if not exists(file_path):
+            raise ValueError(f"File '{file_path}' does not exist")
+
+        head, tail = os.path.split(file_path)
+
+        metadata = {
+            "fileName": tail,
+            "fileSize": os.path.getsize(file_path),
+            "description": file_description,
+            "digitalObjectTypeId": digital_object_type_id,
+            "id": str(uuid.uuid4()).lower(),
+            "description": file_description,
+            "totalCount": ceil(os.path.getsize(file_path) / CHUNK_SIZE),
+            "index": 0,
+        }
+
+        binary_file = open(file_path, "rb")
+        index = 0
+        offset = 0
+        digital_object_id = None
+        headers = {}
+
+        try:
+            for chunk in self.__read_in_chunks(binary_file, CHUNK_SIZE):
+                offset = index + len(chunk)
+                headers['Content-Range'] = 'bytes %s-%s/%s' % (index, offset - 1, metadata.get('fileSize'))
+                index = offset
+                json_metadata = json.dumps(metadata)
+                base654_bytes = b64encode(json_metadata.encode('utf-8')).decode('ascii')
+                headers['metadata'] = base654_bytes
+
+                r = self.upload_file(chunk, metadata, headers)
+
+                if r.get('result', None) is None:
+                    continue
+
+                digital_object_id = r.get('result').get('id')
+                metadata['index'] = index + 1
+
+            binary_file.close()
+
+        except Exception as e:
+            raise ValueError(f"Error uploading file '{file_path}': {str(e)}")
+        finally:
+            binary_file.close()
+
+        return digital_object_id
+
+
+
+    def upload_file(self, file, body, headers):
+        """Upload a file to the digital object service."""
+        token = load_token()
+        headers.update({
+            "Accept": "application/json",
+            "Authorization": f"Bearer {token}",
+            # Do NOT set Content-Type here when sending files; requests will set proper multipart boundary
+        })
+
+        files = {
+            "file": ("chunk", file, "application/octet-stream"),
+        }
+
+        try:
+            response = requests.post(self.upload_digital_object_url, headers=headers, files=files, verify=load_verify_ssl())
+            response.raise_for_status()
+            if response.status_code == 200:
+                return response.json()
+            else:
+                raise ValueError(f"Error uploading file: {response.status_code} - {response.text}")
+        except requests.RequestException as e:
+            raise ValueError(f"Error uploading file: {str(e)}")
+
+
+
+
+
````
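The upload loop above sends the file in 1 MB chunks, advertising each chunk's byte span in a `Content-Range` header and carrying the base64-encoded metadata alongside it. The standalone sketch below (a hypothetical 2.5 MB file, no network calls) illustrates how those byte ranges are derived from the running offset:

```python
# Illustration only: how the chunk loop maps a file onto Content-Range headers.
from math import ceil

CHUNK_SIZE = 1000000   # 1 MB, matching the service above
file_size = 2500000    # hypothetical 2.5 MB file

index = 0
for chunk_number in range(ceil(file_size / CHUNK_SIZE)):
    chunk_len = min(CHUNK_SIZE, file_size - index)   # last chunk may be shorter
    offset = index + chunk_len
    content_range = f"bytes {index}-{offset - 1}/{file_size}"
    index = offset
    print(chunk_number, content_range)
# 0 bytes 0-999999/2500000
# 1 bytes 1000000-1999999/2500000
# 2 bytes 2000000-2499999/2500000
```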
{das_cli-1.0.15 → das_cli-1.1.0/das_cli.egg-info}/PKG-INFO

````diff
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: das-cli
-Version: 1.0
+Version: 1.1.0
 Summary: DAS api client.
 Author: Royal Netherlands Institute for Sea Research
 License-Expression: MIT
@@ -166,6 +166,13 @@ das entry update --attribute <AttributeName> [--code CODE] <file_path>
 # das entry update --attribute core --data [{ 'Code': 'ENT001' }, { 'Code': 'ENT002' }]
 ```
 
+#### Upload and link a digital object
+
+```bash
+# Upload a file as a digital object and link it to an entry
+das entry upload-digital-object --entry-code ENT001 --type Dataset --description "CTD raw" c:\data\ctd.zip
+```
+
 #### Link or unlink digital objects
 
 ```bash
````
das_cli-1.0.15/das/managers/digital_objects_manager.py (removed)

````diff
@@ -1,46 +0,0 @@
-from das.common.config import load_api_url
-from das.services.entries import EntriesService
-from das.services.digital_objects import DigitalObjectsService
-
-
-class DigitalObjectsManager:
-    def __init__(self):
-        base_url = load_api_url()
-        if base_url is None or base_url == "":
-            raise ValueError(f"Base URL is required - {self.__class__.__name__}")
-
-        self.digital_objects_service = DigitalObjectsService(base_url)
-        self.entry_service = EntriesService(base_url)
-
-    def link_existing_digital_objects(
-        self, entry_code: str, digital_object_code_list: list[str], is_unlink: bool = False
-    ) -> bool:
-        """Attach or detach (unlink) digital objects to an entry using codes."""
-        entry_response = self.entry_service.get_entry(entry_code)
-
-        if entry_response is None:
-            raise ValueError(f"Entry with code '{entry_code}' not found")
-
-        entry_payload = entry_response.get("entry")
-        if entry_payload is None:
-            raise ValueError(f"Entry with code '{entry_code}' not found")
-
-        digital_object_id_list: list[str] = []
-
-        for code in digital_object_code_list:
-            do_response = self.entry_service.get_entry(code)
-            do_entry = do_response.get("entry") if do_response else None
-            if do_entry is None:
-                raise ValueError(f"Digital object with code '{code}' not found")
-            digital_object_id_list.append(do_entry.get("id"))
-
-        result = self.digital_objects_service.link_existing_digital_objects(
-            attribute_id=entry_response.get("attributeId"),
-            entry_id=entry_payload.get("id"),
-            digital_object_id_list=digital_object_id_list,
-            is_unlink=is_unlink,
-        )
-
-        return result
-
-
````
das_cli-1.0.15/das/services/digital_objects.py (removed)

````diff
@@ -1,44 +0,0 @@
-from das.common.api import post_data
-from das.common.config import load_token
-
-
-class DigitalObjectsService:
-    def __init__(self, base_url):
-        self.base_url = f"{base_url}/api/services/app/DigitalObject"
-
-    def link_existing_digital_objects(self, attribute_id: int, entry_id: str, digital_object_id_list: list[str], is_unlink: bool = False):
-        """Link existing digital objects to an entry."""
-        token = load_token()
-
-        if token is None or token == "":
-            raise ValueError("Authorization token is required")
-
-        headers = {
-            "Authorization": f"Bearer {token}",
-            "Content-Type": "application/json",
-        }
-
-        payload = {
-            "attributeId": attribute_id,
-            "attributeValueId": entry_id,
-            "digitalObjects": [],
-        }
-
-        for digital_object_id in digital_object_id_list:
-            payload["digitalObjects"].append(
-                {
-                    "attributeId": attribute_id,
-                    "attributeValueId": entry_id,
-                    "digitalObjectId": digital_object_id,
-                    "isDeleted": is_unlink,
-                }
-            )
-
-        response = post_data(
-            f"{self.base_url}/LinkExistingDigitalObject", data=payload, headers=headers
-        )
-
-        return response.get("success")
-
-
-
````