albert 1.10.0rc2__py3-none-any.whl → 1.11.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- albert/__init__.py +1 -1
- albert/client.py +5 -0
- albert/collections/custom_templates.py +3 -0
- albert/collections/data_templates.py +118 -264
- albert/collections/entity_types.py +19 -3
- albert/collections/inventory.py +1 -1
- albert/collections/notebooks.py +154 -26
- albert/collections/parameters.py +1 -0
- albert/collections/property_data.py +384 -280
- albert/collections/reports.py +4 -0
- albert/collections/synthesis.py +292 -0
- albert/collections/tasks.py +2 -1
- albert/collections/worksheets.py +3 -0
- albert/core/shared/models/base.py +3 -1
- albert/core/shared/models/patch.py +1 -1
- albert/resources/batch_data.py +4 -2
- albert/resources/cas.py +3 -1
- albert/resources/custom_fields.py +3 -1
- albert/resources/data_templates.py +60 -12
- albert/resources/inventory.py +6 -4
- albert/resources/lists.py +3 -1
- albert/resources/notebooks.py +12 -7
- albert/resources/parameter_groups.py +3 -1
- albert/resources/property_data.py +64 -5
- albert/resources/sheets.py +16 -14
- albert/resources/synthesis.py +61 -0
- albert/resources/tags.py +3 -1
- albert/resources/tasks.py +4 -7
- albert/resources/workflows.py +4 -2
- albert/utils/data_template.py +392 -37
- albert/utils/property_data.py +638 -0
- albert/utils/tasks.py +3 -3
- {albert-1.10.0rc2.dist-info → albert-1.11.0.dist-info}/METADATA +1 -1
- {albert-1.10.0rc2.dist-info → albert-1.11.0.dist-info}/RECORD +36 -33
- {albert-1.10.0rc2.dist-info → albert-1.11.0.dist-info}/WHEEL +0 -0
- {albert-1.10.0rc2.dist-info → albert-1.11.0.dist-info}/licenses/LICENSE +0 -0
albert/__init__.py
CHANGED
albert/client.py
CHANGED
@@ -38,6 +38,7 @@ from albert.collections.roles import RoleCollection
 from albert.collections.storage_classes import StorageClassesCollection
 from albert.collections.storage_locations import StorageLocationsCollection
 from albert.collections.substance import SubstanceCollection
+from albert.collections.synthesis import SynthesisCollection
 from albert.collections.tags import TagCollection
 from albert.collections.tasks import TaskCollection
 from albert.collections.un_numbers import UnNumberCollection
@@ -188,6 +189,10 @@ class Albert:
     def lots(self) -> LotCollection:
         return LotCollection(session=self.session)

+    @property
+    def synthesis(self) -> SynthesisCollection:
+        return SynthesisCollection(session=self.session)
+
     @property
     def units(self) -> UnitCollection:
         return UnitCollection(session=self.session)
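Note: the additions above wire the new SynthesisCollection into the Albert client. A minimal sketch of reaching it, assuming a client constructed as in 1.10.x (client construction and the SynthesisCollection methods themselves are not part of this diff):

```python
# Sketch only: Albert() construction arguments are unchanged from 1.10.x and are
# not shown in this diff; adjust to your own authentication setup.
from albert.client import Albert
from albert.collections.synthesis import SynthesisCollection

client = Albert()  # hypothetical construction
synthesis = client.synthesis  # new in 1.11.0; wraps SynthesisCollection(session=client.session)
assert isinstance(synthesis, SynthesisCollection)
```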
albert/collections/custom_templates.py
CHANGED
@@ -1,5 +1,7 @@
 from collections.abc import Iterator

+from pydantic import validate_call
+
 from albert.collections.base import BaseCollection
 from albert.core.logging import logger
 from albert.core.pagination import AlbertPaginator
@@ -28,6 +30,7 @@ class CustomTemplatesCollection(BaseCollection):
         super().__init__(session=session)
         self.base_path = f"/api/{CustomTemplatesCollection._api_version}/customtemplates"

+    @validate_call
     def get_by_id(self, *, id: CustomTemplateId) -> CustomTemplate:
         """Get a Custom Template by ID

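Note: this release adds pydantic's `@validate_call` to many collection methods (here and in data_templates.py and entity_types.py below), so keyword arguments are checked against their annotated ID types before any HTTP request is made. A hedged sketch of the observable effect, assuming a constructed `client`:

```python
# An argument that does not satisfy the CustomTemplateId annotation now raises
# pydantic.ValidationError inside the SDK, before any request is sent.
from pydantic import ValidationError

try:
    client.custom_templates.get_by_id(id=12345)  # wrong type for CustomTemplateId
except ValidationError as exc:
    print(exc)
```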
albert/collections/data_templates.py
CHANGED
@@ -1,17 +1,14 @@
 from collections.abc import Iterator
 from itertools import islice
-from pathlib import Path

 from pydantic import Field, validate_call

-from albert.collections.attachments import AttachmentCollection
 from albert.collections.base import BaseCollection
-from albert.collections.files import FileCollection
 from albert.core.logging import logger
 from albert.core.pagination import AlbertPaginator
 from albert.core.session import AlbertSession
 from albert.core.shared.enums import OrderBy, PaginationMode
-from albert.core.shared.identifiers import
+from albert.core.shared.identifiers import DataColumnId, DataTemplateId, UserId
 from albert.core.shared.models.patch import (
     GeneralPatchDatum,
     GeneralPatchPayload,
@@ -20,25 +17,21 @@ from albert.core.shared.models.patch import (
 )
 from albert.exceptions import AlbertHTTPError
 from albert.resources.data_templates import (
+    CurveExample,
     DataColumnValue,
     DataTemplate,
     DataTemplateSearchItem,
+    ImageExample,
     ParameterValue,
 )
 from albert.resources.parameter_groups import DataType, EnumValidationValue, ValueValidation
-from albert.resources.tasks import ImportMode
 from albert.utils._patch import generate_data_template_patches
 from albert.utils.data_template import (
-
-
-
-    exec_curve_script,
-    get_script_attachment,
+    add_parameter_enums,
+    build_curve_example,
+    build_image_example,
     get_target_data_column,
-    prepare_curve_input_attachment,
-    validate_data_column_type,
 )
-from albert.utils.tasks import CSV_EXTENSIONS, fetch_csv_table_rows


 class DCPatchDatum(PGPatchPayload):
@@ -97,255 +90,6 @@ class DataTemplateCollection(BaseCollection):
         else:
             return self.add_parameters(data_template_id=dt.id, parameters=parameter_values)

-    @validate_call
-    def import_curve_data(
-        self,
-        *,
-        data_template_id: DataTemplateId,
-        data_column_id: DataColumnId | None = None,
-        data_column_name: str | None = None,
-        mode: ImportMode = ImportMode.CSV,
-        field_mapping: dict[str, str] | None = None,
-        file_path: str | Path | None = None,
-        attachment_id: AttachmentId | None = None,
-    ) -> DataTemplate:
-        """Import curve data in a data template column.
-
-        Parameters
-        ----------
-        data_template_id : DataTemplateId
-            Target data template Id.
-        data_column_id : DataColumnId | None, optional
-            Specific data column to upload the curve data to. Provide exactly one of ``data_column_id`` or
-            ``data_column_name``.
-        data_column_name : str | None, optional
-            Case-insensitive data column display name. Provide exactly one of the column
-            identifier or name.
-        mode : ImportMode, optional
-            Import mode. ``ImportMode.SCRIPT`` runs the attached automation script and requires
-            a script attachment on the data template; ``ImportMode.CSV`` ingests the
-            uploaded CSV directly. Defaults to ``ImportMode.CSV``.
-        field_mapping : dict[str, str] | None, optional
-            Optional manual mapping from CSV headers to curve result column names on the target column. Example: ``{"visc": "Viscosity"}`` maps the
-            "visc" CSV header to the "Viscosity" curve result column. Mappings are matched
-            case-insensitively and override auto-detection. In ``ImportMode.SCRIPT`` this applies to the headers emitted by the script before ingestion.
-        attachment_id : AttachmentId | None, optional
-            Existing attachment to use. Exactly one of ``attachment_id`` or ``file_path`` must be provided.
-        file_path : str | Path | None, optional
-            Local file to upload and attach to a new note on the data template. Exactly one of
-            ``attachment_id`` or ``file_path`` must be provided.
-        Returns
-        -------
-        DataTemplate
-            The data template refreshed after the curve import job completes.
-
-        Examples
-        --------
-        !!! example "Import curve data from a CSV file"
-            ```python
-            dt = client.data_templates.import_curve_data(
-                data_template_id="DT123",
-                data_column_name="APHA Color",
-                mode=ImportMode.CSV,
-                file_path="path/to/curve.csv",
-                field_mapping={"visc": "Viscosity"},
-            )
-            ```
-        """
-        data_template = self.get_by_id(id=data_template_id)
-        target_column = get_target_data_column(
-            data_template=data_template,
-            data_template_id=data_template_id,
-            data_column_id=data_column_id,
-            data_column_name=data_column_name,
-        )
-        validate_data_column_type(target_column=target_column)
-        column_id = target_column.data_column_id
-        attachment_collection = AttachmentCollection(session=self.session)
-
-        script_attachment_signed_url: str | None = None
-
-        if mode is ImportMode.SCRIPT:
-            script_attachment, script_extensions = get_script_attachment(
-                attachment_collection=attachment_collection,
-                data_template_id=data_template_id,
-                column_id=column_id,
-            )
-            if not script_extensions:
-                raise ValueError("Script attachment must define allowed extensions.")
-            script_attachment_signed_url = script_attachment.signed_url
-            allowed_extensions = set(script_extensions)
-        else:
-            allowed_extensions = set(CSV_EXTENSIONS)
-        raw_attachment = prepare_curve_input_attachment(
-            attachment_collection=attachment_collection,
-            data_template_id=data_template_id,
-            column_id=column_id,
-            allowed_extensions=allowed_extensions,
-            file_path=file_path,
-            attachment_id=attachment_id,
-            require_signed_url=mode is ImportMode.SCRIPT,
-        )
-        raw_key = raw_attachment.key
-        if raw_attachment.id is None:
-            raise ValueError("Curve input attachment did not return an identifier.")
-        resolved_attachment_id = AttachmentId(raw_attachment.id)
-
-        processed_input_key: str = raw_key
-        column_headers: dict[str, str] = {}
-
-        if mode is ImportMode.SCRIPT:
-            file_collection = FileCollection(session=self.session)
-            processed_input_key, column_headers = exec_curve_script(
-                session=self.session,
-                api_version=self._api_version,
-                data_template_id=data_template_id,
-                column_id=column_id,
-                raw_attachment=raw_attachment,
-                file_collection=file_collection,
-                script_attachment_signed_url=script_attachment_signed_url,
-            )
-        else:
-            table_rows = fetch_csv_table_rows(
-                session=self.session,
-                attachment_id=resolved_attachment_id,
-                headers_only=True,
-            )
-            header_row = table_rows[0]
-            if not isinstance(header_row, dict):
-                raise ValueError("Unexpected CSV header format returned by preview endpoint.")
-            column_headers = {
-                key: value
-                for key, value in header_row.items()
-                if isinstance(key, str) and isinstance(value, str) and value
-            }
-
-        csv_mapping = derive_curve_csv_mapping(
-            target_column=target_column,
-            column_headers=column_headers,
-            field_mapping=field_mapping,
-        )
-
-        job_id, partition_uuid, s3_output_key = create_curve_import_job(
-            session=self.session,
-            data_template_id=data_template_id,
-            column_id=column_id,
-            csv_mapping=csv_mapping,
-            raw_attachment=raw_attachment,
-            processed_input_key=processed_input_key,
-        )
-
-        patch_payload = build_curve_import_patch_payload(
-            target_column=target_column,
-            job_id=job_id,
-            csv_mapping=csv_mapping,
-            raw_attachment=raw_attachment,
-            partition_uuid=partition_uuid,
-            s3_output_key=s3_output_key,
-        )
-        self.session.patch(
-            f"{self.base_path}/{data_template_id}",
-            json=patch_payload.model_dump(by_alias=True, mode="json", exclude_none=True),
-        )
-
-        return self.get_by_id(id=data_template_id)
-
-    def _add_param_enums(
-        self,
-        *,
-        data_template_id: DataTemplateId,
-        new_parameters: list[ParameterValue],
-    ) -> list[EnumValidationValue]:
-        """Adds enum values to a parameter."""
-
-        data_template = self.get_by_id(id=data_template_id)
-        existing_parameters = data_template.parameter_values
-        enums_by_sequence = {}
-        for parameter in new_parameters:
-            this_sequence = next(
-                (
-                    p.sequence
-                    for p in existing_parameters
-                    if p.id == parameter.id and p.short_name == parameter.short_name
-                ),
-                None,
-            )
-            enum_patches = []
-            if (
-                parameter.validation
-                and len(parameter.validation) > 0
-                and isinstance(parameter.validation[0].value, list)
-            ):
-                existing_validation = (
-                    [x for x in existing_parameters if x.sequence == parameter.sequence]
-                    if existing_parameters
-                    else []
-                )
-                existing_enums = (
-                    [
-                        x
-                        for x in existing_validation[0].validation[0].value
-                        if isinstance(x, EnumValidationValue) and x.id is not None
-                    ]
-                    if (
-                        existing_validation
-                        and len(existing_validation) > 0
-                        and existing_validation[0].validation
-                        and len(existing_validation[0].validation) > 0
-                        and existing_validation[0].validation[0].value
-                        and isinstance(existing_validation[0].validation[0].value, list)
-                    )
-                    else []
-                )
-                updated_enums = (
-                    [
-                        x
-                        for x in parameter.validation[0].value
-                        if isinstance(x, EnumValidationValue)
-                    ]
-                    if parameter.validation[0].value
-                    else []
-                )
-
-                deleted_enums = [
-                    x for x in existing_enums if x.id not in [y.id for y in updated_enums]
-                ]
-
-                new_enums = [
-                    x for x in updated_enums if x.id not in [y.id for y in existing_enums]
-                ]
-
-                matching_enums = [
-                    x for x in updated_enums if x.id in [y.id for y in existing_enums]
-                ]
-
-                for new_enum in new_enums:
-                    enum_patches.append({"operation": "add", "text": new_enum.text})
-                for deleted_enum in deleted_enums:
-                    enum_patches.append({"operation": "delete", "id": deleted_enum.id})
-                for matching_enum in matching_enums:
-                    if (
-                        matching_enum.text
-                        != [x for x in existing_enums if x.id == matching_enum.id][0].text
-                    ):
-                        enum_patches.append(
-                            {
-                                "operation": "update",
-                                "id": matching_enum.id,
-                                "text": matching_enum.text,
-                            }
-                        )
-
-            if len(enum_patches) > 0:
-                enum_response = self.session.put(
-                    f"{self.base_path}/{data_template_id}/parameters/{this_sequence}/enums",
-                    json=enum_patches,
-                )
-                enums_by_sequence[this_sequence] = [
-                    EnumValidationValue(**x) for x in enum_response.json()
-                ]
-        return enums_by_sequence
-
     @validate_call
     def get_by_id(self, *, id: DataTemplateId) -> DataTemplate:
         """Get a data template by its ID.
@@ -363,6 +107,7 @@ class DataTemplateCollection(BaseCollection):
         response = self.session.get(f"{self.base_path}/{id}")
         return DataTemplate(**response.json())

+    @validate_call
     def get_by_ids(self, *, ids: list[DataTemplateId]) -> list[DataTemplate]:
         """Get a list of data templates by their IDs.

@@ -402,6 +147,7 @@ class DataTemplateCollection(BaseCollection):
                 return t.hydrate()
         return None

+    @validate_call
     def add_data_columns(
         self, *, data_template_id: DataTemplateId, data_columns: list[DataColumnValue]
     ) -> DataTemplate:
@@ -445,6 +191,7 @@ class DataTemplateCollection(BaseCollection):
         )
         return self.get_by_id(id=data_template_id)

+    @validate_call
     def add_parameters(
         self, *, data_template_id: DataTemplateId, parameters: list[ParameterValue]
     ) -> DataTemplate:
@@ -493,7 +240,9 @@ class DataTemplateCollection(BaseCollection):
             if param.id in initial_enum_values:
                 param.validation[0].value = initial_enum_values[param.id]
                 param.validation[0].datatype = DataType.ENUM
-
+            add_parameter_enums(
+                session=self.session,
+                base_path=self.base_path,
                 data_template_id=data_template_id,
                 new_parameters=[param],
             )
@@ -564,6 +313,12 @@ class DataTemplateCollection(BaseCollection):
         -------
         DataTemplate
             The Updated DataTemplate object.
+
+        Warnings
+        --------
+        Only scalar data column values (text, number, dropdown) can be updated using this function. Use
+        `set_curve_example` / `set_image_example` to set example values for other data column types.
+
         """

         existing = self.get_by_id(id=data_template.id)
@@ -639,7 +394,9 @@ class DataTemplateCollection(BaseCollection):
                 param.validation[0].datatype = DataType.ENUM  # Add this line

         # Add enum values to newly created parameters
-
+        add_parameter_enums(
+            session=self.session,
+            base_path=self.base_path,
             data_template_id=existing.id,
             new_parameters=returned_parameters,
         )
@@ -723,6 +480,7 @@ class DataTemplateCollection(BaseCollection):
         )
         return self.get_by_id(id=data_template.id)

+    @validate_call
     def delete(self, *, id: DataTemplateId) -> None:
         """Deletes a data template by its ID.

@@ -791,3 +549,99 @@ class DataTemplateCollection(BaseCollection):
                 yield from hydrated_templates
             except AlbertHTTPError as e:
                 logger.warning(f"Error hydrating batch {batch}: {e}")
+
+    @validate_call
+    def set_curve_example(
+        self,
+        *,
+        data_template_id: DataTemplateId,
+        data_column_id: DataColumnId | None = None,
+        data_column_name: str | None = None,
+        example: CurveExample,
+    ) -> DataTemplate:
+        """Set a curve example on a Curve data column.
+
+        Parameters
+        ----------
+        data_template_id : DataTemplateId
+            Target data template ID.
+        data_column_id : DataColumnId, optional
+            Target curve column ID (provide exactly one of id or name).
+        data_column_name : str, optional
+            Target curve column name (provide exactly one of id or name).
+        example : CurveExample
+            Curve example payload
+
+        Returns
+        -------
+        DataTemplate
+            The updated data template after the example is applied.
+        """
+        data_template = self.get_by_id(id=data_template_id)
+        target_column = get_target_data_column(
+            data_template=data_template,
+            data_template_id=data_template_id,
+            data_column_id=data_column_id,
+            data_column_name=data_column_name,
+        )
+        payload = build_curve_example(
+            session=self.session,
+            data_template_id=data_template_id,
+            example=example,
+            target_column=target_column,
+        )
+        if not payload.data:
+            return data_template
+        self.session.patch(
+            f"{self.base_path}/{data_template_id}",
+            json=payload.model_dump(mode="json", by_alias=True, exclude_none=True),
+        )
+        return self.get_by_id(id=data_template_id)
+
+    @validate_call
+    def set_image_example(
+        self,
+        *,
+        data_template_id: DataTemplateId,
+        data_column_id: DataColumnId | None = None,
+        data_column_name: str | None = None,
+        example: ImageExample,
+    ) -> DataTemplate:
+        """Set an image example on a Image data column.
+
+        Parameters
+        ----------
+        data_template_id : DataTemplateId
+            Target data template ID.
+        data_column_id : DataColumnId, optional
+            Target image column ID (provide exactly one of id or name).
+        data_column_name : str, optional
+            Target image column name (provide exactly one of id or name).
+        example : ImageExample
+            Image example payload
+
+        Returns
+        -------
+        DataTemplate
+            The updated data template after the example is applied.
+        """
+        data_template = self.get_by_id(id=data_template_id)
+        target_column = get_target_data_column(
+            data_template=data_template,
+            data_template_id=data_template_id,
+            data_column_id=data_column_id,
+            data_column_name=data_column_name,
+        )
+        payload = build_image_example(
+            session=self.session,
+            data_template_id=data_template_id,
+            example=example,
+            target_column=target_column,
+        )
+        if not payload.data:
+            return data_template
+        self.session.patch(
+            f"{self.base_path}/{data_template_id}",
+            json=payload.model_dump(mode="json", by_alias=True, exclude_none=True),
+        )
+        return self.get_by_id(id=data_template_id)
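Note: `set_curve_example` and `set_image_example` above are the new public entry points for non-scalar example values (see the `Warnings` block added to `update`). The method signatures come from this diff; how a `CurveExample`/`ImageExample` is constructed is defined in albert/resources/data_templates.py, which is not expanded here, so the payload objects below are placeholders:

```python
from albert.resources.data_templates import CurveExample, ImageExample

# Placeholders: build these per the 1.11.0 resource models (fields not shown in this diff).
curve_example: CurveExample = ...
image_example: ImageExample = ...

dt = client.data_templates.set_curve_example(
    data_template_id="DT123",        # ID format follows the docstring example removed above
    data_column_name="APHA Color",   # or data_column_id=...; provide exactly one
    example=curve_example,
)
dt = client.data_templates.set_image_example(
    data_template_id="DT123",
    data_column_name="Micrograph",   # hypothetical image column name
    example=image_example,
)
```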
albert/collections/entity_types.py
CHANGED
@@ -1,5 +1,7 @@
 from collections.abc import Iterator

+from pydantic import validate_call
+
 from albert.collections.base import BaseCollection
 from albert.core.pagination import AlbertPaginator, PaginationMode
 from albert.core.session import AlbertSession
@@ -41,6 +43,7 @@ class EntityTypeCollection(BaseCollection):
         super().__init__(session=session)
         self.base_path = f"/api/{EntityTypeCollection._api_version}/entitytypes"

+    @validate_call
     def get_by_id(self, *, id: EntityTypeId) -> EntityType:
         """Get an entity type by its ID.
         Parameters
@@ -115,8 +118,14 @@ class EntityTypeCollection(BaseCollection):
                 PatchDatum(
                     operation=PatchOperation.UPDATE,
                     attribute="customFields",
-                    new_value=[
-
+                    new_value=[
+                        x.model_dump(by_alias=True, exclude_none=True)
+                        for x in updated.custom_fields
+                    ],
+                    old_value=[
+                        x.model_dump(by_alias=True, exclude_none=True)
+                        for x in existing.custom_fields
+                    ],
                 )
             )
         if updated.custom_fields is not None and existing.custom_fields is None:
@@ -124,7 +133,10 @@ class EntityTypeCollection(BaseCollection):
                 PatchDatum(
                     operation=PatchOperation.ADD,
                     attribute="customFields",
-                    new_value=[
+                    new_value=[
+                        x.model_dump(by_alias=True, exclude_none=True)
+                        for x in updated.custom_fields
+                    ],
                 )
             )

@@ -194,6 +206,7 @@ class EntityTypeCollection(BaseCollection):

         return patches

+    @validate_call
     def delete(self, *, id: EntityTypeId) -> None:
         """Delete an entity type.
         Parameters
@@ -203,6 +216,7 @@ class EntityTypeCollection(BaseCollection):
         """
         self.session.delete(f"{self.base_path}/{id}")

+    @validate_call
     def get_rules(self, *, id: EntityTypeId) -> list[EntityTypeRule]:
         """Get the rules for an entity type.
         Parameters
@@ -213,6 +227,7 @@ class EntityTypeCollection(BaseCollection):
         response = self.session.get(f"{self.base_path}/rules/{id}")
         return [EntityTypeRule(**rule) for rule in response.json()]

+    @validate_call
     def set_rules(self, *, id: EntityTypeId, rules: list[EntityTypeRule]) -> list[EntityTypeRule]:
         """Create or update the rules for an entity type.
         Parameters
@@ -232,6 +247,7 @@ class EntityTypeCollection(BaseCollection):
         )
         return [EntityTypeRule(**rule) for rule in response.json()]

+    @validate_call
     def delete_rules(self, *, id: EntityTypeId) -> None:
         """Delete the rules for an entity type.
         Parameters
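Note: the entity-type rule helpers are now validated as well. A short round-trip sketch, assuming the collection is exposed on the client as `client.entity_types` (as with the other collections) and that `"ET123"` is a placeholder EntityTypeId:

```python
# get_rules / set_rules / delete_rules signatures are confirmed by the diff above;
# "ET123" is a placeholder ID.
rules = client.entity_types.get_rules(id="ET123")
stored = client.entity_types.set_rules(id="ET123", rules=rules)  # create or update
client.entity_types.delete_rules(id="ET123")                     # returns None
```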
albert/collections/inventory.py
CHANGED
@@ -281,7 +281,7 @@ class InventoryCollection(BaseCollection):
         """Add inventory specs to the inventory item.

         An `InventorySpec` is a property that was not directly measured via a task,
-        but is a generic property of that
+        but is a generic property of that inventory item.

         Parameters
         ----------