albert 1.10.0__py3-none-any.whl → 1.10.0rc2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- albert/__init__.py +1 -1
- albert/collections/custom_templates.py +0 -3
- albert/collections/data_templates.py +264 -118
- albert/collections/entity_types.py +3 -19
- albert/collections/inventory.py +1 -1
- albert/collections/parameters.py +0 -1
- albert/collections/property_data.py +280 -384
- albert/collections/reports.py +0 -4
- albert/collections/tasks.py +1 -2
- albert/collections/worksheets.py +0 -3
- albert/core/shared/models/base.py +1 -3
- albert/core/shared/models/patch.py +1 -1
- albert/resources/batch_data.py +2 -4
- albert/resources/cas.py +1 -3
- albert/resources/custom_fields.py +1 -3
- albert/resources/data_templates.py +12 -60
- albert/resources/inventory.py +4 -6
- albert/resources/lists.py +1 -3
- albert/resources/notebooks.py +2 -4
- albert/resources/parameter_groups.py +1 -3
- albert/resources/property_data.py +5 -64
- albert/resources/sheets.py +14 -16
- albert/resources/tags.py +1 -3
- albert/resources/tasks.py +7 -4
- albert/resources/workflows.py +2 -4
- albert/utils/data_template.py +37 -392
- albert/utils/tasks.py +3 -3
- {albert-1.10.0.dist-info → albert-1.10.0rc2.dist-info}/METADATA +1 -1
- {albert-1.10.0.dist-info → albert-1.10.0rc2.dist-info}/RECORD +31 -32
- albert/utils/property_data.py +0 -638
- {albert-1.10.0.dist-info → albert-1.10.0rc2.dist-info}/WHEEL +0 -0
- {albert-1.10.0.dist-info → albert-1.10.0rc2.dist-info}/licenses/LICENSE +0 -0
albert/__init__.py
CHANGED

albert/collections/custom_templates.py
CHANGED
@@ -1,7 +1,5 @@
 from collections.abc import Iterator
 
-from pydantic import validate_call
-
 from albert.collections.base import BaseCollection
 from albert.core.logging import logger
 from albert.core.pagination import AlbertPaginator

@@ -30,7 +28,6 @@ class CustomTemplatesCollection(BaseCollection):
         super().__init__(session=session)
         self.base_path = f"/api/{CustomTemplatesCollection._api_version}/customtemplates"
 
-    @validate_call
     def get_by_id(self, *, id: CustomTemplateId) -> CustomTemplate:
         """Get a Custom Template by ID
 
albert/collections/data_templates.py
CHANGED

@@ -1,14 +1,17 @@
 from collections.abc import Iterator
 from itertools import islice
+from pathlib import Path
 
 from pydantic import Field, validate_call
 
+from albert.collections.attachments import AttachmentCollection
 from albert.collections.base import BaseCollection
+from albert.collections.files import FileCollection
 from albert.core.logging import logger
 from albert.core.pagination import AlbertPaginator
 from albert.core.session import AlbertSession
 from albert.core.shared.enums import OrderBy, PaginationMode
-from albert.core.shared.identifiers import DataColumnId, DataTemplateId, UserId
+from albert.core.shared.identifiers import AttachmentId, DataColumnId, DataTemplateId, UserId
 from albert.core.shared.models.patch import (
     GeneralPatchDatum,
     GeneralPatchPayload,

@@ -17,21 +20,25 @@ from albert.core.shared.models.patch import (
 )
 from albert.exceptions import AlbertHTTPError
 from albert.resources.data_templates import (
-    CurveExample,
     DataColumnValue,
     DataTemplate,
     DataTemplateSearchItem,
-    ImageExample,
     ParameterValue,
 )
 from albert.resources.parameter_groups import DataType, EnumValidationValue, ValueValidation
+from albert.resources.tasks import ImportMode
 from albert.utils._patch import generate_data_template_patches
 from albert.utils.data_template import (
-
-
-
+    build_curve_import_patch_payload,
+    create_curve_import_job,
+    derive_curve_csv_mapping,
+    exec_curve_script,
+    get_script_attachment,
     get_target_data_column,
+    prepare_curve_input_attachment,
+    validate_data_column_type,
 )
+from albert.utils.tasks import CSV_EXTENSIONS, fetch_csv_table_rows
 
 
 class DCPatchDatum(PGPatchPayload):
@@ -90,6 +97,255 @@ class DataTemplateCollection(BaseCollection):
         else:
             return self.add_parameters(data_template_id=dt.id, parameters=parameter_values)
 
+    @validate_call
+    def import_curve_data(
+        self,
+        *,
+        data_template_id: DataTemplateId,
+        data_column_id: DataColumnId | None = None,
+        data_column_name: str | None = None,
+        mode: ImportMode = ImportMode.CSV,
+        field_mapping: dict[str, str] | None = None,
+        file_path: str | Path | None = None,
+        attachment_id: AttachmentId | None = None,
+    ) -> DataTemplate:
+        """Import curve data into a data template column.
+
+        Parameters
+        ----------
+        data_template_id : DataTemplateId
+            Target data template ID.
+        data_column_id : DataColumnId | None, optional
+            Specific data column to upload the curve data to. Provide exactly one of
+            ``data_column_id`` or ``data_column_name``.
+        data_column_name : str | None, optional
+            Case-insensitive data column display name. Provide exactly one of the column
+            identifier or name.
+        mode : ImportMode, optional
+            Import mode. ``ImportMode.SCRIPT`` runs the attached automation script and requires
+            a script attachment on the data template; ``ImportMode.CSV`` ingests the
+            uploaded CSV directly. Defaults to ``ImportMode.CSV``.
+        field_mapping : dict[str, str] | None, optional
+            Optional manual mapping from CSV headers to curve result column names on the
+            target column. Example: ``{"visc": "Viscosity"}`` maps the "visc" CSV header
+            to the "Viscosity" curve result column. Mappings are matched case-insensitively
+            and override auto-detection. In ``ImportMode.SCRIPT`` this applies to the
+            headers emitted by the script before ingestion.
+        attachment_id : AttachmentId | None, optional
+            Existing attachment to use. Exactly one of ``attachment_id`` or ``file_path``
+            must be provided.
+        file_path : str | Path | None, optional
+            Local file to upload and attach to a new note on the data template. Exactly one
+            of ``attachment_id`` or ``file_path`` must be provided.
+
+        Returns
+        -------
+        DataTemplate
+            The data template refreshed after the curve import job completes.
+
+        Examples
+        --------
+        !!! example "Import curve data from a CSV file"
+            ```python
+            dt = client.data_templates.import_curve_data(
+                data_template_id="DT123",
+                data_column_name="APHA Color",
+                mode=ImportMode.CSV,
+                file_path="path/to/curve.csv",
+                field_mapping={"visc": "Viscosity"},
+            )
+            ```
+        """
+        data_template = self.get_by_id(id=data_template_id)
+        target_column = get_target_data_column(
+            data_template=data_template,
+            data_template_id=data_template_id,
+            data_column_id=data_column_id,
+            data_column_name=data_column_name,
+        )
+        validate_data_column_type(target_column=target_column)
+        column_id = target_column.data_column_id
+        attachment_collection = AttachmentCollection(session=self.session)
+
+        script_attachment_signed_url: str | None = None
+
+        if mode is ImportMode.SCRIPT:
+            script_attachment, script_extensions = get_script_attachment(
+                attachment_collection=attachment_collection,
+                data_template_id=data_template_id,
+                column_id=column_id,
+            )
+            if not script_extensions:
+                raise ValueError("Script attachment must define allowed extensions.")
+            script_attachment_signed_url = script_attachment.signed_url
+            allowed_extensions = set(script_extensions)
+        else:
+            allowed_extensions = set(CSV_EXTENSIONS)
+        raw_attachment = prepare_curve_input_attachment(
+            attachment_collection=attachment_collection,
+            data_template_id=data_template_id,
+            column_id=column_id,
+            allowed_extensions=allowed_extensions,
+            file_path=file_path,
+            attachment_id=attachment_id,
+            require_signed_url=mode is ImportMode.SCRIPT,
+        )
+        raw_key = raw_attachment.key
+        if raw_attachment.id is None:
+            raise ValueError("Curve input attachment did not return an identifier.")
+        resolved_attachment_id = AttachmentId(raw_attachment.id)
+
+        processed_input_key: str = raw_key
+        column_headers: dict[str, str] = {}
+
+        if mode is ImportMode.SCRIPT:
+            file_collection = FileCollection(session=self.session)
+            processed_input_key, column_headers = exec_curve_script(
+                session=self.session,
+                api_version=self._api_version,
+                data_template_id=data_template_id,
+                column_id=column_id,
+                raw_attachment=raw_attachment,
+                file_collection=file_collection,
+                script_attachment_signed_url=script_attachment_signed_url,
+            )
+        else:
+            table_rows = fetch_csv_table_rows(
+                session=self.session,
+                attachment_id=resolved_attachment_id,
+                headers_only=True,
+            )
+            header_row = table_rows[0]
+            if not isinstance(header_row, dict):
+                raise ValueError("Unexpected CSV header format returned by preview endpoint.")
+            column_headers = {
+                key: value
+                for key, value in header_row.items()
+                if isinstance(key, str) and isinstance(value, str) and value
+            }
+
+        csv_mapping = derive_curve_csv_mapping(
+            target_column=target_column,
+            column_headers=column_headers,
+            field_mapping=field_mapping,
+        )
+
+        job_id, partition_uuid, s3_output_key = create_curve_import_job(
+            session=self.session,
+            data_template_id=data_template_id,
+            column_id=column_id,
+            csv_mapping=csv_mapping,
+            raw_attachment=raw_attachment,
+            processed_input_key=processed_input_key,
+        )
+
+        patch_payload = build_curve_import_patch_payload(
+            target_column=target_column,
+            job_id=job_id,
+            csv_mapping=csv_mapping,
+            raw_attachment=raw_attachment,
+            partition_uuid=partition_uuid,
+            s3_output_key=s3_output_key,
+        )
+        self.session.patch(
+            f"{self.base_path}/{data_template_id}",
+            json=patch_payload.model_dump(by_alias=True, mode="json", exclude_none=True),
+        )
+
+        return self.get_by_id(id=data_template_id)
+
+    def _add_param_enums(
+        self,
+        *,
+        data_template_id: DataTemplateId,
+        new_parameters: list[ParameterValue],
+    ) -> list[EnumValidationValue]:
+        """Adds enum values to a parameter."""
+
+        data_template = self.get_by_id(id=data_template_id)
+        existing_parameters = data_template.parameter_values
+        enums_by_sequence = {}
+        for parameter in new_parameters:
+            this_sequence = next(
+                (
+                    p.sequence
+                    for p in existing_parameters
+                    if p.id == parameter.id and p.short_name == parameter.short_name
+                ),
+                None,
+            )
+            enum_patches = []
+            if (
+                parameter.validation
+                and len(parameter.validation) > 0
+                and isinstance(parameter.validation[0].value, list)
+            ):
+                existing_validation = (
+                    [x for x in existing_parameters if x.sequence == parameter.sequence]
+                    if existing_parameters
+                    else []
+                )
+                existing_enums = (
+                    [
+                        x
+                        for x in existing_validation[0].validation[0].value
+                        if isinstance(x, EnumValidationValue) and x.id is not None
+                    ]
+                    if (
+                        existing_validation
+                        and len(existing_validation) > 0
+                        and existing_validation[0].validation
+                        and len(existing_validation[0].validation) > 0
+                        and existing_validation[0].validation[0].value
+                        and isinstance(existing_validation[0].validation[0].value, list)
+                    )
+                    else []
+                )
+                updated_enums = (
+                    [
+                        x
+                        for x in parameter.validation[0].value
+                        if isinstance(x, EnumValidationValue)
+                    ]
+                    if parameter.validation[0].value
+                    else []
+                )
+
+                deleted_enums = [
+                    x for x in existing_enums if x.id not in [y.id for y in updated_enums]
+                ]
+
+                new_enums = [
+                    x for x in updated_enums if x.id not in [y.id for y in existing_enums]
+                ]
+
+                matching_enums = [
+                    x for x in updated_enums if x.id in [y.id for y in existing_enums]
+                ]
+
+                for new_enum in new_enums:
+                    enum_patches.append({"operation": "add", "text": new_enum.text})
+                for deleted_enum in deleted_enums:
+                    enum_patches.append({"operation": "delete", "id": deleted_enum.id})
+                for matching_enum in matching_enums:
+                    if (
+                        matching_enum.text
+                        != [x for x in existing_enums if x.id == matching_enum.id][0].text
+                    ):
+                        enum_patches.append(
+                            {
+                                "operation": "update",
+                                "id": matching_enum.id,
+                                "text": matching_enum.text,
+                            }
+                        )
+
+            if len(enum_patches) > 0:
+                enum_response = self.session.put(
+                    f"{self.base_path}/{data_template_id}/parameters/{this_sequence}/enums",
+                    json=enum_patches,
+                )
+                enums_by_sequence[this_sequence] = [
+                    EnumValidationValue(**x) for x in enum_response.json()
+                ]
+        return enums_by_sequence
+
     @validate_call
     def get_by_id(self, *, id: DataTemplateId) -> DataTemplate:
         """Get a data template by its ID.
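For orientation, a minimal usage sketch of the SCRIPT-mode path added above, complementing the CSV example in the docstring. The `client` handle and the `DT123`/`ATT456` identifiers are hypothetical placeholders, not values taken from this diff.

```python
# Hypothetical sketch: SCRIPT mode runs the template's attached automation
# script over an existing attachment instead of ingesting an uploaded CSV.
# "DT123" and "ATT456" are placeholder IDs; `client` is an authenticated Albert client.
from albert.resources.tasks import ImportMode

dt = client.data_templates.import_curve_data(
    data_template_id="DT123",
    data_column_name="APHA Color",
    mode=ImportMode.SCRIPT,
    attachment_id="ATT456",  # exactly one of attachment_id / file_path
)
```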
@@ -107,7 +363,6 @@ class DataTemplateCollection(BaseCollection):
         response = self.session.get(f"{self.base_path}/{id}")
         return DataTemplate(**response.json())
 
-    @validate_call
     def get_by_ids(self, *, ids: list[DataTemplateId]) -> list[DataTemplate]:
         """Get a list of data templates by their IDs.
 

@@ -147,7 +402,6 @@ class DataTemplateCollection(BaseCollection):
                 return t.hydrate()
         return None
 
-    @validate_call
     def add_data_columns(
         self, *, data_template_id: DataTemplateId, data_columns: list[DataColumnValue]
     ) -> DataTemplate:

@@ -191,7 +445,6 @@ class DataTemplateCollection(BaseCollection):
         )
         return self.get_by_id(id=data_template_id)
 
-    @validate_call
     def add_parameters(
         self, *, data_template_id: DataTemplateId, parameters: list[ParameterValue]
     ) -> DataTemplate:

@@ -240,9 +493,7 @@ class DataTemplateCollection(BaseCollection):
                 if param.id in initial_enum_values:
                     param.validation[0].value = initial_enum_values[param.id]
                     param.validation[0].datatype = DataType.ENUM
-
-                    session=self.session,
-                    base_path=self.base_path,
+                self._add_param_enums(
                     data_template_id=data_template_id,
                     new_parameters=[param],
                 )
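The `self._add_param_enums` call wired in above (and again in the `update` path below) derives its enum patches by diffing the new and existing enum lists on `id` and `text`. A standalone sketch of that diffing rule, using plain dicts in place of `EnumValidationValue` objects:

```python
# Sketch of the enum diffing rule from _add_param_enums: values present only
# in `updated` become "add" patches, values missing from `updated` become
# "delete" patches, and id-matched values with changed text become "update" patches.
existing = [{"id": "E1", "text": "Low"}, {"id": "E2", "text": "High"}]
updated = [{"id": "E1", "text": "Low"}, {"text": "Medium"}]  # no id -> brand-new value

existing_ids = {e["id"] for e in existing}
updated_ids = {e.get("id") for e in updated}

patches = []
patches += [{"operation": "add", "text": e["text"]} for e in updated if e.get("id") not in existing_ids]
patches += [{"operation": "delete", "id": e["id"]} for e in existing if e["id"] not in updated_ids]
patches += [
    {"operation": "update", "id": e["id"], "text": e["text"]}
    for e in updated
    if e.get("id") in existing_ids
    and e["text"] != next(x["text"] for x in existing if x["id"] == e["id"])
]
print(patches)
# [{'operation': 'add', 'text': 'Medium'}, {'operation': 'delete', 'id': 'E2'}]
```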
@@ -313,12 +564,6 @@ class DataTemplateCollection(BaseCollection):
         -------
         DataTemplate
             The Updated DataTemplate object.
-
-        Warnings
-        --------
-        Only scalar data column values (text, number, dropdown) can be updated using this function. Use
-        `set_curve_example` / `set_image_example` to set example values for other data column types.
-
         """
 
         existing = self.get_by_id(id=data_template.id)

@@ -394,9 +639,7 @@ class DataTemplateCollection(BaseCollection):
                     param.validation[0].datatype = DataType.ENUM  # Add this line
 
         # Add enum values to newly created parameters
-
-        session=self.session,
-        base_path=self.base_path,
+        self._add_param_enums(
             data_template_id=existing.id,
             new_parameters=returned_parameters,
         )

@@ -480,7 +723,6 @@ class DataTemplateCollection(BaseCollection):
         )
         return self.get_by_id(id=data_template.id)
 
-    @validate_call
     def delete(self, *, id: DataTemplateId) -> None:
         """Deletes a data template by its ID.
 

@@ -549,99 +791,3 @@ class DataTemplateCollection(BaseCollection):
                 yield from hydrated_templates
             except AlbertHTTPError as e:
                 logger.warning(f"Error hydrating batch {batch}: {e}")
-
-    @validate_call
-    def set_curve_example(
-        self,
-        *,
-        data_template_id: DataTemplateId,
-        data_column_id: DataColumnId | None = None,
-        data_column_name: str | None = None,
-        example: CurveExample,
-    ) -> DataTemplate:
-        """Set a curve example on a Curve data column.
-
-        Parameters
-        ----------
-        data_template_id : DataTemplateId
-            Target data template ID.
-        data_column_id : DataColumnId, optional
-            Target curve column ID (provide exactly one of id or name).
-        data_column_name : str, optional
-            Target curve column name (provide exactly one of id or name).
-        example : CurveExample
-            Curve example payload
-
-        Returns
-        -------
-        DataTemplate
-            The updated data template after the example is applied.
-        """
-        data_template = self.get_by_id(id=data_template_id)
-        target_column = get_target_data_column(
-            data_template=data_template,
-            data_template_id=data_template_id,
-            data_column_id=data_column_id,
-            data_column_name=data_column_name,
-        )
-        payload = build_curve_example(
-            session=self.session,
-            data_template_id=data_template_id,
-            example=example,
-            target_column=target_column,
-        )
-        if not payload.data:
-            return data_template
-        self.session.patch(
-            f"{self.base_path}/{data_template_id}",
-            json=payload.model_dump(mode="json", by_alias=True, exclude_none=True),
-        )
-        return self.get_by_id(id=data_template_id)
-
-    @validate_call
-    def set_image_example(
-        self,
-        *,
-        data_template_id: DataTemplateId,
-        data_column_id: DataColumnId | None = None,
-        data_column_name: str | None = None,
-        example: ImageExample,
-    ) -> DataTemplate:
-        """Set an image example on a Image data column.
-
-        Parameters
-        ----------
-        data_template_id : DataTemplateId
-            Target data template ID.
-        data_column_id : DataColumnId, optional
-            Target image column ID (provide exactly one of id or name).
-        data_column_name : str, optional
-            Target image column name (provide exactly one of id or name).
-        example : ImageExample
-            Image example payload
-
-        Returns
-        -------
-        DataTemplate
-            The updated data template after the example is applied.
-        """
-        data_template = self.get_by_id(id=data_template_id)
-        target_column = get_target_data_column(
-            data_template=data_template,
-            data_template_id=data_template_id,
-            data_column_id=data_column_id,
-            data_column_name=data_column_name,
-        )
-        payload = build_image_example(
-            session=self.session,
-            data_template_id=data_template_id,
-            example=example,
-            target_column=target_column,
-        )
-        if not payload.data:
-            return data_template
-        self.session.patch(
-            f"{self.base_path}/{data_template_id}",
-            json=payload.model_dump(mode="json", by_alias=True, exclude_none=True),
-        )
-        return self.get_by_id(id=data_template_id)
albert/collections/entity_types.py
CHANGED

@@ -1,7 +1,5 @@
 from collections.abc import Iterator
 
-from pydantic import validate_call
-
 from albert.collections.base import BaseCollection
 from albert.core.pagination import AlbertPaginator, PaginationMode
 from albert.core.session import AlbertSession

@@ -43,7 +41,6 @@ class EntityTypeCollection(BaseCollection):
         super().__init__(session=session)
         self.base_path = f"/api/{EntityTypeCollection._api_version}/entitytypes"
 
-    @validate_call
     def get_by_id(self, *, id: EntityTypeId) -> EntityType:
         """Get an entity type by its ID.
         Parameters

@@ -118,14 +115,8 @@ class EntityTypeCollection(BaseCollection):
                 PatchDatum(
                     operation=PatchOperation.UPDATE,
                     attribute="customFields",
-                    new_value=[
-                        x.model_dump(by_alias=True, exclude_none=True)
-                        for x in updated.custom_fields
-                    ],
-                    old_value=[
-                        x.model_dump(by_alias=True, exclude_none=True)
-                        for x in existing.custom_fields
-                    ],
+                    new_value=[x.model_dump(by_alias=True) for x in updated.custom_fields],
+                    old_value=[x.model_dump(by_alias=True) for x in existing.custom_fields],
                 )
             )
         if updated.custom_fields is not None and existing.custom_fields is None:

@@ -133,10 +124,7 @@ class EntityTypeCollection(BaseCollection):
                 PatchDatum(
                     operation=PatchOperation.ADD,
                     attribute="customFields",
-                    new_value=[
-                        x.model_dump(by_alias=True, exclude_none=True)
-                        for x in updated.custom_fields
-                    ],
+                    new_value=[x.model_dump(by_alias=True) for x in updated.custom_fields],
                 )
             )
 
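Both custom-field hunks above drop `exclude_none=True` from `model_dump`, so `None`-valued attributes now survive into the patch payload. A small sketch of the behavioral difference, using a hypothetical stand-in model rather than the real custom-field resource:

```python
# Illustration of dropping exclude_none=True (stand-in model, not albert's resource).
from pydantic import BaseModel, Field

class CustomFieldStandIn(BaseModel):
    name: str
    display_name: str | None = Field(default=None, alias="displayName")

    model_config = {"populate_by_name": True}

field = CustomFieldStandIn(name="lot")
print(field.model_dump(by_alias=True, exclude_none=True))  # {'name': 'lot'}
print(field.model_dump(by_alias=True))  # {'name': 'lot', 'displayName': None}
```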
@@ -206,7 +194,6 @@ class EntityTypeCollection(BaseCollection):
 
         return patches
 
-    @validate_call
     def delete(self, *, id: EntityTypeId) -> None:
         """Delete an entity type.
         Parameters

@@ -216,7 +203,6 @@ class EntityTypeCollection(BaseCollection):
         """
         self.session.delete(f"{self.base_path}/{id}")
 
-    @validate_call
     def get_rules(self, *, id: EntityTypeId) -> list[EntityTypeRule]:
         """Get the rules for an entity type.
         Parameters

@@ -227,7 +213,6 @@ class EntityTypeCollection(BaseCollection):
         response = self.session.get(f"{self.base_path}/rules/{id}")
         return [EntityTypeRule(**rule) for rule in response.json()]
 
-    @validate_call
     def set_rules(self, *, id: EntityTypeId, rules: list[EntityTypeRule]) -> list[EntityTypeRule]:
         """Create or update the rules for an entity type.
         Parameters

@@ -247,7 +232,6 @@ class EntityTypeCollection(BaseCollection):
         )
         return [EntityTypeRule(**rule) for rule in response.json()]
 
-    @validate_call
     def delete_rules(self, *, id: EntityTypeId) -> None:
         """Delete the rules for an entity type.
         Parameters
albert/collections/inventory.py
CHANGED
@@ -281,7 +281,7 @@ class InventoryCollection(BaseCollection):
         """Add inventory specs to the inventory item.
 
         An `InventorySpec` is a property that was not directly measured via a task,
-        but is a generic property of that
+        but is a generic property of that inventory item.
 
         Parameters
         ----------