albert 1.10.0rc2__py3-none-any.whl → 1.11.0__py3-none-any.whl

Files changed (36)
  1. albert/__init__.py +1 -1
  2. albert/client.py +5 -0
  3. albert/collections/custom_templates.py +3 -0
  4. albert/collections/data_templates.py +118 -264
  5. albert/collections/entity_types.py +19 -3
  6. albert/collections/inventory.py +1 -1
  7. albert/collections/notebooks.py +154 -26
  8. albert/collections/parameters.py +1 -0
  9. albert/collections/property_data.py +384 -280
  10. albert/collections/reports.py +4 -0
  11. albert/collections/synthesis.py +292 -0
  12. albert/collections/tasks.py +2 -1
  13. albert/collections/worksheets.py +3 -0
  14. albert/core/shared/models/base.py +3 -1
  15. albert/core/shared/models/patch.py +1 -1
  16. albert/resources/batch_data.py +4 -2
  17. albert/resources/cas.py +3 -1
  18. albert/resources/custom_fields.py +3 -1
  19. albert/resources/data_templates.py +60 -12
  20. albert/resources/inventory.py +6 -4
  21. albert/resources/lists.py +3 -1
  22. albert/resources/notebooks.py +12 -7
  23. albert/resources/parameter_groups.py +3 -1
  24. albert/resources/property_data.py +64 -5
  25. albert/resources/sheets.py +16 -14
  26. albert/resources/synthesis.py +61 -0
  27. albert/resources/tags.py +3 -1
  28. albert/resources/tasks.py +4 -7
  29. albert/resources/workflows.py +4 -2
  30. albert/utils/data_template.py +392 -37
  31. albert/utils/property_data.py +638 -0
  32. albert/utils/tasks.py +3 -3
  33. {albert-1.10.0rc2.dist-info → albert-1.11.0.dist-info}/METADATA +1 -1
  34. {albert-1.10.0rc2.dist-info → albert-1.11.0.dist-info}/RECORD +36 -33
  35. {albert-1.10.0rc2.dist-info → albert-1.11.0.dist-info}/WHEEL +0 -0
  36. {albert-1.10.0rc2.dist-info → albert-1.11.0.dist-info}/licenses/LICENSE +0 -0
albert/utils/property_data.py ADDED
@@ -0,0 +1,638 @@
+ """Utilities for task property data operations."""
+
+ from __future__ import annotations
+
+ import mimetypes
+ import re
+ import uuid
+ from collections.abc import Callable
+ from pathlib import Path
+
+ import pandas as pd
+
+ from albert.collections.attachments import AttachmentCollection
+ from albert.collections.data_templates import DataTemplateCollection
+ from albert.collections.files import FileCollection
+ from albert.core.logging import logger
+ from albert.core.session import AlbertSession
+ from albert.core.shared.identifiers import AttachmentId, BlockId, DataTemplateId, TaskId
+ from albert.core.shared.models.base import EntityLink
+ from albert.core.shared.models.patch import PatchOperation
+ from albert.resources.data_templates import CurveDBMetadata, ImportMode, StorageKeyReference
+ from albert.resources.files import FileNamespace
+ from albert.resources.property_data import (
+     CurvePropertyValue,
+     CurvePropertyValuePayload,
+     ImagePropertyValue,
+     ImagePropertyValuePayload,
+     PropertyDataPatchDatum,
+     PropertyValue,
+     ReturnScope,
+     TaskDataColumn,
+     TaskPropertyCreate,
+     TaskPropertyData,
+     Trial,
+ )
+ from albert.resources.tasks import PropertyTask
+ from albert.utils.data_template import (
+     create_curve_import_job,
+     derive_curve_csv_mapping,
+     exec_curve_script,
+     get_script_attachment,
+     get_target_data_column,
+     prepare_curve_input_attachment,
+     validate_data_column_type,
+ )
+ from albert.utils.tasks import CSV_EXTENSIONS, fetch_csv_table_rows
+
+
+ def get_task_from_id(*, session: AlbertSession, id: TaskId) -> PropertyTask:
+     """Fetch a PropertyTask by id using the task collection."""
+     from albert.collections.tasks import TaskCollection
+
+     return TaskCollection(session=session).get_by_id(id=id)
+
+
+ def resolve_return_scope(
+     *,
+     task_id: TaskId,
+     return_scope: ReturnScope,
+     inventory_id,
+     block_id,
+     lot_id,
+     prefetched_block: TaskPropertyData | None,
+     get_all_task_properties: Callable[..., list[TaskPropertyData]],
+     get_task_block_properties: Callable[..., TaskPropertyData],
+ ) -> list[TaskPropertyData]:
+     """Resolve the return payload based on scope and cached block data."""
+     if return_scope == "task":
+         return get_all_task_properties(task_id=task_id)
+
+     if return_scope == "block":
+         if prefetched_block is not None:
+             return [prefetched_block]
+         if inventory_id is None or block_id is None:
+             raise ValueError("inventory_id and block_id are required when return_scope='block'.")
+         return [
+             get_task_block_properties(
+                 inventory_id=inventory_id,
+                 task_id=task_id,
+                 block_id=block_id,
+                 lot_id=lot_id,
+             )
+         ]
+
+     return []
+
+
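# Usage sketch (annotation, not part of the diff): resolving the "block" scope
# when no prefetched payload is available. The ids and the two fetcher
# callables below are assumptions for illustration; in the SDK they are
# supplied by the property-data collection.
rows = resolve_return_scope(
    task_id="TASP123",        # assumed task id
    return_scope="block",
    inventory_id="INVA1",     # assumed inventory id
    block_id="BLK1",          # assumed block id
    lot_id=None,
    prefetched_block=None,
    get_all_task_properties=get_all_task_properties,      # assumed fetcher callable
    get_task_block_properties=get_task_block_properties,  # assumed fetcher callable
)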
+ def resolve_image_property_value(
+     *, session: AlbertSession, task_id: TaskId, image_value: ImagePropertyValue
+ ) -> dict:
+     """Upload an image file and return the resolved payload dict."""
+     resolved_path = Path(image_value.file_path).expanduser()
+     if not resolved_path.exists() or not resolved_path.is_file():
+         raise FileNotFoundError(f"File not found at '{resolved_path}'.")
+     upload_ext = resolved_path.suffix.lower()
+     if not upload_ext:
+         raise ValueError("File extension is required for image property data.")
+
+     upload_key = f"imagedata/original/{task_id}/{uuid.uuid4().hex[:10]}{upload_ext}"
+     file_name = resolved_path.name
+     content_type = mimetypes.guess_type(file_name)[0] or "application/octet-stream"
+     file_collection = FileCollection(session=session)
+     with resolved_path.open("rb") as file_handle:
+         file_collection.sign_and_upload_file(
+             data=file_handle,
+             name=upload_key,
+             namespace=FileNamespace.RESULT,
+             content_type=content_type,
+         )
+
+     value = ImagePropertyValuePayload(
+         file_name=file_name,
+         s3_key=StorageKeyReference(
+             original=upload_key,
+             thumb=upload_key,
+             preview=upload_key,
+         ),
+     )
+     return value.model_dump(by_alias=True, mode="json", exclude_none=True)
+
+
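# Usage sketch (annotation, not part of the diff): uploading a local PNG as an
# image property value. The session object, task id, and file path are
# assumptions for illustration.
payload = resolve_image_property_value(
    session=session,    # an authenticated AlbertSession
    task_id="TASP123",  # assumed task id
    image_value=ImagePropertyValue(file_path="~/results/spectrum.png"),
)
# payload carries the file name plus original/thumb/preview storage keys,
# ready to be embedded in a task-property POST body.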
+ def resolve_curve_property_value(
+     *,
+     session: AlbertSession,
+     task_id: TaskId,
+     block_id: BlockId | None,
+     prop: TaskPropertyCreate,
+     curve_value: CurvePropertyValue,
+ ) -> dict:
+     """Upload/import curve data and return the resolved payload dict."""
+     if block_id is None:
+         raise ValueError("block_id is required to import curve data for task properties.")
+
+     data_template_id = resolve_data_template_id(prop=prop)
+     data_template = DataTemplateCollection(session=session).get_by_id(id=data_template_id)
+     target_column = get_target_data_column(
+         data_template=data_template,
+         data_template_id=data_template_id,
+         data_column_id=prop.data_column.data_column_id,
+         data_column_name=None,
+     )
+     validate_data_column_type(target_column=target_column)
+     column_id = target_column.data_column_id
+     if column_id is None:
+         raise ValueError("Curve data column is missing an identifier.")
+
+     attachment_collection = AttachmentCollection(session=session)
+     file_collection = FileCollection(session=session)
+
+     script_attachment_signed_url: str | None = None
+     if curve_value.mode is ImportMode.SCRIPT:
+         script_attachment, script_extensions = get_script_attachment(
+             attachment_collection=attachment_collection,
+             data_template_id=data_template_id,
+             column_id=column_id,
+         )
+         if not script_extensions:
+             raise ValueError("Script attachment must define allowed extensions.")
+         script_attachment_signed_url = script_attachment.signed_url
+         allowed_extensions = set(script_extensions)
+     else:
+         allowed_extensions = set(CSV_EXTENSIONS)
+
+     upload_key = (
+         f"curve-input/{task_id}/{block_id}/{data_template_id}/"
+         f"{column_id}/{uuid.uuid4().hex[:10]}.csv"
+     )
+     raw_attachment = prepare_curve_input_attachment(
+         attachment_collection=attachment_collection,
+         data_template_id=data_template_id,
+         column_id=column_id,
+         allowed_extensions=allowed_extensions,
+         file_path=curve_value.file_path,
+         attachment_id=None,
+         require_signed_url=curve_value.mode is ImportMode.SCRIPT,
+         parent_id=task_id,
+         upload_key=upload_key,
+     )
+
+     raw_key = raw_attachment.key
+     if not raw_key:
+         raise ValueError("Curve input attachment does not include an S3 key.")
+     if raw_attachment.id is None:
+         raise ValueError("Curve input attachment did not return an identifier.")
+     resolved_attachment_id = AttachmentId(raw_attachment.id)
+
+     processed_input_key = raw_key
+     column_headers: dict[str, str] = {}
+
+     if curve_value.mode is ImportMode.SCRIPT:
+         processed_input_key, column_headers = exec_curve_script(
+             session=session,
+             data_template_id=data_template_id,
+             column_id=column_id,
+             raw_attachment=raw_attachment,
+             file_collection=file_collection,
+             script_attachment_signed_url=script_attachment_signed_url,
+             task_id=task_id,
+             block_id=block_id,
+         )
+     else:
+         table_rows = fetch_csv_table_rows(
+             session=session,
+             attachment_id=resolved_attachment_id,
+             headers_only=True,
+         )
+         header_row = table_rows[0]
+         if not isinstance(header_row, dict):
+             raise ValueError("Unexpected CSV header format returned by preview endpoint.")
+         column_headers = {
+             key: value
+             for key, value in header_row.items()
+             if isinstance(key, str) and isinstance(value, str) and value
+         }
+
+     csv_mapping = derive_curve_csv_mapping(
+         target_column=target_column,
+         column_headers=column_headers,
+         field_mapping=curve_value.field_mapping,
+     )
+
+     job_id, partition_uuid, s3_output_key = create_curve_import_job(
+         session=session,
+         data_template_id=data_template_id,
+         column_id=column_id,
+         csv_mapping=csv_mapping,
+         raw_attachment=raw_attachment,
+         processed_input_key=processed_input_key,
+         task_id=task_id,
+         block_id=block_id,
+     )
+
+     table_name = f"{str(data_template_id).lower()}_{str(column_id).lower()}"
+     value = CurvePropertyValuePayload(
+         file_name=raw_attachment.name or "",
+         s3_key=StorageKeyReference(
+             s3_input=processed_input_key,
+             rawfile=processed_input_key,
+             s3_output=s3_output_key,
+         ),
+         job_id=job_id,
+         csv_mapping=csv_mapping,
+         athena=CurveDBMetadata(
+             table_name=table_name,
+             partition_key=partition_uuid,
+         ),
+     )
+     return value.model_dump(by_alias=True, mode="json", exclude_none=True)
+
+
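# Usage sketch (annotation, not part of the diff): importing a plain-CSV curve
# for one data column. The ids and the TaskPropertyCreate instance are
# assumptions for illustration; in non-script mode the CSV headers come from
# the preview endpoint, as shown above.
payload = resolve_curve_property_value(
    session=session,    # an authenticated AlbertSession
    task_id="TASP123",  # assumed task id
    block_id="BLK1",    # assumed block id
    prop=prop,          # a TaskPropertyCreate with data_template and data_column set
    curve_value=CurvePropertyValue(file_path="~/results/curve.csv"),
)
# payload includes the upload keys, the import job id, the CSV mapping, and
# the Athena table/partition metadata.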
+ def resolve_data_template_id(*, prop: TaskPropertyCreate) -> DataTemplateId:
+     """Extract the data template id from a task property."""
+     data_template = prop.data_template
+     data_template_id = getattr(data_template, "id", None)
+     if data_template_id is None and isinstance(data_template, dict):
+         data_template_id = data_template.get("id") or data_template.get("albertId")
+     if data_template_id is None:
+         raise ValueError("data_template is required to import curve data.")
+     return data_template_id
+
+
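# Illustration (annotation, not part of the diff): the data template id is
# accepted either from an object with an `id` attribute or from a raw dict.
# The stub below is a hypothetical stand-in for a TaskPropertyCreate.
from types import SimpleNamespace

stub = SimpleNamespace(data_template={"albertId": "DAT123"})  # assumed id
assert resolve_data_template_id(prop=stub) == "DAT123"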
+ def resolve_task_property_payload(
+     *,
+     session: AlbertSession,
+     task_id: TaskId,
+     block_id: BlockId | None,
+     properties: list[TaskPropertyCreate],
+ ) -> list[dict]:
+     """Build POST payloads for task properties, resolving image/curve values."""
+     payload = []
+     for prop in properties:
+         prop_payload = prop.model_dump(exclude_none=True, by_alias=True, mode="json")
+         if isinstance(prop.value, ImagePropertyValue):
+             prop_payload["value"] = resolve_image_property_value(
+                 session=session,
+                 task_id=task_id,
+                 image_value=prop.value,
+             )
+         elif isinstance(prop.value, CurvePropertyValue):
+             prop_payload["value"] = resolve_curve_property_value(
+                 session=session,
+                 task_id=task_id,
+                 block_id=block_id,
+                 prop=prop,
+                 curve_value=prop.value,
+             )
+             # For curve property data, remove DataTemplate from the payload as it's not needed
+             prop_payload.pop("DataTemplate", None)
+         payload.append(prop_payload)
+     return payload
+
+
+ def resolve_patch_payload(
+     *,
+     session: AlbertSession,
+     task_id: TaskId,
+     patch_payload: list[PropertyDataPatchDatum],
+ ) -> list[dict]:
+     """Build PATCH payloads."""
+     resolved_payload = []
+     for patch in patch_payload:
+         if isinstance(patch.new_value, ImagePropertyValue | CurvePropertyValue):
+             raise ValueError(
+                 "Update ImagePropertyValue and CurvePropertyValue via "
+                 "update_or_create_task_properties."
+             )
+         resolved_payload.append(patch.model_dump(exclude_none=True, by_alias=True, mode="json"))
+     return resolved_payload
+
+
+ def _get_column_map(
+     *, dataframe: pd.DataFrame, property_data: TaskPropertyData
+ ) -> dict[str, PropertyValue]:
+     """Map dataframe columns to property data columns for bulk loads."""
+     data_col_info = property_data.data[0].trials[0].data_columns
+     column_map: dict[str, PropertyValue] = {}
+     for col in dataframe.columns:
+         column = [x for x in data_col_info if x.name == col]
+         if len(column) == 1:
+             column_map[col] = column[0]
+         else:
+             raise ValueError(
+                 f"Column '{col}' not found in block data columns or multiple matches found."
+             )
+     return column_map
+
+
+ def _df_to_task_prop_create_list(
+     *,
+     dataframe: pd.DataFrame,
+     column_map: dict[str, PropertyValue],
+     data_template_id: DataTemplateId,
+     interval: str,
+ ) -> list[TaskPropertyCreate]:
+     """Convert a dataframe into TaskPropertyCreate entries."""
+     task_prop_create_list: list[TaskPropertyCreate] = []
+     for i, row in dataframe.iterrows():
+         for col_name, col_info in column_map.items():
+             if col_name not in dataframe.columns:
+                 raise ValueError(f"Column '{col_name}' not found in DataFrame.")
+
+             task_prop_create_list.append(
+                 TaskPropertyCreate(
+                     data_column=TaskDataColumn(
+                         data_column_id=col_info.id,
+                         column_sequence=col_info.sequence,
+                     ),
+                     value=str(row[col_name]),
+                     visible_trial_number=i + 1,
+                     interval_combination=interval,
+                     data_template=EntityLink(id=data_template_id),
+                 )
+             )
+     return task_prop_create_list
+
+
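# Usage sketch (annotation, not part of the diff): bulk-loading a two-column
# dataframe. `block_data` (a previously fetched TaskPropertyData) and the ids
# are assumptions for illustration.
df = pd.DataFrame({"Viscosity": [1.2, 1.3], "pH": [7.0, 6.9]})
column_map = _get_column_map(dataframe=df, property_data=block_data)
props = _df_to_task_prop_create_list(
    dataframe=df,
    column_map=column_map,
    data_template_id="DAT123",  # assumed data template id
    interval="default",         # assumed interval combination
)
# Each dataframe row becomes one TaskPropertyCreate per mapped column, with
# the row position reused as the visible trial number.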
357
+ def form_existing_row_value_patches(
358
+ *,
359
+ session: AlbertSession,
360
+ task_id: TaskId,
361
+ block_id: BlockId,
362
+ existing_data_rows: TaskPropertyData,
363
+ properties: list[TaskPropertyCreate],
364
+ ):
365
+ """Split incoming properties into patches vs new rows."""
366
+ patches = []
367
+ new_properties = []
368
+
369
+ for prop in properties:
370
+ resolved_trial_number = resolve_trial_number(
371
+ prop=prop,
372
+ existing_data_rows=existing_data_rows,
373
+ )
374
+ if resolved_trial_number is None:
375
+ new_properties.append(prop)
376
+ continue
377
+
378
+ prop_patches = process_property(
379
+ session=session,
380
+ task_id=task_id,
381
+ block_id=block_id,
382
+ prop=prop,
383
+ existing_data_rows=existing_data_rows,
384
+ trial_number=resolved_trial_number,
385
+ )
386
+ if prop_patches is not None:
387
+ if prop_patches:
388
+ patches.extend(prop_patches)
389
+ continue
390
+ new_properties.append(prop)
391
+
392
+ return patches, new_properties
393
+
394
+
+ def process_property(
+     *,
+     session: AlbertSession,
+     task_id: TaskId,
+     block_id: BlockId,
+     prop: TaskPropertyCreate,
+     existing_data_rows: TaskPropertyData,
+     trial_number: int,
+ ) -> list | None:
+     """Resolve patches for a property against existing trials."""
+     for interval in existing_data_rows.data:
+         if interval.interval_combination != prop.interval_combination:
+             continue
+
+         for trial in interval.trials:
+             if trial.trial_number != trial_number:
+                 continue
+
+             trial_patches = process_trial(
+                 session=session,
+                 task_id=task_id,
+                 block_id=block_id,
+                 trial=trial,
+                 prop=prop,
+             )
+             if trial_patches is not None:
+                 return trial_patches
+
+     return None
+
+
+ def resolve_trial_number(
+     *, prop: TaskPropertyCreate, existing_data_rows: TaskPropertyData
+ ) -> int | None:
+     """Resolve the trial number for a property using visible trial numbers."""
+     if prop.trial_number is not None:
+         return prop.trial_number
+
+     visible_trial_number = prop.visible_trial_number
+     if visible_trial_number is None:
+         return None
+     if isinstance(visible_trial_number, str):
+         try:
+             visible_trial_number = int(visible_trial_number)
+         except ValueError:
+             return None
+
+     matching_trials = []
+     for interval in existing_data_rows.data:
+         if interval.interval_combination != prop.interval_combination:
+             continue
+         for trial in interval.trials:
+             if trial.visible_trial_number == visible_trial_number:
+                 matching_trials.append(trial.trial_number)
+
+     if len(matching_trials) == 1:
+         return matching_trials[0]
+     return None
+
+
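# Illustration (annotation, not part of the diff): a visible trial number
# (possibly a string) resolves to the internal trial number only when exactly
# one trial in the matching interval carries it. The stubs below are
# hypothetical stand-ins for the real pydantic models.
from types import SimpleNamespace

trial = SimpleNamespace(visible_trial_number=2, trial_number=7)
rows = SimpleNamespace(data=[SimpleNamespace(interval_combination="default", trials=[trial])])
prop = SimpleNamespace(trial_number=None, visible_trial_number="2", interval_combination="default")
assert resolve_trial_number(prop=prop, existing_data_rows=rows) == 7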
+ def process_trial(
+     *,
+     session: AlbertSession,
+     task_id: TaskId,
+     block_id: BlockId,
+     trial: Trial,
+     prop: TaskPropertyCreate,
+ ) -> list | None:
+     """Generate patch operations for a trial's matching data column."""
+     for data_column in trial.data_columns:
+         if (
+             data_column.data_column_unique_id
+             == f"{prop.data_column.data_column_id}#{prop.data_column.column_sequence}"
+             and data_column.property_data is not None
+         ):
+             if isinstance(prop.value, CurvePropertyValue):
+                 resolved_value = resolve_curve_property_value(
+                     session=session,
+                     task_id=task_id,
+                     block_id=block_id,
+                     prop=prop,
+                     curve_value=prop.value,
+                 )
+                 return [
+                     PropertyDataPatchDatum(
+                         id=data_column.property_data.id,
+                         operation=PatchOperation.UPDATE,
+                         attribute="value",
+                         new_value=resolved_value,
+                         old_value=data_column.property_data.value,
+                     )
+                 ]
+             if isinstance(prop.value, ImagePropertyValue):
+                 resolved_value = resolve_image_property_value(
+                     session=session,
+                     task_id=task_id,
+                     image_value=prop.value,
+                 )
+                 return [
+                     PropertyDataPatchDatum(
+                         id=data_column.property_data.id,
+                         operation=PatchOperation.UPDATE,
+                         attribute="value",
+                         new_value=resolved_value,
+                         old_value=data_column.property_data.value,
+                     )
+                 ]
+             if data_column.property_data.value == prop.value:
+                 return []
+             return [
+                 PropertyDataPatchDatum(
+                     id=data_column.property_data.id,
+                     operation=PatchOperation.UPDATE,
+                     attribute="value",
+                     new_value=prop.value,
+                     old_value=data_column.property_data.value,
+                 )
+             ]
+
+     return None
+
+
+ def form_calculated_task_property_patches(
+     *, existing_data_rows: TaskPropertyData, properties: list[TaskPropertyCreate]
+ ):
+     """Build patches for calculated columns after property updates."""
+     patches = []
+     covered_interval_trials = set()
+     first_row_data_column = existing_data_rows.data[0].trials[0].data_columns
+     columns_used_in_calculations = get_all_columns_used_in_calculations(
+         first_row_data_column=first_row_data_column
+     )
+     for posted_prop in properties:
+         this_interval_trial = f"{posted_prop.interval_combination}-{posted_prop.trial_number}"
+         if (
+             this_interval_trial in covered_interval_trials
+             or posted_prop.data_column.column_sequence not in columns_used_in_calculations
+         ):
+             continue
+         on_platform_row = get_on_platform_row(
+             existing_data_rows=existing_data_rows,
+             trial_number=posted_prop.trial_number,
+             interval_combination=posted_prop.interval_combination,
+         )
+         if on_platform_row is not None:
+             these_patches = generate_data_patch_payload(trial=on_platform_row)
+             patches.extend(these_patches)
+         covered_interval_trials.add(this_interval_trial)
+     return patches
+
+
+ def get_on_platform_row(
+     *, existing_data_rows: TaskPropertyData, interval_combination: str, trial_number: int
+ ):
+     """Find the matching trial row by interval and trial number."""
+     for interval in existing_data_rows.data:
+         if interval.interval_combination == interval_combination:
+             for trial in interval.trials:
+                 if trial.trial_number == trial_number:
+                     return trial
+     return None
+
+
+ def get_columns_used_in_calculation(*, calculation: str | None, used_columns: set[str]):
+     """Collect column identifiers referenced in a calculation string."""
+     if calculation is None:
+         return used_columns
+     column_pattern = r"COL\d+"
+     matches = re.findall(column_pattern, calculation)
+     used_columns.update(set(matches))
+     return used_columns
+
+
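# Illustration (annotation, not part of the diff): COLn tokens are harvested
# with the COL\d+ pattern, so a formula like "=COL1*COL2/100" yields both
# referenced columns.
cols = get_columns_used_in_calculation(calculation="=COL1*COL2/100", used_columns=set())
assert cols == {"COL1", "COL2"}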
+ def get_all_columns_used_in_calculations(*, first_row_data_column: list):
+     """Aggregate column identifiers used in calculation fields."""
+     used_columns = set()
+     for calc in [x.calculation for x in first_row_data_column]:
+         used_columns = get_columns_used_in_calculation(calculation=calc, used_columns=used_columns)
+     return used_columns
+
+
+ def evaluate_calculation(*, calculation: str, column_values: dict) -> float | None:
+     """Evaluate a calculation expression against column values."""
+     calculation = calculation.lstrip("=")
+     try:
+         if column_values:
+             escaped_cols = [re.escape(col) for col in column_values]
+             pattern = re.compile(rf"\b({'|'.join(escaped_cols)})\b")
+
+             def repl(match: re.Match) -> str:
+                 """Replace column tokens with values in a calculation expression."""
+                 col = match.group(0)
+                 return str(column_values.get(col, match.group(0)))
+
+             calculation = pattern.sub(repl, calculation)
+
+         calculation = calculation.replace("^", "**")
+         return eval(calculation)
+     except Exception as e:
+         logger.info(
+             "Error evaluating calculation '%s': %s. Likely do not have all values needed.",
+             calculation,
+             e,
+         )
+         return None
+
+
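# Illustration (annotation, not part of the diff): column tokens are
# substituted by value, "^" is rewritten to Python's "**", and the expression
# is evaluated.
result = evaluate_calculation(
    calculation="=COL1^2 + COL2",
    column_values={"COL1": 3, "COL2": 4},
)
assert result == 13  # 3**2 + 4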
+ def generate_data_patch_payload(*, trial: Trial) -> list[PropertyDataPatchDatum]:
+     """Generate patch payloads for calculated columns in a trial."""
+     column_values = {
+         col.sequence: col.property_data.value
+         for col in trial.data_columns
+         if col.property_data is not None
+     }
+
+     patch_data = []
+     for column in trial.data_columns:
+         if column.calculation:
+             recalculated_value = evaluate_calculation(
+                 calculation=column.calculation, column_values=column_values
+             )
+             if recalculated_value is not None:
+                 if column.property_data.value is None:
+                     patch_data.append(
+                         PropertyDataPatchDatum(
+                             id=column.property_data.id,
+                             operation=PatchOperation.ADD,
+                             attribute="value",
+                             new_value=recalculated_value,
+                             old_value=None,
+                         )
+                     )
+                 elif str(column.property_data.value) != str(recalculated_value):
+                     patch_data.append(
+                         PropertyDataPatchDatum(
+                             id=column.property_data.id,
+                             operation=PatchOperation.UPDATE,
+                             attribute="value",
+                             new_value=recalculated_value,
+                             old_value=column.property_data.value,
+                         )
+                     )
+
+     return patch_data
albert/utils/tasks.py CHANGED
@@ -511,7 +511,7 @@ def map_csv_headers_to_columns(
             continue
         identifier, _display_name = matching_entry
         if identifier in used_columns:
-            logger.warning(
+            logger.info(
                 "Column %s already mapped; skipping CSV header '%s'.",
                 identifier,
                 header_name,
@@ -532,11 +532,11 @@ def map_csv_headers_to_columns(
         normalized_header = header_name.lower()
         matching_entry = columns_by_name.get(normalized_header)
        if matching_entry is None:
-            logger.warning("No matching column found for CSV header '%s'.", header_name)
+            logger.info("No matching column found for CSV header '%s'.", header_name)
             continue
         identifier, _display_name = matching_entry
         if identifier in used_columns:
-            logger.warning(
+            logger.info(
                 "Column %s already mapped; skipping CSV header '%s'.",
                 identifier,
                 header_name,
{albert-1.10.0rc2.dist-info → albert-1.11.0.dist-info}/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: albert
-Version: 1.10.0rc2
+Version: 1.11.0
 Summary: The official Python SDK for the Albert Invent platform.
 Project-URL: Homepage, https://www.albertinvent.com/
 Project-URL: Documentation, https://docs.developer.albertinvent.com/albert-python