liminal-orm 3.2.1__py3-none-any.whl → 4.0.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,207 @@
1
+ from __future__ import annotations
2
+
3
+ import inspect
4
+ import logging
5
+ from types import FunctionType
6
+ from typing import Any, Generic, TypeVar # noqa: UP035
7
+
8
+ import pandas as pd # type: ignore
9
+ from sqlalchemy import DATETIME, Boolean, ForeignKey, String
10
+ from sqlalchemy import Column as SqlColumn
11
+ from sqlalchemy.orm import Query, RelationshipProperty, Session, relationship
12
+ from sqlalchemy.orm.decl_api import declared_attr
13
+
14
+ from liminal.base.base_validation_filters import BaseValidatorFilters
15
+ from liminal.orm.base import Base
16
+ from liminal.orm.base_tables.user import User
17
+ from liminal.orm.results_schema_properties import ResultsSchemaProperties
18
+ from liminal.validation import BenchlingValidatorReport
19
+
20
+ T = TypeVar("T")
21
+
22
+ logger = logging.getLogger(__name__)
23
+
24
+
25
+ class BaseResultsModel(Generic[T], Base):
26
+ """Base class for all results models. Defines all common columns for results tables in Postgres Benchling warehouse."""
27
+
28
+ __abstract__ = True
29
+ __schema_properties__: ResultsSchemaProperties
30
+
31
+ @declared_attr
32
+ def creator_id(cls) -> SqlColumn:
33
+ return SqlColumn(
34
+ "creator_id$", String, ForeignKey("user$raw.id"), nullable=True
35
+ )
36
+
37
+ @declared_attr
38
+ def creator(cls) -> RelationshipProperty:
39
+ return relationship("User", foreign_keys=[cls.creator_id])
40
+
41
+ id = SqlColumn("id", String, nullable=True, primary_key=True)
42
+ archived = SqlColumn("archived$", Boolean, nullable=True)
43
+ archive_purpose = SqlColumn("archive_purpose$", String, nullable=True)
44
+ created_at = SqlColumn("created_at$", DATETIME, nullable=True)
45
+ entry_id = SqlColumn("entry_id$", String, nullable=True)
46
+ modified_at = SqlColumn("modified_at$", DATETIME, nullable=True)
47
+ run_id = SqlColumn("run_id$", String, nullable=True)
48
+ v3_id = SqlColumn("v3_id$", String, nullable=True)
49
+
50
+ def __init_subclass__(cls, **kwargs: Any):
51
+ super().__init_subclass__(**kwargs)
52
+ warehouse_name = cls.__schema_properties__.warehouse_name
53
+ cls.__tablename__ = warehouse_name + "$raw"
54
+
55
+ @classmethod
56
+ def apply_base_filters(
57
+ cls,
58
+ query: Query,
59
+ filter_archived: bool = True,
60
+ filter_unregistered: bool = True,
61
+ base_filters: BaseValidatorFilters | None = None,
62
+ ) -> Query:
63
+ """Applies the base model filters to the given query."""
64
+ if filter_archived:
65
+ query = query.filter(cls.archived.is_(False))
66
+ if filter_unregistered:
67
+ if hasattr(cls, "is_registered"):
68
+ query = query.filter(cls.is_registered.is_(True))
69
+
70
+ if base_filters is None:
71
+ return query
72
+ if base_filters.created_date_start:
73
+ query = query.filter(cls.created_at >= base_filters.created_date_start)
74
+ if base_filters.created_date_end:
75
+ query = query.filter(cls.created_at <= base_filters.created_date_end)
76
+ if base_filters.updated_date_start:
77
+ query = query.filter(cls.modified_at >= base_filters.updated_date_start)
78
+ if base_filters.updated_date_end:
79
+ query = query.filter(cls.modified_at <= base_filters.updated_date_end)
80
+ if base_filters.entity_ids:
81
+ query = query.filter(cls.v3_id.in_(base_filters.entity_ids))
82
+ if base_filters.creator_full_names:
83
+ query = query.filter(User.name.in_(base_filters.creator_full_names))
84
+ return query
85
+
86
+ @classmethod
87
+ def all(cls, session: Session) -> list[T]:
88
+ """Uses the query method to retrieve all results schema rows from the database.
89
+
90
+ Parameters
91
+ ----------
92
+ session : Session
93
+ Benchling database session.
94
+
95
+ Returns
96
+ -------
97
+ list[T]
98
+ List of all results schema rows from the database.
99
+ """
100
+ return cls.query(session).all()
101
+
102
+ @classmethod
103
+ def df(cls, session: Session) -> pd.DataFrame:
104
+ """Uses the query method to retrieve all results schema rows from the database.
105
+
106
+ Parameters
107
+ ----------
108
+ session : Session
109
+ Benchling database session.
110
+
111
+ Returns
112
+ -------
113
+ pd.DataFrame
114
+ A pandas dataframe of all results schema rows from the database.
115
+ """
116
+ query = cls.query(session)
117
+ return pd.read_sql(query.statement, session.connection())
118
+
119
+ @classmethod
120
+ def query(cls, session: Session) -> Query:
121
+ """Default query method that users can override to define a specific query
122
+ to retrieve results schema rows from the database and cover any distinct relationships.
123
+
124
+ Parameters
125
+ ----------
126
+ session : Session
127
+ Benchling database session.
128
+
129
+ Returns
130
+ -------
131
+ Query
132
+ sqlalchemy query to retrieve results schema rows from the database.
133
+ """
134
+ return session.query(cls)
135
+
136
+ @classmethod
137
+ def get_validators(cls) -> list[FunctionType]:
138
+ """Returns a list of all validators defined on the class. Validators are functions that are decorated with @validator."""
139
+ validators = []
140
+ for name, method in inspect.getmembers(cls, predicate=inspect.isfunction):
141
+ if hasattr(method, "_is_liminal_validator"):
142
+ validators.append(method)
143
+ return validators
144
+
145
+ @classmethod
146
+ def validate(
147
+ cls,
148
+ session: Session,
149
+ base_filters: BaseValidatorFilters | None = None,
150
+ only_invalid: bool = False,
151
+ ) -> list[BenchlingValidatorReport]:
152
+ """Runs all validators for all results schema rows returned from the query and returns a list of reports.
153
+ This returns a report for each results schema row, validator pair, regardless of whether the validation passed or failed.
154
+
155
+ Parameters
156
+ ----------
157
+ session : Session
158
+ Benchling database session.
159
+ base_filters: BaseValidatorFilters
160
+ Filters to apply to the query.
161
+ only_invalid: bool
162
+ If True, only returns reports for entities that failed validation.
163
+
164
+ Returns
165
+ -------
166
+ list[BenchlingValidatorReport]
167
+ List of reports from running all validators on all results schema rows returned from the query.
168
+ """
169
+ results: list[BenchlingValidatorReport] = []
170
+ table: list[T] = cls.apply_base_filters(
171
+ cls.query(session), base_filters=base_filters
172
+ ).all()
173
+ logger.info(
174
+ f"Validating {len(table)} results schema rows for {cls.__name__}..."
175
+ )
176
+ validator_functions = cls.get_validators()
177
+ for entity in table:
178
+ for validator_func in validator_functions:
179
+ report: BenchlingValidatorReport = validator_func(entity)
180
+ if only_invalid and report.valid:
181
+ continue
182
+ results.append(report)
183
+ return results
184
+
185
+ @classmethod
186
+ def validate_to_df(
187
+ cls,
188
+ session: Session,
189
+ base_filters: BaseValidatorFilters | None = None,
190
+ only_invalid: bool = False,
191
+ ) -> pd.DataFrame:
192
+ """Runs all validators for all results schema rows returned from the query and returns reports as a pandas dataframe.
193
+
194
+ Parameters
195
+ ----------
196
+ session : Session
197
+ Benchling database session.
198
+ base_filters: BaseValidatorFilters
199
+ Filters to apply to the query.
200
+
201
+ Returns
202
+ -------
203
+ pd.DataFrame
204
+ Dataframe of reports from running all validators on all results schema rows returned from the query.
205
+ """
206
+ results = cls.validate(session, base_filters, only_invalid)
207
+ return pd.DataFrame([r.model_dump() for r in results])
@@ -36,7 +36,7 @@ def single_relationship(
36
36
 
37
37
 
38
38
  def multi_relationship(*args: Any, **kwargs: Any) -> RelationshipProperty:
39
- """Wrapper for generating a multi-relationship. Supporting the usage of a deprecated signature until v4 release."""
39
+ """Wrapper for generating a multi-relationship. Supporting the usage of a deprecated signature until v5 release."""
40
40
  if len(args) == 2 and isinstance(args[1], Column):
41
41
  return multi_relationship_v2(*args, **kwargs)
42
42
  else:
@@ -64,7 +64,7 @@ def multi_relationship_deprecated(
64
64
  SQLAlchemy RelationshipProperty
65
65
  """
66
66
  warnings.warn(
67
- "This version of multi_relationship is deprecated. New function signature is multi_relationship(target_class_name: str, entity_link_field: Column). Support for this signature will end with the v4 release.",
67
+ "This version of multi_relationship is deprecated. New function signature is multi_relationship(target_class_name: str, entity_link_field: Column). Support for this signature will end with the v5 release.",
68
68
  FutureWarning,
69
69
  stacklevel=2,
70
70
  )
@@ -0,0 +1,23 @@
1
+ from __future__ import annotations
2
+
3
+ from pydantic import BaseModel
4
+
5
+
6
+ class ResultsSchemaProperties(BaseModel):
7
+ """
8
+ This class is the validated class that is public facing, and represents the properties of a results schema.
9
+
10
+ Parameters
11
+ ----------
12
+ name : str
13
+ The name of the schema.
14
+ warehouse_name : str
15
+ The sql table name of the schema in the benchling warehouse.
16
+ """
17
+
18
+ name: str
19
+ warehouse_name: str
20
+
21
+ def __repr__(self) -> str:
22
+ """Generates a string representation of the class so that it can be executed."""
23
+ return f"{self.__class__.__name__}({', '.join([f'{k}={v.__repr__()}' for k, v in self.model_dump().items()])})"
@@ -94,3 +94,7 @@ class SchemaProperties(BaseSchemaProperties):
94
94
  def set_archived(self, value: bool) -> SchemaProperties:
95
95
  self._archived = value
96
96
  return self
97
+
98
+ def set_warehouse_name(self, value: str) -> SchemaProperties:
99
+ self.warehouse_name = value
100
+ return self
@@ -0,0 +1,197 @@
1
+ import shutil
2
+ from pathlib import Path
3
+
4
+ from rich import print
5
+
6
+ from liminal.base.base_dropdown import BaseDropdown
7
+ from liminal.connection import BenchlingService
8
+ from liminal.dropdowns.utils import get_benchling_dropdowns_dict
9
+ from liminal.entity_schemas.utils import get_converted_tag_schemas
10
+ from liminal.enums import BenchlingFieldType
11
+ from liminal.mappers import convert_benchling_type_to_python_type
12
+ from liminal.orm.base_model import BaseModel
13
+ from liminal.results_schemas.utils import get_converted_results_schemas
14
+ from liminal.utils import to_pascal_case, to_snake_case
15
+
16
+ TAB = " "
17
+
18
+
19
+ def generate_all_results_schema_files(
20
+ benchling_service: BenchlingService, write_path: Path, overwrite: bool = False
21
+ ) -> None:
22
+ """Generate all results schema files from your Benchling tenant and writes to the given results_schemas/ path.
23
+ This is used to initialize your code for Liminal and transfer the information from your Benchling tenant to your local codebase.
24
+
25
+ Parameters
26
+ ----------
27
+ benchling_service : BenchlingService
28
+ The Benchling service object that is connected to a specified Benchling tenant.
29
+ write_path : Path
30
+ The path to write the generated files to. results_schemas/ directory will be created within this path.
31
+ overwrite : bool
32
+ Whether to overwrite existing the existing results_schemas/ directory.
33
+ """
34
+ write_path = write_path / "results_schemas"
35
+ if write_path.exists() and overwrite:
36
+ shutil.rmtree(write_path)
37
+ print(f"[dim]Removed directory: {write_path}")
38
+ if not write_path.exists():
39
+ write_path.mkdir(parents=True, exist_ok=True)
40
+ print(f"[green]Created directory: {write_path}")
41
+
42
+ results_schemas = get_converted_results_schemas(benchling_service)
43
+ entity_schemas_wh_name_to_classname = _get_entity_schemas_wh_name_to_classname(
44
+ benchling_service
45
+ )
46
+ dropdown_name_to_classname_map = _get_dropdown_name_to_classname_map(
47
+ benchling_service
48
+ )
49
+ init_file_imports = []
50
+
51
+ for schema_properties, field_properties_dict in results_schemas:
52
+ has_date = False
53
+ file_name = to_snake_case(schema_properties.warehouse_name) + ".py"
54
+ schema_name = to_pascal_case(schema_properties.warehouse_name)
55
+ init_file_imports.append(
56
+ f"from .{to_snake_case(schema_properties.warehouse_name)} import {schema_name}"
57
+ )
58
+ import_strings = [
59
+ "from sqlalchemy import Column as SqlColumn",
60
+ "from liminal.orm.base_results_model import BaseResultsModel",
61
+ "from liminal.orm.results_schema_properties import ResultsSchemaProperties",
62
+ "from liminal.orm.column import Column",
63
+ "from liminal.enums import BenchlingFieldType",
64
+ ]
65
+ init_strings = [f"{TAB}def __init__(", f"{TAB}self,"]
66
+ column_strings = []
67
+ dropdowns = []
68
+ relationship_strings = []
69
+ for col_name, col in field_properties_dict.items():
70
+ column_props = col.column_dump()
71
+ dropdown_classname = None
72
+ if col.dropdown_link:
73
+ dropdown_classname = dropdown_name_to_classname_map[col.dropdown_link]
74
+ dropdowns.append(dropdown_classname)
75
+ column_props["dropdown_link"] = dropdown_classname
76
+ column_props_string = ""
77
+ for k, v in column_props.items():
78
+ if k == "dropdown_link":
79
+ column_props_string += f"""dropdown={v},"""
80
+ else:
81
+ column_props_string += f"""{k}={v.__repr__()},"""
82
+ column_string = f"""{TAB}{col_name}: SqlColumn = Column({column_props_string.rstrip(',')})"""
83
+ column_strings.append(column_string)
84
+ if col.required and col.type:
85
+ init_strings.append(
86
+ f"""{TAB}{col_name}: {convert_benchling_type_to_python_type(col.type).__name__},"""
87
+ )
88
+
89
+ if (
90
+ col.type == BenchlingFieldType.DATE
91
+ or col.type == BenchlingFieldType.DATETIME
92
+ ):
93
+ if not has_date:
94
+ import_strings.append("from datetime import datetime")
95
+ if (
96
+ col.type in BenchlingFieldType.get_entity_link_types()
97
+ and col.entity_link is not None
98
+ ):
99
+ if not col.is_multi:
100
+ relationship_strings.append(
101
+ f"""{TAB}{col_name}_entity = single_relationship("{entity_schemas_wh_name_to_classname[col.entity_link]}", {col_name})"""
102
+ )
103
+ import_strings.append(
104
+ "from liminal.orm.relationship import single_relationship"
105
+ )
106
+ else:
107
+ relationship_strings.append(
108
+ f"""{TAB}{col_name}_entities = multi_relationship("{entity_schemas_wh_name_to_classname[col.entity_link]}", {col_name})"""
109
+ )
110
+ import_strings.append(
111
+ "from liminal.orm.relationship import multi_relationship"
112
+ )
113
+ for col_name, col in field_properties_dict.items():
114
+ if not col.required and col.type:
115
+ init_strings.append(
116
+ f"""{TAB}{col_name}: {convert_benchling_type_to_python_type(col.type).__name__} | None = None,"""
117
+ )
118
+ init_strings.append("):")
119
+ for col_name in field_properties_dict.keys():
120
+ init_strings.append(f"{TAB}self.{col_name} = {col_name}")
121
+ if len(dropdowns) > 0:
122
+ import_strings.append(f"from ..dropdowns import {', '.join(dropdowns)}")
123
+ for col_name, col in field_properties_dict.items():
124
+ if col.dropdown_link:
125
+ init_strings.append(
126
+ TAB
127
+ + dropdown_name_to_classname_map[col.dropdown_link]
128
+ + f".validate({col_name})"
129
+ )
130
+
131
+ import_string = "\n".join(list(set(import_strings)))
132
+ columns_string = "\n".join(column_strings)
133
+ relationship_string = "\n".join(relationship_strings)
134
+ init_string = (
135
+ f"\n{TAB}".join(init_strings) if len(field_properties_dict) > 0 else ""
136
+ )
137
+ schema_content = f"""{import_string}
138
+
139
+
140
+ class {schema_name}(BaseResultsModel):
141
+ __schema_properties__ = {schema_properties.__repr__()}
142
+
143
+ {columns_string}
144
+
145
+ {relationship_string}
146
+
147
+ {init_string}
148
+ """
149
+
150
+ with open(write_path / file_name, "w") as file:
151
+ file.write(schema_content)
152
+
153
+ with open(write_path / "__init__.py", "w") as file:
154
+ file.write("\n".join(init_file_imports))
155
+ print(
156
+ f"[green]Generated {write_path / '__init__.py'} with {len(results_schemas)} entity schema imports."
157
+ )
158
+
159
+
160
+ def _get_dropdown_name_to_classname_map(
161
+ benchling_service: BenchlingService,
162
+ ) -> dict[str, str]:
163
+ """Gets the dropdown name to classname map.
164
+ If there are dropdowns imported, use BaseDropdown.get_all_subclasses()
165
+ Otherwise, it will query for Benchling dropdowns and use those.
166
+ """
167
+ if len(BaseDropdown.get_all_subclasses()) > 0:
168
+ return {
169
+ dropdown.__benchling_name__: dropdown.__name__
170
+ for dropdown in BaseDropdown.get_all_subclasses()
171
+ }
172
+ benchling_dropdowns = get_benchling_dropdowns_dict(benchling_service)
173
+ if len(benchling_dropdowns) > 0:
174
+ raise Exception(
175
+ "No dropdowns found locally. Please ensure your env.py file imports your dropdown classes or generate dropdowns from your Benchling tenant first."
176
+ )
177
+ return {}
178
+
179
+
180
+ def _get_entity_schemas_wh_name_to_classname(
181
+ benchling_service: BenchlingService,
182
+ ) -> dict[str, str]:
183
+ """Gets the entity schema warehouse name to classname map.
184
+ If there are entity schemas imported, use BaseModel.get_all_subclasses()
185
+ Otherwise, it will query for Benchling entity schemas and use those.
186
+ """
187
+ if len(BaseModel.get_all_subclasses()) > 0:
188
+ return {
189
+ s.__schema_properties__.warehouse_name: s._sa_class_manager.class_.__name__
190
+ for s in BaseModel.get_all_subclasses()
191
+ }
192
+ tag_schemas = get_converted_tag_schemas(benchling_service)
193
+ if len(tag_schemas) > 0:
194
+ raise Exception(
195
+ "No entity schemas found locally. Please ensure your env.py file imports your entity schema classes or generate entity schemas from your Benchling tenant first."
196
+ )
197
+ return {}
@@ -0,0 +1,145 @@
1
+ from __future__ import annotations
2
+
3
+ from functools import lru_cache
4
+ from typing import Any
5
+
6
+ import requests
7
+ from pydantic import BaseModel
8
+
9
+ from liminal.connection.benchling_service import BenchlingService
10
+ from liminal.entity_schemas.tag_schema_models import TagSchemaFieldModel
11
+
12
+
13
+ class ResultsSchemaModel(BaseModel):
14
+ """A pydantic model to define a results schema, which is used when querying for results schemas from Benchling's internal API."""
15
+
16
+ allFields: list[TagSchemaFieldModel]
17
+ archiveRecord: dict[str, str] | None
18
+ derivedParent: Any | None
19
+ fields: list[TagSchemaFieldModel]
20
+ id: str
21
+ name: str | None
22
+ organization: Any | None
23
+ permissions: dict[str, bool] | None
24
+ prefix: str | None
25
+ publishedDataTableColumns: Any | None
26
+ requestTaskSchemaIds: list[Any] | None
27
+ requestTemplateIds: list[Any] | None
28
+ sampleGroupSchema: Any | None
29
+ schemaType: str
30
+ sqlIdentifier: str | None
31
+
32
+ @classmethod
33
+ def get_all_json(
34
+ cls,
35
+ benchling_service: BenchlingService,
36
+ ) -> list[dict[str, Any]]:
37
+ """This function gets all results schemas from Benchling's internal API, returning the raw JSON data.
38
+
39
+ Parameters
40
+ ----------
41
+ benchling_service : BenchlingService
42
+ The benchling service to use to get the results schemas.
43
+
44
+ Returns
45
+ -------
46
+ list[dict[str, Any]]
47
+ A list of results schemas, in their raw JSON format.
48
+ """
49
+
50
+ with requests.Session() as session:
51
+ response = session.get(
52
+ f"https://{benchling_service.benchling_tenant}.benchling.com/1/api/result-schemas",
53
+ headers=benchling_service.custom_post_headers,
54
+ cookies=benchling_service.custom_post_cookies,
55
+ )
56
+ if not response.ok:
57
+ raise Exception("Failed to get result schemas.")
58
+ return response.json()["data"]
59
+
60
+ @classmethod
61
+ def get_all(
62
+ cls,
63
+ benchling_service: BenchlingService,
64
+ wh_schema_names: set[str] | None = None,
65
+ ) -> list[ResultsSchemaModel]:
66
+ """This function gets all results schemas from Benchling's internal API.
67
+ If a list of warehouse names is provided, the function will only return the results schemas with the given warehouse names.
68
+
69
+ Parameters
70
+ ----------
71
+ benchling_service : BenchlingService
72
+ The benchling service to use to get the results schemas.
73
+ wh_schema_names : set[str] | None, optional
74
+ The set of warehouse names to filter the results schemas by. If not provided, all results schemas will be returned.
75
+
76
+ Returns
77
+ -------
78
+ list[ResultsSchemaModel]
79
+ A list of results schema models.
80
+ """
81
+ schemas_data = cls.get_all_json(benchling_service)
82
+ filtered_schemas: list[ResultsSchemaModel] = []
83
+ if wh_schema_names:
84
+ for schema in schemas_data:
85
+ if schema["sqlIdentifier"] in wh_schema_names:
86
+ filtered_schemas.append(cls.model_validate(schema))
87
+ if len(filtered_schemas) == len(wh_schema_names):
88
+ break
89
+ else:
90
+ for schema in schemas_data:
91
+ try:
92
+ filtered_schemas.append(cls.model_validate(schema))
93
+ except Exception as e:
94
+ print(f"Error validating schema {schema['sqlIdentifier']}: {e}")
95
+ return filtered_schemas
96
+
97
+ @classmethod
98
+ def get_one(
99
+ cls,
100
+ benchling_service: BenchlingService,
101
+ wh_schema_name: str,
102
+ schemas_data: list[dict[str, Any]] | None = None,
103
+ ) -> ResultsSchemaModel:
104
+ """This function gets a singular results schema, and raises an error if a schema with the given warehouse name is not found.
105
+
106
+ Parameters
107
+ ----------
108
+ benchling_service : BenchlingService
109
+ The benchling service to use to get the results schema.
110
+ wh_schema_name : str
111
+ The warehouse name of the results schema to search for.
112
+ schemas_data : list[dict[str, Any]] | None
113
+ The list of results schemas to search through, to avoid making extra API calls. If not provided, the function will get all results schemas from Benchling.
114
+
115
+ Returns
116
+ -------
117
+ ResultsSchemaModel
118
+ The corresponding results schema model.
119
+ """
120
+ if schemas_data is None:
121
+ schemas_data = cls.get_all_json(benchling_service)
122
+ schema = next(
123
+ (
124
+ schema
125
+ for schema in schemas_data
126
+ if schema["sqlIdentifier"] == wh_schema_name
127
+ and schema["registryId"] == benchling_service.registry_id
128
+ ),
129
+ None,
130
+ )
131
+ if schema is None:
132
+ raise ValueError(
133
+ f"Schema {wh_schema_name} not found in Benchling {benchling_service.benchling_tenant}."
134
+ )
135
+ return cls.model_validate(schema)
136
+
137
+ @classmethod
138
+ @lru_cache(maxsize=100)
139
+ def get_one_cached(
140
+ cls,
141
+ benchling_service: BenchlingService,
142
+ wh_schema_name: str,
143
+ ) -> ResultsSchemaModel:
144
+ """This function gets a singular results schema from Benchling and caches it."""
145
+ return cls.get_one(benchling_service, wh_schema_name)
@@ -0,0 +1,48 @@
1
+ from benchling_api_client.v2.stable.models.assay_result_schema import AssayResultSchema
2
+
3
+ from liminal.base.properties.base_field_properties import BaseFieldProperties
4
+ from liminal.connection import BenchlingService
5
+ from liminal.dropdowns.utils import get_benchling_dropdown_id_name_map
6
+ from liminal.entity_schemas.utils import convert_tag_schema_field_to_field_properties
7
+ from liminal.orm.results_schema_properties import ResultsSchemaProperties
8
+ from liminal.results_schemas.models.results_schema_model import ResultsSchemaModel
9
+ from liminal.unit_dictionary.utils import get_unit_id_to_name_map
10
+
11
+
12
+ def get_converted_results_schemas(
13
+ benchling_service: BenchlingService,
14
+ ) -> list[tuple[ResultsSchemaProperties, dict[str, BaseFieldProperties]]]:
15
+ """This function gets all Results Schema schemas from Benchling and converts them to our internal representation of a schema and its fields.
16
+ It parses the Results Schema and creates ResultsSchemaProperties and a list of FieldProperties for each field in the schema.
17
+ """
18
+ results_schemas = ResultsSchemaModel.get_all(benchling_service)
19
+ dropdowns_map = get_benchling_dropdown_id_name_map(benchling_service)
20
+ unit_id_to_name_map = get_unit_id_to_name_map(benchling_service)
21
+ results_schemas_list = []
22
+ for schema in results_schemas:
23
+ schema_properties = ResultsSchemaProperties(
24
+ name=schema.name,
25
+ warehouse_name=schema.sqlIdentifier,
26
+ )
27
+ field_properties_dict = {}
28
+ for field in schema.fields:
29
+ field_properties_dict[field.systemName] = (
30
+ convert_tag_schema_field_to_field_properties(
31
+ field, dropdowns_map, unit_id_to_name_map
32
+ )
33
+ )
34
+ results_schemas_list.append((schema_properties, field_properties_dict))
35
+ return results_schemas_list
36
+
37
+
38
+ def get_results_schemas_dict(
39
+ benchling_service: BenchlingService,
40
+ ) -> dict[str, AssayResultSchema]:
41
+ """This function gets all Results Schema schemas using the Benchling API and returns a dictionary of the schemas by their system name."""
42
+ flattened_schemas = [
43
+ s
44
+ for schemas in list(benchling_service.schemas.list_assay_result_schemas())
45
+ for s in schemas
46
+ ]
47
+ schemas_dict = {s.system_name: s for s in flattened_schemas}
48
+ return schemas_dict
@@ -41,23 +41,22 @@ class TestCompareEntitySchemas:
41
41
 
42
42
  mock_get_benchling_entity_schemas.assert_called_once()
43
43
  mock_get_all_subclasses.assert_called()
44
- assert len(invalid_models["mock_entity_two_wh"]) == 2
44
+ assert len(invalid_models["mock_entity_two"]) == 2
45
45
  assert isinstance(
46
- invalid_models["mock_entity_two_wh"][0].op, CreateEntitySchema
46
+ invalid_models["mock_entity_two"][0].op, CreateEntitySchema
47
47
  )
48
48
  assert (
49
- invalid_models["mock_entity_two_wh"][0].op.schema_properties.name
49
+ invalid_models["mock_entity_two"][0].op.schema_properties.name
50
50
  == "Mock Entity Two"
51
51
  )
52
52
  assert [
53
- f.warehouse_name
54
- for f in invalid_models["mock_entity_two_wh"][0].op.fields
53
+ f.warehouse_name for f in invalid_models["mock_entity_two"][0].op.fields
55
54
  ] == ["parent_link_field"]
56
55
  assert isinstance(
57
- invalid_models["mock_entity_two_wh"][1].op, UpdateEntitySchema
56
+ invalid_models["mock_entity_two"][1].op, UpdateEntitySchema
58
57
  )
59
58
  assert (
60
- invalid_models["mock_entity_two_wh"][1].op.update_props.warehouse_name
59
+ invalid_models["mock_entity_two"][1].op.update_props.warehouse_name
61
60
  == "mock_entity_two_wh"
62
61
  )
63
62