pytrilogy 0.3.149__cp313-cp313-win_amd64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- LICENSE.md +19 -0
- _preql_import_resolver/__init__.py +5 -0
- _preql_import_resolver/_preql_import_resolver.cp313-win_amd64.pyd +0 -0
- pytrilogy-0.3.149.dist-info/METADATA +555 -0
- pytrilogy-0.3.149.dist-info/RECORD +207 -0
- pytrilogy-0.3.149.dist-info/WHEEL +4 -0
- pytrilogy-0.3.149.dist-info/entry_points.txt +2 -0
- pytrilogy-0.3.149.dist-info/licenses/LICENSE.md +19 -0
- trilogy/__init__.py +27 -0
- trilogy/ai/README.md +10 -0
- trilogy/ai/__init__.py +19 -0
- trilogy/ai/constants.py +92 -0
- trilogy/ai/conversation.py +107 -0
- trilogy/ai/enums.py +7 -0
- trilogy/ai/execute.py +50 -0
- trilogy/ai/models.py +34 -0
- trilogy/ai/prompts.py +100 -0
- trilogy/ai/providers/__init__.py +0 -0
- trilogy/ai/providers/anthropic.py +106 -0
- trilogy/ai/providers/base.py +24 -0
- trilogy/ai/providers/google.py +146 -0
- trilogy/ai/providers/openai.py +89 -0
- trilogy/ai/providers/utils.py +68 -0
- trilogy/authoring/README.md +3 -0
- trilogy/authoring/__init__.py +148 -0
- trilogy/constants.py +119 -0
- trilogy/core/README.md +52 -0
- trilogy/core/__init__.py +0 -0
- trilogy/core/constants.py +6 -0
- trilogy/core/enums.py +454 -0
- trilogy/core/env_processor.py +239 -0
- trilogy/core/environment_helpers.py +320 -0
- trilogy/core/ergonomics.py +193 -0
- trilogy/core/exceptions.py +123 -0
- trilogy/core/functions.py +1240 -0
- trilogy/core/graph_models.py +142 -0
- trilogy/core/internal.py +85 -0
- trilogy/core/models/__init__.py +0 -0
- trilogy/core/models/author.py +2670 -0
- trilogy/core/models/build.py +2603 -0
- trilogy/core/models/build_environment.py +165 -0
- trilogy/core/models/core.py +506 -0
- trilogy/core/models/datasource.py +436 -0
- trilogy/core/models/environment.py +756 -0
- trilogy/core/models/execute.py +1213 -0
- trilogy/core/optimization.py +251 -0
- trilogy/core/optimizations/__init__.py +12 -0
- trilogy/core/optimizations/base_optimization.py +17 -0
- trilogy/core/optimizations/hide_unused_concept.py +47 -0
- trilogy/core/optimizations/inline_datasource.py +102 -0
- trilogy/core/optimizations/predicate_pushdown.py +245 -0
- trilogy/core/processing/README.md +94 -0
- trilogy/core/processing/READMEv2.md +121 -0
- trilogy/core/processing/VIRTUAL_UNNEST.md +30 -0
- trilogy/core/processing/__init__.py +0 -0
- trilogy/core/processing/concept_strategies_v3.py +508 -0
- trilogy/core/processing/constants.py +15 -0
- trilogy/core/processing/discovery_node_factory.py +451 -0
- trilogy/core/processing/discovery_utility.py +548 -0
- trilogy/core/processing/discovery_validation.py +167 -0
- trilogy/core/processing/graph_utils.py +43 -0
- trilogy/core/processing/node_generators/README.md +9 -0
- trilogy/core/processing/node_generators/__init__.py +31 -0
- trilogy/core/processing/node_generators/basic_node.py +160 -0
- trilogy/core/processing/node_generators/common.py +270 -0
- trilogy/core/processing/node_generators/constant_node.py +38 -0
- trilogy/core/processing/node_generators/filter_node.py +315 -0
- trilogy/core/processing/node_generators/group_node.py +213 -0
- trilogy/core/processing/node_generators/group_to_node.py +117 -0
- trilogy/core/processing/node_generators/multiselect_node.py +207 -0
- trilogy/core/processing/node_generators/node_merge_node.py +695 -0
- trilogy/core/processing/node_generators/recursive_node.py +88 -0
- trilogy/core/processing/node_generators/rowset_node.py +165 -0
- trilogy/core/processing/node_generators/select_helpers/__init__.py +0 -0
- trilogy/core/processing/node_generators/select_helpers/datasource_injection.py +261 -0
- trilogy/core/processing/node_generators/select_merge_node.py +846 -0
- trilogy/core/processing/node_generators/select_node.py +95 -0
- trilogy/core/processing/node_generators/synonym_node.py +98 -0
- trilogy/core/processing/node_generators/union_node.py +91 -0
- trilogy/core/processing/node_generators/unnest_node.py +182 -0
- trilogy/core/processing/node_generators/window_node.py +201 -0
- trilogy/core/processing/nodes/README.md +28 -0
- trilogy/core/processing/nodes/__init__.py +179 -0
- trilogy/core/processing/nodes/base_node.py +522 -0
- trilogy/core/processing/nodes/filter_node.py +75 -0
- trilogy/core/processing/nodes/group_node.py +194 -0
- trilogy/core/processing/nodes/merge_node.py +420 -0
- trilogy/core/processing/nodes/recursive_node.py +46 -0
- trilogy/core/processing/nodes/select_node_v2.py +242 -0
- trilogy/core/processing/nodes/union_node.py +53 -0
- trilogy/core/processing/nodes/unnest_node.py +62 -0
- trilogy/core/processing/nodes/window_node.py +56 -0
- trilogy/core/processing/utility.py +823 -0
- trilogy/core/query_processor.py +604 -0
- trilogy/core/statements/README.md +35 -0
- trilogy/core/statements/__init__.py +0 -0
- trilogy/core/statements/author.py +536 -0
- trilogy/core/statements/build.py +0 -0
- trilogy/core/statements/common.py +20 -0
- trilogy/core/statements/execute.py +155 -0
- trilogy/core/table_processor.py +66 -0
- trilogy/core/utility.py +8 -0
- trilogy/core/validation/README.md +46 -0
- trilogy/core/validation/__init__.py +0 -0
- trilogy/core/validation/common.py +161 -0
- trilogy/core/validation/concept.py +146 -0
- trilogy/core/validation/datasource.py +227 -0
- trilogy/core/validation/environment.py +73 -0
- trilogy/core/validation/fix.py +256 -0
- trilogy/dialect/__init__.py +32 -0
- trilogy/dialect/base.py +1432 -0
- trilogy/dialect/bigquery.py +314 -0
- trilogy/dialect/common.py +147 -0
- trilogy/dialect/config.py +159 -0
- trilogy/dialect/dataframe.py +50 -0
- trilogy/dialect/duckdb.py +397 -0
- trilogy/dialect/enums.py +151 -0
- trilogy/dialect/metadata.py +173 -0
- trilogy/dialect/mock.py +190 -0
- trilogy/dialect/postgres.py +117 -0
- trilogy/dialect/presto.py +110 -0
- trilogy/dialect/results.py +89 -0
- trilogy/dialect/snowflake.py +129 -0
- trilogy/dialect/sql_server.py +137 -0
- trilogy/engine.py +48 -0
- trilogy/execution/__init__.py +17 -0
- trilogy/execution/config.py +119 -0
- trilogy/execution/state/__init__.py +0 -0
- trilogy/execution/state/exceptions.py +26 -0
- trilogy/execution/state/file_state_store.py +0 -0
- trilogy/execution/state/sqllite_state_store.py +0 -0
- trilogy/execution/state/state_store.py +406 -0
- trilogy/executor.py +692 -0
- trilogy/hooks/__init__.py +4 -0
- trilogy/hooks/base_hook.py +40 -0
- trilogy/hooks/graph_hook.py +135 -0
- trilogy/hooks/query_debugger.py +166 -0
- trilogy/metadata/__init__.py +0 -0
- trilogy/parser.py +10 -0
- trilogy/parsing/README.md +21 -0
- trilogy/parsing/__init__.py +0 -0
- trilogy/parsing/common.py +1069 -0
- trilogy/parsing/config.py +5 -0
- trilogy/parsing/exceptions.py +8 -0
- trilogy/parsing/helpers.py +1 -0
- trilogy/parsing/parse_engine.py +2876 -0
- trilogy/parsing/render.py +775 -0
- trilogy/parsing/trilogy.lark +546 -0
- trilogy/py.typed +0 -0
- trilogy/render.py +45 -0
- trilogy/scripts/README.md +9 -0
- trilogy/scripts/__init__.py +0 -0
- trilogy/scripts/agent.py +41 -0
- trilogy/scripts/agent_info.py +306 -0
- trilogy/scripts/common.py +432 -0
- trilogy/scripts/dependency/Cargo.lock +617 -0
- trilogy/scripts/dependency/Cargo.toml +39 -0
- trilogy/scripts/dependency/README.md +131 -0
- trilogy/scripts/dependency/build.sh +25 -0
- trilogy/scripts/dependency/src/directory_resolver.rs +387 -0
- trilogy/scripts/dependency/src/lib.rs +16 -0
- trilogy/scripts/dependency/src/main.rs +770 -0
- trilogy/scripts/dependency/src/parser.rs +435 -0
- trilogy/scripts/dependency/src/preql.pest +208 -0
- trilogy/scripts/dependency/src/python_bindings.rs +311 -0
- trilogy/scripts/dependency/src/resolver.rs +716 -0
- trilogy/scripts/dependency/tests/base.preql +3 -0
- trilogy/scripts/dependency/tests/cli_integration.rs +377 -0
- trilogy/scripts/dependency/tests/customer.preql +6 -0
- trilogy/scripts/dependency/tests/main.preql +9 -0
- trilogy/scripts/dependency/tests/orders.preql +7 -0
- trilogy/scripts/dependency/tests/test_data/base.preql +9 -0
- trilogy/scripts/dependency/tests/test_data/consumer.preql +1 -0
- trilogy/scripts/dependency.py +323 -0
- trilogy/scripts/display.py +555 -0
- trilogy/scripts/environment.py +59 -0
- trilogy/scripts/fmt.py +32 -0
- trilogy/scripts/ingest.py +487 -0
- trilogy/scripts/ingest_helpers/__init__.py +1 -0
- trilogy/scripts/ingest_helpers/foreign_keys.py +123 -0
- trilogy/scripts/ingest_helpers/formatting.py +93 -0
- trilogy/scripts/ingest_helpers/typing.py +161 -0
- trilogy/scripts/init.py +105 -0
- trilogy/scripts/parallel_execution.py +762 -0
- trilogy/scripts/plan.py +189 -0
- trilogy/scripts/refresh.py +161 -0
- trilogy/scripts/run.py +79 -0
- trilogy/scripts/serve.py +202 -0
- trilogy/scripts/serve_helpers/__init__.py +41 -0
- trilogy/scripts/serve_helpers/file_discovery.py +142 -0
- trilogy/scripts/serve_helpers/index_generation.py +206 -0
- trilogy/scripts/serve_helpers/models.py +38 -0
- trilogy/scripts/single_execution.py +131 -0
- trilogy/scripts/testing.py +143 -0
- trilogy/scripts/trilogy.py +75 -0
- trilogy/std/__init__.py +0 -0
- trilogy/std/color.preql +3 -0
- trilogy/std/date.preql +13 -0
- trilogy/std/display.preql +18 -0
- trilogy/std/geography.preql +22 -0
- trilogy/std/metric.preql +15 -0
- trilogy/std/money.preql +67 -0
- trilogy/std/net.preql +14 -0
- trilogy/std/ranking.preql +7 -0
- trilogy/std/report.preql +5 -0
- trilogy/std/semantic.preql +6 -0
- trilogy/utility.py +34 -0
@@ -0,0 +1,756 @@
from __future__ import annotations

import difflib
import os
from collections import defaultdict
from dataclasses import dataclass
from pathlib import Path
from typing import (
    TYPE_CHECKING,
    Annotated,
    Any,
    Dict,
    ItemsView,
    List,
    Never,
    Optional,
    Self,
    Tuple,
    ValuesView,
)

from lark.tree import Meta
from pydantic import BaseModel, ConfigDict, Field
from pydantic.functional_validators import PlainValidator

from trilogy.constants import DEFAULT_NAMESPACE, ENV_CACHE_NAME, logger
from trilogy.core.constants import (
    INTERNAL_NAMESPACE,
    WORKING_PATH_CONCEPT,
)
from trilogy.core.enums import (
    ConceptSource,
    Derivation,
    FunctionType,
    Granularity,
    Modifier,
    Purpose,
)
from trilogy.core.exceptions import (
    FrozenEnvironmentException,
    UndefinedConceptException,
)
from trilogy.core.models.author import (
    Concept,
    ConceptRef,
    CustomFunctionFactory,
    CustomType,
    Function,
    SelectLineage,
    UndefinedConcept,
    UndefinedConceptFull,
    address_with_namespace,
)
from trilogy.core.models.core import DataType
from trilogy.core.models.datasource import Datasource, EnvironmentDatasourceDict

if TYPE_CHECKING:
    from trilogy.core.models.build import BuildConcept, BuildEnvironment


@dataclass
class Import:
    alias: str
    path: Path
    input_path: Path | None = (
        None  # filepath where the text came from (path is the import path, but may be resolved from a dictionary for some resolvers)
    )


class BaseImportResolver(BaseModel):
    pass


class FileSystemImportResolver(BaseImportResolver):
    pass


class DictImportResolver(BaseImportResolver):
    content: Dict[str, str]


class EnvironmentConfig(BaseModel):
    allow_duplicate_declaration: bool = True
    import_resolver: BaseImportResolver = Field(
        default_factory=FileSystemImportResolver
    )

    def copy_for_root(self, root: str | None) -> Self:
        new = self.model_copy(deep=True)
        if isinstance(new.import_resolver, DictImportResolver) and root:
            new.import_resolver = DictImportResolver(
                content={
                    k[len(root) + 1 :]: v
                    for k, v in new.import_resolver.content.items()
                    if k.startswith(f"{root}.")
                }
            )
        return new
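
To illustrate the resolver scoping above: `copy_for_root` trims a `DictImportResolver` down to one root package, stripping that prefix from the surviving keys. A minimal sketch (not part of the packaged file; the dictionary keys and contents are invented):

```python
from trilogy.core.models.environment import DictImportResolver, EnvironmentConfig

config = EnvironmentConfig(
    import_resolver=DictImportResolver(
        content={
            "analytics.orders": "# orders model source",
            "analytics.customers": "# customers model source",
            "finance.ledger": "# unrelated model source",
        }
    )
)
scoped = config.copy_for_root("analytics")
# Only keys under "analytics." survive, with the prefix stripped.
assert isinstance(scoped.import_resolver, DictImportResolver)
assert set(scoped.import_resolver.content) == {"orders", "customers"}
```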


class EnvironmentConceptDict(dict):
    def __init__(self, *args, **kwargs) -> None:
        super().__init__(self, *args, **kwargs)
        self.undefined: dict[str, UndefinedConceptFull] = {}
        self.fail_on_missing: bool = True
        self.populate_default_concepts()

    def duplicate(self) -> "EnvironmentConceptDict":
        new = EnvironmentConceptDict()
        new.update({k: v.duplicate() for k, v in self.items()})
        new.undefined = self.undefined
        new.fail_on_missing = self.fail_on_missing
        return new

    def populate_default_concepts(self):
        from trilogy.core.internal import DEFAULT_CONCEPTS

        for concept in DEFAULT_CONCEPTS.values():
            self[concept.address] = concept

    def values(self) -> ValuesView[Concept]:  # type: ignore
        return super().values()

    def get(self, key: str, default: Concept | None = None) -> Concept | None:  # type: ignore
        try:
            return self.__getitem__(key)
        except UndefinedConceptException:
            return default

    def raise_undefined(
        self, key: str, line_no: int | None = None, file: Path | str | None = None
    ) -> Never:

        matches = self._find_similar_concepts(key)
        message = f"Undefined concept: {key}."
        if matches:
            message += f" Suggestions: {matches}"

        if line_no:
            if file:
                raise UndefinedConceptException(
                    f"{file}: {line_no}: " + message, matches
                )
            raise UndefinedConceptException(f"line: {line_no}: " + message, matches)
        raise UndefinedConceptException(message, matches)

    def __getitem__(
        self, key: str, line_no: int | None = None, file: Path | None = None
    ) -> Concept | UndefinedConceptFull:
        # fast access path
        if key in self.keys():
            return super(EnvironmentConceptDict, self).__getitem__(key)
        if isinstance(key, ConceptRef):
            return self.__getitem__(key.address, line_no=line_no, file=file)
        try:
            return super(EnvironmentConceptDict, self).__getitem__(key)
        except KeyError:
            if "." in key and key.split(".", 1)[0] == DEFAULT_NAMESPACE:
                return self.__getitem__(key.split(".", 1)[1], line_no)
            if DEFAULT_NAMESPACE + "." + key in self:
                return self.__getitem__(DEFAULT_NAMESPACE + "." + key, line_no)
            if not self.fail_on_missing:
                if "." in key:
                    ns, rest = key.rsplit(".", 1)
                else:
                    ns = DEFAULT_NAMESPACE
                    rest = key
                if key in self.undefined:
                    return self.undefined[key]
                undefined = UndefinedConceptFull(
                    line_no=line_no,
                    datatype=DataType.UNKNOWN,
                    name=rest,
                    purpose=Purpose.UNKNOWN,
                    namespace=ns,
                )
                self.undefined[key] = undefined
                return undefined
            self.raise_undefined(key, line_no, file)

    def _find_similar_concepts(self, concept_name: str):
        def strip_local(input: str):
            if input.startswith(f"{DEFAULT_NAMESPACE}."):
                return input[len(DEFAULT_NAMESPACE) + 1 :]
            return input

        matches = difflib.get_close_matches(
            strip_local(concept_name), [strip_local(x) for x in self.keys()]
        )
        return matches

    def items(self) -> ItemsView[str, Concept]:  # type: ignore
        return super().items()
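
A short sketch of the lookup behavior above, assuming a freshly constructed `Environment`; the concept address used here is invented and deliberately missing:

```python
from trilogy.core.exceptions import UndefinedConceptException
from trilogy.core.models.environment import Environment

env = Environment()
try:
    env.concepts["orderz.id"]
except UndefinedConceptException as exc:
    print(exc)  # message may carry difflib-based suggestions for close matches

assert env.concepts.get("orderz.id") is None  # get() swallows the lookup error

env.concepts.fail_on_missing = False
stub = env.concepts["orderz.id"]  # returns a cached UndefinedConceptFull placeholder
```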


def validate_concepts(v) -> EnvironmentConceptDict:
    if isinstance(v, EnvironmentConceptDict):
        return v
    elif isinstance(v, dict):
        return EnvironmentConceptDict(
            **{x: Concept.model_validate(y) for x, y in v.items()}
        )
    raise ValueError


def validate_datasources(v) -> EnvironmentDatasourceDict:
    if isinstance(v, EnvironmentDatasourceDict):
        return v
    elif isinstance(v, dict):
        return EnvironmentDatasourceDict(
            **{x: Datasource.model_validate(y) for x, y in v.items()}
        )
    raise ValueError


def get_version():
    from trilogy import __version__

    return __version__


class Environment(BaseModel):
    model_config = ConfigDict(arbitrary_types_allowed=True, strict=False)

    concepts: Annotated[EnvironmentConceptDict, PlainValidator(validate_concepts)] = (
        Field(default_factory=EnvironmentConceptDict)
    )
    datasources: Annotated[
        EnvironmentDatasourceDict, PlainValidator(validate_datasources)
    ] = Field(default_factory=EnvironmentDatasourceDict)
    functions: Dict[str, CustomFunctionFactory] = Field(default_factory=dict)
    data_types: Dict[str, CustomType] = Field(default_factory=dict)
    named_statements: Dict[str, SelectLineage] = Field(default_factory=dict)
    imports: defaultdict[str, list[Import]] = Field(
        default_factory=lambda: defaultdict(list)  # type: ignore
    )
    namespace: str = DEFAULT_NAMESPACE
    working_path: str | Path = Field(default_factory=lambda: os.getcwd())
    config: EnvironmentConfig = Field(default_factory=EnvironmentConfig)
    version: str = Field(default_factory=get_version)
    cte_name_map: Dict[str, str] = Field(default_factory=dict)
    alias_origin_lookup: Dict[str, Concept] = Field(default_factory=dict)
    # TODO: support freezing environments to avoid mutation
    frozen: bool = False
    env_file_path: Path | str | None = None
    parameters: Dict[str, Any] = Field(default_factory=dict)

    def freeze(self):
        self.frozen = True

    def thaw(self):
        self.frozen = False

    def set_parameters(self, **kwargs) -> Self:

        self.parameters.update(kwargs)
        return self

    def materialize_for_select(
        self,
        local_concepts: dict[str, "BuildConcept"] | None = None,
        build_cache: dict | None = None,
    ) -> "BuildEnvironment":
        """helper method"""
        from trilogy.core.models.build import Factory

        factory: Factory = Factory(
            self, local_concepts=local_concepts, build_cache=build_cache
        )
        return factory.build(self)

    def add_rowset(self, name: str, lineage: SelectLineage):
        self.named_statements[name] = lineage

    def duplicate(self):
        return Environment.model_construct(
            datasources=self.datasources.duplicate(),
            concepts=self.concepts.duplicate(),
            functions=dict(self.functions),
            data_types=dict(self.data_types),
            imports=defaultdict(list, self.imports),
            namespace=self.namespace,
            working_path=self.working_path,
            environment_config=self.config.model_copy(deep=True),
            version=self.version,
            cte_name_map=dict(self.cte_name_map),
            alias_origin_lookup={
                k: v.duplicate() for k, v in self.alias_origin_lookup.items()
            },
            env_file_path=self.env_file_path,
        )

    def _add_path_concepts(self):
        concept = Concept(
            name=WORKING_PATH_CONCEPT,
            namespace=self.namespace,
            lineage=Function(
                operator=FunctionType.CONSTANT,
                arguments=[str(self.working_path)],
                output_datatype=DataType.STRING,
                output_purpose=Purpose.CONSTANT,
            ),
            datatype=DataType.STRING,
            granularity=Granularity.SINGLE_ROW,
            derivation=Derivation.CONSTANT,
            purpose=Purpose.CONSTANT,
        )
        self.add_concept(concept)

    def model_post_init(self, context: Any) -> None:
        self._add_path_concepts()

    @classmethod
    def from_file(cls, path: str | Path) -> "Environment":
        if isinstance(path, str):
            path = Path(path)
        with open(path, "r") as f:
            read = f.read()
        return Environment(working_path=path.parent, env_file_path=path).parse(read)[0]

    @classmethod
    def from_string(
        cls, input: str, config: EnvironmentConfig | None = None
    ) -> "Environment":
        config = config or EnvironmentConfig()
        return Environment(config=config).parse(input)[0]

    @classmethod
    def from_cache(cls, path) -> Optional["Environment"]:
        with open(path, "r") as f:
            read = f.read()
        base = cls.model_validate_json(read)
        version = get_version()
        if base.version != version:
            return None
        return base

    def to_cache(self, path: Optional[str | Path] = None) -> Path:
        if not path:
            ppath = Path(self.working_path) / ENV_CACHE_NAME
        else:
            ppath = Path(path)
        with open(ppath, "w") as f:
            f.write(self.model_dump_json())
        return ppath
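
A sketch of the cache round trip that `to_cache` and `from_cache` provide; it writes `ENV_CACHE_NAME` into the current working directory and uses nothing beyond the methods shown above:

```python
from trilogy.core.models.environment import Environment

env = Environment()                       # working_path defaults to os.getcwd()
cache_file = env.to_cache()               # serializes the environment to JSON
restored = Environment.from_cache(cache_file)
# from_cache returns None when the cached version string differs from the
# installed trilogy version, so callers must handle a cache miss.
assert restored is None or restored.version == env.version
```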

    def validate_concept(
        self, new_concept: Concept, meta: Meta | None = None
    ) -> Concept | None:
        lookup = new_concept.address
        if lookup not in self.concepts:
            return None
        existing: Concept = self.concepts.get(lookup)  # type: ignore
        if isinstance(existing, UndefinedConcept):
            return None

        def handle_currently_bound_sources():
            if str(existing.lineage) == str(new_concept.lineage):
                return None

            invalidated = False
            for k, datasource in self.datasources.items():
                if existing.address in datasource.output_concepts:
                    logger.warning(
                        f"Removed concept for {existing} assignment from {k}"
                    )
                    clen = len(datasource.columns)
                    datasource.columns = [
                        x
                        for x in datasource.columns
                        if x.concept.address != existing.address
                    ]
                    assert len(datasource.columns) < clen
                    invalidated = len(datasource.columns) < clen
            if invalidated:
                logger.warning(
                    f"Persisted concept {existing.address} lineage {str(existing.lineage)} did not match redeclaration {str(new_concept.lineage)}, invalidated current bound datasource."
                )
            return None

        if existing and self.config.allow_duplicate_declaration:
            if (
                existing.metadata
                and existing.metadata.concept_source == ConceptSource.AUTO_DERIVED
            ):
                # auto derived concepts will not have sources and do not need to be checked
                return None
            return handle_currently_bound_sources()
        elif (
            existing.metadata
            and existing.metadata.concept_source == ConceptSource.AUTO_DERIVED
        ):
            return None
        elif meta and existing.metadata:
            raise ValueError(
                f"Assignment to concept '{lookup}' on line {meta.line} is a duplicate"
                f" declaration; '{lookup}' was originally defined on line"
                f" {existing.metadata.line_number}"
            )
        elif existing.metadata:
            raise ValueError(
                f"Assignment to concept '{lookup}' is a duplicate declaration;"
                f" '{lookup}' was originally defined on line"
                f" {existing.metadata.line_number}"
            )
        raise ValueError(
            f"Assignment to concept '{lookup}' is a duplicate declaration;"
        )

    def add_import(
        self, alias: str, source: Environment, imp_stm: Import | None = None
    ):
        if self.frozen:
            raise ValueError("Environment is frozen, cannot add imports")
        exists = False
        existing = self.imports[alias]
        if imp_stm:
            if any(
                [x.path == imp_stm.path and x.alias == imp_stm.alias for x in existing]
            ):
                exists = True
        else:
            if any(
                [x.path == source.working_path and x.alias == alias for x in existing]
            ):
                exists = True
            imp_stm = Import(alias=alias, path=Path(source.working_path))
        same_namespace = alias == DEFAULT_NAMESPACE

        if not exists:
            self.imports[alias].append(imp_stm)
        # we can't exit early
        # as there may be new concepts
        iteration: list[tuple[str, Concept]] = list(source.concepts.items())
        for k, concept in iteration:
            # skip internal namespace
            if INTERNAL_NAMESPACE in concept.address:
                continue
            # don't overwrite working path
            if concept.name == WORKING_PATH_CONCEPT:
                continue
            if same_namespace:
                new = self.add_concept(concept, add_derived=False)
            else:
                new = self.add_concept(concept.with_namespace(alias), add_derived=False)

            k = address_with_namespace(k, alias)
            # set this explicitly, to handle aliasing
            self.concepts[k] = new

        for _, datasource in source.datasources.items():
            if same_namespace:
                self.add_datasource(datasource)
            else:
                self.add_datasource(datasource.with_namespace(alias))
        for key, val in source.alias_origin_lookup.items():

            if same_namespace:
                self.alias_origin_lookup[key] = val
            else:
                self.alias_origin_lookup[address_with_namespace(key, alias)] = (
                    val.with_namespace(alias)
                )

        for key, function in source.functions.items():
            if same_namespace:
                self.functions[key] = function
            else:
                self.functions[address_with_namespace(key, alias)] = (
                    function.with_namespace(alias)
                )
        for key, type in source.data_types.items():
            if same_namespace:
                self.data_types[key] = type
            else:
                self.data_types[address_with_namespace(key, alias)] = (
                    type.with_namespace(alias)
                )
        return self

    def add_file_import(
        self, path: str | Path, alias: str, env: "Environment" | None = None
    ):
        if self.frozen:
            raise ValueError("Environment is frozen, cannot add imports")
        from trilogy.parsing.parse_engine import (
            PARSER,
            ParseToObjects,
        )

        if isinstance(path, str):
            if path.endswith(".preql"):
                path = path.rsplit(".", 1)[0]
            if "." not in path:
                target = Path(self.working_path, path)
            else:
                target = Path(self.working_path, *path.split("."))
            target = target.with_suffix(".preql")
        else:
            target = path
        if not env:
            import_keys = ["root", alias]
            parse_address = "-".join(import_keys)
            try:
                with open(target, "r", encoding="utf-8") as f:
                    text = f.read()
                nenv = Environment(
                    working_path=target.parent,
                )
                nenv.concepts.fail_on_missing = False
                nparser = ParseToObjects(
                    environment=Environment(
                        working_path=target.parent,
                    ),
                    parse_address=parse_address,
                    token_address=target,
                    import_keys=import_keys,
                )
                nparser.set_text(text)
                nparser.environment.concepts.fail_on_missing = False
                nparser.transform(PARSER.parse(text))
                nparser.run_second_parse_pass()
                nparser.environment.concepts.fail_on_missing = True

            except Exception as e:
                raise ImportError(
                    f"Unable to import file {target.parent}, parsing error: {e}"
                )
            env = nparser.environment
        imps = Import(alias=alias, path=target)
        self.add_import(alias, source=env, imp_stm=imps)
        return imps
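
A sketch of wiring one model file into another environment with `add_file_import`; the project root, file name, and alias are invented, and the `.preql` file must exist for the parse to succeed:

```python
from pathlib import Path
from trilogy.core.models.environment import Environment

env = Environment(working_path=Path("/models"))  # hypothetical project root
# Resolves /models/shared/customers.preql, parses it in an isolated environment,
# and re-exposes its concepts and datasources under the "sales" namespace.
imp = env.add_file_import("shared.customers", alias="sales")
print(imp.path)  # /models/shared/customers.preql
```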

    def parse(
        self, input: str, namespace: str | None = None, persist: bool = False
    ) -> Tuple["Environment", list]:
        from trilogy import parse
        from trilogy.core.query_processor import process_persist
        from trilogy.core.statements.author import (
            MultiSelectStatement,
            PersistStatement,
            SelectStatement,
            ShowStatement,
        )

        if namespace:
            new = Environment()
            _, queries = new.parse(input)
            self.add_import(namespace, new)
            return self, queries
        _, queries = parse(input, self)
        generatable = [
            x
            for x in queries
            if isinstance(
                x,
                (
                    SelectStatement,
                    PersistStatement,
                    MultiSelectStatement,
                    ShowStatement,
                ),
            )
        ]
        while generatable:
            t = generatable.pop(0)
            if isinstance(t, PersistStatement) and persist:
                processed = process_persist(self, t)
                self.add_datasource(processed.datasource)
        return self, queries
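
A sketch of the two `parse` modes above; the model file is hypothetical and must contain a valid Trilogy script for this to run:

```python
from pathlib import Path
from trilogy.core.models.environment import Environment

script = Path("models/orders.preql").read_text()  # hypothetical model file

env = Environment(working_path="models")
env, queries = env.parse(script)  # statements land directly in this environment

isolated = Environment()
# Namespace mode: the script is parsed into a fresh environment, which is then
# imported under the "staging" alias; persist=True would additionally register
# datasources produced by persist statements.
isolated, _ = isolated.parse(script, namespace="staging")
```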

    def add_concept(
        self,
        concept: Concept,
        meta: Meta | None = None,
        force: bool = False,
        add_derived: bool = True,
    ):

        if self.frozen:
            raise FrozenEnvironmentException(
                "Environment is frozen, cannot add concepts"
            )
        if not force:
            existing = self.validate_concept(concept, meta=meta)
            if existing:
                concept = existing

        self.concepts[concept.address] = concept

        from trilogy.core.environment_helpers import generate_related_concepts

        generate_related_concepts(concept, self, meta=meta, add_derived=add_derived)

        return concept

    def remove_concept(
        self,
        concept: Concept | str,
    ) -> bool:
        if self.frozen:
            raise FrozenEnvironmentException(
                "Environment is frozen, cannot remove concepts"
            )
        if isinstance(concept, Concept):
            address = concept.address
            c_instance = concept
        else:
            address = concept
            c_instance_check = self.concepts.get(address)
            if not c_instance_check:
                return False
            c_instance = c_instance_check
        from trilogy.core.environment_helpers import remove_related_concepts

        remove_related_concepts(c_instance, self)
        if address in self.concepts:
            del self.concepts[address]
            return True
        if address in self.alias_origin_lookup:
            del self.alias_origin_lookup[address]

        return False
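
A sketch of registering and then removing a concept by hand, mirroring the constant-concept construction used by `_add_path_concepts` above; the concept name and value are invented:

```python
from trilogy.core.enums import Derivation, FunctionType, Granularity, Purpose
from trilogy.core.models.author import Concept, Function
from trilogy.core.models.core import DataType
from trilogy.core.models.environment import Environment

env = Environment()
concept = Concept(
    name="app_version",
    namespace=env.namespace,
    lineage=Function(
        operator=FunctionType.CONSTANT,
        arguments=["1.0.0"],
        output_datatype=DataType.STRING,
        output_purpose=Purpose.CONSTANT,
    ),
    datatype=DataType.STRING,
    granularity=Granularity.SINGLE_ROW,
    derivation=Derivation.CONSTANT,
    purpose=Purpose.CONSTANT,
)
env.add_concept(concept)                    # also generates related concepts
assert env.remove_concept(concept.address)  # True once the concept is deleted
```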

    def add_datasource(
        self,
        datasource: Datasource,
        meta: Meta | None = None,
    ):
        if self.frozen:
            raise FrozenEnvironmentException(
                "Environment is frozen, cannot add datasource"
            )
        self.datasources[datasource.identifier] = datasource
        return datasource

    def delete_datasource(
        self,
        address: str,
        meta: Meta | None = None,
    ) -> bool:
        if self.frozen:
            raise ValueError("Environment is frozen, cannot delete datasources")
        if address in self.datasources:
            del self.datasources[address]
            # self.gen_concept_list_caches()
            return True
        return False

    def merge_concept(
        self,
        source: Concept,
        target: Concept,
        modifiers: List[Modifier],
        force: bool = False,
    ) -> bool:
        from trilogy.core.models.build import BuildConcept

        if isinstance(source, BuildConcept):
            raise SyntaxError(source)
        elif isinstance(target, BuildConcept):
            raise SyntaxError(target)
        if self.frozen:
            raise ValueError("Environment is frozen, cannot merge concepts")
        replacements = {}

        # exit early if we've run this
        if source.address in self.alias_origin_lookup and not force:
            if self.concepts[source.address] == target:
                return False

        self.alias_origin_lookup[source.address] = source
        self.alias_origin_lookup[source.address].pseudonyms.add(target.address)
        for k, v in self.concepts.items():

            if v.address == target.address:
                if source.address != target.address:
                    v.pseudonyms.add(source.address)

            if v.address == source.address:
                replacements[k] = target
            # we need to update keys and grains of all concepts
            else:
                if source.address in v.sources or source.address in v.grain.components:
                    replacements[k] = v.with_merge(source, target, modifiers)
        self.concepts.update(replacements)
        for k, ds in self.datasources.items():
            if source.address in ds.output_lcl:
                ds.merge_concept(source, target, modifiers=modifiers)

        return True
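
A sketch of aliasing one concept onto another with `merge_concept`; the model file and concept addresses are invented and assume both concepts already exist in the environment (for example via imports):

```python
from trilogy.core.models.environment import Environment

env = Environment.from_file("models/orders.preql")  # hypothetical model
source = env.concepts["vendor.customer_id"]          # invented addresses
target = env.concepts["local.customer_id"]
changed = env.merge_concept(source, target, modifiers=[])
# The source is recorded in alias_origin_lookup and rewritten to the target in
# concepts and datasources; repeating the same merge returns False.
```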


class LazyEnvironment(Environment):
    """Variant of environment to defer parsing of a path
    until relevant attributes accessed."""

    load_path: Path
    setup_queries: list[Any] = Field(default_factory=list)
    loaded: bool = False

    @property
    def setup_path(self) -> Path:
        return self.load_path.parent / "setup.preql"

    def __init__(self, **data):
        if not data.get("working_path"):
            data["working_path"] = data["load_path"].parent
        super().__init__(**data)
        assert self.working_path == self.load_path.parent

    def _add_path_concepts(self):
        pass

    def _load(self):
        if self.loaded:
            return
        from trilogy import parse

        env = Environment(working_path=self.load_path.parent)
        assert env.working_path == self.load_path.parent
        with open(self.load_path, "r") as f:
            env, _ = parse(f.read(), env)
        if self.setup_path.exists():
            with open(self.setup_path, "r") as f2:
                env, q = parse(f2.read(), env)
                for q in q:
                    self.setup_queries.append(q)
        self.loaded = True
        self.datasources = env.datasources
        self.concepts = env.concepts
        self.imports = env.imports
        self.alias_origin_lookup = env.alias_origin_lookup
        self.functions = env.functions
        self.data_types = env.data_types
        self.cte_name_map = env.cte_name_map

    def __getattr__(self, name):
        return self.__getattribute__(name)

    def __getattribute__(self, name):
        if name not in (
            "datasources",
            "concepts",
            "imports",
            "functions",
            "datatypes",
            "cte_name_map",
        ) or name.startswith("_"):
            return super().__getattribute__(name)
        if not self.loaded:
            self._load()
        return super().__getattribute__(name)


Environment.model_rebuild()
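
A sketch of the deferred loading behavior of `LazyEnvironment` defined above; the model path is invented:

```python
from pathlib import Path
from trilogy.core.models.environment import LazyEnvironment

lazy = LazyEnvironment(load_path=Path("models/orders.preql"))  # hypothetical file
assert lazy.loaded is False  # nothing parsed yet
_ = lazy.concepts            # first access to a deferred attribute triggers _load()
assert lazy.loaded is True   # the file (and optional setup.preql) is now parsed
```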