krons-0.1.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- kronos/__init__.py +0 -0
- kronos/core/__init__.py +145 -0
- kronos/core/broadcaster.py +116 -0
- kronos/core/element.py +225 -0
- kronos/core/event.py +316 -0
- kronos/core/eventbus.py +116 -0
- kronos/core/flow.py +356 -0
- kronos/core/graph.py +442 -0
- kronos/core/node.py +982 -0
- kronos/core/pile.py +575 -0
- kronos/core/processor.py +494 -0
- kronos/core/progression.py +296 -0
- kronos/enforcement/__init__.py +57 -0
- kronos/enforcement/common/__init__.py +34 -0
- kronos/enforcement/common/boolean.py +85 -0
- kronos/enforcement/common/choice.py +97 -0
- kronos/enforcement/common/mapping.py +118 -0
- kronos/enforcement/common/model.py +102 -0
- kronos/enforcement/common/number.py +98 -0
- kronos/enforcement/common/string.py +140 -0
- kronos/enforcement/context.py +129 -0
- kronos/enforcement/policy.py +80 -0
- kronos/enforcement/registry.py +153 -0
- kronos/enforcement/rule.py +312 -0
- kronos/enforcement/service.py +370 -0
- kronos/enforcement/validator.py +198 -0
- kronos/errors.py +146 -0
- kronos/operations/__init__.py +32 -0
- kronos/operations/builder.py +228 -0
- kronos/operations/flow.py +398 -0
- kronos/operations/node.py +101 -0
- kronos/operations/registry.py +92 -0
- kronos/protocols.py +414 -0
- kronos/py.typed +0 -0
- kronos/services/__init__.py +81 -0
- kronos/services/backend.py +286 -0
- kronos/services/endpoint.py +608 -0
- kronos/services/hook.py +471 -0
- kronos/services/imodel.py +465 -0
- kronos/services/registry.py +115 -0
- kronos/services/utilities/__init__.py +36 -0
- kronos/services/utilities/header_factory.py +87 -0
- kronos/services/utilities/rate_limited_executor.py +271 -0
- kronos/services/utilities/rate_limiter.py +180 -0
- kronos/services/utilities/resilience.py +414 -0
- kronos/session/__init__.py +41 -0
- kronos/session/exchange.py +258 -0
- kronos/session/message.py +60 -0
- kronos/session/session.py +411 -0
- kronos/specs/__init__.py +25 -0
- kronos/specs/adapters/__init__.py +0 -0
- kronos/specs/adapters/_utils.py +45 -0
- kronos/specs/adapters/dataclass_field.py +246 -0
- kronos/specs/adapters/factory.py +56 -0
- kronos/specs/adapters/pydantic_adapter.py +309 -0
- kronos/specs/adapters/sql_ddl.py +946 -0
- kronos/specs/catalog/__init__.py +36 -0
- kronos/specs/catalog/_audit.py +39 -0
- kronos/specs/catalog/_common.py +43 -0
- kronos/specs/catalog/_content.py +59 -0
- kronos/specs/catalog/_enforcement.py +70 -0
- kronos/specs/factory.py +120 -0
- kronos/specs/operable.py +314 -0
- kronos/specs/phrase.py +405 -0
- kronos/specs/protocol.py +140 -0
- kronos/specs/spec.py +506 -0
- kronos/types/__init__.py +60 -0
- kronos/types/_sentinel.py +311 -0
- kronos/types/base.py +369 -0
- kronos/types/db_types.py +260 -0
- kronos/types/identity.py +66 -0
- kronos/utils/__init__.py +40 -0
- kronos/utils/_hash.py +234 -0
- kronos/utils/_json_dump.py +392 -0
- kronos/utils/_lazy_init.py +63 -0
- kronos/utils/_to_list.py +165 -0
- kronos/utils/_to_num.py +85 -0
- kronos/utils/_utils.py +375 -0
- kronos/utils/concurrency/__init__.py +205 -0
- kronos/utils/concurrency/_async_call.py +333 -0
- kronos/utils/concurrency/_cancel.py +122 -0
- kronos/utils/concurrency/_errors.py +96 -0
- kronos/utils/concurrency/_patterns.py +363 -0
- kronos/utils/concurrency/_primitives.py +328 -0
- kronos/utils/concurrency/_priority_queue.py +135 -0
- kronos/utils/concurrency/_resource_tracker.py +110 -0
- kronos/utils/concurrency/_run_async.py +67 -0
- kronos/utils/concurrency/_task.py +95 -0
- kronos/utils/concurrency/_utils.py +79 -0
- kronos/utils/fuzzy/__init__.py +14 -0
- kronos/utils/fuzzy/_extract_json.py +90 -0
- kronos/utils/fuzzy/_fuzzy_json.py +288 -0
- kronos/utils/fuzzy/_fuzzy_match.py +149 -0
- kronos/utils/fuzzy/_string_similarity.py +187 -0
- kronos/utils/fuzzy/_to_dict.py +396 -0
- kronos/utils/sql/__init__.py +13 -0
- kronos/utils/sql/_sql_validation.py +142 -0
- krons-0.1.0.dist-info/METADATA +70 -0
- krons-0.1.0.dist-info/RECORD +101 -0
- krons-0.1.0.dist-info/WHEEL +4 -0
- krons-0.1.0.dist-info/licenses/LICENSE +201 -0
kronos/specs/catalog/__init__.py
ADDED

@@ -0,0 +1,36 @@
# Copyright (c) 2025 - 2026, HaiyangLi <quantocean.li at gmail dot com>
# SPDX-License-Identifier: Apache-2.0

"""Spec Catalog - Reusable field Specs for Node composition and DDL generation.

Pre-defined Specs for common database patterns:
- **ContentSpecs**: id, created_at, content, metadata, embedding
- **AuditSpecs**: updated_at/by, deleted_at/by, is_deleted, version, hashes
- **CommonSpecs**: name, slug, status, email, phone, tenant_id, settings

Usage:
    from kronos.specs.catalog import ContentSpecs, AuditSpecs

    content_specs = ContentSpecs.get_specs(dim=1536)
    audit_specs = AuditSpecs.get_specs(use_uuid=True)
    all_specs = content_specs + audit_specs

For custom Specs, use the factories directly:
    from kronos.specs.factory import create_embedding_spec, create_content_spec

    my_embedding = create_embedding_spec("embedding", dim=1536)
    my_content = create_content_spec("payload", content_type=MyModel)
"""

from ._audit import AuditSpecs
from ._common import CommonSpecs
from ._content import ContentSpecs
from ._enforcement import EnforcementLevel, EnforcementSpecs

__all__ = (
    "AuditSpecs",
    "CommonSpecs",
    "ContentSpecs",
    "EnforcementLevel",
    "EnforcementSpecs",
)
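To show where the docstring's usage snippet leads, here is a minimal sketch that takes the combined spec list one step further by composing it into a Pydantic model through Operable (defined later in this diff); the model name "Document" is purely illustrative:

    from kronos.specs.catalog import AuditSpecs, ContentSpecs
    from kronos.specs.operable import Operable

    content_specs = ContentSpecs.get_specs(dim=1536)
    audit_specs = AuditSpecs.get_specs(use_uuid=True)
    all_specs = content_specs + audit_specs

    # Compose the combined field Specs into a model via the default "pydantic" adapter.
    Document = Operable(all_specs, name="Document").compose_structure()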
kronos/specs/catalog/_audit.py
ADDED

@@ -0,0 +1,39 @@
# Copyright (c) 2025 - 2026, HaiyangLi <quantocean.li at gmail dot com>
# SPDX-License-Identifier: Apache-2.0

"""Audit field Specs - tracking, versioning, soft delete, hashing."""

from __future__ import annotations

from datetime import datetime
from uuid import UUID

from pydantic import BaseModel, Field

from kronos.specs.operable import Operable
from kronos.specs.spec import Spec
from kronos.utils import now_utc


class AuditSpecs(BaseModel):
    updated_at: datetime = Field(default_factory=now_utc)
    updated_by: str | None = None
    is_active: bool = True
    is_deleted: bool = False
    deleted_at: datetime | None = None
    deleted_by: str | None = None
    version: int = Field(default=1, ge=0)
    content_hash: str | None = None
    integrity_hash: str | None = None

    @classmethod
    def get_specs(cls, use_uuid: bool) -> list[Spec]:
        """Get list of audit Specs based on actor ID type."""
        operable = Operable.from_structure(cls)
        specs = {spec.name: spec for spec in operable.get_specs()}

        if use_uuid:
            specs["updated_by"] = Spec(UUID, name="updated_by").as_nullable()
            specs["deleted_by"] = Spec(UUID, name="deleted_by").as_nullable()

        return list(specs.values())
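A short sketch of the use_uuid switch above; this is illustrative inspection code only, assuming Spec exposes the name attribute and as_nullable() behavior seen in this diff:

    from kronos.specs.catalog import AuditSpecs

    # With use_uuid=False the specs mirror the model's string-typed actor fields.
    str_specs = {s.name: s for s in AuditSpecs.get_specs(use_uuid=False)}

    # With use_uuid=True, updated_by/deleted_by are re-declared as nullable UUID specs.
    uuid_specs = {s.name: s for s in AuditSpecs.get_specs(use_uuid=True)}
    assert set(uuid_specs) == set(str_specs)  # same field names, different actor ID types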
kronos/specs/catalog/_common.py
ADDED

@@ -0,0 +1,43 @@
# Copyright (c) 2025 - 2026, HaiyangLi <quantocean.li at gmail dot com>
# SPDX-License-Identifier: Apache-2.0

"""Common field Specs - reusable patterns across domain entities."""

from __future__ import annotations

from typing import Any
from uuid import UUID

from pydantic import BaseModel

from kronos.specs.operable import Operable
from kronos.specs.spec import Spec


class CommonSpecs(BaseModel):
    """Common fields for domain entities."""

    name: str
    slug: str
    status: str = "active"
    email: str | None = None
    phone: str | None = None
    tenant_id: UUID
    settings: dict[str, Any] | None = None
    data: dict[str, Any] | None = None

    @classmethod
    def get_specs(cls, *, status_default: str = "active") -> list[Spec]:
        """Get list of common Specs.

        Args:
            status_default: Default value for status field.
        """
        operable = Operable.from_structure(cls)
        specs = {spec.name: spec for spec in operable.get_specs()}

        # Override status default if different
        if status_default != "active":
            specs["status"] = Spec(str, name="status", default=status_default)

        return list(specs.values())
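A brief sketch of the status_default override, assuming the Spec(str, ..., default=...) call above yields a spec whose default is the supplied string; "draft" is an illustrative value:

    from kronos.specs.catalog import CommonSpecs

    # Default catalog: the status spec keeps its "active" default.
    default_specs = CommonSpecs.get_specs()

    # Override: re-declare the status spec with a "draft" default for draft-first workflows.
    draft_specs = CommonSpecs.get_specs(status_default="draft")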
kronos/specs/catalog/_content.py
ADDED

@@ -0,0 +1,59 @@
# Copyright (c) 2025 - 2026, HaiyangLi <quantocean.li at gmail dot com>
# SPDX-License-Identifier: Apache-2.0

"""Content field Specs - identity, timestamps, content, metadata, embeddings."""

from __future__ import annotations

from datetime import datetime
from typing import Any
from uuid import UUID, uuid4

from pydantic import BaseModel, Field

from kronos.specs.operable import Operable
from kronos.specs.spec import Spec
from kronos.types._sentinel import Unset, UnsetType
from kronos.types.db_types import VectorMeta
from kronos.utils import now_utc


class ContentSpecs(BaseModel):
    """Core content fields for elements/nodes."""

    id: UUID = Field(default_factory=uuid4)
    created_at: datetime = Field(default_factory=now_utc)
    content: dict[str, Any] | None = None
    metadata: dict[str, Any] | None = None
    embedding: list[float] | None = None

    @classmethod
    def get_specs(
        cls,
        *,
        content_type: type | UnsetType = Unset,
        dim: int | UnsetType = Unset,
    ) -> list[Spec]:
        """Get list of content Specs.

        Args:
            content_type: Type for content/metadata fields (default: dict).
            dim: Embedding dimension. Unset = list[float], int = Vector[dim].
        """
        operable = Operable.from_structure(cls)
        specs = {spec.name: spec for spec in operable.get_specs()}

        # Override content/metadata type if specified
        if content_type is not Unset:
            specs["content"] = Spec(content_type, name="content").as_nullable()
            specs["metadata"] = Spec(content_type, name="metadata").as_nullable()

        # Override embedding with vector dimension if specified
        if dim is not Unset and isinstance(dim, int):
            specs["embedding"] = Spec(
                list[float],
                name="embedding",
                embedding=VectorMeta(dim),
            ).as_nullable()

        return list(specs.values())
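A small sketch of the two overrides above, using a hypothetical MyContent Pydantic model as the content/metadata type:

    from pydantic import BaseModel

    from kronos.specs.catalog import ContentSpecs

    class MyContent(BaseModel):
        text: str

    # Defaults: dict-typed content/metadata, plain list[float] embedding.
    generic = ContentSpecs.get_specs()

    # Typed content/metadata plus a 1536-dimension embedding spec carrying VectorMeta for DDL.
    typed = ContentSpecs.get_specs(content_type=MyContent, dim=1536)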
kronos/specs/catalog/_enforcement.py
ADDED

@@ -0,0 +1,70 @@
# Copyright (c) 2025 - 2026, HaiyangLi <quantocean.li at gmail dot com>
# SPDX-License-Identifier: Apache-2.0

"""Enforcement levels and specs for policy evaluation."""

from __future__ import annotations

from datetime import datetime
from typing import Any

from pydantic import BaseModel, Field, field_validator

from kronos.specs.operable import Operable
from kronos.specs.spec import Spec
from kronos.types.base import Enum
from kronos.utils import now_utc

__all__ = (
    "EnforcementLevel",
    "EnforcementSpecs",
)


class EnforcementLevel(Enum):
    """How strictly to enforce policy violations.

    HARD_MANDATORY: Blocks action, no override possible
    SOFT_MANDATORY: Blocks action, but can be overridden with justification
    ADVISORY: Warns but allows action to proceed
    """

    HARD_MANDATORY = "hard_mandatory"
    SOFT_MANDATORY = "soft_mandatory"
    ADVISORY = "advisory"

    @classmethod
    def is_blocking(cls, result: Any) -> bool:
        """Check if policy result blocks the action."""
        enforcement = getattr(result, "enforcement", "")
        return enforcement in (
            cls.HARD_MANDATORY.value,
            cls.SOFT_MANDATORY.value,
        )

    @classmethod
    def is_advisory(cls, result: Any) -> bool:
        """Check if policy result is advisory (not blocking)."""
        return getattr(result, "enforcement", "") == cls.ADVISORY.value


class EnforcementSpecs(BaseModel):
    """Fields for policy enforcement results."""

    enforcement: str = EnforcementLevel.HARD_MANDATORY.value
    policy_id: str
    violation_code: str | None = None
    evaluated_at: datetime = Field(default_factory=now_utc)
    evaluation_ms: float = Field(default=0.0, ge=0.0)

    @field_validator("enforcement", mode="before")
    @classmethod
    def _extract_enum_value(cls, v):
        """Extract .value from enum members."""
        return v.value if hasattr(v, "value") else v

    @classmethod
    def get_specs(cls) -> list[Spec]:
        """Get list of enforcement Specs."""
        operable = Operable.from_structure(cls)
        return list(operable.get_specs())
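A minimal sketch of how the enum's class-level checks read an enforcement result; the policy_id value is illustrative:

    from kronos.specs.catalog import EnforcementLevel, EnforcementSpecs

    result = EnforcementSpecs(
        enforcement=EnforcementLevel.ADVISORY,  # before-validator stores "advisory"
        policy_id="naming-convention",
    )

    EnforcementLevel.is_blocking(result)   # False: only hard/soft mandatory block
    EnforcementLevel.is_advisory(result)   # True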
kronos/specs/factory.py
ADDED
@@ -0,0 +1,120 @@
# Copyright (c) 2025 - 2026, HaiyangLi <quantocean.li at gmail dot com>
# SPDX-License-Identifier: Apache-2.0

"""Content field Specs - structured content, metadata, embeddings."""

from __future__ import annotations

from datetime import datetime
from uuid import UUID, uuid4

from kronos.specs.spec import Spec, not_sentinel
from kronos.types import UnsetType
from kronos.types._sentinel import Unset
from kronos.types.base import is_sentinel


def create_datetime_spec(name: str, *, use_default: bool) -> Spec:
    from kronos.utils._utils import coerce_created_at, now_utc

    return Spec(
        datetime,
        name=name,
        default_factory=now_utc if use_default else Unset,
        validator=lambda cls, v: coerce_created_at(v),
    )


def create_uuid_spec(name: str, *, use_default: bool) -> Spec:
    from kronos.utils._utils import to_uuid

    return Spec(
        UUID,
        name=name,
        default_factory=uuid4 if use_default else Unset,
        validator=lambda cls, v: to_uuid(v) if v is not None else None,
    )


def create_content_spec(
    name: str = "content",
    *,
    content_type: type = Unset,
    use_default: bool = False,
    default_factory=Unset,
) -> Spec:
    content_type = dict if is_sentinel(content_type) else content_type
    if use_default:
        _df = default_factory if not_sentinel(default_factory) else content_type
        return Spec(content_type, name=name, default_factory=_df)
    return Spec(content_type, name=name)


def create_embedding_spec(
    name: str = "embedding",
    *,
    use_default: bool = False,
    dim: int | UnsetType = Unset,
) -> Spec:
    """Create dimensioned embedding Spec

    Args:
        dim: Vector dimension (1536=OpenAI, 768=BERT, 384=MiniLM).
        name: DB column name.

    Returns:
        Spec[Vector[dim]] for DDL generation with correct pgvector type.

    Example:
        create_embedding_spec(1536)  # -> Vector(1536) in DDL
    """

    if is_sentinel(dim):
        if use_default:
            return Spec(list[float], name=name, default_factory=list)
        return Spec(list[float], name=name)

    from kronos.specs.adapters.sql_ddl import Vector

    return Spec(Vector[dim], name=name)


def create_change_by_spec(name: str, *, use_uuid: bool = True):
    """Create 'created_by'/'updated_by' Spec with UUID or str type.

    Args:
        name: Field name
        use_uuid: True=UUID type, False=str type

    Returns:
        Spec for 'created_by'/'updated_by' field
    """
    if use_uuid:
        return create_uuid_spec(name, use_default=False)
    return Spec(str, name=name)


def create_enumed_str_spec(name: str, *, default=None) -> Spec:
    """Create a Spec that stores enum values as strings.

    Args:
        name: Field name.
        enum_cls: Enum class (for documentation).
        default: Default enum member or string value.

    Returns:
        Spec[str] with validator that extracts .value from enum members.
    """
    if default:
        return Spec(
            str,
            name=name,
            default=_extract_enum_value(None, default),
            validator=_extract_enum_value,
        )
    return Spec(str, name=name, validator=_extract_enum_value)


def _extract_enum_value(_cls, v, /):
    """Extract .value from enum members."""
    return v.value if hasattr(v, "value") else v
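A brief sketch of the factory functions in use; the field names are illustrative, and note that dim is keyword-only per the signature above:

    from kronos.specs.catalog import EnforcementLevel
    from kronos.specs.factory import (
        create_change_by_spec,
        create_embedding_spec,
        create_enumed_str_spec,
    )

    # pgvector-typed embedding column for DDL generation.
    embedding = create_embedding_spec("embedding", dim=1536)

    # UUID-typed actor column, no default.
    created_by = create_change_by_spec("created_by", use_uuid=True)

    # Enum-backed string column whose default is stored as the string "advisory".
    level = create_enumed_str_spec("enforcement", default=EnforcementLevel.ADVISORY)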
kronos/specs/operable.py
ADDED
@@ -0,0 +1,314 @@
# Copyright (c) 2025 - 2026, HaiyangLi <quantocean.li at gmail dot com>
# SPDX-License-Identifier: Apache-2.0

from __future__ import annotations

from dataclasses import dataclass
from typing import TYPE_CHECKING, Any, Self

from kronos.protocols import Allowable, Hashable, implements
from kronos.types._sentinel import MaybeUnset, Unset, UnsetType, is_unset, not_sentinel

from .adapters.factory import AdapterType, get_adapter
from .protocol import SpecAdapter
from .spec import Spec

if TYPE_CHECKING:
    from pydantic import BaseModel

__all__ = ("Operable",)

DEFAULT_ADAPTER: AdapterType = "pydantic"


@implements(Hashable, Allowable)
@dataclass(frozen=True, slots=True)
class Operable:
    """Ordered Spec collection for framework-agnostic schema definition.

    Operable collects Spec objects into a semantic namespace with unique field names,
    then delegates structure composition to framework-specific adapters (Pydantic, SQL, etc.).

    Design:
    - Immutable: frozen dataclass, specs cannot change after creation
    - Ordered: field order preserved for serialization consistency
    - Adapter-agnostic: same Operable works with any supported framework
    - Composable: extend() for schema inheritance/override patterns

    Attributes:
        __op_fields__: Ordered tuple of Spec objects
        __adapter_name__: Adapter identifier ("pydantic", "sql", "dataclass")
        name: Optional schema name (used as default model name)

    Usage:
        # Define specs and compose into model
        specs = [Spec(str, name="title"), Spec(int, name="count")]
        op = Operable(specs, adapter="pydantic")
        Model = op.compose_structure("Record")

        # Extend existing schema
        extended = op.extend([Spec(float, name="score")])

        # Extract specs from existing model
        op = Operable.from_structure(ExistingModel, "pydantic")

    Adapter Interface:
        All framework operations go through op.adapter:
        - op.adapter.compose_structure(op, name) -> framework model class
        - op.adapter.validate_instance(Model, data) -> validated instance
        - op.adapter.extract_specs(Model) -> tuple of Specs

    See Also:
        Spec: Individual field specification
        get_adapter: Factory for adapter classes
    """

    __op_fields__: tuple[Spec, ...]
    __adapter_name__: str
    name: MaybeUnset[str | None] = Unset

    def __init__(
        self,
        specs: tuple[Spec, ...] | list[Spec] = tuple(),
        *,
        name: MaybeUnset[str | None] = Unset,
        adapter: AdapterType = DEFAULT_ADAPTER,
    ):
        """Initialize Operable with Spec collection and adapter.

        Args:
            specs: Tuple or list of Spec objects (order preserved)
            name: Schema name (defaults model name in compose_structure)
            adapter: Framework adapter ("pydantic", "sql", "dataclass")

        Raises:
            TypeError: If specs contains non-Spec objects
            ValueError: If duplicate field names detected
        """
        if isinstance(specs, list):
            specs = tuple(specs)

        for i, item in enumerate(specs):
            if not isinstance(item, Spec):
                raise TypeError(
                    f"All specs must be Spec objects, got {type(item).__name__} at index {i}"
                )

        names = [s.name for s in specs if s.name is not None]
        if len(names) != len(set(names)):
            from collections import Counter

            duplicates = [name for name, count in Counter(names).items() if count > 1]
            raise ValueError(
                f"Duplicate field names found: {duplicates}. Each spec must have a unique name."
            )

        object.__setattr__(self, "__op_fields__", specs)
        object.__setattr__(self, "__adapter_name__", adapter)
        object.__setattr__(self, "name", name)

    @property
    def adapter(self) -> type[SpecAdapter]:
        """Get adapter class for this Operable."""
        return get_adapter(self.__adapter_name__)

    def allowed(self) -> frozenset[str]:
        """Return set of valid field names from all specs."""
        return frozenset({i.name for i in self.__op_fields__})

    def check_allowed(self, *args, as_boolean: bool = False):
        """Validate field names exist in this Operable.

        Args:
            *args: Field names to check
            as_boolean: If True, return bool instead of raising

        Returns:
            True if all names valid, False if as_boolean=True and invalid

        Raises:
            ValueError: If any name invalid and as_boolean=False
        """
        if not set(args).issubset(self.allowed()):
            if as_boolean:
                return False
            raise ValueError(
                f"Some specified fields are not allowed: {set(args).difference(self.allowed())}"
            )
        return True

    def get(self, key: str, /, default=Unset) -> MaybeUnset[Spec]:
        """Get Spec by field name, returning default if not found."""
        if not self.check_allowed(key, as_boolean=True):
            return default
        for i in self.__op_fields__:
            if i.name == key:
                return i
        return default

    def extend(
        self,
        specs: list[Spec] | tuple[Spec, ...],
        *,
        name: MaybeUnset[str | None] = Unset,
        adapter: AdapterType | None = None,
    ) -> Operable:
        """Create new Operable with additional specs (overrides existing by name).

        Args:
            specs: Additional Spec objects to append/override
            name: Override name (defaults to self.name)
            adapter: Override adapter (defaults to self.__adapter_name__)

        Returns:
            New Operable with combined specs. If a spec in `specs` has the
            same name as an existing spec, the new spec replaces the old one.

        Example:
            extended = AUDIT_SPECS.extend([
                spec_embedding(1536),
                spec_content(JobContent),  # Overrides SPEC_CONTENT_JSONB
            ])
            Model = extended.compose_structure("Job", include={...}, base_type=Node)
        """
        new_names = {s.name for s in specs if s.name}
        combined = [s for s in self.__op_fields__ if s.name not in new_names]
        combined.extend(specs)

        return Operable(
            combined,
            name=name or self.name,
            adapter=adapter or self.__adapter_name__,
        )

    def get_specs(
        self,
        *,
        include: set[str] | UnsetType = Unset,
        exclude: set[str] | UnsetType = Unset,
    ) -> tuple[Spec, ...]:
        """Get filtered specs by include/exclude field names.

        Args:
            include: Only return specs with these names (mutually exclusive with exclude)
            exclude: Exclude specs with these names (mutually exclusive with include)

        Returns:
            Filtered tuple of Spec objects

        Raises:
            ValueError: If both include and exclude specified, or invalid names
        """
        if not_sentinel(include) and not_sentinel(exclude):
            raise ValueError("Cannot specify both include and exclude")

        if not_sentinel(include):
            if self.check_allowed(*include, as_boolean=True) is False:
                raise ValueError(
                    "Some specified fields are not allowed: "
                    f"{set(include).difference(self.allowed())}"
                )
            return tuple(self.get(i) for i in include if not is_unset(self.get(i)))  # type: ignore[misc]

        if not_sentinel(exclude):
            _discards = {self.get(i) for i in exclude if not is_unset(self.get(i))}
            return tuple(s for s in self.__op_fields__ if s not in _discards)

        return self.__op_fields__

    def compose_structure(
        self,
        name: str | UnsetType = Unset,
        *,
        include: set[str] | UnsetType = Unset,
        exclude: set[str] | UnsetType = Unset,
        **kw,
    ):
        """Compose a typed structure from specs via adapter.

        Args:
            name: Structure name (default: self.name or "DynamicStructure")
            include: Only include these field names
            exclude: Exclude these field names
            **kw: Additional adapter-specific kwargs

        Returns:
            Framework structure (e.g., Pydantic BaseModel, SQL DDL)
        """
        # Determine structure name: explicit > operable.name > fallback
        if is_unset(name):
            structure_name = self.name if self.name else "DynamicStructure"
        else:
            structure_name = name
        return self.adapter.compose_structure(
            self,
            structure_name,
            include=include,
            exclude=exclude,
            **kw,
        )

    @classmethod
    def from_structure(
        cls,
        structure: type[BaseModel],
        *,
        adapter: AdapterType = DEFAULT_ADAPTER,
        name: MaybeUnset[str | None] = Unset,
    ) -> Self:
        """Create Operable by extracting specs from a structure.

        Disassembles a structure and returns an Operable with Specs
        representing top-level fields.

        Args:
            structure: Structure class to extract specs from (e.g., Pydantic BaseModel)
            name: Optional operable name (defaults to structure class name)
            adapter: Adapter type for the operable

        Returns:
            Operable with Specs for each top-level field

        Example:
            >>> class MyModel(BaseModel):
            ...     name: str
            ...     age: int = 0
            ...     tags: list[str] | None = None
            >>> op = Operable.from_structure(MyModel, "pydantic")
            >>> op.allowed()  # {'name', 'age', 'tags'}
        """
        specs = get_adapter(adapter).extract_specs(structure)
        return cls(
            specs=specs,
            name=name or structure.__name__,
            adapter=adapter,
        )

    def validate_instance(self, structure: Any, data: dict, /) -> Any:
        """Validate data instance against this Operable's structure.

        Args:
            instance: Data instance to validate (e.g., dict, dataclass)

        Returns:
            Validated instance (may be transformed by adapter)

        Raises:
            ValidationError: If validation fails
        """
        specs = self.adapter.extract_specs(structure)
        if not {s.name for s in specs}.issubset(self.allowed()):
            raise ValueError("Structure contains fields not defined in this Operable")

        return self.adapter.validate_instance(structure, data)

    def dump_instance(self, instance: Any) -> dict:
        """Dump data instance to dict via this Operable's structure.

        Args:
            instance: Data instance to dump (e.g., Pydantic model, dataclass)

        Returns:
            Dict representation of the instance
        """
        return self.adapter.dump_instance(instance, self)
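To tie the adapter interface together, a minimal end-to-end sketch following the class docstring above, under the assumption that the default pydantic adapter behaves as described there; Record, "Extended", and the sample data are illustrative:

    from pydantic import BaseModel

    from kronos.specs.operable import Operable
    from kronos.specs.spec import Spec

    class Record(BaseModel):
        title: str
        count: int = 0

    # Extract specs from an existing model, then extend/override by field name.
    op = Operable.from_structure(Record)
    extended = op.extend([Spec(float, name="score")], name="Extended")

    # Compose a new Pydantic class and round-trip a data instance through it.
    ExtendedModel = extended.compose_structure()
    instance = extended.validate_instance(ExtendedModel, {"title": "a", "score": 1.0})
    payload = extended.dump_instance(instance)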