tstring-core 0.1.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,71 @@
1
+ Metadata-Version: 2.3
2
+ Name: tstring-core
3
+ Version: 0.1.0
4
+ Summary: Shared runtime primitives for PEP 750 structural template renderers
5
+ Keywords: pep750,t-strings,template-strings,runtime,structured-data
6
+ Author: Koudai Aono
7
+ Author-email: Koudai Aono <koxudaxi@gmail.com>
8
+ License: MIT
9
+ Classifier: Development Status :: 4 - Beta
10
+ Classifier: Intended Audience :: Developers
11
+ Classifier: License :: OSI Approved :: MIT License
12
+ Classifier: Programming Language :: Python :: 3
13
+ Classifier: Programming Language :: Python :: 3 :: Only
14
+ Classifier: Programming Language :: Python :: 3.14
15
+ Classifier: Topic :: Software Development :: Libraries :: Python Modules
16
+ Requires-Dist: tstring-bindings>=0.1.0
17
+ Maintainer: Koudai Aono
18
+ Maintainer-email: Koudai Aono <koxudaxi@gmail.com>
19
+ Requires-Python: >=3.14
20
+ Project-URL: Homepage, https://github.com/koxudaxi/tstring-structured-data
21
+ Project-URL: Repository, https://github.com/koxudaxi/tstring-structured-data
22
+ Project-URL: Documentation, https://github.com/koxudaxi/tstring-structured-data/blob/main/tstring-core/README.md
23
+ Project-URL: Changelog, https://github.com/koxudaxi/tstring-structured-data/blob/main/CHANGELOG.md
24
+ Project-URL: Issues, https://github.com/koxudaxi/tstring-structured-data/issues
25
+ Description-Content-Type: text/markdown
26
+
27
+ # tstring-core
28
+
29
+ Shared Python compatibility layer for the Rust-first JSON, TOML, and YAML
30
+ t-string backends.
31
+
32
+ Requires Python 3.14+.
33
+
34
+ This package depends on `tstring-bindings`, a native PyO3 extension. On
35
+ supported platforms, install from prebuilt wheels. Other environments require a
36
+ local Rust 1.94.0 toolchain build.
37
+
38
+ ## What It Provides
39
+
40
+ - shared error categories re-exported from the Rust bindings
41
+ - compatibility helper APIs for tokenization, spans, diagnostics, and slots
42
+ - a stable import surface for the Python wrapper packages and tests
43
+
44
+ ## What It Does Not Provide
45
+
46
+ - JSON grammar rules
47
+ - TOML grammar rules
48
+ - YAML grammar rules
49
+ - backend-specific representability policies
50
+
51
+ Those responsibilities now live in the Rust workspace under `rust/`.
52
+
53
+ ## Runtime Contract
54
+
55
+ Each backend exposed through the Python packages follows the same high-level
56
+ pipeline:
57
+
58
+ 1. validate that the input is a PEP 750 `Template`
59
+ 2. pass the template into the shared PyO3 bindings
60
+ 3. parse backend-specific structure in Rust
61
+ 4. run semantic checks in Rust
62
+ 5. render text and backend-native Python data
63
+ 6. materialize backend-native data from the same parsed/rendered structure
64
+
65
+ The shared layer is also responsible for keeping the Python-facing exception and
66
+ typing surface stable across the JSON, TOML, and YAML wrapper packages.
67
+
68
+ ## See also
69
+
70
+ - [Project README](https://github.com/koxudaxi/tstring-structured-data#readme)
71
+ - [Architecture](https://github.com/koxudaxi/tstring-structured-data/blob/main/docs/architecture.md)
@@ -0,0 +1,45 @@
1
+ # tstring-core
2
+
3
+ Shared Python compatibility layer for the Rust-first JSON, TOML, and YAML
4
+ t-string backends.
5
+
6
+ Requires Python 3.14+.
7
+
8
+ This package depends on `tstring-bindings`, a native PyO3 extension. On
9
+ supported platforms, install from prebuilt wheels. Other environments require a
10
+ local Rust 1.94.0 toolchain build.
11
+
12
+ ## What It Provides
13
+
14
+ - shared error categories re-exported from the Rust bindings
15
+ - compatibility helper APIs for tokenization, spans, diagnostics, and slots
16
+ - a stable import surface for the Python wrapper packages and tests
17
+
18
+ ## What It Does Not Provide
19
+
20
+ - JSON grammar rules
21
+ - TOML grammar rules
22
+ - YAML grammar rules
23
+ - backend-specific representability policies
24
+
25
+ Those responsibilities now live in the Rust workspace under `rust/`.
26
+
27
+ ## Runtime Contract
28
+
29
+ Each backend exposed through the Python packages follows the same high-level
30
+ pipeline:
31
+
32
+ 1. validate that the input is a PEP 750 `Template`
33
+ 2. pass the template into the shared PyO3 bindings
34
+ 3. parse backend-specific structure in Rust
35
+ 4. run semantic checks in Rust
36
+ 5. render text and backend-native Python data
37
+ 6. materialize backend-native data from the same parsed/rendered structure
38
+
39
+ The shared layer is also responsible for keeping the Python-facing exception and
40
+ typing surface stable across the JSON, TOML, and YAML wrapper packages.
41
+
42
+ ## See also
43
+
44
+ - [Project README](https://github.com/koxudaxi/tstring-structured-data#readme)
45
+ - [Architecture](https://github.com/koxudaxi/tstring-structured-data/blob/main/docs/architecture.md)
@@ -0,0 +1,53 @@
1
+ [project]
2
+ name = "tstring-core"
3
+ version = "0.1.0"
4
+ description = "Shared runtime primitives for PEP 750 structural template renderers"
5
+ readme = "README.md"
6
+ license = { text = "MIT" }
7
+ authors = [{ name = "Koudai Aono", email = "koxudaxi@gmail.com" }]
8
+ maintainers = [{ name = "Koudai Aono", email = "koxudaxi@gmail.com" }]
9
+ requires-python = ">=3.14"
10
+ dependencies = ["tstring-bindings>=0.1.0"]
11
+ keywords = ["pep750", "t-strings", "template-strings", "runtime", "structured-data"]
12
+ classifiers = [
13
+ "Development Status :: 4 - Beta",
14
+ "Intended Audience :: Developers",
15
+ "License :: OSI Approved :: MIT License",
16
+ "Programming Language :: Python :: 3",
17
+ "Programming Language :: Python :: 3 :: Only",
18
+ "Programming Language :: Python :: 3.14",
19
+ "Topic :: Software Development :: Libraries :: Python Modules",
20
+ ]
21
+
22
+ [project.urls]
23
+ Homepage = "https://github.com/koxudaxi/tstring-structured-data"
24
+ Repository = "https://github.com/koxudaxi/tstring-structured-data"
25
+ Documentation = "https://github.com/koxudaxi/tstring-structured-data/blob/main/tstring-core/README.md"
26
+ Changelog = "https://github.com/koxudaxi/tstring-structured-data/blob/main/CHANGELOG.md"
27
+ Issues = "https://github.com/koxudaxi/tstring-structured-data/issues"
28
+
29
+ [build-system]
30
+ requires = ["uv_build>=0.10.9,<0.11.0"]
31
+ build-backend = "uv_build"
32
+
33
+ [dependency-groups]
34
+ dev = [
35
+ "ruff>=0.11.0",
36
+ "ty>=0.0.23",
37
+ ]
38
+
39
+ [tool.uv.sources]
40
+ tstring-bindings = { workspace = true }
41
+
42
+ [tool.uv.build-backend]
43
+ source-exclude = [".DS_Store"]
44
+ wheel-exclude = [".DS_Store"]
45
+
46
+ [tool.ty.environment]
47
+ extra-paths = ["../rust/python-bindings/python"]
48
+
49
+ [tool.ruff]
50
+ target-version = "py314"
51
+
52
+ [tool.ruff.lint]
53
+ select = ["B", "E", "F", "I"]
@@ -0,0 +1,73 @@
1
+ from __future__ import annotations
2
+
3
+ import warnings
4
+
5
+ from ._diagnostics import Diagnostic, DiagnosticSeverity
6
+ from ._errors import (
7
+ TemplateError,
8
+ TemplateParseError,
9
+ TemplateSemanticError,
10
+ UnrepresentableValueError,
11
+ )
12
+ from ._nodes import TemplateNode
13
+ from ._slots import FragmentGroup, Slot, SlotContext
14
+ from ._spans import SourcePosition, SourceSpan
15
+ from ._tokens import (
16
+ InterpolationToken,
17
+ StaticTextToken,
18
+ StreamItem,
19
+ TemplateToken,
20
+ flatten_template,
21
+ tokenize_template,
22
+ )
23
+ from ._types import JsonValue, StructuredData, TomlValue, YamlKey, YamlValue
24
+ from ._values import RenderResult, ValueKind
25
+
26
_DEPRECATED_ROOT_EXPORTS = frozenset(
    {"ParserFirstBackend", "ReturnMode", "render_with_backend"}
)


def __getattr__(name: str) -> object:
    """Lazily resolve deprecated root-level names from the private ``_render`` module.

    Unknown attributes raise AttributeError immediately. Deprecated names are
    fetched from ``._render``, a DeprecationWarning is emitted, and the value
    is cached in the module globals so later lookups bypass this hook (and the
    warning fires only once per process).
    """
    if name in _DEPRECATED_ROOT_EXPORTS:
        from . import _render

        value = getattr(_render, name)
        warnings.warn(
            f"tstring_core.{name} is deprecated and will be removed in a future release.",
            DeprecationWarning,
            stacklevel=2,
        )
        # Cache on the module so subsequent accesses skip __getattr__ entirely.
        globals()[name] = value
        return value

    raise AttributeError(f"module {__name__!r} has no attribute {name!r}")
45
+
46
+
47
# Public API surface. Kept in strict sorted order (uppercase names first,
# then lowercase functions) so additions are easy to diff and duplicates are
# easy to spot; the original list had "TomlValue"/"TemplateToken" swapped and
# "StructuredData" appended out of order.
__all__ = [
    "Diagnostic",
    "DiagnosticSeverity",
    "FragmentGroup",
    "InterpolationToken",
    "JsonValue",
    "RenderResult",
    "Slot",
    "SlotContext",
    "SourcePosition",
    "SourceSpan",
    "StaticTextToken",
    "StreamItem",
    "StructuredData",
    "TemplateError",
    "TemplateNode",
    "TemplateParseError",
    "TemplateSemanticError",
    "TemplateToken",
    "TomlValue",
    "UnrepresentableValueError",
    "ValueKind",
    "YamlKey",
    "YamlValue",
    "flatten_template",
    "tokenize_template",
]
@@ -0,0 +1,129 @@
1
+ from __future__ import annotations
2
+
3
+ import json
4
+ import tomllib
5
+ from dataclasses import dataclass
6
+ from pathlib import Path
7
+ from typing import TypedDict, cast
8
+
9
+ from ._types import JsonValue
10
+
11
+
12
class _ConformanceProfile(TypedDict):
    """Schema of one entry in the ``profiles`` table of ``profiles.toml``."""

    # Manifest location, relative to the format's conformance root directory.
    manifest_path: str


class _ConformanceProfileIndex(TypedDict):
    """Schema of a format's ``profiles.toml`` index document."""

    # Profile names accepted by load_conformance_suite().
    supported_profiles: list[str]
    # Profile name -> profile entry.
    profiles: dict[str, _ConformanceProfile]


class _ConformanceProvenance(TypedDict):
    """Schema of the ``provenance`` table inside a manifest."""

    source: str
    snapshot: str


class _ConformanceCaseRecord(TypedDict, total=False):
    """Schema of one ``cases`` entry in a manifest.

    ``total=False`` makes every key optional at the type level; which keys a
    record actually needs is determined by the ConformanceCase dataclass the
    record is splatted into.
    """

    case_id: str
    spec_ref: str
    expected: str
    execution_layer: str
    input_path: str
    note: str
    expected_json_path: str
    classification: str


class _ConformanceManifest(TypedDict):
    """Schema of a profile manifest TOML document."""

    spec_title: str
    claim_status: str
    provenance: _ConformanceProvenance
    cases: list[_ConformanceCaseRecord]
42
+
43
+
44
@dataclass(frozen=True)
class ConformanceCase:
    """One conformance fixture for a format; file contents are read lazily."""

    format_name: str
    case_id: str
    spec_ref: str
    expected: str
    execution_layer: str
    input_path: str
    note: str
    expected_json_path: str | None = None
    classification: str | None = None

    def input_text(self) -> str:
        """Read the raw fixture input as UTF-8, preserving original newlines."""
        source = self.base_dir / self.input_path
        # newline="" disables universal-newline translation so the fixture's
        # exact line endings reach the code under test.
        with source.open("r", encoding="utf-8", newline="") as handle:
            return handle.read()

    def expected_json(self) -> JsonValue | None:
        """Load the expected JSON document, or None when the case has none."""
        if self.expected_json_path is None:
            return None
        raw = self.base_dir.joinpath(self.expected_json_path).read_text(
            encoding="utf-8"
        )
        return cast(JsonValue, json.loads(raw))

    @property
    def base_dir(self) -> Path:
        """Directory that holds this format's conformance fixtures."""
        return _repo_root().joinpath("conformance", self.format_name)
77
+
78
+
79
@dataclass(frozen=True)
class ConformanceSuite:
    """Immutable bundle of conformance cases plus manifest metadata."""

    format_name: str
    profile: str
    spec_title: str
    claim_status: str
    source: str
    snapshot: str
    cases: tuple[ConformanceCase, ...]

    def iter_cases(self, layer: str) -> tuple[ConformanceCase, ...]:
        """Return the cases targeting *layer*, including dual-layer ("both") cases."""
        selected: list[ConformanceCase] = []
        for candidate in self.cases:
            if candidate.execution_layer == layer or candidate.execution_layer == "both":
                selected.append(candidate)
        return tuple(selected)
93
+
94
+
95
def load_conformance_suite(format_name: str, profile: str) -> ConformanceSuite:
    """Load the conformance suite for *format_name*/*profile* from the repo tree.

    Raises:
        ValueError: if *profile* is not listed in the format's
            ``profiles.toml`` index.
    """
    format_root = _repo_root() / "conformance" / format_name

    index_text = (format_root / "profiles.toml").read_text(encoding="utf-8")
    profile_index = cast(_ConformanceProfileIndex, tomllib.loads(index_text))

    supported_profiles = tuple(profile_index["supported_profiles"])
    if profile not in supported_profiles:
        supported_profile_list = ", ".join(repr(value) for value in supported_profiles)
        raise ValueError(
            f"Unsupported {format_name} conformance profile {profile!r}. "
            f"Supported profiles: {supported_profile_list}."
        )

    manifest_location = (
        format_root / profile_index["profiles"][profile]["manifest_path"]
    )
    manifest = cast(
        _ConformanceManifest,
        tomllib.loads(manifest_location.read_text(encoding="utf-8")),
    )

    provenance = manifest["provenance"]
    loaded_cases = [
        ConformanceCase(format_name=format_name, **record)
        for record in manifest["cases"]
    ]
    return ConformanceSuite(
        format_name=format_name,
        profile=profile,
        spec_title=manifest["spec_title"],
        claim_status=manifest["claim_status"],
        source=provenance["source"],
        snapshot=provenance["snapshot"],
        cases=tuple(loaded_cases),
    )
126
+
127
+
128
+ def _repo_root() -> Path:
129
+ return Path(__file__).resolve().parents[3]
@@ -0,0 +1,20 @@
1
+ from __future__ import annotations
2
+
3
+ from dataclasses import dataclass
4
+ from enum import Enum
5
+
6
+ from ._spans import SourceSpan
7
+
8
+
9
class DiagnosticSeverity(str, Enum):
    """Severity of a diagnostic; str-valued so members compare/serialize as text."""

    ERROR = "error"
    WARNING = "warning"
    INFO = "info"


@dataclass(frozen=True, slots=True)
class Diagnostic:
    """A single immutable diagnostic produced while processing a template."""

    # Stable, machine-readable identifier for this kind of diagnostic.
    code: str
    # Human-readable description of the problem.
    message: str
    # Source location the diagnostic refers to, when one is known.
    span: SourceSpan | None = None
    # Defaults to ERROR; producers downgrade to WARNING/INFO explicitly.
    severity: DiagnosticSeverity = DiagnosticSeverity.ERROR
@@ -0,0 +1,15 @@
1
+ from __future__ import annotations
2
+
3
+ from tstring_bindings import (
4
+ TemplateError,
5
+ TemplateParseError,
6
+ TemplateSemanticError,
7
+ UnrepresentableValueError,
8
+ )
9
+
10
# Re-export surface: these exception classes are defined in the native
# tstring_bindings extension (imported above); this module pins them to a
# stable pure-Python import path.
__all__ = [
    "TemplateError",
    "TemplateParseError",
    "TemplateSemanticError",
    "UnrepresentableValueError",
]
@@ -0,0 +1,12 @@
1
+ from __future__ import annotations
2
+
3
+ from dataclasses import dataclass, field
4
+
5
+ from ._diagnostics import Diagnostic
6
+ from ._spans import SourceSpan
7
+
8
+
9
@dataclass(slots=True)
class TemplateNode:
    """Base parse-tree node: a source span plus diagnostics attached to it."""

    # Region of the template this node covers.
    span: SourceSpan
    # Diagnostics collected for this node; mutable and appended to later.
    # kw_only so subclasses can add positional fields without colliding.
    diagnostics: list[Diagnostic] = field(default_factory=list, kw_only=True)
@@ -0,0 +1,83 @@
1
+ from __future__ import annotations
2
+
3
+ from string.templatelib import Template
4
+ from typing import Literal, Protocol, TypeIs, TypeVar, overload
5
+
6
+ from ._errors import TemplateParseError
7
+ from ._tokens import tokenize_template
8
+ from ._types import StructuredData
9
+ from ._values import RenderResult
10
+
11
# "data" returns backend-native Python data; "text" returns the rendered text.
ReturnMode = Literal["data", "text"]
# Backend-specific parse-tree node type.
NodeT = TypeVar("NodeT")
# Backend-native data type produced by rendering.
DataT = TypeVar("DataT", bound=StructuredData)


class ParserFirstBackend(Protocol[NodeT, DataT]):
    """Structural interface for a parse -> analyze -> render backend pipeline."""

    def parse(self, template: Template) -> NodeT: ...

    def analyze(self, node: NodeT) -> None: ...

    def render(self, node: NodeT) -> RenderResult[DataT]: ...
22
+
23
+
24
@overload
def render_with_backend(
    template: Template,
    *,
    api_name: str,
    backend: ParserFirstBackend[NodeT, DataT],
    return_mode: Literal["data"],
) -> DataT: ...


@overload
def render_with_backend(
    template: Template,
    *,
    api_name: str,
    backend: ParserFirstBackend[NodeT, DataT],
    return_mode: Literal["text"],
) -> str: ...


def render_with_backend(
    template: Template,
    *,
    api_name: str,
    backend: ParserFirstBackend[NodeT, DataT],
    return_mode: ReturnMode,
) -> DataT | str:
    """Drive *backend* through the validate/parse/analyze/render pipeline.

    *api_name* is used only in error messages so failures name the public API
    the caller actually invoked. *return_mode* selects the rendered text or
    the backend-native data from the same render.
    """
    _ensure_template(template, api_name)
    # Run tokenization for its validation side effects; the tokens themselves
    # are not needed here.
    tokenize_template(template)

    parsed = backend.parse(template)
    backend.analyze(parsed)
    result = backend.render(parsed)

    return result.text if return_mode == "text" else result.data
61
+
62
+
63
+ def _is_template(value: object) -> TypeIs[Template]:
64
+ return isinstance(value, Template)
65
+
66
+
67
+ def _ensure_template(template: object, api_name: str) -> None:
68
+ if _is_template(template):
69
+ return
70
+
71
+ message = (
72
+ f"{api_name} require a PEP 750 Template object. "
73
+ f"Got {type(template).__name__} instead."
74
+ )
75
+ raise TypeError(message)
76
+
77
+
78
# Names exported from this module; ParserFirstBackend, ReturnMode and
# render_with_backend are the targets of the deprecated root-level shim in
# the package __init__. TemplateParseError (imported above) is re-exported.
__all__ = [
    "ParserFirstBackend",
    "ReturnMode",
    "TemplateParseError",
    "render_with_backend",
]
@@ -0,0 +1,24 @@
1
+ from __future__ import annotations
2
+
3
+ from dataclasses import dataclass
4
+ from enum import Enum
5
+
6
+
7
class SlotContext(str, Enum):
    """Where an interpolation slot appears within the surrounding document."""

    VALUE = "value"
    KEY = "key"
    STRING_FRAGMENT = "string_fragment"
    UNSUPPORTED = "unsupported"


@dataclass(slots=True)
class Slot:
    """An interpolation slot identified by its ordinal id."""

    # Slot ordinal -- NOTE(review): numbering base not shown here; confirm
    # against the producers of Slot instances.
    id: int


@dataclass(slots=True)
class FragmentGroup:
    """A contiguous group of slots with start/end offsets."""

    # First and last slot ids in the group (inclusive bounds presumed --
    # TODO confirm at the call sites).
    start_slot: int
    end_slot: int
    # Character offsets delimiting the group within the adjacent text --
    # NOTE(review): exact reference point not visible in this module.
    start_offset: int
    end_offset: int
@@ -0,0 +1,30 @@
1
+ from __future__ import annotations
2
+
3
+ from dataclasses import dataclass
4
+
5
+
6
+ @dataclass(frozen=True, slots=True)
7
+ class SourcePosition:
8
+ token_index: int
9
+ offset: int
10
+
11
+
12
+ @dataclass(frozen=True, slots=True)
13
+ class SourceSpan:
14
+ start: SourcePosition
15
+ end: SourcePosition
16
+
17
+ @classmethod
18
+ def point(cls, token_index: int, offset: int) -> "SourceSpan":
19
+ position = SourcePosition(token_index=token_index, offset=offset)
20
+ return cls(start=position, end=position)
21
+
22
+ @classmethod
23
+ def between(cls, start: SourcePosition, end: SourcePosition) -> "SourceSpan":
24
+ return cls(start=start, end=end)
25
+
26
+ def extend(self, end: SourcePosition) -> "SourceSpan":
27
+ return SourceSpan(start=self.start, end=end)
28
+
29
+ def merge(self, other: "SourceSpan") -> "SourceSpan":
30
+ return SourceSpan(start=self.start, end=other.end)
@@ -0,0 +1,136 @@
1
+ from __future__ import annotations
2
+
3
+ from dataclasses import dataclass
4
+ from string.templatelib import Interpolation, Template
5
+ from typing import Literal, cast
6
+
7
+ from ._spans import SourcePosition, SourceSpan
8
+
9
+ TemplateTokenKind = Literal["text", "interpolation"]
10
+ StreamItemKind = Literal["char", "interpolation", "eof"]
11
+
12
+
13
+ @dataclass(frozen=True, slots=True)
14
+ class StaticTextToken:
15
+ text: str
16
+ token_index: int
17
+ span: SourceSpan
18
+ kind: TemplateTokenKind = "text"
19
+
20
+
21
+ @dataclass(frozen=True, slots=True)
22
+ class InterpolationToken:
23
+ interpolation: Interpolation
24
+ interpolation_index: int
25
+ token_index: int
26
+ span: SourceSpan
27
+ kind: TemplateTokenKind = "interpolation"
28
+
29
+ @property
30
+ def expression(self) -> str:
31
+ return self.interpolation.expression or f"slot {self.interpolation_index}"
32
+
33
+
34
+ TemplateToken = StaticTextToken | InterpolationToken
35
+
36
+
37
+ @dataclass(frozen=True, slots=True)
38
+ class StreamItem:
39
+ kind: StreamItemKind
40
+ value: str | Interpolation | None
41
+ span: SourceSpan
42
+ interpolation_index: int | None = None
43
+
44
+ @property
45
+ def char(self) -> str | None:
46
+ return cast("str | None", self.value) if self.kind == "char" else None
47
+
48
+ @property
49
+ def interpolation(self) -> Interpolation | None:
50
+ return (
51
+ cast("Interpolation | None", self.value)
52
+ if self.kind == "interpolation"
53
+ else None
54
+ )
55
+
56
+
57
def tokenize_template(template: Template) -> list[TemplateToken]:
    """Flatten a Template into an ordered list of text/interpolation tokens.

    Empty static strings between interpolations are dropped, except that a
    completely empty template still yields a single empty text token so
    callers always receive at least one token.
    """
    tokens: list[TemplateToken] = []

    def add_text(text: str) -> None:
        # token_index is simply the position the token takes in the list.
        index = len(tokens)
        start = SourcePosition(token_index=index, offset=0)
        end = SourcePosition(token_index=index, offset=len(text))
        tokens.append(
            StaticTextToken(
                text=text,
                token_index=index,
                span=SourceSpan.between(start, end),
            )
        )

    for slot_index, interpolation in enumerate(template.interpolations):
        leading_text = template.strings[slot_index]
        if leading_text:
            add_text(leading_text)
        index = len(tokens)
        tokens.append(
            InterpolationToken(
                interpolation=interpolation,
                interpolation_index=slot_index,
                token_index=index,
                span=SourceSpan.point(token_index=index, offset=0),
            )
        )

    # There is always one more static string than there are interpolations.
    tail = template.strings[len(template.interpolations)]
    if tail or not tokens:
        add_text(tail)

    return tokens
100
+
101
+
102
def flatten_template(template: Template) -> list[StreamItem]:
    """Expand a template into a per-character stream terminated by an EOF item.

    Static text tokens are split into single-character "char" items while
    interpolations pass through as single items. The trailing "eof" item
    reuses the last real item's span (or a zero point for empty streams).
    """
    items: list[StreamItem] = []

    for token in tokenize_template(template):
        if isinstance(token, StaticTextToken):
            items.extend(_char_items(token))
        else:
            items.append(
                StreamItem(
                    kind="interpolation",
                    value=token.interpolation,
                    interpolation_index=token.interpolation_index,
                    span=token.span,
                )
            )

    if items:
        eof_span = items[-1].span
    else:
        eof_span = SourceSpan.point(token_index=0, offset=0)
    items.append(StreamItem(kind="eof", value=None, span=eof_span))
    return items


def _char_items(token: StaticTextToken) -> list[StreamItem]:
    """One "char" StreamItem per character of *token*, each with a 1-char span."""
    result: list[StreamItem] = []
    for offset, char in enumerate(token.text):
        start = SourcePosition(token_index=token.token_index, offset=offset)
        end = SourcePosition(token_index=token.token_index, offset=offset + 1)
        result.append(
            StreamItem(kind="char", value=char, span=SourceSpan.between(start, end))
        )
    return result
@@ -0,0 +1,15 @@
1
+ from __future__ import annotations
2
+
3
+ from datetime import date, datetime, time
4
+
5
# JSON values: scalars plus recursively-defined lists and string-keyed dicts.
type JsonScalar = None | bool | int | float | str
type JsonValue = JsonScalar | list[JsonValue] | dict[str, JsonValue]

# TOML values: no null; adds date/time/datetime scalars.
type TomlScalar = bool | int | float | str | date | time | datetime
type TomlValue = TomlScalar | list[TomlValue] | dict[str, TomlValue]

# YAML values: mapping keys may themselves be scalars, tuples, or frozensets
# of key/value pairs, so keys remain hashable.
type YamlScalar = None | bool | int | float | str
type YamlKey = YamlScalar | tuple[YamlKey, ...] | frozenset[tuple[YamlKey, YamlKey]]
type YamlValue = YamlScalar | list[YamlValue] | dict[YamlKey, YamlValue]

# Union accepted anywhere a backend-native document of any format is expected.
type StructuredData = JsonValue | TomlValue | YamlValue
@@ -0,0 +1,20 @@
1
+ from __future__ import annotations
2
+
3
+ from dataclasses import dataclass
4
+ from enum import Enum
5
+ from typing import Generic, TypeVar
6
+
7
+ from ._types import StructuredData
8
+
9
# Covariant so a RenderResult of a narrower data type is usable where a
# RenderResult[StructuredData] is expected.
TData = TypeVar("TData", bound=StructuredData, covariant=True)


class ValueKind(str, Enum):
    """Discriminator between the two representations of a render result."""

    TEXT = "text"
    DATA = "data"


@dataclass(frozen=True, slots=True)
class RenderResult(Generic[TData]):
    """Immutable pair of a rendered text form and its structured-data form."""

    # Rendered textual output.
    text: str
    # Backend-native structured data produced by the same render.
    data: TData
@@ -0,0 +1 @@
1
+ # Marker file for PEP 561 typing support.