kodit 0.5.0__py3-none-any.whl → 0.5.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of kodit may be problematic — click here for more details.
- kodit/_version.py +2 -2
- kodit/app.py +10 -12
- kodit/application/factories/server_factory.py +53 -11
- kodit/application/services/commit_indexing_application_service.py +188 -31
- kodit/config.py +3 -3
- kodit/domain/enrichments/__init__.py +1 -0
- kodit/domain/enrichments/architecture/__init__.py +1 -0
- kodit/domain/enrichments/architecture/architecture.py +20 -0
- kodit/domain/enrichments/architecture/physical/__init__.py +1 -0
- kodit/domain/enrichments/architecture/physical/discovery_notes.py +14 -0
- kodit/domain/enrichments/architecture/physical/formatter.py +11 -0
- kodit/domain/enrichments/architecture/physical/physical.py +17 -0
- kodit/domain/enrichments/development/__init__.py +1 -0
- kodit/domain/enrichments/development/development.py +18 -0
- kodit/domain/enrichments/development/snippet/__init__.py +1 -0
- kodit/domain/enrichments/development/snippet/snippet.py +21 -0
- kodit/domain/enrichments/enricher.py +17 -0
- kodit/domain/enrichments/enrichment.py +39 -0
- kodit/domain/enrichments/request.py +12 -0
- kodit/domain/enrichments/response.py +11 -0
- kodit/domain/enrichments/usage/__init__.py +1 -0
- kodit/domain/enrichments/usage/api_docs.py +19 -0
- kodit/domain/enrichments/usage/usage.py +18 -0
- kodit/domain/protocols.py +7 -6
- kodit/domain/services/enrichment_service.py +9 -30
- kodit/domain/services/physical_architecture_service.py +182 -0
- kodit/domain/value_objects.py +6 -23
- kodit/infrastructure/api/v1/routers/commits.py +81 -0
- kodit/infrastructure/api/v1/schemas/enrichment.py +29 -0
- kodit/infrastructure/cloning/git/git_python_adaptor.py +71 -4
- kodit/infrastructure/enricher/__init__.py +1 -0
- kodit/infrastructure/enricher/enricher_factory.py +53 -0
- kodit/infrastructure/{enrichment/litellm_enrichment_provider.py → enricher/litellm_enricher.py} +20 -33
- kodit/infrastructure/{enrichment/local_enrichment_provider.py → enricher/local_enricher.py} +19 -24
- kodit/infrastructure/enricher/null_enricher.py +36 -0
- kodit/infrastructure/mappers/enrichment_mapper.py +83 -0
- kodit/infrastructure/mappers/snippet_mapper.py +20 -22
- kodit/infrastructure/physical_architecture/__init__.py +1 -0
- kodit/infrastructure/physical_architecture/detectors/__init__.py +1 -0
- kodit/infrastructure/physical_architecture/detectors/docker_compose_detector.py +336 -0
- kodit/infrastructure/physical_architecture/formatters/__init__.py +1 -0
- kodit/infrastructure/physical_architecture/formatters/narrative_formatter.py +149 -0
- kodit/infrastructure/slicing/api_doc_extractor.py +836 -0
- kodit/infrastructure/slicing/ast_analyzer.py +1128 -0
- kodit/infrastructure/slicing/slicer.py +56 -391
- kodit/infrastructure/sqlalchemy/enrichment_v2_repository.py +118 -0
- kodit/infrastructure/sqlalchemy/entities.py +46 -38
- kodit/infrastructure/sqlalchemy/git_branch_repository.py +22 -11
- kodit/infrastructure/sqlalchemy/git_commit_repository.py +23 -14
- kodit/infrastructure/sqlalchemy/git_repository.py +27 -17
- kodit/infrastructure/sqlalchemy/git_tag_repository.py +22 -11
- kodit/infrastructure/sqlalchemy/snippet_v2_repository.py +101 -106
- kodit/migrations/versions/19f8c7faf8b9_add_generic_enrichment_type.py +260 -0
- kodit/utils/dump_config.py +361 -0
- kodit/utils/dump_openapi.py +5 -6
- {kodit-0.5.0.dist-info → kodit-0.5.1.dist-info}/METADATA +1 -1
- {kodit-0.5.0.dist-info → kodit-0.5.1.dist-info}/RECORD +61 -30
- kodit/infrastructure/enrichment/__init__.py +0 -1
- kodit/infrastructure/enrichment/enrichment_factory.py +0 -52
- kodit/infrastructure/enrichment/null_enrichment_provider.py +0 -19
- /kodit/infrastructure/{enrichment → enricher}/utils.py +0 -0
- {kodit-0.5.0.dist-info → kodit-0.5.1.dist-info}/WHEEL +0 -0
- {kodit-0.5.0.dist-info → kodit-0.5.1.dist-info}/entry_points.txt +0 -0
- {kodit-0.5.0.dist-info → kodit-0.5.1.dist-info}/licenses/LICENSE +0 -0
|
@@ -0,0 +1,260 @@
|
|
|
1
|
+
# ruff: noqa
|
|
2
|
+
"""add generic enrichment type
|
|
3
|
+
|
|
4
|
+
Revision ID: 19f8c7faf8b9
|
|
5
|
+
Revises: f9e5ef5e688f
|
|
6
|
+
Create Date: 2025-09-29 21:38:19.093821
|
|
7
|
+
|
|
8
|
+
"""
|
|
9
|
+
|
|
10
|
+
from typing import Sequence, Union
|
|
11
|
+
|
|
12
|
+
from alembic import op
|
|
13
|
+
import sqlalchemy as sa
|
|
14
|
+
|
|
15
|
+
from kodit.domain.enrichments.development.development import ENRICHMENT_TYPE_DEVELOPMENT
|
|
16
|
+
from kodit.domain.enrichments.development.snippet.snippet import (
|
|
17
|
+
ENRICHMENT_SUBTYPE_SNIPPET_SUMMARY,
|
|
18
|
+
)
|
|
19
|
+
|
|
20
|
+
|
|
21
|
+
# revision identifiers, used by Alembic.
|
|
22
|
+
revision: str = "19f8c7faf8b9"
|
|
23
|
+
down_revision: Union[str, None] = "f9e5ef5e688f"
|
|
24
|
+
branch_labels: Union[str, Sequence[str], None] = None
|
|
25
|
+
depends_on: Union[str, Sequence[str], None] = None
|
|
26
|
+
|
|
27
|
+
|
|
28
|
+
def upgrade() -> None:
    """Upgrade schema.

    Creates the generic ``enrichments_v2`` table plus the polymorphic
    ``enrichment_associations`` join table, then copies every row of the
    legacy ``enrichments`` table into the new structure (tagging each one as
    a development/snippet-summary enrichment) and finally drops the legacy
    table.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    # Generic enrichment store: content is typed by (type, subtype) instead of
    # being hard-wired to snippets.
    op.create_table(
        "enrichments_v2",
        sa.Column("content", sa.UnicodeText(), nullable=False),
        sa.Column("id", sa.Integer(), autoincrement=True, nullable=False),
        sa.Column("type", sa.String(length=255), nullable=False),
        sa.Column("subtype", sa.String(length=255), nullable=False),
        sa.Column(
            "created_at",
            sa.DateTime(timezone=True),
            nullable=False,
        ),
        sa.Column(
            "updated_at",
            sa.DateTime(timezone=True),
            nullable=False,
        ),
        sa.PrimaryKeyConstraint("id"),
    )
    # Composite index for (type, subtype) lookups plus single-column indexes.
    op.create_index(
        "idx_type_subtype", "enrichments_v2", ["type", "subtype"], unique=False
    )
    op.create_index(
        op.f("ix_enrichments_v2_type"), "enrichments_v2", ["type"], unique=False
    )
    op.create_index(
        op.f("ix_enrichments_v2_subtype"), "enrichments_v2", ["subtype"], unique=False
    )
    # Association table: links an enrichment to an arbitrary entity identified
    # by (entity_type, entity_id); deleting an enrichment cascades here.
    op.create_table(
        "enrichment_associations",
        sa.Column("enrichment_id", sa.Integer(), nullable=False),
        sa.Column("entity_type", sa.String(length=50), nullable=False),
        sa.Column("entity_id", sa.String(length=255), nullable=False),
        sa.Column("id", sa.Integer(), autoincrement=True, nullable=False),
        sa.Column(
            "created_at",
            sa.DateTime(timezone=True),
            nullable=False,
        ),
        sa.Column(
            "updated_at",
            sa.DateTime(timezone=True),
            nullable=False,
        ),
        sa.ForeignKeyConstraint(
            ["enrichment_id"], ["enrichments_v2.id"], ondelete="CASCADE"
        ),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint(
            "entity_type", "entity_id", "enrichment_id", name="uix_entity_enrichment"
        ),
        sqlite_autoincrement=True,
    )
    op.create_index(
        "idx_entity_lookup",
        "enrichment_associations",
        ["entity_type", "entity_id"],
        unique=False,
    )
    op.create_index(
        op.f("ix_enrichment_associations_enrichment_id"),
        "enrichment_associations",
        ["enrichment_id"],
        unique=False,
    )
    op.create_index(
        op.f("ix_enrichment_associations_entity_id"),
        "enrichment_associations",
        ["entity_id"],
        unique=False,
    )
    op.create_index(
        op.f("ix_enrichment_associations_entity_type"),
        "enrichment_associations",
        ["entity_type"],
        unique=False,
    )

    # Migrate existing enrichments from old table to new structure
    # All old enrichments are snippet enrichments
    connection = op.get_bind()

    # Check if old enrichments table exists (fresh installs never had it).
    inspector = sa.inspect(connection)
    if "enrichments" in inspector.get_table_names():
        # Copy enrichments to new table
        old_enrichments = connection.execute(
            sa.text(
                "SELECT id, content, created_at, updated_at, snippet_sha FROM enrichments"
            )
        ).fetchall()

        # NOTE(review): row-by-row insert; fine for migration-sized data but
        # O(n) round-trips on large tables.
        for old_enrichment in old_enrichments:
            # Check if database supports RETURNING (PostgreSQL) or use lastrowid (SQLite)
            dialect_name = connection.dialect.name

            if dialect_name == "postgresql":
                # PostgreSQL: fetch the new primary key via RETURNING.
                result = connection.execute(
                    sa.text(
                        "INSERT INTO enrichments_v2 (type, subtype, content, created_at, updated_at) "
                        "VALUES (:type, :subtype, :content, :created_at, :updated_at) "
                        "RETURNING id"
                    ),
                    {
                        "type": ENRICHMENT_TYPE_DEVELOPMENT,
                        "subtype": ENRICHMENT_SUBTYPE_SNIPPET_SUMMARY,
                        "content": old_enrichment[1],
                        "created_at": old_enrichment[2],
                        "updated_at": old_enrichment[3],
                    },
                )
                row = result.fetchone()
                if row is None:
                    raise RuntimeError("Failed to insert enrichment")
                new_enrichment_id = row[0]
            else:
                # SQLite and other databases
                result = connection.execute(
                    sa.text(
                        "INSERT INTO enrichments_v2 (type, subtype, content, created_at, updated_at) "
                        "VALUES (:type, :subtype, :content, :created_at, :updated_at)"
                    ),
                    {
                        "type": ENRICHMENT_TYPE_DEVELOPMENT,
                        "subtype": ENRICHMENT_SUBTYPE_SNIPPET_SUMMARY,
                        "content": old_enrichment[1],
                        "created_at": old_enrichment[2],
                        "updated_at": old_enrichment[3],
                    },
                )
                new_enrichment_id = result.lastrowid

            # Insert association (snippet_v2 is the entity type for snippets);
            # timestamps are copied from the legacy enrichment row.
            connection.execute(
                sa.text(
                    "INSERT INTO enrichment_associations "
                    "(enrichment_id, entity_type, entity_id, created_at, updated_at) "
                    "VALUES (:enrichment_id, :entity_type, :entity_id, :created_at, :updated_at)"
                ),
                {
                    "enrichment_id": new_enrichment_id,
                    "entity_type": "snippet_v2",
                    "entity_id": old_enrichment[4],  # snippet_sha
                    "created_at": old_enrichment[2],
                    "updated_at": old_enrichment[3],
                },
            )

        # Drop old enrichments table
        op.drop_table("enrichments")

    # ### end Alembic commands ###
|
|
182
|
+
|
|
183
|
+
|
|
184
|
+
def downgrade() -> None:
    """Downgrade schema.

    Recreates the legacy snippet-bound ``enrichments`` table, copies every
    snippet-associated enrichment back into it (the generic type/subtype
    information is collapsed to the legacy ``"summarization"`` type), then
    drops ``enrichment_associations`` and ``enrichments_v2``.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    # Recreate old enrichments table
    op.create_table(
        "enrichments",
        sa.Column("id", sa.Integer(), autoincrement=True, nullable=False),
        sa.Column(
            "created_at",
            sa.DateTime(timezone=True),
            nullable=False,
        ),
        sa.Column(
            "updated_at",
            sa.DateTime(timezone=True),
            nullable=False,
        ),
        sa.Column("snippet_sha", sa.String(length=64), nullable=False),
        sa.Column("type", sa.String(length=50), nullable=False),
        sa.Column("content", sa.UnicodeText(), nullable=False),
        sa.PrimaryKeyConstraint("id"),
        sa.ForeignKeyConstraint(["snippet_sha"], ["snippets_v2.sha"]),
    )
    op.create_index(
        op.f("ix_enrichments_snippet_sha"), "enrichments", ["snippet_sha"], unique=False
    )
    op.create_index(op.f("ix_enrichments_type"), "enrichments", ["type"], unique=False)

    # Migrate data back from new structure to old table
    connection = op.get_bind()

    # Get enrichments with their associations; only snippet-linked rows can
    # survive the downgrade (other entity types have no legacy home).
    enrichments_with_associations = connection.execute(
        sa.text(
            "SELECT e.id, e.content, e.created_at, e.updated_at, "
            "a.entity_id, a.entity_type "
            "FROM enrichments_v2 e "
            "JOIN enrichment_associations a ON e.id = a.enrichment_id "
            "WHERE a.entity_type = 'snippet_v2'"
        )
    ).fetchall()

    for enrichment in enrichments_with_associations:
        connection.execute(
            sa.text(
                "INSERT INTO enrichments "
                "(content, created_at, updated_at, snippet_sha, type) "
                "VALUES (:content, :created_at, :updated_at, :snippet_sha, :type)"
            ),
            {
                "content": enrichment[1],
                "created_at": enrichment[2],
                "updated_at": enrichment[3],
                "snippet_sha": enrichment[4],  # entity_id holds the snippet sha
                "type": "summarization",  # Default to summarization type
            },
        )

    # Drop indexes before their tables, mirroring upgrade() in reverse order.
    op.drop_index(
        op.f("ix_enrichment_associations_entity_type"),
        table_name="enrichment_associations",
    )
    op.drop_index(
        op.f("ix_enrichment_associations_entity_id"),
        table_name="enrichment_associations",
    )
    op.drop_index(
        op.f("ix_enrichment_associations_enrichment_id"),
        table_name="enrichment_associations",
    )
    op.drop_index("idx_entity_lookup", table_name="enrichment_associations")
    op.drop_table("enrichment_associations")
    op.drop_index(op.f("ix_enrichments_v2_subtype"), table_name="enrichments_v2")
    op.drop_index(op.f("ix_enrichments_v2_type"), table_name="enrichments_v2")
    op.drop_index("idx_type_subtype", table_name="enrichments_v2")
    op.drop_table("enrichments_v2")
    # ### end Alembic commands ###
|
|
@@ -0,0 +1,361 @@
|
|
|
1
|
+
"""Dump Pydantic Settings configuration to markdown."""
|
|
2
|
+
|
|
3
|
+
import argparse
|
|
4
|
+
import inspect
|
|
5
|
+
from pathlib import Path
|
|
6
|
+
from typing import Any, get_args, get_origin
|
|
7
|
+
|
|
8
|
+
import jinja2
|
|
9
|
+
from pydantic import BaseModel
|
|
10
|
+
from pydantic_settings import BaseSettings
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
def get_model_info(model_class: type[BaseModel]) -> dict[str, Any]:
    """Collect documentation-relevant details for a Pydantic model.

    Returns a dict with the model's docstring under ``description`` and, for
    ``BaseSettings`` subclasses, the flattened environment variables under
    ``env_vars`` (an empty list for plain models).
    """
    settings_like = issubclass(model_class, BaseSettings)
    return {
        "description": inspect.getdoc(model_class) or "",
        "env_vars": _extract_env_vars(model_class) if settings_like else [],
    }
|
|
25
|
+
|
|
26
|
+
|
|
27
|
+
def _format_type(type_annotation: Any) -> str: # noqa: C901, PLR0911
|
|
28
|
+
"""Format type annotation for display."""
|
|
29
|
+
if hasattr(type_annotation, "__name__"):
|
|
30
|
+
return type_annotation.__name__
|
|
31
|
+
|
|
32
|
+
origin = get_origin(type_annotation)
|
|
33
|
+
args = get_args(type_annotation)
|
|
34
|
+
|
|
35
|
+
if origin is None:
|
|
36
|
+
return str(type_annotation)
|
|
37
|
+
|
|
38
|
+
if origin is list:
|
|
39
|
+
if args:
|
|
40
|
+
return f"list[{_format_type(args[0])}]"
|
|
41
|
+
return "list"
|
|
42
|
+
|
|
43
|
+
if origin is dict:
|
|
44
|
+
if len(args) >= 2:
|
|
45
|
+
return f"dict[{_format_type(args[0])}, {_format_type(args[1])}]"
|
|
46
|
+
return "dict"
|
|
47
|
+
|
|
48
|
+
if origin is type(None) or origin is type:
|
|
49
|
+
return str(type_annotation)
|
|
50
|
+
|
|
51
|
+
# Handle Union types (including Optional)
|
|
52
|
+
has_union_name = hasattr(origin, "__name__") and origin.__name__ in (
|
|
53
|
+
"UnionType",
|
|
54
|
+
"_UnionGenericAlias",
|
|
55
|
+
)
|
|
56
|
+
is_union = has_union_name or str(origin).startswith("typing.Union")
|
|
57
|
+
if is_union:
|
|
58
|
+
if len(args) == 2 and type(None) in args:
|
|
59
|
+
# Optional type
|
|
60
|
+
non_none_type = next(arg for arg in args if arg is not type(None))
|
|
61
|
+
return f"`{_format_type(non_none_type)} | None`"
|
|
62
|
+
# Union type
|
|
63
|
+
type_names = [_format_type(arg) for arg in args]
|
|
64
|
+
return f"`{' | '.join(type_names)}`"
|
|
65
|
+
|
|
66
|
+
if origin and hasattr(origin, "__name__"):
|
|
67
|
+
if args:
|
|
68
|
+
arg_names = [_format_type(arg) for arg in args]
|
|
69
|
+
return f"{origin.__name__}[{', '.join(arg_names)}]"
|
|
70
|
+
return origin.__name__
|
|
71
|
+
|
|
72
|
+
return str(type_annotation)
|
|
73
|
+
|
|
74
|
+
|
|
75
|
+
def _extract_env_vars(
    settings_class: type[BaseSettings], prefix: str = ""
) -> list[dict[str, str]]:
    """Extract environment variable names from a BaseSettings class with inheritance.

    Returns one entry per field (recursing into nested models) with the keys
    ``name``, ``type``, ``default`` and ``description``.
    """
    from pydantic_core import PydanticUndefined

    env_vars: list[dict[str, str]] = []

    # Bug fix: pydantic-settings v2 stores model_config as a
    # SettingsConfigDict (a plain dict), so attribute access via getattr()
    # could never find env_prefix / env_nested_delimiter and always fell back
    # to the defaults. Read the config as a mapping first; keep attribute
    # access as a fallback for object-style configs.
    config = getattr(settings_class, "model_config", None)
    if isinstance(config, dict):
        env_prefix = config.get("env_prefix") or ""
        env_nested_delimiter = config.get("env_nested_delimiter") or "_"
    elif config:
        env_prefix = getattr(config, "env_prefix", "")
        env_nested_delimiter = getattr(config, "env_nested_delimiter", "_")
    else:
        env_prefix = ""
        env_nested_delimiter = "_"

    def _default_repr(field_info: Any, field_name: str) -> str:
        """Render a field's default value for the docs table."""
        if field_info.default is not PydanticUndefined:
            if field_info.default is None:
                return "None"
            return _format_default_value(field_info.default, field_name)
        if field_info.default_factory is not None:
            try:
                factory_result = field_info.default_factory()  # type: ignore[call-arg]
                return _format_default_value(factory_result, field_name)
            except (TypeError, ValueError, AttributeError):
                return f"{field_info.default_factory.__name__}()"
        return "Required"

    # Generate env vars for each field.
    for field_name, field_info in settings_class.model_fields.items():
        current_prefix = f"{prefix}{env_prefix}{field_name.upper()}"
        field_type = field_info.annotation

        env_vars.append({
            "name": current_prefix,
            "type": _format_type(field_type) if field_type else "Any",
            "default": _default_repr(field_info, field_name),
            "description": field_info.description or "",
        })

        # Recurse into nested models so their fields get prefixed names.
        if field_info.annotation:
            env_vars.extend(
                _extract_nested_env_vars(
                    field_info.annotation, current_prefix + env_nested_delimiter
                )
            )

    return env_vars
|
|
132
|
+
|
|
133
|
+
|
|
134
|
+
def _extract_nested_env_vars(  # noqa: C901, PLR0912
    model_class: type, prefix: str
) -> list[dict[str, str]]:
    """Extract environment variables from nested Pydantic models.

    Unwraps ``Optional``/``Union`` annotations to find a Pydantic model, then
    emits one entry per field (name, type, default, description) with *prefix*
    prepended to the upper-cased field name, recursing into further nesting.
    Returns an empty list when no Pydantic model can be found.
    """
    env_vars: list[dict[str, str]] = []

    # Handle Optional/Union wrappers around the nested model.
    origin = get_origin(model_class)
    args = get_args(model_class)

    if origin and args:
        # Check if it's Optional[SomeModel]: exactly two args, one NoneType.
        if len(args) == 2 and type(None) in args:
            actual_model = next(arg for arg in args if arg is not type(None))
            if _is_pydantic_model(actual_model):
                model_class = actual_model
        # Check if it's Union but not Optional (covers typing.Union and the
        # PEP 604 `X | Y` types.UnionType spellings).
        elif (
            (
                hasattr(origin, "__name__")
                and origin.__name__ in ("Union", "UnionType", "_UnionGenericAlias")
            )
            or str(origin) == "<class 'types.UnionType'>"
        ):
            # For Union types, we'll use the first non-None Pydantic member;
            # bail out entirely when none of the members is a model.
            for arg in args:
                if arg is not type(None) and _is_pydantic_model(arg):
                    model_class = arg
                    break
            else:
                return env_vars

    # After unwrapping, anything that isn't a Pydantic model yields nothing.
    if not _is_pydantic_model(model_class):
        return env_vars

    if not hasattr(model_class, "model_fields"):
        return env_vars

    for field_name, field_info in model_class.model_fields.items():  # type: ignore[attr-defined]
        nested_var_name = f"{prefix}{field_name.upper()}"

        # Extract field information.
        description = field_info.description or ""

        # Extract the default value: explicit default beats factory beats
        # "Required". Factory calls are best-effort — a failing factory is
        # documented by its name instead of its value.
        from pydantic_core import PydanticUndefined
        if field_info.default is not PydanticUndefined:
            if field_info.default is None:
                default_value = "None"
            else:
                default_value = _format_default_value(field_info.default, field_name)
        elif field_info.default_factory is not None:
            try:
                factory_result = field_info.default_factory()  # type: ignore[call-arg]
                default_value = _format_default_value(factory_result, field_name)
            except (TypeError, ValueError, AttributeError):
                default_value = f"{field_info.default_factory.__name__}()"
        else:
            default_value = "Required"

        # Extract type.
        field_type = field_info.annotation
        type_name = _format_type(field_type) if field_type else "Any"

        env_vars.append({
            "name": nested_var_name,
            "type": type_name,
            "default": default_value,
            "description": description,
        })

        # Handle further nesting.
        # NOTE(review): recursion hard-codes "_" as the delimiter, while the
        # top-level caller honors env_nested_delimiter — confirm intended.
        if _is_pydantic_model(field_info.annotation):
            further_nested = _extract_nested_env_vars(
                field_info.annotation, nested_var_name + "_"
            )
            env_vars.extend(further_nested)

    return env_vars
|
|
213
|
+
|
|
214
|
+
|
|
215
|
+
def _is_pydantic_model(type_annotation: Any) -> bool:
|
|
216
|
+
"""Check if a type annotation represents a Pydantic model."""
|
|
217
|
+
try:
|
|
218
|
+
return (
|
|
219
|
+
hasattr(type_annotation, "model_fields")
|
|
220
|
+
and hasattr(type_annotation, "__mro__")
|
|
221
|
+
and BaseModel in type_annotation.__mro__
|
|
222
|
+
)
|
|
223
|
+
except (TypeError, AttributeError):
|
|
224
|
+
return False
|
|
225
|
+
|
|
226
|
+
|
|
227
|
+
def _format_default_value(value: Any, field_name: str) -> str:
|
|
228
|
+
"""Format default values for documentation, handling special cases."""
|
|
229
|
+
from pathlib import Path
|
|
230
|
+
|
|
231
|
+
# Handle Path objects that contain user home directory
|
|
232
|
+
if isinstance(value, Path):
|
|
233
|
+
path_str = str(value)
|
|
234
|
+
# Replace actual home directory with generic placeholder
|
|
235
|
+
home_dir = str(Path.home())
|
|
236
|
+
if path_str.startswith(home_dir):
|
|
237
|
+
return path_str.replace(home_dir, "~")
|
|
238
|
+
|
|
239
|
+
# Handle special field names that we know represent dynamic defaults
|
|
240
|
+
if field_name.lower() == "data_dir" and isinstance(value, Path):
|
|
241
|
+
return "~/.kodit"
|
|
242
|
+
|
|
243
|
+
return str(value)
|
|
244
|
+
|
|
245
|
+
|
|
246
|
+
def _lint_markdown(content: str) -> str:
|
|
247
|
+
"""Apply basic markdown linting rules to clean up formatting."""
|
|
248
|
+
import re
|
|
249
|
+
|
|
250
|
+
lines = content.split("\n")
|
|
251
|
+
cleaned_lines: list[str] = []
|
|
252
|
+
previous_line_empty = False
|
|
253
|
+
in_table = False
|
|
254
|
+
|
|
255
|
+
for original_line in lines:
|
|
256
|
+
# Remove trailing whitespace
|
|
257
|
+
cleaned_line = original_line.rstrip()
|
|
258
|
+
|
|
259
|
+
# Check if current line is empty
|
|
260
|
+
current_line_empty = len(cleaned_line) == 0
|
|
261
|
+
|
|
262
|
+
# Check if we're in a table
|
|
263
|
+
is_table_line = cleaned_line.startswith("|")
|
|
264
|
+
|
|
265
|
+
# Handle table state
|
|
266
|
+
if is_table_line and not in_table:
|
|
267
|
+
# Starting a table - ensure blank line before
|
|
268
|
+
if cleaned_lines and not previous_line_empty:
|
|
269
|
+
cleaned_lines.append("")
|
|
270
|
+
in_table = True
|
|
271
|
+
elif not is_table_line and in_table:
|
|
272
|
+
# Ending a table
|
|
273
|
+
in_table = False
|
|
274
|
+
|
|
275
|
+
# Skip multiple consecutive empty lines (keep only one)
|
|
276
|
+
# But don't skip empty lines in tables
|
|
277
|
+
if current_line_empty and previous_line_empty and not in_table:
|
|
278
|
+
continue
|
|
279
|
+
|
|
280
|
+
# Don't add empty lines within tables
|
|
281
|
+
if current_line_empty and in_table:
|
|
282
|
+
continue
|
|
283
|
+
|
|
284
|
+
cleaned_lines.append(cleaned_line)
|
|
285
|
+
previous_line_empty = current_line_empty
|
|
286
|
+
|
|
287
|
+
# Join lines back together
|
|
288
|
+
result = "\n".join(cleaned_lines)
|
|
289
|
+
|
|
290
|
+
# Ensure file ends with exactly one newline
|
|
291
|
+
result = result.rstrip("\n") + "\n"
|
|
292
|
+
|
|
293
|
+
# Fix spacing around headers (ensure one blank line before, no blank line after)
|
|
294
|
+
result = re.sub(r"\n+(?=^##)", "\n\n", result, flags=re.MULTILINE)
|
|
295
|
+
|
|
296
|
+
# Clean up any remaining multiple newlines (max 2 consecutive)
|
|
297
|
+
return re.sub(r"\n{3,}", "\n\n", result)
|
|
298
|
+
|
|
299
|
+
|
|
300
|
+
|
|
301
|
+
|
|
302
|
+
def extract_all_models() -> dict[str, Any]:
    """Collect model info for every Pydantic model exposed by kodit.config."""
    from kodit import config

    # Walk the config module and keep every concrete BaseModel subclass
    # (excluding BaseModel itself).
    return {
        name: get_model_info(member)
        for name, member in inspect.getmembers(config)
        if inspect.isclass(member)
        and issubclass(member, BaseModel)
        and member is not BaseModel
    }
|
|
314
|
+
|
|
315
|
+
|
|
316
|
+
def main() -> None:
    """Generate configuration documentation from Pydantic Settings.

    Renders the extracted model metadata through a Jinja2 template, lints the
    resulting markdown, and writes it to the requested output path.
    """
    parser = argparse.ArgumentParser(
        prog="dump-config.py",
        description="Generate configuration documentation from Pydantic Settings",
    )
    parser.add_argument(
        "--template",
        default="docs/reference/configuration/templates/template.j2",
        help="Jinja2 template file path",
    )
    parser.add_argument(
        "--output",
        default="docs/reference/configuration/index.md",
        help="Output markdown file path",
    )
    args = parser.parse_args()

    template_path = Path(args.template)
    if not template_path.exists():
        raise FileNotFoundError(f"Template file not found: {template_path}")

    # Render the template against the extracted model metadata.
    with template_path.open("r") as handle:
        source = handle.read()
    rendered = jinja2.Template(source).render(models=extract_all_models())

    # Apply markdown linting before writing.
    cleaned = _lint_markdown(rendered)

    destination = Path(args.output)
    destination.parent.mkdir(parents=True, exist_ok=True)
    with destination.open("w") as handle:
        handle.write(cleaned)
|
kodit/utils/dump_openapi.py
CHANGED
|
@@ -2,7 +2,6 @@
|
|
|
2
2
|
|
|
3
3
|
import argparse
|
|
4
4
|
import json
|
|
5
|
-
import re
|
|
6
5
|
from pathlib import Path
|
|
7
6
|
from typing import Any
|
|
8
7
|
|
|
@@ -21,12 +20,12 @@ if __name__ == "__main__":
|
|
|
21
20
|
app = import_from_string(args.app)
|
|
22
21
|
openapi = app.openapi()
|
|
23
22
|
|
|
24
|
-
|
|
25
|
-
re_version = re.compile(r"^([\d.]+)(?:.+)$")
|
|
26
|
-
git_tag = re_version.match(openapi["info"]["version"])
|
|
23
|
+
git_tag = openapi["info"]["version"]
|
|
27
24
|
if not git_tag:
|
|
28
|
-
raise ValueError(f"Invalid version: {openapi['info']
|
|
29
|
-
|
|
25
|
+
raise ValueError(f"Invalid version: {openapi['info']}")
|
|
26
|
+
# Strip any rcxxx suffix
|
|
27
|
+
git_tag = git_tag.split("rc")[0]
|
|
28
|
+
openapi["info"]["version"] = git_tag
|
|
30
29
|
|
|
31
30
|
output_json_file = Path(args.out_dir) / "openapi.json"
|
|
32
31
|
|