kc-cli 0.4.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- kc/__init__.py +5 -0
- kc/__main__.py +11 -0
- kc/artifacts/__init__.py +1 -0
- kc/artifacts/diff.py +76 -0
- kc/artifacts/frontmatter.py +26 -0
- kc/artifacts/markdown.py +116 -0
- kc/atomic_write.py +33 -0
- kc/cli.py +284 -0
- kc/commands/__init__.py +1 -0
- kc/commands/artifact.py +1190 -0
- kc/commands/citation.py +231 -0
- kc/commands/common.py +346 -0
- kc/commands/conformance.py +293 -0
- kc/commands/context.py +190 -0
- kc/commands/doctor.py +81 -0
- kc/commands/eval.py +133 -0
- kc/commands/export.py +97 -0
- kc/commands/guide.py +571 -0
- kc/commands/index.py +54 -0
- kc/commands/init.py +207 -0
- kc/commands/lint.py +238 -0
- kc/commands/source.py +464 -0
- kc/commands/status.py +52 -0
- kc/commands/task.py +260 -0
- kc/config.py +127 -0
- kc/embedding_models/potion-base-8M/README.md +97 -0
- kc/embedding_models/potion-base-8M/config.json +13 -0
- kc/embedding_models/potion-base-8M/model.safetensors +0 -0
- kc/embedding_models/potion-base-8M/modules.json +14 -0
- kc/embedding_models/potion-base-8M/tokenizer.json +1 -0
- kc/errors.py +141 -0
- kc/fingerprints.py +35 -0
- kc/ids.py +23 -0
- kc/locks.py +65 -0
- kc/models/__init__.py +17 -0
- kc/models/artifact.py +34 -0
- kc/models/citation.py +60 -0
- kc/models/context.py +23 -0
- kc/models/eval.py +21 -0
- kc/models/plan.py +37 -0
- kc/models/source.py +37 -0
- kc/models/source_range.py +29 -0
- kc/models/source_revision.py +19 -0
- kc/models/task.py +35 -0
- kc/output.py +838 -0
- kc/paths.py +126 -0
- kc/provenance/__init__.py +1 -0
- kc/provenance/citations.py +296 -0
- kc/search/__init__.py +1 -0
- kc/search/extract.py +268 -0
- kc/search/fts.py +284 -0
- kc/search/semantic.py +346 -0
- kc/store/__init__.py +1 -0
- kc/store/jsonl.py +55 -0
- kc/store/sqlite.py +444 -0
- kc/store/transaction.py +67 -0
- kc/templates/agents/skills/kc/SKILL.md +282 -0
- kc/templates/agents/skills/kc/agents/openai.yaml +5 -0
- kc/templates/agents/skills/kc/scripts/resolve_query_citations.py +134 -0
- kc/workspace.py +98 -0
- kc_cli-0.4.0.dist-info/METADATA +522 -0
- kc_cli-0.4.0.dist-info/RECORD +65 -0
- kc_cli-0.4.0.dist-info/WHEEL +4 -0
- kc_cli-0.4.0.dist-info/entry_points.txt +2 -0
- kc_cli-0.4.0.dist-info/licenses/LICENSE +21 -0
kc/commands/init.py
ADDED
|
@@ -0,0 +1,207 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
from contextlib import nullcontext
|
|
4
|
+
from importlib.resources import files
|
|
5
|
+
from pathlib import Path
|
|
6
|
+
from typing import Annotated, Any
|
|
7
|
+
|
|
8
|
+
import typer
|
|
9
|
+
|
|
10
|
+
from kc.atomic_write import atomic_write_text
|
|
11
|
+
from kc.commands.common import run, validate_choice
|
|
12
|
+
from kc.config import DEFAULT_CONFIG
|
|
13
|
+
from kc.output import emit_success, warning
|
|
14
|
+
from kc.paths import current_paths, repo_relative
|
|
15
|
+
from kc.store.sqlite import init_db
|
|
16
|
+
from kc.store.transaction import mutation_transaction
|
|
17
|
+
|
|
18
|
+
# Profiles accepted by ``kc init --profile``; only the generic layout exists today.
ALLOWED_PROFILES = {"generic"}
# Sentinel string embedded in kc-generated agent-skill files.  Its presence in an
# existing file marks the file as kc-managed and therefore safe to overwrite on
# re-init (checked by _handle_managed_file).
MANAGED_AGENT_SKILL_MARKER = "kc-managed-agent-skill:v1"
# Repo-relative directories created under the repo root for the bundled agent skill.
AGENT_SKILL_DIRS = [
    Path(".agents"),
    Path(".agents") / "skills",
    Path(".agents") / "skills" / "kc",
    Path(".agents") / "skills" / "kc" / "agents",
    Path(".agents") / "skills" / "kc" / "scripts",
]
# Pairs of (template path parts under kc/templates/agents/skills/kc,
# repo-relative target path the rendered file is written to).
AGENT_SKILL_TEMPLATE_FILES = [
    (("SKILL.md",), Path(".agents") / "skills" / "kc" / "SKILL.md"),
    (("agents", "openai.yaml"), Path(".agents") / "skills" / "kc" / "agents" / "openai.yaml"),
    (
        ("scripts", "resolve_query_citations.py"),
        Path(".agents") / "skills" / "kc" / "scripts" / "resolve_query_citations.py",
    ),
]
|
|
35
|
+
|
|
36
|
+
|
|
37
|
+
def _agent_skill_templates() -> dict[Path, str]:
    """Load the bundled agent-skill template files.

    Returns a mapping from each repo-relative target path to the template's
    text content, read from the package data under kc/templates.
    """
    template_root = files("kc").joinpath("templates", "agents", "skills", "kc")
    templates: dict[Path, str] = {}
    for parts, target in AGENT_SKILL_TEMPLATE_FILES:
        templates[target] = template_root.joinpath(*parts).read_text(encoding="utf-8")
    return templates
|
|
43
|
+
|
|
44
|
+
|
|
45
|
+
def _handle_managed_file(
    path: Path,
    content: str,
    *,
    effective_dry_run: bool,
    created: list[str],
    updated: list[str],
    noop: list[str],
    planned: list[str],
    warnings: list[dict[str, Any]],
) -> None:
    """Create or refresh one kc-managed agent-skill file.

    Appends the file's repo-relative path to exactly one of the accumulator
    lists (``created``/``updated``/``noop``/``planned``) and records a warning
    when an existing, non-kc-managed file is deliberately left untouched.
    """
    rel = repo_relative(path)

    def _preserve(message: str) -> None:
        # Leave the existing file alone and surface a warning for the caller.
        noop.append(rel)
        warnings.append(
            warning(
                "KC_INIT_AGENT_SKILL_CUSTOM",
                message,
                {"path": rel},
            )
        )

    if not path.exists():
        # Fresh file: plan it in dry-run mode, otherwise write it atomically.
        if effective_dry_run:
            planned.append(rel)
            return
        path.parent.mkdir(parents=True, exist_ok=True)
        atomic_write_text(path, content)
        created.append(rel)
        return

    if not path.is_file():
        # The target exists but is a directory/symlink/etc.; never overwrite it.
        _preserve("Existing agent skill path is not a managed file; preserved without overwrite.")
        return

    current = path.read_text(encoding="utf-8")
    if current == content:
        # Already up to date.
        noop.append(rel)
        return

    if MANAGED_AGENT_SKILL_MARKER not in current:
        # The user replaced the file with custom content; keep their version.
        _preserve("Existing agent skill file is not kc-managed; preserved without overwrite.")
        return

    # kc-managed file whose content drifted: refresh it (or plan the refresh).
    if effective_dry_run:
        planned.append(rel)
        return
    atomic_write_text(path, content)
    updated.append(rel)
|
|
97
|
+
|
|
98
|
+
|
|
99
|
+
def register(app: typer.Typer) -> None:
    """Attach the ``kc init`` command to *app*."""

    @app.command("init", help="Create the repo-local kc layout, config, JSONL stores, and SQLite state.")
    def init_command(
        profile: Annotated[
            str, typer.Option("--profile", help="Initialization profile: generic.")
        ] = "generic",
        dry_run: Annotated[
            bool, typer.Option("--dry-run", help="Preview without writing.")
        ] = False,
        yes: Annotated[bool, typer.Option("--yes", help="Create files.")] = False,
    ) -> None:
        def _run() -> None:
            validate_choice(profile, option="--profile", supported=ALLOWED_PROFILES)
            paths = current_paths()
            # Writes only happen with an explicit --yes and no --dry-run.
            effective_dry_run = dry_run or not yes
            # Directory skeleton: data stores, wiki, agent skill, and state dirs.
            dirs = [
                paths.data_dir,
                paths.data_dir / "raw",
                paths.wiki_dir,
                paths.data_dir / "artifacts",
                paths.data_dir / "schemas",
                paths.data_dir / "evals",
                paths.data_dir / "exports",
                *[paths.root / path for path in AGENT_SKILL_DIRS],
                paths.state_dir,
                paths.locks_dir,
                paths.snapshots_dir,
                paths.plans_dir,
                paths.tasks_dir,
                paths.context_dir,
                paths.operations_dir,
                paths.state_dir / "cache",
                paths.state_dir / "logs",
            ]
            # Seed files created once and never overwritten by init.
            # Renamed from ``files`` to stop shadowing ``importlib.resources.files``,
            # which this module imports and uses in _agent_skill_templates.
            seed_files: dict[Path, str] = {
                paths.config_path: DEFAULT_CONFIG,
                paths.sources_jsonl: "",
                paths.source_revisions_jsonl: "",
                paths.ranges_jsonl: "",
                paths.artifacts_jsonl: "",
                paths.citation_edges_jsonl: "",
                paths.wiki_dir / "index.md": "# Knowledge Index\n\n",
                paths.log_path: "# Knowledge Log\n\n",
            }
            created: list[str] = []
            noop: list[str] = []
            planned: list[str] = []
            updated: list[str] = []
            warnings: list[dict[str, Any]] = []
            # Dry runs skip the mutation transaction entirely; ``tx`` is then None.
            transaction = (
                nullcontext()
                if effective_dry_run
                else mutation_transaction(paths, "init", [paths.root])
            )
            with transaction as tx:
                for d in dirs:
                    rel = repo_relative(d)
                    if d.exists():
                        noop.append(rel)
                    elif effective_dry_run:
                        planned.append(rel)
                    else:
                        d.mkdir(parents=True, exist_ok=True)
                        created.append(rel)
                for path, content in seed_files.items():
                    rel = repo_relative(path)
                    if path.exists():
                        noop.append(rel)
                    elif effective_dry_run:
                        planned.append(rel)
                    else:
                        path.parent.mkdir(parents=True, exist_ok=True)
                        atomic_write_text(path, content)
                        created.append(rel)
                # Agent-skill templates are managed files: existing kc-managed
                # copies are refreshed, customized copies are preserved.
                for rel_path, content in _agent_skill_templates().items():
                    _handle_managed_file(
                        paths.root / rel_path,
                        content,
                        effective_dry_run=effective_dry_run,
                        created=created,
                        updated=updated,
                        noop=noop,
                        planned=planned,
                        warnings=warnings,
                    )
                sqlite_rel = repo_relative(paths.sqlite_path)
                if paths.sqlite_path.exists():
                    noop.append(sqlite_rel)
                elif effective_dry_run:
                    planned.append(sqlite_rel)
                else:
                    init_db(paths.sqlite_path)
                    created.append(sqlite_rel)
                # ``tx`` is None for dry runs (nullcontext yields None).
                if tx is not None:
                    tx.commit({"created": created, "updated": updated})
            emit_success(
                "init",
                {
                    "dry_run": effective_dry_run,
                    "profile": profile,
                    "created": created,
                    "updated": updated,
                    "planned": planned,
                    "noop": sorted(set(noop)),
                },
                warnings=warnings,
            )

        run("init", _run)
|
kc/commands/lint.py
ADDED
|
@@ -0,0 +1,238 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import re
|
|
4
|
+
from typing import Annotated
|
|
5
|
+
|
|
6
|
+
import typer
|
|
7
|
+
|
|
8
|
+
from kc.commands.artifact import validate_artifact_file
|
|
9
|
+
from kc.commands.common import (
|
|
10
|
+
load_artifacts,
|
|
11
|
+
load_citation_edges,
|
|
12
|
+
load_ranges,
|
|
13
|
+
load_sources,
|
|
14
|
+
parse_checks,
|
|
15
|
+
run,
|
|
16
|
+
)
|
|
17
|
+
from kc.errors import EXIT_VALIDATION, KcError
|
|
18
|
+
from kc.fingerprints import raw_fingerprint
|
|
19
|
+
from kc.output import emit, emit_success, envelope
|
|
20
|
+
from kc.paths import current_paths, resolve_repo_path
|
|
21
|
+
from kc.store.sqlite import index_status
|
|
22
|
+
|
|
23
|
+
# Matches plan/task identifiers (e.g. ``plan_ABC123``) referenced from the knowledge log.
LOG_REF_RE = re.compile(r"\b(?P<kind>plan|task)_[A-Z0-9]+\b")
# Default check set; mirrors the ``--checks`` option default.  Not referenced in
# this module's visible code — presumably consumed elsewhere; TODO confirm.
DEFAULT_CHECKS = {"citations", "stale", "orphans"}
# Every check name ``--checks`` accepts (also passed as ``all_checks`` to parse_checks).
ALLOWED_CHECKS = {"citations", "stale", "orphans", "duplicates", "index", "log"}
|
|
26
|
+
|
|
27
|
+
|
|
28
|
+
def register(app: typer.Typer) -> None:
    """Attach the ``kc lint`` command to *app*."""

    @app.command("lint", help="Run repository integrity checks for citations, stale sources, and orphaned artifacts.")
    def lint(
        checks: Annotated[
            str,
            typer.Option("--checks", help="Comma-separated checks: citations,stale,orphans."),
        ] = "citations,stale,orphans",
    ) -> None:
        def _run() -> None:
            # Resolve the comma-separated --checks string into the enabled subset.
            enabled = parse_checks(checks, allowed=ALLOWED_CHECKS, all_checks=ALLOWED_CHECKS)
            issues: list[dict] = []
            paths = current_paths()
            # Load every JSONL store up front; all checks work on these snapshots.
            sources = load_sources()
            ranges = load_ranges()
            artifacts = load_artifacts()
            citation_edges = load_citation_edges()
            source_ids = [source.source_id for source in sources]
            range_ids = [source_range.range_id for source_range in ranges]
            artifact_ids = [artifact.artifact_id for artifact in artifacts]
            artifact_paths = {artifact.path for artifact in artifacts}

            # duplicates: flag IDs that appear more than once within a store.
            if "duplicates" in enabled:
                issues.extend(_duplicate_issues("source_id", source_ids, "KC_CONFIG_INVALID"))
                issues.extend(_duplicate_issues("range_id", range_ids, "KC_CONFIG_INVALID"))
                issues.extend(
                    _duplicate_issues("artifact_id", artifact_ids, "KC_ARTIFACT_SCHEMA_INVALID")
                )

            # stale: registered sources whose backing file vanished or changed.
            if "stale" in enabled:
                for source in sources:
                    original = source.metadata.get("original_path")
                    # Sources without a recorded original_path cannot be checked.
                    if not isinstance(original, str):
                        continue
                    path = resolve_repo_path(original, paths.root)
                    if not path.exists():
                        issues.append(
                            {
                                "code": "KC_SOURCE_STALE",
                                "message": f"Source file is missing: {source.uri}",
                                "source_id": source.source_id,
                            }
                        )
                    elif raw_fingerprint(path) != source.fingerprint:
                        issues.append(
                            {
                                "code": "KC_SOURCE_STALE",
                                "message": f"Source fingerprint changed: {source.uri}",
                                "source_id": source.source_id,
                            }
                        )

            # orphans: cross-store references that no longer resolve.
            if "orphans" in enabled:
                source_id_set = set(source_ids)
                range_id_set = set(range_ids)
                for source_range in ranges:
                    if source_range.source_id not in source_id_set:
                        issues.append(
                            {
                                "code": "KC_SOURCE_NOT_FOUND",
                                "message": f"Source range has no registered source: {source_range.range_id}",
                                "range_id": source_range.range_id,
                                "source_id": source_range.source_id,
                            }
                        )
                for edge in citation_edges:
                    if edge.artifact_path not in artifact_paths:
                        issues.append(
                            {
                                "code": "KC_ARTIFACT_NOT_FOUND",
                                "message": f"Citation edge has no registered artifact: {edge.artifact_path}",
                                "edge_id": edge.edge_id,
                                "artifact_path": edge.artifact_path,
                            }
                        )
                    if edge.source_id not in source_id_set:
                        issues.append(
                            {
                                "code": "KC_CITATION_SOURCE_MISSING",
                                "message": f"Citation edge source is missing: {edge.source_id}",
                                "edge_id": edge.edge_id,
                                "source_id": edge.source_id,
                            }
                        )
                    # range_id is optional on an edge; only validate it when set.
                    if edge.range_id and edge.range_id not in range_id_set:
                        issues.append(
                            {
                                "code": "KC_CITATION_RANGE_MISSING",
                                "message": f"Citation edge range is missing: {edge.range_id}",
                                "edge_id": edge.edge_id,
                                "range_id": edge.range_id,
                            }
                        )

            # Per-artifact checks: missing files (orphans) and citation validation.
            for artifact in artifacts:
                artifact_path = resolve_repo_path(artifact.path, paths.root)
                if "orphans" in enabled and not artifact_path.exists():
                    issues.append(
                        {
                            "code": "KC_ARTIFACT_NOT_FOUND",
                            "message": f"Registered artifact file is missing: {artifact.path}",
                            "artifact_id": artifact.artifact_id,
                        }
                    )
                    # Cannot run content checks against a missing file.
                    continue
                if "citations" in enabled and artifact_path.exists():
                    result = validate_artifact_file(artifact_path)
                    if not result["valid"]:
                        for error in result["errors"]:
                            # Tag each validation error with the owning artifact path.
                            issues.append(error | {"artifact_path": artifact.path})

            # index: the SQLite search index must cover the current sources/ranges.
            if "index" in enabled:
                status = index_status(paths.sqlite_path, sources, ranges)
                if status["stale"]:
                    issues.append(
                        {
                            "code": "KC_INDEX_BUILD_FAILED",
                            "message": "SQLite search index is missing or stale.",
                            "index": status,
                        }
                    )

            # log: every plan_/task_ reference in the knowledge log must resolve
            # to an existing JSON record.
            if "log" in enabled and paths.log_path.exists():
                plan_dir = paths.plans_dir
                task_dir = paths.tasks_dir
                for match in LOG_REF_RE.finditer(paths.log_path.read_text(encoding="utf-8")):
                    ref = match.group(0)
                    if ref.startswith("plan_") and not (plan_dir / f"{ref}.json").exists():
                        issues.append(
                            {
                                "code": "KC_ARTIFACT_SCHEMA_INVALID",
                                "message": f"Knowledge log references unknown plan: {ref}",
                                "reference": ref,
                            }
                        )
                    if ref.startswith("task_") and not (task_dir / f"{ref}.json").exists():
                        issues.append(
                            {
                                "code": "KC_ARTIFACT_SCHEMA_INVALID",
                                "message": f"Knowledge log references unknown task: {ref}",
                                "reference": ref,
                            }
                        )
            result = {
                "valid": not issues,
                "checks": sorted(enabled),
                "sources": len(sources),
                "artifacts": len(artifacts),
                "issues": issues,
                "next_commands": _next_commands(issues),
            }
            if issues:
                errors = [
                    KcError(
                        code=str(issue.get("code", "KC_ARTIFACT_SCHEMA_INVALID")),
                        message=str(issue.get("message", "Lint issue.")),
                        details=issue,
                        suggested_action="fix lint issue",
                    ).to_message()
                    for issue in issues
                ]
                # NOTE(review): emit(..., exit_code=...) presumably terminates the
                # process, so emit_success below only runs on the clean path —
                # confirm against kc.output.  The EXIT_VALIDATION fallback is
                # defensive: ``errors`` is non-empty whenever ``issues`` is.
                emit(
                    envelope(
                        "lint",
                        None,
                        ok=False,
                        errors=errors,
                    ),
                    exit_code=max(int(error["exit_code"]) for error in errors)
                    if errors
                    else EXIT_VALIDATION,
                )
            emit_success("lint", result)

        run("lint", _run)
|
|
202
|
+
|
|
203
|
+
|
|
204
|
+
def _duplicate_issues(field: str, values: list[str], code: str) -> list[dict]:
|
|
205
|
+
seen: set[str] = set()
|
|
206
|
+
duplicates: set[str] = set()
|
|
207
|
+
for value in values:
|
|
208
|
+
if value in seen:
|
|
209
|
+
duplicates.add(value)
|
|
210
|
+
seen.add(value)
|
|
211
|
+
return [
|
|
212
|
+
{
|
|
213
|
+
"code": code,
|
|
214
|
+
"message": f"Duplicate {field}: {value}",
|
|
215
|
+
"field": field,
|
|
216
|
+
"value": value,
|
|
217
|
+
}
|
|
218
|
+
for value in sorted(duplicates)
|
|
219
|
+
]
|
|
220
|
+
|
|
221
|
+
|
|
222
|
+
def _next_commands(issues: list[dict]) -> list[str]:
|
|
223
|
+
commands: set[str] = set()
|
|
224
|
+
for issue in issues:
|
|
225
|
+
code = issue.get("code")
|
|
226
|
+
if code == "KC_SOURCE_STALE" and issue.get("source_id"):
|
|
227
|
+
commands.add(f"kc source refresh {issue['source_id']} --dry-run")
|
|
228
|
+
elif code == "KC_INDEX_BUILD_FAILED":
|
|
229
|
+
commands.add("kc index build")
|
|
230
|
+
elif code in {"KC_CITATION_RANGE_MISSING", "KC_CITATION_STALE_SOURCE"}:
|
|
231
|
+
artifact_path = issue.get("artifact_path")
|
|
232
|
+
if artifact_path:
|
|
233
|
+
commands.add(f"kc citation repair --file {artifact_path} --dry-run")
|
|
234
|
+
elif code in {"KC_ARTIFACT_SCHEMA_INVALID", "KC_VALIDATION_MISSING_CITATION"}:
|
|
235
|
+
artifact_path = issue.get("artifact_path")
|
|
236
|
+
if artifact_path:
|
|
237
|
+
commands.add(f"kc artifact validate --file {artifact_path}")
|
|
238
|
+
return sorted(commands)
|