@prsense/workflows 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +201 -0
- package/dist/contract/ResolvedConfig.d.ts +37 -0
- package/dist/contract/ResolvedConfig.d.ts.map +1 -0
- package/dist/contract/ResolvedConfig.js +3 -0
- package/dist/contract/ResolvedConfig.js.map +1 -0
- package/dist/doctor/adaptCapability.d.ts +11 -0
- package/dist/doctor/adaptCapability.d.ts.map +1 -0
- package/dist/doctor/adaptCapability.js +35 -0
- package/dist/doctor/adaptCapability.js.map +1 -0
- package/dist/doctor/buildCapabilityContext.d.ts +3 -0
- package/dist/doctor/buildCapabilityContext.d.ts.map +1 -0
- package/dist/doctor/buildCapabilityContext.js +15 -0
- package/dist/doctor/buildCapabilityContext.js.map +1 -0
- package/dist/doctor/checks/checkEnvConfig.d.ts +3 -0
- package/dist/doctor/checks/checkEnvConfig.d.ts.map +1 -0
- package/dist/doctor/checks/checkEnvConfig.js +18 -0
- package/dist/doctor/checks/checkEnvConfig.js.map +1 -0
- package/dist/doctor/checks/checkLLM.d.ts +3 -0
- package/dist/doctor/checks/checkLLM.d.ts.map +1 -0
- package/dist/doctor/checks/checkLLM.js +46 -0
- package/dist/doctor/checks/checkLLM.js.map +1 -0
- package/dist/doctor/checks/checkRepository.d.ts +3 -0
- package/dist/doctor/checks/checkRepository.d.ts.map +1 -0
- package/dist/doctor/checks/checkRepository.js +18 -0
- package/dist/doctor/checks/checkRepository.js.map +1 -0
- package/dist/doctor/checks/checkUserConfig.d.ts +3 -0
- package/dist/doctor/checks/checkUserConfig.d.ts.map +1 -0
- package/dist/doctor/checks/checkUserConfig.js +16 -0
- package/dist/doctor/checks/checkUserConfig.js.map +1 -0
- package/dist/doctor/doctorWorkflow.d.ts +3 -0
- package/dist/doctor/doctorWorkflow.d.ts.map +1 -0
- package/dist/doctor/doctorWorkflow.js +26 -0
- package/dist/doctor/doctorWorkflow.js.map +1 -0
- package/dist/doctor/types.d.ts +26 -0
- package/dist/doctor/types.d.ts.map +1 -0
- package/dist/doctor/types.js +2 -0
- package/dist/doctor/types.js.map +1 -0
- package/dist/doctor/workflow.d.ts +8 -0
- package/dist/doctor/workflow.d.ts.map +1 -0
- package/dist/doctor/workflow.js +66 -0
- package/dist/doctor/workflow.js.map +1 -0
- package/dist/index/debug.d.ts +16 -0
- package/dist/index/debug.d.ts.map +1 -0
- package/dist/index/debug.js +2 -0
- package/dist/index/debug.js.map +1 -0
- package/dist/index/deps.d.ts +8 -0
- package/dist/index/deps.d.ts.map +1 -0
- package/dist/index/deps.js +2 -0
- package/dist/index/deps.js.map +1 -0
- package/dist/index/events.d.ts +15 -0
- package/dist/index/events.d.ts.map +1 -0
- package/dist/index/events.js +2 -0
- package/dist/index/events.js.map +1 -0
- package/dist/index/helper.d.ts +2 -0
- package/dist/index/helper.d.ts.map +1 -0
- package/dist/index/helper.js +15 -0
- package/dist/index/helper.js.map +1 -0
- package/dist/index/index.d.ts +4 -0
- package/dist/index/index.d.ts.map +1 -0
- package/dist/index/index.js +4 -0
- package/dist/index/index.js.map +1 -0
- package/dist/index/indexWorkflow.d.ts +13 -0
- package/dist/index/indexWorkflow.d.ts.map +1 -0
- package/dist/index/indexWorkflow.js +301 -0
- package/dist/index/indexWorkflow.js.map +1 -0
- package/dist/index/listIndexedRepositories.d.ts +4 -0
- package/dist/index/listIndexedRepositories.d.ts.map +1 -0
- package/dist/index/listIndexedRepositories.js +6 -0
- package/dist/index/listIndexedRepositories.js.map +1 -0
- package/dist/index/ports.d.ts +6 -0
- package/dist/index/ports.d.ts.map +1 -0
- package/dist/index/ports.js +3 -0
- package/dist/index/ports.js.map +1 -0
- package/dist/index/types.d.ts +8 -0
- package/dist/index/types.d.ts.map +1 -0
- package/dist/index/types.js +3 -0
- package/dist/index/types.js.map +1 -0
- package/dist/index.d.ts +9 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +9 -0
- package/dist/index.js.map +1 -0
- package/dist/review/buildDiffEmbeddingQuery.d.ts +8 -0
- package/dist/review/buildDiffEmbeddingQuery.d.ts.map +1 -0
- package/dist/review/buildDiffEmbeddingQuery.js +43 -0
- package/dist/review/buildDiffEmbeddingQuery.js.map +1 -0
- package/dist/review/dedupeSignals.d.ts +3 -0
- package/dist/review/dedupeSignals.d.ts.map +1 -0
- package/dist/review/dedupeSignals.js +11 -0
- package/dist/review/dedupeSignals.js.map +1 -0
- package/dist/review/extractJson.d.ts +2 -0
- package/dist/review/extractJson.d.ts.map +1 -0
- package/dist/review/extractJson.js +15 -0
- package/dist/review/extractJson.js.map +1 -0
- package/dist/review/index.d.ts +4 -0
- package/dist/review/index.d.ts.map +1 -0
- package/dist/review/index.js +4 -0
- package/dist/review/index.js.map +1 -0
- package/dist/review/input/ReviewInput.d.ts +18 -0
- package/dist/review/input/ReviewInput.d.ts.map +1 -0
- package/dist/review/input/ReviewInput.js +3 -0
- package/dist/review/input/ReviewInput.js.map +1 -0
- package/dist/review/normalizeSignal.d.ts +3 -0
- package/dist/review/normalizeSignal.d.ts.map +1 -0
- package/dist/review/normalizeSignal.js +31 -0
- package/dist/review/normalizeSignal.js.map +1 -0
- package/dist/review/ports.d.ts +5 -0
- package/dist/review/ports.d.ts.map +1 -0
- package/dist/review/ports.js +3 -0
- package/dist/review/ports.js.map +1 -0
- package/dist/review/retrieveContext.d.ts +12 -0
- package/dist/review/retrieveContext.d.ts.map +1 -0
- package/dist/review/retrieveContext.js +76 -0
- package/dist/review/retrieveContext.js.map +1 -0
- package/dist/review/reviewWorkflow.d.ts +11 -0
- package/dist/review/reviewWorkflow.d.ts.map +1 -0
- package/dist/review/reviewWorkflow.js +286 -0
- package/dist/review/reviewWorkflow.js.map +1 -0
- package/dist/review/types.d.ts +14 -0
- package/dist/review/types.d.ts.map +1 -0
- package/dist/review/types.js +3 -0
- package/dist/review/types.js.map +1 -0
- package/dist/review/validateReviewOutput.d.ts +4 -0
- package/dist/review/validateReviewOutput.d.ts.map +1 -0
- package/dist/review/validateReviewOutput.js +10 -0
- package/dist/review/validateReviewOutput.js.map +1 -0
- package/dist/setup/setupWorkflow.d.ts +9 -0
- package/dist/setup/setupWorkflow.d.ts.map +1 -0
- package/dist/setup/setupWorkflow.js +81 -0
- package/dist/setup/setupWorkflow.js.map +1 -0
- package/dist/setup/types.d.ts +16 -0
- package/dist/setup/types.d.ts.map +1 -0
- package/dist/setup/types.js +2 -0
- package/dist/setup/types.js.map +1 -0
- package/dist/types/checks.d.ts +17 -0
- package/dist/types/checks.d.ts.map +1 -0
- package/dist/types/checks.js +2 -0
- package/dist/types/checks.js.map +1 -0
- package/dist/types/core.d.ts +17 -0
- package/dist/types/core.d.ts.map +1 -0
- package/dist/types/core.js +2 -0
- package/dist/types/core.js.map +1 -0
- package/dist/types/deps.d.ts +9 -0
- package/dist/types/deps.d.ts.map +1 -0
- package/dist/types/deps.js +2 -0
- package/dist/types/deps.js.map +1 -0
- package/package.json +30 -0
- package/src/doctor/workflow.ts +99 -0
- package/src/index/index.ts +3 -0
- package/src/index/indexWorkflow.ts +411 -0
- package/src/index/listIndexedRepositories.ts +11 -0
- package/src/index/ports.ts +8 -0
- package/src/index/types.ts +11 -0
- package/src/index.ts +13 -0
- package/src/review/buildDiffEmbeddingQuery.ts +66 -0
- package/src/review/dedupeSignals.ts +10 -0
- package/src/review/extractJson.ts +17 -0
- package/src/review/index.ts +3 -0
- package/src/review/input/ReviewInput.ts +22 -0
- package/src/review/normalizeSignal.ts +34 -0
- package/src/review/ports.ts +7 -0
- package/src/review/retrieveContext.ts +105 -0
- package/src/review/reviewWorkflow.ts +366 -0
- package/src/review/types.ts +18 -0
- package/src/review/validateReviewOutput.ts +13 -0
- package/src/setup/setupWorkflow.ts +110 -0
- package/src/setup/types.ts +9 -0
- package/src/types/core.ts +18 -0
- package/tsconfig.json +11 -0
- package/tsconfig.tsbuildinfo +1 -0
|
@@ -0,0 +1,105 @@
|
|
|
1
|
+
// packages/workflows/src/review/retrieveContext.ts
|
|
2
|
+
|
|
3
|
+
import type { RetrievedContext, EventBus, ContextChunk } from "@prsense/core";
|
|
4
|
+
import { CoreEvents } from "@prsense/core";
|
|
5
|
+
import type { ResolvedConfig } from "@prsense/config";
|
|
6
|
+
import { PostgresRagChunkRepository } from "@prsense/context";
|
|
7
|
+
import {
|
|
8
|
+
createOpenAiEmbeddingClient,
|
|
9
|
+
createOllamaEmbeddingClient,
|
|
10
|
+
} from "@prsense/llm";
|
|
11
|
+
|
|
12
|
+
export async function retrieveContext(params: {
|
|
13
|
+
config: ResolvedConfig;
|
|
14
|
+
query: string;
|
|
15
|
+
repoProvider: string;
|
|
16
|
+
repoName: string;
|
|
17
|
+
repoRef?: string;
|
|
18
|
+
limit: number;
|
|
19
|
+
eventBus?: EventBus;
|
|
20
|
+
}): Promise<RetrievedContext> {
|
|
21
|
+
const { config, query, repoProvider, repoName, repoRef, limit } = params;
|
|
22
|
+
|
|
23
|
+
// -------------------------------------------------
|
|
24
|
+
// Create embedding client
|
|
25
|
+
// -------------------------------------------------
|
|
26
|
+
|
|
27
|
+
const embeddingClient =
|
|
28
|
+
config.embeddings.provider === "openai"
|
|
29
|
+
? createOpenAiEmbeddingClient({
|
|
30
|
+
apiKey: process.env.OPENAI_API_KEY!,
|
|
31
|
+
model: config.embeddings.model,
|
|
32
|
+
})
|
|
33
|
+
: createOllamaEmbeddingClient({
|
|
34
|
+
model: config.embeddings.model,
|
|
35
|
+
});
|
|
36
|
+
|
|
37
|
+
const [queryEmbedding] = await embeddingClient.embed([query]);
|
|
38
|
+
|
|
39
|
+
params.eventBus?.emit(CoreEvents.WorkflowReviewContextEmbeddingGenerated, {
|
|
40
|
+
dimension: queryEmbedding?.length,
|
|
41
|
+
});
|
|
42
|
+
|
|
43
|
+
if (!queryEmbedding) {
|
|
44
|
+
throw new Error("Failed to generate query embedding");
|
|
45
|
+
}
|
|
46
|
+
|
|
47
|
+
// -------------------------------------------------
|
|
48
|
+
// Query RAG store
|
|
49
|
+
// -------------------------------------------------
|
|
50
|
+
|
|
51
|
+
const repository = new PostgresRagChunkRepository(config.database.url);
|
|
52
|
+
|
|
53
|
+
const rows = await repository.searchNearest({
|
|
54
|
+
repoProvider,
|
|
55
|
+
repoName,
|
|
56
|
+
...(repoRef ? { repoRef } : {}),
|
|
57
|
+
embedding: queryEmbedding,
|
|
58
|
+
limit,
|
|
59
|
+
});
|
|
60
|
+
|
|
61
|
+
params.eventBus?.emit(CoreEvents.WorkflowReviewContextRetrieved, {
|
|
62
|
+
chunks: rows.length,
|
|
63
|
+
repoProvider,
|
|
64
|
+
repoName,
|
|
65
|
+
minDistance: rows[0]?.distance,
|
|
66
|
+
maxDistance: rows[rows.length - 1]?.distance,
|
|
67
|
+
});
|
|
68
|
+
|
|
69
|
+
// -------------------------------------------------
|
|
70
|
+
// Map to domain objects
|
|
71
|
+
// -------------------------------------------------
|
|
72
|
+
|
|
73
|
+
const chunks: ContextChunk[] = rows.map((row) => {
|
|
74
|
+
const chunk: ContextChunk = {
|
|
75
|
+
id: row.id,
|
|
76
|
+
source: {
|
|
77
|
+
kind: "file",
|
|
78
|
+
path: row.path,
|
|
79
|
+
},
|
|
80
|
+
content: row.content,
|
|
81
|
+
metadata: {
|
|
82
|
+
path: row.path,
|
|
83
|
+
...(row.lineStart != null ? { lineStart: row.lineStart } : {}),
|
|
84
|
+
...(row.lineEnd != null ? { lineEnd: row.lineEnd } : {}),
|
|
85
|
+
...(row.language != null ? { language: row.language } : {}),
|
|
86
|
+
},
|
|
87
|
+
};
|
|
88
|
+
|
|
89
|
+
params.eventBus?.emit(CoreEvents.WorkflowReviewContextChunkRetrieved, {
|
|
90
|
+
source: chunk.source,
|
|
91
|
+
metadata: chunk.metadata,
|
|
92
|
+
distance: row.distance,
|
|
93
|
+
});
|
|
94
|
+
|
|
95
|
+
return chunk;
|
|
96
|
+
});
|
|
97
|
+
|
|
98
|
+
return {
|
|
99
|
+
chunks,
|
|
100
|
+
stats: {
|
|
101
|
+
totalChunks: rows.length,
|
|
102
|
+
truncated: rows.length === limit,
|
|
103
|
+
},
|
|
104
|
+
};
|
|
105
|
+
}
|
|
@@ -0,0 +1,366 @@
|
|
|
1
|
+
// packages/workflows/src/review/reviewWorkflow.ts
|
|
2
|
+
|
|
3
|
+
import { CoreEvents, EventBus, buildReviewPrompt } from "@prsense/core";
|
|
4
|
+
import type { ReviewSignal, DiffProvider } from "@prsense/core";
|
|
5
|
+
import type { ResolvedConfig, CredentialContext } from "@prsense/config";
|
|
6
|
+
import { PostgresIndexMetadataRepository } from "@prsense/context";
|
|
7
|
+
import {
|
|
8
|
+
createOpenAiClient,
|
|
9
|
+
createOllamaClient,
|
|
10
|
+
createGoogleClient,
|
|
11
|
+
createAnthropicClient,
|
|
12
|
+
type LlmClient,
|
|
13
|
+
type LlmUsage,
|
|
14
|
+
} from "@prsense/llm";
|
|
15
|
+
import type { ReviewWorkflowResult } from "./types.js";
|
|
16
|
+
import { validateReviewOutput } from "./validateReviewOutput.js";
|
|
17
|
+
import { buildDiffEmbeddingQuery } from "./buildDiffEmbeddingQuery.js";
|
|
18
|
+
import { dedupeSignals } from "./dedupeSignals.js";
|
|
19
|
+
import { retrieveContext } from "./retrieveContext.js";
|
|
20
|
+
import { normalizeSignal } from "./normalizeSignal.js";
|
|
21
|
+
import { extractJson } from "./extractJson.js";
|
|
22
|
+
|
|
23
|
+
/**
 * Runs the end-to-end review workflow: loads the diff, decides whether
 * contextual (RAG-backed) review is possible, retrieves context, then
 * generates, validates, normalizes, thresholds and dedupes review signals
 * — one LLM request per changed file.
 *
 * Any error thrown inside the per-file loop aborts the entire run (caught
 * at the bottom), so a single bad file yields a "failure" result with no
 * signals.
 *
 * @returns "success" with the compiled signals (and aggregated token usage
 *          when the provider reported any), or "failure" with empty signals.
 */
export async function runReviewWorkflow({
  config,
  credentials,
  diffProvider,
  eventBus,
}: {
  config: ResolvedConfig;
  credentials: CredentialContext;
  diffProvider: DiffProvider;
  eventBus: EventBus;
}): Promise<ReviewWorkflowResult> {
  eventBus.emit(CoreEvents.WorkflowReviewStarted);

  try {
    // -------------------------------------------------
    // Load diff from provider
    // -------------------------------------------------

    const { diff, revision, repositoryIdentity, metadata } =
      await diffProvider.load();

    const metadataRepository = new PostgresIndexMetadataRepository(
      config.database.url,
    );

    const storedMetadata = await metadataRepository.load(
      repositoryIdentity.provider,
      repositoryIdentity.id,
    );

    // Contextual review requires a stored index whose embedding
    // provider/model matches the current config (otherwise stored vectors
    // are incompatible with the query embedding).
    let contextualReviewAvailable = false;

    if (storedMetadata) {
      const embeddingMatches =
        storedMetadata.embedding.provider === config.embeddings.provider &&
        storedMetadata.embedding.model === config.embeddings.model;

      if (embeddingMatches) {
        contextualReviewAvailable = true;

        // Index is usable but stale: warn, yet still proceed with it.
        if (storedMetadata.revision.commitSha !== revision) {
          eventBus.emit(CoreEvents.WorkflowReviewIndexOutdated, {
            indexedCommit: storedMetadata.revision.commitSha,
            currentCommit: revision,
          });
        }
      }
    }

    // Summary event about context availability. NOTE(review): when the
    // embedding config mismatches, IndexOutdated is emitted even though the
    // commit may match — confirm this naming is intentional.
    if (!storedMetadata) {
      eventBus.emit(CoreEvents.WorkflowReviewContextUnavailable);
    } else if (!contextualReviewAvailable) {
      eventBus.emit(CoreEvents.WorkflowReviewIndexOutdated, {
        indexedCommit: storedMetadata.revision.commitSha,
        currentCommit: revision,
      });
    } else {
      eventBus.emit(CoreEvents.WorkflowReviewContextAvailable);
    }

    // Empty diff: nothing to review, succeed with no signals.
    if (diff.files.length === 0) {
      eventBus.emit(CoreEvents.WorkflowReviewFinished);
      return {
        outcome: "success",
        payload: { signals: [] },
      };
    }

    // -------------------------------------------------
    // Retrieve contextual chunks (RAG) if available
    // -------------------------------------------------

    let contextText = "";

    if (contextualReviewAvailable) {
      const retrievalQuery = buildDiffEmbeddingQuery({
        diff,
        ...(metadata?.title ? { title: metadata.title } : {}),
        ...(metadata?.description ? { description: metadata.description } : {}),
      });
      eventBus.emit(CoreEvents.WorkflowReviewContextQueryBuilt, {
        preview: retrievalQuery.slice(0, 500),
      });

      const retrieved = await retrieveContext({
        config,
        query: retrievalQuery,
        repoProvider: repositoryIdentity.provider,
        repoName: repositoryIdentity.id,
        limit: config.context.maxChunks,
        eventBus,
      });

      // context size guard: cap total context fed to the LLM. Chunks are
      // taken in retrieval order; the first chunk that would overflow the
      // budget stops accumulation entirely.
      const MAX_CONTEXT_CHARS = 20000;

      let accumulated = "";
      for (const chunk of retrieved.chunks) {
        if (accumulated.length + chunk.content.length > MAX_CONTEXT_CHARS)
          break;
        accumulated += chunk.content + "\n\n";
      }

      contextText = accumulated;

      eventBus.emit(CoreEvents.WorkflowReviewContextRetrieved, {
        chunks: retrieved.stats.totalChunks,
        truncated: retrieved.stats.truncated,
        contextChars: contextText.length,
      });
    }

    // -------------------------------------------------
    // Create LLM client
    // -------------------------------------------------

    let llmClient: LlmClient;

    switch (config.llm.provider) {
      case "openai": {
        const apiKey = credentials.openai?.apiKey;
        if (!apiKey) {
          throw new Error("OpenAI credentials missing");
        }

        llmClient = createOpenAiClient({
          apiKey,
          model: config.llm.model,
          temperature: config.llm.temperature,
        });
        break;
      }

      case "google": {
        const apiKey = credentials.google?.apiKey;
        if (!apiKey) {
          throw new Error("Google credentials missing");
        }

        llmClient = createGoogleClient({
          apiKey,
          model: config.llm.model,
          temperature: config.llm.temperature,
        });
        break;
      }

      case "anthropic": {
        const apiKey = credentials.anthropic?.apiKey;
        if (!apiKey) {
          throw new Error("Anthropic credentials missing");
        }

        llmClient = createAnthropicClient({
          apiKey,
          model: config.llm.model,
          temperature: config.llm.temperature,
        });
        break;
      }

      // Ollama is the fallback: local, no credentials required.
      case "ollama":
      default:
        llmClient = createOllamaClient({
          model: config.llm.model,
          temperature: config.llm.temperature,
        });
    }

    // -------------------------------------------------
    // Generate review
    // -------------------------------------------------

    const allSignals: ReviewSignal[] = [];
    let totalUsage: LlmUsage | undefined;

    for (const file of diff.files) {
      // Skip empty or trivial patches
      if (!file.patch || file.patch.length < 40) {
        continue;
      }

      // skip binary/non code files
      if (
        file.path.endsWith(".png") ||
        file.path.endsWith(".jpg") ||
        file.path.endsWith(".svg") ||
        file.path.endsWith(".lock")
      ) {
        continue;
      }

      eventBus.emit(CoreEvents.WorkflowReviewFileStarted, {
        file: file.path,
      });
      // Each file is reviewed in isolation with a single-file diff.
      const fileDiff = {
        files: [file],
      };
      const prompt = buildReviewPrompt({
        diff: fileDiff,
        context: contextText,
        ...(metadata ?? {}),
      });

      eventBus.emit(CoreEvents.WorkflowReviewPromptBuilt, {
        model: config.llm.model,
        provider: config.llm.provider,
        promptChars: JSON.stringify(prompt).length,
        preview: JSON.stringify(prompt).slice(0, 2000),
      });
      eventBus.emit(CoreEvents.WorkflowReviewLlmRequestStarted);

      const start = Date.now();

      const response = await llmClient.generate({ prompt });

      eventBus.emit(CoreEvents.WorkflowReviewLlmResponseReceived, {
        outputChars: response.text.length,
        usage: response.usage,
        durationMs: Date.now() - start,
      });
      eventBus.emit(CoreEvents.WorkflowReviewLlmRawResponse, {
        preview: response.text.slice(0, 1000),
        fullLength: response.text.length,
      });

      // Accumulate token usage across all per-file requests.
      if (response.usage) {
        if (!totalUsage) {
          totalUsage = { ...response.usage };
        } else {
          totalUsage.promptTokens += response.usage.promptTokens;
          totalUsage.completionTokens += response.usage.completionTokens;
          totalUsage.totalTokens += response.usage.totalTokens;
        }
      }

      const cleaned = extractJson(response.text);
      // Cheap truncation heuristic: a complete JSON object must end with
      // "}" (or "]}" for a trailing array close).
      const trimmed = cleaned.trim();
      if (!(trimmed.endsWith("}") || trimmed.endsWith("]}"))) {
        throw new Error("LLM response truncated");
      }

      let parsed: any;

      try {
        parsed = JSON.parse(cleaned);
      } catch {
        eventBus.emit(CoreEvents.WorkflowReviewInvalidJson, {
          rawResponsePreview: response.text.slice(0, 2000),
        });
        throw new Error("LLM returned invalid JSON");
      }
      let validated: ReturnType<typeof validateReviewOutput>;

      // Schema validation with a single correction retry: if the parsed
      // JSON does not match the expected shape, ask the model to repair it.
      // A second failure propagates and aborts the whole run.
      try {
        validated = validateReviewOutput(parsed);
      } catch {
        const correctionPrompt = `
The previous output did not match the required schema.

Convert the following into valid JSON matching this schema:

{
  "signals": [
    {
      "type": "bug" | "risk" | "test" | "style",
      "severity": "low" | "medium" | "high",
      "confidence": number,
      "file": string,
      "lineStart": number | null,
      "lineEnd": number | null,
      "message": string,
      "rationale": string | null,
      "suggestedFix": string | null
    }
  ]
}

Output only valid JSON.

Previous output:
${cleaned}
`;

        const retry = await llmClient.generate({
          prompt: {
            system: "You are correcting malformed JSON output.",
            user: correctionPrompt,
          },
        });

        const retryCleaned = extractJson(retry.text);
        parsed = JSON.parse(retryCleaned);
        validated = validateReviewOutput(parsed);
      }

      allSignals.push(...validated.signals);
    }

    // -------------------------------------------------
    // Normalize + filter signals
    // -------------------------------------------------

    const rawSignals = allSignals;

    // Drop signals that cannot be normalized into a well-formed ReviewSignal.
    const normalized = rawSignals
      .map(normalizeSignal)
      .filter((s): s is ReviewSignal => s != null);

    // Drop low-confidence signals per configured threshold.
    const thresholded = normalized.filter(
      (s) => s.confidence >= config.review.confidenceThreshold,
    );

    const signals = dedupeSignals(thresholded);

    eventBus.emit(CoreEvents.SignalCompiled, {
      count: signals.length,
    });

    eventBus.emit(CoreEvents.WorkflowReviewFinished);

    // `usage` is included only when at least one response reported usage.
    if (totalUsage) {
      return {
        outcome: "success",
        payload: { signals, usage: totalUsage },
      };
    }
    return {
      outcome: "success",
      payload: { signals },
    };
  } catch (err) {
    const message = err instanceof Error ? err.message : String(err);

    eventBus.emit(CoreEvents.WorkflowReviewFailed, {
      error: message,
    });

    return {
      outcome: "failure",
      payload: { signals: [] },
    };
  }
}
|
|
@@ -0,0 +1,18 @@
|
|
|
1
|
+
// packages/workflows/src/review/types.ts
|
|
2
|
+
|
|
3
|
+
import type { ReviewSignal, RetrievedContext } from "@prsense/core";
|
|
4
|
+
import type { LlmUsage } from "@prsense/llm";
|
|
5
|
+
import { WorkflowResult } from "../types/core.js";
|
|
6
|
+
|
|
7
|
+
/**
 * Payload produced by the review workflow.
 */
export type ReviewPayload = {
  // Final normalized, thresholded, and deduplicated review signals.
  signals: ReviewSignal[];
  // Aggregated LLM token usage across all per-file requests; absent when
  // the provider reported no usage.
  usage?: LlmUsage;
};

/** Standard workflow envelope around {@link ReviewPayload}. */
export type ReviewWorkflowResult = WorkflowResult<ReviewPayload>;

/**
 * Result of a standalone context-retrieval run.
 * Unlike WorkflowResult, `payload` is optional here: it is absent on failure,
 * and `error` carries the failure message instead.
 */
export type RetrievalWorkflowResult = {
  outcome: "success" | "failure";
  payload?: RetrievedContext;
  error?: string;
};
|
|
@@ -0,0 +1,13 @@
|
|
|
1
|
+
export function validateReviewOutput(parsed: any): {
|
|
2
|
+
signals: any[];
|
|
3
|
+
} {
|
|
4
|
+
if (!parsed || typeof parsed !== "object") {
|
|
5
|
+
throw new Error("Review output must be an object");
|
|
6
|
+
}
|
|
7
|
+
|
|
8
|
+
if (!Array.isArray(parsed.signals)) {
|
|
9
|
+
throw new Error("Review output missing 'signals' array");
|
|
10
|
+
}
|
|
11
|
+
|
|
12
|
+
return parsed;
|
|
13
|
+
}
|
|
@@ -0,0 +1,110 @@
|
|
|
1
|
+
// packages/workflows/src/setup/runSetupWorkflow.ts
|
|
2
|
+
import type { Capability, CapabilityContext } from "@prsense/preflight";
|
|
3
|
+
import type { EventBus } from "@prsense/core";
|
|
4
|
+
import { CoreEvents } from "@prsense/core";
|
|
5
|
+
|
|
6
|
+
import type { SetupWorkflowResult, SetupStepResult } from "./types.js";
|
|
7
|
+
|
|
8
|
+
export async function runSetupWorkflow({
|
|
9
|
+
capabilities,
|
|
10
|
+
ctx,
|
|
11
|
+
eventBus,
|
|
12
|
+
}: {
|
|
13
|
+
capabilities: Capability[];
|
|
14
|
+
ctx: CapabilityContext;
|
|
15
|
+
eventBus: EventBus;
|
|
16
|
+
}): Promise<SetupWorkflowResult> {
|
|
17
|
+
eventBus.emit(CoreEvents.WorkflowSetupStarted);
|
|
18
|
+
|
|
19
|
+
const steps: SetupStepResult[] = [];
|
|
20
|
+
|
|
21
|
+
for (const cap of capabilities) {
|
|
22
|
+
eventBus.emit(CoreEvents.CapabilityCheckStarted, {
|
|
23
|
+
capability: cap.id,
|
|
24
|
+
});
|
|
25
|
+
|
|
26
|
+
const status = await cap.check(ctx);
|
|
27
|
+
|
|
28
|
+
eventBus.emit(CoreEvents.CapabilityCheckFinished, {
|
|
29
|
+
capability: cap.id,
|
|
30
|
+
status: status.kind,
|
|
31
|
+
});
|
|
32
|
+
|
|
33
|
+
// 1️⃣ Not applicable → skip silently
|
|
34
|
+
if (status.kind === "non-applicable") {
|
|
35
|
+
steps.push({ id: cap.id, outcome: "skipped" });
|
|
36
|
+
continue;
|
|
37
|
+
}
|
|
38
|
+
|
|
39
|
+
// 2️⃣ Already ready → skip
|
|
40
|
+
if (status.kind === "ready") {
|
|
41
|
+
steps.push({ id: cap.id, outcome: "skipped" });
|
|
42
|
+
continue;
|
|
43
|
+
}
|
|
44
|
+
|
|
45
|
+
// 3️⃣ Partial → never auto-fix
|
|
46
|
+
if (status.kind === "partial") {
|
|
47
|
+
steps.push({
|
|
48
|
+
id: cap.id,
|
|
49
|
+
outcome: "failed",
|
|
50
|
+
error: status.reason,
|
|
51
|
+
});
|
|
52
|
+
break;
|
|
53
|
+
}
|
|
54
|
+
|
|
55
|
+
// 4️⃣ Missing → attempt apply if possible
|
|
56
|
+
if (status.kind === "missing") {
|
|
57
|
+
if (!cap.apply) {
|
|
58
|
+
steps.push({
|
|
59
|
+
id: cap.id,
|
|
60
|
+
outcome: "failed",
|
|
61
|
+
error: status.reason,
|
|
62
|
+
});
|
|
63
|
+
break;
|
|
64
|
+
}
|
|
65
|
+
|
|
66
|
+
eventBus.emit(CoreEvents.TaskStarted, {
|
|
67
|
+
taskId: `setup.${cap.id}`,
|
|
68
|
+
label: `Setting up ${cap.description}`,
|
|
69
|
+
});
|
|
70
|
+
|
|
71
|
+
try {
|
|
72
|
+
await cap.apply(ctx);
|
|
73
|
+
|
|
74
|
+
eventBus.emit(CoreEvents.TaskSucceeded, {
|
|
75
|
+
taskId: `setup.${cap.id}`,
|
|
76
|
+
});
|
|
77
|
+
|
|
78
|
+
steps.push({
|
|
79
|
+
id: cap.id,
|
|
80
|
+
outcome: "applied",
|
|
81
|
+
});
|
|
82
|
+
} catch (err) {
|
|
83
|
+
const message = err instanceof Error ? err.message : String(err);
|
|
84
|
+
|
|
85
|
+
eventBus.emit(CoreEvents.TaskFailed, {
|
|
86
|
+
taskId: `setup.${cap.id}`,
|
|
87
|
+
error: message,
|
|
88
|
+
});
|
|
89
|
+
|
|
90
|
+
steps.push({
|
|
91
|
+
id: cap.id,
|
|
92
|
+
outcome: "failed",
|
|
93
|
+
error: message,
|
|
94
|
+
});
|
|
95
|
+
break;
|
|
96
|
+
}
|
|
97
|
+
}
|
|
98
|
+
}
|
|
99
|
+
|
|
100
|
+
const failed = steps.some((s) => s.outcome === "failed");
|
|
101
|
+
|
|
102
|
+
eventBus.emit(CoreEvents.WorkflowSetupFinished, {
|
|
103
|
+
outcome: failed ? "failure" : "success",
|
|
104
|
+
});
|
|
105
|
+
|
|
106
|
+
return {
|
|
107
|
+
outcome: failed ? "failure" : "success",
|
|
108
|
+
steps,
|
|
109
|
+
};
|
|
110
|
+
}
|
|
@@ -0,0 +1,9 @@
|
|
|
1
|
+
/**
 * Outcome of a single capability during setup, discriminated on `outcome`:
 * - "skipped": nothing to do (already ready, or not applicable).
 * - "applied": the capability's fix ran successfully.
 * - "failed": the check or fix failed; `error` carries the reason.
 */
export type SetupStepResult =
  | { id: string; outcome: "skipped" }
  | { id: string; outcome: "applied" }
  | { id: string; outcome: "failed"; error: string };

/**
 * Aggregate result of the setup workflow: the overall outcome plus one
 * step entry per capability processed (processing stops at the first
 * failure, so `steps` may be shorter than the capability list).
 */
export type SetupWorkflowResult = {
  outcome: "success" | "failure";
  steps: SetupStepResult[];
};
|
|
@@ -0,0 +1,18 @@
|
|
|
1
|
+
/**
 * High-level outcome of a workflow.
 * This is NOT presentation and NOT process exit.
 */
export type WorkflowOutcome = "success" | "failure";

/**
 * Base shape returned by all workflows.
 * A payload is present in both success and failure cases; failed workflows
 * typically return an empty/neutral payload rather than omitting it.
 */
export type WorkflowResult<TPayload> = {
  outcome: WorkflowOutcome;

  /**
   * Structured payload produced by the workflow.
   * Interpretation is workflow-specific.
   */
  payload: TPayload;
};
|