foundry-mcp 0.3.3__py3-none-any.whl → 0.8.10__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- foundry_mcp/__init__.py +7 -1
- foundry_mcp/cli/__init__.py +0 -13
- foundry_mcp/cli/commands/plan.py +10 -3
- foundry_mcp/cli/commands/review.py +19 -4
- foundry_mcp/cli/commands/session.py +1 -8
- foundry_mcp/cli/commands/specs.py +38 -208
- foundry_mcp/cli/context.py +39 -0
- foundry_mcp/cli/output.py +3 -3
- foundry_mcp/config.py +615 -11
- foundry_mcp/core/ai_consultation.py +146 -9
- foundry_mcp/core/batch_operations.py +1196 -0
- foundry_mcp/core/discovery.py +7 -7
- foundry_mcp/core/error_store.py +2 -2
- foundry_mcp/core/intake.py +933 -0
- foundry_mcp/core/llm_config.py +28 -2
- foundry_mcp/core/metrics_store.py +2 -2
- foundry_mcp/core/naming.py +25 -2
- foundry_mcp/core/progress.py +70 -0
- foundry_mcp/core/prometheus.py +0 -13
- foundry_mcp/core/prompts/fidelity_review.py +149 -4
- foundry_mcp/core/prompts/markdown_plan_review.py +5 -1
- foundry_mcp/core/prompts/plan_review.py +5 -1
- foundry_mcp/core/providers/__init__.py +12 -0
- foundry_mcp/core/providers/base.py +39 -0
- foundry_mcp/core/providers/claude.py +51 -48
- foundry_mcp/core/providers/codex.py +70 -60
- foundry_mcp/core/providers/cursor_agent.py +25 -47
- foundry_mcp/core/providers/detectors.py +34 -7
- foundry_mcp/core/providers/gemini.py +69 -58
- foundry_mcp/core/providers/opencode.py +101 -47
- foundry_mcp/core/providers/package-lock.json +4 -4
- foundry_mcp/core/providers/package.json +1 -1
- foundry_mcp/core/providers/validation.py +128 -0
- foundry_mcp/core/research/__init__.py +68 -0
- foundry_mcp/core/research/memory.py +528 -0
- foundry_mcp/core/research/models.py +1220 -0
- foundry_mcp/core/research/providers/__init__.py +40 -0
- foundry_mcp/core/research/providers/base.py +242 -0
- foundry_mcp/core/research/providers/google.py +507 -0
- foundry_mcp/core/research/providers/perplexity.py +442 -0
- foundry_mcp/core/research/providers/semantic_scholar.py +544 -0
- foundry_mcp/core/research/providers/tavily.py +383 -0
- foundry_mcp/core/research/workflows/__init__.py +25 -0
- foundry_mcp/core/research/workflows/base.py +298 -0
- foundry_mcp/core/research/workflows/chat.py +271 -0
- foundry_mcp/core/research/workflows/consensus.py +539 -0
- foundry_mcp/core/research/workflows/deep_research.py +4020 -0
- foundry_mcp/core/research/workflows/ideate.py +682 -0
- foundry_mcp/core/research/workflows/thinkdeep.py +405 -0
- foundry_mcp/core/responses.py +690 -0
- foundry_mcp/core/spec.py +2439 -236
- foundry_mcp/core/task.py +1205 -31
- foundry_mcp/core/testing.py +512 -123
- foundry_mcp/core/validation.py +319 -43
- foundry_mcp/dashboard/components/charts.py +0 -57
- foundry_mcp/dashboard/launcher.py +11 -0
- foundry_mcp/dashboard/views/metrics.py +25 -35
- foundry_mcp/dashboard/views/overview.py +1 -65
- foundry_mcp/resources/specs.py +25 -25
- foundry_mcp/schemas/intake-schema.json +89 -0
- foundry_mcp/schemas/sdd-spec-schema.json +33 -5
- foundry_mcp/server.py +0 -14
- foundry_mcp/tools/unified/__init__.py +39 -18
- foundry_mcp/tools/unified/authoring.py +2371 -248
- foundry_mcp/tools/unified/documentation_helpers.py +69 -6
- foundry_mcp/tools/unified/environment.py +434 -32
- foundry_mcp/tools/unified/error.py +18 -1
- foundry_mcp/tools/unified/lifecycle.py +8 -0
- foundry_mcp/tools/unified/plan.py +133 -2
- foundry_mcp/tools/unified/provider.py +0 -40
- foundry_mcp/tools/unified/research.py +1283 -0
- foundry_mcp/tools/unified/review.py +374 -17
- foundry_mcp/tools/unified/review_helpers.py +16 -1
- foundry_mcp/tools/unified/server.py +9 -24
- foundry_mcp/tools/unified/spec.py +367 -0
- foundry_mcp/tools/unified/task.py +1664 -30
- foundry_mcp/tools/unified/test.py +69 -8
- {foundry_mcp-0.3.3.dist-info → foundry_mcp-0.8.10.dist-info}/METADATA +8 -1
- foundry_mcp-0.8.10.dist-info/RECORD +153 -0
- foundry_mcp/cli/flags.py +0 -266
- foundry_mcp/core/feature_flags.py +0 -592
- foundry_mcp-0.3.3.dist-info/RECORD +0 -135
- {foundry_mcp-0.3.3.dist-info → foundry_mcp-0.8.10.dist-info}/WHEEL +0 -0
- {foundry_mcp-0.3.3.dist-info → foundry_mcp-0.8.10.dist-info}/entry_points.txt +0 -0
- {foundry_mcp-0.3.3.dist-info → foundry_mcp-0.8.10.dist-info}/licenses/LICENSE +0 -0
foundry_mcp/core/research/workflows/ideate.py
@@ -0,0 +1,682 @@
+"""IDEATE workflow for creative brainstorming with phased execution.
+
+Provides creative ideation capabilities with multi-perspective generation,
+idea clustering, scoring, and elaboration phases.
+"""
+
+import logging
+from typing import Any, Optional
+
+from foundry_mcp.config import ResearchConfig
+from foundry_mcp.core.research.memory import ResearchMemory
+from foundry_mcp.core.research.models import (
+    Idea,
+    IdeaCluster,
+    IdeationPhase,
+    IdeationState,
+)
+from foundry_mcp.core.research.workflows.base import ResearchWorkflowBase, WorkflowResult
+
+logger = logging.getLogger(__name__)
+
+
+class IdeateWorkflow(ResearchWorkflowBase):
+    """Creative brainstorming workflow with phased execution.
+
+    Features:
+    - Divergent phase: Multi-perspective idea generation
+    - Convergent phase: Idea clustering and scoring
+    - Selection phase: Mark clusters for elaboration
+    - Elaboration phase: Develop selected clusters
+    - Persistent state across sessions
+    """
+
+    def __init__(
+        self,
+        config: ResearchConfig,
+        memory: Optional[ResearchMemory] = None,
+    ) -> None:
+        """Initialize ideate workflow.
+
+        Args:
+            config: Research configuration
+            memory: Optional memory instance
+        """
+        super().__init__(config, memory)
+
+    def execute(
+        self,
+        topic: Optional[str] = None,
+        ideation_id: Optional[str] = None,
+        action: str = "generate",
+        perspective: Optional[str] = None,
+        cluster_ids: Optional[list[str]] = None,
+        system_prompt: Optional[str] = None,
+        provider_id: Optional[str] = None,
+        perspectives: Optional[list[str]] = None,
+        scoring_criteria: Optional[list[str]] = None,
+        **kwargs: Any,
+    ) -> WorkflowResult:
+        """Execute an ideation action.
+
+        Args:
+            topic: Topic for new ideation session
+            ideation_id: Existing session to continue
+            action: Action to perform (generate, cluster, score, select, elaborate)
+            perspective: Specific perspective for idea generation
+            cluster_ids: Cluster IDs for selection/elaboration
+            system_prompt: System prompt for new sessions
+            provider_id: Provider to use
+            perspectives: Custom perspectives (uses config default if None)
+            scoring_criteria: Custom scoring criteria
+
+        Returns:
+            WorkflowResult with ideation results
+        """
+        # Get or create state
+        if ideation_id:
+            state = self.memory.load_ideation(ideation_id)
+            if not state:
+                return WorkflowResult(
+                    success=False,
+                    content="",
+                    error=f"Ideation session {ideation_id} not found",
+                )
+        elif topic:
+            state = IdeationState(
+                topic=topic,
+                perspectives=perspectives or self.config.ideate_perspectives,
+                scoring_criteria=scoring_criteria or ["novelty", "feasibility", "impact"],
+                system_prompt=system_prompt,
+            )
+        else:
+            return WorkflowResult(
+                success=False,
+                content="",
+                error="Either 'topic' (for new session) or 'ideation_id' (to continue) is required",
+            )
+
+        # Dispatch to action handler
+        if action == "generate":
+            result = self._generate_ideas(state, perspective, provider_id)
+        elif action == "cluster":
+            result = self._cluster_ideas(state, provider_id)
+        elif action == "score":
+            result = self._score_ideas(state, provider_id)
+        elif action == "select":
+            result = self._select_clusters(state, cluster_ids)
+        elif action == "elaborate":
+            result = self._elaborate_clusters(state, provider_id)
+        elif action == "status":
+            result = self._get_status(state)
+        else:
+            return WorkflowResult(
+                success=False,
+                content="",
+                error=f"Unknown action '{action}'. Valid: generate, cluster, score, select, elaborate, status",
+            )
+
+        if result.success:
+            # Persist state
+            self.memory.save_ideation(state)
+
+            # Add common metadata
+            result.metadata["ideation_id"] = state.id
+            result.metadata["phase"] = state.phase.value
+            result.metadata["idea_count"] = len(state.ideas)
+            result.metadata["cluster_count"] = len(state.clusters)
+
+        return result
+
+    def _generate_ideas(
+        self,
+        state: IdeationState,
+        perspective: Optional[str],
+        provider_id: Optional[str],
+    ) -> WorkflowResult:
+        """Generate ideas from a perspective.
+
+        Args:
+            state: Ideation state
+            perspective: Perspective to generate from (or all if None)
+            provider_id: Provider to use
+
+        Returns:
+            WorkflowResult with generated ideas
+        """
+        perspectives_to_use = [perspective] if perspective else state.perspectives
+
+        all_ideas = []
+        for persp in perspectives_to_use:
+            prompt = self._build_generation_prompt(state.topic, persp)
+            result = self._execute_provider(
+                prompt=prompt,
+                provider_id=provider_id,
+                system_prompt=self._build_ideation_system_prompt(),
+            )
+
+            if result.success:
+                # Parse ideas from response
+                ideas = self._parse_ideas(result.content, persp, result.provider_id, result.model_used)
+                for idea in ideas:
+                    state.ideas.append(idea)
+                    all_ideas.append(idea)
+
+        if not all_ideas:
+            return WorkflowResult(
+                success=False,
+                content="",
+                error="No ideas generated",
+            )
+
+        # Format output
+        content = f"Generated {len(all_ideas)} ideas:\n\n"
+        for i, idea in enumerate(all_ideas, 1):
+            content += f"{i}. [{idea.perspective}] {idea.content}\n"
+
+        return WorkflowResult(
+            success=True,
+            content=content,
+            metadata={
+                "ideas_generated": len(all_ideas),
+                "perspectives_used": perspectives_to_use,
+            },
+        )
+
+    def _cluster_ideas(
+        self,
+        state: IdeationState,
+        provider_id: Optional[str],
+    ) -> WorkflowResult:
+        """Cluster related ideas.
+
+        Args:
+            state: Ideation state
+            provider_id: Provider to use
+
+        Returns:
+            WorkflowResult with clustering results
+        """
+        if not state.ideas:
+            return WorkflowResult(
+                success=False,
+                content="",
+                error="No ideas to cluster. Generate ideas first.",
+            )
+
+        # Build clustering prompt
+        ideas_text = "\n".join(f"{i+1}. {idea.content}" for i, idea in enumerate(state.ideas))
+        prompt = f"""Analyze these ideas and group them into 3-5 thematic clusters:
+
+{ideas_text}
+
+For each cluster, provide:
+1. A short name (2-4 words)
+2. A brief description
+3. The idea numbers that belong to it
+
+Format as:
+CLUSTER: [name]
+DESCRIPTION: [description]
+IDEAS: [comma-separated numbers]"""
+
+        result = self._execute_provider(
+            prompt=prompt,
+            provider_id=provider_id,
+            system_prompt="You are organizing ideas into thematic clusters. Be systematic and comprehensive.",
+        )
+
+        if not result.success:
+            return result
+
+        # Parse clusters from response
+        clusters = self._parse_clusters(result.content, state)
+
+        # Update state
+        state.clusters = clusters
+        state.phase = IdeationPhase.CONVERGENT
+
+        # Format output
+        content = f"Created {len(clusters)} clusters:\n\n"
+        for cluster in clusters:
+            idea_count = len(cluster.idea_ids)
+            content += f"**{cluster.name}** ({idea_count} ideas)\n{cluster.description}\n\n"
+
+        return WorkflowResult(
+            success=True,
+            content=content,
+            metadata={"clusters_created": len(clusters)},
+        )
+
+    def _score_ideas(
+        self,
+        state: IdeationState,
+        provider_id: Optional[str],
+    ) -> WorkflowResult:
+        """Score ideas based on criteria.
+
+        Args:
+            state: Ideation state
+            provider_id: Provider to use
+
+        Returns:
+            WorkflowResult with scoring results
+        """
+        if not state.ideas:
+            return WorkflowResult(
+                success=False,
+                content="",
+                error="No ideas to score.",
+            )
+
+        criteria_text = ", ".join(state.scoring_criteria)
+        ideas_text = "\n".join(f"{i+1}. {idea.content}" for i, idea in enumerate(state.ideas))
+
+        prompt = f"""Score each idea on a scale of 0.0 to 1.0 based on these criteria: {criteria_text}
+
+Ideas:
+{ideas_text}
+
+Provide an overall score (average of criteria) for each idea.
+Format: [idea number]: [score] - [brief justification]"""
+
+        result = self._execute_provider(
+            prompt=prompt,
+            provider_id=provider_id,
+            system_prompt="You are evaluating ideas systematically. Be fair and objective.",
+        )
+
+        if not result.success:
+            return result
+
+        # Parse scores from response
+        self._parse_scores(result.content, state)
+
+        # Update cluster scores
+        for cluster in state.clusters:
+            cluster_ideas = [i for i in state.ideas if i.id in cluster.idea_ids]
+            if cluster_ideas:
+                scores = [i.score for i in cluster_ideas if i.score is not None]
+                if scores:
+                    cluster.average_score = sum(scores) / len(scores)
+
+        # Format output
+        scored_ideas = [(i, i.score) for i in state.ideas if i.score is not None]
+        scored_ideas.sort(key=lambda x: x[1] or 0, reverse=True)
+
+        content = "Scored ideas (top to bottom):\n\n"
+        for idea, score in scored_ideas[:10]:
+            content += f"- {idea.content[:50]}... (score: {score:.2f})\n"
+
+        return WorkflowResult(
+            success=True,
+            content=content,
+            metadata={"ideas_scored": len(scored_ideas)},
+        )
+
+    def _select_clusters(
+        self,
+        state: IdeationState,
+        cluster_ids: Optional[list[str]],
+    ) -> WorkflowResult:
+        """Select clusters for elaboration.
+
+        Args:
+            state: Ideation state
+            cluster_ids: Cluster IDs to select
+
+        Returns:
+            WorkflowResult with selection confirmation
+        """
+        if not state.clusters:
+            return WorkflowResult(
+                success=False,
+                content="",
+                error="No clusters to select. Run clustering first.",
+            )
+
+        if not cluster_ids:
+            # Auto-select top clusters by score
+            sorted_clusters = sorted(
+                state.clusters,
+                key=lambda c: c.average_score or 0,
+                reverse=True,
+            )
+            cluster_ids = [c.id for c in sorted_clusters[:2]]
+
+        selected = []
+        for cluster in state.clusters:
+            if cluster.id in cluster_ids:
+                cluster.selected_for_elaboration = True
+                selected.append(cluster)
+
+        if not selected:
+            return WorkflowResult(
+                success=False,
+                content="",
+                error=f"No matching clusters found for IDs: {cluster_ids}",
+            )
+
+        state.phase = IdeationPhase.SELECTION
+
+        content = f"Selected {len(selected)} clusters for elaboration:\n\n"
+        for cluster in selected:
+            content += f"- **{cluster.name}**: {cluster.description}\n"
+
+        return WorkflowResult(
+            success=True,
+            content=content,
+            metadata={"selected_clusters": [c.id for c in selected]},
+        )
+
+    def _elaborate_clusters(
+        self,
+        state: IdeationState,
+        provider_id: Optional[str],
+    ) -> WorkflowResult:
+        """Elaborate selected clusters into detailed plans.
+
+        Args:
+            state: Ideation state
+            provider_id: Provider to use
+
+        Returns:
+            WorkflowResult with elaborations
+        """
+        selected = [c for c in state.clusters if c.selected_for_elaboration]
+
+        if not selected:
+            return WorkflowResult(
+                success=False,
+                content="",
+                error="No clusters selected for elaboration.",
+            )
+
+        elaborations = []
+        for cluster in selected:
+            # Get ideas in cluster
+            cluster_ideas = [i for i in state.ideas if i.id in cluster.idea_ids]
+            ideas_text = "\n".join(f"- {i.content}" for i in cluster_ideas)
+
+            prompt = f"""Elaborate on this cluster of ideas into a detailed plan:
+
+Cluster: {cluster.name}
+Description: {cluster.description}
+
+Ideas in this cluster:
+{ideas_text}
+
+Provide:
+1. A comprehensive synthesis of the ideas
+2. Key implementation steps
+3. Potential challenges and mitigations
+4. Expected outcomes"""
+
+            result = self._execute_provider(
+                prompt=prompt,
+                provider_id=provider_id,
+                system_prompt="You are developing ideas into actionable plans. Be thorough and practical.",
+            )
+
+            if result.success:
+                cluster.elaboration = result.content
+                elaborations.append((cluster, result.content))
+
+        state.phase = IdeationPhase.ELABORATION
+
+        content = f"Elaborated {len(elaborations)} clusters:\n\n"
+        for cluster, elab in elaborations:
+            content += f"## {cluster.name}\n\n{elab}\n\n---\n\n"
+
+        return WorkflowResult(
+            success=True,
+            content=content,
+            metadata={"clusters_elaborated": len(elaborations)},
+        )
+
+    def _get_status(self, state: IdeationState) -> WorkflowResult:
+        """Get current ideation status.
+
+        Args:
+            state: Ideation state
+
+        Returns:
+            WorkflowResult with status summary
+        """
+        content = f"""# Ideation Status: {state.topic}
+
+**Phase**: {state.phase.value}
+**Ideas**: {len(state.ideas)}
+**Clusters**: {len(state.clusters)}
+**Created**: {state.created_at.isoformat()}
+**Updated**: {state.updated_at.isoformat()}
+
+## Perspectives
+{', '.join(state.perspectives)}
+
+## Scoring Criteria
+{', '.join(state.scoring_criteria)}
+"""
+
+        if state.clusters:
+            content += "\n## Clusters\n"
+            for cluster in state.clusters:
+                selected = " [SELECTED]" if cluster.selected_for_elaboration else ""
+                score = f" (score: {cluster.average_score:.2f})" if cluster.average_score else ""
+                content += f"- {cluster.name}{score}{selected}\n"
+
+        return WorkflowResult(
+            success=True,
+            content=content,
+        )
+
+    def _build_generation_prompt(self, topic: str, perspective: str) -> str:
+        """Build idea generation prompt.
+
+        Args:
+            topic: Ideation topic
+            perspective: Perspective to generate from
+
+        Returns:
+            Generation prompt
+        """
+        return f"""Generate 5-7 creative ideas for: {topic}
+
+Approach this from a {perspective} perspective. Think freely and don't self-censor.
+
+For each idea, provide a single sentence description.
+Format: One idea per line, starting with a dash (-)"""
+
+    def _build_ideation_system_prompt(self) -> str:
+        """Build system prompt for ideation.
+
+        Returns:
+            System prompt
+        """
+        return """You are a creative brainstorming assistant. Generate diverse, innovative ideas without judgment.
+Focus on quantity and variety - the evaluation comes later. Be bold and think outside the box."""
+
+    def _parse_ideas(
+        self,
+        response: str,
+        perspective: str,
+        provider_id: Optional[str],
+        model_used: Optional[str],
+    ) -> list[Idea]:
+        """Parse ideas from response.
+
+        Args:
+            response: Provider response
+            perspective: Perspective used
+            provider_id: Provider ID
+            model_used: Model used
+
+        Returns:
+            List of parsed ideas
+        """
+        ideas = []
+        for line in response.split("\n"):
+            line = line.strip()
+            if line.startswith("-") or line.startswith("•"):
+                content = line[1:].strip()
+                if content:
+                    ideas.append(
+                        Idea(
+                            content=content,
+                            perspective=perspective,
+                            provider_id=provider_id,
+                            model_used=model_used,
+                        )
+                    )
+        return ideas
+
+    def _parse_clusters(self, response: str, state: IdeationState) -> list[IdeaCluster]:
+        """Parse clusters from response.
+
+        Args:
+            response: Provider response
+            state: Ideation state
+
+        Returns:
+            List of parsed clusters
+        """
+        clusters = []
+        current_name = None
+        current_desc = None
+        current_ideas = []
+
+        for line in response.split("\n"):
+            line = line.strip()
+            if line.upper().startswith("CLUSTER:"):
+                # Save previous cluster if exists
+                if current_name:
+                    cluster = IdeaCluster(name=current_name, description=current_desc)
+                    cluster.idea_ids = current_ideas
+                    clusters.append(cluster)
+                current_name = line.split(":", 1)[1].strip()
+                current_desc = None
+                current_ideas = []
+            elif line.upper().startswith("DESCRIPTION:"):
+                current_desc = line.split(":", 1)[1].strip()
+            elif line.upper().startswith("IDEAS:"):
+                # Parse idea numbers
+                nums_str = line.split(":", 1)[1].strip()
+                for num in nums_str.replace(",", " ").split():
+                    try:
+                        idx = int(num.strip()) - 1
+                        if 0 <= idx < len(state.ideas):
+                            idea_id = state.ideas[idx].id
+                            current_ideas.append(idea_id)
+                            state.ideas[idx].cluster_id = idea_id
+                    except ValueError:
+                        continue
+
+        # Save last cluster
+        if current_name:
+            cluster = IdeaCluster(name=current_name, description=current_desc)
+            cluster.idea_ids = current_ideas
+            clusters.append(cluster)
+
+        return clusters
+
+    def _parse_scores(self, response: str, state: IdeationState) -> None:
+        """Parse scores from response and update ideas.
+
+        Args:
+            response: Provider response
+            state: Ideation state
+        """
+        for line in response.split("\n"):
+            line = line.strip()
+            if ":" in line:
+                try:
+                    parts = line.split(":")
+                    num = int(parts[0].strip().rstrip("."))
+                    score_part = parts[1].strip()
+                    # Extract score (handle "0.8 - justification" format)
+                    score_str = score_part.split()[0].split("-")[0].strip()
+                    score = float(score_str)
+                    if 0 <= score <= 1 and 0 < num <= len(state.ideas):
+                        state.ideas[num - 1].score = score
+                except (ValueError, IndexError):
+                    continue
+
+    def get_ideation(self, ideation_id: str) -> Optional[dict[str, Any]]:
+        """Get full ideation details.
+
+        Args:
+            ideation_id: Ideation identifier
+
+        Returns:
+            Ideation data or None if not found
+        """
+        state = self.memory.load_ideation(ideation_id)
+        if not state:
+            return None
+
+        return {
+            "id": state.id,
+            "topic": state.topic,
+            "phase": state.phase.value,
+            "perspectives": state.perspectives,
+            "scoring_criteria": state.scoring_criteria,
+            "created_at": state.created_at.isoformat(),
+            "updated_at": state.updated_at.isoformat(),
+            "ideas": [
+                {
+                    "id": i.id,
+                    "content": i.content,
+                    "perspective": i.perspective,
+                    "score": i.score,
+                    "cluster_id": i.cluster_id,
+                }
+                for i in state.ideas
+            ],
+            "clusters": [
+                {
+                    "id": c.id,
+                    "name": c.name,
+                    "description": c.description,
+                    "idea_count": len(c.idea_ids),
+                    "average_score": c.average_score,
+                    "selected": c.selected_for_elaboration,
+                    "has_elaboration": c.elaboration is not None,
+                }
+                for c in state.clusters
+            ],
+        }
+
+    def list_ideations(self, limit: Optional[int] = 50) -> list[dict[str, Any]]:
+        """List ideation sessions.
+
+        Args:
+            limit: Maximum sessions to return
+
+        Returns:
+            List of ideation summaries
+        """
+        ideations = self.memory.list_ideations(limit=limit)
+
+        return [
+            {
+                "id": i.id,
+                "topic": i.topic,
+                "phase": i.phase.value,
+                "idea_count": len(i.ideas),
+                "cluster_count": len(i.clusters),
+                "created_at": i.created_at.isoformat(),
+                "updated_at": i.updated_at.isoformat(),
+            }
+            for i in ideations
+        ]
+
+    def delete_ideation(self, ideation_id: str) -> bool:
+        """Delete an ideation session.
+
+        Args:
+            ideation_id: Ideation identifier
+
+        Returns:
+            True if deleted, False if not found
+        """
+        return self.memory.delete_ideation(ideation_id)