@creative-ia/cortex 1.0.5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (64)
  1. package/README.md +41 -0
  2. package/dist/config/cloud-proxy.d.ts +15 -0
  3. package/dist/config/cloud-proxy.js +63 -0
  4. package/dist/config/cloudwatch-store.d.ts +13 -0
  5. package/dist/config/cloudwatch-store.js +66 -0
  6. package/dist/config/license.d.ts +29 -0
  7. package/dist/config/license.js +165 -0
  8. package/dist/config/ssm-store.d.ts +2 -0
  9. package/dist/config/ssm-store.js +38 -0
  10. package/dist/config/telemetry.d.ts +17 -0
  11. package/dist/config/telemetry.js +93 -0
  12. package/dist/index.d.ts +2 -0
  13. package/dist/index.js +460 -0
  14. package/dist/knowledge/dynamo-store.d.ts +17 -0
  15. package/dist/knowledge/dynamo-store.js +85 -0
  16. package/dist/knowledge/embeddings.d.ts +2 -0
  17. package/dist/knowledge/embeddings.js +36 -0
  18. package/dist/knowledge/loader.d.ts +8 -0
  19. package/dist/knowledge/loader.js +57 -0
  20. package/dist/lambda-package.zip +0 -0
  21. package/dist/lambda.d.ts +22 -0
  22. package/dist/lambda.js +496 -0
  23. package/dist/package.json +1 -0
  24. package/dist/tools/advance-process.d.ts +7 -0
  25. package/dist/tools/advance-process.js +128 -0
  26. package/dist/tools/analyze-code.d.ts +7 -0
  27. package/dist/tools/analyze-code.js +131 -0
  28. package/dist/tools/analyze-docs.d.ts +8 -0
  29. package/dist/tools/analyze-docs.js +147 -0
  30. package/dist/tools/config-registry.d.ts +3 -0
  31. package/dist/tools/config-registry.js +20 -0
  32. package/dist/tools/create-process.d.ts +6 -0
  33. package/dist/tools/create-process.js +257 -0
  34. package/dist/tools/decompose-epic.d.ts +7 -0
  35. package/dist/tools/decompose-epic.js +603 -0
  36. package/dist/tools/diagrams.d.ts +51 -0
  37. package/dist/tools/diagrams.js +304 -0
  38. package/dist/tools/generate-report.d.ts +9 -0
  39. package/dist/tools/generate-report.js +891 -0
  40. package/dist/tools/generate-wiki.d.ts +10 -0
  41. package/dist/tools/generate-wiki.js +700 -0
  42. package/dist/tools/get-architecture.d.ts +6 -0
  43. package/dist/tools/get-architecture.js +78 -0
  44. package/dist/tools/get-code-standards.d.ts +7 -0
  45. package/dist/tools/get-code-standards.js +52 -0
  46. package/dist/tools/init-process.d.ts +7 -0
  47. package/dist/tools/init-process.js +82 -0
  48. package/dist/tools/knowledge-crud.d.ts +26 -0
  49. package/dist/tools/knowledge-crud.js +142 -0
  50. package/dist/tools/logo-base64.d.ts +1 -0
  51. package/dist/tools/logo-base64.js +1 -0
  52. package/dist/tools/logs-query.d.ts +15 -0
  53. package/dist/tools/logs-query.js +46 -0
  54. package/dist/tools/reverse-engineer.d.ts +13 -0
  55. package/dist/tools/reverse-engineer.js +956 -0
  56. package/dist/tools/semantic-search.d.ts +7 -0
  57. package/dist/tools/semantic-search.js +68 -0
  58. package/dist/tools/update-process.d.ts +17 -0
  59. package/dist/tools/update-process.js +195 -0
  60. package/dist/tools/validate-idea.d.ts +7 -0
  61. package/dist/tools/validate-idea.js +339 -0
  62. package/dist/tools/validate-process.d.ts +6 -0
  63. package/dist/tools/validate-process.js +102 -0
  64. package/package.json +31 -0
@@ -0,0 +1,128 @@
1
+ /**
2
+ * Tool: advance_process
3
+ * Orquestrador inteligente que detecta a fase atual do processo
4
+ * e avança para o próximo artefato automaticamente.
5
+ *
6
+ * Também expõe generate_features, generate_user_stories, generate_tasks
7
+ * como funções granulares reutilizáveis.
8
+ */
9
+ import { readFile, readdir } from "node:fs/promises";
10
+ import { join } from "node:path";
11
+ import { existsSync } from "node:fs";
12
+ import { decomposeEpic } from "./decompose-epic.js";
13
// ============================================================
// PROCESS STATE DETECTOR
// ============================================================
/**
 * Scans a process directory and reports which artifacts exist and how far
 * the epic decomposition has progressed.
 *
 * Best-effort: a missing or unreadable directory simply yields the defaults.
 *
 * @param {string} processDir - Root directory of the process.
 * @returns {Promise<object>} Flags for the base artifacts, the epic IDs found
 *   in the epics markdown, which of them are already decomposed, and counts
 *   of generated features/user-stories/tasks.
 */
async function detectProcessState(processDir) {
    const state = {
        hasStakeholder: false,
        hasParecer: false,
        hasEpicos: false,
        epicIds: [],
        decomposedEpics: [],
        pendingEpics: [],
        totalFeatures: 0,
        totalUserStories: 0,
        totalTasks: 0,
    };
    try {
        const files = await readdir(processDir);
        state.hasStakeholder = files.some((f) => f.startsWith("01-stakeholder"));
        state.hasParecer = files.some((f) => f.startsWith("02-parecer"));
        state.hasEpicos = files.some((f) => f.startsWith("03-epicos") && f.endsWith(".md"));
        // Parse epic IDs from the epicos markdown ("## EP-01" headings).
        if (state.hasEpicos) {
            const epicFile = files.find((f) => f.startsWith("03-epicos") && f.endsWith(".md"));
            if (epicFile) {
                const content = await readFile(join(processDir, epicFile), "utf-8");
                const matches = content.match(/## EP-\d{2}/g) || [];
                state.epicIds = matches.map((m) => m.replace("## ", ""));
            }
        }
        // Check which epics have been decomposed.
        // FIX: only accept files matching EPIC-<digits>.md — previously every
        // .md file was parseInt'ed, so unrelated files (e.g. notes.md) produced
        // bogus "EP-NaN" entries and skewed pendingEpics.
        const epicsDir = join(processDir, "epics");
        if (existsSync(epicsDir)) {
            const epicFiles = await readdir(epicsDir);
            state.decomposedEpics = epicFiles
                .map((f) => f.match(/^EPIC-(\d+)\.md$/))
                .filter((m) => m !== null)
                // EPIC-001.md → "001" → 1 → "EP-01"
                .map((m) => `EP-${parseInt(m[1], 10).toString().padStart(2, "0")}`);
        }
        state.pendingEpics = state.epicIds.filter((ep) => !state.decomposedEpics.includes(ep));
        // Count generated artifacts (one .md file per artifact).
        const countMd = async (sub) => {
            const d = join(processDir, sub);
            if (!existsSync(d))
                return 0;
            return (await readdir(d)).filter((f) => f.endsWith(".md")).length;
        };
        state.totalFeatures = await countMd("features");
        state.totalUserStories = await countMd("user-stories");
        state.totalTasks = await countMd("tasks");
    }
    catch { /* best-effort: fall through with the defaults */ }
    return state;
}
72
// ============================================================
// ADVANCE PROCESS — main orchestrator
// ============================================================
/**
 * Smart orchestrator: detects the current phase of the process and advances
 * to the next artifact automatically. With an explicit `epicId` it decomposes
 * just that epic; otherwise it picks the first pending one, and once nothing
 * is pending it prints a completion summary.
 *
 * @param {{processId: string, processDir: string, epicId?: string}} params
 * @returns {Promise<string>} human-readable status / decomposition result
 */
export async function advanceProcess(params) {
    const { processId, processDir, epicId } = params;
    if (!existsSync(processDir)) {
        return `Erro: Diretório do processo não encontrado: ${processDir}`;
    }
    const state = await detectProcessState(processDir);
    // Without the epics markdown there is nothing to decompose yet.
    if (!state.hasEpicos) {
        const checks = [
            ["01-stakeholder", state.hasStakeholder],
            ["02-parecer", state.hasParecer],
            ["03-epicos", state.hasEpicos],
        ];
        const missing = checks.filter(([, present]) => !present).map(([name]) => name);
        return [
            `Processo ${processId} — Estado atual:`,
            ``,
            `Artefatos faltantes: ${missing.join(", ")}`,
            ``,
            `O processo precisa ter o markdown de épicos (03-epicos*.md) para avançar na decomposição.`,
            `Use create_process ou gere o stakeholder/parecer/épicos primeiro.`,
        ].join("\n");
    }
    // An explicit epic was requested — decompose just that one.
    if (epicId) {
        if (state.decomposedEpics.includes(epicId)) {
            return `${epicId} já foi decomposto. Use outro épico ou verifique os artefatos em epics/, features/, user-stories/, tasks/.`;
        }
        return await decomposeEpic({ processId, epicId, processDir });
    }
    // Auto mode — take the first pending epic and report what remains.
    if (state.pendingEpics.length > 0) {
        const [nextEpic, ...remainingEpics] = state.pendingEpics;
        const result = await decomposeEpic({ processId, epicId: nextEpic, processDir });
        const footer = remainingEpics.length > 0
            ? `\n\nPróximos épicos pendentes (${remainingEpics.length}): ${remainingEpics.join(", ")}`
            : `\n\nTodos os épicos foram decompostos.`;
        return result + footer;
    }
    // Nothing pending — everything has been decomposed already.
    return [
        `Processo ${processId} — Decomposição completa.`,
        ``,
        `Épicos: ${state.epicIds.length} (todos decompostos)`,
        `Features: ${state.totalFeatures}`,
        `User Stories: ${state.totalUserStories}`,
        `Tasks: ${state.totalTasks}`,
        ``,
        `Todos os épicos já foram decompostos em features, user stories e tasks.`,
        `Próximo passo sugerido: gerar HTMLs via generate_report ou avançar para design/QA strategy.`,
    ].join("\n");
}
@@ -0,0 +1,7 @@
1
/** Request payload for {@link analyzeCode}. */
interface AnalyzeRequest {
    /** Source code snippet to analyze. */
    code: string;
    /** Optional language hint. NOTE(review): not consumed by the current implementation — confirm before relying on it. */
    language?: string;
    /** Optional file name for context. NOTE(review): not consumed by the current implementation. */
    filename?: string;
}
/**
 * Analyzes a code snippet against the L1-L4 code standards and returns a
 * human-readable report of violations with suggested fixes.
 */
export declare function analyzeCode(params: AnalyzeRequest): Promise<string>;
export {};
@@ -0,0 +1,131 @@
1
+ /**
2
+ * Tool: analyze_code
3
+ * Analisa um trecho de código contra os padrões L1-L4.
4
+ * Retorna violações encontradas com sugestões de correção.
5
+ */
6
+ import { loadCodeStandards } from "../knowledge/loader.js";
7
/**
 * Tool: analyze_code
 * Analyzes a code snippet against the L1-L4 standards (loaded from the
 * knowledge base) and returns the violations found with suggested fixes.
 *
 * @param {{code: string, language?: string, filename?: string}} params
 * @returns {Promise<string>} formatted report, or a success message when clean
 */
export async function analyzeCode(params) {
    const standards = (await loadCodeStandards());
    const lines = params.code.split("\n");
    const violations = [];
    const l1 = standards.levels?.L1_fundamentals?.rules || {};
    // L1-001: File size — error above max_lines, warning above warn_at.
    if (l1.file_size && lines.length > l1.file_size.max_lines) {
        violations.push({
            ruleId: "L1-001", rule: "File size",
            severity: "error",
            message: `File has ${lines.length} lines (max ${l1.file_size.max_lines})`,
            suggestion: "Extract modules or helper classes to reduce file size.",
        });
    }
    else if (l1.file_size && lines.length > l1.file_size.warn_at) {
        violations.push({
            ruleId: "L1-001", rule: "File size",
            severity: "warning",
            message: `File has ${lines.length} lines (warn at ${l1.file_size.warn_at})`,
            suggestion: "Consider splitting before it grows further.",
        });
    }
    // L1-003: Cyclomatic complexity (heuristic — count branching keywords).
    const branchKeywords = /\b(if|else if|case|catch|&&|\|\||for|while|do)\b/g;
    const complexityMax = l1.cyclomatic_complexity?.max || 10;
    let fnStart = -1;
    let fnName = "";
    let branchCount = 0;
    // Report the function tracked so far if it exceeds the threshold.
    const flushComplexity = () => {
        if (fnStart >= 0 && branchCount > complexityMax) {
            violations.push({
                ruleId: "L1-003", rule: "Cyclomatic complexity",
                severity: "error",
                message: `Function '${fnName}' has complexity ~${branchCount} (max ${complexityMax})`,
                suggestion: "Use early returns, guard clauses, or strategy pattern.",
            });
        }
    };
    for (let i = 0; i < lines.length; i++) {
        const line = lines[i];
        const fnMatch = line.match(/(?:function|const|let|var)\s+(\w+)|(\w+)\s*\(/);
        if (fnMatch && (line.includes("function") || line.includes("=>"))) {
            flushComplexity();
            fnStart = i;
            fnName = fnMatch[1] || fnMatch[2] || "anonymous";
            branchCount = 1;
        }
        const matches = line.match(branchKeywords);
        if (matches)
            branchCount += matches.length;
    }
    // FIX: previously the complexity check only ran when the *next* function
    // declaration was found, so the last function in the snippet was never
    // evaluated. Flush once more after the loop.
    flushComplexity();
    // L1-005: Function params — flag functions with too many parameters.
    const paramRegex = /(?:function\s+\w+|(?:const|let|var)\s+\w+\s*=\s*(?:async\s*)?)\s*\(([^)]*)\)/g;
    let paramMatch;
    while ((paramMatch = paramRegex.exec(params.code)) !== null) {
        const paramList = paramMatch[1].split(",").filter((p) => p.trim());
        if (paramList.length > (l1.function_params?.max || 4)) {
            violations.push({
                ruleId: "L1-005", rule: "Function params",
                severity: "error",
                message: `Function has ${paramList.length} params (max ${l1.function_params?.max || 4})`,
                suggestion: "Group into a typed config object.",
            });
        }
    }
    // L1-006: Nesting depth — brace-balance heuristic.
    let maxDepth = 0;
    let currentDepth = 0;
    for (const line of lines) {
        currentDepth += (line.match(/{/g) || []).length;
        currentDepth -= (line.match(/}/g) || []).length;
        if (currentDepth > maxDepth)
            maxDepth = currentDepth;
    }
    // The +1 / -1 offsets presumably discount the enclosing function's own
    // brace — NOTE(review): confirm intent.
    if (maxDepth > (l1.nesting_depth?.max || 3) + 1) {
        violations.push({
            ruleId: "L1-006", rule: "Nesting depth",
            severity: "error",
            message: `Max nesting depth: ${maxDepth - 1} (max ${l1.nesting_depth?.max || 3})`,
            suggestion: "Invert conditions, use early returns, extract functions.",
        });
    }
    // L1-008: Magic numbers — only the first occurrence is reported.
    const magicRegex = /(?<!=)\s(\d+)(?!\s*[;:}\])]|\s*\/\/)/g;
    const allowed = new Set((l1.no_magic_numbers?.allowed || [0, 1, -1, 2, 100]).map(String));
    let magicMatch;
    while ((magicMatch = magicRegex.exec(params.code)) !== null) {
        if (!allowed.has(magicMatch[1]) && !magicMatch[1].startsWith("0x")) {
            violations.push({
                ruleId: "L1-008", rule: "Magic number",
                severity: "warning",
                message: `Magic number '${magicMatch[1]}' found. Extract to named constant.`,
                suggestion: `const MEANINGFUL_NAME = ${magicMatch[1]};`,
            });
            break; // Report only first occurrence
        }
    }
    // L1-009: Commented-out code — only the first occurrence is reported.
    const commentedCodeRegex = /^\s*\/\/\s*(const|let|var|function|if|for|while|return|import|export)\b/;
    for (let i = 0; i < lines.length; i++) {
        if (commentedCodeRegex.test(lines[i])) {
            violations.push({
                ruleId: "L1-009", rule: "Commented code",
                severity: "warning",
                message: `Line ${i + 1}: Commented-out code detected.`,
                suggestion: "Remove dead code. Use git history to recover if needed.",
            });
            break;
        }
    }
    // Format output.
    if (violations.length === 0) {
        return "No violations found. Code complies with L1-L4 standards.";
    }
    const errors = violations.filter((v) => v.severity === "error");
    const warnings = violations.filter((v) => v.severity === "warning");
    const out = [
        `## Analysis Result: ${violations.length} issue(s) found`,
        `Errors: ${errors.length} | Warnings: ${warnings.length}`,
        "",
    ];
    for (const v of violations) {
        out.push(`[${v.severity.toUpperCase()}] ${v.ruleId} — ${v.rule}`);
        out.push(`  ${v.message}`);
        out.push(`  Fix: ${v.suggestion}`);
        out.push("");
    }
    return out.join("\n");
}
@@ -0,0 +1,8 @@
1
/** Request payload for {@link analyzeDocs}. */
interface AnalyzeDocsRequest {
    /** Directory containing the documentation files to analyze. */
    docsDir: string;
    /** NOTE(review): declared but not consumed by the current implementation — confirm. */
    processId?: string;
    /** When set, the generated summary is also written to `<processDir>/analise-documentacao.md`. */
    processDir?: string;
    /** Maximum number of files to read (default 50). */
    maxFiles?: number;
}
/**
 * Extracts text from every supported file in a documentation directory
 * (PDF, DOCX, PPTX, XLSX, MD, TXT, JSON, YAML, HTML, …) and returns a
 * structured markdown summary.
 */
export declare function analyzeDocs(params: AnalyzeDocsRequest): Promise<string>;
export {};
@@ -0,0 +1,147 @@
1
+ /**
2
+ * Tool #24: analyze_docs
3
+ *
4
+ * Analisa um diretorio de documentacao, extrai texto de todos os arquivos
5
+ * suportados (PDF, DOCX, PPTX, XLSX, MD, TXT, JSON, YAML, HTML) e gera
6
+ * um resumo estruturado com requisitos, regras de negocio, endpoints,
7
+ * entidades e fluxos identificados.
8
+ *
9
+ * Usa officeparser para PDF/DOCX/PPTX/XLSX (zero dependencias externas).
10
+ */
11
+ import { readdir, readFile, stat } from "node:fs/promises";
12
+ import { join, extname } from "node:path";
13
+ import { parseOffice } from "officeparser";
14
// Supported extensions.
// Plain-text formats, read directly with readFile(utf-8).
const TEXT_EXTS = new Set([".md", ".txt", ".json", ".yaml", ".yml", ".html", ".htm", ".xml", ".csv", ".swagger", ".openapi", ".graphql", ".gql", ".proto", ".sql", ".env", ".cfg", ".ini", ".toml"]);
// Binary office/document formats, parsed via officeparser.
const OFFICE_EXTS = new Set([".pdf", ".docx", ".pptx", ".xlsx", ".odt", ".odp", ".ods"]);
// Union of both sets — anything else is skipped.
const ALL_EXTS = new Set([...TEXT_EXTS, ...OFFICE_EXTS]);
const MAX_TEXT_PER_FILE = 50_000; // 50 KB of extracted text per file
const DEFAULT_MAX_FILES = 50;
20
/**
 * Recursively reads every supported file under `dir` (max depth 3).
 * Skips hidden directories, node_modules, files larger than 10 MB, and
 * anything whose extension is not in ALL_EXTS; extracted text is trimmed
 * and capped at MAX_TEXT_PER_FILE characters.
 *
 * @param {string} dir - Directory to scan.
 * @param {number} maxFiles - Remaining file budget.
 * @param {number} [depth] - Current recursion depth.
 * @returns {Promise<Array<{name: string, ext: string, size: number, text: string, truncated: boolean}>>}
 */
async function readDocsDir(dir, maxFiles, depth = 0) {
    if (depth > 3)
        return [];
    let entries;
    try {
        entries = await readdir(dir, { withFileTypes: true });
    }
    catch {
        return [];
    }
    const collected = [];
    for (const entry of entries) {
        if (collected.length >= maxFiles)
            break;
        const fullPath = join(dir, entry.name);
        const isVisitableDir = entry.isDirectory()
            && !entry.name.startsWith(".")
            && entry.name !== "node_modules";
        if (isVisitableDir) {
            // Recurse with whatever budget is left.
            const nested = await readDocsDir(fullPath, maxFiles - collected.length, depth + 1);
            collected.push(...nested);
            continue;
        }
        if (!entry.isFile())
            continue;
        const ext = extname(entry.name).toLowerCase();
        if (!ALL_EXTS.has(ext))
            continue;
        try {
            const fileStat = await stat(fullPath);
            // Skip files larger than 10 MB.
            if (fileStat.size > 10 * 1024 * 1024)
                continue;
            let text = "";
            if (TEXT_EXTS.has(ext)) {
                text = await readFile(fullPath, "utf-8");
            }
            else if (OFFICE_EXTS.has(ext)) {
                const parsed = await parseOffice(fullPath);
                text = typeof parsed === "string" ? parsed : String(parsed);
            }
            const truncated = text.length > MAX_TEXT_PER_FILE;
            if (truncated) {
                text = text.slice(0, MAX_TEXT_PER_FILE);
            }
            const trimmed = text.trim();
            if (trimmed.length > 0) {
                collected.push({
                    name: entry.name,
                    ext,
                    size: fileStat.size,
                    text: trimmed,
                    truncated,
                });
            }
        }
        catch {
            // Skip files we cannot read.
        }
    }
    return collected;
}
82
/**
 * Builds the structured markdown summary for the analyzed documents:
 * a header, a table of files (name/format/size/characters), and the
 * extracted content of each file inside a fenced block.
 *
 * @param {Array<{name: string, ext: string, size: number, text: string, truncated: boolean}>} docs
 * @param {string} docsDir - Directory the documents came from (shown in the header).
 * @returns {string} markdown report
 */
function generateSummary(docs, docsDir) {
    const header = [
        `# Analise de Documentacao`,
        ``,
        `**Diretorio:** ${docsDir}`,
        `**Arquivos analisados:** ${docs.length}`,
        ``,
        `## Arquivos Encontrados`,
        ``,
        `| Arquivo | Formato | Tamanho | Caracteres |`,
        `|---------|---------|---------|------------|`,
    ];
    const tableRows = docs.map((doc) => {
        const sizeKB = (doc.size / 1024).toFixed(1);
        const chars = doc.text.length.toLocaleString();
        const trunc = doc.truncated ? " (truncado)" : "";
        return `| ${doc.name} | ${doc.ext} | ${sizeKB} KB | ${chars}${trunc} |`;
    });
    const content = [``, `## Conteudo Extraido`, ``];
    for (const doc of docs) {
        content.push(`### ${doc.name}`, ``, "```", doc.text, "```", ``);
    }
    return [...header, ...tableRows, ...content].join("\n");
}
117
/**
 * Tool #24: analyze_docs
 * Validates the documentation directory, extracts text from every supported
 * file via readDocsDir, and renders a structured markdown summary. When a
 * process directory is provided, the summary is also persisted as an artifact.
 *
 * @param {{docsDir: string, processId?: string, processDir?: string, maxFiles?: number}} params
 * @returns {Promise<string>} the markdown summary, or an error message
 */
export async function analyzeDocs(params) {
    const { docsDir, processId, processDir, maxFiles } = params;
    const limit = maxFiles || DEFAULT_MAX_FILES;
    // The target must exist and actually be a directory.
    let dirStat;
    try {
        dirStat = await stat(docsDir);
    }
    catch {
        return `Erro: Diretorio '${docsDir}' nao encontrado.`;
    }
    if (!dirStat.isDirectory()) {
        return `Erro: '${docsDir}' nao e um diretorio.`;
    }
    const docs = await readDocsDir(docsDir, limit);
    if (docs.length === 0) {
        return `Nenhum arquivo suportado encontrado em '${docsDir}'.\n\nFormatos suportados: ${[...ALL_EXTS].join(", ")}`;
    }
    const summary = generateSummary(docs, docsDir);
    // Persist the artifact when a process directory was supplied.
    if (processDir) {
        const { writeFile, mkdir } = await import("node:fs/promises");
        await mkdir(processDir, { recursive: true });
        const outputFile = join(processDir, "analise-documentacao.md");
        await writeFile(outputFile, summary, "utf-8");
        return summary + `\n\n---\nArtefato salvo em: ${outputFile}`;
    }
    return summary;
}
@@ -0,0 +1,3 @@
1
/**
 * Reads MCP server configuration from the SSM Parameter Store.
 * With `key`, returns that single parameter (or a not-found message);
 * without it, lists every configured parameter as markdown.
 */
export declare function getConfig(params: {
    /** Optional parameter name; omit to list all parameters. */
    key?: string;
}): Promise<string>;
@@ -0,0 +1,20 @@
1
+ /**
2
+ * Tool MCP: get_config — Lê configuração do MCP Server via SSM Parameter Store.
3
+ */
4
+ import { getParam, getAllParams } from "../config/ssm-store.js";
5
/**
 * MCP tool: get_config — reads the MCP Server configuration from the SSM
 * Parameter Store. With `params.key` it fetches a single parameter;
 * otherwise it lists every configured parameter as markdown bullets.
 *
 * @param {{key?: string}} params
 * @returns {Promise<string>} markdown-formatted result
 */
export async function getConfig(params) {
    const { key } = params;
    if (key) {
        const value = await getParam(key);
        return value
            ? `**${key}**: ${value}`
            : `❌ Parâmetro não encontrado: ${key}`;
    }
    const all = await getAllParams();
    const entries = Object.entries(all);
    if (entries.length === 0) {
        return "Nenhum parâmetro configurado.";
    }
    const out = [
        "## MCP Server Config (SSM)\n",
        ...entries.map(([k, v]) => `- **${k}**: ${v}`),
    ];
    return out.join("\n");
}
@@ -0,0 +1,6 @@
1
/** Request payload for {@link createProcess}. */
interface CreateProcessRequest {
    /** Free-form prompt describing the process to create. */
    prompt: string;
    /** Optional stakeholder identifier — TODO confirm semantics (implementation not visible here). */
    stakeholder?: string;
}
export declare function createProcess(params: CreateProcessRequest): Promise<string>;
export {};