@thelogicatelier/sylva 1.0.13 → 1.0.15
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +10 -5
- package/dist/awareness/braveSearch.js +9 -0
- package/dist/awareness/sourceScanner.js +58 -9
- package/dist/constants.d.ts +2 -0
- package/dist/constants.js +33 -1
- package/dist/prompts.js +3 -3
- package/package.json +2 -2
package/README.md
CHANGED
|
@@ -80,11 +80,11 @@ npx @thelogicatelier/sylva --local-repository . -m openai/gpt-5.2 -i 25
|
|
|
80
80
|
|
|
81
81
|
For detailed guidance, see the [Choosing the Right Model](https://achatt89.github.io/sylva/models/choosing.html) and [Iteration Depth Guide](https://achatt89.github.io/sylva/models/iterations.html) docs.
|
|
82
82
|
|
|
83
|
-
### Framework Awareness (NEW)
|
|
83
|
+
### Framework & Integration Awareness (NEW)
|
|
84
84
|
|
|
85
|
-
Sylva now includes **deterministic framework detection** that scans the entire repository (including nested subprojects and monorepos)
|
|
85
|
+
Sylva now includes **deterministic framework detection** and **source code integration scanning** that scan the entire repository (including nested subprojects and monorepos) before invoking the LLM. This prevents framework hallucination and produces more accurate `AGENTS.md` output.
|
|
86
86
|
|
|
87
|
-
**What it detects:**
|
|
87
|
+
**What it detects via Manifests:**
|
|
88
88
|
- **OpenClaw** (`openclaw.json`) — treated as the primary orchestrator
|
|
89
89
|
- **Node.js/JS/TS** — React, Angular, Vue, Next.js, Express, NestJS, etc. from `package.json`
|
|
90
90
|
- **Python** — Django, Flask, FastAPI from `requirements.txt`, `pyproject.toml`, `Pipfile`
|
|
@@ -93,11 +93,16 @@ Sylva now includes **deterministic framework detection** that scans the entire r
|
|
|
93
93
|
- **Go** — Gin, Echo, Fiber from `go.mod`
|
|
94
94
|
- **Rust** — Actix, Axum, Tokio from `Cargo.toml`
|
|
95
95
|
|
|
96
|
+
**What it detects via Source Scanning (Integrations & Deployments):**
|
|
97
|
+
- **Deployment Platforms** — Fly.io, Railway, Render, AWS, GCP, Azure, DigitalOcean via `fly.toml`, `railway.json`, `app.yaml`, or even `Dockerfile` base image inspection.
|
|
98
|
+
- **External APIs** — Stripe, Wix, AWS, Instagram Graph API via regex matching of raw API URLs, or via SDK imports found directly in the source code.
|
|
99
|
+
- **Strict Security** — It automatically ignores `.env` files entirely to prevent secret leakage and strictly respects the `.gitignore`.
|
|
100
|
+
|
|
96
101
|
**Version certainty:** Versions are only reported when explicitly found in manifest/lockfiles. Never assumed.
|
|
97
102
|
|
|
98
|
-
**Web grounding:** When `BRAVE_API_KEY` is set, Sylva fetches official docs for detected frameworks (version-specific when exact versions are known, latest fallback otherwise).
|
|
103
|
+
**Web grounding:** When `BRAVE_API_KEY` is set, Sylva fetches official docs for detected frameworks (version-specific when exact versions are known, latest fallback otherwise) to feed into the architecture constraints. Note: It intelligently rate-limits itself to respect the Brave Free Tier (1 req/s).
|
|
99
104
|
|
|
100
|
-
**Debug output:** `awareness.json`
|
|
105
|
+
**Debug output:** `awareness.json` and `grounding.json` are saved alongside `AGENTS.md` for full transparency.
|
|
101
106
|
|
|
102
107
|
### Environment Overrides
|
|
103
108
|
- `AUTOSKILL_MODEL`: Set this to `gemini`, `anthropic`, or `openai` to change the default execution provider globally without providing `-m` on every execution.
|
|
@@ -49,6 +49,8 @@ const BRAVE_SEARCH_URL = "https://api.search.brave.com/res/v1/web/search";
|
|
|
49
49
|
function cacheKey(query) {
|
|
50
50
|
return crypto.createHash("md5").update(query).digest("hex");
|
|
51
51
|
}
|
|
52
|
+
let lastRequestTime = 0;
|
|
53
|
+
const RATE_LIMIT_MS = 1100; // 1.1 seconds to be safe
|
|
52
54
|
/**
|
|
53
55
|
* Search Brave API for a query. Returns parsed results.
|
|
54
56
|
* Caches results to disk if cacheDir is provided.
|
|
@@ -74,6 +76,12 @@ async function braveSearch(query, options) {
|
|
|
74
76
|
}
|
|
75
77
|
try {
|
|
76
78
|
const url = `${BRAVE_SEARCH_URL}?q=${encodeURIComponent(query)}&count=5`;
|
|
79
|
+
const now = Date.now();
|
|
80
|
+
const timeSinceLast = now - lastRequestTime;
|
|
81
|
+
if (timeSinceLast < RATE_LIMIT_MS) {
|
|
82
|
+
await new Promise((resolve) => setTimeout(resolve, RATE_LIMIT_MS - timeSinceLast));
|
|
83
|
+
}
|
|
84
|
+
lastRequestTime = Date.now();
|
|
77
85
|
const response = await fetch(url, {
|
|
78
86
|
headers: {
|
|
79
87
|
Accept: "application/json",
|
|
@@ -81,6 +89,7 @@ async function braveSearch(query, options) {
|
|
|
81
89
|
"X-Subscription-Token": apiKey,
|
|
82
90
|
},
|
|
83
91
|
});
|
|
92
|
+
lastRequestTime = Date.now();
|
|
84
93
|
if (!response.ok) {
|
|
85
94
|
const statusText = response.statusText || "Unknown error";
|
|
86
95
|
const errorMsg = `HTTP ${response.status} (${statusText})`;
|
|
@@ -43,6 +43,15 @@ const ignore_1 = __importDefault(require("ignore"));
|
|
|
43
43
|
const constants_1 = require("../constants");
|
|
44
44
|
const MAX_SCAN_FILES = 200;
|
|
45
45
|
const MAX_LINES_PER_FILE = 500;
|
|
46
|
+
// Precompute known config files and docker files for fast lookup
|
|
47
|
+
const CONFIG_FILE_NAMES = new Set();
|
|
48
|
+
for (const int of constants_1.INTEGRATIONS) {
|
|
49
|
+
if (int.configPatterns) {
|
|
50
|
+
for (const pat of int.configPatterns)
|
|
51
|
+
CONFIG_FILE_NAMES.add(pat);
|
|
52
|
+
}
|
|
53
|
+
}
|
|
54
|
+
const DOCKER_FILE_NAMES = new Set(["docker-compose.yml", "docker-compose.yaml"]);
|
|
46
55
|
/**
|
|
47
56
|
* Loads .gitignore rules from the root if available
|
|
48
57
|
*/
|
|
@@ -92,7 +101,12 @@ function collectSourceFiles(dir, ig, rootPath, files = []) {
|
|
|
92
101
|
}
|
|
93
102
|
else if (entry.isFile()) {
|
|
94
103
|
const ext = path.extname(entry.name).toLowerCase();
|
|
95
|
-
|
|
104
|
+
const baseName = entry.name;
|
|
105
|
+
const lowerBaseName = baseName.toLowerCase();
|
|
106
|
+
if (constants_1.SOURCE_EXTENSIONS.includes(ext) ||
|
|
107
|
+
CONFIG_FILE_NAMES.has(baseName) ||
|
|
108
|
+
DOCKER_FILE_NAMES.has(lowerBaseName) ||
|
|
109
|
+
lowerBaseName.includes("dockerfile")) {
|
|
96
110
|
files.push(fullPath);
|
|
97
111
|
}
|
|
98
112
|
}
|
|
@@ -123,6 +137,31 @@ function scanSourceFiles(rootPath) {
|
|
|
123
137
|
const detectedIntegrationIds = new Set();
|
|
124
138
|
for (const file of targetFiles) {
|
|
125
139
|
try {
|
|
140
|
+
const fileName = path.basename(file);
|
|
141
|
+
const lowerFileName = fileName.toLowerCase();
|
|
142
|
+
const isDocker = lowerFileName.includes("dockerfile") || DOCKER_FILE_NAMES.has(lowerFileName);
|
|
143
|
+
// 1. Check exact config file matches (no file reading required)
|
|
144
|
+
for (const integration of constants_1.INTEGRATIONS) {
|
|
145
|
+
if (detectedIntegrationIds.has(integration.id))
|
|
146
|
+
continue;
|
|
147
|
+
if (integration.configPatterns?.includes(fileName)) {
|
|
148
|
+
detectedIntegrationIds.add(integration.id);
|
|
149
|
+
signals.push({
|
|
150
|
+
kind: "integration",
|
|
151
|
+
frameworkId: integration.id,
|
|
152
|
+
frameworkName: integration.name,
|
|
153
|
+
version: { certainty: "unknown", value: undefined },
|
|
154
|
+
evidence: {
|
|
155
|
+
file: path.relative(rootPath, file),
|
|
156
|
+
reason: `Source code scanner detected ${integration.name}: Found config file "${fileName}"`,
|
|
157
|
+
},
|
|
158
|
+
scope: { pathRoot: rootPath },
|
|
159
|
+
});
|
|
160
|
+
}
|
|
161
|
+
}
|
|
162
|
+
// If we found everything, stop scanning
|
|
163
|
+
if (detectedIntegrationIds.size === constants_1.INTEGRATIONS.length)
|
|
164
|
+
break;
|
|
126
165
|
// Read file and limit to N lines to keep it fast
|
|
127
166
|
const contentRaw = fs.readFileSync(file, "utf-8");
|
|
128
167
|
let content = contentRaw;
|
|
@@ -130,25 +169,35 @@ function scanSourceFiles(rootPath) {
|
|
|
130
169
|
if (lines.length > MAX_LINES_PER_FILE) {
|
|
131
170
|
content = lines.slice(0, MAX_LINES_PER_FILE).join("\n");
|
|
132
171
|
}
|
|
133
|
-
//
|
|
172
|
+
// 2. Content Checks
|
|
134
173
|
for (const integration of constants_1.INTEGRATIONS) {
|
|
135
174
|
if (detectedIntegrationIds.has(integration.id))
|
|
136
175
|
continue; // Found it already somewhere
|
|
137
176
|
let reason = "";
|
|
138
177
|
let matchedPattern = "";
|
|
139
|
-
|
|
140
|
-
|
|
141
|
-
|
|
178
|
+
if (isDocker) {
|
|
179
|
+
// Specialized Dockerfile scanning: scan the exact full content
|
|
180
|
+
matchedPattern = matchPatterns(contentRaw, integration.dockerPatterns) || "";
|
|
181
|
+
if (matchedPattern) {
|
|
182
|
+
reason = `Found deployment marker "${matchedPattern}" in ${fileName}`;
|
|
183
|
+
}
|
|
142
184
|
}
|
|
143
185
|
else {
|
|
144
|
-
|
|
186
|
+
// Standard source code checks
|
|
187
|
+
matchedPattern = matchPatterns(content, integration.urlPatterns) || "";
|
|
145
188
|
if (matchedPattern) {
|
|
146
|
-
reason = `Found
|
|
189
|
+
reason = `Found API URL pattern "${matchedPattern}"`;
|
|
147
190
|
}
|
|
148
191
|
else {
|
|
149
|
-
matchedPattern = matchPatterns(content, integration.
|
|
192
|
+
matchedPattern = matchPatterns(content, integration.importPatterns) || "";
|
|
150
193
|
if (matchedPattern) {
|
|
151
|
-
reason = `Found
|
|
194
|
+
reason = `Found SDK import pattern "${matchedPattern}"`;
|
|
195
|
+
}
|
|
196
|
+
else {
|
|
197
|
+
matchedPattern = matchPatterns(content, integration.envPatterns) || "";
|
|
198
|
+
if (matchedPattern) {
|
|
199
|
+
reason = `Found env var reference "${matchedPattern}"`;
|
|
200
|
+
}
|
|
152
201
|
}
|
|
153
202
|
}
|
|
154
203
|
}
|
package/dist/constants.d.ts
CHANGED
|
@@ -25,6 +25,8 @@ export interface IntegrationDef {
|
|
|
25
25
|
urlPatterns?: string[];
|
|
26
26
|
importPatterns?: string[];
|
|
27
27
|
envPatterns?: string[];
|
|
28
|
+
configPatterns?: string[];
|
|
29
|
+
dockerPatterns?: string[];
|
|
28
30
|
}
|
|
29
31
|
export declare const INTEGRATIONS: IntegrationDef[];
|
|
30
32
|
export declare const SOURCE_EXTENSIONS: string[];
|
package/dist/constants.js
CHANGED
|
@@ -154,41 +154,73 @@ exports.INTEGRATIONS = [
|
|
|
154
154
|
id: "aws",
|
|
155
155
|
name: "AWS",
|
|
156
156
|
importPatterns: ["boto3", "aws-sdk", "@aws-sdk"],
|
|
157
|
+
configPatterns: ["buildspec.yml", "samconfig.toml", "serverless.yml"],
|
|
157
158
|
envPatterns: ["AWS_ACCESS_KEY_ID", "AWS_SECRET_ACCESS_KEY", "AWS_REGION", "AWS_DEFAULT_REGION"],
|
|
159
|
+
dockerPatterns: ["public.ecr.aws", "amazonaws.com"],
|
|
158
160
|
},
|
|
159
161
|
{
|
|
160
162
|
id: "azure",
|
|
161
163
|
name: "Microsoft Azure",
|
|
162
164
|
importPatterns: ["azure", "@azure"],
|
|
163
165
|
urlPatterns: ["windows.net", "azure.com"],
|
|
166
|
+
configPatterns: ["azure-pipelines.yml"],
|
|
164
167
|
envPatterns: ["AZURE_CLIENT_ID", "AZURE_TENANT_ID"],
|
|
168
|
+
dockerPatterns: ["mcr.microsoft.com", "azurecr.io"],
|
|
165
169
|
},
|
|
166
170
|
{
|
|
167
171
|
id: "gcp",
|
|
168
172
|
name: "Google Cloud",
|
|
169
173
|
importPatterns: ["google-cloud", "@google-cloud"],
|
|
170
174
|
urlPatterns: ["googleapis.com"],
|
|
175
|
+
configPatterns: ["app.yaml", "cloudbuild.yaml"],
|
|
171
176
|
envPatterns: ["GOOGLE_APPLICATION_CREDENTIALS"],
|
|
177
|
+
dockerPatterns: ["gcr.io", "pkg.dev"],
|
|
172
178
|
},
|
|
173
179
|
// Hosting & Edge
|
|
174
180
|
{
|
|
175
181
|
id: "vercel",
|
|
176
182
|
name: "Vercel",
|
|
177
183
|
importPatterns: ["@vercel"],
|
|
184
|
+
configPatterns: ["vercel.json"],
|
|
178
185
|
envPatterns: ["VERCEL_URL", "VERCEL_PROJECT_ID"],
|
|
179
186
|
},
|
|
180
187
|
{
|
|
181
188
|
id: "netlify",
|
|
182
189
|
name: "Netlify",
|
|
183
190
|
importPatterns: ["@netlify"],
|
|
184
|
-
|
|
191
|
+
configPatterns: ["netlify.toml"],
|
|
192
|
+
envPatterns: ["NETLIFY", "NETLIFY_AUTH_TOKEN"], // Removed generic URL
|
|
185
193
|
},
|
|
186
194
|
{
|
|
187
195
|
id: "cloudflare",
|
|
188
196
|
name: "Cloudflare",
|
|
189
197
|
importPatterns: ["cloudflare", "@cloudflare"],
|
|
198
|
+
configPatterns: ["wrangler.toml"],
|
|
190
199
|
envPatterns: ["CLOUDFLARE_API_TOKEN", "CLOUDFLARE_ACCOUNT_ID"],
|
|
191
200
|
},
|
|
201
|
+
{
|
|
202
|
+
id: "flyio",
|
|
203
|
+
name: "Fly.io",
|
|
204
|
+
configPatterns: ["fly.toml"],
|
|
205
|
+
dockerPatterns: ["flyio", "flyctl"],
|
|
206
|
+
},
|
|
207
|
+
{
|
|
208
|
+
id: "railway",
|
|
209
|
+
name: "Railway",
|
|
210
|
+
configPatterns: ["railway.json", "railway.toml"],
|
|
211
|
+
dockerPatterns: ["railwayapp"],
|
|
212
|
+
},
|
|
213
|
+
{
|
|
214
|
+
id: "render",
|
|
215
|
+
name: "Render",
|
|
216
|
+
configPatterns: ["render.yaml"],
|
|
217
|
+
},
|
|
218
|
+
{
|
|
219
|
+
id: "digitalocean",
|
|
220
|
+
name: "DigitalOcean",
|
|
221
|
+
configPatterns: ["app-spec.yaml", "digitalocean.yaml"],
|
|
222
|
+
dockerPatterns: ["digitalocean"],
|
|
223
|
+
},
|
|
192
224
|
// Firebase
|
|
193
225
|
{
|
|
194
226
|
id: "firebase",
|
package/dist/prompts.js
CHANGED
|
@@ -8,7 +8,7 @@ exports.CODEBASE_ANALYSIS_SIGNATURE = (0, ax_1.f)()
|
|
|
8
8
|
.output("projectOverview", ax_1.f.string("Project Overview & Context: Exhaustively describe all primary sub-services, their purpose, and what languages or frameworks power them."))
|
|
9
9
|
.output("agentPersona", ax_1.f.string("Agent Persona / Role"))
|
|
10
10
|
.output("techStack", ax_1.f.string("Tech Stack & Versions: List EVERY distinct language, library, database, and external API dependency used. WARNING: Do NOT guess frameworks based on assumptions or the presence of a package.json. You must explicitly scan the actual code files (e.g. imports in .py, .ts, .js) and dependency manifests (e.g. requirements.txt, go.mod) to determine the EXACT tech stack."))
|
|
11
|
-
.output("directoryStructure", ax_1.f.string("Directory Structure (The Map): Deeply map out all root folders,
|
|
11
|
+
.output("directoryStructure", ax_1.f.string("Directory Structure (The Map): Deeply map out all root folders. CRITICAL: You MUST strictly represent the EXACT physical file and directory structure provided in the source context. Do NOT invent, extrapolate, or hallucinate conceptual directories (like 'client/' or 'server/') if they do not physically exist on disk. If you see scripts like 'build:client' or 'build:server' in a package.json, interpret them as Build Output pipelines (like SSR/SSG), NOT as physical source directories unless those directories literally exist."))
|
|
12
12
|
.output("executionCommands", ax_1.f.string("Execution Commands: Exact terminal commands to run or build."))
|
|
13
13
|
.output("codeStyleAndFormatting", ax_1.f.string("Code Style & Formatting: Language-specific formatting and strictly enforced linting rules."))
|
|
14
14
|
.output("architectureAndDesignPatterns", ax_1.f.string("Architecture & Design Patterns: Detailed cross-service logical flow, API boundaries, and system design logic."))
|
|
@@ -32,7 +32,7 @@ exports.COMPILE_CONVENTIONS_SIGNATURE = (0, ax_1.f)()
|
|
|
32
32
|
.input("projectOverview", ax_1.f.string("Project Overview & Context."))
|
|
33
33
|
.input("agentPersona", ax_1.f.string("Agent Persona / Role."))
|
|
34
34
|
.input("techStack", ax_1.f.string("Tech Stack & Versions."))
|
|
35
|
-
.input("directoryStructure", ax_1.f.string("Directory Structure (The Map)."))
|
|
35
|
+
.input("directoryStructure", ax_1.f.string("Directory Structure (The Map). CRITICAL: Maintain the exact PHYSICAL file and directory structure produced by the analyzer. Do NOT invent conceptual boundaries (like client/ or server/) if they are not in the provided map."))
|
|
36
36
|
.input("executionCommands", ax_1.f.string("Execution Commands."))
|
|
37
37
|
.input("codeStyleAndFormatting", ax_1.f.string("Code Style & Formatting."))
|
|
38
38
|
.input("architectureAndDesignPatterns", ax_1.f.string("Architecture & Design Patterns."))
|
|
@@ -56,7 +56,7 @@ exports.EXTRACT_AGENTS_SECTIONS_SIGNATURE = (0, ax_1.f)()
|
|
|
56
56
|
.input("awarenessContext", ax_1.f.string("Deterministically detected framework/architecture constraints. AUTHORITATIVE — do not contradict. If OpenClaw orchestrator is present, include OpenClaw Runtime section. Show version certainty for each framework. Include Framework References when web refs are available."))
|
|
57
57
|
.output("projectOverview", ax_1.f.string("Comprehensive description of the project: what it does, its tech stack, its primary languages, and its overall purpose and functionality."))
|
|
58
58
|
.output("techStack", ax_1.f.string("Explicit and exhaustive list of supported languages, frameworks, UI libraries, backend runtimes, and tools used in the repository. Annotate what each technology is used for (e.g., 'X Framework for UI', 'Y Language for REST Services')."))
|
|
59
|
-
.output("architecture", ax_1.f.string("Deep mapping of where things live: directory layout, key modules, entry points, and their responsibilities. You MUST generate an ASCII diagram showing the architecture
|
|
59
|
+
.output("architecture", ax_1.f.string("Deep mapping of where things live: directory layout, key modules, entry points, and their responsibilities. You MUST generate an ASCII diagram showing the strict PHYSICAL file architecture. CRITICAL: Do NOT invent conceptual directories like 'client/' or 'server/' if they do not exist on disk. Rely solely on the provided conventions Markdown. Build targets (like SSR/SSG scripts) are behaviors, not physical source folders."))
|
|
60
60
|
.output("codeStyle", ax_1.f.string("Granular coding standards observed: language version, formatting, naming conventions, import ordering, type-hinting rules, preferred patterns vs anti-patterns. Explicitly mention how different stacks in a monorepo communicate (e.g., REST, GraphQL, etc.) and how proprietary or 3rd-party external APIs are wrapped or invoked. Provide concrete examples from the codebase. All code blocks must be properly opened AND closed with triple backticks."))
|
|
61
61
|
.output("antiPatternsAndRestrictions", ax_1.f.string("Specific anti-patterns and 'NEVER do this' rules the AI must strictly avoid."))
|
|
62
62
|
.output("databaseAndState", ax_1.f.string("Guidelines on how data and state should flow through the application, including databases, external API data syncing, or state managers."))
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@thelogicatelier/sylva",
|
|
3
|
-
"version": "1.0.
|
|
3
|
+
"version": "1.0.15",
|
|
4
4
|
"description": "Auto-generate AGENTS.md for your repository using Ax-LLM. Analyze the structural backbone, data flow, and day-to-day coding conventions natively.",
|
|
5
5
|
"main": "dist/index.js",
|
|
6
6
|
"bin": {
|
|
@@ -64,4 +64,4 @@
|
|
|
64
64
|
"prettier": "^3.8.1",
|
|
65
65
|
"typescript-eslint": "^8.56.1"
|
|
66
66
|
}
|
|
67
|
-
}
|
|
67
|
+
}
|