@staff0rd/assist 0.10.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +44 -0
- package/dist/commands/deploy/build.yml +25 -0
- package/dist/commands/deploy/init.ts +104 -0
- package/dist/commands/enable-ralph/index.ts +83 -0
- package/dist/commands/enable-ralph/settings.local.json +8 -0
- package/dist/commands/lint/biome.linter.json +57 -0
- package/dist/commands/lint/init.ts +58 -0
- package/dist/commands/lint/lint.ts +12 -0
- package/dist/commands/lint/runFileNameCheck.ts +69 -0
- package/dist/commands/lint/runStaticImportCheck.ts +64 -0
- package/dist/index.js +2003 -0
- package/package.json +49 -0
package/README.md
ADDED
|
@@ -0,0 +1,44 @@
|
|
|
1
|
+
# assist
|
|
2
|
+
|
|
3
|
+
A CLI tool for enforcing determinism in LLM development workflow automation.
|
|
4
|
+
|
|
5
|
+
## Installation
|
|
6
|
+
|
|
7
|
+
```bash
|
|
8
|
+
# Clone the repository
|
|
9
|
+
git clone git@github.com:staff0rd/assist.git
|
|
10
|
+
cd assist
|
|
11
|
+
|
|
12
|
+
# Install dependencies
|
|
13
|
+
npm install
|
|
14
|
+
|
|
15
|
+
# Build the project
|
|
16
|
+
npm run build
|
|
17
|
+
|
|
18
|
+
# Install globally
|
|
19
|
+
npm install -g .
|
|
20
|
+
```
|
|
21
|
+
|
|
22
|
+
After installation, the `assist` command will be available globally.
|
|
23
|
+
|
|
24
|
+
## Commands
|
|
25
|
+
|
|
26
|
+
- `assist init` - Initialize project with VS Code and verify configurations
|
|
27
|
+
- `assist new` - Initialize a new Vite React TypeScript project
|
|
28
|
+
- `assist sync` - Copy command files to `~/.claude/commands`
|
|
29
|
+
- `assist commit <message>` - Create a git commit with validation
|
|
30
|
+
- `assist update` - Update claude-code to the latest version
|
|
31
|
+
- `assist verify` - Run all verify:* scripts from package.json in parallel
|
|
32
|
+
- `assist verify init` - Add verify scripts to a project
|
|
33
|
+
- `assist verify hardcoded-colors` - Check for hardcoded hex colors in src/
|
|
34
|
+
- `assist lint` - Run lint checks for conventions not enforced by biomejs
|
|
35
|
+
- `assist lint init` - Initialize Biome with standard linter config
|
|
36
|
+
- `assist refactor check [pattern]` - Check for files that exceed the maximum line count
|
|
37
|
+
- `assist refactor ignore <file>` - Add a file to the refactor ignore list
|
|
38
|
+
- `assist devlog list` - Group git commits by date
|
|
39
|
+
- `assist devlog next` - Show commits for the day after the last versioned entry
|
|
40
|
+
- `assist devlog skip <date>` - Add a date to the skip list
|
|
41
|
+
- `assist devlog version` - Show current repo name and version info
|
|
42
|
+
- `assist vscode init` - Add VS Code configuration files
|
|
43
|
+
- `assist deploy init` - Initialize Netlify project and configure deployment
|
|
44
|
+
|
|
@@ -0,0 +1,25 @@
|
|
|
1
|
+
# GitHub Actions workflow template used by `assist deploy init`.
# {{NETLIFY_SITE_ID}} is replaced with the real Netlify site ID when the
# template is rendered (see deploy/init.ts); NETLIFY_AUTH_TOKEN must be
# added as a repository secret manually.
name: Build and Deploy to Netlify
on:
  push:
    branches:
      - main
jobs:
  build_and_deploy:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
        name: Checkout

      - uses: actions/setup-node@v4
        name: Setup Node.js
        with:
          node-version: "22"

      # npm ci gives reproducible installs from package-lock.json
      - run: npm ci
        name: Install dependencies

      - run: npm run build
        name: Build project

      # --no-build: the build already ran above; deploy dist/ as-is
      - run: npx netlify-cli deploy --no-build --dir=dist --prod -s {{NETLIFY_SITE_ID}} --auth ${{ secrets.NETLIFY_AUTH_TOKEN }} --message "Deployed commit ${{ github.sha }}"
        name: Deploy to Netlify
|
|
@@ -0,0 +1,104 @@
|
|
|
1
|
+
import { execSync } from "node:child_process";
|
|
2
|
+
import { existsSync, mkdirSync, readFileSync, writeFileSync } from "node:fs";
|
|
3
|
+
import { dirname, join } from "node:path";
|
|
4
|
+
import { fileURLToPath } from "node:url";
|
|
5
|
+
import chalk from "chalk";
|
|
6
|
+
import enquirer from "enquirer";
|
|
7
|
+
import { printDiff } from "../../utils/printDiff";
|
|
8
|
+
|
|
9
|
+
const WORKFLOW_PATH = ".github/workflows/build.yml";
|
|
10
|
+
const __dirname = dirname(fileURLToPath(import.meta.url));
|
|
11
|
+
|
|
12
|
+
function getExistingSiteId(): string | null {
|
|
13
|
+
if (!existsSync(WORKFLOW_PATH)) {
|
|
14
|
+
return null;
|
|
15
|
+
}
|
|
16
|
+
const content = readFileSync(WORKFLOW_PATH, "utf-8");
|
|
17
|
+
const match = content.match(/-s\s+([a-f0-9-]{36})/);
|
|
18
|
+
return match ? match[1] : null;
|
|
19
|
+
}
|
|
20
|
+
|
|
21
|
+
function getTemplateContent(siteId: string): string {
|
|
22
|
+
const templatePath = join(__dirname, "commands/deploy/build.yml");
|
|
23
|
+
const template = readFileSync(templatePath, "utf-8");
|
|
24
|
+
return template.replace("{{NETLIFY_SITE_ID}}", siteId);
|
|
25
|
+
}
|
|
26
|
+
|
|
27
|
+
async function updateWorkflow(siteId: string): Promise<void> {
|
|
28
|
+
const newContent = getTemplateContent(siteId);
|
|
29
|
+
|
|
30
|
+
const workflowDir = ".github/workflows";
|
|
31
|
+
if (!existsSync(workflowDir)) {
|
|
32
|
+
mkdirSync(workflowDir, { recursive: true });
|
|
33
|
+
}
|
|
34
|
+
|
|
35
|
+
if (existsSync(WORKFLOW_PATH)) {
|
|
36
|
+
const oldContent = readFileSync(WORKFLOW_PATH, "utf-8");
|
|
37
|
+
|
|
38
|
+
if (oldContent === newContent) {
|
|
39
|
+
console.log(chalk.green("build.yml is already up to date"));
|
|
40
|
+
return;
|
|
41
|
+
}
|
|
42
|
+
|
|
43
|
+
console.log(chalk.yellow("\nbuild.yml will be updated:"));
|
|
44
|
+
console.log();
|
|
45
|
+
printDiff(oldContent, newContent);
|
|
46
|
+
|
|
47
|
+
const { confirm } = await enquirer.prompt<{ confirm: boolean }>({
|
|
48
|
+
type: "confirm",
|
|
49
|
+
name: "confirm",
|
|
50
|
+
message: chalk.red("Update build.yml?"),
|
|
51
|
+
initial: true,
|
|
52
|
+
});
|
|
53
|
+
|
|
54
|
+
if (!confirm) {
|
|
55
|
+
console.log("Skipped build.yml update");
|
|
56
|
+
return;
|
|
57
|
+
}
|
|
58
|
+
}
|
|
59
|
+
|
|
60
|
+
writeFileSync(WORKFLOW_PATH, newContent);
|
|
61
|
+
console.log(chalk.green(`\nCreated ${WORKFLOW_PATH}`));
|
|
62
|
+
}
|
|
63
|
+
|
|
64
|
+
export async function init(): Promise<void> {
|
|
65
|
+
console.log(chalk.bold("Initializing Netlify deployment...\n"));
|
|
66
|
+
|
|
67
|
+
const existingSiteId = getExistingSiteId();
|
|
68
|
+
|
|
69
|
+
if (existingSiteId) {
|
|
70
|
+
console.log(chalk.dim(`Using existing site ID: ${existingSiteId}\n`));
|
|
71
|
+
await updateWorkflow(existingSiteId);
|
|
72
|
+
return;
|
|
73
|
+
}
|
|
74
|
+
|
|
75
|
+
console.log("Creating Netlify site...\n");
|
|
76
|
+
execSync("netlify sites:create --disable-linking", {
|
|
77
|
+
stdio: "inherit",
|
|
78
|
+
});
|
|
79
|
+
|
|
80
|
+
const { siteId } = await enquirer.prompt<{ siteId: string }>({
|
|
81
|
+
type: "input",
|
|
82
|
+
name: "siteId",
|
|
83
|
+
message: "Enter the Site ID from above:",
|
|
84
|
+
validate: (value) =>
|
|
85
|
+
/^[a-f0-9-]+$/i.test(value) || "Invalid site ID format",
|
|
86
|
+
});
|
|
87
|
+
|
|
88
|
+
await updateWorkflow(siteId);
|
|
89
|
+
|
|
90
|
+
console.log(chalk.bold("\nDeployment initialized successfully!"));
|
|
91
|
+
console.log(
|
|
92
|
+
chalk.yellow("\nTo complete setup, create a personal access token at:"),
|
|
93
|
+
);
|
|
94
|
+
console.log(
|
|
95
|
+
chalk.cyan(
|
|
96
|
+
"https://app.netlify.com/user/applications#personal-access-tokens",
|
|
97
|
+
),
|
|
98
|
+
);
|
|
99
|
+
console.log(
|
|
100
|
+
chalk.yellow(
|
|
101
|
+
"\nThen add it as NETLIFY_AUTH_TOKEN in your GitHub repository secrets.",
|
|
102
|
+
),
|
|
103
|
+
);
|
|
104
|
+
}
|
|
@@ -0,0 +1,83 @@
|
|
|
1
|
+
import * as fs from "node:fs";
|
|
2
|
+
import * as path from "node:path";
|
|
3
|
+
import { fileURLToPath } from "node:url";
|
|
4
|
+
import chalk from "chalk";
|
|
5
|
+
import enquirer from "enquirer";
|
|
6
|
+
import { printDiff } from "../../utils/printDiff";
|
|
7
|
+
|
|
8
|
+
const __dirname = path.dirname(fileURLToPath(import.meta.url));
|
|
9
|
+
|
|
10
|
+
function deepMerge(
|
|
11
|
+
target: Record<string, unknown>,
|
|
12
|
+
source: Record<string, unknown>,
|
|
13
|
+
): Record<string, unknown> {
|
|
14
|
+
const result = { ...target };
|
|
15
|
+
for (const key of Object.keys(source)) {
|
|
16
|
+
const sourceVal = source[key];
|
|
17
|
+
const targetVal = result[key];
|
|
18
|
+
if (
|
|
19
|
+
sourceVal &&
|
|
20
|
+
typeof sourceVal === "object" &&
|
|
21
|
+
!Array.isArray(sourceVal) &&
|
|
22
|
+
targetVal &&
|
|
23
|
+
typeof targetVal === "object" &&
|
|
24
|
+
!Array.isArray(targetVal)
|
|
25
|
+
) {
|
|
26
|
+
result[key] = deepMerge(
|
|
27
|
+
targetVal as Record<string, unknown>,
|
|
28
|
+
sourceVal as Record<string, unknown>,
|
|
29
|
+
);
|
|
30
|
+
} else {
|
|
31
|
+
result[key] = sourceVal;
|
|
32
|
+
}
|
|
33
|
+
}
|
|
34
|
+
return result;
|
|
35
|
+
}
|
|
36
|
+
|
|
37
|
+
export async function enableRalph(): Promise<void> {
|
|
38
|
+
const sourcePath = path.join(
|
|
39
|
+
__dirname,
|
|
40
|
+
"commands/enable-ralph/settings.local.json",
|
|
41
|
+
);
|
|
42
|
+
const targetPath = path.join(process.cwd(), ".claude/settings.local.json");
|
|
43
|
+
const sourceData = JSON.parse(fs.readFileSync(sourcePath, "utf-8"));
|
|
44
|
+
|
|
45
|
+
const targetDir = path.dirname(targetPath);
|
|
46
|
+
if (!fs.existsSync(targetDir)) {
|
|
47
|
+
fs.mkdirSync(targetDir, { recursive: true });
|
|
48
|
+
}
|
|
49
|
+
|
|
50
|
+
let targetData: Record<string, unknown> = {};
|
|
51
|
+
let targetContent = "{}";
|
|
52
|
+
if (fs.existsSync(targetPath)) {
|
|
53
|
+
targetContent = fs.readFileSync(targetPath, "utf-8");
|
|
54
|
+
targetData = JSON.parse(targetContent);
|
|
55
|
+
}
|
|
56
|
+
|
|
57
|
+
const merged = deepMerge(targetData, sourceData);
|
|
58
|
+
const mergedContent = JSON.stringify(merged, null, "\t") + "\n";
|
|
59
|
+
|
|
60
|
+
if (mergedContent === targetContent) {
|
|
61
|
+
console.log(chalk.green("settings.local.json already has ralph enabled"));
|
|
62
|
+
return;
|
|
63
|
+
}
|
|
64
|
+
|
|
65
|
+
console.log(chalk.yellow("\nChanges to settings.local.json:"));
|
|
66
|
+
console.log();
|
|
67
|
+
printDiff(targetContent, mergedContent);
|
|
68
|
+
|
|
69
|
+
const { confirm } = await enquirer.prompt<{ confirm: boolean }>({
|
|
70
|
+
type: "confirm",
|
|
71
|
+
name: "confirm",
|
|
72
|
+
message: "Apply these changes?",
|
|
73
|
+
initial: true,
|
|
74
|
+
});
|
|
75
|
+
|
|
76
|
+
if (!confirm) {
|
|
77
|
+
console.log("Skipped");
|
|
78
|
+
return;
|
|
79
|
+
}
|
|
80
|
+
|
|
81
|
+
fs.writeFileSync(targetPath, mergedContent);
|
|
82
|
+
console.log(`Updated ${targetPath}`);
|
|
83
|
+
}
|
|
@@ -0,0 +1,57 @@
|
|
|
1
|
+
{
|
|
2
|
+
"overrides": [
|
|
3
|
+
{
|
|
4
|
+
"includes": ["**/constants.ts"],
|
|
5
|
+
"linter": {
|
|
6
|
+
"rules": {
|
|
7
|
+
"style": {
|
|
8
|
+
"useFilenamingConvention": {
|
|
9
|
+
"level": "error",
|
|
10
|
+
"options": {
|
|
11
|
+
"filenameCases": ["kebab-case"],
|
|
12
|
+
"requireAscii": true
|
|
13
|
+
}
|
|
14
|
+
}
|
|
15
|
+
}
|
|
16
|
+
}
|
|
17
|
+
}
|
|
18
|
+
},
|
|
19
|
+
{
|
|
20
|
+
"includes": ["**/*.test.ts"],
|
|
21
|
+
"linter": {
|
|
22
|
+
"rules": {
|
|
23
|
+
"style": {
|
|
24
|
+
"useFilenamingConvention": "off"
|
|
25
|
+
}
|
|
26
|
+
}
|
|
27
|
+
}
|
|
28
|
+
}
|
|
29
|
+
],
|
|
30
|
+
"linter": {
|
|
31
|
+
"enabled": true,
|
|
32
|
+
"rules": {
|
|
33
|
+
"recommended": true,
|
|
34
|
+
"style": {
|
|
35
|
+
"useFilenamingConvention": {
|
|
36
|
+
"level": "error",
|
|
37
|
+
"options": {
|
|
38
|
+
"filenameCases": ["export"],
|
|
39
|
+
"requireAscii": true
|
|
40
|
+
}
|
|
41
|
+
},
|
|
42
|
+
"useConsistentTypeDefinitions": {
|
|
43
|
+
"level": "error",
|
|
44
|
+
"options": {
|
|
45
|
+
"style": "type"
|
|
46
|
+
}
|
|
47
|
+
},
|
|
48
|
+
"useConsistentMemberAccessibility": {
|
|
49
|
+
"level": "error",
|
|
50
|
+
"options": {
|
|
51
|
+
"accessibility": "noPublic"
|
|
52
|
+
}
|
|
53
|
+
}
|
|
54
|
+
}
|
|
55
|
+
}
|
|
56
|
+
}
|
|
57
|
+
}
|
|
@@ -0,0 +1,58 @@
|
|
|
1
|
+
import { execSync } from "node:child_process";
|
|
2
|
+
import { existsSync, readFileSync, writeFileSync } from "node:fs";
|
|
3
|
+
import { dirname, join } from "node:path";
|
|
4
|
+
import { fileURLToPath } from "node:url";
|
|
5
|
+
import chalk from "chalk";
|
|
6
|
+
import enquirer from "enquirer";
|
|
7
|
+
import { printDiff } from "../../utils/printDiff";
|
|
8
|
+
|
|
9
|
+
const __dirname = dirname(fileURLToPath(import.meta.url));
|
|
10
|
+
|
|
11
|
+
export async function init(): Promise<void> {
|
|
12
|
+
const biomeConfigPath = "biome.json";
|
|
13
|
+
|
|
14
|
+
if (!existsSync(biomeConfigPath)) {
|
|
15
|
+
console.log("Initializing Biome...");
|
|
16
|
+
execSync("npx @biomejs/biome init", { stdio: "inherit" });
|
|
17
|
+
}
|
|
18
|
+
|
|
19
|
+
if (!existsSync(biomeConfigPath)) {
|
|
20
|
+
console.log("No biome.json found, skipping linter config");
|
|
21
|
+
return;
|
|
22
|
+
}
|
|
23
|
+
|
|
24
|
+
const linterConfigPath = join(__dirname, "commands/lint/biome.linter.json");
|
|
25
|
+
const linterConfig = JSON.parse(readFileSync(linterConfigPath, "utf-8"));
|
|
26
|
+
const biomeConfig = JSON.parse(readFileSync(biomeConfigPath, "utf-8"));
|
|
27
|
+
|
|
28
|
+
const oldContent = `${JSON.stringify(biomeConfig, null, 2)}\n`;
|
|
29
|
+
biomeConfig.linter = linterConfig.linter;
|
|
30
|
+
if (linterConfig.overrides) {
|
|
31
|
+
biomeConfig.overrides = linterConfig.overrides;
|
|
32
|
+
}
|
|
33
|
+
const newContent = `${JSON.stringify(biomeConfig, null, 2)}\n`;
|
|
34
|
+
|
|
35
|
+
if (oldContent === newContent) {
|
|
36
|
+
console.log("biome.json already has the correct linter config");
|
|
37
|
+
return;
|
|
38
|
+
}
|
|
39
|
+
|
|
40
|
+
console.log(chalk.yellow("\n⚠️ biome.json will be updated:"));
|
|
41
|
+
console.log();
|
|
42
|
+
printDiff(oldContent, newContent);
|
|
43
|
+
|
|
44
|
+
const { confirm } = await enquirer.prompt<{ confirm: boolean }>({
|
|
45
|
+
type: "confirm",
|
|
46
|
+
name: "confirm",
|
|
47
|
+
message: chalk.red("Update biome.json?"),
|
|
48
|
+
initial: true,
|
|
49
|
+
});
|
|
50
|
+
|
|
51
|
+
if (!confirm) {
|
|
52
|
+
console.log("Skipped biome.json update");
|
|
53
|
+
return;
|
|
54
|
+
}
|
|
55
|
+
|
|
56
|
+
writeFileSync(biomeConfigPath, newContent);
|
|
57
|
+
console.log("Updated biome.json with linter config");
|
|
58
|
+
}
|
|
@@ -0,0 +1,12 @@
|
|
|
1
|
+
// Implements conventions not enforced by biomejs
|
|
2
|
+
import { runFileNameCheck } from "./runFileNameCheck";
|
|
3
|
+
import { runStaticImportCheck } from "./runStaticImportCheck";
|
|
4
|
+
|
|
5
|
+
export function lint(): void {
|
|
6
|
+
const fileNamePassed = runFileNameCheck();
|
|
7
|
+
const staticImportPassed = runStaticImportCheck();
|
|
8
|
+
|
|
9
|
+
if (!fileNamePassed || !staticImportPassed) {
|
|
10
|
+
process.exit(1);
|
|
11
|
+
}
|
|
12
|
+
}
|
|
@@ -0,0 +1,69 @@
|
|
|
1
|
+
import fs from "node:fs";
|
|
2
|
+
import path from "node:path";
|
|
3
|
+
import chalk from "chalk";
|
|
4
|
+
import { findSourceFiles } from "../../shared/findSourceFiles";
|
|
5
|
+
|
|
6
|
+
type FileNameViolation = {
|
|
7
|
+
filePath: string;
|
|
8
|
+
fileName: string;
|
|
9
|
+
};
|
|
10
|
+
|
|
11
|
+
function hasClassOrComponent(content: string): boolean {
|
|
12
|
+
const classPattern = /^(export\s+)?(abstract\s+)?class\s+\w+/m;
|
|
13
|
+
const functionComponentPattern =
|
|
14
|
+
/^(export\s+)?(default\s+)?function\s+[A-Z]\w*\s*\(/m;
|
|
15
|
+
const arrowComponentPattern = /^(export\s+)?(const|let)\s+[A-Z]\w*\s*=.*=>/m;
|
|
16
|
+
|
|
17
|
+
return (
|
|
18
|
+
classPattern.test(content) ||
|
|
19
|
+
functionComponentPattern.test(content) ||
|
|
20
|
+
arrowComponentPattern.test(content)
|
|
21
|
+
);
|
|
22
|
+
}
|
|
23
|
+
|
|
24
|
+
function checkFileNames(): FileNameViolation[] {
|
|
25
|
+
const sourceFiles = findSourceFiles("src");
|
|
26
|
+
const violations: FileNameViolation[] = [];
|
|
27
|
+
|
|
28
|
+
for (const filePath of sourceFiles) {
|
|
29
|
+
const fileName = path.basename(filePath);
|
|
30
|
+
const nameWithoutExt = fileName.replace(/\.(ts|tsx)$/, "");
|
|
31
|
+
|
|
32
|
+
if (/^[A-Z]/.test(nameWithoutExt)) {
|
|
33
|
+
const content = fs.readFileSync(filePath, "utf-8");
|
|
34
|
+
if (!hasClassOrComponent(content)) {
|
|
35
|
+
violations.push({ filePath, fileName });
|
|
36
|
+
}
|
|
37
|
+
}
|
|
38
|
+
}
|
|
39
|
+
|
|
40
|
+
return violations;
|
|
41
|
+
}
|
|
42
|
+
|
|
43
|
+
export function runFileNameCheck(): boolean {
|
|
44
|
+
const violations = checkFileNames();
|
|
45
|
+
if (violations.length > 0) {
|
|
46
|
+
console.error(chalk.red("\nFile name check failed:\n"));
|
|
47
|
+
console.error(
|
|
48
|
+
chalk.red(
|
|
49
|
+
" Files without classes or React components should not start with a capital letter.\n",
|
|
50
|
+
),
|
|
51
|
+
);
|
|
52
|
+
for (const violation of violations) {
|
|
53
|
+
console.error(chalk.red(` ${violation.filePath}`));
|
|
54
|
+
console.error(
|
|
55
|
+
chalk.gray(
|
|
56
|
+
` Rename to: ${violation.fileName.charAt(0).toLowerCase()}${violation.fileName.slice(1)}\n`,
|
|
57
|
+
),
|
|
58
|
+
);
|
|
59
|
+
}
|
|
60
|
+
return false;
|
|
61
|
+
}
|
|
62
|
+
|
|
63
|
+
if (!process.env.CLAUDECODE) {
|
|
64
|
+
console.log(
|
|
65
|
+
"File name check passed. All PascalCase files contain classes or components.",
|
|
66
|
+
);
|
|
67
|
+
}
|
|
68
|
+
return true;
|
|
69
|
+
}
|
|
@@ -0,0 +1,64 @@
|
|
|
1
|
+
import fs from "node:fs";
|
|
2
|
+
import chalk from "chalk";
|
|
3
|
+
import { findSourceFiles } from "../../shared/findSourceFiles";
|
|
4
|
+
|
|
5
|
+
type ImportViolation = {
|
|
6
|
+
filePath: string;
|
|
7
|
+
line: number;
|
|
8
|
+
content: string;
|
|
9
|
+
};
|
|
10
|
+
|
|
11
|
+
function checkForDynamicImports(filePath: string): ImportViolation[] {
|
|
12
|
+
const content = fs.readFileSync(filePath, "utf-8");
|
|
13
|
+
const lines = content.split("\n");
|
|
14
|
+
const violations: ImportViolation[] = [];
|
|
15
|
+
|
|
16
|
+
const requirePattern = /\brequire\s*\(/;
|
|
17
|
+
const dynamicImportPattern = /\bimport\s*\(/;
|
|
18
|
+
|
|
19
|
+
for (let i = 0; i < lines.length; i++) {
|
|
20
|
+
const line = lines[i];
|
|
21
|
+
if (requirePattern.test(line) || dynamicImportPattern.test(line)) {
|
|
22
|
+
violations.push({
|
|
23
|
+
filePath,
|
|
24
|
+
line: i + 1,
|
|
25
|
+
content: line.trim(),
|
|
26
|
+
});
|
|
27
|
+
}
|
|
28
|
+
}
|
|
29
|
+
|
|
30
|
+
return violations;
|
|
31
|
+
}
|
|
32
|
+
|
|
33
|
+
function checkStaticImports(): ImportViolation[] {
|
|
34
|
+
const sourceFiles = findSourceFiles("src");
|
|
35
|
+
const violations: ImportViolation[] = [];
|
|
36
|
+
|
|
37
|
+
for (const filePath of sourceFiles) {
|
|
38
|
+
violations.push(...checkForDynamicImports(filePath));
|
|
39
|
+
}
|
|
40
|
+
|
|
41
|
+
return violations;
|
|
42
|
+
}
|
|
43
|
+
|
|
44
|
+
export function runStaticImportCheck(): boolean {
|
|
45
|
+
const violations = checkStaticImports();
|
|
46
|
+
if (violations.length > 0) {
|
|
47
|
+
console.error(chalk.red("\nStatic import check failed:\n"));
|
|
48
|
+
console.error(
|
|
49
|
+
chalk.red(
|
|
50
|
+
" Dynamic imports (require() and import()) are not allowed. Use static imports instead.\n",
|
|
51
|
+
),
|
|
52
|
+
);
|
|
53
|
+
for (const violation of violations) {
|
|
54
|
+
console.error(chalk.red(` ${violation.filePath}:${violation.line}`));
|
|
55
|
+
console.error(chalk.gray(` ${violation.content}\n`));
|
|
56
|
+
}
|
|
57
|
+
return false;
|
|
58
|
+
}
|
|
59
|
+
|
|
60
|
+
if (!process.env.CLAUDECODE) {
|
|
61
|
+
console.log("Static import check passed. No dynamic imports found.");
|
|
62
|
+
}
|
|
63
|
+
return true;
|
|
64
|
+
}
|