@standards-kit/conform 0.1.0 → 0.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +143 -0
- package/dist/{chunk-P7TIZJ4C.js → chunk-DXIYZR62.js} +2 -2
- package/dist/chunk-DXIYZR62.js.map +1 -0
- package/dist/{chunk-RXA4FO7L.js → chunk-NADY2H35.js} +12 -8
- package/dist/chunk-NADY2H35.js.map +1 -0
- package/dist/chunk-O745CMWG.js +29 -0
- package/dist/chunk-O745CMWG.js.map +1 -0
- package/dist/chunk-RHM53NLG.js +49 -0
- package/dist/chunk-RHM53NLG.js.map +1 -0
- package/dist/{chunk-KHO6NIAI.js → chunk-YGDEM6K5.js} +24 -10
- package/dist/chunk-YGDEM6K5.js.map +1 -0
- package/dist/cli.d.ts +2 -0
- package/dist/cli.js +49 -38
- package/dist/cli.js.map +1 -1
- package/dist/{cloudwatch-KSZ4A256.js → cloudwatch-3LTDYG6G.js} +6 -10
- package/dist/cloudwatch-3LTDYG6G.js.map +1 -0
- package/dist/code/index.d.ts +11 -0
- package/dist/code/tools/base.d.ts +51 -0
- package/dist/code/tools/comment-utils.d.ts +17 -0
- package/dist/code/tools/coverage-run.d.ts +37 -0
- package/dist/code/tools/disable-comments.d.ts +42 -0
- package/dist/code/tools/eslint.d.ts +99 -0
- package/dist/code/tools/gitleaks.d.ts +42 -0
- package/dist/code/tools/index.d.ts +13 -0
- package/dist/code/tools/knip.d.ts +20 -0
- package/dist/code/tools/naming.d.ts +64 -0
- package/dist/code/tools/pipaudit.d.ts +24 -0
- package/dist/code/tools/pnpmaudit.d.ts +36 -0
- package/dist/code/tools/ruff.d.ts +46 -0
- package/dist/code/tools/tsc.d.ts +57 -0
- package/dist/code/tools/ty.d.ts +34 -0
- package/dist/code/tools/vulture.d.ts +32 -0
- package/dist/constants.d.ts +69 -0
- package/dist/core/index.d.ts +7 -0
- package/dist/core/loader.d.ts +42 -0
- package/dist/core/registry.d.ts +17 -0
- package/dist/core/schema.d.ts +1857 -0
- package/dist/core/types.d.ts +95 -0
- package/dist/{src-KZRTG3EU.js → core-QRFGIQ42.js} +4 -3
- package/dist/dependencies/index.d.ts +13 -0
- package/dist/dependencies/mappings.d.ts +17 -0
- package/dist/dependencies/output.d.ts +12 -0
- package/dist/dependencies/types.d.ts +34 -0
- package/dist/{dynamodb-5KVESCVJ.js → dynamodb-HQH3IMAI.js} +6 -10
- package/dist/dynamodb-HQH3IMAI.js.map +1 -0
- package/dist/{ec2-HKPE6GZV.js → ec2-AEPT735A.js} +6 -10
- package/dist/ec2-AEPT735A.js.map +1 -0
- package/dist/{ecs-OS3NJZTA.js → ecs-UHKCH5A7.js} +6 -10
- package/dist/ecs-UHKCH5A7.js.map +1 -0
- package/dist/{elasticache-7TCRHYYM.js → elasticache-5Y6K7GKJ.js} +6 -10
- package/dist/elasticache-5Y6K7GKJ.js.map +1 -0
- package/dist/{elb-PEDLXW5R.js → elb-CN6ELVM5.js} +6 -10
- package/dist/elb-CN6ELVM5.js.map +1 -0
- package/dist/{iam-7H5HFWVQ.js → iam-YXMHK2MV.js} +6 -2
- package/dist/iam-YXMHK2MV.js.map +1 -0
- package/dist/index.d.ts +21 -0
- package/dist/index.js +99 -121
- package/dist/index.js.map +1 -1
- package/dist/infra/arn.d.ts +16 -0
- package/dist/infra/checkers/client-factory.d.ts +45 -0
- package/dist/infra/checkers/cloudwatch.d.ts +8 -0
- package/dist/infra/checkers/dynamodb.d.ts +8 -0
- package/dist/infra/checkers/ec2.d.ts +13 -0
- package/dist/infra/checkers/ecs.d.ts +13 -0
- package/dist/infra/checkers/elasticache.d.ts +13 -0
- package/dist/infra/checkers/elb.d.ts +13 -0
- package/dist/infra/checkers/gcp/artifactregistry.d.ts +5 -0
- package/dist/infra/checkers/gcp/cloudrun.d.ts +5 -0
- package/dist/infra/checkers/gcp/iam.d.ts +5 -0
- package/dist/infra/checkers/gcp/index.d.ts +17 -0
- package/dist/infra/checkers/gcp/secretmanager.d.ts +5 -0
- package/dist/infra/checkers/iam.d.ts +8 -0
- package/dist/infra/checkers/index.d.ts +26 -0
- package/dist/infra/checkers/lambda.d.ts +8 -0
- package/dist/infra/checkers/rds.d.ts +13 -0
- package/dist/infra/checkers/s3.d.ts +8 -0
- package/dist/infra/checkers/secretsmanager.d.ts +8 -0
- package/dist/infra/checkers/sns.d.ts +8 -0
- package/dist/infra/checkers/sqs.d.ts +8 -0
- package/dist/infra/checkers/types.d.ts +28 -0
- package/dist/infra/gcp.d.ts +18 -0
- package/dist/infra/generate.d.ts +74 -0
- package/dist/infra/index.d.ts +59 -0
- package/dist/infra/manifest.d.ts +58 -0
- package/dist/infra/output.d.ts +8 -0
- package/dist/infra/scan.d.ts +25 -0
- package/dist/infra/schemas.d.ts +806 -0
- package/dist/infra/types.d.ts +8 -0
- package/dist/{infra-UXM5XQX3.js → infra-TO54IUSC.js} +21 -19
- package/dist/infra-TO54IUSC.js.map +1 -0
- package/dist/{lambda-NFB5UILT.js → lambda-YTJOCYV5.js} +6 -10
- package/dist/lambda-YTJOCYV5.js.map +1 -0
- package/dist/mcp/index.d.ts +7 -0
- package/dist/mcp/server.d.ts +18 -0
- package/dist/mcp/standards/fetcher.d.ts +29 -0
- package/dist/mcp/standards/index.d.ts +4 -0
- package/dist/mcp/standards/matcher.d.ts +22 -0
- package/dist/mcp/standards/parser.d.ts +46 -0
- package/dist/mcp/standards/types.d.ts +32 -0
- package/dist/mcp/tools/get-guideline.d.ts +26 -0
- package/dist/mcp/tools/get-ruleset.d.ts +26 -0
- package/dist/mcp/tools/get-standards.d.ts +27 -0
- package/dist/mcp/tools/index.d.ts +4 -0
- package/dist/mcp/tools/list-guidelines.d.ts +25 -0
- package/dist/{mcp-O5O7XVFG.js → mcp-73FZXT3P.js} +5 -4
- package/dist/mcp-73FZXT3P.js.map +1 -0
- package/dist/output/index.d.ts +14 -0
- package/dist/process/commands/check-branch.d.ts +13 -0
- package/dist/process/commands/check-commit.d.ts +14 -0
- package/dist/process/commands/index.d.ts +2 -0
- package/dist/process/index.d.ts +11 -0
- package/dist/process/scan/index.d.ts +5 -0
- package/dist/process/scan/remote-fetcher.d.ts +18 -0
- package/dist/process/scan/scanner.d.ts +6 -0
- package/dist/process/scan/types.d.ts +57 -0
- package/dist/process/scan/validators.d.ts +37 -0
- package/dist/process/sync/applier.d.ts +10 -0
- package/dist/process/sync/differ.d.ts +7 -0
- package/dist/process/sync/fetcher.d.ts +14 -0
- package/dist/process/sync/index.d.ts +9 -0
- package/dist/process/sync/types.d.ts +131 -0
- package/dist/process/sync/validator.d.ts +22 -0
- package/dist/process/tools/backups.d.ts +32 -0
- package/dist/process/tools/base.d.ts +52 -0
- package/dist/process/tools/branches.d.ts +41 -0
- package/dist/process/tools/changesets.d.ts +53 -0
- package/dist/process/tools/ci.d.ts +57 -0
- package/dist/process/tools/codeowners.d.ts +68 -0
- package/dist/process/tools/commits.d.ts +39 -0
- package/dist/process/tools/coverage.d.ts +57 -0
- package/dist/process/tools/docs-helpers.d.ts +44 -0
- package/dist/process/tools/docs.d.ts +38 -0
- package/dist/process/tools/forbidden-files.d.ts +40 -0
- package/dist/process/tools/hooks.d.ts +39 -0
- package/dist/process/tools/index.d.ts +14 -0
- package/dist/process/tools/pr.d.ts +59 -0
- package/dist/process/tools/repo.d.ts +65 -0
- package/dist/process/tools/tickets.d.ts +42 -0
- package/dist/projects/detector.d.ts +16 -0
- package/dist/projects/index.d.ts +4 -0
- package/dist/projects/templates.d.ts +15 -0
- package/dist/projects/tier-loader.d.ts +21 -0
- package/dist/projects/types.d.ts +76 -0
- package/dist/{rds-KLG5O5SI.js → rds-GZ5RVPIU.js} +6 -10
- package/dist/rds-GZ5RVPIU.js.map +1 -0
- package/dist/{registry-V65CC7IN.js → registry-JRCQAIHR.js} +3 -2
- package/dist/{s3-2DH7PRVR.js → s3-53UELUWT.js} +16 -12
- package/dist/s3-53UELUWT.js.map +1 -0
- package/dist/s3-S4GXNR7H.js +53 -0
- package/dist/s3-S4GXNR7H.js.map +1 -0
- package/dist/{scan-EELS42BP.js → scan-RHQWHASY.js} +5 -4
- package/dist/{scan-EELS42BP.js.map → scan-RHQWHASY.js.map} +1 -1
- package/dist/{secretsmanager-MOOIHLAO.js → secretsmanager-FJKTPIXI.js} +6 -10
- package/dist/secretsmanager-FJKTPIXI.js.map +1 -0
- package/dist/{sns-Y36LVTWA.js → sns-RV64OMK2.js} +6 -10
- package/dist/sns-RV64OMK2.js.map +1 -0
- package/dist/{sqs-RRS3GRHK.js → sqs-MHBW6UFC.js} +6 -10
- package/dist/sqs-MHBW6UFC.js.map +1 -0
- package/dist/{standards-RXK5G4IG.js → standards-XAZKTKYJ.js} +3 -2
- package/dist/{sync-RLYBGYNY.js → sync-P3UZECLW.js} +4 -3
- package/dist/{sync-RLYBGYNY.js.map → sync-P3UZECLW.js.map} +1 -1
- package/dist/validate/guidelines.d.ts +18 -0
- package/dist/validate/index.d.ts +5 -0
- package/dist/validate/tier.d.ts +17 -0
- package/dist/validate/types.d.ts +50 -0
- package/dist/{validate-AABLVQJS.js → validate-J5E336GX.js} +53 -84
- package/dist/validate-J5E336GX.js.map +1 -0
- package/package.json +22 -25
- package/dist/chunk-KHO6NIAI.js.map +0 -1
- package/dist/chunk-P7TIZJ4C.js.map +0 -1
- package/dist/chunk-RXA4FO7L.js.map +0 -1
- package/dist/cloudwatch-KSZ4A256.js.map +0 -1
- package/dist/dynamodb-5KVESCVJ.js.map +0 -1
- package/dist/ec2-HKPE6GZV.js.map +0 -1
- package/dist/ecs-OS3NJZTA.js.map +0 -1
- package/dist/elasticache-7TCRHYYM.js.map +0 -1
- package/dist/elb-PEDLXW5R.js.map +0 -1
- package/dist/iam-7H5HFWVQ.js.map +0 -1
- package/dist/infra-UXM5XQX3.js.map +0 -1
- package/dist/lambda-NFB5UILT.js.map +0 -1
- package/dist/mcp-O5O7XVFG.js.map +0 -1
- package/dist/rds-KLG5O5SI.js.map +0 -1
- package/dist/s3-2DH7PRVR.js.map +0 -1
- package/dist/secretsmanager-MOOIHLAO.js.map +0 -1
- package/dist/sns-Y36LVTWA.js.map +0 -1
- package/dist/sqs-RRS3GRHK.js.map +0 -1
- package/dist/validate-AABLVQJS.js.map +0 -1
- /package/dist/{registry-V65CC7IN.js.map → core-QRFGIQ42.js.map} +0 -0
- /package/dist/{src-KZRTG3EU.js.map → registry-JRCQAIHR.js.map} +0 -0
- /package/dist/{standards-RXK5G4IG.js.map → standards-XAZKTKYJ.js.map} +0 -0
@@ -0,0 +1,57 @@
+import { type CheckResult } from "../../core/index.js";
+import { BaseProcessToolRunner } from "./base.js";
+/** Coverage configuration from standards.toml */
+interface CoverageConfig {
+    enabled?: boolean;
+    min_threshold?: number;
+    enforce_in?: "ci" | "config" | "both";
+    ci_workflow?: string;
+    ci_job?: string;
+}
+/**
+ * Coverage enforcement runner.
+ * Checks that coverage thresholds are configured in CI workflows and/or config files.
+ */
+export declare class CoverageRunner extends BaseProcessToolRunner {
+    readonly name = "Coverage";
+    readonly rule = "process.coverage";
+    readonly toolId = "coverage";
+    private config;
+    /**
+     * Set configuration from standards.toml
+     */
+    setConfig(config: CoverageConfig): void;
+    /** Check for vitest coverage config */
+    private checkVitestConfig;
+    /** Check jest config file */
+    private checkJestConfigFile;
+    /** Check jest config in package.json */
+    private checkJestPackageJson;
+    /** Check for jest coverage config */
+    private checkJestConfig;
+    /** Check a single nyc config file and return result */
+    private checkSingleNycConfig;
+    /** Check nyc config file */
+    private checkNycConfigFile;
+    /** Check nyc config in package.json */
+    private checkNycPackageJson;
+    /** Check for nyc coverage config */
+    private checkNycConfig;
+    /** Check for coverage config in any supported tool */
+    private checkConfigCoverage;
+    /** Check if a step has coverage enforcement */
+    private stepHasCoverage;
+    /** Check a single job for coverage enforcement */
+    private checkJobForCoverage;
+    /** Check workflow jobs for coverage */
+    private checkWorkflowJobs;
+    /** Check for coverage enforcement in CI workflow */
+    private checkCiCoverage;
+    /** Validate config coverage and add violations if needed */
+    private validateConfigCoverage;
+    /** Validate CI coverage and add violations if needed */
+    private validateCiCoverage;
+    /** Run coverage validation */
+    run(projectRoot: string): Promise<CheckResult>;
+}
+export {};
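For orientation, a hypothetical consumer of the new coverage runner might look like the sketch below. It is based solely on the declarations above; the root import path and the no-argument constructor are assumptions, not documented API.

```ts
// Hypothetical usage sketch; import path and constructor are assumptions.
import { CoverageRunner } from "@standards-kit/conform";

async function checkCoverage(projectRoot: string): Promise<void> {
  const runner = new CoverageRunner();
  // Mirrors the CoverageConfig shape declared above.
  runner.setConfig({
    enabled: true,
    min_threshold: 80,
    enforce_in: "both", // look in both CI workflows and tool config files
    ci_workflow: "ci.yml",
    ci_job: "test",
  });
  const result = await runner.run(projectRoot);
  console.log(result);
}
```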
@@ -0,0 +1,44 @@
+/** Enforcement mode for documentation checks */
+export type EnforcementMode = "block" | "warn";
+/** Doc type configuration */
+export interface DocTypeConfig {
+    required_sections?: string[];
+    frontmatter?: string[];
+}
+/** Documentation configuration from standards.toml */
+export interface DocsConfig {
+    enabled?: boolean;
+    path?: string;
+    enforcement?: EnforcementMode;
+    allowlist?: string[];
+    max_files?: number;
+    max_file_lines?: number;
+    max_total_kb?: number;
+    staleness_days?: number;
+    stale_mappings?: Record<string, string>;
+    min_coverage?: number;
+    coverage_paths?: string[];
+    exclude_patterns?: string[];
+    types?: Record<string, DocTypeConfig>;
+}
+/** Parsed frontmatter from a markdown file */
+export interface ParsedDoc {
+    filePath: string;
+    frontmatter: Record<string, unknown>;
+    content: string;
+    headings: string[];
+}
+/** Export info from TypeScript file */
+export interface ExportInfo {
+    name: string;
+    file: string;
+    line: number;
+}
+/** Escape special regex characters in a string */
+export declare function escapeRegex(str: string): string;
+/** Parse a markdown file and extract frontmatter, content, headings */
+export declare function parseMarkdownFile(content: string, filePath: string): ParsedDoc;
+/** Extract exports from file content */
+export declare function extractFileExports(file: string, content: string): ExportInfo[];
+/** Get the source path that a doc file tracks */
+export declare function getTrackedPath(docFile: string, frontmatter: Record<string, unknown>, staleMappings: Record<string, string>, docsPath: string): string | null;
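A minimal sketch of how these documentation helpers could be driven follows. The root import path is an assumption, and the exact frontmatter and heading extraction rules are not documented in the declarations.

```ts
// Hypothetical sketch of the helper API above; import path is an assumption.
import { readFile } from "node:fs/promises";
import { parseMarkdownFile, type DocsConfig } from "@standards-kit/conform";

const docsConfig: DocsConfig = {
  enabled: true,
  path: "docs",
  enforcement: "warn", // report violations without failing the check
  staleness_days: 90,
  exclude_patterns: ["**/archive/**"],
};

async function inspectDoc(filePath: string): Promise<void> {
  const content = await readFile(filePath, "utf8");
  const parsed = parseMarkdownFile(content, filePath);
  console.log(parsed.headings, Object.keys(parsed.frontmatter));
}
```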
@@ -0,0 +1,38 @@
+import { type CheckResult } from "../../core/index.js";
+import { BaseProcessToolRunner } from "./base.js";
+import { type DocsConfig } from "./docs-helpers.js";
+/**
+ * Documentation governance runner.
+ * Validates documentation structure, content, freshness, and API coverage.
+ */
+export declare class DocsRunner extends BaseProcessToolRunner {
+    readonly name = "Documentation";
+    readonly rule = "process.docs";
+    readonly toolId = "docs";
+    private config;
+    setConfig(config: DocsConfig): void;
+    private getSeverity;
+    run(projectRoot: string): Promise<CheckResult>;
+    private checkStructure;
+    private checkAllowlist;
+    private checkFileLimits;
+    private checkSingleFileLimit;
+    private checkContent;
+    private validateDocFile;
+    private parseDocFile;
+    private validateFrontmatter;
+    private validateSections;
+    private validateInternalLinks;
+    private checkSingleLink;
+    private checkFreshness;
+    private getTimestamps;
+    private createStalenessViolation;
+    private checkFileFreshness;
+    private getGitLastModified;
+    private checkApiCoverage;
+    private getSourceFiles;
+    private getAllDocsContent;
+    private isExportDocumented;
+    private buildCoverageViolations;
+    private extractExports;
+}
@@ -0,0 +1,40 @@
+import { type CheckResult } from "../../core/index.js";
+import { BaseProcessToolRunner } from "./base.js";
+/** Forbidden files configuration */
+interface ForbiddenFilesConfig {
+    enabled?: boolean;
+    files?: string[];
+    ignore?: string[];
+    message?: string;
+}
+/**
+ * Runner for forbidden files validation.
+ * Validates that certain files do NOT exist anywhere in the repository.
+ */
+export declare class ForbiddenFilesRunner extends BaseProcessToolRunner {
+    readonly name = "Forbidden Files";
+    readonly rule = "process.forbidden_files";
+    readonly toolId = "forbidden-files";
+    private config;
+    setConfig(config: ForbiddenFilesConfig): void;
+    /**
+     * Run check - scans for forbidden files using glob patterns
+     */
+    run(projectRoot: string): Promise<CheckResult>;
+    /**
+     * Find files matching a forbidden pattern
+     */
+    private findForbiddenFiles;
+    /**
+     * Get ignore patterns, respecting explicit empty array override
+     * - undefined: use defaults (node_modules, .git)
+     * - []: no ignores (scan everything)
+     * - [...]: use custom ignores
+     */
+    private getIgnorePatterns;
+    /**
+     * Create a violation for a forbidden file
+     */
+    private createViolation;
+}
+export {};
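The ignore-override semantics called out in the doc comment above (undefined means defaults, an empty array means scan everything, a non-empty array means custom ignores) can be exercised with a config like the following hypothetical sketch; the import path and no-argument constructor are assumptions.

```ts
// Hypothetical config sketch mirroring the ForbiddenFilesConfig shape above.
import { ForbiddenFilesRunner } from "@standards-kit/conform";

const forbidden = new ForbiddenFilesRunner();
forbidden.setConfig({
  enabled: true,
  files: ["**/.env", "**/*.pem"],
  // undefined   -> default ignores (node_modules, .git)
  // []          -> no ignores, scan everything
  // ["dist/**"] -> custom ignores only
  ignore: ["dist/**"],
  message: "Secret and key material must not be committed",
});
forbidden.run(process.cwd()).then((result) => console.log(result));
```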
@@ -0,0 +1,39 @@
+import { type CheckResult } from "../../core/index.js";
+import { BaseProcessToolRunner } from "./base.js";
+/** Hooks configuration from standards.toml */
+interface HooksConfig {
+    enabled?: boolean;
+    require_husky?: boolean;
+    require_hooks?: string[];
+    commands?: Record<string, string[]>;
+    protected_branches?: string[];
+}
+/**
+ * Git hooks validation runner.
+ * Checks that husky is installed and required hooks are configured.
+ */
+export declare class HooksRunner extends BaseProcessToolRunner {
+    readonly name = "Hooks";
+    readonly rule = "process.hooks";
+    readonly toolId = "hooks";
+    private config;
+    /**
+     * Set configuration from standards.toml
+     */
+    setConfig(config: HooksConfig): void;
+    /** Check if husky is installed */
+    private checkHuskyInstalled;
+    /** Check that required hooks exist */
+    private checkRequiredHooks;
+    /** Check that hooks contain required commands */
+    private checkHookCommands;
+    /** Create a pre-push hook violation */
+    private createPrePushViolation;
+    /** Check if pre-push hook has branch detection */
+    private hasBranchDetection;
+    /** Check that pre-push hook prevents direct pushes to protected branches */
+    private checkProtectedBranches;
+    /** Run hooks validation */
+    run(projectRoot: string): Promise<CheckResult>;
+}
+export {};
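A hypothetical configuration for the hooks runner, mirroring the HooksConfig shape above; the import path and no-argument constructor are assumptions.

```ts
// Hypothetical sketch against the HooksConfig shape above.
import { HooksRunner } from "@standards-kit/conform";

const hooks = new HooksRunner();
hooks.setConfig({
  enabled: true,
  require_husky: true,
  require_hooks: ["pre-commit", "pre-push"],
  commands: {
    "pre-commit": ["lint-staged"],
    "pre-push": ["pnpm test"],
  },
  protected_branches: ["main"],
});
hooks.run(process.cwd()).then((result) => console.log(result));
```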
@@ -0,0 +1,14 @@
+export { BackupsRunner } from "./backups.js";
+export { BaseProcessToolRunner } from "./base.js";
+export { BranchesRunner } from "./branches.js";
+export { ChangesetsRunner } from "./changesets.js";
+export { CiRunner } from "./ci.js";
+export { CodeownersRunner } from "./codeowners.js";
+export { CommitsRunner } from "./commits.js";
+export { CoverageRunner } from "./coverage.js";
+export { DocsRunner } from "./docs.js";
+export { ForbiddenFilesRunner } from "./forbidden-files.js";
+export { HooksRunner } from "./hooks.js";
+export { PrRunner } from "./pr.js";
+export { RepoRunner } from "./repo.js";
+export { TicketsRunner } from "./tickets.js";
@@ -0,0 +1,59 @@
+import { type CheckResult } from "../../core/index.js";
+import { BaseProcessToolRunner } from "./base.js";
+/** PR configuration from standards.toml */
+interface PrConfig {
+    enabled?: boolean;
+    max_files?: number;
+    max_lines?: number;
+    require_issue?: boolean;
+    issue_keywords?: string[];
+    exclude?: string[];
+}
+/**
+ * PR size validation runner.
+ * Checks that the PR does not exceed configured size limits.
+ * Reads PR data from GITHUB_EVENT_PATH environment variable (GitHub Actions context).
+ */
+export declare class PrRunner extends BaseProcessToolRunner {
+    readonly name = "PR";
+    readonly rule = "process.pr";
+    readonly toolId = "pr";
+    private config;
+    /**
+     * Set configuration from standards.toml
+     */
+    setConfig(config: PrConfig): void;
+    /** Read PR data from GitHub event payload */
+    private readPrEventPayload;
+    /** Get PR data from payload, returns null if not available */
+    private getPrData;
+    /** Fetch a single page of PR files from GitHub API */
+    private fetchPrFilesPage;
+    /**
+     * Fetch PR files from GitHub API with pagination support.
+     * Returns empty array if GITHUB_TOKEN is not available or API fails.
+     */
+    private fetchPrFiles;
+    /**
+     * Filter files that match exclude patterns.
+     * Returns only files that do NOT match any exclude pattern.
+     */
+    private filterExcludedFiles;
+    /** Check if any validation is configured */
+    private hasValidationConfigured;
+    /** Check if PR body contains issue reference with keyword */
+    private findIssueReference;
+    /** Validate that PR contains issue reference */
+    private validateIssueReference;
+    /** Get PR counts, applying exclusions if configured */
+    private getPrCounts;
+    /** Check size limits and return violations */
+    private checkSizeLimits;
+    /** Validate PR size against configured limits */
+    private validatePrSize;
+    /** Collect all violations from PR validations */
+    private collectViolations;
+    /** Run PR validation */
+    run(_projectRoot: string): Promise<CheckResult>;
+}
+export {};
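A hypothetical CI sketch follows. Per the declarations above, the runner reads the pull-request payload from GITHUB_EVENT_PATH and falls back to an empty file list when GITHUB_TOKEN is unavailable; both variables are provided automatically inside GitHub Actions. The import path and no-argument constructor are assumptions.

```ts
// Hypothetical GitHub Actions usage sketch; import path and constructor are assumptions.
import { PrRunner } from "@standards-kit/conform";

async function checkPullRequest(): Promise<void> {
  const pr = new PrRunner();
  pr.setConfig({
    enabled: true,
    max_files: 40,
    max_lines: 800,
    require_issue: true,
    issue_keywords: ["closes", "fixes", "refs"],
    exclude: ["**/*.lock", "dist/**"],
  });
  // projectRoot is unused by this runner (the parameter is declared as _projectRoot).
  const result = await pr.run(process.cwd());
  console.log(result);
}
```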
@@ -0,0 +1,65 @@
+import { type CheckResult } from "../../core/index.js";
+import { BaseProcessToolRunner } from "./base.js";
+/** Bypass actor configuration */
+interface BypassActorConfig {
+    actor_type: "Integration" | "OrganizationAdmin" | "RepositoryRole" | "Team" | "DeployKey";
+    actor_id?: number;
+    bypass_mode?: "always" | "pull_request";
+}
+/** Ruleset configuration (uses GitHub Rulesets API) */
+interface RulesetConfig {
+    name?: string;
+    branch?: string;
+    enforcement?: "active" | "evaluate" | "disabled";
+    required_reviews?: number;
+    dismiss_stale_reviews?: boolean;
+    require_code_owner_reviews?: boolean;
+    require_status_checks?: string[];
+    require_branches_up_to_date?: boolean;
+    require_signed_commits?: boolean;
+    enforce_admins?: boolean;
+    bypass_actors?: BypassActorConfig[];
+}
+/** Tag protection configuration */
+interface TagProtectionConfig {
+    patterns?: string[];
+    prevent_deletion?: boolean;
+    prevent_update?: boolean;
+}
+/** Repository configuration */
+interface RepoConfig {
+    enabled?: boolean;
+    require_branch_protection?: boolean;
+    require_codeowners?: boolean;
+    ruleset?: RulesetConfig;
+    tag_protection?: TagProtectionConfig;
+}
+/** Runner for repository settings validation */
+export declare class RepoRunner extends BaseProcessToolRunner {
+    readonly name = "Repository";
+    readonly rule = "process.repo";
+    readonly toolId = "repo";
+    private config;
+    setConfig(config: RepoConfig): void;
+    run(projectRoot: string): Promise<CheckResult>;
+    private collectViolations;
+    private isGhCliAvailable;
+    private getRepoInfo;
+    private checkCodeowners;
+    private checkBranchProtection;
+    private findBranchRuleset;
+    private matchesBranch;
+    private handleNoBranchRuleset;
+    private handleBranchProtectionError;
+    private validateBranchRulesetSettings;
+    private checkPullRequestRuleSettings;
+    private checkStatusChecksRuleSettings;
+    private checkBypassActorsSettings;
+    private checkTagProtection;
+    private validateTagProtection;
+    private checkTagPatterns;
+    private checkTagRules;
+    private tagViolation;
+    private handleTagProtectionError;
+}
+export {};
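A hypothetical repository-settings configuration mirroring the RepoConfig, RulesetConfig, and TagProtectionConfig shapes above. The private isGhCliAvailable member suggests the checks shell out to the GitHub CLI, so gh would presumably need to be installed and authenticated; the import path and no-argument constructor are assumptions.

```ts
// Hypothetical config sketch against the declarations above.
import { RepoRunner } from "@standards-kit/conform";

const repo = new RepoRunner();
repo.setConfig({
  enabled: true,
  require_codeowners: true,
  require_branch_protection: true,
  ruleset: {
    branch: "main",
    enforcement: "active",
    required_reviews: 1,
    dismiss_stale_reviews: true,
    require_status_checks: ["ci"],
    require_branches_up_to_date: true,
  },
  tag_protection: {
    patterns: ["v*"],
    prevent_deletion: true,
    prevent_update: true,
  },
});
repo.run(process.cwd()).then((result) => console.log(result));
```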
@@ -0,0 +1,42 @@
+import { type CheckResult } from "../../core/index.js";
+import { BaseProcessToolRunner } from "./base.js";
+/** Tickets configuration from standards.toml */
+interface TicketsConfig {
+    enabled?: boolean;
+    pattern?: string;
+    require_in_commits?: boolean;
+    require_in_branch?: boolean;
+}
+/**
+ * Ticket reference validation runner.
+ * Checks that commit messages and/or branch names contain ticket references.
+ */
+export declare class TicketsRunner extends BaseProcessToolRunner {
+    readonly name = "Tickets";
+    readonly rule = "process.tickets";
+    readonly toolId = "tickets";
+    private config;
+    /**
+     * Set configuration from standards.toml
+     */
+    setConfig(config: TicketsConfig): void;
+    /** Get the current git branch name */
+    private getCurrentBranch;
+    /** Get the HEAD commit message */
+    private getHeadCommitMessage;
+    /** Check if text contains a match for the ticket pattern */
+    private matchesPattern;
+    /** Validate the regex pattern is valid */
+    private isValidPattern;
+    /** Check configuration validity */
+    private hasValidConfig;
+    /** Validate branch name contains ticket reference */
+    private validateBranch;
+    /** Validate commit message contains ticket reference */
+    private validateCommit;
+    /** Perform all validations and collect results */
+    private runValidations;
+    /** Run ticket validation */
+    run(projectRoot: string): Promise<CheckResult>;
+}
+export {};
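A hypothetical ticket-reference configuration follows. The pattern is presumably an ordinary regular expression source (the isValidPattern member suggests it is compiled at runtime); the import path and no-argument constructor are assumptions.

```ts
// Hypothetical sketch against the TicketsConfig shape above.
import { TicketsRunner } from "@standards-kit/conform";

const tickets = new TicketsRunner();
tickets.setConfig({
  enabled: true,
  pattern: "[A-Z]+-\\d+", // e.g. JIRA-style keys such as ABC-123
  require_in_commits: true,
  require_in_branch: true,
});
tickets.run(process.cwd()).then((result) => console.log(result));
```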
@@ -0,0 +1,16 @@
+import type { DetectedProject, DetectionResult, ProjectType } from "./types.js";
+/** Options for project detection */
+export interface DetectProjectsOptions {
+    /** Glob patterns to exclude from detection */
+    excludePatterns?: string[];
+}
+/**
+ * Detect all projects in a directory tree.
+ * Identifies projects by marker files and skips workspace roots.
+ *
+ * @param searchRoot - Root directory to search
+ * @param options - Detection options including exclude patterns
+ */
+export declare function detectProjects(searchRoot: string, options?: DetectProjectsOptions): Promise<DetectionResult>;
+/** Get all project types that need templates */
+export declare function getProjectTypes(projects: DetectedProject[]): Set<ProjectType>;
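A hypothetical use of the detection API above; the root-level import path is an assumption (the new projects/index.d.ts barrel in the file list suggests a re-export).

```ts
// Hypothetical usage sketch of detectProjects/getProjectTypes.
import { detectProjects, getProjectTypes } from "@standards-kit/conform";

async function listProjects(searchRoot: string): Promise<void> {
  const { projects, workspaceRoots } = await detectProjects(searchRoot, {
    excludePatterns: ["**/fixtures/**"],
  });
  for (const project of projects) {
    console.log(`${project.path} (${project.type}) has config: ${project.hasCheckToml}`);
  }
  console.log(`Skipped ${workspaceRoots.length} workspace roots`);
  console.log("Project types needing templates:", [...getProjectTypes(projects)]);
}
```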
@@ -0,0 +1,15 @@
+import type { ProjectType } from "./types.js";
+/** Get the default template for a project type */
+export declare function getTemplate(type: ProjectType): string;
+/** Get a template that extends from a registry */
+export declare function getExtendsTemplate(registryPath: string, projectType: ProjectType): string;
+/**
+ * Create a standards.toml file for a project.
+ * @returns true if file was created (or would be created in dry-run)
+ */
+export declare function createCheckToml(projectPath: string, type: ProjectType, dryRun: boolean, registryPath?: string): boolean;
+/**
+ * Create a shared registry with rulesets.
+ * @param projectTypes - Set of project types that need rulesets
+ */
+export declare function createRegistry(registryPath: string, projectTypes: Set<ProjectType>, dryRun: boolean): void;
@@ -0,0 +1,21 @@
+import type { Tier } from "../validate/types.js";
+import type { TierSource } from "./types.js";
+/** Result of loading tier from a project directory */
+export interface TierInfo {
+    /** The tier value (undefined if no metadata exists) */
+    tier?: Tier;
+    /** Source of the tier: "standards.toml" or null if not found */
+    source: TierSource;
+    /** Project name (optional, from standards.toml [metadata]) */
+    project?: string;
+    /** Organisation name (optional, from standards.toml [metadata]) */
+    organisation?: string;
+    /** Status (optional, from standards.toml [metadata]) */
+    status?: "active" | "pre-release" | "deprecated";
+}
+/**
+ * Load tier information from a project directory.
+ * Reads from standards.toml [metadata] section.
+ * Returns tier and source, with null source if no metadata exists.
+ */
+export declare function loadProjectTier(projectDir: string): TierInfo;
@@ -0,0 +1,76 @@
+import type { Tier } from "../validate/types.js";
+/** Re-export Tier for convenience */
+export type { Tier } from "../validate/types.js";
+/** Project types detected by marker files */
+export type ProjectType = "typescript" | "python";
+/** Source of tier value */
+export type TierSource = "standards.toml" | "default" | null;
+/** Project marker file configuration */
+export interface ProjectMarker {
+    file: string;
+    type: ProjectType;
+    /** Optional function to check if this marker indicates a workspace root */
+    isWorkspaceRoot?: (content: string) => boolean;
+}
+/** A detected project in the monorepo */
+export interface DetectedProject {
+    /** Relative path from search root */
+    path: string;
+    /** Detected project type */
+    type: ProjectType;
+    /** Whether standards.toml exists in this project */
+    hasCheckToml: boolean;
+    /** Which marker file triggered detection */
+    markerFile: string;
+}
+/** A project enriched with tier information */
+export interface EnrichedProject extends DetectedProject {
+    /** Tier from standards.toml [metadata] (undefined if not found) */
+    tier?: Tier;
+    /** Source of tier value: standards.toml, default, or null if not found */
+    tierSource?: TierSource;
+}
+/** Result of project detection */
+export interface DetectionResult {
+    /** All detected projects */
+    projects: DetectedProject[];
+    /** Paths that were identified as workspace roots (skipped) */
+    workspaceRoots: string[];
+}
+/** Options for the detect command */
+export interface DetectOptions {
+    /** Create missing standards.toml files */
+    fix?: boolean;
+    /** Show what would be created without creating */
+    dryRun?: boolean;
+    /** Create shared registry and extend from it */
+    registry?: string;
+    /** Output format */
+    format: "text" | "json";
+    /** Show tier/status from standards.toml [metadata] */
+    showStatus?: boolean;
+    /** Filter to projects without standards.toml */
+    missingConfig?: boolean;
+}
+/** JSON output structure */
+export interface DetectJsonOutput {
+    projects: {
+        path: string;
+        type: string;
+        status: "has-config" | "missing-config";
+        /** Tier from repo-metadata.yaml (only when --show-status) */
+        tier?: Tier | null;
+        /** Source of tier value (only when --show-status) */
+        tierSource?: TierSource;
+    }[];
+    workspaceRoots: string[];
+    summary: {
+        total: number;
+        withConfig: number;
+        missingConfig: number;
+    };
+    actions?: {
+        action: "created" | "would-create";
+        path: string;
+    }[];
+}
@@ -1,3 +1,7 @@
+import {
+  createClientFactory
+} from "./chunk-O745CMWG.js";
+
 // src/infra/checkers/rds.ts
 import {
   DescribeDBInstancesCommand,
@@ -5,15 +9,7 @@ import {
   DescribeDBSubnetGroupsCommand,
   RDSClient
 } from "@aws-sdk/client-rds";
-var
-function getClient(region) {
-  let client = clientCache.get(region);
-  if (!client) {
-    client = new RDSClient({ region });
-    clientCache.set(region, client);
-  }
-  return client;
-}
+var getClient = createClientFactory(RDSClient);
 async function checkDBInstance(client, arn) {
   const { resourceId, raw } = arn;
   try {
@@ -148,4 +144,4 @@ var RDSChecker = {
 export {
   RDSChecker
 };
-//# sourceMappingURL=rds-
+//# sourceMappingURL=rds-GZ5RVPIU.js.map
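The hunk above replaces the hand-rolled per-region RDSClient cache with createClientFactory from the new chunk-O745CMWG.js (the compiled client-factory module, whose source is not shown in this diff). A minimal sketch of what such a factory plausibly does, inferred from the removed inline cache, is shown below; treat it as an illustration rather than the actual implementation.

```ts
// Sketch of a per-region client factory, reconstructed from the removed cache logic.
type RegionalClientCtor<T> = new (config: { region: string }) => T;

function createClientFactory<T>(Ctor: RegionalClientCtor<T>): (region: string) => T {
  const cache = new Map<string, T>();
  return (region: string): T => {
    let client = cache.get(region);
    if (!client) {
      client = new Ctor({ region });
      cache.set(region, client);
    }
    return client;
  };
}
```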
@@ -0,0 +1 @@
{"version":3,"sources":["../src/infra/checkers/rds.ts"],"sourcesContent":["/**\n * RDS resource checker\n *\n * Supports:\n * - DB instances\n * - DB clusters (Aurora)\n * - DB subnet groups\n */\n\nimport {\n DescribeDBInstancesCommand,\n DescribeDBClustersCommand,\n DescribeDBSubnetGroupsCommand,\n RDSClient,\n} from \"@aws-sdk/client-rds\";\n\nimport type { ParsedArn, ResourceCheckResult } from \"../types.js\";\nimport { createClientFactory } from \"./client-factory.js\";\nimport type { ResourceChecker } from \"./types.js\";\n\n/**\n * Get or create an RDS client for a region\n */\nconst getClient = createClientFactory(RDSClient);\n\n/**\n * Check if an RDS DB instance exists\n */\nasync function checkDBInstance(\n client: RDSClient,\n arn: ParsedArn\n): Promise<ResourceCheckResult> {\n const { resourceId, raw } = arn;\n\n try {\n const response = await client.send(\n new DescribeDBInstancesCommand({ DBInstanceIdentifier: resourceId })\n );\n\n const instance = response.DBInstances?.[0];\n const exists = !!instance && instance.DBInstanceStatus !== \"deleting\";\n\n return {\n arn: raw,\n exists,\n service: \"rds\",\n resourceType: \"db\",\n resourceId,\n };\n } catch (error) {\n const err = error as Error & { name?: string };\n\n if (err.name === \"DBInstanceNotFoundFault\") {\n return {\n arn: raw,\n exists: false,\n service: \"rds\",\n resourceType: \"db\",\n resourceId,\n };\n }\n\n return {\n arn: raw,\n exists: false,\n error: err.message || \"Unknown error\",\n service: \"rds\",\n resourceType: \"db\",\n resourceId,\n };\n }\n}\n\n/**\n * Check if an RDS DB cluster exists (Aurora)\n */\nasync function checkDBCluster(\n client: RDSClient,\n arn: ParsedArn\n): Promise<ResourceCheckResult> {\n const { resourceId, raw } = arn;\n\n try {\n const response = await client.send(\n new DescribeDBClustersCommand({ DBClusterIdentifier: resourceId })\n );\n\n const cluster = response.DBClusters?.[0];\n const exists = !!cluster && cluster.Status !== \"deleting\";\n\n return {\n arn: raw,\n exists,\n service: \"rds\",\n resourceType: \"cluster\",\n resourceId,\n };\n } catch (error) {\n const err = error as Error & { name?: string };\n\n if (err.name === \"DBClusterNotFoundFault\") {\n return {\n arn: raw,\n exists: false,\n service: \"rds\",\n resourceType: \"cluster\",\n resourceId,\n };\n }\n\n return {\n arn: raw,\n exists: false,\n error: err.message || \"Unknown error\",\n service: \"rds\",\n resourceType: \"cluster\",\n resourceId,\n };\n }\n}\n\n/**\n * Check if an RDS DB subnet group exists\n */\nasync function checkDBSubnetGroup(\n client: RDSClient,\n arn: ParsedArn\n): Promise<ResourceCheckResult> {\n const { resourceId, raw } = arn;\n\n try {\n const response = await client.send(\n new DescribeDBSubnetGroupsCommand({ DBSubnetGroupName: resourceId })\n );\n\n const subnetGroup = response.DBSubnetGroups?.[0];\n const exists = !!subnetGroup;\n\n return {\n arn: raw,\n exists,\n service: \"rds\",\n resourceType: \"subgrp\",\n resourceId,\n };\n } catch (error) {\n const err = error as Error & { name?: string };\n\n if (err.name === \"DBSubnetGroupNotFoundFault\") {\n return {\n arn: raw,\n exists: false,\n service: \"rds\",\n resourceType: \"subgrp\",\n resourceId,\n };\n }\n\n return {\n arn: raw,\n exists: false,\n error: err.message || \"Unknown error\",\n service: \"rds\",\n resourceType: \"subgrp\",\n resourceId,\n };\n }\n}\n\n/**\n * RDS resource checker\n */\nexport const RDSChecker: ResourceChecker = {\n async check(arn: ParsedArn): Promise<ResourceCheckResult> {\n const { 
resourceType, resourceId, region, raw } = arn;\n const client = getClient(region);\n\n switch (resourceType) {\n case \"db\":\n return checkDBInstance(client, arn);\n\n case \"cluster\":\n return checkDBCluster(client, arn);\n\n case \"subgrp\":\n return checkDBSubnetGroup(client, arn);\n\n default:\n return {\n arn: raw,\n exists: false,\n error: `Unsupported RDS resource type: ${resourceType}`,\n service: \"rds\",\n resourceType,\n resourceId,\n };\n }\n },\n};\n"],"mappings":";;;;;AASA;AAAA,EACE;AAAA,EACA;AAAA,EACA;AAAA,EACA;AAAA,OACK;AASP,IAAM,YAAY,oBAAoB,SAAS;AAK/C,eAAe,gBACb,QACA,KAC8B;AAC9B,QAAM,EAAE,YAAY,IAAI,IAAI;AAE5B,MAAI;AACF,UAAM,WAAW,MAAM,OAAO;AAAA,MAC5B,IAAI,2BAA2B,EAAE,sBAAsB,WAAW,CAAC;AAAA,IACrE;AAEA,UAAM,WAAW,SAAS,cAAc,CAAC;AACzC,UAAM,SAAS,CAAC,CAAC,YAAY,SAAS,qBAAqB;AAE3D,WAAO;AAAA,MACL,KAAK;AAAA,MACL;AAAA,MACA,SAAS;AAAA,MACT,cAAc;AAAA,MACd;AAAA,IACF;AAAA,EACF,SAAS,OAAO;AACd,UAAM,MAAM;AAEZ,QAAI,IAAI,SAAS,2BAA2B;AAC1C,aAAO;AAAA,QACL,KAAK;AAAA,QACL,QAAQ;AAAA,QACR,SAAS;AAAA,QACT,cAAc;AAAA,QACd;AAAA,MACF;AAAA,IACF;AAEA,WAAO;AAAA,MACL,KAAK;AAAA,MACL,QAAQ;AAAA,MACR,OAAO,IAAI,WAAW;AAAA,MACtB,SAAS;AAAA,MACT,cAAc;AAAA,MACd;AAAA,IACF;AAAA,EACF;AACF;AAKA,eAAe,eACb,QACA,KAC8B;AAC9B,QAAM,EAAE,YAAY,IAAI,IAAI;AAE5B,MAAI;AACF,UAAM,WAAW,MAAM,OAAO;AAAA,MAC5B,IAAI,0BAA0B,EAAE,qBAAqB,WAAW,CAAC;AAAA,IACnE;AAEA,UAAM,UAAU,SAAS,aAAa,CAAC;AACvC,UAAM,SAAS,CAAC,CAAC,WAAW,QAAQ,WAAW;AAE/C,WAAO;AAAA,MACL,KAAK;AAAA,MACL;AAAA,MACA,SAAS;AAAA,MACT,cAAc;AAAA,MACd;AAAA,IACF;AAAA,EACF,SAAS,OAAO;AACd,UAAM,MAAM;AAEZ,QAAI,IAAI,SAAS,0BAA0B;AACzC,aAAO;AAAA,QACL,KAAK;AAAA,QACL,QAAQ;AAAA,QACR,SAAS;AAAA,QACT,cAAc;AAAA,QACd;AAAA,MACF;AAAA,IACF;AAEA,WAAO;AAAA,MACL,KAAK;AAAA,MACL,QAAQ;AAAA,MACR,OAAO,IAAI,WAAW;AAAA,MACtB,SAAS;AAAA,MACT,cAAc;AAAA,MACd;AAAA,IACF;AAAA,EACF;AACF;AAKA,eAAe,mBACb,QACA,KAC8B;AAC9B,QAAM,EAAE,YAAY,IAAI,IAAI;AAE5B,MAAI;AACF,UAAM,WAAW,MAAM,OAAO;AAAA,MAC5B,IAAI,8BAA8B,EAAE,mBAAmB,WAAW,CAAC;AAAA,IACrE;AAEA,UAAM,cAAc,SAAS,iBAAiB,CAAC;AAC/C,UAAM,SAAS,CAAC,CAAC;AAEjB,WAAO;AAAA,MACL,KAAK;AAAA,MACL;AAAA,MACA,SAAS;AAAA,MACT,cAAc;AAAA,MACd;AAAA,IACF;AAAA,EACF,SAAS,OAAO;AACd,UAAM,MAAM;AAEZ,QAAI,IAAI,SAAS,8BAA8B;AAC7C,aAAO;AAAA,QACL,KAAK;AAAA,QACL,QAAQ;AAAA,QACR,SAAS;AAAA,QACT,cAAc;AAAA,QACd;AAAA,MACF;AAAA,IACF;AAEA,WAAO;AAAA,MACL,KAAK;AAAA,MACL,QAAQ;AAAA,MACR,OAAO,IAAI,WAAW;AAAA,MACtB,SAAS;AAAA,MACT,cAAc;AAAA,MACd;AAAA,IACF;AAAA,EACF;AACF;AAKO,IAAM,aAA8B;AAAA,EACzC,MAAM,MAAM,KAA8C;AACxD,UAAM,EAAE,cAAc,YAAY,QAAQ,IAAI,IAAI;AAClD,UAAM,SAAS,UAAU,MAAM;AAE/B,YAAQ,cAAc;AAAA,MACpB,KAAK;AACH,eAAO,gBAAgB,QAAQ,GAAG;AAAA,MAEpC,KAAK;AACH,eAAO,eAAe,QAAQ,GAAG;AAAA,MAEnC,KAAK;AACH,eAAO,mBAAmB,QAAQ,GAAG;AAAA,MAEvC;AACE,eAAO;AAAA,UACL,KAAK;AAAA,UACL,QAAQ;AAAA,UACR,OAAO,kCAAkC,YAAY;AAAA,UACrD,SAAS;AAAA,UACT;AAAA,UACA;AAAA,QACF;AAAA,IACJ;AAAA,EACF;AACF;","names":[]}
@@ -4,7 +4,8 @@ import {
   mergeConfigs,
   parseRegistryUrl,
   resolveExtends
-} from "./chunk-
+} from "./chunk-YGDEM6K5.js";
+import "./chunk-RHM53NLG.js";
 export {
   fetchRegistry,
   loadRuleset,
@@ -12,4 +13,4 @@ export {
   parseRegistryUrl,
   resolveExtends
 };
-//# sourceMappingURL=registry-
+//# sourceMappingURL=registry-JRCQAIHR.js.map
@@ -1,17 +1,21 @@
+import {
+  AWS_DEFAULTS
+} from "./chunk-RHM53NLG.js";
+import {
+  createClientFactoryWithConfig
+} from "./chunk-O745CMWG.js";
+
 // src/infra/checkers/s3.ts
 import { HeadBucketCommand, S3Client } from "@aws-sdk/client-s3";
-var
+var getClientForRegion = createClientFactoryWithConfig(
+  (region) => new S3Client({
+    region,
+    followRegionRedirects: true
+  })
+);
 function getClient(region) {
-  const effectiveRegion = region ||
-
-  if (!client) {
-    client = new S3Client({
-      region: effectiveRegion,
-      followRegionRedirects: true
-    });
-    clientCache.set(effectiveRegion, client);
-  }
-  return client;
+  const effectiveRegion = region || AWS_DEFAULTS.globalRegion;
+  return getClientForRegion(effectiveRegion);
 }
 var S3Checker = {
   async check(arn) {
@@ -46,4 +50,4 @@ async function checkBucket(bucketName, region, arn) {
 export {
   S3Checker
 };
-//# sourceMappingURL=s3-
+//# sourceMappingURL=s3-53UELUWT.js.map
@@ -0,0 +1 @@
{"version":3,"sources":["../src/infra/checkers/s3.ts"],"sourcesContent":["/**\n * S3 resource checker\n */\n\nimport { HeadBucketCommand, S3Client } from \"@aws-sdk/client-s3\";\n\nimport { AWS_DEFAULTS } from \"../../constants.js\";\nimport type { ParsedArn, ResourceCheckResult } from \"../types.js\";\nimport { createClientFactoryWithConfig } from \"./client-factory.js\";\nimport type { ResourceChecker } from \"./types.js\";\n\n/**\n * Get or create an S3 client for a region.\n * S3 is global, but we use the default region for global operations.\n * Uses followRegionRedirects for cross-region bucket access.\n */\nconst getClientForRegion = createClientFactoryWithConfig(\n (region: string) =>\n new S3Client({\n region,\n followRegionRedirects: true,\n })\n);\n\n/**\n * Get S3 client with fallback to default region\n */\nfunction getClient(region: string): S3Client {\n const effectiveRegion = region || AWS_DEFAULTS.globalRegion;\n return getClientForRegion(effectiveRegion);\n}\n\n/**\n * S3 bucket checker\n */\nexport const S3Checker: ResourceChecker = {\n async check(arn: ParsedArn): Promise<ResourceCheckResult> {\n const { resourceType, resourceId, raw } = arn;\n\n // Only check bucket existence (not individual objects)\n if (resourceType === \"object\") {\n // For objects, we'd need to check if the key exists, which is expensive\n // For now, we just check if the bucket exists\n const bucketName = resourceId.split(\"/\")[0];\n return checkBucket(bucketName, arn.region, raw);\n }\n\n return checkBucket(resourceId, arn.region, raw);\n },\n};\n\n/**\n * Create a bucket check result\n */\nfunction bucketResult(\n arn: string,\n bucketName: string,\n exists: boolean,\n error?: string\n): ResourceCheckResult {\n return { arn, exists, error, service: \"s3\", resourceType: \"bucket\", resourceId: bucketName };\n}\n\n/**\n * Check if error indicates bucket doesn't exist (404 or 403)\n */\nfunction isBucketNotFound(err: Error & { name?: string; $metadata?: { httpStatusCode?: number } }): boolean {\n const httpStatus = err.$metadata?.httpStatusCode;\n // 404 = not found, 403 = access denied (S3 returns 403 for non-existent buckets to prevent enumeration)\n return err.name === \"NotFound\" || err.name === \"NoSuchBucket\" || httpStatus === 404 ||\n err.name === \"Forbidden\" || err.name === \"AccessDenied\" || httpStatus === 403;\n}\n\n/**\n * Check if an S3 bucket exists\n */\nasync function checkBucket(bucketName: string, region: string, arn: string): Promise<ResourceCheckResult> {\n const client = getClient(region);\n\n try {\n await client.send(new HeadBucketCommand({ Bucket: bucketName }));\n return bucketResult(arn, bucketName, true);\n } catch (error) {\n const err = error as Error & { name?: string; $metadata?: { httpStatusCode?: number } };\n if (isBucketNotFound(err)) {\n return bucketResult(arn, bucketName, false);\n }\n return bucketResult(arn, bucketName, false, err.message || \"Unknown error\");\n 
}\n}\n"],"mappings":";;;;;;;;AAIA,SAAS,mBAAmB,gBAAgB;AAY5C,IAAM,qBAAqB;AAAA,EACzB,CAAC,WACC,IAAI,SAAS;AAAA,IACX;AAAA,IACA,uBAAuB;AAAA,EACzB,CAAC;AACL;AAKA,SAAS,UAAU,QAA0B;AAC3C,QAAM,kBAAkB,UAAU,aAAa;AAC/C,SAAO,mBAAmB,eAAe;AAC3C;AAKO,IAAM,YAA6B;AAAA,EACxC,MAAM,MAAM,KAA8C;AACxD,UAAM,EAAE,cAAc,YAAY,IAAI,IAAI;AAG1C,QAAI,iBAAiB,UAAU;AAG7B,YAAM,aAAa,WAAW,MAAM,GAAG,EAAE,CAAC;AAC1C,aAAO,YAAY,YAAY,IAAI,QAAQ,GAAG;AAAA,IAChD;AAEA,WAAO,YAAY,YAAY,IAAI,QAAQ,GAAG;AAAA,EAChD;AACF;AAKA,SAAS,aACP,KACA,YACA,QACA,OACqB;AACrB,SAAO,EAAE,KAAK,QAAQ,OAAO,SAAS,MAAM,cAAc,UAAU,YAAY,WAAW;AAC7F;AAKA,SAAS,iBAAiB,KAAkF;AAC1G,QAAM,aAAa,IAAI,WAAW;AAElC,SAAO,IAAI,SAAS,cAAc,IAAI,SAAS,kBAAkB,eAAe,OACzE,IAAI,SAAS,eAAe,IAAI,SAAS,kBAAkB,eAAe;AACnF;AAKA,eAAe,YAAY,YAAoB,QAAgB,KAA2C;AACxG,QAAM,SAAS,UAAU,MAAM;AAE/B,MAAI;AACF,UAAM,OAAO,KAAK,IAAI,kBAAkB,EAAE,QAAQ,WAAW,CAAC,CAAC;AAC/D,WAAO,aAAa,KAAK,YAAY,IAAI;AAAA,EAC3C,SAAS,OAAO;AACd,UAAM,MAAM;AACZ,QAAI,iBAAiB,GAAG,GAAG;AACzB,aAAO,aAAa,KAAK,YAAY,KAAK;AAAA,IAC5C;AACA,WAAO,aAAa,KAAK,YAAY,OAAO,IAAI,WAAW,eAAe;AAAA,EAC5E;AACF;","names":[]}