@intentius/chant-lexicon-docker 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +24 -0
- package/dist/integrity.json +19 -0
- package/dist/manifest.json +15 -0
- package/dist/meta.json +222 -0
- package/dist/rules/apt-no-recommends.ts +43 -0
- package/dist/rules/docker-helpers.ts +114 -0
- package/dist/rules/no-latest-image.ts +36 -0
- package/dist/rules/no-latest-tag.ts +63 -0
- package/dist/rules/no-root-user.ts +36 -0
- package/dist/rules/prefer-copy.ts +53 -0
- package/dist/rules/ssh-port-exposed.ts +68 -0
- package/dist/rules/unused-volume.ts +49 -0
- package/dist/skills/chant-docker-patterns.md +153 -0
- package/dist/skills/chant-docker.md +129 -0
- package/dist/types/index.d.ts +93 -0
- package/package.json +53 -0
- package/src/codegen/docs-cli.ts +10 -0
- package/src/codegen/docs.ts +12 -0
- package/src/codegen/generate-cli.ts +36 -0
- package/src/codegen/generate-compose.ts +21 -0
- package/src/codegen/generate-dockerfile.ts +21 -0
- package/src/codegen/generate.test.ts +105 -0
- package/src/codegen/generate.ts +158 -0
- package/src/codegen/naming.test.ts +81 -0
- package/src/codegen/naming.ts +54 -0
- package/src/codegen/package.ts +65 -0
- package/src/codegen/patches.ts +42 -0
- package/src/codegen/versions.ts +15 -0
- package/src/composites/index.ts +12 -0
- package/src/coverage.test.ts +33 -0
- package/src/coverage.ts +54 -0
- package/src/default-labels.test.ts +85 -0
- package/src/default-labels.ts +72 -0
- package/src/generated/index.d.ts +93 -0
- package/src/generated/index.ts +10 -0
- package/src/generated/lexicon-docker.json +222 -0
- package/src/generated/runtime.ts +4 -0
- package/src/import/generator.test.ts +133 -0
- package/src/import/generator.ts +127 -0
- package/src/import/parser.test.ts +137 -0
- package/src/import/parser.ts +190 -0
- package/src/import/roundtrip.test.ts +49 -0
- package/src/import/testdata/full.yaml +43 -0
- package/src/import/testdata/simple.yaml +9 -0
- package/src/import/testdata/webapp.yaml +41 -0
- package/src/index.ts +29 -0
- package/src/interpolation.test.ts +41 -0
- package/src/interpolation.ts +76 -0
- package/src/lint/post-synth/apt-no-recommends.ts +43 -0
- package/src/lint/post-synth/docker-helpers.ts +114 -0
- package/src/lint/post-synth/no-latest-image.ts +36 -0
- package/src/lint/post-synth/no-root-user.ts +36 -0
- package/src/lint/post-synth/post-synth.test.ts +181 -0
- package/src/lint/post-synth/prefer-copy.ts +53 -0
- package/src/lint/post-synth/ssh-port-exposed.ts +68 -0
- package/src/lint/post-synth/unused-volume.ts +49 -0
- package/src/lint/rules/data/deprecated-images.ts +28 -0
- package/src/lint/rules/data/known-base-images.ts +20 -0
- package/src/lint/rules/index.ts +5 -0
- package/src/lint/rules/no-latest-tag.ts +63 -0
- package/src/lint/rules/rules.test.ts +82 -0
- package/src/lsp/completions.test.ts +34 -0
- package/src/lsp/completions.ts +20 -0
- package/src/lsp/hover.test.ts +34 -0
- package/src/lsp/hover.ts +38 -0
- package/src/package-cli.ts +42 -0
- package/src/plugin.test.ts +117 -0
- package/src/plugin.ts +250 -0
- package/src/serializer.test.ts +294 -0
- package/src/serializer.ts +322 -0
- package/src/skills/chant-docker-patterns.md +153 -0
- package/src/skills/chant-docker.md +129 -0
- package/src/spec/fetch-compose.ts +35 -0
- package/src/spec/fetch-engine.ts +25 -0
- package/src/spec/parse-compose.ts +110 -0
- package/src/spec/parse-engine.ts +47 -0
- package/src/validate-cli.ts +19 -0
- package/src/validate.test.ts +16 -0
- package/src/validate.ts +44 -0
- package/src/variables.test.ts +32 -0
- package/src/variables.ts +47 -0
|
@@ -0,0 +1,127 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Docker IR → TypeScript source generator.
|
|
3
|
+
*
|
|
4
|
+
* Converts parsed IR to valid chant TypeScript source code.
|
|
5
|
+
*/
|
|
6
|
+
|
|
7
|
+
import type { DockerIR, ServiceIR, VolumeIR, NetworkIR, ConfigIR, SecretIR, DockerfileIR } from "./parser";
|
|
8
|
+
|
|
9
|
+
export interface GenerateResult {
|
|
10
|
+
source: string;
|
|
11
|
+
warnings: string[];
|
|
12
|
+
}
|
|
13
|
+
|
|
14
|
+
/**
|
|
15
|
+
* Generator for Docker entities — IR → TypeScript chant source.
|
|
16
|
+
*/
|
|
17
|
+
export class DockerGenerator {
|
|
18
|
+
generate(entities: DockerIR[]): GenerateResult {
|
|
19
|
+
const warnings: string[] = [];
|
|
20
|
+
const imports = new Set<string>();
|
|
21
|
+
const lines: string[] = [];
|
|
22
|
+
|
|
23
|
+
for (const entity of entities) {
|
|
24
|
+
switch (entity.kind) {
|
|
25
|
+
case "service":
|
|
26
|
+
imports.add("Service");
|
|
27
|
+
lines.push(generateService(entity));
|
|
28
|
+
break;
|
|
29
|
+
case "volume":
|
|
30
|
+
imports.add("Volume");
|
|
31
|
+
lines.push(generateVolume(entity));
|
|
32
|
+
break;
|
|
33
|
+
case "network":
|
|
34
|
+
imports.add("Network");
|
|
35
|
+
lines.push(generateNetwork(entity));
|
|
36
|
+
break;
|
|
37
|
+
case "config":
|
|
38
|
+
imports.add("Config");
|
|
39
|
+
lines.push(generateConfig(entity));
|
|
40
|
+
break;
|
|
41
|
+
case "secret":
|
|
42
|
+
imports.add("Secret");
|
|
43
|
+
lines.push(generateSecret(entity));
|
|
44
|
+
break;
|
|
45
|
+
case "dockerfile":
|
|
46
|
+
imports.add("Dockerfile");
|
|
47
|
+
lines.push(generateDockerfile(entity));
|
|
48
|
+
break;
|
|
49
|
+
}
|
|
50
|
+
}
|
|
51
|
+
|
|
52
|
+
const importLine = `import { ${[...imports].sort().join(", ")} } from "@intentius/chant-lexicon-docker";`;
|
|
53
|
+
const source = [importLine, "", ...lines].join("\n");
|
|
54
|
+
|
|
55
|
+
return { source, warnings };
|
|
56
|
+
}
|
|
57
|
+
}
|
|
58
|
+
|
|
59
|
+
function generateService(svc: ServiceIR): string {
|
|
60
|
+
const propsStr = JSON.stringify(svc.props, null, 2)
|
|
61
|
+
.replace(/"([a-z_][a-z0-9_]*)":/g, "$1:");
|
|
62
|
+
return `export const ${sanitizeName(svc.name)} = new Service(${propsStr});`;
|
|
63
|
+
}
|
|
64
|
+
|
|
65
|
+
function generateVolume(vol: VolumeIR): string {
|
|
66
|
+
const hasProps = Object.keys(vol.props).length > 0;
|
|
67
|
+
return `export const ${sanitizeName(vol.name)} = new Volume(${hasProps ? JSON.stringify(vol.props) : "{}"});`;
|
|
68
|
+
}
|
|
69
|
+
|
|
70
|
+
function generateNetwork(net: NetworkIR): string {
|
|
71
|
+
const hasProps = Object.keys(net.props).length > 0;
|
|
72
|
+
return `export const ${sanitizeName(net.name)} = new Network(${hasProps ? JSON.stringify(net.props) : "{}"});`;
|
|
73
|
+
}
|
|
74
|
+
|
|
75
|
+
function generateConfig(cfg: ConfigIR): string {
|
|
76
|
+
const hasProps = Object.keys(cfg.props).length > 0;
|
|
77
|
+
return `export const ${sanitizeName(cfg.name)} = new Config(${hasProps ? JSON.stringify(cfg.props) : "{}"});`;
|
|
78
|
+
}
|
|
79
|
+
|
|
80
|
+
function generateSecret(sec: SecretIR): string {
|
|
81
|
+
const hasProps = Object.keys(sec.props).length > 0;
|
|
82
|
+
return `export const ${sanitizeName(sec.name)} = new Secret(${hasProps ? JSON.stringify(sec.props) : "{}"});`;
|
|
83
|
+
}
|
|
84
|
+
|
|
85
|
+
function generateDockerfile(df: DockerfileIR): string {
|
|
86
|
+
if (df.stages.length > 1) {
|
|
87
|
+
// Multi-stage: emit stages array
|
|
88
|
+
const props = { stages: df.stages };
|
|
89
|
+
const propsStr = JSON.stringify(props, null, 2).replace(/"([a-z_][a-z0-9_]*)":/g, "$1:");
|
|
90
|
+
return `export const ${sanitizeName(df.name)} = new Dockerfile(${propsStr});`;
|
|
91
|
+
}
|
|
92
|
+
|
|
93
|
+
// Single-stage: flat props (preserved behaviour)
|
|
94
|
+
const stage = df.stages[0];
|
|
95
|
+
const props: Record<string, unknown> = {};
|
|
96
|
+
if (stage) props.from = stage.from;
|
|
97
|
+
|
|
98
|
+
// Group instructions by type
|
|
99
|
+
const grouped: Record<string, string[]> = {};
|
|
100
|
+
for (const { instruction, value } of stage?.instructions ?? []) {
|
|
101
|
+
const key = instruction.toLowerCase();
|
|
102
|
+
if (!grouped[key]) grouped[key] = [];
|
|
103
|
+
grouped[key].push(value);
|
|
104
|
+
}
|
|
105
|
+
|
|
106
|
+
// Single-value instructions
|
|
107
|
+
for (const single of ["workdir", "user", "entrypoint", "cmd", "healthcheck"]) {
|
|
108
|
+
if (grouped[single]?.length === 1) {
|
|
109
|
+
props[single] = grouped[single][0];
|
|
110
|
+
}
|
|
111
|
+
}
|
|
112
|
+
|
|
113
|
+
// Multi-value instructions
|
|
114
|
+
for (const multi of ["run", "copy", "add", "env", "arg", "expose", "volume", "label"]) {
|
|
115
|
+
if (grouped[multi]?.length) {
|
|
116
|
+
props[multi] = grouped[multi].length === 1 ? grouped[multi] : grouped[multi];
|
|
117
|
+
}
|
|
118
|
+
}
|
|
119
|
+
|
|
120
|
+
const propsStr = JSON.stringify(props, null, 2).replace(/"([a-z_][a-z0-9_]*)":/g, "$1:");
|
|
121
|
+
return `export const ${sanitizeName(df.name)} = new Dockerfile(${propsStr});`;
|
|
122
|
+
}
|
|
123
|
+
|
|
124
|
+
function sanitizeName(name: string): string {
|
|
125
|
+
// Convert kebab/snake to camelCase
|
|
126
|
+
return name.replace(/[-_]([a-z])/g, (_, c) => c.toUpperCase());
|
|
127
|
+
}
|
|
@@ -0,0 +1,137 @@
|
|
|
1
|
+
import { describe, test, expect } from "bun:test";
|
|
2
|
+
import { readFileSync } from "fs";
|
|
3
|
+
import { join } from "path";
|
|
4
|
+
import { DockerParser, DockerfileParser } from "./parser";
|
|
5
|
+
|
|
6
|
+
// Reads a fixture file from ./testdata next to this test.
// NOTE(review): `import.meta.dir` is Bun-specific — these tests run under bun:test only.
const testdata = (file: string) =>
  readFileSync(join(import.meta.dir, "testdata", file), "utf8");
|
8
|
+
|
|
9
|
+
// Service parsing: each entry under `services:` becomes a ServiceIR whose
// props are whitelisted copies of the compose keys.
describe("DockerParser — services", () => {
  test("image extracted correctly", () => {
    const yaml = `services:\n  api:\n    image: nginx:1.25\n`;
    const { entities } = new DockerParser().parse(yaml);
    const svc = entities.find((e) => e.name === "api");
    expect(svc?.kind).toBe("service");
    expect((svc?.props as any).image).toBe("nginx:1.25");
  });

  test("ports array preserved", () => {
    const yaml = `services:\n  web:\n    image: nginx:1.25\n    ports:\n      - "80:80"\n      - "443:443"\n`;
    const { entities } = new DockerParser().parse(yaml);
    const svc = entities.find((e) => e.name === "web");
    expect((svc?.props as any).ports).toEqual(["80:80", "443:443"]);
  });

  test("environment map preserved", () => {
    const yaml = `services:\n  app:\n    image: node:20\n    environment:\n      NODE_ENV: production\n      PORT: "3000"\n`;
    const { entities } = new DockerParser().parse(yaml);
    const svc = entities.find((e) => e.name === "app");
    expect((svc?.props as any).environment).toEqual({ NODE_ENV: "production", PORT: "3000" });
  });

  test("volumes list preserved", () => {
    const yaml = `services:\n  db:\n    image: postgres:16\n    volumes:\n      - "pg-data:/var/lib/postgresql/data"\n`;
    const { entities } = new DockerParser().parse(yaml);
    const svc = entities.find((e) => e.name === "db");
    expect((svc?.props as any).volumes).toEqual(["pg-data:/var/lib/postgresql/data"]);
  });

  test("depends_on list preserved", () => {
    const yaml = `services:\n  api:\n    image: node:20\n    depends_on:\n      - db\n      - cache\n`;
    const { entities } = new DockerParser().parse(yaml);
    const svc = entities.find((e) => e.name === "api");
    expect((svc?.props as any).depends_on).toEqual(["db", "cache"]);
  });

  test("restart string preserved", () => {
    const yaml = `services:\n  app:\n    image: myapp:latest\n    restart: unless-stopped\n`;
    const { entities } = new DockerParser().parse(yaml);
    const svc = entities.find((e) => e.name === "app");
    expect((svc?.props as any).restart).toBe("unless-stopped");
  });

  test("healthcheck object preserved", () => {
    // Fixture-backed: webapp.yaml defines a healthcheck on the `api` service.
    const { entities } = new DockerParser().parse(testdata("webapp.yaml"));
    const svc = entities.find((e) => e.name === "api");
    const hc = (svc?.props as any).healthcheck;
    expect(hc).toBeDefined();
    expect(hc.interval).toBe("30s");
    expect(hc.retries).toBe(3);
  });
});
|
|
62
|
+
|
|
63
|
+
// Top-level compose sections (volumes/networks/configs/secrets) each map to
// their own IR kind; fixtures live in ./testdata.
describe("DockerParser — top-level sections", () => {
  test("top-level volumes: section → VolumeIR entities", () => {
    const { entities } = new DockerParser().parse(testdata("simple.yaml"));
    const vol = entities.find((e) => e.kind === "volume" && e.name === "webdata");
    expect(vol).toBeDefined();
    expect((vol?.props as any).driver).toBe("local");
  });

  test("top-level networks: section → NetworkIR entities", () => {
    const { entities } = new DockerParser().parse(testdata("full.yaml"));
    const net = entities.find((e) => e.kind === "network" && e.name === "backend");
    expect(net).toBeDefined();
    expect((net?.props as any).driver).toBe("bridge");
  });

  test("top-level configs: section → ConfigIR entities", () => {
    const { entities } = new DockerParser().parse(testdata("full.yaml"));
    const cfg = entities.find((e) => e.kind === "config" && e.name === "app-config");
    expect(cfg).toBeDefined();
    expect((cfg?.props as any).file).toBe("./config/app.conf");
  });

  test("top-level secrets: section → SecretIR entities", () => {
    const { entities } = new DockerParser().parse(testdata("full.yaml"));
    const sec = entities.find((e) => e.kind === "secret" && e.name === "db-password");
    expect(sec).toBeDefined();
    expect((sec?.props as any).file).toBe("./secrets/db-password.txt");
  });

  test("returns empty entities for empty compose", () => {
    // Empty/whitespace input short-circuits before YAML parsing.
    const result = new DockerParser().parse("");
    expect(result.entities).toHaveLength(0);
    expect(result.warnings).toHaveLength(0);
  });
});
|
|
98
|
+
|
|
99
|
+
// Dockerfile parsing: each FROM opens a stage; subsequent instructions are
// attached to the current stage.
describe("DockerfileParser", () => {
  test("single-stage: stages[0].from correct", () => {
    const content = `FROM node:20-alpine\nRUN npm ci\nCMD ["node", "index.js"]\n`;
    const result = new DockerfileParser().parse("builder", content);
    expect(result.kind).toBe("dockerfile");
    expect(result.stages).toHaveLength(1);
    expect(result.stages[0].from).toBe("node:20-alpine");
  });

  test("multi-stage: two stages with correct from/as", () => {
    const content = [
      "FROM node:20-alpine AS builder",
      "RUN npm ci",
      "FROM nginx:1.25 AS runner",
      "COPY --from=builder /app/dist /usr/share/nginx/html",
    ].join("\n");
    const result = new DockerfileParser().parse("app", content);
    expect(result.stages).toHaveLength(2);
    expect(result.stages[0].from).toBe("node:20-alpine");
    expect(result.stages[0].as).toBe("builder");
    expect(result.stages[1].from).toBe("nginx:1.25");
    expect(result.stages[1].as).toBe("runner");
  });

  test("parses multiple instructions within a stage", () => {
    const content = `FROM ubuntu:22.04\nRUN apt-get update\nCOPY . /app\nWORKDIR /app\n`;
    const result = new DockerfileParser().parse("app", content);
    expect(result.stages[0].instructions.length).toBeGreaterThanOrEqual(2);
    const runInstr = result.stages[0].instructions.find((i) => i.instruction === "RUN");
    expect(runInstr?.value).toBe("apt-get update");
  });

  test("skips comments and blank lines", () => {
    // `#` comments and blank lines never become instructions.
    const content = `# This is a comment\nFROM alpine:3.18\n\n# Another comment\nRUN echo hello\n`;
    const result = new DockerfileParser().parse("test", content);
    expect(result.stages[0].from).toBe("alpine:3.18");
    expect(result.stages[0].instructions).toHaveLength(1);
  });
});
|
|
@@ -0,0 +1,190 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Docker Compose + Dockerfile parser — converts existing YAML/Dockerfiles
|
|
3
|
+
* to an intermediate representation (IR) for TypeScript code generation.
|
|
4
|
+
*/
|
|
5
|
+
|
|
6
|
+
import { parseYAML } from "@intentius/chant/yaml";
|
|
7
|
+
|
|
8
|
+
/** A compose `services:` entry. */
export interface ServiceIR {
  kind: "service";
  // Entity name as written in the compose file (the object key).
  name: string;
  // Whitelisted compose props, copied verbatim (see SERVICE_PROPS).
  props: Record<string, unknown>;
}

/** A top-level compose `volumes:` entry. */
export interface VolumeIR {
  kind: "volume";
  name: string;
  props: Record<string, unknown>;
}

/** A top-level compose `networks:` entry. */
export interface NetworkIR {
  kind: "network";
  name: string;
  props: Record<string, unknown>;
}

/** A top-level compose `configs:` entry. */
export interface ConfigIR {
  kind: "config";
  name: string;
  props: Record<string, unknown>;
}

/** A top-level compose `secrets:` entry. */
export interface SecretIR {
  kind: "secret";
  name: string;
  props: Record<string, unknown>;
}

/** One FROM-delimited build stage of a Dockerfile. */
export interface DockerfileStage {
  // Base image reference from the FROM line.
  from: string;
  // Stage name from `FROM … AS <name>`, when present.
  as?: string;
  // Remaining instructions in source order (keyword + raw argument string).
  instructions: Array<{ instruction: string; value: string }>;
}

/** A parsed Dockerfile: a named sequence of stages. */
export interface DockerfileIR {
  kind: "dockerfile";
  name: string;
  stages: DockerfileStage[];
}

/** Discriminated union of everything the parsers can produce (tag: `kind`). */
export type DockerIR = ServiceIR | VolumeIR | NetworkIR | ConfigIR | SecretIR | DockerfileIR;

/** Output of DockerParser.parse. */
export interface ParseResult {
  entities: DockerIR[];
  // Non-fatal issues; currently never populated by DockerParser.
  warnings: string[];
}
|
|
56
|
+
|
|
57
|
+
// Whitelists of compose keys copied into each IR node; any key not listed
// here is silently dropped by extractProps.
const SERVICE_PROPS = [
  "image", "ports", "environment", "volumes", "depends_on",
  "restart", "healthcheck", "labels", "command", "entrypoint",
  "networks", "build", "deploy", "secrets", "configs",
] as const;

const VOLUME_PROPS = ["driver", "driver_opts", "external", "labels"] as const;
const NETWORK_PROPS = ["driver", "external", "attachable", "labels"] as const;
const CONFIG_PROPS = ["file", "external"] as const;
const SECRET_PROPS = ["file", "external"] as const;
|
|
67
|
+
|
|
68
|
+
function extractProps(
|
|
69
|
+
raw: unknown,
|
|
70
|
+
allowed: readonly string[],
|
|
71
|
+
): Record<string, unknown> {
|
|
72
|
+
if (!raw || typeof raw !== "object" || Array.isArray(raw)) return {};
|
|
73
|
+
const obj = raw as Record<string, unknown>;
|
|
74
|
+
const props: Record<string, unknown> = {};
|
|
75
|
+
for (const key of allowed) {
|
|
76
|
+
if (key in obj) props[key] = obj[key];
|
|
77
|
+
}
|
|
78
|
+
return props;
|
|
79
|
+
}
|
|
80
|
+
|
|
81
|
+
/**
|
|
82
|
+
* Parser for docker-compose.yml files.
|
|
83
|
+
* Converts YAML to an IR consumable by the TypeScript generator.
|
|
84
|
+
*/
|
|
85
|
+
export class DockerParser {
|
|
86
|
+
parse(content: string): ParseResult {
|
|
87
|
+
if (!content.trim()) return { entities: [], warnings: [] };
|
|
88
|
+
|
|
89
|
+
const entities: DockerIR[] = [];
|
|
90
|
+
const warnings: string[] = [];
|
|
91
|
+
const doc = parseYAML(content) as Record<string, unknown>;
|
|
92
|
+
|
|
93
|
+
// services
|
|
94
|
+
const services = doc["services"];
|
|
95
|
+
if (services && typeof services === "object" && !Array.isArray(services)) {
|
|
96
|
+
for (const [name, raw] of Object.entries(services as Record<string, unknown>)) {
|
|
97
|
+
entities.push({
|
|
98
|
+
kind: "service",
|
|
99
|
+
name,
|
|
100
|
+
props: extractProps(raw, SERVICE_PROPS),
|
|
101
|
+
});
|
|
102
|
+
}
|
|
103
|
+
}
|
|
104
|
+
|
|
105
|
+
// volumes
|
|
106
|
+
const volumes = doc["volumes"];
|
|
107
|
+
if (volumes && typeof volumes === "object" && !Array.isArray(volumes)) {
|
|
108
|
+
for (const [name, raw] of Object.entries(volumes as Record<string, unknown>)) {
|
|
109
|
+
entities.push({
|
|
110
|
+
kind: "volume",
|
|
111
|
+
name,
|
|
112
|
+
props: extractProps(raw, VOLUME_PROPS),
|
|
113
|
+
});
|
|
114
|
+
}
|
|
115
|
+
}
|
|
116
|
+
|
|
117
|
+
// networks
|
|
118
|
+
const networks = doc["networks"];
|
|
119
|
+
if (networks && typeof networks === "object" && !Array.isArray(networks)) {
|
|
120
|
+
for (const [name, raw] of Object.entries(networks as Record<string, unknown>)) {
|
|
121
|
+
entities.push({
|
|
122
|
+
kind: "network",
|
|
123
|
+
name,
|
|
124
|
+
props: extractProps(raw, NETWORK_PROPS),
|
|
125
|
+
});
|
|
126
|
+
}
|
|
127
|
+
}
|
|
128
|
+
|
|
129
|
+
// configs
|
|
130
|
+
const configs = doc["configs"];
|
|
131
|
+
if (configs && typeof configs === "object" && !Array.isArray(configs)) {
|
|
132
|
+
for (const [name, raw] of Object.entries(configs as Record<string, unknown>)) {
|
|
133
|
+
entities.push({
|
|
134
|
+
kind: "config",
|
|
135
|
+
name,
|
|
136
|
+
props: extractProps(raw, CONFIG_PROPS),
|
|
137
|
+
});
|
|
138
|
+
}
|
|
139
|
+
}
|
|
140
|
+
|
|
141
|
+
// secrets
|
|
142
|
+
const secrets = doc["secrets"];
|
|
143
|
+
if (secrets && typeof secrets === "object" && !Array.isArray(secrets)) {
|
|
144
|
+
for (const [name, raw] of Object.entries(secrets as Record<string, unknown>)) {
|
|
145
|
+
entities.push({
|
|
146
|
+
kind: "secret",
|
|
147
|
+
name,
|
|
148
|
+
props: extractProps(raw, SECRET_PROPS),
|
|
149
|
+
});
|
|
150
|
+
}
|
|
151
|
+
}
|
|
152
|
+
|
|
153
|
+
return { entities, warnings };
|
|
154
|
+
}
|
|
155
|
+
}
|
|
156
|
+
|
|
157
|
+
/**
|
|
158
|
+
* Parser for Dockerfile content.
|
|
159
|
+
*/
|
|
160
|
+
export class DockerfileParser {
|
|
161
|
+
parse(name: string, content: string): DockerfileIR {
|
|
162
|
+
const stages: DockerfileStage[] = [];
|
|
163
|
+
let current: DockerfileStage | null = null;
|
|
164
|
+
|
|
165
|
+
for (const line of content.split("\n")) {
|
|
166
|
+
const trimmed = line.trim();
|
|
167
|
+
if (!trimmed || trimmed.startsWith("#")) continue;
|
|
168
|
+
|
|
169
|
+
const match = trimmed.match(/^([A-Z]+)\s+([\s\S]+)$/);
|
|
170
|
+
if (!match) continue;
|
|
171
|
+
|
|
172
|
+
const [, instruction, value] = match;
|
|
173
|
+
|
|
174
|
+
if (instruction === "FROM") {
|
|
175
|
+
// Parse "FROM image AS stagename"
|
|
176
|
+
const asMatch = value.match(/^(.+?)\s+[Aa][Ss]\s+(\S+)$/);
|
|
177
|
+
if (asMatch) {
|
|
178
|
+
current = { from: asMatch[1].trim(), as: asMatch[2].trim(), instructions: [] };
|
|
179
|
+
} else {
|
|
180
|
+
current = { from: value.trim(), instructions: [] };
|
|
181
|
+
}
|
|
182
|
+
stages.push(current);
|
|
183
|
+
} else if (current) {
|
|
184
|
+
current.instructions.push({ instruction, value: value.trim() });
|
|
185
|
+
}
|
|
186
|
+
}
|
|
187
|
+
|
|
188
|
+
return { kind: "dockerfile", name, stages };
|
|
189
|
+
}
|
|
190
|
+
}
|
|
@@ -0,0 +1,49 @@
|
|
|
1
|
+
import { describe, test, expect } from "bun:test";
|
|
2
|
+
import { readFileSync } from "fs";
|
|
3
|
+
import { join } from "path";
|
|
4
|
+
import { DockerParser, DockerfileParser } from "./parser";
|
|
5
|
+
import { DockerGenerator } from "./generator";
|
|
6
|
+
|
|
7
|
+
// Reads a fixture file from ./testdata next to this test.
// NOTE(review): `import.meta.dir` is Bun-specific — these tests run under bun:test only.
const testdata = (file: string) =>
  readFileSync(join(import.meta.dir, "testdata", file), "utf8");
|
|
9
|
+
|
|
10
|
+
// End-to-end: YAML fixture → DockerParser IR → DockerGenerator source.
// Assertions are substring checks on the generated TypeScript, not exact output.
describe("roundtrip: parse → generate", () => {
  test("simple.yaml → Service + Volume constructors", () => {
    const { entities } = new DockerParser().parse(testdata("simple.yaml"));
    const { source } = new DockerGenerator().generate(entities);
    expect(source).toContain("new Service(");
    expect(source).toContain("new Volume(");
    expect(source).toContain("nginx:1.25");
  });

  test("webapp.yaml → ports / healthcheck / depends_on survive roundtrip", () => {
    const { entities } = new DockerParser().parse(testdata("webapp.yaml"));
    const { source } = new DockerGenerator().generate(entities);
    expect(source).toContain("3000:3000");
    expect(source).toContain("depends_on");
    expect(source).toContain("healthcheck");
    expect(source).toContain("unless-stopped");
  });

  test("full.yaml → Config / Secret / Network constructors present", () => {
    const { entities } = new DockerParser().parse(testdata("full.yaml"));
    const { source } = new DockerGenerator().generate(entities);
    expect(source).toContain("new Config(");
    expect(source).toContain("new Secret(");
    expect(source).toContain("new Network(");
  });

  test("multi-stage Dockerfile inline → stages: in output", () => {
    // Two FROMs → multi-stage path in generateDockerfile (stages array).
    const content = [
      "FROM node:20-alpine AS builder",
      "RUN npm ci",
      "FROM nginx:1.25 AS runner",
      "COPY --from=builder /app/dist /usr/share/nginx/html",
    ].join("\n");
    const entity = new DockerfileParser().parse("app", content);
    const { source } = new DockerGenerator().generate([entity]);
    expect(source).toContain("stages");
    expect(source).toContain("node:20-alpine");
    expect(source).toContain("nginx:1.25");
  });
});
|
|
@@ -0,0 +1,43 @@
|
|
|
1
|
+
# Test fixture: exercises every top-level compose section
# (services, volumes, networks, configs, secrets).
services:
  app:
    image: myapp:latest
    ports:
      - "8080:8080"
    environment:
      DB_URL: postgres://db:5432/mydb
    networks:
      - frontend
      - backend
    secrets:
      - db-password
    configs:
      - app-config

  db:
    image: postgres:16
    networks:
      - backend

volumes:
  db-data:
    driver: local
  app-uploads:
    driver: local
    driver_opts:
      type: nfs
      o: addr=10.0.0.1,rw

networks:
  frontend:
    driver: bridge
  backend:
    driver: bridge
    internal: true

configs:
  app-config:
    file: ./config/app.conf

secrets:
  db-password:
    file: ./secrets/db-password.txt
|
@@ -0,0 +1,41 @@
|
|
|
1
|
+
# Test fixture: three-service web app with ports, environment, depends_on,
# restart policy, healthcheck, and named volumes.
services:
  api:
    image: node:20-alpine
    ports:
      - "3000:3000"
    environment:
      NODE_ENV: production
      PORT: "3000"
    depends_on:
      - db
      - cache
    restart: unless-stopped
    healthcheck:
      test:
        - CMD
        - curl
        - -f
        - http://localhost:3000/health
      interval: 30s
      timeout: 10s
      retries: 3
    volumes:
      - "app-data:/data"

  db:
    image: postgres:16-alpine
    environment:
      POSTGRES_DB: appdb
      POSTGRES_USER: app
      POSTGRES_PASSWORD: secret
    volumes:
      - "pg-data:/var/lib/postgresql/data"

  cache:
    image: redis:7-alpine
    restart: always

volumes:
  app-data:
  pg-data:
    driver: local
|
package/src/index.ts
ADDED
|
@@ -0,0 +1,29 @@
|
|
|
1
|
+
// Public entry point for @intentius/chant-lexicon-docker.
// Serializer — turns chant entities into compose YAML / Dockerfile text.
export { dockerSerializer } from "./serializer";

// Plugin — registers this lexicon with the chant toolchain.
export { dockerPlugin } from "./plugin";

// Interpolation intrinsic — builds compose `${VAR}` expressions.
export { env } from "./interpolation";
export type { EnvOptions, EnvIntrinsic } from "./interpolation";

// Context variables
export { DOCKER_VARS, COMPOSE_VARS } from "./variables";
export type { DockerVar, ComposeVar } from "./variables";

// Default labels
export { defaultLabels, defaultAnnotations, isDefaultLabels, isDefaultAnnotations } from "./default-labels";
export { DEFAULT_LABELS_MARKER, DEFAULT_ANNOTATIONS_MARKER } from "./default-labels";
export type { DefaultLabels, DefaultAnnotations } from "./default-labels";

// Generated entities — populated by `bun run generate`
export * from "./generated/index";

// Composites (to be added in Tier 2)
// export * from "./composites/index";

// Codegen pipeline (for external tooling)
export { generate, writeGeneratedFiles } from "./codegen/generate";
export { packageLexicon } from "./codegen/package";
export type { PackageOptions, PackageResult } from "./codegen/package";
|
|
@@ -0,0 +1,41 @@
|
|
|
1
|
+
import { describe, test, expect } from "bun:test";
|
|
2
|
+
import { env } from "./interpolation";
|
|
3
|
+
import { INTRINSIC_MARKER } from "@intentius/chant/intrinsic";
|
|
4
|
+
|
|
5
|
+
// env() builds compose-style variable interpolation strings; each test pins
// the exact serialized form returned by toJSON().
describe("env()", () => {
  test("bare variable: ${VAR}", () => {
    const e = env("APP_IMAGE");
    expect(e.toJSON()).toBe("${APP_IMAGE}");
    expect(e.toString()).toBe("${APP_IMAGE}");
  });

  test("default value: ${VAR:-default}", () => {
    const e = env("APP_IMAGE", { default: "myapp:latest" });
    expect(e.toJSON()).toBe("${APP_IMAGE:-myapp:latest}");
  });

  test("required variable: ${VAR:?error}", () => {
    // With no custom message, the error text defaults to "<VAR> is required".
    const e = env("DB_URL", { required: true });
    expect(e.toJSON()).toBe("${DB_URL:?DB_URL is required}");
  });

  test("required with custom error message: ${VAR:?msg}", () => {
    const e = env("DB_URL", { errorMessage: "Database URL must be set" });
    expect(e.toJSON()).toBe("${DB_URL:?Database URL must be set}");
  });

  test("ifSet: ${VAR:+value}", () => {
    const e = env("DEBUG", { ifSet: "true" });
    expect(e.toJSON()).toBe("${DEBUG:+true}");
  });

  test("ifSet takes precedence over default", () => {
    const e = env("X", { ifSet: "on", default: "off" });
    expect(e.toJSON()).toBe("${X:+on}");
  });

  test("has INTRINSIC_MARKER", () => {
    // The marker lets the serializer recognise the value as an intrinsic.
    const e = env("FOO");
    expect(INTRINSIC_MARKER in e).toBe(true);
  });
});
|