@kithinji/pod 1.0.18 → 1.0.20
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/main.js +664 -76
- package/dist/main.js.map +4 -4
- package/dist/types/deploy/deploy.d.ts +4 -0
- package/dist/types/deploy/deploy.d.ts.map +1 -0
- package/dist/types/deploy/index.d.ts +2 -0
- package/dist/types/deploy/index.d.ts.map +1 -0
- package/dist/types/docker/docker.d.ts.map +1 -1
- package/package.json +3 -1
- package/src/deploy/deploy.ts +592 -0
- package/src/deploy/index.ts +1 -0
- package/src/dev/server.ts +1 -1
- package/src/docker/docker.ts +156 -10
- package/src/main.ts +12 -6
package/dist/main.js
CHANGED
|
@@ -405,17 +405,17 @@ var MacroDependencyGraph = class {
|
|
|
405
405
|
const visited = /* @__PURE__ */ new Set();
|
|
406
406
|
const inProgress = /* @__PURE__ */ new Set();
|
|
407
407
|
const sorted = [];
|
|
408
|
-
const visit = (key,
|
|
408
|
+
const visit = (key, path15 = []) => {
|
|
409
409
|
if (visited.has(key)) return;
|
|
410
410
|
if (inProgress.has(key)) {
|
|
411
|
-
const cycle = [...
|
|
411
|
+
const cycle = [...path15, key].join(" -> ");
|
|
412
412
|
throw new Error(`Circular macro dependency detected: ${cycle}`);
|
|
413
413
|
}
|
|
414
414
|
const node = this.nodes.get(key);
|
|
415
415
|
if (!node) return;
|
|
416
416
|
inProgress.add(key);
|
|
417
417
|
for (const depKey of node.dependencies) {
|
|
418
|
-
visit(depKey, [...
|
|
418
|
+
visit(depKey, [...path15, key]);
|
|
419
419
|
}
|
|
420
420
|
inProgress.delete(key);
|
|
421
421
|
visited.add(key);
|
|
@@ -1573,22 +1573,22 @@ var ElementTransformer = class {
|
|
|
1573
1573
|
this.jsxUtils = jsxUtils;
|
|
1574
1574
|
this.observableManager = observableManager;
|
|
1575
1575
|
}
|
|
1576
|
-
transformElement(
|
|
1577
|
-
if (this.t.isJSXFragment(
|
|
1576
|
+
transformElement(path15, scope, context2) {
|
|
1577
|
+
if (this.t.isJSXFragment(path15.node)) {
|
|
1578
1578
|
return this.transformFragment(
|
|
1579
|
-
|
|
1579
|
+
path15,
|
|
1580
1580
|
scope,
|
|
1581
1581
|
context2
|
|
1582
1582
|
);
|
|
1583
1583
|
}
|
|
1584
1584
|
return this.transformJSXElement(
|
|
1585
|
-
|
|
1585
|
+
path15,
|
|
1586
1586
|
scope,
|
|
1587
1587
|
context2
|
|
1588
1588
|
);
|
|
1589
1589
|
}
|
|
1590
|
-
transformJSXElement(
|
|
1591
|
-
const jsxElement =
|
|
1590
|
+
transformJSXElement(path15, scope, context2) {
|
|
1591
|
+
const jsxElement = path15.node;
|
|
1592
1592
|
const tag = this.jsxUtils.getComponentName(jsxElement.openingElement.name);
|
|
1593
1593
|
const isComponent = this.jsxUtils.isComponentTag(tag);
|
|
1594
1594
|
if (isComponent && tag) {
|
|
@@ -1702,7 +1702,7 @@ var ElementTransformer = class {
|
|
|
1702
1702
|
}
|
|
1703
1703
|
return { id: elId, statements };
|
|
1704
1704
|
}
|
|
1705
|
-
transformFragment(
|
|
1705
|
+
transformFragment(path15, scope, context2) {
|
|
1706
1706
|
const fragId = scope.generateUidIdentifier("frag");
|
|
1707
1707
|
const statements = [];
|
|
1708
1708
|
statements.push(
|
|
@@ -1720,7 +1720,7 @@ var ElementTransformer = class {
|
|
|
1720
1720
|
])
|
|
1721
1721
|
);
|
|
1722
1722
|
this.processDOMChildren(
|
|
1723
|
-
|
|
1723
|
+
path15.node.children,
|
|
1724
1724
|
fragId,
|
|
1725
1725
|
statements,
|
|
1726
1726
|
scope,
|
|
@@ -2065,7 +2065,7 @@ function j2d({ types: t }) {
|
|
|
2065
2065
|
name: "jsx-to-dom",
|
|
2066
2066
|
visitor: {
|
|
2067
2067
|
Program: {
|
|
2068
|
-
exit(
|
|
2068
|
+
exit(path15, state) {
|
|
2069
2069
|
if (state.helpersImported) return;
|
|
2070
2070
|
const helpers = [
|
|
2071
2071
|
{ local: "$insert", imported: "insert" },
|
|
@@ -2076,7 +2076,7 @@ function j2d({ types: t }) {
|
|
|
2076
2076
|
{ local: "$effect", imported: "effect" }
|
|
2077
2077
|
];
|
|
2078
2078
|
for (const helper of helpers) {
|
|
2079
|
-
|
|
2079
|
+
path15.unshiftContainer(
|
|
2080
2080
|
"body",
|
|
2081
2081
|
t.importDeclaration(
|
|
2082
2082
|
[
|
|
@@ -2092,10 +2092,10 @@ function j2d({ types: t }) {
|
|
|
2092
2092
|
state.helpersImported = true;
|
|
2093
2093
|
}
|
|
2094
2094
|
},
|
|
2095
|
-
ClassMethod(
|
|
2096
|
-
if (
|
|
2095
|
+
ClassMethod(path15) {
|
|
2096
|
+
if (path15.getData("processed")) return;
|
|
2097
2097
|
let hasJSX = false;
|
|
2098
|
-
|
|
2098
|
+
path15.traverse({
|
|
2099
2099
|
JSXElement() {
|
|
2100
2100
|
hasJSX = true;
|
|
2101
2101
|
},
|
|
@@ -2104,11 +2104,11 @@ function j2d({ types: t }) {
|
|
|
2104
2104
|
}
|
|
2105
2105
|
});
|
|
2106
2106
|
if (!hasJSX) return;
|
|
2107
|
-
|
|
2108
|
-
const body =
|
|
2107
|
+
path15.setData("processed", true);
|
|
2108
|
+
const body = path15.node.body;
|
|
2109
2109
|
if (!t.isBlockStatement(body)) return;
|
|
2110
2110
|
const observables = /* @__PURE__ */ new Map();
|
|
2111
|
-
|
|
2111
|
+
path15.traverse({
|
|
2112
2112
|
JSXElement(jsxPath) {
|
|
2113
2113
|
observableManager.collectObservables(
|
|
2114
2114
|
jsxPath.node,
|
|
@@ -2132,7 +2132,7 @@ function j2d({ types: t }) {
|
|
|
2132
2132
|
const observableSignals = /* @__PURE__ */ new Map();
|
|
2133
2133
|
const signalDeclarations = [];
|
|
2134
2134
|
for (const [key, observable] of observables) {
|
|
2135
|
-
const signalId =
|
|
2135
|
+
const signalId = path15.scope.generateUidIdentifier("sig");
|
|
2136
2136
|
observableSignals.set(key, signalId);
|
|
2137
2137
|
signalDeclarations.push(
|
|
2138
2138
|
t.variableDeclaration("const", [
|
|
@@ -2150,7 +2150,7 @@ function j2d({ types: t }) {
|
|
|
2150
2150
|
astUtils.insertBeforeReturn(body.body, signalDeclarations);
|
|
2151
2151
|
}
|
|
2152
2152
|
const context2 = { observables, observableSignals };
|
|
2153
|
-
|
|
2153
|
+
path15.traverse({
|
|
2154
2154
|
JSXElement(jsxPath) {
|
|
2155
2155
|
if (jsxPath.getData("processed")) return;
|
|
2156
2156
|
jsxPath.setData("processed", true);
|
|
@@ -2742,8 +2742,8 @@ async function swcTransform(source, pathStr, tsx = false, react) {
|
|
|
2742
2742
|
resolveDir
|
|
2743
2743
|
};
|
|
2744
2744
|
}
|
|
2745
|
-
function parseFileMetadata(source,
|
|
2746
|
-
const isTsx =
|
|
2745
|
+
function parseFileMetadata(source, path15) {
|
|
2746
|
+
const isTsx = path15.endsWith(".tsx");
|
|
2747
2747
|
const isInteractiveFile = source.startsWith('"use interactive"') || source.startsWith("'use interactive'");
|
|
2748
2748
|
const isPublicFile = source.startsWith('"use public"') || source.startsWith("'use public'");
|
|
2749
2749
|
let directive = null;
|
|
@@ -2751,7 +2751,7 @@ function parseFileMetadata(source, path14) {
|
|
|
2751
2751
|
else if (isPublicFile) directive = "public";
|
|
2752
2752
|
return {
|
|
2753
2753
|
source,
|
|
2754
|
-
path:
|
|
2754
|
+
path: path15,
|
|
2755
2755
|
isTsx,
|
|
2756
2756
|
directive,
|
|
2757
2757
|
isPublicFile,
|
|
@@ -2759,58 +2759,58 @@ function parseFileMetadata(source, path14) {
|
|
|
2759
2759
|
};
|
|
2760
2760
|
}
|
|
2761
2761
|
var ServerBuildTransformer = class {
|
|
2762
|
-
async transformPublicFile(source,
|
|
2763
|
-
const controllerCode = generateController(
|
|
2762
|
+
async transformPublicFile(source, path15) {
|
|
2763
|
+
const controllerCode = generateController(path15, source);
|
|
2764
2764
|
if (controllerCode) {
|
|
2765
2765
|
source = `${source}
|
|
2766
2766
|
|
|
2767
2767
|
${controllerCode}
|
|
2768
2768
|
`;
|
|
2769
2769
|
}
|
|
2770
|
-
return swcTransform(source,
|
|
2770
|
+
return swcTransform(source, path15);
|
|
2771
2771
|
}
|
|
2772
|
-
async transformRegularTypeScript(source,
|
|
2772
|
+
async transformRegularTypeScript(source, path15, isPublic) {
|
|
2773
2773
|
if (isPublic) {
|
|
2774
|
-
return this.transformPublicFile(source,
|
|
2774
|
+
return this.transformPublicFile(source, path15);
|
|
2775
2775
|
}
|
|
2776
|
-
return swcTransform(source,
|
|
2776
|
+
return swcTransform(source, path15);
|
|
2777
2777
|
}
|
|
2778
|
-
async transformServerTsx(source,
|
|
2779
|
-
return swcTransform(source,
|
|
2778
|
+
async transformServerTsx(source, path15) {
|
|
2779
|
+
return swcTransform(source, path15, true, {
|
|
2780
2780
|
runtime: "automatic",
|
|
2781
2781
|
importSource: "@kithinji/orca"
|
|
2782
2782
|
});
|
|
2783
2783
|
}
|
|
2784
|
-
async transformInteractiveTsxStub(source,
|
|
2785
|
-
const stubSource = generateServerStub(
|
|
2786
|
-
return swcTransform(stubSource,
|
|
2784
|
+
async transformInteractiveTsxStub(source, path15) {
|
|
2785
|
+
const stubSource = generateServerStub(path15, source);
|
|
2786
|
+
return swcTransform(stubSource, path15);
|
|
2787
2787
|
}
|
|
2788
2788
|
async process(metadata, onClientFound) {
|
|
2789
2789
|
const expandedSource = await expandMacros(metadata.source, metadata.path);
|
|
2790
2790
|
const expandedMetadata = { ...metadata, source: expandedSource };
|
|
2791
|
-
const { source, path:
|
|
2791
|
+
const { source, path: path15, isTsx, isInteractiveFile, isPublicFile } = expandedMetadata;
|
|
2792
2792
|
if (isTsx) {
|
|
2793
2793
|
if (isInteractiveFile) {
|
|
2794
|
-
onClientFound(
|
|
2795
|
-
const clientCode = await this.transformInteractiveTsxStub(source,
|
|
2794
|
+
onClientFound(path15);
|
|
2795
|
+
const clientCode = await this.transformInteractiveTsxStub(source, path15);
|
|
2796
2796
|
const store = Store.getInstance();
|
|
2797
|
-
store.set(
|
|
2797
|
+
store.set(path15, clientCode.contents);
|
|
2798
2798
|
return clientCode;
|
|
2799
2799
|
}
|
|
2800
|
-
return this.transformServerTsx(source,
|
|
2800
|
+
return this.transformServerTsx(source, path15);
|
|
2801
2801
|
}
|
|
2802
|
-
return this.transformRegularTypeScript(source,
|
|
2802
|
+
return this.transformRegularTypeScript(source, path15, isPublicFile);
|
|
2803
2803
|
}
|
|
2804
2804
|
};
|
|
2805
2805
|
var ClientBuildTransformer = class {
|
|
2806
|
-
async transformInteractiveTsx(source,
|
|
2807
|
-
const swcResult = await swcTransform(source,
|
|
2806
|
+
async transformInteractiveTsx(source, path15) {
|
|
2807
|
+
const swcResult = await swcTransform(source, path15, true, {
|
|
2808
2808
|
runtime: "preserve"
|
|
2809
2809
|
});
|
|
2810
2810
|
const babelResult = await babel.transformAsync(
|
|
2811
2811
|
swcResult.contents,
|
|
2812
2812
|
{
|
|
2813
|
-
filename:
|
|
2813
|
+
filename: path15,
|
|
2814
2814
|
sourceType: "module",
|
|
2815
2815
|
plugins: [j2d],
|
|
2816
2816
|
parserOpts: {
|
|
@@ -2826,37 +2826,37 @@ var ClientBuildTransformer = class {
|
|
|
2826
2826
|
resolveDir: swcResult.resolveDir
|
|
2827
2827
|
};
|
|
2828
2828
|
}
|
|
2829
|
-
async transformServerComponent(node, source,
|
|
2830
|
-
const scSource = generateServerComponent(
|
|
2831
|
-
return swcTransform(scSource,
|
|
2829
|
+
async transformServerComponent(node, source, path15) {
|
|
2830
|
+
const scSource = generateServerComponent(path15, source);
|
|
2831
|
+
return swcTransform(scSource, path15);
|
|
2832
2832
|
}
|
|
2833
|
-
async transformPublicFileRsc(node, source,
|
|
2834
|
-
const stubSource = generateRscStub(
|
|
2835
|
-
return swcTransform(stubSource,
|
|
2833
|
+
async transformPublicFileRsc(node, source, path15) {
|
|
2834
|
+
const stubSource = generateRscStub(path15, source);
|
|
2835
|
+
return swcTransform(stubSource, path15);
|
|
2836
2836
|
}
|
|
2837
|
-
async transformSharedCode(source,
|
|
2838
|
-
return swcTransform(source,
|
|
2837
|
+
async transformSharedCode(source, path15) {
|
|
2838
|
+
return swcTransform(source, path15);
|
|
2839
2839
|
}
|
|
2840
2840
|
async process(node, metadata) {
|
|
2841
2841
|
const expandedSource = await expandMacros(metadata.source, metadata.path);
|
|
2842
2842
|
const expandedMetadata = { ...metadata, source: expandedSource };
|
|
2843
|
-
const { source, path:
|
|
2843
|
+
const { source, path: path15, isTsx, directive } = expandedMetadata;
|
|
2844
2844
|
if (isTsx) {
|
|
2845
2845
|
if (directive === "interactive") {
|
|
2846
|
-
return this.transformInteractiveTsx(source,
|
|
2846
|
+
return this.transformInteractiveTsx(source, path15);
|
|
2847
2847
|
} else if (directive === null) {
|
|
2848
|
-
return this.transformServerComponent(node, source,
|
|
2848
|
+
return this.transformServerComponent(node, source, path15);
|
|
2849
2849
|
} else {
|
|
2850
2850
|
throw new Error(
|
|
2851
|
-
`Unexpected directive "${directive}" for TSX file: ${
|
|
2851
|
+
`Unexpected directive "${directive}" for TSX file: ${path15}`
|
|
2852
2852
|
);
|
|
2853
2853
|
}
|
|
2854
2854
|
}
|
|
2855
2855
|
if (directive === "public") {
|
|
2856
|
-
return this.transformPublicFileRsc(node, source,
|
|
2856
|
+
return this.transformPublicFileRsc(node, source, path15);
|
|
2857
2857
|
}
|
|
2858
2858
|
if (directive === null) {
|
|
2859
|
-
return this.transformSharedCode(source,
|
|
2859
|
+
return this.transformSharedCode(source, path15);
|
|
2860
2860
|
}
|
|
2861
2861
|
return {
|
|
2862
2862
|
contents: source,
|
|
@@ -3185,7 +3185,7 @@ async function startDevServer() {
|
|
|
3185
3185
|
format: "esm",
|
|
3186
3186
|
sourcemap: config.build?.sourcemap ?? true,
|
|
3187
3187
|
splitting: true,
|
|
3188
|
-
minify: config.build?.minify ??
|
|
3188
|
+
minify: config.build?.minify ?? true,
|
|
3189
3189
|
plugins: [
|
|
3190
3190
|
...config.plugins?.map((cb) => cb(store)) || [],
|
|
3191
3191
|
...config.client_plugins?.map((cb) => cb(store)) || [],
|
|
@@ -4336,7 +4336,7 @@ bootstrap();
|
|
|
4336
4336
|
}
|
|
4337
4337
|
|
|
4338
4338
|
// src/main.ts
|
|
4339
|
-
import
|
|
4339
|
+
import path14 from "path";
|
|
4340
4340
|
import { execSync } from "child_process";
|
|
4341
4341
|
|
|
4342
4342
|
// src/docker/docker.ts
|
|
@@ -4361,6 +4361,7 @@ async function dockerize(env = "prod") {
|
|
|
4361
4361
|
} else {
|
|
4362
4362
|
await setupDevelopment(cwd, projectName, selectedServices);
|
|
4363
4363
|
}
|
|
4364
|
+
await writeEnvVars(cwd, selectedServices, env);
|
|
4364
4365
|
printNextSteps(projectName, env, selectedServices);
|
|
4365
4366
|
}
|
|
4366
4367
|
function detectServices(packageJson) {
|
|
@@ -4407,8 +4408,66 @@ async function restructureProject(cwd, projectName) {
|
|
|
4407
4408
|
await fs9.move(envSrc, envDest, { overwrite: true });
|
|
4408
4409
|
}
|
|
4409
4410
|
}
|
|
4411
|
+
async function writeEnvVars(cwd, services, env) {
|
|
4412
|
+
const envPath = path12.join(cwd, ".env");
|
|
4413
|
+
let existingEnv = {};
|
|
4414
|
+
let existingContent = "";
|
|
4415
|
+
if (fs9.existsSync(envPath)) {
|
|
4416
|
+
existingContent = await fs9.readFile(envPath, "utf8");
|
|
4417
|
+
existingEnv = parseEnvFile(existingContent);
|
|
4418
|
+
}
|
|
4419
|
+
const newVars = [];
|
|
4420
|
+
if (env === "prod" && !existingEnv.HOST) {
|
|
4421
|
+
newVars.push("HOST=example.com");
|
|
4422
|
+
}
|
|
4423
|
+
for (const service of services) {
|
|
4424
|
+
const serviceVars = getEnvVars(service.name);
|
|
4425
|
+
for (const varLine of serviceVars) {
|
|
4426
|
+
const [key] = varLine.split("=");
|
|
4427
|
+
if (!existingEnv[key]) {
|
|
4428
|
+
newVars.push(varLine);
|
|
4429
|
+
}
|
|
4430
|
+
}
|
|
4431
|
+
if (env === "dev" && service.needsTunnel) {
|
|
4432
|
+
const remoteHostKey = `${service.name.toUpperCase()}_REMOTE_HOST`;
|
|
4433
|
+
const remotePortKey = `${service.name.toUpperCase()}_REMOTE_PORT`;
|
|
4434
|
+
if (!existingEnv[remoteHostKey]) {
|
|
4435
|
+
newVars.push(`${remoteHostKey}=user@remote-server.com`);
|
|
4436
|
+
}
|
|
4437
|
+
if (!existingEnv[remotePortKey]) {
|
|
4438
|
+
newVars.push(`${remotePortKey}=${getDefaultPort(service.name)}`);
|
|
4439
|
+
}
|
|
4440
|
+
}
|
|
4441
|
+
}
|
|
4442
|
+
if (newVars.length > 0) {
|
|
4443
|
+
const separator = existingContent && !existingContent.endsWith("\n") ? "\n" : "";
|
|
4444
|
+
const newContent = existingContent + separator + (existingContent ? "\n" : "") + newVars.join("\n") + "\n";
|
|
4445
|
+
await fs9.writeFile(envPath, newContent);
|
|
4446
|
+
console.log(
|
|
4447
|
+
`\u2705 Added ${newVars.length} new environment variable(s) to .env`
|
|
4448
|
+
);
|
|
4449
|
+
} else {
|
|
4450
|
+
console.log("\u2705 All required environment variables already exist in .env");
|
|
4451
|
+
}
|
|
4452
|
+
}
|
|
4453
|
+
function parseEnvFile(content) {
|
|
4454
|
+
const env = {};
|
|
4455
|
+
const lines = content.split("\n");
|
|
4456
|
+
for (const line of lines) {
|
|
4457
|
+
const trimmed = line.trim();
|
|
4458
|
+
if (!trimmed || trimmed.startsWith("#")) continue;
|
|
4459
|
+
const equalIndex = trimmed.indexOf("=");
|
|
4460
|
+
if (equalIndex > 0) {
|
|
4461
|
+
const key = trimmed.substring(0, equalIndex).trim();
|
|
4462
|
+
const value = trimmed.substring(equalIndex + 1).trim();
|
|
4463
|
+
env[key] = value;
|
|
4464
|
+
}
|
|
4465
|
+
}
|
|
4466
|
+
return env;
|
|
4467
|
+
}
|
|
4410
4468
|
async function createDockerfile(cwd, projectName) {
|
|
4411
4469
|
const dockerfilePath = path12.join(cwd, projectName, "Dockerfile");
|
|
4470
|
+
const dockerignorePath = path12.join(cwd, projectName, ".dockerignore");
|
|
4412
4471
|
const dockerfile = `FROM node:18-alpine
|
|
4413
4472
|
|
|
4414
4473
|
WORKDIR /app
|
|
@@ -4422,8 +4481,65 @@ COPY . .
|
|
|
4422
4481
|
EXPOSE 8080
|
|
4423
4482
|
|
|
4424
4483
|
CMD ["npm", "run", "dev"]
|
|
4484
|
+
`;
|
|
4485
|
+
const dockerignore = `# Dependencies
|
|
4486
|
+
node_modules
|
|
4487
|
+
npm-debug.log
|
|
4488
|
+
yarn-error.log
|
|
4489
|
+
package-lock.json
|
|
4490
|
+
yarn.lock
|
|
4491
|
+
|
|
4492
|
+
# Environment files
|
|
4493
|
+
.env
|
|
4494
|
+
.env.*
|
|
4495
|
+
|
|
4496
|
+
# Git
|
|
4497
|
+
.git
|
|
4498
|
+
.gitignore
|
|
4499
|
+
|
|
4500
|
+
# IDE
|
|
4501
|
+
.vscode
|
|
4502
|
+
.idea
|
|
4503
|
+
*.swp
|
|
4504
|
+
*.swo
|
|
4505
|
+
*~
|
|
4506
|
+
|
|
4507
|
+
# OS
|
|
4508
|
+
.DS_Store
|
|
4509
|
+
Thumbs.db
|
|
4510
|
+
|
|
4511
|
+
# Testing
|
|
4512
|
+
coverage
|
|
4513
|
+
.nyc_output
|
|
4514
|
+
*.test.js
|
|
4515
|
+
*.spec.js
|
|
4516
|
+
__tests__
|
|
4517
|
+
|
|
4518
|
+
# Build files
|
|
4519
|
+
dist
|
|
4520
|
+
public
|
|
4521
|
+
|
|
4522
|
+
# Logs
|
|
4523
|
+
logs
|
|
4524
|
+
*.log
|
|
4525
|
+
|
|
4526
|
+
# Documentation
|
|
4527
|
+
README.md
|
|
4528
|
+
docs
|
|
4529
|
+
*.md
|
|
4530
|
+
|
|
4531
|
+
# Docker
|
|
4532
|
+
Dockerfile
|
|
4533
|
+
.dockerignore
|
|
4534
|
+
docker-compose*.yml
|
|
4535
|
+
|
|
4536
|
+
# Misc
|
|
4537
|
+
.cache
|
|
4538
|
+
tmp
|
|
4539
|
+
temp
|
|
4425
4540
|
`;
|
|
4426
4541
|
await fs9.writeFile(dockerfilePath, dockerfile);
|
|
4542
|
+
await fs9.writeFile(dockerignorePath, dockerignore);
|
|
4427
4543
|
}
|
|
4428
4544
|
async function setupProduction(cwd, projectName, services) {
|
|
4429
4545
|
const compose = {
|
|
@@ -4454,7 +4570,6 @@ async function setupProduction(cwd, projectName, services) {
|
|
|
4454
4570
|
],
|
|
4455
4571
|
networks: ["web"],
|
|
4456
4572
|
env_file: [".env"]
|
|
4457
|
-
// Fixed: Added env_file to read HOST variable
|
|
4458
4573
|
},
|
|
4459
4574
|
[projectName]: {
|
|
4460
4575
|
build: {
|
|
@@ -4463,19 +4578,16 @@ async function setupProduction(cwd, projectName, services) {
|
|
|
4463
4578
|
},
|
|
4464
4579
|
labels: [
|
|
4465
4580
|
"traefik.enable=true",
|
|
4466
|
-
"traefik.http.routers.app.rule=Host(
|
|
4467
|
-
// Fixed: Changed from app.${HOST} to ${HOST}
|
|
4581
|
+
"traefik.http.routers.app.rule=Host(`app.${HOST}`)",
|
|
4468
4582
|
"traefik.http.routers.app.entrypoints=websecure",
|
|
4469
4583
|
"traefik.http.routers.app.tls.certresolver=myresolver",
|
|
4470
4584
|
"traefik.http.services.app.loadbalancer.server.port=8080"
|
|
4471
4585
|
],
|
|
4472
4586
|
env_file: [".env"],
|
|
4473
4587
|
networks: ["web"],
|
|
4474
|
-
volumes: [`./${projectName}:/app`],
|
|
4475
|
-
// Fixed: Changed from ./web to ./${projectName}
|
|
4588
|
+
volumes: [`./${projectName}:/app`, `/app/node_modules`],
|
|
4476
4589
|
command: "npm run dev",
|
|
4477
4590
|
depends_on: []
|
|
4478
|
-
// Fixed: Added missing depends_on array
|
|
4479
4591
|
}
|
|
4480
4592
|
},
|
|
4481
4593
|
networks: {
|
|
@@ -4484,7 +4596,6 @@ async function setupProduction(cwd, projectName, services) {
|
|
|
4484
4596
|
}
|
|
4485
4597
|
},
|
|
4486
4598
|
volumes: {}
|
|
4487
|
-
// Fixed: Added missing volumes object
|
|
4488
4599
|
};
|
|
4489
4600
|
for (const service of services) {
|
|
4490
4601
|
const config = getServiceConfig(service.name);
|
|
@@ -4548,7 +4659,6 @@ async function setupDevelopment(cwd, projectName, services) {
|
|
|
4548
4659
|
ports: ["8080:8080"],
|
|
4549
4660
|
env_file: [".env"],
|
|
4550
4661
|
volumes: [`./${projectName}:/app`, `/app/node_modules`],
|
|
4551
|
-
// Fixed: Changed from . to ./${projectName}
|
|
4552
4662
|
command: "npm run dev",
|
|
4553
4663
|
depends_on: []
|
|
4554
4664
|
}
|
|
@@ -4665,6 +4775,33 @@ function getServiceConfig(serviceName) {
|
|
|
4665
4775
|
};
|
|
4666
4776
|
return configs[serviceName];
|
|
4667
4777
|
}
|
|
4778
|
+
function getEnvVars(serviceName) {
|
|
4779
|
+
const vars = {
|
|
4780
|
+
postgres: [
|
|
4781
|
+
"DB_HOST=postgres",
|
|
4782
|
+
"DB_PORT=5432",
|
|
4783
|
+
"DB_USER=myuser",
|
|
4784
|
+
"DB_PASSWORD=mypassword",
|
|
4785
|
+
"DB_NAME=mydb"
|
|
4786
|
+
],
|
|
4787
|
+
mysql: [
|
|
4788
|
+
"DB_HOST=mysql",
|
|
4789
|
+
"DB_PORT=3306",
|
|
4790
|
+
"DB_USER=myuser",
|
|
4791
|
+
"DB_PASSWORD=mypassword",
|
|
4792
|
+
"DB_NAME=mydb",
|
|
4793
|
+
"DB_ROOT_PASSWORD=rootpassword"
|
|
4794
|
+
],
|
|
4795
|
+
redis: ["REDIS_HOST=redis", "REDIS_PORT=6379"],
|
|
4796
|
+
mongodb: [
|
|
4797
|
+
"MONGO_HOST=mongodb",
|
|
4798
|
+
"MONGO_PORT=27017",
|
|
4799
|
+
"MONGO_USER=myuser",
|
|
4800
|
+
"MONGO_PASSWORD=mypassword"
|
|
4801
|
+
]
|
|
4802
|
+
};
|
|
4803
|
+
return vars[serviceName] || [];
|
|
4804
|
+
}
|
|
4668
4805
|
function getDefaultPort(service) {
|
|
4669
4806
|
const ports = {
|
|
4670
4807
|
postgres: 5432,
|
|
@@ -4679,12 +4816,12 @@ function printNextSteps(projectName, env, services) {
|
|
|
4679
4816
|
\u2705 Done! Next steps:
|
|
4680
4817
|
`);
|
|
4681
4818
|
if (env === "prod") {
|
|
4682
|
-
console.log(` #
|
|
4819
|
+
console.log(` # Review and edit .env with your settings`);
|
|
4683
4820
|
console.log(` docker-compose up -d`);
|
|
4684
|
-
console.log(` # Access at https
|
|
4821
|
+
console.log(` # Access at https://app.\${HOST}
|
|
4685
4822
|
`);
|
|
4686
4823
|
} else {
|
|
4687
|
-
console.log(` #
|
|
4824
|
+
console.log(` # Review and edit .env with your settings`);
|
|
4688
4825
|
if (services.some((s) => s.needsTunnel)) {
|
|
4689
4826
|
console.log(` # Add SSH keys: {service}.pem`);
|
|
4690
4827
|
}
|
|
@@ -4693,12 +4830,461 @@ function printNextSteps(projectName, env, services) {
|
|
|
4693
4830
|
}
|
|
4694
4831
|
}
|
|
4695
4832
|
|
|
4833
|
+
// src/deploy/deploy.ts
|
|
4834
|
+
import fs10 from "fs-extra";
|
|
4835
|
+
import yaml2 from "js-yaml";
|
|
4836
|
+
import path13 from "path";
|
|
4837
|
+
import os from "os";
|
|
4838
|
+
import { NodeSSH } from "node-ssh";
|
|
4839
|
+
import chalk from "chalk";
|
|
4840
|
+
function interpolate(str, context2) {
|
|
4841
|
+
if (!str) return "";
|
|
4842
|
+
return str.replace(/\${([^}]+)}/g, (match, key) => {
|
|
4843
|
+
return context2[key] !== void 0 ? String(context2[key]) : match;
|
|
4844
|
+
});
|
|
4845
|
+
}
|
|
4846
|
+
function deepInterpolate(obj, context2) {
|
|
4847
|
+
if (typeof obj === "string") {
|
|
4848
|
+
return interpolate(obj, context2);
|
|
4849
|
+
}
|
|
4850
|
+
if (Array.isArray(obj)) {
|
|
4851
|
+
return obj.map((item) => deepInterpolate(item, context2));
|
|
4852
|
+
}
|
|
4853
|
+
if (obj && typeof obj === "object") {
|
|
4854
|
+
const result = {};
|
|
4855
|
+
for (const [key, value] of Object.entries(obj)) {
|
|
4856
|
+
result[key] = deepInterpolate(value, context2);
|
|
4857
|
+
}
|
|
4858
|
+
return result;
|
|
4859
|
+
}
|
|
4860
|
+
return obj;
|
|
4861
|
+
}
|
|
4862
|
+
function expandTilde(fp) {
|
|
4863
|
+
if (!fp || typeof fp !== "string") return fp;
|
|
4864
|
+
return fp.startsWith("~/") ? path13.join(os.homedir(), fp.slice(2)) : fp;
|
|
4865
|
+
}
|
|
4866
|
+
function resolveLocalPaths(obj, cwd) {
|
|
4867
|
+
if (Array.isArray(obj)) {
|
|
4868
|
+
return obj.map((item) => resolveLocalPaths(item, cwd));
|
|
4869
|
+
}
|
|
4870
|
+
if (obj && typeof obj === "object") {
|
|
4871
|
+
const result = {};
|
|
4872
|
+
for (const [key, value] of Object.entries(obj)) {
|
|
4873
|
+
const isLocalPathKey = key === "keyPath" || key === "source";
|
|
4874
|
+
if (isLocalPathKey && typeof value === "string") {
|
|
4875
|
+
const expanded = expandTilde(value);
|
|
4876
|
+
result[key] = path13.isAbsolute(expanded) ? expanded : path13.resolve(cwd, expanded);
|
|
4877
|
+
} else {
|
|
4878
|
+
result[key] = resolveLocalPaths(value, cwd);
|
|
4879
|
+
}
|
|
4880
|
+
}
|
|
4881
|
+
return result;
|
|
4882
|
+
}
|
|
4883
|
+
return obj;
|
|
4884
|
+
}
|
|
4885
|
+
var SCRIPTS = {
|
|
4886
|
+
SWAP: (size) => `#!/bin/bash
|
|
4887
|
+
set -euo pipefail
|
|
4888
|
+
|
|
4889
|
+
# Check if swap file exists
|
|
4890
|
+
if [ -f /swapfile ]; then
|
|
4891
|
+
CURRENT_SIZE=$(stat -c%s /swapfile 2>/dev/null || echo "0")
|
|
4892
|
+
CURRENT_SIZE_GB=$((CURRENT_SIZE / 1024 / 1024 / 1024))
|
|
4893
|
+
REQ_SIZE=$(echo "${size}" | tr -d 'G' | tr -d 'g')
|
|
4894
|
+
|
|
4895
|
+
if [ "$CURRENT_SIZE_GB" -ge "$REQ_SIZE" ]; then
|
|
4896
|
+
echo "LOG: Swap of sufficient size exists. Skipping."
|
|
4897
|
+
exit 0
|
|
4898
|
+
fi
|
|
4899
|
+
|
|
4900
|
+
# Remove old swap if size doesn't match
|
|
4901
|
+
sudo swapoff /swapfile || true
|
|
4902
|
+
sudo rm /swapfile
|
|
4903
|
+
fi
|
|
4904
|
+
|
|
4905
|
+
echo "LOG: Creating ${size} swap file..."
|
|
4906
|
+
sudo fallocate -l ${size} /swapfile || \\
|
|
4907
|
+
sudo dd if=/dev/zero of=/swapfile bs=1M count=$(($(echo ${size} | tr -d 'G' | tr -d 'g') * 1024))
|
|
4908
|
+
|
|
4909
|
+
sudo chmod 600 /swapfile
|
|
4910
|
+
sudo mkswap /swapfile
|
|
4911
|
+
sudo swapon /swapfile
|
|
4912
|
+
|
|
4913
|
+
# Add to fstab if not already there
|
|
4914
|
+
grep -q "/swapfile" /etc/fstab || echo '/swapfile none swap sw 0 0' | sudo tee -a /etc/fstab
|
|
4915
|
+
|
|
4916
|
+
echo "LOG: Swap file configured successfully"
|
|
4917
|
+
`,
|
|
4918
|
+
DOCKER: (version, user, addToGroup) => `#!/bin/bash
|
|
4919
|
+
set -euo pipefail
|
|
4920
|
+
|
|
4921
|
+
echo "LOG: Target Docker version: ${version}"
|
|
4922
|
+
|
|
4923
|
+
# Check current Docker installation
|
|
4924
|
+
INSTALLED_VER=$(docker --version 2>/dev/null | awk '{print $3}' | tr -d ',' || echo "none")
|
|
4925
|
+
echo "LOG: Currently installed: $INSTALLED_VER"
|
|
4926
|
+
|
|
4927
|
+
# Determine if we need to install/reinstall
|
|
4928
|
+
NEEDS_INSTALL=false
|
|
4929
|
+
|
|
4930
|
+
if [ "$INSTALLED_VER" = "none" ]; then
|
|
4931
|
+
echo "LOG: Docker not installed"
|
|
4932
|
+
NEEDS_INSTALL=true
|
|
4933
|
+
elif [ "${version}" = "latest" ]; then
|
|
4934
|
+
echo "LOG: Latest version requested"
|
|
4935
|
+
NEEDS_INSTALL=true
|
|
4936
|
+
elif [[ "$INSTALLED_VER" != *"${version}"* ]]; then
|
|
4937
|
+
echo "LOG: Version mismatch detected (need ${version}, have $INSTALLED_VER)"
|
|
4938
|
+
echo "LOG: Uninstalling current Docker..."
|
|
4939
|
+
|
|
4940
|
+
# Stop Docker services
|
|
4941
|
+
sudo systemctl stop docker.socket 2>/dev/null || true
|
|
4942
|
+
sudo systemctl stop docker 2>/dev/null || true
|
|
4943
|
+
sudo systemctl stop containerd 2>/dev/null || true
|
|
4944
|
+
|
|
4945
|
+
# Remove Docker packages (data is preserved)
|
|
4946
|
+
sudo apt-get purge -y \\
|
|
4947
|
+
docker-ce \\
|
|
4948
|
+
docker-ce-cli \\
|
|
4949
|
+
containerd.io \\
|
|
4950
|
+
docker-buildx-plugin \\
|
|
4951
|
+
docker-compose-plugin \\
|
|
4952
|
+
docker-ce-rootless-extras \\
|
|
4953
|
+
2>/dev/null || true
|
|
4954
|
+
|
|
4955
|
+
sudo apt-get purge -y docker docker-engine docker.io runc 2>/dev/null || true
|
|
4956
|
+
sudo apt-get autoremove -y
|
|
4957
|
+
|
|
4958
|
+
echo "LOG: Uninstall complete"
|
|
4959
|
+
NEEDS_INSTALL=true
|
|
4960
|
+
else
|
|
4961
|
+
echo "LOG: Correct version already installed"
|
|
4962
|
+
fi
|
|
4963
|
+
|
|
4964
|
+
if [ "$NEEDS_INSTALL" = true ]; then
|
|
4965
|
+
echo "LOG: Installing Docker ${version}..."
|
|
4966
|
+
|
|
4967
|
+
# Update and install prerequisites
|
|
4968
|
+
sudo apt-get update -y
|
|
4969
|
+
sudo apt-get install -y ca-certificates curl gnupg lsb-release
|
|
4970
|
+
|
|
4971
|
+
# Add Docker GPG key
|
|
4972
|
+
sudo install -m 0755 -d /etc/apt/keyrings
|
|
4973
|
+
curl -fsSL https://download.docker.com/linux/ubuntu/gpg | \\
|
|
4974
|
+
sudo gpg --dearmor --batch --yes -o /etc/apt/keyrings/docker.gpg
|
|
4975
|
+
sudo chmod a+r /etc/apt/keyrings/docker.gpg
|
|
4976
|
+
|
|
4977
|
+
# Add Docker repository
|
|
4978
|
+
ARCH="$(dpkg --print-architecture)"
|
|
4979
|
+
RELEASE="$(lsb_release -cs)"
|
|
4980
|
+
echo "deb [arch=\${ARCH} signed-by=/etc/apt/keyrings/docker.gpg] https://download.docker.com/linux/ubuntu \${RELEASE} stable" | \\
|
|
4981
|
+
sudo tee /etc/apt/sources.list.d/docker.list > /dev/null
|
|
4982
|
+
|
|
4983
|
+
sudo apt-get update -y
|
|
4984
|
+
|
|
4985
|
+
# Install Docker
|
|
4986
|
+
if [ "${version}" = "latest" ]; then
|
|
4987
|
+
echo "LOG: Installing latest Docker version"
|
|
4988
|
+
sudo apt-get install -y \\
|
|
4989
|
+
docker-ce \\
|
|
4990
|
+
docker-ce-cli \\
|
|
4991
|
+
containerd.io \\
|
|
4992
|
+
docker-buildx-plugin \\
|
|
4993
|
+
docker-compose-plugin
|
|
4994
|
+
else
|
|
4995
|
+
echo "LOG: Finding version ${version}..."
|
|
4996
|
+
VERSION_STRING=$(apt-cache madison docker-ce | grep "${version}" | head -1 | awk '{print $3}')
|
|
4997
|
+
|
|
4998
|
+
if [ -z "$VERSION_STRING" ]; then
|
|
4999
|
+
echo "LOG: ERROR - Version ${version} not found!"
|
|
5000
|
+
echo "LOG: Available versions:"
|
|
5001
|
+
apt-cache madison docker-ce | head -10
|
|
5002
|
+
exit 1
|
|
5003
|
+
fi
|
|
5004
|
+
|
|
5005
|
+
echo "LOG: Installing Docker CE version: $VERSION_STRING"
|
|
5006
|
+
sudo apt-get install -y \\
|
|
5007
|
+
docker-ce=$VERSION_STRING \\
|
|
5008
|
+
docker-ce-cli=$VERSION_STRING \\
|
|
5009
|
+
containerd.io \\
|
|
5010
|
+
docker-buildx-plugin \\
|
|
5011
|
+
docker-compose-plugin
|
|
5012
|
+
fi
|
|
5013
|
+
|
|
5014
|
+
# Enable and start Docker
|
|
5015
|
+
sudo systemctl enable docker
|
|
5016
|
+
sudo systemctl start docker
|
|
5017
|
+
|
|
5018
|
+
FINAL_VER=$(docker --version | awk '{print $3}' | tr -d ',')
|
|
5019
|
+
echo "LOG: Docker installed successfully - $FINAL_VER"
|
|
5020
|
+
fi
|
|
5021
|
+
|
|
5022
|
+
# Configure docker group
|
|
5023
|
+
if [ "${addToGroup}" = "true" ]; then
|
|
5024
|
+
if ! getent group docker >/dev/null 2>&1; then
|
|
5025
|
+
sudo groupadd docker
|
|
5026
|
+
echo "LOG: Created docker group"
|
|
5027
|
+
fi
|
|
5028
|
+
|
|
5029
|
+
if groups ${user} | grep -q '\\bdocker\\b'; then
|
|
5030
|
+
echo "LOG: User ${user} already in docker group"
|
|
5031
|
+
else
|
|
5032
|
+
sudo usermod -aG docker ${user}
|
|
5033
|
+
echo "LOG: Added ${user} to docker group (logout required)"
|
|
5034
|
+
fi
|
|
5035
|
+
fi
|
|
5036
|
+
`
|
|
5037
|
+
};
|
|
5038
|
+
// Thin wrapper around an established NodeSSH connection. Handles pushing
// generated content/scripts to the remote host and running commands whose
// text is first passed through `interpolate` with the target context.
var RemoteShell = class {
  constructor(ssh) {
    this.ssh = ssh;
  }
  /**
   * Write `content` to a local temp file and push it to `remotePath` on the
   * remote host, creating the parent directory first. The local staging file
   * is removed even when the upload fails.
   */
  async uploadContent(remotePath, content) {
    const stagingFile = path13.join(os.tmpdir(), `pod_tmp_${Date.now()}`);
    fs10.writeFileSync(stagingFile, content);
    try {
      await this.ssh.execCommand(`mkdir -p $(dirname ${remotePath})`);
      await this.ssh.putFile(stagingFile, remotePath);
    } finally {
      if (fs10.existsSync(stagingFile)) {
        fs10.unlinkSync(stagingFile);
      }
    }
  }
  /**
   * Upload an interpolated script body to a unique /tmp path, run it, and
   * always delete it afterwards. Returns the execCommand result of the run.
   */
  async runScript(name, content, context2) {
    const remotePath = `/tmp/pod_script_${name}_${Date.now()}.sh`;
    await this.uploadContent(remotePath, interpolate(content, context2));
    try {
      await this.ssh.execCommand(`chmod +x ${remotePath}`);
      return await this.run(remotePath, context2);
    } finally {
      await this.ssh.execCommand(`rm -f ${remotePath}`);
    }
  }
  /**
   * Interpolate and execute a remote command.
   * Throws on a non-zero exit code (a null code is tolerated). Unless
   * `silent`, stdout lines prefixed with "LOG:" are echoed locally in gray.
   * @returns the raw execCommand result ({ code, stdout, stderr }).
   */
  async run(cmd, context2, silent = false) {
    const result = await this.ssh.execCommand(interpolate(cmd, context2));
    const failed = result.code !== 0 && result.code !== null;
    if (failed) {
      throw new Error(`Execution failed: ${cmd}\nSTDERR: ${result.stderr}`);
    }
    if (!silent && result.stdout) {
      for (const line of result.stdout.split("\n")) {
        if (line.startsWith("LOG:")) {
          console.log(chalk.gray(` ${line.replace("LOG: ", "")}`));
        }
      }
    }
    return result;
  }
  /**
   * Read and parse a remote JSON file. Returns null when the file cannot be
   * read (non-zero cat exit) or does not parse — never throws.
   */
  async readJson(remotePath) {
    const res = await this.ssh.execCommand(`cat ${remotePath}`);
    if (res.code !== 0) {
      return null;
    }
    try {
      return JSON.parse(res.stdout);
    } catch {
      return null;
    }
  }
};
|
|
5084
|
+
/**
 * Deploy the current project to a named target from pod.deploy.yml.
 *
 * Flow: load + interpolate the target config, connect over SSH, read the
 * remote pod-lock.json state file (resetting its once-actions when the
 * deployment version changed), then run each operation in order, dispatching
 * on its type (ensure / action / verify). Any operation failure is wrapped
 * with the operation name and aborts the deploy; the SSH connection is
 * always disposed.
 *
 * @param {string} targetName - key under `targets` in pod.deploy.yml
 * @param {object} options    - CLI options (e.g. forceEnsure)
 * @throws when the target is unknown or any operation fails
 */
async function deploy(targetName, options) {
  const cwd = process.cwd();
  const rawConfig = yaml2.load(fs10.readFileSync(path13.join(cwd, "pod.deploy.yml"), "utf8"));
  const rawTarget = rawConfig.targets?.[targetName];
  if (!rawTarget) throw new Error(`Target ${targetName} not found.`);
  console.log(chalk.blue.bold(`\n\u{1F680} Pod Deploy: ${rawConfig.name} v${rawConfig.version} \u2192 ${targetName}\n`));
  // Target values may reference top-level config fields; target keys win.
  const interpolated = deepInterpolate(rawTarget, { ...rawConfig, ...rawTarget });
  const target = resolveLocalPaths(interpolated, cwd);
  const ssh = new NodeSSH();
  const shell = new RemoteShell(ssh);
  try {
    await ssh.connect({
      host: target.host,
      username: target.user,
      privateKeyPath: target.keyPath,
      port: target.port || 22
    });
    // Remote lock file records what has already been ensured/run.
    const lockPath = path13.posix.join(target.deployPath, "pod-lock.json");
    const lock = await shell.readJson(lockPath) || { ensures: {}, once_actions: [] };
    if (lock.deployment_version !== rawConfig.version) {
      // A new deployment version re-arms all "once" actions.
      console.log(chalk.magenta(`\u2192 Version change: ${rawConfig.version}`));
      lock.deployment_version = rawConfig.version;
      lock.once_actions = [];
      await shell.uploadContent(lockPath, JSON.stringify(lock, null, 2));
    }
    for (const op of target.operations) {
      try {
        switch (op.type) {
          case "ensure":
            await handleEnsure(op, shell, target, lock, lockPath, options);
            break;
          case "action":
            await handleAction(op, shell, target, lock, lockPath);
            break;
          case "verify":
            await handleVerify(op, shell, target);
            break;
          default:
            throw new Error(`Unknown operation type: ${op.type}`);
        }
      } catch (err) {
        throw new Error(`Failed at operation "${op.name}": ${err.message}`);
      }
    }
    console.log(chalk.green.bold(`\n\u2705 Deployment successful!\n`));
  } catch (err) {
    console.error(chalk.red.bold(`\n\u274C Deployment Failed: ${err.message}`));
    throw err;
  } finally {
    ssh.dispose();
  }
}
|
|
5149
|
+
/**
 * Ensure-type operation: provision a resource (swap, docker, directory) on
 * the remote host, but only when the lock file shows it missing/outdated,
 * the desired config changed, or forceEnsure was passed.
 *
 * The three resource kinds shared an identical check \u2192 apply \u2192 record cycle
 * (previously triplicated inline); it now lives in applyLockedEnsure.
 *
 * @throws when the op carries no `ensure` config or provisioning fails
 */
async function handleEnsure(op, shell, target, lock, lockPath, options) {
  if (!op.ensure) {
    throw new Error(`Ensure operation "${op.name}" missing ensure config`);
  }
  if (op.ensure.swap) {
    const cfg = op.ensure.swap;
    await applyLockedEnsure(op, shell, lock, lockPath, options, {
      key: "swap",
      currentConfig: cfg,
      // The swap size doubles as the recorded version, so a size change
      // alone re-triggers provisioning.
      expectedVersion: cfg.size,
      apply: async () => {
        await shell.runScript("swap", SCRIPTS.SWAP(cfg.size), target);
        return cfg.size;
      }
    });
  }
  if (op.ensure.docker) {
    const cfg = op.ensure.docker;
    await applyLockedEnsure(op, shell, lock, lockPath, options, {
      key: "docker",
      currentConfig: cfg,
      expectedVersion: cfg.version,
      apply: async () => {
        await shell.runScript(
          "docker",
          SCRIPTS.DOCKER(cfg.version, target.user, !!cfg.addUserToGroup),
          target
        );
        return cfg.version;
      }
    });
  }
  if (op.ensure.directory) {
    const cfg = op.ensure.directory;
    await applyLockedEnsure(op, shell, lock, lockPath, options, {
      // Keyed by raw (un-interpolated) path, matching the lock entries
      // written by previous versions.
      key: `directory_${cfg.path}`,
      currentConfig: cfg,
      // No version pre-check for directories: any path/owner change already
      // re-triggers via the config comparison. (expectedVersion omitted)
      apply: async () => {
        const dirPath = interpolate(cfg.path, target);
        const owner = cfg.owner ? interpolate(cfg.owner, target) : target.user;
        await shell.run(`mkdir -p ${dirPath}`, target, true);
        await shell.run(`sudo chown -R ${owner}:${owner} ${dirPath}`, target, true);
        return dirPath;
      }
    });
  }
}
/**
 * Shared check \u2192 apply \u2192 record cycle for one ensured resource.
 * Runs `apply` (which returns the version string to record) when forced,
 * unseen, version-mismatched, or config-changed; otherwise logs "satisfied".
 * On apply, the updated lock is persisted back to the remote host.
 */
async function applyLockedEnsure(op, shell, lock, lockPath, options, { key, currentConfig, expectedVersion, apply }) {
  const locked = lock.ensures[key];
  // NOTE(review): JSON.stringify comparison is key-order-sensitive — an
  // equivalent config with reordered keys re-triggers the ensure.
  const configChanged = JSON.stringify(locked?.config) !== JSON.stringify(currentConfig);
  const versionMismatch = expectedVersion !== void 0 && locked?.version !== expectedVersion;
  if (options?.forceEnsure || !locked || versionMismatch || configChanged) {
    console.log(chalk.yellow(`\u2192 Ensuring: ${op.name}`));
    const recordedVersion = await apply();
    lock.ensures[key] = { version: recordedVersion, config: currentConfig };
    await shell.uploadContent(lockPath, JSON.stringify(lock, null, 2));
  } else {
    console.log(chalk.gray(`\u2713 ${op.name} (already satisfied)`));
  }
}
|
|
5218
|
+
/**
 * Action-type operation: optionally sync a local directory to the target
 * (via node-ssh putDirectory) and/or run a remote command.
 *
 * `op.when` gates execution: "always" (default), "never" (skip), or "once"
 * (recorded in the remote lock so a repeat deploy of the same deployment
 * version skips it).
 *
 * @throws when the op carries no `action` config or a step fails
 */
async function handleAction(op, shell, target, lock, lockPath) {
  if (!op.action) {
    throw new Error(`Action operation "${op.name}" missing action config`);
  }
  const when = op.when || "always";
  if (when === "never") {
    console.log(chalk.gray(`\u2298 ${op.name} (disabled)`));
    return;
  }
  const actionId = `action_${op.name}`;
  if (when === "once" && lock.once_actions.includes(actionId)) {
    console.log(chalk.gray(`\u2713 ${op.name} (already completed)`));
    return;
  }
  console.log(chalk.cyan(`\u2192 Running: ${op.name}`));
  if (op.action.rsync) {
    const src = op.action.rsync.source;
    const dest = interpolate(op.action.rsync.destination || ".", target);
    const putOptions = { recursive: true, concurrency: 10 };
    const excludePatterns = op.action.rsync.exclude;
    if (excludePatterns?.length) {
      // putDirectory calls validate per entry; keep a file only when no
      // exclude pattern matches its path relative to the sync root.
      putOptions.validate = (filePath) => {
        const relative = path13.relative(src, filePath);
        if (relative === "") return true;
        return !excludePatterns.some((pattern) => matchesExcludePattern(relative, pattern));
      };
    }
    console.log(chalk.gray(` Syncing ${src} \u2192 ${dest}`));
    await shell.ssh.putDirectory(src, dest, putOptions);
  }
  if (op.action.command) {
    await shell.run(op.action.command, target);
  }
  if (when === "once") {
    // Persist completion so the next deploy of this version skips it.
    lock.once_actions.push(actionId);
    await shell.uploadContent(lockPath, JSON.stringify(lock, null, 2));
  }
}
/**
 * Minimal rsync-like exclude matching against a relative path:
 *  - "dir/"  excludes the directory itself and anything under or inside it
 *  - "*.ext" excludes by file extension
 *  - anything else must equal the relative path exactly
 */
function matchesExcludePattern(relative, pattern) {
  if (pattern.endsWith("/")) {
    const dir = pattern.slice(0, -1);
    return relative === dir || relative.startsWith(dir + "/") || relative.includes(`/${dir}/`);
  }
  if (pattern.startsWith("*.")) {
    return relative.endsWith(pattern.slice(1));
  }
  return relative === pattern;
}
|
|
5266
|
+
/**
 * Verify-type operation: health-check the deployment on the remote host.
 *  - http:    curl the given URL; `-f` makes a non-success HTTP status fail
 *  - command: run an arbitrary remote command; non-zero exit fails
 *
 * @throws when the op carries no `verify` config or a check fails
 */
async function handleVerify(op, shell, target) {
  if (!op.verify) {
    throw new Error(`Verify operation "${op.name}" missing verify config`);
  }
  console.log(chalk.cyan(`\u2192 Verifying: ${op.name}`));
  if (op.verify.http) {
    const url = interpolate(op.verify.http.url, target);
    // BUG FIX: curl's --max-time takes a plain number of seconds; the old
    // default "30s" made curl reject the option ("expected a proper
    // numerical parameter") and every default http verify failed.
    // Strip a trailing "s" so both "30" and "30s" config values work.
    const timeout = String(op.verify.http.timeout || "30s").replace(/s$/i, "");
    await shell.run(`curl -f --max-time ${timeout} ${url}`, target, true);
  }
  if (op.verify.command) {
    await shell.run(op.verify.command, target, true);
  }
}
|
|
5280
|
+
|
|
4696
5281
|
// src/main.ts
|
|
5282
|
+
import chalk2 from "chalk";
|
|
4697
5283
|
var program = new Command();
|
|
4698
|
-
program.name("pod").description("Pod cli tool").version("1.0.
|
|
5284
|
+
program.name("pod").description("Pod cli tool").version("1.0.20");
|
|
4699
5285
|
program.command("new <name>").description("Start a new Pod Project").action(async (name) => {
|
|
4700
5286
|
await addNew(name);
|
|
4701
|
-
const appDir =
|
|
5287
|
+
const appDir = path14.resolve(process.cwd(), name);
|
|
4702
5288
|
const shell = process.platform === "win32" ? process.env.ComSpec || "cmd.exe" : "/bin/sh";
|
|
4703
5289
|
console.log("Installing dependencies...");
|
|
4704
5290
|
execSync("npm install", { stdio: "inherit", cwd: appDir, shell });
|
|
@@ -4729,10 +5315,12 @@ program.command("dockerize <env>").description("Dockerize a pod project.").actio
|
|
|
4729
5315
|
console.error("\u274C Error:", err.message);
|
|
4730
5316
|
}
|
|
4731
5317
|
});
|
|
4732
|
-
program.command("deploy
|
|
5318
|
+
program.command("deploy").description("Deploy to a target environment").argument("<target>", "Target environment (e.g., ec2)").option("--force-install", "Force reinstallation even if already installed").action(async (target, options) => {
|
|
4733
5319
|
try {
|
|
4734
|
-
|
|
4735
|
-
|
|
5320
|
+
await deploy(target, options);
|
|
5321
|
+
} catch (error) {
|
|
5322
|
+
console.error(chalk2.red(error.message));
|
|
5323
|
+
process.exit(1);
|
|
4736
5324
|
}
|
|
4737
5325
|
});
|
|
4738
5326
|
program.parse(process.argv);
|