@vaharoni/devops 1.2.13 → 1.2.14
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/chunk-N7EX3HJH.js +139 -0
- package/dist/chunk-RZ46YYZZ.js +420 -0
- package/dist/chunk-WKP7EQNU.js +421 -0
- package/dist/devops.d.ts +0 -2
- package/dist/devops.js +3235 -91
- package/dist/index.d.ts +972 -4
- package/dist/index.js +67 -3
- package/dist/plugins.d.ts +95 -4
- package/dist/plugins.js +20 -6
- package/dist/src/target-templates/README.md +1 -0
- package/dist/src/target-templates/cluster-resource-options/README.md +2 -0
- package/dist/src/target-templates/cluster-resource-options/dns-test/dns-test.yaml +60 -0
- package/dist/src/target-templates/cluster-resource-options/milvus/production/milvus-values.yaml +2 -0
- package/dist/src/target-templates/cluster-resource-options/milvus/staging/milvus-values.yaml +2 -0
- package/dist/src/target-templates/cluster-resource-options/monitoring-ingress/monitoring-ingress.yaml +62 -0
- package/dist/src/target-templates/cluster-resource-options/postgres/daily-operator-restart.yaml +54 -0
- package/dist/src/target-templates/cluster-resource-options/postgres/production/cluster/PodDisruptionBudget.yaml +27 -0
- package/dist/src/target-templates/cluster-resource-options/postgres/production/cluster/SGCluster.yaml +47 -0
- package/dist/src/target-templates/cluster-resource-options/postgres/production/cluster/StackGres-alerts.yaml +191 -0
- package/dist/src/target-templates/cluster-resource-options/postgres/production/configurations/06-SGDistributedLogs.yaml +11 -0
- package/dist/src/target-templates/cluster-resource-options/postgres/production/configurations/07-SGObjectStorage.yaml +18 -0
- package/dist/src/target-templates/cluster-resource-options/postgres/production/configurations/08-SGScript.yaml +12 -0
- package/dist/src/target-templates/cluster-resource-options/postgres/stackgres-ui-ingress.yaml +35 -0
- package/dist/src/target-templates/cluster-resource-options/postgres/staging/cluster/SGCluster.yaml +42 -0
- package/dist/src/target-templates/cluster-resource-options/postgres/staging/configurations/07-SGObjectStorage.yaml +18 -0
- package/dist/src/target-templates/cluster-resource-options/postgres/staging/configurations/08-SGScript.yaml +12 -0
- package/dist/src/target-templates/cluster-resource-options/prefect/production/prefect-values.yaml +14 -0
- package/dist/src/target-templates/cluster-resource-options/prefect/staging/prefect-values.yaml +14 -0
- package/dist/src/target-templates/cluster-resource-options/redis/production/redis-values.yaml +20 -0
- package/dist/src/target-templates/cluster-resource-options/redis/staging/redis-values.yaml +8 -0
- package/dist/src/target-templates/infra-variants/README.md +2 -0
- package/dist/src/target-templates/infra-variants/digitalocean/.devops/config/constants.yaml +18 -0
- package/dist/src/target-templates/infra-variants/digitalocean/.github/workflows/k8s-build.yaml +91 -0
- package/dist/src/target-templates/infra-variants/gcloud/.devops/config/constants.yaml +15 -0
- package/dist/src/target-templates/infra-variants/gcloud/.devops/manifests/ingress.yaml.hb +22 -0
- package/dist/src/target-templates/infra-variants/gcloud/.github/workflows/k8s-build.yaml +95 -0
- package/dist/src/target-templates/infra-variants/hetzner/.devops/config/constants.yaml +18 -0
- package/dist/src/target-templates/infra-variants/hetzner/.devops/infra/hetzner/abandoned/harbor-values.yaml +30 -0
- package/dist/src/target-templates/infra-variants/hetzner/.devops/infra/hetzner/abandoned/hcloud-config.yaml +134 -0
- package/dist/src/target-templates/infra-variants/hetzner/.devops/infra/hetzner/cert-manager.yaml +25 -0
- package/dist/src/target-templates/infra-variants/hetzner/.devops/infra/hetzner/harbor-cert.yaml +13 -0
- package/dist/src/target-templates/infra-variants/hetzner/.devops/infra/hetzner/harbor-values.yaml +76 -0
- package/dist/src/target-templates/infra-variants/hetzner/.devops/infra/hetzner/hcloud-config.yaml +113 -0
- package/dist/src/target-templates/infra-variants/hetzner/.devops/infra/hetzner/ingress-nginx-annotations.yaml +49 -0
- package/dist/src/target-templates/infra-variants/hetzner/.devops/infra/hetzner/ingress-nginx-configmap.yaml +8 -0
- package/dist/src/target-templates/infra-variants/hetzner/.devops/infra/hetzner/retain-storage-class.yaml +8 -0
- package/dist/src/target-templates/infra-variants/hetzner/.github/workflows/k8s-build.yaml +93 -0
- package/dist/src/target-templates/lang-variants-common/README.md +4 -0
- package/dist/src/target-templates/lang-variants-common/python/.cursor/rules/monorepo-python.mdc +56 -0
- package/dist/src/target-templates/lang-variants-common/python/.devops/config/images.yaml +89 -0
- package/dist/src/target-templates/lang-variants-common/python/.devops/docker-images/python-services/python-exec.sh +8 -0
- package/dist/src/target-templates/lang-variants-common/python/.devops/docker-images/python-services/python-run.sh +8 -0
- package/dist/src/target-templates/lang-variants-common/python/.devops/docker-images/python-services.Dockerfile +29 -0
- package/dist/src/target-templates/lang-variants-common/python/.devops/manifests/_index.yaml +21 -0
- package/dist/src/target-templates/lang-variants-common/python/.devops/manifests/prefect.yaml.hb +63 -0
- package/dist/src/target-templates/lang-variants-common/python/applications/example-data-pipeline/pyproject.toml +14 -0
- package/dist/src/target-templates/lang-variants-common/python/applications/example-data-pipeline/src/example_data_pipeline/main.py +38 -0
- package/dist/src/target-templates/lang-variants-common/python/applications/example-python/pyproject.toml +20 -0
- package/dist/src/target-templates/lang-variants-common/python/applications/example-python/src/example_python/__init__.py +0 -0
- package/dist/src/target-templates/lang-variants-common/python/applications/example-python/src/example_python/main.py +13 -0
- package/dist/src/target-templates/lang-variants-common/python/applications/example-python/src/example_python/scripts.py +17 -0
- package/dist/src/target-templates/lang-variants-common/python/applications/example-python/tests/__init__.py +0 -0
- package/dist/src/target-templates/lang-variants-common/python/devopspy +3 -0
- package/dist/src/target-templates/lang-variants-common/python/libs/example-python-lib/pyproject.toml +11 -0
- package/dist/src/target-templates/lang-variants-common/python/libs/example-python-lib/src/example_python_lib/__init__.py +2 -0
- package/dist/src/target-templates/lang-variants-common/python/pyproject.toml +16 -0
- package/dist/src/target-templates/lang-variants-common/typescript/.cursor/rules/monorepo-typescript.mdc +51 -0
- package/dist/src/target-templates/lang-variants-common/typescript/.devops/config/images.yaml +69 -0
- package/dist/src/target-templates/lang-variants-common/typescript/.devops/docker-images/cloudrun.Dockerfile +31 -0
- package/dist/src/target-templates/lang-variants-common/typescript/.devops/docker-images/common/docker-common.sh +23 -0
- package/dist/src/target-templates/lang-variants-common/typescript/.devops/docker-images/node-services/node-exec.sh +8 -0
- package/dist/src/target-templates/lang-variants-common/typescript/.devops/docker-images/node-services/node-run.sh +8 -0
- package/dist/src/target-templates/lang-variants-common/typescript/.devops/docker-images/node-services.Dockerfile +31 -0
- package/dist/src/target-templates/lang-variants-common/typescript/.devops/env.example.yaml +23 -0
- package/dist/src/target-templates/lang-variants-common/typescript/.devops/manifests/_index.yaml +19 -0
- package/dist/src/target-templates/lang-variants-common/typescript/.devops/manifests/cron-jobs.yaml.hb +56 -0
- package/dist/src/target-templates/lang-variants-common/typescript/.devops/manifests/db-migrate-job.yaml.hb +43 -0
- package/dist/src/target-templates/lang-variants-common/typescript/.devops/manifests/deployment-debug.yaml.hb +42 -0
- package/dist/src/target-templates/lang-variants-common/typescript/.devops/manifests/deployment-process.yaml.hb +48 -0
- package/dist/src/target-templates/lang-variants-common/typescript/.devops/manifests/deployment-web.yaml.hb +54 -0
- package/dist/src/target-templates/lang-variants-common/typescript/.devops/manifests/ingress.yaml.hb +21 -0
- package/dist/src/target-templates/lang-variants-common/typescript/.devops/manifests/service.yaml.hb +15 -0
- package/dist/src/target-templates/lang-variants-common/typescript/.envrc +5 -0
- package/dist/src/target-templates/lang-variants-common/typescript/.github/actions/build-image@v1/action.yaml +81 -0
- package/dist/src/target-templates/lang-variants-common/typescript/.github/actions/connect-to-digital-ocean@v1/action.yaml +29 -0
- package/dist/src/target-templates/lang-variants-common/typescript/.github/actions/connect-to-gke@v1/action.yaml +43 -0
- package/dist/src/target-templates/lang-variants-common/typescript/.github/actions/connect-to-hetzner@v1/action.yaml +31 -0
- package/dist/src/target-templates/lang-variants-common/typescript/.github/actions/db-migrate@v1/action.yaml +23 -0
- package/dist/src/target-templates/lang-variants-common/typescript/.github/actions/deploy-image-cloudrun@v1/action.yaml +71 -0
- package/dist/src/target-templates/lang-variants-common/typescript/.github/actions/deploy-image-k8s@v1/action.yaml +37 -0
- package/dist/src/target-templates/lang-variants-common/typescript/.github/actions/setup-prereq@v1/action.yaml +24 -0
- package/dist/src/target-templates/lang-variants-common/typescript/applications/example-node/index.ts +30 -0
- package/dist/src/target-templates/lang-variants-common/typescript/applications/example-node/package.json +26 -0
- package/dist/src/target-templates/lang-variants-common/typescript/applications/example-node/tsconfig.json +3 -0
- package/dist/src/target-templates/lang-variants-common/typescript/applications/jobs/README.md +68 -0
- package/dist/src/target-templates/lang-variants-common/typescript/applications/jobs/index.ts +1 -0
- package/dist/src/target-templates/lang-variants-common/typescript/applications/jobs/package.json +30 -0
- package/dist/src/target-templates/lang-variants-common/typescript/applications/jobs/tsconfig.json +3 -0
- package/dist/src/target-templates/lang-variants-common/typescript/config/.env.development +1 -0
- package/dist/src/target-templates/lang-variants-common/typescript/config/.env.global +4 -0
- package/dist/src/target-templates/lang-variants-common/typescript/config/.env.test +1 -0
- package/dist/src/target-templates/lang-variants-common/typescript/devops +3 -0
- package/dist/src/target-templates/lang-variants-common/typescript/libs/example-node-lib/index.ts +3 -0
- package/dist/src/target-templates/lang-variants-common/typescript/libs/example-node-lib/package.json +12 -0
- package/dist/src/target-templates/lang-variants-common/typescript/libs/example-node-lib/tsconfig.json +3 -0
- package/dist/src/target-templates/lang-variants-common/typescript/tmp/.gitkeep +0 -0
- package/dist/src/target-templates/lang-variants-common/typescript/tsconfig.json +27 -0
- package/dist/src/target-templates/lang-variants-prisma/README.md +3 -0
- package/dist/src/target-templates/lang-variants-prisma/python/.cursor/rules/prisma-python.mdc +55 -0
- package/dist/src/target-templates/lang-variants-prisma/python/.cursor/rules/testing-python.mdc +89 -0
- package/dist/src/target-templates/lang-variants-prisma/python/db/db/__init__.py +0 -0
- package/dist/src/target-templates/lang-variants-prisma/python/db/db/db_client_test.py +46 -0
- package/dist/src/target-templates/lang-variants-prisma/python/db/pyproject.toml +14 -0
- package/dist/src/target-templates/lang-variants-prisma/typescript/.cursor/rules/prisma-typescript.mdc +54 -0
- package/dist/src/target-templates/lang-variants-prisma/typescript/.cursor/rules/testing-typescript.mdc +103 -0
- package/dist/src/target-templates/lang-variants-prisma/typescript/db/db-client-test.ts +142 -0
- package/dist/src/target-templates/lang-variants-prisma/typescript/db/db-client.ts +19 -0
- package/dist/src/target-templates/lang-variants-prisma/typescript/db/env.yaml +4 -0
- package/dist/src/target-templates/lang-variants-prisma/typescript/db/package.json +17 -0
- package/dist/src/target-templates/lang-variants-prisma/typescript/db/prisma/schema.prisma +24 -0
- package/dist/src/target-templates/lang-variants-prisma/typescript/db/prisma-setup-vitest.ts +27 -0
- package/dist/src/target-templates/lang-variants-prisma/typescript/db/tsconfig.json +3 -0
- package/dist/src/target-templates/lang-variants-prisma/typescript/dml/package.json +7 -0
- package/dist/src/target-templates/lang-variants-prisma/typescript/dml/tsconfig.json +3 -0
- package/package.json +3 -2
- package/src/cli/core/exec.ts +2 -4
- package/src/cli/core/run.ts +2 -4
- package/src/cli/extensions/prisma.ts +2 -4
- package/src/libs/init-generator.ts +2 -4
- package/src/pkg-root.ts +14 -0
- package/src/plugins.ts +2 -4
- package/dist/app-support/crypto/aes.d.ts +0 -15
- package/dist/app-support/crypto/aes.d.ts.map +0 -1
- package/dist/app-support/crypto/aes.js +0 -56
- package/dist/app-support/crypto/aes.spec.d.ts +0 -2
- package/dist/app-support/crypto/aes.spec.d.ts.map +0 -1
- package/dist/app-support/crypto/aes.spec.js +0 -58
- package/dist/app-support/crypto/index.d.ts +0 -16
- package/dist/app-support/crypto/index.d.ts.map +0 -1
- package/dist/app-support/crypto/index.js +0 -31
- package/dist/app-support/crypto/internal-token.d.ts +0 -20
- package/dist/app-support/crypto/internal-token.d.ts.map +0 -1
- package/dist/app-support/crypto/internal-token.js +0 -42
- package/dist/app-support/crypto/internal-token.spec.d.ts +0 -2
- package/dist/app-support/crypto/internal-token.spec.d.ts.map +0 -1
- package/dist/app-support/crypto/internal-token.spec.js +0 -45
- package/dist/app-support/crypto/secret.d.ts +0 -3
- package/dist/app-support/crypto/secret.d.ts.map +0 -1
- package/dist/app-support/crypto/secret.js +0 -12
- package/dist/app-support/crypto/secret.spec.d.ts +0 -2
- package/dist/app-support/crypto/secret.spec.d.ts.map +0 -1
- package/dist/app-support/crypto/secret.spec.js +0 -15
- package/dist/app-support/discovery/dev-discovery-loader.d.ts +0 -2
- package/dist/app-support/discovery/dev-discovery-loader.d.ts.map +0 -1
- package/dist/app-support/discovery/dev-discovery-loader.js +0 -30
- package/dist/app-support/discovery/service-endpoint.d.ts +0 -2
- package/dist/app-support/discovery/service-endpoint.d.ts.map +0 -1
- package/dist/app-support/discovery/service-endpoint.js +0 -10
- package/dist/cli/common.d.ts +0 -89
- package/dist/cli/common.d.ts.map +0 -1
- package/dist/cli/common.js +0 -246
- package/dist/cli/common.spec.d.ts +0 -2
- package/dist/cli/common.spec.d.ts.map +0 -1
- package/dist/cli/common.spec.js +0 -64
- package/dist/cli/core/affected.d.ts +0 -9
- package/dist/cli/core/affected.d.ts.map +0 -1
- package/dist/cli/core/affected.js +0 -101
- package/dist/cli/core/cloudrun.d.ts +0 -9
- package/dist/cli/core/cloudrun.d.ts.map +0 -1
- package/dist/cli/core/cloudrun.js +0 -119
- package/dist/cli/core/console.d.ts +0 -10
- package/dist/cli/core/console.d.ts.map +0 -1
- package/dist/cli/core/console.js +0 -50
- package/dist/cli/core/constant.d.ts +0 -9
- package/dist/cli/core/constant.d.ts.map +0 -1
- package/dist/cli/core/constant.js +0 -20
- package/dist/cli/core/env.d.ts +0 -9
- package/dist/cli/core/env.d.ts.map +0 -1
- package/dist/cli/core/env.js +0 -65
- package/dist/cli/core/exec.d.ts +0 -9
- package/dist/cli/core/exec.d.ts.map +0 -1
- package/dist/cli/core/exec.js +0 -48
- package/dist/cli/core/image.d.ts +0 -9
- package/dist/cli/core/image.d.ts.map +0 -1
- package/dist/cli/core/image.js +0 -153
- package/dist/cli/core/index.d.ts +0 -16
- package/dist/cli/core/index.d.ts.map +0 -1
- package/dist/cli/core/index.js +0 -15
- package/dist/cli/core/init.d.ts +0 -9
- package/dist/cli/core/init.d.ts.map +0 -1
- package/dist/cli/core/init.js +0 -219
- package/dist/cli/core/job.d.ts +0 -9
- package/dist/cli/core/job.d.ts.map +0 -1
- package/dist/cli/core/job.js +0 -65
- package/dist/cli/core/namespace.d.ts +0 -9
- package/dist/cli/core/namespace.d.ts.map +0 -1
- package/dist/cli/core/namespace.js +0 -67
- package/dist/cli/core/prep-build.d.ts +0 -10
- package/dist/cli/core/prep-build.d.ts.map +0 -1
- package/dist/cli/core/prep-build.js +0 -109
- package/dist/cli/core/registry.d.ts +0 -9
- package/dist/cli/core/registry.d.ts.map +0 -1
- package/dist/cli/core/registry.js +0 -66
- package/dist/cli/core/run-many.d.ts +0 -10
- package/dist/cli/core/run-many.d.ts.map +0 -1
- package/dist/cli/core/run-many.js +0 -48
- package/dist/cli/core/run.d.ts +0 -9
- package/dist/cli/core/run.d.ts.map +0 -1
- package/dist/cli/core/run.js +0 -35
- package/dist/cli/core/test.d.ts +0 -9
- package/dist/cli/core/test.d.ts.map +0 -1
- package/dist/cli/core/test.js +0 -26
- package/dist/cli/extensions/dml.d.ts +0 -9
- package/dist/cli/extensions/dml.d.ts.map +0 -1
- package/dist/cli/extensions/dml.js +0 -114
- package/dist/cli/extensions/index.d.ts +0 -9
- package/dist/cli/extensions/index.d.ts.map +0 -1
- package/dist/cli/extensions/index.js +0 -8
- package/dist/cli/extensions/internal-curl.d.ts +0 -10
- package/dist/cli/extensions/internal-curl.d.ts.map +0 -1
- package/dist/cli/extensions/internal-curl.js +0 -41
- package/dist/cli/extensions/jwt.d.ts +0 -9
- package/dist/cli/extensions/jwt.d.ts.map +0 -1
- package/dist/cli/extensions/jwt.js +0 -25
- package/dist/cli/extensions/prisma.d.ts +0 -9
- package/dist/cli/extensions/prisma.d.ts.map +0 -1
- package/dist/cli/extensions/prisma.js +0 -23
- package/dist/cli/extensions/redis-bitnami.d.ts +0 -11
- package/dist/cli/extensions/redis-bitnami.d.ts.map +0 -1
- package/dist/cli/extensions/redis-bitnami.js +0 -74
- package/dist/cli/extensions/redis-ha.d.ts +0 -11
- package/dist/cli/extensions/redis-ha.d.ts.map +0 -1
- package/dist/cli/extensions/redis-ha.js +0 -51
- package/dist/cli/extensions/stackgres.d.ts +0 -10
- package/dist/cli/extensions/stackgres.d.ts.map +0 -1
- package/dist/cli/extensions/stackgres.js +0 -117
- package/dist/cli/extensions/template.d.ts +0 -9
- package/dist/cli/extensions/template.d.ts.map +0 -1
- package/dist/cli/extensions/template.js +0 -121
- package/dist/devops.d.ts.map +0 -1
- package/dist/index.d.ts.map +0 -1
- package/dist/libs/affected-entities.d.ts +0 -15
- package/dist/libs/affected-entities.d.ts.map +0 -1
- package/dist/libs/affected-entities.js +0 -52
- package/dist/libs/cloudrun-helpers.d.ts +0 -16
- package/dist/libs/cloudrun-helpers.d.ts.map +0 -1
- package/dist/libs/cloudrun-helpers.js +0 -81
- package/dist/libs/config.d.ts +0 -7
- package/dist/libs/config.d.ts.map +0 -1
- package/dist/libs/config.js +0 -102
- package/dist/libs/digital-ocean/container-reg.d.ts +0 -6
- package/dist/libs/digital-ocean/container-reg.d.ts.map +0 -1
- package/dist/libs/digital-ocean/container-reg.js +0 -74
- package/dist/libs/discovery/dependencies.d.ts +0 -19
- package/dist/libs/discovery/dependencies.d.ts.map +0 -1
- package/dist/libs/discovery/dependencies.js +0 -62
- package/dist/libs/discovery/dependencies.spec.d.ts +0 -2
- package/dist/libs/discovery/dependencies.spec.d.ts.map +0 -1
- package/dist/libs/discovery/dependencies.spec.js +0 -21
- package/dist/libs/discovery/images.d.ts +0 -5
- package/dist/libs/discovery/images.d.ts.map +0 -1
- package/dist/libs/discovery/images.js +0 -45
- package/dist/libs/discovery/index.d.ts +0 -6
- package/dist/libs/discovery/index.d.ts.map +0 -1
- package/dist/libs/discovery/index.js +0 -67
- package/dist/libs/discovery/process-common.d.ts +0 -25
- package/dist/libs/discovery/process-common.d.ts.map +0 -1
- package/dist/libs/discovery/process-common.js +0 -40
- package/dist/libs/discovery/process-package-json.d.ts +0 -3
- package/dist/libs/discovery/process-package-json.d.ts.map +0 -1
- package/dist/libs/discovery/process-package-json.js +0 -40
- package/dist/libs/discovery/process-pyproject-toml.d.ts +0 -3
- package/dist/libs/discovery/process-pyproject-toml.d.ts.map +0 -1
- package/dist/libs/discovery/process-pyproject-toml.js +0 -49
- package/dist/libs/git-helpers.d.ts +0 -8
- package/dist/libs/git-helpers.d.ts.map +0 -1
- package/dist/libs/git-helpers.js +0 -20
- package/dist/libs/hetzner/reg-secret.d.ts +0 -3
- package/dist/libs/hetzner/reg-secret.d.ts.map +0 -1
- package/dist/libs/hetzner/reg-secret.js +0 -39
- package/dist/libs/init-generator.d.ts +0 -41
- package/dist/libs/init-generator.d.ts.map +0 -1
- package/dist/libs/init-generator.js +0 -123
- package/dist/libs/k8s-constants.d.ts +0 -13
- package/dist/libs/k8s-constants.d.ts.map +0 -1
- package/dist/libs/k8s-constants.js +0 -91
- package/dist/libs/k8s-db.d.ts +0 -18
- package/dist/libs/k8s-db.d.ts.map +0 -1
- package/dist/libs/k8s-db.js +0 -73
- package/dist/libs/k8s-generate.d.ts +0 -17
- package/dist/libs/k8s-generate.d.ts.map +0 -1
- package/dist/libs/k8s-generate.js +0 -193
- package/dist/libs/k8s-helpers.d.ts +0 -11
- package/dist/libs/k8s-helpers.d.ts.map +0 -1
- package/dist/libs/k8s-helpers.js +0 -42
- package/dist/libs/k8s-image-config.d.ts +0 -8
- package/dist/libs/k8s-image-config.d.ts.map +0 -1
- package/dist/libs/k8s-image-config.js +0 -113
- package/dist/libs/k8s-job-waiter.d.ts +0 -8
- package/dist/libs/k8s-job-waiter.d.ts.map +0 -1
- package/dist/libs/k8s-job-waiter.js +0 -84
- package/dist/libs/k8s-namespace.d.ts +0 -7
- package/dist/libs/k8s-namespace.d.ts.map +0 -1
- package/dist/libs/k8s-namespace.js +0 -27
- package/dist/libs/k8s-redis-bitnami.d.ts +0 -6
- package/dist/libs/k8s-redis-bitnami.d.ts.map +0 -1
- package/dist/libs/k8s-redis-bitnami.js +0 -31
- package/dist/libs/k8s-redis-ha.d.ts +0 -3
- package/dist/libs/k8s-redis-ha.d.ts.map +0 -1
- package/dist/libs/k8s-redis-ha.js +0 -15
- package/dist/libs/k8s-secrets-manager.d.ts +0 -6
- package/dist/libs/k8s-secrets-manager.d.ts.map +0 -1
- package/dist/libs/k8s-secrets-manager.js +0 -64
- package/dist/libs/validate-env.d.ts +0 -56
- package/dist/libs/validate-env.d.ts.map +0 -1
- package/dist/libs/validate-env.js +0 -214
- package/dist/libs/validate-env.spec.d.ts +0 -2
- package/dist/libs/validate-env.spec.d.ts.map +0 -1
- package/dist/libs/validate-env.spec.js +0 -168
- package/dist/plugins.d.ts.map +0 -1
- package/dist/types/index.d.ts +0 -939
- package/dist/types/index.d.ts.map +0 -1
- package/dist/types/index.js +0 -82
package/dist/devops.js
CHANGED
|
@@ -1,74 +1,3229 @@
|
|
|
1
1
|
#!/usr/bin/env bun
|
|
2
|
-
|
|
3
|
-
|
|
4
|
-
|
|
5
|
-
import
|
|
6
|
-
|
|
7
|
-
|
|
8
|
-
|
|
9
|
-
|
|
10
|
-
|
|
11
|
-
|
|
12
|
-
|
|
13
|
-
|
|
14
|
-
|
|
15
|
-
|
|
16
|
-
|
|
17
|
-
|
|
18
|
-
|
|
19
|
-
|
|
20
|
-
|
|
21
|
-
|
|
2
|
+
import {
|
|
3
|
+
InternalToken
|
|
4
|
+
} from "./chunk-N7EX3HJH.js";
|
|
5
|
+
import {
|
|
6
|
+
CLICommandParser,
|
|
7
|
+
CommandExecutor,
|
|
8
|
+
MISSING_DOMAIN_KEY_ERROR,
|
|
9
|
+
StrongParams,
|
|
10
|
+
applyHandler,
|
|
11
|
+
containerRegistryImageName,
|
|
12
|
+
containerRegistryPath,
|
|
13
|
+
containerRegistryRepoPath,
|
|
14
|
+
dbMigrateJobName,
|
|
15
|
+
domainNameForEnv,
|
|
16
|
+
dotEnvFilesForEnv,
|
|
17
|
+
envToNamespace,
|
|
18
|
+
imageConfigMap,
|
|
19
|
+
imageDebugName,
|
|
20
|
+
isLocalOrRemoteEnv,
|
|
21
|
+
kubectlCommand,
|
|
22
|
+
patchSecretKeyCommand,
|
|
23
|
+
pkgRoot,
|
|
24
|
+
printUsageAndExit,
|
|
25
|
+
secretName,
|
|
26
|
+
upsertConfigMapCommand
|
|
27
|
+
} from "./chunk-WKP7EQNU.js";
|
|
28
|
+
import {
|
|
29
|
+
IGNORED_PATHS,
|
|
30
|
+
__export,
|
|
31
|
+
getConst,
|
|
32
|
+
getImageData,
|
|
33
|
+
getImageNames,
|
|
34
|
+
getImageType,
|
|
35
|
+
getTemplateData,
|
|
36
|
+
getWorkspace,
|
|
37
|
+
globEnvYamlFiles,
|
|
38
|
+
workspaceDirectoryForLanguage
|
|
39
|
+
} from "./chunk-RZ46YYZZ.js";
|
|
40
|
+
|
|
41
|
+
// src/devops.ts
|
|
42
|
+
import { globSync as globSync2 } from "glob";
|
|
43
|
+
|
|
44
|
+
// src/cli/core/index.ts
|
|
45
|
+
var core_exports = {};
|
|
46
|
+
__export(core_exports, {
|
|
47
|
+
affected: () => affected,
|
|
48
|
+
cloudrun: () => cloudrun,
|
|
49
|
+
consoleCommand: () => consoleCommand,
|
|
50
|
+
constant: () => constant,
|
|
51
|
+
env: () => env,
|
|
52
|
+
exec: () => exec,
|
|
53
|
+
image: () => image,
|
|
54
|
+
init: () => init,
|
|
55
|
+
job: () => job,
|
|
56
|
+
namespace: () => namespace,
|
|
57
|
+
prepBuild: () => prepBuild,
|
|
58
|
+
registry: () => registry,
|
|
59
|
+
run: () => run,
|
|
60
|
+
runMany: () => runMany,
|
|
61
|
+
test: () => test
|
|
22
62
|
});
|
|
23
|
-
|
|
24
|
-
//
|
|
25
|
-
|
|
26
|
-
|
|
27
|
-
|
|
28
|
-
|
|
29
|
-
|
|
30
|
-
|
|
31
|
-
|
|
32
|
-
|
|
33
|
-
|
|
34
|
-
|
|
35
|
-
|
|
36
|
-
|
|
37
|
-
|
|
38
|
-
|
|
63
|
+
|
|
64
|
+
// src/cli/core/run.ts
|
|
65
|
+
import path from "path";
|
|
66
|
+
var execShPath = path.join(pkgRoot, "cli/exec.sh");
|
|
67
|
+
var oneLiner = "Runs a script defined in package.json after injecting env variables";
|
|
68
|
+
var keyExamples = `$ devops run project:test`;
|
|
69
|
+
var usage = `
|
|
70
|
+
${oneLiner}
|
|
71
|
+
|
|
72
|
+
GENERAL USAGE
|
|
73
|
+
devops run <project-name>:<script-name> [--] [options for script]
|
|
74
|
+
|
|
75
|
+
NOTES
|
|
76
|
+
- Only works for node projects. Use 'devopspy' for python projects.
|
|
77
|
+
- Does not allow interactive mode. If you need interactivity, use devops exec instead.
|
|
78
|
+
|
|
79
|
+
EXAMPLES
|
|
80
|
+
${keyExamples}
|
|
81
|
+
`;
|
|
82
|
+
async function runFn(cmdObj) {
|
|
83
|
+
if (cmdObj.help || cmdObj.args.length === 0) printUsageAndExit(usage);
|
|
84
|
+
const [workspace, script] = cmdObj.args[0].split(":");
|
|
85
|
+
if (!workspace || !script) printUsageAndExit(usage);
|
|
86
|
+
const rootPath = getWorkspace(workspace).rootPath;
|
|
87
|
+
const remaining = cmdObj.args.slice(1).join(" ");
|
|
88
|
+
cmdObj.executorFromEnv(
|
|
89
|
+
`${execShPath} ${rootPath} bun run ${script} ${remaining}`,
|
|
90
|
+
{ checkEnvYaml: true }
|
|
91
|
+
).spawn();
|
|
92
|
+
}
|
|
93
|
+
var run = { oneLiner, keyExamples, run: runFn };
|
|
94
|
+
|
|
95
|
+
// src/cli/core/run-many.ts
|
|
96
|
+
import concurrently from "concurrently";
|
|
97
|
+
var oneLiner2 = "Runs a script concurrently in all projects that define it in their package.json";
|
|
98
|
+
var keyExamples2 = `
|
|
99
|
+
$ devops run-many build
|
|
100
|
+
`.trim();
|
|
101
|
+
var usage2 = `
|
|
102
|
+
${oneLiner2}
|
|
103
|
+
|
|
104
|
+
USAGE
|
|
105
|
+
devops run-many <script-name> [--kill-others-on-fail]
|
|
106
|
+
|
|
107
|
+
NOTE
|
|
108
|
+
Only works for node projects. Use 'devopspy' for python projects.
|
|
109
|
+
|
|
110
|
+
EXAMPLES
|
|
111
|
+
${keyExamples2}
|
|
112
|
+
`;
|
|
113
|
+
async function run2(cmdObj) {
|
|
114
|
+
if (cmdObj.help || cmdObj.args.length === 0) printUsageAndExit(usage2);
|
|
115
|
+
const parsed = cmdObj.parseOptions({ booleans: ["--kill-others-on-fail"] });
|
|
116
|
+
const [script] = parsed.args;
|
|
117
|
+
const remaining = parsed.args.slice(1).join(" ");
|
|
118
|
+
const commands = [];
|
|
119
|
+
Object.values(workspaceDirectoryForLanguage("node")).forEach(async (packageData) => {
|
|
120
|
+
if (packageData.scripts?.[script]) {
|
|
121
|
+
commands.push({
|
|
122
|
+
name: packageData.name,
|
|
123
|
+
command: `devops --env ${cmdObj.env} run ${packageData.name}:${script} ${remaining}`
|
|
124
|
+
});
|
|
125
|
+
}
|
|
126
|
+
});
|
|
127
|
+
if (!commands.length) {
|
|
128
|
+
console.error(`No workspaces define the script: ${script}`);
|
|
129
|
+
process.exit(0);
|
|
130
|
+
}
|
|
131
|
+
const options = parsed.options["--kill-others-on-fail"] ? { killOthers: "failure" } : {};
|
|
132
|
+
concurrently(commands, options).result.then(() => {
|
|
133
|
+
}).catch((error) => {
|
|
134
|
+
console.error("One of the commands failed");
|
|
135
|
+
process.exit(1);
|
|
136
|
+
});
|
|
137
|
+
}
|
|
138
|
+
var runMany = { command: "run-many", oneLiner: oneLiner2, keyExamples: keyExamples2, run: run2 };
|
|
139
|
+
|
|
140
|
+
// src/cli/core/exec.ts
|
|
141
|
+
import path2 from "path";
|
|
142
|
+
var execShPath2 = path2.join(pkgRoot, "cli/exec.sh");
|
|
143
|
+
var oneLiner3 = "Executes a command after injecting env variables, either globally or in a workspace";
|
|
144
|
+
var keyExamples3 = `
|
|
145
|
+
$ devops exec tmp/test.sh
|
|
146
|
+
$ devops exec bun test.ts --in myworkspace --env staging
|
|
147
|
+
`.trim();
|
|
148
|
+
var usage3 = `
|
|
149
|
+
${oneLiner3}
|
|
150
|
+
|
|
151
|
+
USAGE
|
|
152
|
+
devops exec <command>
|
|
153
|
+
devops exec --in <workspace> <command>
|
|
154
|
+
devops exec --in <workspace> <command> --interactive
|
|
155
|
+
|
|
156
|
+
EXAMPLES
|
|
157
|
+
${keyExamples3}
|
|
158
|
+
`;
|
|
159
|
+
function run3(cmdObj) {
|
|
160
|
+
if (cmdObj.help || cmdObj.args.length === 0) printUsageAndExit(usage3);
|
|
161
|
+
const parsed = cmdObj.parseOptions({
|
|
162
|
+
params: ["--in"],
|
|
163
|
+
booleans: ["--interactive"]
|
|
164
|
+
});
|
|
165
|
+
const workspace = parsed.options["--in"];
|
|
166
|
+
let executor;
|
|
167
|
+
if (workspace) {
|
|
168
|
+
const rootPath = getWorkspace(workspace).rootPath;
|
|
169
|
+
executor = cmdObj.executorFromEnv(
|
|
170
|
+
`${execShPath2} ${rootPath} ${parsed.argsStr}`,
|
|
171
|
+
{ checkEnvYaml: true }
|
|
172
|
+
);
|
|
173
|
+
} else {
|
|
174
|
+
executor = cmdObj.executorFromEnv(parsed.argsStr, { checkEnvYaml: true });
|
|
175
|
+
}
|
|
176
|
+
const interactive = Boolean(parsed.options["--interactive"]);
|
|
177
|
+
if (interactive) {
|
|
178
|
+
executor.spawn();
|
|
179
|
+
} else {
|
|
180
|
+
executor.exec();
|
|
181
|
+
}
|
|
182
|
+
}
|
|
183
|
+
var exec = { oneLiner: oneLiner3, keyExamples: keyExamples3, run: run3 };
|
|
184
|
+
|
|
185
|
+
// src/libs/discovery/dependencies.ts
|
|
186
|
+
import chalk from "chalk";
|
|
187
|
+
var WorkspaceDependencies = class {
|
|
188
|
+
constructor(getAllProjects) {
|
|
189
|
+
this.getAllProjects = getAllProjects;
|
|
190
|
+
}
|
|
191
|
+
dependencies = {};
|
|
192
|
+
loaded = false;
|
|
193
|
+
workspaces = {};
|
|
194
|
+
_getOrCreate(name) {
|
|
195
|
+
let node = this.dependencies[name];
|
|
196
|
+
if (!node) {
|
|
197
|
+
node = new DependencyNode(name);
|
|
198
|
+
this.dependencies[name] = node;
|
|
199
|
+
}
|
|
200
|
+
return node;
|
|
201
|
+
}
|
|
202
|
+
_buildTree() {
|
|
203
|
+
this.workspaces = this.getAllProjects();
|
|
204
|
+
this.loaded = true;
|
|
205
|
+
for (const workspace of Object.keys(this.workspaces)) {
|
|
206
|
+
const node = this._getOrCreate(workspace);
|
|
207
|
+
const data = this.workspaces[workspace];
|
|
208
|
+
for (const dep of data.dependencyNames ?? []) {
|
|
209
|
+
if (this.workspaces[dep]) {
|
|
210
|
+
node.dependsOn.add(dep);
|
|
39
211
|
}
|
|
40
|
-
|
|
41
|
-
}
|
|
42
|
-
}
|
|
43
|
-
|
|
44
|
-
|
|
45
|
-
|
|
46
|
-
|
|
47
|
-
const
|
|
48
|
-
|
|
49
|
-
|
|
50
|
-
|
|
51
|
-
|
|
52
|
-
|
|
53
|
-
|
|
212
|
+
}
|
|
213
|
+
}
|
|
214
|
+
}
|
|
215
|
+
getDependents(workspaceName) {
|
|
216
|
+
if (!this.loaded) {
|
|
217
|
+
this._buildTree();
|
|
218
|
+
}
|
|
219
|
+
const node = this.dependencies[workspaceName];
|
|
220
|
+
if (!node) {
|
|
221
|
+
console.error(chalk.red(`
|
|
222
|
+
Workspace ${workspaceName} not found
|
|
223
|
+
`));
|
|
224
|
+
process.exit(1);
|
|
225
|
+
}
|
|
226
|
+
return node.flattenDependents(this.dependencies);
|
|
227
|
+
}
|
|
228
|
+
};
|
|
229
|
+
var DependencyNode = class {
  name;
  dependsOn;
  /** @param {string} name - workspace this node represents. */
  constructor(name) {
    this.name = name;
    this.dependsOn = /* @__PURE__ */ new Set();
  }
  /**
   * Depth-first flattening: this node's name followed by the names of all
   * nodes transitively reachable through `dependsOn`, each emitted once.
   * Cycles are handled via the shared `visited` set.
   * @param {Object<string, DependencyNode>} allDependencies - node lookup.
   * @param {Set<string>} [visited] - recursion state; created when absent.
   * @returns {string[]}
   */
  flattenDependents(allDependencies, visited) {
    const seen = visited ?? /* @__PURE__ */ new Set();
    seen.add(this.name);
    const reached = [];
    for (const depName of this.dependsOn) {
      if (seen.has(depName)) continue;
      const child = allDependencies[depName];
      reached.push(...child.flattenDependents(allDependencies, seen));
    }
    return [this.name, ...reached];
  }
};
|
|
251
|
+
|
|
252
|
+
// src/libs/discovery/images.ts
|
|
253
|
+
// Memoized map: image name -> workspace data for every workspace the
// image's applications transitively depend on.
var _imageDescendents = {};
var _imageDescendentsLoaded = false;
function imageDescendents() {
  if (_imageDescendentsLoaded) return _imageDescendents;
  for (const imageName of getImageNames()) {
    const imageData = getImageData(imageName);
    const workspaces = workspaceDirectoryForLanguage(imageData.language);
    const resolver = new WorkspaceDependencies(() => workspaces);
    const reachable = /* @__PURE__ */ new Set();
    for (const application of imageData.applications) {
      for (const dependentName of resolver.getDependents(application)) {
        reachable.add(dependentName);
      }
    }
    _imageDescendents[imageName] = [...reachable].map((name) => workspaces[name]);
  }
  _imageDescendentsLoaded = true;
  return _imageDescendents;
}
/** Workspace data entries reachable from `imageName`; [] for unknown images. */
function getImageDescendentData(imageName) {
  return imageDescendents()[imageName] ?? [];
}
|
|
274
|
+
// Memoized inverse of imageDescendents(): workspace name -> image names
// whose descendent set contains that workspace.
var _workspaceImages = {};
var _workspaceImagesLoaded = false;
function workspaceImages() {
  if (_workspaceImagesLoaded) return _workspaceImages;
  for (const [imageName, descendents] of Object.entries(imageDescendents())) {
    for (const packageData of descendents) {
      (_workspaceImages[packageData.name] ??= []).push(imageName);
    }
  }
  _workspaceImagesLoaded = true;
  return _workspaceImages;
}
/**
 * Image names that contain `workspaceName`; [] when no image does.
 * getWorkspace is invoked first purely to validate the workspace exists.
 */
function getWorkspaceImages(workspaceName) {
  getWorkspace(workspaceName);
  return workspaceImages()[workspaceName] ?? [];
}
|
|
292
|
+
|
|
293
|
+
// src/libs/git-helpers.ts
|
|
294
|
+
/** True when `sha` resolves to an ancestor of (or is equal to) HEAD. */
function commitExists(sha) {
  const exitCode = new CommandExecutor(
    `git merge-base --is-ancestor "${sha}" HEAD`
  ).exec({ onlyStatusCode: true });
  return exitCode === 0;
}
/**
 * True when the files under `path8` differ between baseSha and headSha.
 * Conservatively reports true when either commit is not present locally,
 * unless `opts.skipCheck` disables that guard.
 * @param {string} path8 - path passed to `git diff -- <path>`.
 * @param {{baseSha?: string, headSha?: string, skipCheck?: boolean}} opts
 */
function isAffected(path8, opts = {}) {
  const baseSha = opts.baseSha || "HEAD^";
  const headSha = opts.headSha || "HEAD";
  if (!opts.skipCheck && (!commitExists(baseSha) || !commitExists(headSha))) {
    return true;
  }
  const exitCode = new CommandExecutor(
    `git diff --quiet ${baseSha} ${headSha} -- ${path8}`,
    { quiet: true }
  ).exec({ onlyStatusCode: true });
  return exitCode !== 0;
}
|
|
311
|
+
|
|
312
|
+
// src/libs/k8s-image-config.ts
|
|
313
|
+
/** Upserts the per-image ConfigMap with `data` in the given environment. */
function updateImageConfigMap(monorepoEnv, image2, data = {}) {
  const name = imageConfigMap(image2);
  return new CommandExecutor(upsertConfigMapCommand(monorepoEnv, name, data)).exec();
}
/**
 * Reads the per-image ConfigMap's `.data` as an object.
 * Returns {} when the ConfigMap is absent or has no data; exits the
 * process when kubectl's output is not valid JSON.
 */
function getImageConfigMap(monorepoEnv, image2) {
  const name = imageConfigMap(image2);
  const { statusCode, stdout } = new CommandExecutor(
    kubectlCommand(`get configmap ${name} -o jsonpath='{.data}'`, { monorepoEnv }),
    { quiet: true }
  ).exec({ asObject: true });
  if (statusCode !== 0 || !stdout) return {};
  try {
    return JSON.parse(stdout);
  } catch {
    console.error(
      `Error parsing config map data for ${image2}. Received: ${stdout}`
    );
    process.exit(1);
  }
}
|
|
338
|
+
/**
 * Parses a JSON-serialized entry of the image ConfigMap.
 * Returns {} when the key is absent or falsy; exits on malformed JSON.
 */
function deserializeImageConfigMapKey(monorepoEnv, image2, key) {
  const value = getImageConfigMap(monorepoEnv, image2)[key];
  if (!value) return {};
  try {
    return JSON.parse(value);
  } catch {
    console.error(`Error parsing config map data for ${image2} for key ${key}. Received: ${value}`);
    process.exit(1);
  }
}
/** The version recorded on the image ConfigMap, or undefined. */
function getImageVersion(monorepoEnv, image2) {
  return getImageConfigMap(monorepoEnv, image2).version;
}
/** Records `version` on the image ConfigMap, preserving all other keys. */
function setImageVersion(monorepoEnv, image2, version) {
  const current = getImageConfigMap(monorepoEnv, image2);
  return updateImageConfigMap(monorepoEnv, image2, { ...current, version });
}
/** Removes the recorded version from the image ConfigMap. */
function deleteImageVersion(monorepoEnv, image2) {
  const { version: _version, ...remaining } = getImageConfigMap(monorepoEnv, image2);
  return updateImageConfigMap(monorepoEnv, image2, remaining);
}
|
|
360
|
+
/**
 * Scales the workspace's k8s deployment to `replicaCount` replicas.
 * Returns false (after logging) when the workspace declares no
 * deployment.service_name in package.json; true once the scale command
 * has been executed.
 */
function setK8sScale(monorepoEnv, workspaceName, replicaCount) {
  const workspaceData = getWorkspace(workspaceName);
  // service_name is only used here as a presence check.
  // NOTE(review): the scale command below targets `deployment ${workspaceName}`,
  // not serviceName — confirm deployments are named after the workspace.
  const serviceName = workspaceData.packageDataEntries.find((x) => x.deployment?.service_name)?.deployment?.service_name;
  if (!serviceName) {
    console.error(
      `Workspace ${workspaceName} must have a service_name defined in its deployment key in package.json. Skipping.`
    );
    return false;
  }
  new CommandExecutor(
    kubectlCommand(
      `scale deployment ${workspaceName} --replicas=${replicaCount}`,
      { monorepoEnv }
    )
  ).exec();
  return true;
}
|
|
377
|
+
/**
 * Records a replica count for `workspaceName` under the image ConfigMap's
 * JSON-serialized "scale" key and applies it to the cluster.
 * Exits when the workspace has no deployment data or replicaCount < 1.
 * @returns the previously recorded scale (default 1), or undefined when
 *          the k8s scale command was not applicable for the workspace.
 */
function setWorkspaceScale(monorepoEnv, image2, workspaceName, replicaCount) {
  const workspaceData = getWorkspace(workspaceName);
  if (!workspaceData.packageDataEntries.find((x) => x.deployment)) {
    console.error(`Workspace ${workspaceName} does not have deployment data.`);
    process.exit(1);
  }
  if (replicaCount < 1) {
    console.error("Replica count must be at least 1.");
    process.exit(1);
  }
  // Raw ConfigMap minus the "scale" key, plus the parsed scale map.
  const { scale: _scale, ...rest } = getImageConfigMap(monorepoEnv, image2);
  const parsedScale = deserializeImageConfigMapKey(monorepoEnv, image2, "scale");
  // Apply to the cluster first; skip the ConfigMap write when not applicable.
  const isApplicable2 = setK8sScale(monorepoEnv, workspaceName, replicaCount);
  if (!isApplicable2) return;
  updateImageConfigMap(monorepoEnv, image2, {
    ...rest,
    scale: JSON.stringify({
      ...parsedScale,
      [workspaceName]: replicaCount
    })
  });
  return parsedScale?.[workspaceName] ?? 1;
}
|
|
400
|
+
/**
 * Replica count recorded for `workspaceName` in the image's "scale" map
 * (default 1), or the whole scale map when no workspace is given.
 */
function getWorkspaceScale(monorepoEnv, image2, workspaceName) {
  const scaleMap = deserializeImageConfigMapKey(monorepoEnv, image2, "scale");
  if (!workspaceName) return scaleMap ?? {};
  // Validates the workspace exists before reading its entry.
  getWorkspace(workspaceName);
  return scaleMap?.[workspaceName] ?? 1;
}
|
|
406
|
+
/**
 * Clears recorded scale overrides and returns deployments to 1 replica.
 * With `workspaceName`: only that entry is removed from the "scale" map
 * (its previous value is returned). Without it: the whole "scale" key is
 * dropped and every workspace recorded above 1 replica is scaled back to
 * 1 (the old scale map is returned).
 */
function resetWorkspaceScale(monorepoEnv, image2, workspaceName) {
  const { scale: _scale, ...rest } = getImageConfigMap(monorepoEnv, image2);
  const parsedScale = deserializeImageConfigMapKey(monorepoEnv, image2, "scale");
  if (!workspaceName) {
    // Drop the whole scale key, then undo every recorded scale-up.
    updateImageConfigMap(monorepoEnv, image2, rest);
    Object.entries(parsedScale ?? {}).filter(([_name, scale]) => Number(scale) > 1).forEach(([name, _scale2]) => {
      setK8sScale(monorepoEnv, name, 1);
    });
    return parsedScale;
  } else {
    const oldScale = parsedScale?.[workspaceName] ?? 1;
    const newScale = { ...parsedScale };
    delete newScale[workspaceName];
    updateImageConfigMap(monorepoEnv, image2, { ...rest, scale: JSON.stringify(newScale) });
    setK8sScale(monorepoEnv, workspaceName, 1);
    return oldScale;
  }
}
|
|
424
|
+
|
|
425
|
+
// src/libs/affected-entities.ts
|
|
426
|
+
/**
 * Whether any workspace baked into `image2` changed between two commits.
 * With `fromLiveVersion`, the base commit is the image's recorded live
 * version; a missing recorded version, or a missing commit on either end,
 * counts as affected.
 * @throws {Error} when fromLiveVersion is set without monorepoEnv.
 */
function isImageAffected(image2, opts = {}) {
  const headSha = opts.headSha ?? "HEAD";
  let baseSha;
  if (opts.fromLiveVersion) {
    if (!opts.monorepoEnv) {
      throw new Error("monorepoEnv is required when fromLiveVersion is true");
    }
    baseSha = getImageVersion(opts.monorepoEnv, image2);
    // No recorded live version: nothing to diff against, treat as affected.
    if (!baseSha) return true;
  }
  baseSha ??= opts.baseSha ?? "HEAD^";
  const descendentData = getImageDescendentData(image2);
  if (!commitExists(baseSha) || !commitExists(headSha)) return true;
  return descendentData.some(
    ({ rootPath }) => isAffected(rootPath, { baseSha, headSha, skipCheck: true })
  );
}
|
|
446
|
+
/** All image names affected under `opts` (see isImageAffected). */
function findImagesAffected(opts = {}) {
  return getImageNames().filter((imageName) => isImageAffected(imageName, opts));
}
/** Whether the workspace's root path changed between the commits in `opts`. */
function isWorkspaceAffected(workspaceName, opts = {}) {
  const { rootPath } = getWorkspace(workspaceName);
  return isAffected(rootPath, opts);
}
|
|
455
|
+
/**
 * Image names that contain `workspaceName` and whose commit range includes
 * a change under the workspace's root path. With `fromLiveVersion`, each
 * image is compared against its own recorded live version instead of
 * opts.baseSha / HEAD^.
 * @throws {Error} when fromLiveVersion is set without monorepoEnv.
 */
function findImagesWithAffectedWorkspace(workspaceName, opts = {}) {
  const res = [];
  const headSha = opts.headSha ?? "HEAD";
  const defaultBaseSha = opts.baseSha ?? "HEAD^";
  const rootPath = getWorkspace(workspaceName).rootPath;
  if (opts.fromLiveVersion && !opts.monorepoEnv) {
    throw new Error("monorepoEnv is required when fromLiveVersion is true");
  }
  for (const imageName of getWorkspaceImages(workspaceName)) {
    // NOTE(review): when fromLiveVersion is set but the image has no recorded
    // version, baseSha is undefined and isAffected falls back to "HEAD^" —
    // isImageAffected treats that case as affected instead; confirm intent.
    const baseSha = opts.fromLiveVersion ? getImageVersion(opts.monorepoEnv, imageName) : defaultBaseSha;
    if (isAffected(rootPath, { baseSha, headSha })) {
      res.push(imageName);
    }
  }
  return res;
}
|
|
471
|
+
|
|
472
|
+
// src/cli/core/affected.ts
|
|
473
|
+
var oneLiner4 = "Command to check whether an image or a workspace is affected by certain commit(s)";
|
|
474
|
+
var keyExamples4 = `
|
|
475
|
+
$ devops affected list-images
|
|
476
|
+
$ devops affected workspace db --base <sha1> --head <sha2>
|
|
477
|
+
$ devops affected image main-node --from-live-version
|
|
478
|
+
$ devops affected find-migrator --from-live-version
|
|
479
|
+
`;
|
|
480
|
+
var usage4 = `
|
|
481
|
+
${oneLiner4}
|
|
482
|
+
|
|
483
|
+
GENERAL USAGE
|
|
484
|
+
List
|
|
485
|
+
devops affected list-images --base [SHA1] --head [SHA2]
|
|
486
|
+
devops affected list-images --from-live-version
|
|
487
|
+
|
|
488
|
+
These return a list of all images affected by the given commits.
|
|
489
|
+
|
|
490
|
+
Checkers
|
|
491
|
+
devops affected workspace <workspace> --base [SHA1] --head [SHA2]
|
|
492
|
+
|
|
493
|
+
devops affected image <image> --base [SHA1] --head [SHA2]
|
|
494
|
+
devops affected image <image> --from-live-version
|
|
495
|
+
|
|
496
|
+
These return "true" or "false".
|
|
497
|
+
|
|
498
|
+
Finders
|
|
499
|
+
devops affected find-migrator --base [SHA1] --head [SHA2]
|
|
500
|
+
devops affected find-migrator --from-live-version
|
|
501
|
+
|
|
502
|
+
When --base and --head are used, it checks whether the db project is affected. If it is, it returns the name of one random
|
|
503
|
+
affected image.
|
|
504
|
+
When --from-live-version is used, it iterates on the live version of each image that depends on db, and returns the first image that is
|
|
505
|
+
affected by a db project change since that commit.
|
|
506
|
+
|
|
507
|
+
If the db project is unaffected, returns an empty string.
|
|
508
|
+
|
|
509
|
+
Options
|
|
510
|
+
Which commits are regarded for the affected calculation can be changed using:
|
|
511
|
+
--base Base of the current branch (HEAD^ by default)
|
|
512
|
+
--head Latest commit of the current branch (HEAD by default)
|
|
513
|
+
--from-live-version Use the live version of the image/workspace as the base (see devops image version get)
|
|
514
|
+
|
|
515
|
+
If --from-live-version is present, --base and --head are ignored.
|
|
516
|
+
|
|
517
|
+
EXAMPLES
|
|
518
|
+
${keyExamples4.trim()}
|
|
519
|
+
`;
|
|
520
|
+
/**
 * CLI entry point for "devops affected": dispatches the list-images,
 * workspace, image, and find-migrator subcommands.
 */
async function run4(cmdObj) {
  const options = cmdObj.parseOptions({
    params: ["--base", "--head"],
    booleans: ["--from-live-version"]
  });
  if (cmdObj.help || options.args.length < 1) printUsageAndExit(usage4);
  const baseSha = options.options["--base"]?.toString();
  const headSha = options.options["--head"]?.toString();
  const fromLiveVersion = Boolean(options.options["--from-live-version"]);
  // Options shared by every subcommand's affected computation.
  const commonOpts = {
    baseSha,
    headSha,
    fromLiveVersion,
    monorepoEnv: cmdObj.env
  };
  const [cmd, imageOrWorkspace] = options.args;
  const params = new StrongParams(usage4, { imageOrWorkspace });
  switch (cmd) {
    case "list-images": {
      // One affected image name per line.
      console.log(findImagesAffected(commonOpts).join("\n"));
      break;
    }
    case "workspace": {
      const affected2 = isWorkspaceAffected(params.required("imageOrWorkspace"), commonOpts);
      console.log(affected2 ? "true" : "false");
      break;
    }
    case "image": {
      const affected2 = isImageAffected(
        params.required("imageOrWorkspace"),
        commonOpts
      );
      console.log(affected2 ? "true" : "false");
      break;
    }
    case "find-migrator": {
      // Images that include the "db" workspace and saw db changes; print the
      // first one allowed to migrate, or nothing when db is unaffected.
      const migrator = findImagesWithAffectedWorkspace("db", commonOpts);
      if (!migrator.length) break;
      const firstMigrator = migrator.find((imageName) => getImageData(imageName)["can-db-migrate"]);
      if (!firstMigrator) {
        console.error(`The db project was changed and affects the following images: ${migrator.join(", ")}. However, no image in the list has can-db-migrate=true in .devops/config/images.yaml.`);
        process.exit(1);
      }
      console.log(firstMigrator);
      break;
    }
    default:
      printUsageAndExit(usage4);
  }
}
// Command descriptor consumed by the CLI registry.
var affected = { oneLiner: oneLiner4, keyExamples: keyExamples4, run: run4 };
|
|
571
|
+
|
|
572
|
+
// src/libs/cloudrun-helpers.ts
|
|
573
|
+
import { randomBytes } from "crypto";
|
|
574
|
+
|
|
575
|
+
// src/libs/k8s-secrets-manager.ts
|
|
576
|
+
// Single key under which the whole env object is stored in the k8s secret.
var SECRET_FILE_NAME = "env_json";
/** Serializes `secretValue` and patches it into the env secret's key. */
function execUpdateSecret(monorepoEnv, secretValue) {
  const payload = JSON.stringify(secretValue);
  const { fullCommand, redactedCommand } = patchSecretKeyCommand(monorepoEnv, secretName(), SECRET_FILE_NAME, payload);
  new CommandExecutor(fullCommand, { quiet: true, redactedCommand }).exec();
}
|
|
581
|
+
/**
 * Fetches the monorepo env secret as an object, optionally restricted to
 * `keys` (entries with falsy values are dropped from the filtered result).
 * Returns {} when the secret has no data.
 */
function getMonorepoSecretObject(monorepoEnv, keys = []) {
  const escapedSecretFileName = SECRET_FILE_NAME.replaceAll(".", "\\.");
  const cmd = kubectlCommand(`get secrets/${secretName()} -o jsonpath="{.data['${escapedSecretFileName}']}"`, { monorepoEnv });
  const encoded = new CommandExecutor(cmd, { quiet: true }).exec();
  if (!encoded) return {};
  // Secret data is base64-encoded JSON.
  const secretObject = JSON.parse(atob(encoded));
  if (!keys || keys.length === 0) return secretObject;
  const selected = {};
  for (const key of keys) {
    if (secretObject[key]) selected[key] = secretObject[key];
  }
  return selected;
}
|
|
592
|
+
/** Merges `vars` into the current env secret; exits when nothing to set. */
function updateSecret(monorepoEnv, vars) {
  if (!vars || Object.keys(vars).length === 0) {
    console.error(
      "Keys-value pairs to set must be provided, e.g. KEY1=val1 KEY2=val2"
    );
    process.exit(1);
  }
  const existing = getMonorepoSecretObject(monorepoEnv);
  execUpdateSecret(monorepoEnv, { ...existing, ...vars });
}
/** Removes `keys` from the env secret; exits when no keys were given. */
function deleteSecretKeys(monorepoEnv, keys = []) {
  if (!keys?.length) {
    console.error("Keys to delete must be provided");
    process.exit(1);
  }
  const remaining = getMonorepoSecretObject(monorepoEnv);
  for (const key of keys) {
    delete remaining[key];
  }
  execUpdateSecret(monorepoEnv, remaining);
}
|
|
612
|
+
/**
 * Secret values as text: the bare value when exactly one key is requested,
 * otherwise newline-separated KEY=value lines.
 */
function getMonorepoSecretStr(monorepoEnv, keys = []) {
  const secretObject = getMonorepoSecretObject(monorepoEnv, keys);
  if (keys.length === 1) {
    return Object.values(secretObject)[0];
  }
  const lines = [];
  for (const [key, value] of Object.entries(secretObject)) {
    lines.push(`${key}=${value}`);
  }
  return lines.join("\n");
}
/** Parses KEY=value pairs (values may themselves contain '=') and stores them. */
function setMonorepoSecret(monorepoEnv, pairs = []) {
  const parsed = {};
  for (const pair of pairs) {
    const [key, ...valueParts] = pair.split("=");
    parsed[key] = valueParts.join("=");
  }
  updateSecret(monorepoEnv, parsed);
}
/** Thin alias over deleteSecretKeys. */
function deleteMonorepoSecret(monorepoEnv, keys = []) {
  deleteSecretKeys(monorepoEnv, keys);
}
|
|
631
|
+
|
|
632
|
+
// src/libs/cloudrun-helpers.ts
|
|
633
|
+
import chalk2 from "chalk";
|
|
634
|
+
/** Exits unless `image2` is flagged `cloudrun: true` in images.yaml. */
function verifyCloudrunImage(image2) {
  if (!getImageData(image2)["cloudrun"]) {
    console.error(`Image ${image2} is not a cloudrun image. Add "cloudrun: true" in images.yaml`);
    process.exit(1);
  }
}
|
|
641
|
+
/**
 * Resolves the values of `forwardEnv` variable names, first from the
 * current process environment and then (for remote monorepo environments)
 * from the cluster secret. Exits the process when any remain unresolved.
 * @param {string} env2 - monorepo environment name.
 * @param {string[]} forwardEnv - env var names to forward.
 * @returns {Object<string, string>} name -> resolved value.
 */
function getEnvValuesToForward(env2, forwardEnv) {
  if (!forwardEnv.length) return {};
  const resolved = {};
  const unresolved = /* @__PURE__ */ new Set();
  for (const name of forwardEnv) {
    const fromProcess = process.env[name];
    if (fromProcess) {
      resolved[name] = fromProcess;
    } else {
      unresolved.add(name);
    }
  }
  if (unresolved.size > 0 && isLocalOrRemoteEnv(env2) === "remote") {
    const fromCluster = getMonorepoSecretObject(env2, Array.from(unresolved));
    for (const [name, value] of Object.entries(fromCluster)) {
      resolved[name] = value;
      unresolved.delete(name);
    }
  }
  if (unresolved.size > 0) {
    console.error(`Some forwardEnv variables are missing: ${Array.from(unresolved).join(", ")}`);
    process.exit(1);
  }
  return resolved;
}
|
|
666
|
+
/**
 * Builds a cloudrun image locally for the development environment under a
 * random 24-hex-char tag, pushes it, and prints the generated sha on
 * stdout (progress and follow-up instructions go to stderr).
 */
async function buildDev(image2) {
  verifyCloudrunImage(image2);
  const env2 = "development";
  // Random pseudo-sha used as the image tag (12 bytes -> 24 hex chars).
  const sha = randomBytes(12).toString("hex");
  // prep-build prints the prepared build directory on stdout.
  const buildDir = new CommandExecutor(`devops prep-build ${image2}`, {
    env: env2,
    quiet: true
  }).exec().trim();
  const tag = containerRegistryRepoPath(image2, env2, sha);
  console.warn(`Building ${tag} from ${buildDir}`);
  await new CommandExecutor(
    `docker build --platform linux/amd64 -t ${tag} ${buildDir} --build-arg MONOREPO_ENV=${env2}`,
    { env: env2 }
  ).spawn({ pipeStdoutTo: "stderr" });
  console.warn(`Pushing ${tag}`);
  await new CommandExecutor(`docker push ${tag}`, { env: env2 }).spawn({ pipeStdoutTo: "stderr" });
  console.warn(`
\u2705 Built and pushed ${tag}
`);
  console.warn('Run "devops cloudrun deploy" next. For example:');
  console.warn(chalk2.blue(`./devops cloudrun deploy ${image2} ${sha} --env ${env2} --allow-unauthenticated --region us-east1 --forward-env ENV1,ENV2 -- --service-account RUNTIME_SA`));
  console.warn(chalk2.yellow(`

RUNTIME_SA is the name of the service account used to run the Cloud Run service.`));
  console.warn(chalk2.yellow(`Find it with "gcloud iam service-accounts list"
`));
  // Only the sha goes to stdout so callers can capture it.
  console.log(sha);
}
|
|
694
|
+
/**
 * Deploys a previously pushed cloudrun image to Google Cloud Run via
 * `gcloud run deploy`. The service is named `<image>-<env>`.
 * @param {Object} args
 * @param {string} args.image - cloudrun image name (must be cloudrun: true).
 * @param {string} args.env - monorepo environment.
 * @param {string} args.sha - tag of the already-pushed image.
 * @param {string} args.region - Cloud Run region.
 * @param {string[]} [args.forwardEnv] - env var names to forward.
 * @param {boolean} [args.allowUnauthenticated]
 * @param {string} [args.cpu] / [args.memory] / [args.timeout]
 * @param {number} [args.minInstances] / [args.maxInstances]
 * @param {string} [args.extraArgs] - passed through to gcloud verbatim.
 */
async function deploy({
  image: image2,
  env: env2,
  sha,
  region,
  forwardEnv = [],
  allowUnauthenticated = false,
  cpu = "0.25",
  memory = "256Mi",
  minInstances = 0,
  maxInstances = 1,
  timeout = "60s",
  extraArgs = ""
}) {
  verifyCloudrunImage(image2);
  const repoPath = containerRegistryRepoPath(image2, env2, sha);
  const envValues = getEnvValuesToForward(env2, forwardEnv);
  const envPairs = Object.entries(envValues).map(([key, value]) => `${key}="${value}"`);
  const serviceName = `${image2}-${env2}`;
  // Assemble the flag list; empty segments collapse away when the command
  // is normalized to a single line below.
  const segments = [
    `gcloud run deploy ${serviceName}`,
    `--image ${repoPath}`,
    envPairs.length > 0 ? `--set-env-vars ${envPairs.join(",")}` : "",
    allowUnauthenticated ? "--allow-unauthenticated" : "",
    `--region ${region}`,
    `--cpu ${cpu}`,
    `--memory ${memory}`,
    `--min-instances ${minInstances}`,
    `--max-instances ${maxInstances}`,
    `--timeout ${timeout}`,
    extraArgs
  ];
  const cmd = segments.join("\n").trim().replace(/\s+/g, " ");
  await new CommandExecutor(cmd, { env: env2 }).spawn();
}
|
|
728
|
+
|
|
729
|
+
// src/cli/core/cloudrun.ts
|
|
730
|
+
var oneLiner5 = "Supports cloudrun images";
|
|
731
|
+
var keyExamples5 = `
|
|
732
|
+
$ devops cloudrun deploy cloudrun-image SHA --env staging --region us-east1 [--forward-env ENV1,ENV2 --allow-unauthenticated]
|
|
733
|
+
$ devops cloudrun build-dev cloudrun-image
|
|
734
|
+
`.trim();
|
|
735
|
+
var usage5 = `
|
|
736
|
+
${oneLiner5}
|
|
737
|
+
|
|
738
|
+
USAGE
|
|
739
|
+
Configuration prerequisites:
|
|
740
|
+
- The image should be defined in images.yaml with:
|
|
741
|
+
cloudrun: true
|
|
742
|
+
- The artifact registry URL should be set in config/constants.yaml:
|
|
743
|
+
cloudrun-artifact-registry-repo-path: REGION-docker.pkg.dev/PROJECT_ID/REPO
|
|
744
|
+
|
|
745
|
+
Deploy a cloudrun image to Cloud Run:
|
|
746
|
+
devops cloudrun deploy <image> <sha> --env <env> --region <region> [options]
|
|
747
|
+
|
|
748
|
+
Options:
|
|
749
|
+
--forward-env ENV1,ENV2 Comma-separated env var names to forward into the service
|
|
750
|
+
--allow-unauthenticated Allow unauthenticated access
|
|
751
|
+
--cpu <cpu> CPU, e.g. 0.25, 0.5, 1
|
|
752
|
+
--memory <mem> Memory, e.g. 256Mi, 512Mi, 1Gi
|
|
753
|
+
--min-instances <n> Minimum instances
|
|
754
|
+
--max-instances <n> Maximum instances
|
|
755
|
+
--timeout <time> Request timeout, e.g. 60s
|
|
756
|
+
-- Pass through additional args to gcloud (e.g. -- --ingress internal)
|
|
757
|
+
|
|
758
|
+
Notes:
|
|
759
|
+
- The image must already be pushed to the artifact registry.
|
|
760
|
+
- <env> also supports local environments (e.g. development).
|
|
761
|
+
- For remote monorepo environments, variables specified in --forward-env that
|
|
762
|
+
are not present in the current process's env are fetched from the cluster.
|
|
763
|
+
|
|
764
|
+
Build a cloudrun image locally in development environment:
|
|
765
|
+
devops cloudrun build-dev cloudrun-image
|
|
766
|
+
|
|
767
|
+
This command builds the image locally with a random SHA and pushes it to the artifact registry.
|
|
768
|
+
|
|
769
|
+
EXAMPLES
|
|
770
|
+
${keyExamples5}
|
|
771
|
+
`;
|
|
772
|
+
// Subcommand handlers for "devops cloudrun"; each receives a StrongParams.
var handlers = {
  "build-dev": (opts) => {
    // NOTE(review): buildDev is async but not awaited — failures surface as
    // unhandled rejections; confirm fire-and-forget is intended.
    buildDev(opts.required("image"));
  },
  // Internal deploy entry; the public "deploy" subcommand re-invokes the
  // CLI as "_deploy" (see run5) after environment resolution.
  _deploy: (opts) => {
    const rawForwardEnv = opts.optional("forwardEnv");
    const forwardEnv = rawForwardEnv ? rawForwardEnv.split(",").map((v) => v.trim()).filter(Boolean) : [];
    const minInstancesStr = opts.optional("minInstances");
    const maxInstancesStr = opts.optional("maxInstances");
    deploy({
      image: opts.required("image"),
      env: opts.required("env"),
      sha: opts.required("sha"),
      region: opts.required("region"),
      forwardEnv,
      // Only the literal string "true" enables unauthenticated access.
      allowUnauthenticated: opts.optional("allowUnauthenticated") === "true",
      cpu: opts.optional("cpu"),
      memory: opts.optional("memory"),
      // undefined lets deploy() fall back to its own parameter defaults.
      minInstances: minInstancesStr ? Number(minInstancesStr) : void 0,
      maxInstances: maxInstancesStr ? Number(maxInstancesStr) : void 0,
      timeout: opts.optional("timeout"),
      extraArgs: opts.optional("extraArgs")
    });
  }
};
|
|
797
|
+
/**
 * CLI entry point for "devops cloudrun". The public "deploy" subcommand is
 * re-executed via executorFromEnv (which becomes "_deploy" in handlers);
 * all other subcommands are dispatched through the handlers table.
 */
async function run5(cmdObj) {
  if (cmdObj.help || cmdObj.args.length === 0) printUsageAndExit(usage5);
  const parsed = cmdObj.parseOptions({
    params: [
      "--keep-last",
      "--forward-env",
      "--region",
      "--cpu",
      "--memory",
      "--min-instances",
      "--max-instances",
      "--timeout",
      "--sha"
    ],
    booleans: ["--allow-unauthenticated"],
    passthroughArgs: true
  });
  const [subcommand, image2, sha] = parsed.args;
  if (subcommand === "deploy") {
    // Re-invoke the CLI with environment resolution applied; original
    // arguments (minus "deploy") are forwarded verbatim.
    cmdObj.executorFromEnv(
      `devops cloudrun _deploy ${cmdObj.args.slice(1).join(" ")}`,
      { checkEnvYaml: false }
    ).spawn();
    return;
  }
  const handler = handlers[subcommand];
  if (!handler) {
    console.error(`Unknown subcommand: ${subcommand}`);
    printUsageAndExit(usage5);
  }
  const params = new StrongParams(usage5, {
    env: cmdObj.env,
    subcommand,
    image: image2,
    sha,
    keepLast: parsed.options["--keep-last"],
    forwardEnv: parsed.options["--forward-env"],
    region: parsed.options["--region"],
    allowUnauthenticated: parsed.options["--allow-unauthenticated"] ? "true" : void 0,
    cpu: parsed.options["--cpu"],
    memory: parsed.options["--memory"],
    minInstances: parsed.options["--min-instances"],
    maxInstances: parsed.options["--max-instances"],
    timeout: parsed.options["--timeout"],
    // Everything after "--" is forwarded to gcloud untouched.
    extraArgs: parsed.passthrough ? parsed.passthrough.join(" ") : void 0
  });
  handler(params);
}
// Command descriptor consumed by the CLI registry.
var cloudrun = { oneLiner: oneLiner5, keyExamples: keyExamples5, run: run5 };
|
|
846
|
+
|
|
847
|
+
// src/libs/k8s-namespace.ts
|
|
848
|
+
import { randomBytes as randomBytes2 } from "crypto";
|
|
849
|
+
// Key under which the randomly generated base secret is stored.
var BASE_SECRET_KEY = "baseSecret";
/** True when the namespace backing `monorepoEnv` already exists. */
function checkEnvSetup(monorepoEnv) {
  const namespace2 = envToNamespace(monorepoEnv);
  const exitCode = new CommandExecutor(kubectlCommand(`get ns ${namespace2}`)).exec({
    onlyStatusCode: true
  });
  return exitCode === 0;
}
/** Creates the k8s namespace for `monorepoEnv`. */
function createNamespace(monorepoEnv) {
  const cmd = kubectlCommand(`create ns ${envToNamespace(monorepoEnv)}`);
  new CommandExecutor(cmd).exec();
}
/** Creates the env secret with no initial data in the env's namespace. */
function createEmptyEnvSecret(monorepoEnv) {
  new CommandExecutor(
    kubectlCommand(`create secret generic ${secretName()}`, { namespace: envToNamespace(monorepoEnv) })
  ).exec();
}
/** Writes a fresh 32-byte random base secret (command redacted in logs). */
function patchBaseSecret(monorepoEnv) {
  const randomValue = randomBytes2(32).toString("hex");
  const { fullCommand, redactedCommand } = patchSecretKeyCommand(
    monorepoEnv,
    secretName(),
    BASE_SECRET_KEY,
    randomValue
  );
  new CommandExecutor(fullCommand, { quiet: true, redactedCommand }).exec();
}
/** Deletes the namespace (and everything in it) for `monorepoEnv`. */
function deleteNamespace(monorepoEnv) {
  new CommandExecutor(kubectlCommand(`delete ns ${envToNamespace(monorepoEnv)}`)).exec();
}
|
|
879
|
+
|
|
880
|
+
// src/libs/k8s-generate.ts
|
|
881
|
+
import path3 from "path";
|
|
882
|
+
import yaml from "yaml";
|
|
883
|
+
import fs from "fs";
|
|
884
|
+
import { globSync } from "glob";
|
|
885
|
+
import _ from "lodash";
|
|
886
|
+
import Handlebars from "handlebars";
|
|
887
|
+
// Root folder holding the repo's k8s manifest templates.
var MANIFEST_FOLDER_PATH = path3.join(process.cwd(), ".devops/manifests");
// Index file referenced in error messages when a template has no entries
// (presumably maps template names to manifest files — see manifestFilesForTemplate).
var MANIFEST_INDEX_FILE_PATH = path3.join(MANIFEST_FOLDER_PATH, "_index.yaml");
// Template name used to render db-migration job manifests.
var DB_MIGRATE_TEMPLATE_NAME = "db-migrate";
|
|
890
|
+
/** Exits when `image2` is a cloudrun image (invalid for k8s generation). */
function verifyNotCloudrunImage(image2) {
  if (getImageData(image2)["cloudrun"]) {
    console.error(`Image ${image2} is a cloudrun image. Cloudrun images are not supported for k8s generation`);
    process.exit(1);
  }
}
|
|
897
|
+
/**
 * Renders the k8s deployment manifests for every deployable workspace in
 * `image2`, joined as a multi-document YAML string. Exits when any
 * rendered manifest is missing a domain for the environment.
 */
function generateImageDeployments(monorepoEnv, image2, gitSha) {
  verifyNotCloudrunImage(image2);
  const generator = new ImageContextGenerator(monorepoEnv, image2, gitSha);
  const deployables = getImageDescendentData(image2).filter((packageData) => packageData.deployment);
  const apps = deployables.flatMap((projectData) => {
    const context = generator.getDeployment(projectData);
    const renderFn = (template2) => Handlebars.compile(template2)(context);
    return generateManifestForDeployment(projectData.rootPath, projectData.deployment.template, renderFn);
  });
  const manifest = apps.filter(Boolean).join("\n---\n");
  return ensureProperDomainsPresent(manifest, monorepoEnv, image2);
}
/** Renders the manifests for a single workspace's deployment as YAML. */
function generateWorkspaceDeployment(packageData, monorepoEnv, image2, gitSha) {
  verifyNotCloudrunImage(image2);
  const generator = new ImageContextGenerator(monorepoEnv, image2, gitSha);
  const context = generator.getDeployment(packageData);
  const renderFn = (template2) => Handlebars.compile(template2)(context);
  const docs = generateManifestForDeployment(packageData.rootPath, packageData.deployment.template, renderFn);
  return ensureProperDomainsPresent(docs.join("\n---\n"), monorepoEnv, image2);
}
|
|
916
|
+
/**
 * Renders the image's debug pod manifest(s) as multi-document YAML, or
 * undefined when the image has no "debug-template" configured. Schema
 * validation is skipped for debug manifests.
 */
function generateDebugPod(monorepoEnv, image2, gitSha) {
  verifyNotCloudrunImage(image2);
  const generator = new ImageContextGenerator(monorepoEnv, image2, gitSha);
  const context = generator.getDebug();
  const renderFn = (template2) => Handlebars.compile(template2)(context);
  const debugTemplate = getImageData(image2)["debug-template"];
  if (!debugTemplate) return;
  const docs = generateManifestsFromTemplateName(debugTemplate, renderFn, { validate: false });
  return docs.map((doc) => yaml.stringify(doc)).join("\n---\n");
}
/** Renders the db-migrate job manifests for the image as multi-doc YAML. */
function generateDbMigrateJob(monorepoEnv, image2, gitSha) {
  verifyNotCloudrunImage(image2);
  const generator = new ImageContextGenerator(monorepoEnv, image2, gitSha);
  const context = generator.getDbMigrate();
  const renderFn = (template2) => Handlebars.compile(template2)(context);
  const docs = generateManifestsFromTemplateName(DB_MIGRATE_TEMPLATE_NAME, renderFn);
  return docs.map((doc) => yaml.stringify(doc)).join("\n---\n");
}
|
|
932
|
+
// Guards against rendered manifests that contain the "missing domain" marker
// injected by the template context; exits the process when one is found,
// otherwise passes the manifest through unchanged.
function ensureProperDomainsPresent(manifest, monorepoEnv, image2) {
  const domainIsMissing = manifest.includes(MISSING_DOMAIN_KEY_ERROR);
  if (!domainIsMissing) return manifest;
  console.error(`The image ${image2} does not have a domain defined for the environment ${monorepoEnv}. Please add it to the .devops/config/images.yaml.`);
  process.exit(1);
}
|
|
939
|
+
// Combines the default manifests of a template with per-package overrides
// found under `<rootPath>/manifests`, matching documents by kind + name.
// NOTE(review): lodash `_.merge` deep-merges matched documents (arrays are
// merged index-wise) — presumably intentional for partial overrides; confirm.
function generateManifestForDeployment(rootPath, templateName, renderFn) {
  const baseManifests = generateManifestsFromTemplateName(templateName, renderFn);
  const overrideManifests = generateManifestFromFilesInFolder(rootPath, renderFn);
  const manifestKey = (manifest) => `${manifest.kind}-${manifest.metadata.name}`;
  const merged = _.merge(
    _.keyBy(baseManifests, manifestKey),
    _.keyBy(overrideManifests, manifestKey)
  );
  return Object.values(merged).map((manifest) => yaml.stringify(manifest));
}
|
|
948
|
+
// Looks up a template's file list in the manifest index and renders each file.
// Exits when the template is absent from the index.
function generateManifestsFromTemplateName(templateName, renderFn, options = { validate: true }) {
  const entries = manifestFilesForTemplate(templateName);
  if (!entries) {
    console.error(`No entries found for ${templateName} in ${MANIFEST_INDEX_FILE_PATH}`);
    process.exit(1);
  }
  const absolutePaths = entries.map((entry) => path3.join(MANIFEST_FOLDER_PATH, entry));
  return generateManifestsFromFileList(absolutePaths, renderFn, options);
}
|
|
956
|
+
// Renders every regular file under `<folderPath>/manifests` into manifest
// objects; returns an empty list when the override folder does not exist.
function generateManifestFromFilesInFolder(folderPath, renderFn) {
  const overridesDir = path3.join(folderPath, "manifests");
  if (!fs.existsSync(overridesDir)) {
    return [];
  }
  const candidates = globSync(path3.join(overridesDir, "**/*"));
  const files = candidates.filter((candidate) => fs.lstatSync(candidate).isFile());
  return generateManifestsFromFileList(files, renderFn);
}
|
|
964
|
+
// Reads, renders (Handlebars), and YAML-parses each file in `filesList`,
// returning one plain JS object per YAML document across all files.
// When `options.validate` is set (the default), exits the process on any
// document missing `kind` or `metadata.name`; also exits on read/parse errors.
function generateManifestsFromFileList(filesList, renderFn, options = { validate: true }) {
  return filesList.flatMap((filePath) => {
    try {
      const manifestFileStr = fs.readFileSync(filePath, "utf8");
      const renderedStr = renderFn(manifestFileStr);
      // Files may contain multiple documents separated by `---`.
      const res = yaml.parseAllDocuments(renderedStr);
      if (options.validate) {
        res.forEach((doc) => {
          if (!doc.get("kind") || !doc.getIn(["metadata", "name"])) {
            console.error(`Invalid manifest file ${filePath}: kind and metadata.name must be present`);
            console.error(doc.toString());
            process.exit(1);
          }
        });
      }
      return res.map((x) => x.toJSON());
    } catch (e) {
      if (e instanceof Error) {
        console.error(`Could not parse ${filePath}: ${e.message}`);
      } else {
        console.error(`Could not parse ${filePath}`);
      }
      process.exit(1);
    }
  });
}
|
|
990
|
+
// Module-level cache of the parsed manifest index file; populated lazily on
// first lookup and reused for the remainder of the process.
var _manifestIndex;
// Returns the list of manifest file names registered for `template2` in the
// manifest index YAML (or undefined when the template has no entry).
// Exits the process if the index file cannot be read or parsed.
function manifestFilesForTemplate(template2) {
  if (!_manifestIndex) {
    try {
      const indexFileStr = fs.readFileSync(MANIFEST_INDEX_FILE_PATH, "utf8");
      _manifestIndex = yaml.parse(indexFileStr);
    } catch {
      console.error(`Unable to process ${MANIFEST_INDEX_FILE_PATH}`);
      process.exit(1);
    }
  }
  return _manifestIndex[template2];
}
|
|
1003
|
+
// Builds the Handlebars rendering contexts (deployment / db-migrate / debug)
// shared by all manifests generated for a single image + environment + sha.
var ImageContextGenerator = class {
  constructor(monorepoEnv, image2, gitSha) {
    this.monorepoEnv = monorepoEnv;
    this.image = image2;
    this.gitSha = gitSha;
    // Per-workspace replica counts persisted in the image's config map.
    this.replicaMap = getWorkspaceScale(monorepoEnv, image2);
    // Context fields common to every template rendered for this image.
    this.imageContext = {
      monorepo_env: monorepoEnv,
      namespace: envToNamespace(monorepoEnv),
      env_secret_name: secretName(),
      env_base_secret_key: BASE_SECRET_KEY,
      domain_name: domainNameForEnv(image2, monorepoEnv),
      image_path: containerRegistryRepoPath(image2, monorepoEnv, gitSha)
    };
  }
  replicaMap;
  imageContext;
  // Context for a workspace deployment; exits if the workspace's package data
  // lacks a `deployment` section.
  getDeployment(pkgData) {
    if (!pkgData.deployment) {
      console.error(`The deployment key is missing for workspace ${pkgData.name}`);
      process.exit(1);
    }
    return {
      // Basic context
      project_name: pkgData.name,
      ...this.imageContext,
      // Defaults that can be overridden by pkgData.deployment
      app_name: pkgData.name,
      subdomain: pkgData.deployment.service_name,
      // This may override the defaults above
      ...pkgData.deployment,
      // Override from config map; defaults to a single replica.
      replicas: this.replicaMap[pkgData.name] ?? 1
    };
  }
  // Context for the db-migrate Job template.
  getDbMigrate() {
    return {
      ...this.imageContext,
      db_migrate_job_name: dbMigrateJobName(this.gitSha)
    };
  }
  // Context for the debug-pod template.
  getDebug() {
    return {
      ...this.imageContext,
      debug_pod_name: imageDebugName(this.image)
    };
  }
};
|
|
1051
|
+
|
|
1052
|
+
// src/cli/core/console.ts
|
|
1053
|
+
import yaml2 from "yaml";
|
|
1054
|
+
// Help text for `devops console`.
var oneLiner6 = "Spin up a debug pod of the specified image and get a shell into it.";
var keyExamples6 = `
$ devops console main-node
`.trim();
var usage6 = `
${oneLiner6}

USAGE
devops console <image> [--version <version>]

Options:
--version <version> The version (git SHA) of the image to use.
If not specified, the live version of the image (obtained using 'devops image version get <image>') is used.

EXAMPLES
${keyExamples6}
`;
|
|
1071
|
+
// Entry point for `devops console <image>`: resolves the image version,
// renders the debug-pod manifest, and spawns an interactive ephemeral pod
// via `kubectl run --overrides`.
function run6(cmdObj) {
  if (cmdObj.help || cmdObj.args.length === 0) printUsageAndExit(usage6);
  const parsed = cmdObj.parseOptions({ params: ["--version"] });
  if (parsed.args.length !== 1) printUsageAndExit(usage6);
  const image2 = parsed.args[0];
  const version = parsed.options["--version"];
  // Fall back to the currently-live version when --version is not given.
  const gitSha = version ?? getImageVersion(cmdObj.env, image2);
  if (!gitSha) {
    console.error(`No git SHA found for image ${image2} in environment ${cmdObj.env}`);
    process.exit(1);
  }
  const debugYaml = generateDebugPod(cmdObj.env, image2, gitSha);
  if (!debugYaml) {
    console.error(`The image ${image2} does not specify debug-template in images.yaml`);
    process.exit(1);
  }
  // The current kubectl identity is folded into the pod name for traceability.
  const userName = new CommandExecutor(`kubectl auth whoami -o jsonpath='{.status.userInfo.username}'`).exec();
  const debugManifestsJson = JSON.stringify(yaml2.parse(debugYaml));
  // Random suffix avoids collisions between concurrent console sessions.
  const randomId = Math.random().toString(36).substring(2, 10);
  const podName = ["ephemeral-console", slugify(userName), slugify(image2), randomId].filter(Boolean).join("-");
  // `--image=overridden` is a placeholder; the real spec comes from --overrides.
  new CommandExecutor(
    kubectlCommand(
      `run ${podName} --restart=Never --rm -it --image=overridden --overrides='${debugManifestsJson}'`,
      { monorepoEnv: cmdObj.env }
    )
  ).spawn();
}
|
|
1098
|
+
/**
 * Lowercases `str` and reduces it to a dash-separated slug of at most
 * `maxLength` characters, suitable for use inside a Kubernetes pod name.
 *
 * Fix: the original stripped leading/trailing dashes BEFORE truncating, so
 * `slice(0, maxLength)` could re-introduce a trailing "-" (invalid at the end
 * of a DNS-1123 label). Dashes are now also stripped after truncation.
 *
 * @param {string} str - arbitrary input (e.g. a username or image name)
 * @param {number} [maxLength=20] - maximum slug length
 * @returns {string} sanitized slug (possibly empty)
 */
function slugify(str, maxLength = 20) {
  const slug = str
    .toLowerCase()
    .trim()
    .replace(/[^a-z0-9]/g, "-") // after toLowerCase no uppercase remains
    .replace(/-+/g, "-")
    .replace(/^-|-$/g, "")
    .slice(0, maxLength);
  return slug.replace(/-$/, "");
}
|
|
1101
|
+
// CLI registration record for the `devops console` subcommand.
var consoleCommand = { command: "console", oneLiner: oneLiner6, keyExamples: keyExamples6, run: run6 };
|
|
1102
|
+
|
|
1103
|
+
// src/cli/core/constant.ts
|
|
1104
|
+
// Help text for `devops constant`.
var oneLiner7 = "Prints to stdout a constant from constant.yaml";
var keyExamples7 = `$ devops constant infra`;
var usage7 = `
${oneLiner7}

GENERAL USAGE
devops constant <constant-name>

EXAMPLES
${keyExamples7}
`;
|
|
1115
|
+
// Entry point for `devops constant <name>`: resolves the named constant and
// prints it to stdout.
async function run7(cmdObj) {
  if (cmdObj.help || cmdObj.args.length === 0) printUsageAndExit(usage7);
  const constantName = cmdObj.args[0];
  console.log(getConst(constantName));
}
|
|
1120
|
+
// CLI registration record for the `devops constant` subcommand.
var constant = { oneLiner: oneLiner7, keyExamples: keyExamples7, run: run7 };
|
|
1121
|
+
|
|
1122
|
+
// src/libs/validate-env.ts
|
|
1123
|
+
import fs2 from "fs";
|
|
1124
|
+
import yaml3 from "yaml";
|
|
1125
|
+
// Validates process.env against a set of env.yaml requirement files, and
// warns about .env keys that no env.yaml declares. Pipeline: load + validate
// yaml files -> load + parse .env files -> print warnings/errors and exit(1)
// on any error.
var CombinedEnvValidator = class {
  envYamlPaths;
  dotEnvPaths;
  // One SingleEnvValidator per env.yaml path.
  yamlValidators = [];
  // One DotEnvParser per .env path.
  dotEnvParsers = [];
  // Union of keys declared across all env.yaml files.
  keysFromYamlFiles = /* @__PURE__ */ new Set();
  // key -> list of .env paths where the key appears.
  keysFromDotEnvFiles = {};
  // key -> list of error strings collected from the validators.
  errors = {};
  warnings = [];
  constructor(envYamlPaths, dotEnvPaths = []) {
    // Paths matching IGNORED_PATHS are excluded from validation entirely.
    this.envYamlPaths = envYamlPaths.filter(
      (path8) => !IGNORED_PATHS.some((ignoredPath) => path8.includes(ignoredPath))
    );
    this.dotEnvPaths = dotEnvPaths;
  }
  // Runs the whole validation pipeline; may call process.exit(1).
  validate() {
    this._handleYamlFiles();
    this._handleDotEnvFiles();
    this._finalize();
  }
  _handleYamlFiles() {
    this._loadYamlFiles(this.envYamlPaths);
    this._validateYamlFiles();
    this._haltIfParsingErrors();
    this._extractErrors();
  }
  _handleDotEnvFiles() {
    this._loadDotEnvFiles(this.dotEnvPaths);
    this._parseDotEnvFiles();
    this._combineDotEnvFiles();
    this._extractWarnings();
  }
  _loadYamlFiles(envYamlPaths) {
    envYamlPaths.forEach((path8) => {
      const validator = new SingleEnvValidator(path8);
      this.yamlValidators.push(validator);
    });
  }
  _validateYamlFiles() {
    this.yamlValidators.forEach((x) => x.validate());
  }
  // Structural errors in any env.yaml abort before value validation runs.
  _haltIfParsingErrors() {
    const filesWithParsingErrors = this.yamlValidators.filter(
      (validator) => Boolean(validator.parsingError)
    );
    if (filesWithParsingErrors.length === 0) return;
    console.error("The following env.yaml files have parsing errors:");
    filesWithParsingErrors.forEach((validator) => {
      console.error(`  ${validator.parsingError}`);
    });
    process.exit(1);
  }
  // Collects declared keys and per-key errors from each validator.
  _extractErrors() {
    this.yamlValidators.forEach((validator) => {
      Object.keys(validator.parsedEnvYaml ?? {}).forEach((envVar) => {
        this.keysFromYamlFiles.add(envVar);
      });
      Object.entries(validator.errors).forEach(([envVar, error]) => {
        this.errors[envVar] ??= [];
        this.errors[envVar].push(error);
      });
    });
  }
  _loadDotEnvFiles(dotEnvPaths = []) {
    dotEnvPaths.forEach((path8) => {
      const parser = new DotEnvParser(path8);
      this.dotEnvParsers.push(parser);
    });
  }
  _parseDotEnvFiles() {
    this.dotEnvParsers.forEach((x) => x.parse());
  }
  // Inverts parser results into key -> [paths].
  _combineDotEnvFiles() {
    this.dotEnvParsers.forEach((parser) => {
      if (!parser.keys) return;
      parser.keys.forEach((key) => {
        this.keysFromDotEnvFiles[key] ??= [];
        this.keysFromDotEnvFiles[key].push(parser.path);
      });
    });
  }
  // A .env key not declared in any env.yaml is a warning, not an error.
  _extractWarnings() {
    const unusedKeys = Object.keys(this.keysFromDotEnvFiles).filter(
      (x) => !this.keysFromYamlFiles.has(x)
    );
    unusedKeys.forEach((x) => {
      this.warnings.push(`${x} in: ${this.keysFromDotEnvFiles[x].join(", ")}`);
    });
  }
  // Prints warnings (non-fatal) and errors (fatal: exit 1).
  _finalize() {
    if (this.warnings.length > 0) {
      console.error(
        "WARNING: some env variables exist in .env but not in env.yaml:"
      );
      this.warnings.forEach((warning) => console.error(`  ${warning}`));
      console.error();
    }
    if (Object.keys(this.errors).length > 0) {
      Object.entries(this.errors).forEach(([key, errors]) => {
        console.error(`Errors for ${key}:`);
        errors.forEach((error) => console.error(`  ${error}`));
        console.error();
      });
      console.error();
      process.exit(1);
    }
  }
};
|
|
1233
|
+
// Extracts the variable names declared in a single .env file.
// `keys` remains undefined when the file does not exist or is empty.
var DotEnvParser = class {
  path;
  keys;
  constructor(path8) {
    this.path = path8;
  }
  // Reads the file (if present) and records its declared keys.
  parse() {
    const contents = this._readFile(this.path);
    if (contents) this.keys = this._parse(contents);
  }
  _readFile(path8) {
    if (!fs2.existsSync(path8)) return;
    return fs2.readFileSync(path8).toString();
  }
  // Strips `#` comments, skips blank lines, and keeps the left-hand side of
  // every `KEY=...` assignment (lines without `=` are ignored).
  _parse(text) {
    const keys = [];
    for (const rawLine of text.split("\n")) {
      const stripped = rawLine.replace(/#.*$/, "").trim();
      if (!stripped) continue;
      const segments = stripped.split("=").map((segment) => segment.trim());
      if (segments.length > 1) keys.push(segments[0]);
    }
    return keys;
  }
};
|
|
1254
|
+
// Parses one env.yaml requirement file and checks process.env against it.
// env.yaml is a YAML array where each entry is either a bare string (meaning
// "required") or a one-key object mapping a variable name to "optional",
// "boolean", or an array of allowed values.
var SingleEnvValidator = class {
  envYamlPath;
  // Map of varName -> requirement, or undefined on parse failure/missing file.
  parsedEnvYaml;
  // Structural error for the whole file (set at most once).
  parsingError;
  // varName -> error message for value-level failures.
  errors = {};
  constructor(envYamlPath) {
    this.envYamlPath = envYamlPath;
  }
  validate() {
    this.parsedEnvYaml = this._parse();
    if (!this.parsingError) this._addAllErrors();
  }
  _readFile() {
    if (!fs2.existsSync(this.envYamlPath)) {
      console.error(`Skipping ${this.envYamlPath}: does not exist`);
      return;
    }
    return yaml3.parse(fs2.readFileSync(this.envYamlPath).toString());
  }
  _generateError(message) {
    return `Error in ${this.envYamlPath}: ${message}`;
  }
  _setParsingError(message) {
    this.parsingError = this._generateError(message);
  }
  _addError(key, message) {
    this.errors[key] = this._generateError(message);
  }
  // Normalizes the YAML array into { varName: requirement }.
  // NOTE: a `return` inside the forEach callback only skips that entry's
  // remaining processing; later entries are still visited after a parsing
  // error is set (the error still halts validation upstream).
  _parse() {
    const allEnv = {};
    const envManifest = this._readFile();
    if (!envManifest) return;
    if (!(envManifest instanceof Array)) {
      this._setParsingError(`env.yaml file must resolve to an array`);
      return;
    }
    envManifest.forEach((env2) => {
      if (env2 instanceof Object) {
        const entries = Object.entries(env2);
        if (entries.length > 1) {
          this._setParsingError(
            `every object in env.yaml must have one key. Error near: ${entries[0][0]}`
          );
          return;
        }
        const [name, value] = entries[0];
        if (!(value instanceof Array) && !["optional", "boolean"].includes(value)) {
          this._setParsingError(
            `invalid value for ${name}: ${JSON.stringify(value)}`
          );
          return;
        }
        allEnv[name] = value;
      } else {
        // Bare string entry: the variable is simply required.
        allEnv[env2] = "required";
      }
    });
    return allEnv;
  }
  // Checks each declared variable against process.env.
  // NOTE(review): a missing "boolean" variable is reported as "required but
  // missing" (the first branch wins) — an optional boolean is not expressible.
  _addAllErrors() {
    Object.entries(this.parsedEnvYaml).forEach(([key, requirement]) => {
      const value = process.env[key];
      if (requirement !== "optional" && !value) {
        this._addError(key, `${key} is required but missing`);
      } else if (requirement === "boolean" && !["true", "false"].includes(String(value))) {
        this._addError(
          key,
          `${key} must be either true or false. Value: ${value}`
        );
      } else if (requirement instanceof Array && !requirement.includes(value ?? "")) {
        this._addError(
          key,
          `${key} must be one of ${requirement.join(", ")}. Value: ${value}`
        );
      }
    });
  }
};
|
|
1332
|
+
|
|
1333
|
+
// src/cli/core/env.ts
|
|
1334
|
+
// Help text for `devops env`.
// Fix: corrected user-facing typo "printes" -> "prints" in the `get` command
// description.
var oneLiner8 = "Commands to manipulate env variables";
var keyExamples8 = `
$ devops env get --env staging
$ devops env get KEY1 KEY2 --env staging
$ devops env set KEY1=123 KEY2=345 --env staging
$ devops env delete KEY1 KEY2 --env staging
$ devops env validate
`;
var usage8 = `
${oneLiner8}

COMMANDS
get Fetches secrets for the chosen environment and prints them to console
set Sets specific secrets for the chosen environment
delete Deletes specific secrets for the chosen environment
validate Validate locally, verifying the existence and type of environment variables against all env.yaml files

EXAMPLES
${keyExamples8}
`;
|
|
1354
|
+
// Entry point for `devops env <command>`: dispatches to secret get/set/delete
// or to env.yaml validation. `_validate` is the internal re-invocation used
// by `validate` after the file list has been expanded.
function run8(cmdObj) {
  if (cmdObj.help || cmdObj.args.length === 0) printUsageAndExit(usage8);
  const [command, ...rest] = cmdObj.args;
  switch (command) {
    case "validate": {
      // Re-invokes the CLI with the resolved env.yaml file list so that the
      // executor environment is applied before validation runs.
      const envYamlFiles = globEnvYamlFiles();
      cmdObj.executorFromEnv(
        `devops env _validate ${envYamlFiles.join(" ")}`,
        { quiet: false }
      ).exec();
      break;
    }
    case "_validate": {
      const options = cmdObj.parseOptions({ booleans: ["--skip-dotenv"] });
      const [_subcmd, ...envYamlFiles] = options.args;
      const envFiles = options.options["--skip-dotenv"] ? [] : dotEnvFilesForEnv(cmdObj.env);
      const validator = new CombinedEnvValidator(envYamlFiles, envFiles);
      validator.validate();
      break;
    }
    case "get": {
      console.log(getMonorepoSecretStr(cmdObj.env, rest));
      break;
    }
    case "set": {
      setMonorepoSecret(cmdObj.env, rest);
      break;
    }
    case "delete": {
      deleteMonorepoSecret(cmdObj.env, rest);
      break;
    }
    default: {
      printUsageAndExit(usage8);
    }
  }
}
|
|
1391
|
+
// CLI registration record for the `devops env` subcommand.
var env = { oneLiner: oneLiner8, keyExamples: keyExamples8, run: run8 };
|
|
1392
|
+
|
|
1393
|
+
// src/cli/core/image.ts
|
|
1394
|
+
// Help text for `devops image`.
// Fixes: user-facing typos "retrieves or set" -> "retrieves or sets" and
// "MANAGINE APPLICATION SCALE" -> "MANAGING APPLICATION SCALE".
var oneLiner9 = "Applies image-related manifests, retrieves or sets the version deployed, and scales deployments of applications";
var keyExamples9 = `
$ devops image get type main-node
$ devops image deployment gen main-node sha --env staging
$ devops image deployment create main-node sha --env staging
$ devops image deployment delete main-node --env staging
$ devops image version get main-node --env staging
$ devops image version set main-node sha --env staging
$ devops image version unset main-node --env staging
$ devops image scale get main-node --env staging
$ devops image scale get main-node www --env staging
$ devops image scale set main-node www 3 --env staging
$ devops image scale unset main-node --env staging
$ devops image scale unset main-node www --env staging
`.trim();
var usage9 = `
${oneLiner9}

GET IMAGE TYPE
devops image get type <image-name>

Returns "k8s" or "cloudrun" depending on the image type.

GENERATING DEPLOYMENT MANIFESTS
devops image deployment gen|create|delete <image-name> <sha>

gen - generates the manifest file with all applications that are deployed with the image
create - generates the manifest file and then runs kubectl apply
delete - generates the manifest file and then runs kubectl delete

MANAGING APPLICATION SCALE
devops image scale get|unset <image-name> [<workspace-name>]
devops image scale set <image-name> <workspace-name> <replica-count>

set - sets the scale count in the image's config map so that it persists across deployments.
get - retrieves the current scale count. If workspace is not provided, all workspaces are returned.
unset - resets the scale count by removing the key from the image's config map. If workspace is not provided, all workspaces are reset.

Both 'set' and 'unset' return the previous scale count prior to the operation.

MANAGING IMAGE VERSIONS
devops image version get|unset <image>
devops image version set <image> <sha>

Meant to be called during deployment in order to maintain a record of the most recent deployed version.

EXAMPLES
${keyExamples9}
`;
|
|
1443
|
+
// Dispatch table for `devops image <command> <subcommand>`; each leaf handler
// receives a StrongParams instance (see run9) and reads its named parameters.
var handlers2 = {
  get: {
    // Prints the image type ("k8s" or "cloudrun").
    type: (opts) => {
      console.log(getImageType(opts.required("image")));
    }
  },
  deployment: {
    // Prints the rendered deployment manifests without applying them.
    gen: (opts) => {
      console.log(
        generateImageDeployments(
          opts.required("env"),
          opts.required("image"),
          opts.required("sha")
        )
      );
    },
    // Renders and `kubectl apply`s the deployment manifests.
    create: (opts) => {
      applyHandler(
        "apply-deployment-",
        "apply",
        generateImageDeployments(
          opts.required("env"),
          opts.required("image"),
          opts.required("sha")
        )
      );
    },
    // Renders and `kubectl delete`s the deployment manifests; the sha is not
    // meaningful for deletion, so a placeholder is used.
    delete: (opts) => {
      applyHandler(
        "delete-deployment-",
        "delete",
        generateImageDeployments(
          opts.required("env"),
          opts.required("image"),
          "dummy-sha"
        )
      );
    }
  },
  scale: {
    // Persists a replica count in the image's config map; prints the previous
    // value when one existed.
    set: (opts) => {
      const workspace = opts.required("workspace");
      const image2 = opts.required("image");
      const replicas = Number(opts.required("replicas"));
      const res = setWorkspaceScale(
        opts.required("env"),
        image2,
        workspace,
        replicas
      );
      if (res) {
        console.warn(
          `Scale for ${workspace} in ${image2} set to ${replicas}. Previous value:`
        );
        console.log(res);
      }
    },
    // Prints the scale for one workspace, or all workspaces when omitted.
    get: (opts) => {
      const workspace = opts.optional("workspace");
      if (!workspace) {
        console.log(
          getWorkspaceScale(
            opts.required("env"),
            opts.required("image")
          )
        );
      } else {
        console.log(
          getWorkspaceScale(
            opts.required("env"),
            opts.required("image"),
            workspace
          )
        );
      }
    },
    // Removes the persisted scale (one workspace or all) and prints the
    // previous value(s).
    unset: (opts) => {
      const image2 = opts.required("image");
      const workspace = opts.optional("workspace");
      const prev = resetWorkspaceScale(opts.required("env"), image2, workspace);
      if (workspace) {
        console.warn(`Scale for ${workspace} in ${image2} unset. Previous scale:`);
        console.log(prev);
      } else {
        console.warn(
          `Scale for all workspaces in ${image2} unset. Previous scale:`
        );
        console.log(prev);
      }
    }
  },
  version: {
    // Prints the recorded deployed version (empty line when none recorded).
    get: (opts) => {
      const version = getImageVersion(
        opts.required("env"),
        opts.required("image")
      );
      console.log(version ?? "");
    },
    set: (opts) => {
      setImageVersion(
        opts.required("env"),
        opts.required("image"),
        opts.required("sha")
      );
    },
    unset: (opts) => {
      deleteImageVersion(
        opts.required("env"),
        opts.required("image")
      );
    }
  }
};
|
|
1557
|
+
// Entry point for `devops image ...`: resolves the handler from handlers2,
// assembles positional params into named StrongParams fields, and invokes it.
function run9(cmdObj) {
  if (cmdObj.help || cmdObj.args.length < 1) printUsageAndExit(usage9);
  const [command, subcommand, image2, param1, param2] = cmdObj.args;
  const commandHandler = handlers2[command];
  if (!commandHandler) {
    console.error(`Unknown command: ${command}`);
    printUsageAndExit(usage9);
  }
  const handler = commandHandler[subcommand];
  if (!handler) {
    console.error(`Unknown subcommand: ${subcommand}`);
    printUsageAndExit(usage9);
  }
  // Maps trailing positional args to handler-specific named params:
  //   get            -> (none)
  //   scale set      -> workspace + replicas
  //   scale get/unset-> workspace (optional)
  //   everything else-> sha
  function getExtraParams() {
    if (command === "get") {
      return {};
    } else if (command === "scale") {
      return subcommand === "set" ? { workspace: param1, replicas: param2 } : { workspace: param1 };
    } else {
      return { sha: param1 };
    }
  }
  const params = new StrongParams(usage9, {
    env: cmdObj.env,
    subcommand,
    image: image2,
    ...getExtraParams()
  });
  handler(params);
}
|
|
1587
|
+
// CLI registration record for the `devops image` subcommand.
var image = { oneLiner: oneLiner9, keyExamples: keyExamples9, run: run9 };
|
|
1588
|
+
|
|
1589
|
+
// src/cli/core/init.ts
|
|
1590
|
+
import inquirer from "inquirer";
|
|
1591
|
+
|
|
1592
|
+
// src/libs/init-generator.ts
|
|
1593
|
+
import path4 from "path";
|
|
1594
|
+
import fs3 from "fs-extra";
|
|
1595
|
+
import chalk3 from "chalk";
|
|
1596
|
+
import fg from "fast-glob";
|
|
1597
|
+
// Absolute path to the scaffolding templates bundled with this package.
var templatesDir = path4.join(pkgRoot, "src/target-templates");
// Generated files are written relative to the directory the CLI runs in.
var targetDir = process.cwd();
|
|
1599
|
+
// Plans and materializes the files created by `devops init`: files are first
// registered (copied from templates or generated in memory), optionally
// flagged for $VAR substitution or given a follow-up message, then written
// out by run(). Existing files are never overwritten.
var InitGenerator = class {
  // Project name read from ./package.json, when present.
  projectName;
  /** The key is targetRel */
  files = {};
  constructor() {
    if (fs3.existsSync("package.json")) {
      const packageJson = fs3.readJSONSync("package.json");
      this.projectName = packageJson.name;
    }
  }
  // Throws when targetRel was never registered.
  _ensureFileExists(targetRel) {
    if (!this.files[targetRel]) {
      throw new Error(`File for target "${targetRel}" not found.`);
    }
  }
  // Marks a previously-registered copied file for $VAR substitution at write
  // time. (Name keeps the original's "Subtitution" spelling for API compat.)
  enableSubtitution(targetRel) {
    this._ensureFileExists(targetRel);
    if (this.files[targetRel].type !== "copied") {
      throw new Error(`File for target "${targetRel}" is not a copied file.`);
    }
    this.files[targetRel].enableSubstitution = true;
  }
  // Attaches a callback producing an optional "next steps" message for a file.
  setMessageGenerator(targetRel, messageGen) {
    this._ensureFileExists(targetRel);
    this.files[targetRel].messageGenerator = messageGen;
  }
  // Registers a file whose content is provided in memory.
  addGeneratedFile(targetRel, content) {
    const targetAbs = path4.join(targetDir, targetRel);
    const targetFolderAbs = path4.dirname(targetAbs);
    const exists = fs3.existsSync(targetAbs);
    this.files[targetRel] = {
      type: "generated",
      targetRel,
      targetAbs,
      targetFolderAbs,
      targetExists: exists,
      content
    };
  }
  /**
   * @param source relative path under the templates folder. All files and folders under `source` are copied directly under `target`.
   * @param target relative path under the project root folder.
   * If the target file exists already in `files`, it will be overridden.
   */
  addCopiedFolder(source, target) {
    const pathPrefix = path4.join(templatesDir, source);
    const glob = path4.join(pathPrefix, "**/*");
    fg.globSync(glob, { dot: true }).forEach((sourceAbs) => {
      const sourceRel = path4.relative(templatesDir, sourceAbs);
      const pathUnderSource = path4.relative(pathPrefix, sourceAbs);
      const targetRel = path4.join(target, pathUnderSource);
      const targetAbs = path4.join(targetDir, targetRel);
      const targetFolderAbs = path4.dirname(targetAbs);
      const exists = fs3.existsSync(targetAbs);
      this.files[targetRel] = {
        type: "copied",
        sourceRel,
        targetRel,
        sourceAbs,
        targetAbs,
        targetFolderAbs,
        targetExists: exists
      };
    });
  }
  // Writes all registered files, then prints accumulated "next steps"
  // messages. Pre-existing targets are skipped (message generators still run).
  run({
    substitution = {},
    messages = []
  }) {
    const fileMessages = [];
    Object.values(this.files).forEach((fileInfo) => {
      if (fileInfo.messageGenerator) {
        const message = fileInfo.messageGenerator(fileInfo.targetExists, fileInfo);
        if (message) {
          fileMessages.push(message);
        }
      }
      if (fileInfo.targetExists) {
        console.log(`Skipped ${chalk3.yellow(fileInfo.targetRel)} (exists)`);
        return;
      }
      if (!fs3.existsSync(fileInfo.targetFolderAbs)) {
        fs3.mkdirSync(fileInfo.targetFolderAbs, { recursive: true });
      }
      if (fileInfo.type === "generated") {
        fs3.writeFileSync(fileInfo.targetAbs, fileInfo.content, "utf8");
      } else if (fileInfo.enableSubstitution) {
        // Replace $UPPER_CASE placeholders; an unknown placeholder aborts.
        const content = fs3.readFileSync(fileInfo.sourceAbs, "utf8");
        const substitutedContent = content.replace(/\$([A-Z_]+)/g, (_2, varName) => {
          const value = substitution[varName];
          if (!value) {
            throw new Error(`${chalk3.blue("TemplateCopier.run()")}: Variable ${chalk3.yellow(varName)} is needed by ${chalk3.yellow(fileInfo.targetRel)} but is undefined.`);
          }
          return value;
        });
        fs3.writeFileSync(fileInfo.targetAbs, substitutedContent);
      } else {
        fs3.copySync(fileInfo.sourceAbs, fileInfo.targetAbs, {
          overwrite: false,
          errorOnExist: false,
          dereference: false
        });
      }
      console.log(`Created ${chalk3.green(fileInfo.targetRel)}`);
    });
    const allMessages = [...messages, ...fileMessages];
    if (!allMessages.length) return;
    console.log(chalk3.blue("\nNext steps:"));
    allMessages.forEach((msg, i) => {
      console.log(`${i + 1}. ${msg}
`);
    });
  }
};
|
|
1713
|
+
|
|
1714
|
+
// src/cli/core/init.ts
|
|
1715
|
+
import chalk4 from "chalk";
|
|
1716
|
+
import fs4 from "fs-extra";
|
|
1717
|
+
// CLI metadata for `devops init`: one-line summary, key examples, and the
// full usage text printed on --help.
var oneLiner10 = "Initializes the devops utility by copying template files to the current folder";
var keyExamples10 = `$ devops init`;
var usage10 = `
${oneLiner10}

NOTE
No files are overwritten.

EXAMPLES
${keyExamples10}
`;
|
|
1728
|
+
// Entry point for `devops init`: prints usage on --help, otherwise starts
// the interactive template-creation flow.
async function run10(cmdObj) {
  if (cmdObj.help) {
    printUsageAndExit(usage10);
  }
  createFiles();
}
var init = { oneLiner: oneLiner10, keyExamples: keyExamples10, run: run10 };
|
|
1733
|
+
/**
 * Interactive flow behind `devops init`: prompts the user for project
 * choices, registers the matching template files/folders on an
 * InitGenerator, and finally runs it with the variable substitutions
 * the templates require.
 */
async function createFiles() {
  const generator = new InitGenerator();
  const choices = await getUserChoices(generator.projectName);
  // The base TypeScript layout is always installed.
  generator.addCopiedFolder("lang-variants-common/typescript", ".");
  if (choices.usePython) {
    generator.addCopiedFolder("lang-variants-common/python", ".");
    generator.enableSubtitution("pyproject.toml");
  }
  generator.enableSubtitution(".devops/config/images.yaml");
  generator.setMessageGenerator(".envrc", envrcMessage);
  const gitIgnore = gitIgnoreContent(choices.infraVariant, choices.usePython);
  generator.addGeneratedFile(".gitignore", gitIgnore);
  generator.setMessageGenerator(".gitignore", gitignoreMessageGen(gitIgnore));
  // Infrastructure-specific templates.
  generator.addCopiedFolder(`infra-variants/${choices.infraVariant}`, ".");
  generator.enableSubtitution(".devops/config/constants.yaml");
  if (choices.infraVariant === "hetzner") {
    generator.enableSubtitution(".devops/infra/hetzner/harbor-cert.yaml");
    generator.enableSubtitution(".devops/infra/hetzner/harbor-values.yaml");
    generator.enableSubtitution(".devops/infra/hetzner/hcloud-config.yaml");
  }
  if (choices.usePrisma) {
    generator.addCopiedFolder("lang-variants-prisma/typescript", ".");
    if (choices.usePython) {
      generator.addCopiedFolder("lang-variants-prisma/python", ".");
    }
  }
  // Optional cluster resources picked in the checkbox prompt.
  const clusterResources = new Set(choices.clusterResources);
  if (clusterResources.has("dns-test")) {
    generator.addCopiedFolder("cluster-resource-options/dns-test", ".devops/infra/dns-test");
  }
  if (clusterResources.has("monitoring-ingress")) {
    generator.addCopiedFolder("cluster-resource-options/monitoring-ingress", ".devops/infra/monitoring-ingress");
  }
  if (clusterResources.has("postgres")) {
    generator.addCopiedFolder("cluster-resource-options/postgres", ".devops/infra/postgres");
    generator.enableSubtitution(".devops/infra/postgres/staging/configurations/07-SGObjectStorage.yaml");
    generator.enableSubtitution(".devops/infra/postgres/staging/configurations/08-SGScript.yaml");
    generator.enableSubtitution(".devops/infra/postgres/production/configurations/07-SGObjectStorage.yaml");
    generator.enableSubtitution(".devops/infra/postgres/production/configurations/08-SGScript.yaml");
  }
  if (clusterResources.has("redis")) {
    generator.addCopiedFolder("cluster-resource-options/redis", ".devops/infra/redis");
  }
  if (clusterResources.has("milvus")) {
    generator.addCopiedFolder("cluster-resource-options/milvus", ".devops/infra/milvus");
  }
  // Prefect only makes sense when Python support was requested.
  if (clusterResources.has("prefect") && choices.usePython) {
    generator.addCopiedFolder("cluster-resource-options/prefect", ".devops/infra/prefect");
  }
  generator.run({
    substitution: {
      "PROJECT_NAME": choices.projectName,
      "STAGING_DOMAIN": choices.stagingDomain,
      "PRODUCTION_DOMAIN": choices.productionDomain,
      "GCLOUD_PROJECT_ID": choices.gcloudProjectId,
      "REGISTRY_IMAGE_PATH_PREFIX": choices.registryImagePathPrefix,
      "REGISTRY_BASE_URL": choices.registryBaseUrl
    },
    messages: [
      packageJsonMessage(choices.usePrisma)
    ]
  });
}
|
|
1796
|
+
// Builds the "next steps" note asking the user to add a workspaces entry to
// the root package.json; includes db/dml workspaces when Prisma is enabled.
function packageJsonMessage(usePrisma) {
  const prismaEntries = usePrisma ? `,
"db/**",
"dml/**"` : "";
  const workspacesSnippet = chalk4.yellow(`"workspaces": [
"libs/**",
"applications/**"${prismaEntries}
],`);
  return `add the following entry to the main ${chalk4.blue("package.json")}:
${workspacesSnippet}`;
}
|
|
1806
|
+
// Assembles the generated .gitignore: a common section, plus a gcloud
// auth-plugin cache entry for gcloud infra, plus Python artifacts when
// Python support is enabled.
function gitIgnoreContent(infraVariant, usePython) {
  const parts = ["**/.env*\nconfig/kubeconfig\ntmp/**\n!tmp/**/.gitkeep"];
  if (infraVariant === "gcloud") {
    parts.push("config/gke_gcloud_auth_plugin_cache");
  }
  if (usePython) {
    parts.push("venv/\n**/__pycache__");
  }
  return parts.join("\n");
}
|
|
1816
|
+
// Returns a message generator for .gitignore: silent when the file was just
// created, otherwise asks the user to merge the generated rules by hand.
function gitignoreMessageGen(content) {
  return (exists) => exists ? `add the following to your ${chalk4.blue(".gitignore")}:
${chalk4.yellow(content)}` : undefined;
}
|
|
1823
|
+
// Message generator for .envrc. When the target already existed, shows the
// template's contents so the user can merge them manually; otherwise just
// reminds the user to enable direnv.
function envrcMessage(targetExists, fileInfo) {
  if (fileInfo.type !== "copied") throw new Error(`envrcMessage() expects a copied file, got ${fileInfo.type}`);
  if (!targetExists) {
    return `Enable ${chalk4.blue(".envrc")} by installing ${chalk4.blue("direnv")} and running ${chalk4.yellow("direnv allow")}`;
  }
  const content = fs4.readFileSync(fileInfo.sourceAbs, "utf-8");
  return `add the following to your ${chalk4.blue(".envrc")} and run ${chalk4.yellow("direnv allow")}:
${chalk4.yellow(content)}`;
}
|
|
1833
|
+
// Runs the interactive questionnaire behind `devops init` and resolves to
// the user's answers. `projectName`, when available, seeds the default for
// the first question.
function getUserChoices(projectName) {
  const defaultProjectName = projectName || "changeme";
  const questions = [
    {
      type: "input",
      name: "projectName",
      message: `Enter the project name (default: '${defaultProjectName}')`,
      default: defaultProjectName
    },
    {
      type: "input",
      name: "stagingDomain",
      message: "Enter the staging domain (default: 'staging.com')",
      default: "staging.com"
    },
    {
      type: "input",
      name: "productionDomain",
      message: "Enter the production domain (default: 'production.com')",
      default: "production.com"
    },
    {
      type: "list",
      name: "infraVariant",
      message: "Where does your cluster run?",
      choices: [
        { name: "Google Cloud", value: "gcloud" },
        { name: "Digital Ocean", value: "digitalocean" },
        { name: "Hetzner", value: "hetzner" }
      ]
    },
    // The next three questions only appear for their matching infra choice.
    {
      type: "input",
      name: "gcloudProjectId",
      message: "Enter the GCP project ID (default: 'changeme')",
      default: "changeme",
      when: (answers) => answers.infraVariant === "gcloud"
    },
    {
      type: "input",
      name: "registryImagePathPrefix",
      message: (answers) => `Enter your Digital Ocean container registry name (default: '${answers.projectName}')`,
      default: (answers) => answers.projectName,
      when: (answers) => answers.infraVariant === "digitalocean"
    },
    {
      type: "input",
      name: "registryBaseUrl",
      message: (answers) => `Enter your registry base URL (default: 'registry.${answers.stagingDomain}')`,
      default: (answers) => `registry.${answers.stagingDomain}`,
      when: (answers) => answers.infraVariant === "hetzner"
    },
    {
      type: "confirm",
      name: "usePython",
      message: "Add support for Python?",
      default: true
    },
    {
      type: "confirm",
      name: "usePrisma",
      message: "Add support for Prisma?",
      default: true
    },
    {
      type: "checkbox",
      name: "clusterResources",
      message: "Optional manifests and helm charts to add",
      // Prefect is only offered when Python support was requested.
      choices: (answers) => [
        { name: "Manifest to test DNS setup", value: "dns-test" },
        { name: "Manifest to setup ingress for graphana and prometheus", value: "monitoring-ingress" },
        { name: "Stackgres CRDs and manifests for Postgres", value: "postgres" },
        { name: "Redis Helm chart values", value: "redis" },
        { name: "Milvus helm chart values", value: "milvus" },
        ...answers.usePython ? [{ name: "Prefect Helm chart values", value: "prefect" }] : []
      ]
    }
  ];
  return inquirer.prompt(questions);
}
|
|
1912
|
+
|
|
1913
|
+
// src/libs/k8s-job-waiter.ts
|
|
1914
|
+
// Seconds between successive kubectl status polls.
var POLL_INTERVAL_SEC = 1;
// Polls kubernetes Jobs until each reaches a terminal state (success,
// failure, or timeout) and records the outcome — including failure logs —
// in `jobStatuses`, keyed by job name.
var K8sJobWaiter = class {
  constructor(monorepoEnv, timeoutInS) {
    this.monorepoEnv = monorepoEnv;
    this.namespace = envToNamespace(monorepoEnv);
    this.jobStatuses = {};
    this.timeoutInMs = timeoutInS * 1e3;
  }
  timeoutInMs;
  namespace;
  jobStatuses;
  // Polls one job until it succeeds, fails, or the timeout elapses.
  async pollJob(job2) {
    const startedAt = Date.now();
    for (;;) {
      const counts = await this.fetchStatus(job2);
      const elapsed = Date.now() - startedAt;
      if (counts.failure > 0) {
        // Capture pod logs before recording the failure.
        const error = await this.fetchError(job2);
        this.jobStatuses[job2] = { status: "failure", elapsed, error };
        return;
      }
      if (counts.success > 0) {
        this.jobStatuses[job2] = { status: "success", elapsed };
        return;
      }
      if (elapsed >= this.timeoutInMs) {
        this.jobStatuses[job2] = { status: "timeout", elapsed };
        return;
      }
      await new Promise((resolve) => setTimeout(resolve, POLL_INTERVAL_SEC * 1e3));
    }
  }
  // Reads the Job's failed/succeeded counters via kubectl jsonpath.
  async fetchStatus(job2) {
    const output = new CommandExecutor(
      kubectlCommand(
        `get job ${job2} -o jsonpath='{.status.failed},{.status.succeeded}'`,
        { namespace: this.namespace }
      )
    ).exec();
    const [failure, success] = output.split(",").map(Number);
    return { failure, success };
  }
  // Best-effort fetch of the failed job's pod logs; never throws.
  async fetchError(job2) {
    try {
      const podName = new CommandExecutor(
        kubectlCommand(`get pod -l job-name=${job2} -o name`, {
          namespace: this.namespace
        })
      ).exec();
      return new CommandExecutor(
        kubectlCommand(`logs ${podName}`, { namespace: this.namespace })
      ).exec();
    } catch (e) {
      console.log("Error fetching logs for job", { job: job2, e });
      return `<COULD NOT FETCH ERROR FOR ${job2}>`;
    }
  }
};
|
|
1973
|
+
// Waits for all the given jobs in parallel and resolves to a map of
// job name -> { status, elapsed, error? }.
async function k8sJobWaiter(monorepoEnv, timeoutInS, jobs) {
  const waiter = new K8sJobWaiter(monorepoEnv, timeoutInS);
  await Promise.all(jobs.map((name) => waiter.pollJob(name)));
  return waiter.jobStatuses;
}
|
|
1978
|
+
// Prints a per-job outcome summary (with elapsed seconds), dumps logs for
// failed jobs, and exits the process with code 1 when any job failed or
// timed out. On full success, prints a short status line per job.
function printJobStatuses(statuses) {
  const entries = Object.entries(statuses);
  console.log("Statuses:");
  for (const [name, record] of entries) {
    console.log(
      `${name}: ${record.status} ${Math.round(record.elapsed / 1e3 * 100) / 100}s`
    );
  }
  console.log();
  for (const [name, record] of entries) {
    if (record.status !== "failure") continue;
    console.error(`Error for ${name}:`);
    console.error(record.error);
    console.error();
  }
  const failures = entries.filter(
    ([, record]) => ["failure", "timeout"].includes(record.status)
  );
  if (failures.length > 0) {
    console.error();
    console.error(
      `Some jobs did not succeed: ${failures.map(([name]) => name).join(", ")}`
    );
    console.error();
    process.exit(1);
  }
  for (const [name, record] of entries) {
    console.log(`${name}: ${record.status}`);
  }
}
|
|
2006
|
+
|
|
2007
|
+
// src/cli/core/job.ts
|
|
2008
|
+
// CLI metadata for `devops job`: one-liner, key examples, and --help text.
var oneLiner11 = "Creates a k8s job and waits for it to run";
var keyExamples11 = `
$ devops job db-migrate gen main-node <sha> --env staging
$ devops job db-migrate create main-node <sha> --env staging --timeout 120
`.trim();
var usage11 = `
${oneLiner11}

GENERAL USAGE
devops job db-migrate gen <image> <sha>
devops job db-migrate create <image> <sha> --timeout <timeout>

EXAMPLES
${keyExamples11}
`;
|
|
2023
|
+
// Subcommand handlers for `devops job db-migrate`.
// - gen:    prints the generated Job manifest to stdout.
// - create: applies the manifest and waits for the job to complete.
var handlers3 = {
  gen: (opts) => {
    console.log(
      generateDbMigrateJob(opts.required("env"), opts.required("image"), opts.required("sha"))
    );
  },
  create: async (opts) => {
    const env2 = opts.required("env");
    const image2 = opts.required("image");
    const sha = opts.required("sha");
    // Default wait time in seconds when --timeout is not supplied.
    const timeout = opts.optional("timeout") ?? "240";
    const manifest = generateDbMigrateJob(env2, image2, sha);
    const jobName = dbMigrateJobName(sha);
    applyHandler(`apply-${jobName}`, "apply", manifest);
    // `timeout` is already defaulted above; the previous second
    // `?? "240"` fallback here was redundant and has been removed.
    const statuses = await k8sJobWaiter(env2, Number(timeout), [jobName]);
    if (statuses && Object.keys(statuses).length > 0) {
      printJobStatuses(statuses);
    }
  }
};
|
|
2043
|
+
// Entry point for `devops job`. Validates the job name (only db-migrate is
// supported) and subcommand, then dispatches with strongly-checked params.
function run11(cmdObj) {
  if (cmdObj.help || cmdObj.args.length < 1) printUsageAndExit(usage11);
  const parsedArgs = cmdObj.parseOptions({
    params: ["--timeout"]
  });
  const [jobName, command, image2, sha] = parsedArgs.args;
  if (jobName !== "db-migrate") {
    console.error(`Unknown job: ${jobName}. Only db-migrate is supported at this time.`);
    process.exit(1);
  }
  const handler = handlers3[command];
  if (!handler) {
    console.error(`Unknown command: ${command}`);
    printUsageAndExit(usage11);
  }
  const params = new StrongParams(usage11, {
    env: cmdObj.env,
    image: image2,
    sha,
    timeout: parsedArgs.options["--timeout"]
  });
  handler(params);
}
var job = { oneLiner: oneLiner11, keyExamples: keyExamples11, run: run11 };
|
|
2068
|
+
|
|
2069
|
+
// src/libs/hetzner/reg-secret.ts
|
|
2070
|
+
// Registry-permission setup only applies to Harbor on a Hetzner cluster;
// warns and returns false for any other infra constant.
function isApplicable() {
  if (getConst("infra") === "hetzner") return true;
  console.warn(
    "Setting up registry permissions is only needed for Harbor in a Hetzner setup"
  );
  return false;
}
|
|
2080
|
+
// Copies harbor-registry-secret from the `harbor` namespace into the
// namespace of the given monorepo environment so its pods can pull images.
// No-op outside Hetzner setups.
function copySecretHarborToNamespace(monorepoEnv) {
  if (!isApplicable()) return;
  const getCmd = kubectlCommand("get secret harbor-registry-secret -o json", {
    monorepoEnv,
    namespace: "harbor"
  });
  const secretStr = new CommandExecutor(getCmd, { quiet: true }).exec();
  const {
    apiVersion,
    data,
    kind,
    metadata: { name },
    type
  } = JSON.parse(secretStr);
  // Keep only the portable fields and retarget the namespace.
  const relevantParts = {
    apiVersion,
    data,
    kind,
    metadata: { name, namespace: envToNamespace(monorepoEnv) },
    type
  };
  const copyCmd = `echo '${JSON.stringify(relevantParts)}' | kubectl apply -f -`;
  new CommandExecutor(copyCmd, { quiet: true }).exec();
}
|
|
2105
|
+
// Patches the default service account so its pods pull images using
// harbor-registry-secret. No-op outside Hetzner setups.
function patchServiceAccountImagePullSecret(monorepoEnv) {
  if (!isApplicable()) return;
  const patchCmd = kubectlCommand(
    `patch serviceaccount default -p '{"imagePullSecrets": [{"name": "harbor-registry-secret"}]}'`,
    { monorepoEnv }
  );
  new CommandExecutor(patchCmd, { quiet: true }).exec();
}
|
|
2113
|
+
|
|
2114
|
+
// src/cli/core/namespace.ts
|
|
2115
|
+
// CLI metadata for `devops namespace`. Fixes two typos in the user-facing
// help text: "namespcae" -> "namespace" and "namepace" -> "namespace".
var oneLiner12 = "Creates the basic prerequisites for a monorepo";
var keyExamples12 = `
$ devops namespace create --env staging
$ devops namespace delete --env staging
$ devops namespace check --env staging
`.trim();
var usage12 = `
${oneLiner12}

GENERAL USAGE
devops namespace create|delete|check --env <env>

'create' does the following:
1. Creates the namespace
2. Creates a secret to hold environment variables (used by devops env) and the base cryptographic secret
3. On Hetzner, copies the Harbor secret to the namespace and patches the default service account to use it

'delete' removes the namespace in kubernetes, which deletes all entities within it.

'check' returns exit code 0 if the namespace exists in kubernetes, 1 otherwise.

EXAMPLES
${keyExamples12}
`;
|
|
2139
|
+
// Subcommand handlers for `devops namespace`.
var handlers4 = {
  // Creates the namespace and its baseline secrets; on Hetzner also wires
  // up the Harbor image-pull secret.
  create(opts) {
    const env2 = opts.required("env");
    createNamespace(env2);
    createEmptyEnvSecret(env2);
    patchBaseSecret(env2);
    copySecretHarborToNamespace(env2);
    patchServiceAccountImagePullSecret(env2);
  },
  // Deletes the namespace (and everything inside it).
  delete(opts) {
    deleteNamespace(opts.required("env"));
  },
  // Exits 0 when the namespace exists; otherwise prints guidance and exits 1.
  check(opts) {
    if (checkEnvSetup(opts.required("env"))) return;
    console.error(`
The environment does not exist in the cluster.
In order to create resources for it in the cluster, it must be first set up. This is done to protect from unintentional resource creation.
To set up the environment, run the following from your dev machine:
$ devops namespace create --env <env>
`);
    process.exit(1);
  }
};
|
|
2163
|
+
// Entry point for `devops namespace`: dispatches create/delete/check.
function run12(cmdObj) {
  if (cmdObj.help || cmdObj.args.length < 1) printUsageAndExit(usage12);
  const [command] = cmdObj.args;
  const handler = handlers4[command];
  if (!handler) {
    console.error(`Unknown command: ${command}`);
    printUsageAndExit(usage12);
  }
  handler(new StrongParams(usage12, {
    env: cmdObj.env
  }));
}
var namespace = { oneLiner: oneLiner12, keyExamples: keyExamples12, run: run12 };
|
|
2177
|
+
|
|
2178
|
+
// src/cli/core/prep-build.ts
|
|
2179
|
+
import fs5 from "fs-extra";
|
|
2180
|
+
import os from "os";
|
|
2181
|
+
import path5 from "path";
|
|
2182
|
+
import chalk5 from "chalk";
|
|
2183
|
+
// CLI metadata for `devops prep-build`.
var oneLiner13 = "Copies all dependencies of an image to a temporary folder in preparation for a Docker build";
var keyExamples13 = `
$ devops prep-build main-node
`.trim();
var usage13 = `
${oneLiner13}

USAGE
devops prep-build <image> --env <env>

If <env> is a remote environment (e.g. staging, production), the environment variables are
fetched from the cluster and injected in case they are needed during the build process.

EXAMPLES
${keyExamples13}
`;
|
|
2199
|
+
/**
 * `devops prep-build <image>`: assembles a self-contained Docker build
 * context in a fresh temp folder and prints its path as the final stdout
 * line (all progress goes through console.warn, i.e. stderr).
 *
 * Fix: the project-copy loop used to log "COPYING .devops" and then skip
 * it; the skip now happens before the log line so the output is accurate.
 */
async function run13(cmdObj) {
  if (cmdObj.help || cmdObj.args.length === 0) printUsageAndExit(usage13);
  const [image2] = cmdObj.args;
  const imageData = getImageData(image2);
  const imageTemplate = imageData["image-template"];
  const dockerFile = `${imageTemplate}.Dockerfile`;
  const dockerFilePath = path5.join(".devops/docker-images", dockerFile);
  const dockerImagePayloadPath = path5.join(".devops/docker-images", imageTemplate);
  const dockerCommonPayloadPath = path5.join(".devops/docker-images", "common");
  const imageTemplateData = getTemplateData(imageTemplate);
  const copyCommon = imageTemplateData["copy-common"] ?? false;
  const imageExtraContent = imageTemplateData["extra-content"] ?? [];
  // Validate everything up front so we fail before creating the temp folder.
  if (!fs5.existsSync(dockerFilePath)) {
    console.error(`The dockerfile ${dockerFilePath} does not exist`);
    process.exit(1);
  }
  imageExtraContent.forEach((file) => {
    if (!fs5.existsSync(file)) {
      console.error(`The file ${file} is specified in the extra-content section of ${image2} but does not exist`);
      process.exit(1);
    }
  });
  const destFolder = `${os.tmpdir()}/image-${image2}-${Date.now()}`;
  console.warn(`Creating build in ${destFolder}`);
  fs5.mkdirSync(destFolder);
  console.warn(`COPYING Dockerfile`);
  fs5.copySync(dockerFilePath, path5.join(destFolder, "Dockerfile"));
  if (copyCommon) {
    console.warn(`COPYING Docker common`);
    fs5.copySync(dockerCommonPayloadPath, destFolder);
  }
  if (fs5.existsSync(dockerImagePayloadPath)) {
    console.warn(`COPYING Docker image payload`);
    fs5.copySync(dockerImagePayloadPath, destFolder);
  }
  console.warn(`COPYING .devops/config`);
  fs5.mkdirSync(path5.join(destFolder, ".devops"));
  fs5.copySync(".devops/config", path5.join(destFolder, ".devops/config"));
  console.warn(`CREATING config for the build process`);
  fs5.mkdirSync(path5.join(destFolder, "config"));
  const destGlobalEnvPath = path5.join(destFolder, "config/.env.global");
  if (isLocalOrRemoteEnv(cmdObj.env) === "remote") {
    // Remote envs: pull env vars from the cluster secret.
    const envFileData = getMonorepoSecretStr(cmdObj.env);
    fs5.writeFileSync(destGlobalEnvPath, envFileData);
  } else {
    // Local envs: copy whichever .env files exist; warn when none do.
    let anyCopied = false;
    const localGlobalEnvPath = "config/.env.global";
    const localEnvPath = `config/.env.${cmdObj.env}`;
    const destEnvPath = path5.join(destFolder, `config/.env.${cmdObj.env}`);
    if (fs5.existsSync(localGlobalEnvPath)) {
      console.warn(`COPYING ${localGlobalEnvPath} to ${destGlobalEnvPath}`);
      fs5.copyFileSync(localGlobalEnvPath, destGlobalEnvPath);
      anyCopied = true;
    }
    if (fs5.existsSync(localEnvPath)) {
      console.warn(`COPYING ${localEnvPath} to ${destEnvPath}`);
      fs5.copyFileSync(localEnvPath, destEnvPath);
      anyCopied = true;
    }
    if (!anyCopied) {
      console.warn(chalk5.red(`
Warning: local environment ${cmdObj.env} has no .env files. Environment variables will not be injected.
`));
    }
  }
  getImageDescendentData(image2).forEach((project) => {
    // .devops/config was already copied above — skip it BEFORE logging so
    // we don't claim to copy a folder we skip.
    if (project.rootPath === ".devops") return;
    console.warn(`COPYING ${project.rootPath}`);
    fs5.copySync(project.rootPath, path5.join(destFolder, project.rootPath));
  });
  console.warn(`COPYING files from image-extra-content`);
  imageExtraContent.forEach((file) => {
    fs5.copySync(file, path5.join(destFolder, file));
    console.warn(`  ${file}`);
  });
  // The temp folder path is the command's machine-readable output.
  console.log(destFolder);
}
var prepBuild = { command: "prep-build", oneLiner: oneLiner13, keyExamples: keyExamples13, run: run13 };
|
|
2277
|
+
|
|
2278
|
+
// src/libs/digital-ocean/container-reg.ts
|
|
2279
|
+
import { z } from "zod";
|
|
2280
|
+
// Schema for one tag entry in the JSON output of
// `doctl registry repository list-tags -o json`.
// Only the fields the pruning logic relies on are declared.
var repoTagMetadataSchema = z.object({
  // What we rely on
  tag: z.string().optional(),
  updated_at: z.string(),
  manifest_digest: z.string()
  // Other fields that existed in the output
  // registry_name: z.string().optional(),
  // repository: z.string().optional(),
  // compressed_size_bytes: z.number().optional(),
  // size_bytes: z.number().optional(),
});
// The doctl command returns an array of tag entries.
var repoTagMetadataSchemaOutput = z.array(repoTagMetadataSchema);
|
|
2292
|
+
// Lists a repository's tags via doctl, validates the JSON shape against the
// zod schema, and returns the tagged entries sorted newest-first by
// updated_at. Exits the process when the output fails to parse.
function getRepoTagMetadata(repoName) {
  const cmd = `doctl registry repository list-tags ${repoName} -o json`;
  const res = new CommandExecutor(cmd, { quiet: true }).exec();
  if (!res) return [];
  try {
    const parseRes = repoTagMetadataSchemaOutput.safeParse(JSON.parse(res));
    if (parseRes.error) {
      console.error(
        `Error schema-parsing output from "${cmd}": ${parseRes.error.toString()}`
      );
      console.error(">>> Command output");
      console.error(res);
      process.exit(1);
    }
    const tagged = parseRes.data.filter((entry) => entry.tag);
    return tagged.sort(
      (a, b) => new Date(b.updated_at).getTime() - new Date(a.updated_at).getTime()
    );
  } catch (e) {
    console.error(`Error JSON-parsing output from "${cmd}": ${res}`);
    process.exit(1);
  }
}
|
|
2315
|
+
// Force-deletes one tag from a DigitalOcean container repository.
function deleteRepoTag(repoName, tag) {
  new CommandExecutor(
    `doctl registry repository delete-tag ${repoName} ${tag} --force`
  ).exec();
}
|
|
2319
|
+
// Starts registry garbage collection, including untagged manifests.
// NOTE(review): the name looks like a typo for "startGarbageCollection",
// but it is kept unchanged because callers reference it by this name.
function stargGarbageCollection(registryName) {
  new CommandExecutor(
    `doctl registry garbage-collection start --include-untagged-manifests ${registryName} --force`
  ).exec();
}
|
|
2323
|
+
// Deletes tags beyond the "image-versions-to-keep" limit for an image's
// repository, then kicks off registry garbage collection. No-op unless
// infra is "digitalocean"; cloudrun images are skipped entirely.
function prune(registryFullName, repoName, image2) {
  if (getConst("infra") !== "digitalocean") {
    console.warn(
      "Pruning is only supported for the DigitalOcean container registry"
    );
    return;
  }
  const imageData = getImageData(image2);
  if (imageData["cloudrun"]) {
    console.warn(
      "Pruning is skipped for cloudrun images"
    );
    return;
  }
  const tags = getRepoTagMetadata(repoName);
  const versionsToKeep = Number(getConst("image-versions-to-keep"));
  if (!tags.length || tags.length <= versionsToKeep) return;
  // Tags are sorted newest-first; everything past the retention count goes.
  for (const tag of tags.slice(versionsToKeep)) {
    deleteRepoTag(repoName, tag.tag);
  }
  // registryFullName may be a path; the bare registry name is its last segment.
  const registryName = registryFullName.split("/").slice(-1)[0];
  stargGarbageCollection(registryName);
}
|
|
2348
|
+
|
|
2349
|
+
// src/cli/core/registry.ts
|
|
2350
|
+
// CLI metadata for `devops registry`.
var oneLiner14 = "Manage container repositories";
var keyExamples14 = `
$ devops registry server-url
$ devops registry reg-url
$ devops registry repo-url my-image sha
$ devops registry image-name my-image
$ devops registry prune my-image
`.trim();
var usage14 = `
${oneLiner14}

USAGE
Get base URLs for the container registry of the cluster:
devops registry server-url
devops registry reg-url

Note: for cloudrun images these URLs are not relevant.

Gets the URL of an image in the container registry:
devops registry repo-url <image> <sha> --env <env>

Gets the image name in the container registry:
devops registry image-name <image> --env <env>

Prunes the repository of old images to enforce the "image-versions-to-keep" constant in config/constants.yaml:
devops registry prune <image> --env <env>

This is only relevant when the "infra" constant is set to "digitalocean".

EXAMPLES
${keyExamples14}
`;
|
|
2382
|
+
// Subcommand handlers for `devops registry`.
var handlers5 = {
  "server-url": () => console.log(getConst("registry-base-url")),
  "reg-url": () => console.log(containerRegistryPath()),
  // Full registry path of a specific image build.
  "repo-url": (opts) => {
    const url = containerRegistryRepoPath(
      opts.required("image"),
      opts.required("env"),
      opts.required("sha")
    );
    console.log(url);
  },
  "image-name": (opts) => {
    console.log(containerRegistryImageName(opts.required("image"), opts.required("env")));
  },
  // Removes stale tags beyond the configured retention count.
  prune: (opts) => {
    const regName = containerRegistryPath();
    const image2 = opts.required("image");
    const repoName = containerRegistryImageName(image2, opts.required("env"));
    prune(regName, repoName, image2);
  }
};
|
|
2407
|
+
// Entry point for `devops registry`: dispatches to URL/name/prune handlers.
function run14(cmdObj) {
  if (cmdObj.help || cmdObj.args.length < 1) printUsageAndExit(usage14);
  const [command, image2, sha] = cmdObj.args;
  const handler = handlers5[command];
  if (!handler) {
    console.error(`Unknown command: ${command}`);
    printUsageAndExit(usage14);
  }
  handler(new StrongParams(usage14, { image: image2, env: cmdObj.env, sha }));
}
var registry = { oneLiner: oneLiner14, keyExamples: keyExamples14, run: run14 };
|
|
2419
|
+
|
|
2420
|
+
// src/cli/core/test.ts
|
|
2421
|
+
var oneLiner15 = "Runs tests in all projects or one specific project";
|
|
2422
|
+
var keyExamples15 = `
|
|
2423
|
+
$ devops test
|
|
2424
|
+
$ devops test project
|
|
2425
|
+
`;
|
|
2426
|
+
var usage15 = `
|
|
2427
|
+
${oneLiner15}
|
|
2428
|
+
|
|
2429
|
+
USAGE
|
|
2430
|
+
${keyExamples15}
|
|
2431
|
+
`;
|
|
2432
|
+
function run15(cmdObj) {
|
|
2433
|
+
const options = cmdObj.parseOptions({ params: ["--in"] });
|
|
2434
|
+
if (cmdObj.help || options.args.length > 1) printUsageAndExit(usage15);
|
|
2435
|
+
const workspace = options.args[0];
|
|
2436
|
+
const env2 = cmdObj.envForced ? cmdObj.env : "test";
|
|
2437
|
+
if (workspace) {
|
|
2438
|
+
new CommandExecutor(`devops run ${workspace}:test --env ${env2}`).spawn();
|
|
2439
|
+
} else {
|
|
2440
|
+
new CommandExecutor(`devops run-many test --env ${env2}`).spawn();
|
|
2441
|
+
}
|
|
2442
|
+
}
|
|
2443
|
+
var test = { oneLiner: oneLiner15, keyExamples: keyExamples15, run: run15 };
|
|
2444
|
+
|
|
2445
|
+
// src/cli/extensions/index.ts
|
|
2446
|
+
var extensions_exports = {};
|
|
2447
|
+
__export(extensions_exports, {
|
|
2448
|
+
dml: () => dml,
|
|
2449
|
+
internalCurl: () => internalCurl,
|
|
2450
|
+
jwt: () => jwt,
|
|
2451
|
+
prisma: () => prisma,
|
|
2452
|
+
redisBitnami: () => redisBitnami,
|
|
2453
|
+
redisHa: () => redisHa,
|
|
2454
|
+
stackgres: () => stackgres,
|
|
2455
|
+
template: () => template
|
|
2456
|
+
});
|
|
2457
|
+
|
|
2458
|
+
// src/cli/extensions/dml.ts
|
|
2459
|
+
import fs6 from "fs";
|
|
2460
|
+
var oneLiner16 = "Utilities to manage and run DML scripts in the db project";
|
|
2461
|
+
var keyExamples16 = `
|
|
2462
|
+
$ devops dml create --name my-dml-name
|
|
2463
|
+
$ devops dml run 20250113153318_my_dml_name
|
|
2464
|
+
`;
|
|
2465
|
+
var usage16 = `
|
|
2466
|
+
${oneLiner16}
|
|
2467
|
+
|
|
2468
|
+
CREATE DML SCRIPTS
|
|
2469
|
+
devops dml create --name <dml-semantic-name>
|
|
2470
|
+
|
|
2471
|
+
This command creates a new folder under /dml using the current timestamp and a
|
|
2472
|
+
snake-case version of the name. Inside the folder, a file called migrate.ts is created.
|
|
2473
|
+
You should write your DML script in this file.
|
|
2474
|
+
|
|
2475
|
+
You can add additional artifacts to the folder, such as a README.md file, sql files, json
|
|
2476
|
+
files, csv files, etc. You can also add optional scripts, such as rollback.ts.
|
|
2477
|
+
|
|
2478
|
+
RUN DML SCRIPTS
|
|
2479
|
+
devops dml run <dml-folder-name> [script-file-name] [-- arg1 arg2 ...]
|
|
2480
|
+
|
|
2481
|
+
The dml-folder-name must be the full name, including the timestamp. This follows prisma
|
|
2482
|
+
conventions.
|
|
2483
|
+
If the optional script-file-name is omitted, 'migrate' is used by default. The name should
|
|
2484
|
+
not include the '.ts' suffix.
|
|
2485
|
+
Optionally, args can be passed to the script as command line arguments after double
|
|
2486
|
+
dash (--).
|
|
2487
|
+
The runner first changes the working directory to 'dml/', then executes the script using
|
|
2488
|
+
'bunx tsx'.
|
|
2489
|
+
|
|
2490
|
+
Note: DML scripts are typically run inside the debug container of the image.
|
|
2491
|
+
|
|
2492
|
+
EXAMPLES
|
|
2493
|
+
${keyExamples16.trim()}
|
|
2494
|
+
$ devops dml run 20250113153318_my_dml_name rollback
|
|
2495
|
+
$ devops dml run 20250113153318_my_dml_name -- staging
|
|
2496
|
+
`;
|
|
2497
|
+
var dmlFileTemplate = `
|
|
2498
|
+
/**
|
|
2499
|
+
* Header code that retrieves the context of the DML script.
|
|
2500
|
+
* Feel free to modify this code to suit your needs.
|
|
2501
|
+
*
|
|
2502
|
+
* fullDmlFilePath - path to the current file
|
|
2503
|
+
* fullDmlDirPath - path to the current directory
|
|
2504
|
+
* dmlFile - name of the current DML script file
|
|
2505
|
+
* dmlDir - name of the directory containing the DML scripts
|
|
2506
|
+
* args - command line arguments passed to the script
|
|
2507
|
+
*
|
|
2508
|
+
* Notes:
|
|
2509
|
+
* - the script runs with the cwd set to the dml/ directory
|
|
2510
|
+
* - remove unused variables from this template, otherwise the linter will complain
|
|
2511
|
+
*/
|
|
2512
|
+
|
|
2513
|
+
import { prisma } from 'db';
|
|
2514
|
+
import { basename, dirname, sep } from 'path';
|
|
2515
|
+
import { fileURLToPath } from 'url';
|
|
2516
|
+
|
|
2517
|
+
const fullDmlFilePath = fileURLToPath(import.meta.url);
|
|
2518
|
+
const fullDmlDirPath = dirname(fullDmlFilePath);
|
|
2519
|
+
const dmlFile = basename(fullDmlFilePath);
|
|
2520
|
+
const dmlDir = fullDmlDirPath.split(sep).pop();
|
|
2521
|
+
|
|
2522
|
+
const args = process.argv.slice(2);
|
|
2523
|
+
`.trim();
|
|
2524
|
+
function createDml(name) {
|
|
2525
|
+
const timestamp = (/* @__PURE__ */ new Date()).toISOString().replace(/[^0-9]/g, "").slice(0, 14);
|
|
2526
|
+
const nameSnake = name.toLowerCase().replace(/[^a-z0-9]/g, "_");
|
|
2527
|
+
const folderName = `${timestamp}_${nameSnake}`;
|
|
2528
|
+
fs6.mkdirSync(`dml/${folderName}`);
|
|
2529
|
+
fs6.writeFileSync(`dml/${folderName}/migrate.ts`, dmlFileTemplate);
|
|
2530
|
+
console.log(`
|
|
2531
|
+
Created DML folder: dml/${folderName}
|
|
2532
|
+
`);
|
|
2533
|
+
}
|
|
2534
|
+
function runDml(cmdObj, folderName, scriptFileName, args) {
|
|
2535
|
+
scriptFileName ??= "migrate";
|
|
2536
|
+
args ??= [];
|
|
2537
|
+
const script = scriptFileName.endsWith(".ts") ? scriptFileName : `${scriptFileName}.ts`;
|
|
2538
|
+
cmdObj.executorFromEnv(
|
|
2539
|
+
// prettier-ignore
|
|
2540
|
+
`devops exec --in dml bun ${folderName}/${script} ${args.join(" ")}`
|
|
2541
|
+
).exec();
|
|
2542
|
+
}
|
|
2543
|
+
function run16(cmdObj) {
|
|
2544
|
+
if (cmdObj.help || cmdObj.args.length < 1) printUsageAndExit(usage16);
|
|
2545
|
+
const parsed = cmdObj.parseOptions({
|
|
2546
|
+
passthroughArgs: true,
|
|
2547
|
+
params: ["--name"]
|
|
2548
|
+
});
|
|
2549
|
+
switch (parsed.args[0]) {
|
|
2550
|
+
case "create": {
|
|
2551
|
+
const name = parsed.options["--name"];
|
|
2552
|
+
if (!name) printUsageAndExit(usage16);
|
|
2553
|
+
return createDml(name);
|
|
2554
|
+
}
|
|
2555
|
+
case "run": {
|
|
2556
|
+
const [_2, folderName, scriptFileName] = parsed.args;
|
|
2557
|
+
if (!folderName) printUsageAndExit(usage16);
|
|
2558
|
+
return runDml(cmdObj, folderName, scriptFileName, parsed.passthrough);
|
|
2559
|
+
}
|
|
2560
|
+
default:
|
|
2561
|
+
printUsageAndExit(usage16);
|
|
2562
|
+
}
|
|
2563
|
+
}
|
|
2564
|
+
var dml = { oneLiner: oneLiner16, keyExamples: keyExamples16, run: run16 };
|
|
2565
|
+
|
|
2566
|
+
// src/cli/extensions/internal-curl.ts
|
|
2567
|
+
var oneLiner17 = "Runs a curl command bearing an internal jwt-like token to allow verifying internal communication within the namespace";
|
|
2568
|
+
var keyExamples17 = `$ devops internal-curl jobs http://service-name:port/path`;
|
|
2569
|
+
var usage17 = `
|
|
2570
|
+
${oneLiner17}
|
|
2571
|
+
|
|
2572
|
+
GENERAL USAGE
|
|
2573
|
+
devops internal-curl <subject> [curl-options] <url>
|
|
2574
|
+
|
|
2575
|
+
NOTE
|
|
2576
|
+
--env should not be used with this command. It is expected to be run inside pods in the namespace.
|
|
2577
|
+
Relies on the MONOREPO_BASE_SECRET environment variable for signing the token.
|
|
2578
|
+
|
|
2579
|
+
'subject' is the subject of the token. Receiving endpoints should verify it matches the expected value using the
|
|
2580
|
+
'InternalToken' class exposed by @vaharoni/devops.
|
|
2581
|
+
|
|
2582
|
+
EXAMPLES
|
|
2583
|
+
${keyExamples17}
|
|
2584
|
+
`;
|
|
2585
|
+
async function run17(cmdObj) {
|
|
2586
|
+
if (cmdObj.help || cmdObj.args.length === 0 || cmdObj.envForced) printUsageAndExit(usage17);
|
|
2587
|
+
const [subject, ...rest] = cmdObj.args;
|
|
2588
|
+
let token;
|
|
2589
|
+
if (process.env.MONOREPO_BASE_SECRET) {
|
|
2590
|
+
token = new InternalToken(subject).generate();
|
|
2591
|
+
} else {
|
|
2592
|
+
const res = cmdObj.executorFromEnv(`devops jwt ${subject}`).exec({ asObject: true });
|
|
2593
|
+
if (res.statusCode !== 0) {
|
|
2594
|
+
console.error("Failed to generate token. Aborting.");
|
|
2595
|
+
process.exit(1);
|
|
2596
|
+
}
|
|
2597
|
+
token = res.stdout.trim();
|
|
2598
|
+
}
|
|
2599
|
+
cmdObj.executorFromEnv(`curl -H "Authorization: Bearer ${token}" ${rest.join(" ")}`).exec();
|
|
2600
|
+
}
|
|
2601
|
+
var internalCurl = { command: "internal-curl", oneLiner: oneLiner17, keyExamples: keyExamples17, run: run17 };
|
|
2602
|
+
|
|
2603
|
+
// src/cli/extensions/jwt.ts
|
|
2604
|
+
var oneLiner18 = "Returns a JWT-like token to allow verifying internal communication within the namespace";
|
|
2605
|
+
var keyExamples18 = `$ devops jwt jobs`;
|
|
2606
|
+
var usage18 = `
|
|
2607
|
+
${oneLiner18}
|
|
2608
|
+
|
|
2609
|
+
GENERAL USAGE
|
|
2610
|
+
devops jwt <subject>
|
|
2611
|
+
|
|
2612
|
+
NOTE
|
|
2613
|
+
The token is valid for 60 seconds and bears the specified subject.
|
|
2614
|
+
--env should not be used with this command. It is expected to be run inside pods in the namespace.
|
|
2615
|
+
Relies on the MONOREPO_BASE_SECRET environment variable for signing the token.
|
|
2616
|
+
|
|
2617
|
+
EXAMPLES
|
|
2618
|
+
${keyExamples18}
|
|
2619
|
+
`;
|
|
2620
|
+
async function run18(cmdObj) {
|
|
2621
|
+
if (cmdObj.help || cmdObj.args.length === 0 || cmdObj.envForced) printUsageAndExit(usage18);
|
|
2622
|
+
const subject = cmdObj.args[0];
|
|
2623
|
+
console.log(new InternalToken(subject).generate());
|
|
2624
|
+
}
|
|
2625
|
+
var jwt = { oneLiner: oneLiner18, keyExamples: keyExamples18, run: run18 };
|
|
2626
|
+
|
|
2627
|
+
// src/cli/extensions/prisma.ts
|
|
2628
|
+
import path6 from "path";
|
|
2629
|
+
var execShPath3 = path6.join(pkgRoot, "cli/exec.sh");
|
|
2630
|
+
var oneLiner19 = "Runs prisma commands in the db project after injecting the environment variables";
|
|
2631
|
+
var keyExamples19 = `$ devops prisma migrate dev`;
|
|
2632
|
+
var usage19 = `
|
|
2633
|
+
${oneLiner19}
|
|
2634
|
+
|
|
2635
|
+
GENERAL USAGE
|
|
2636
|
+
devops prisma <command>
|
|
2637
|
+
|
|
2638
|
+
<command> can be any command you normally set prisma for.
|
|
2639
|
+
|
|
2640
|
+
EXAMPLES
|
|
2641
|
+
${keyExamples19}
|
|
2642
|
+
`;
|
|
2643
|
+
async function run19(cmdObj) {
|
|
2644
|
+
cmdObj.executorFromEnv(
|
|
2645
|
+
`${execShPath3} db bunx prisma ${cmdObj.args.join(" ")}`,
|
|
2646
|
+
{ checkEnvYaml: false }
|
|
2647
|
+
).spawn();
|
|
2648
|
+
}
|
|
2649
|
+
var prisma = { oneLiner: oneLiner19, keyExamples: keyExamples19, run: run19 };
|
|
2650
|
+
|
|
2651
|
+
// src/cli/extensions/redis-bitnami.ts
|
|
2652
|
+
import chalk6 from "chalk";
|
|
2653
|
+
|
|
2654
|
+
// src/libs/k8s-redis-bitnami.ts
|
|
2655
|
+
function getRedisList() {
|
|
2656
|
+
const cmd = kubectlCommand(`get pods -l app.kubernetes.io/name=redis -A`);
|
|
2657
|
+
const res = new CommandExecutor(cmd, { quiet: true }).exec();
|
|
2658
|
+
if (!res) return null;
|
|
2659
|
+
return res;
|
|
2660
|
+
}
|
|
2661
|
+
function getRedisPassword(namespace2) {
|
|
2662
|
+
const cmd = kubectlCommand(`get secrets/${namespace2} -o jsonpath="{.data}"`, {
|
|
2663
|
+
namespace: namespace2
|
|
2664
|
+
});
|
|
2665
|
+
const res = new CommandExecutor(cmd, { quiet: true }).exec();
|
|
2666
|
+
if (!res) return null;
|
|
2667
|
+
try {
|
|
2668
|
+
const resJson = JSON.parse(res);
|
|
2669
|
+
const password = atob(resJson["redis-password"]);
|
|
2670
|
+
return { password };
|
|
2671
|
+
} catch {
|
|
2672
|
+
return null;
|
|
2673
|
+
}
|
|
2674
|
+
}
|
|
2675
|
+
function establishRedisTunnel(namespace2, port) {
|
|
2676
|
+
const cmd = kubectlCommand(`port-forward svc/${namespace2}-master ${port}:6379`, {
|
|
2677
|
+
namespace: namespace2
|
|
2678
|
+
});
|
|
2679
|
+
new CommandExecutor(cmd).spawn();
|
|
2680
|
+
}
|
|
2681
|
+
|
|
2682
|
+
// src/cli/extensions/redis-bitnami.ts
|
|
2683
|
+
var oneLiner20 = "Utilities to help accessing production and staging redis installation from Bitnami";
|
|
2684
|
+
var keyExamples20 = `
|
|
2685
|
+
$ devops redis list
|
|
2686
|
+
$ devops redis password redis-staging
|
|
2687
|
+
$ devops redis tunnel redis-staging
|
|
2688
|
+
`.trim();
|
|
2689
|
+
var usage20 = `
|
|
2690
|
+
${oneLiner20}
|
|
2691
|
+
|
|
2692
|
+
COMMANDS
|
|
2693
|
+
list Lists the available redis installations
|
|
2694
|
+
password <namespace> Shows the password for the Redis instance
|
|
2695
|
+
tunnel <namespace> [-p <port>] Sets up a tunnel to the remote Redis instance so you can access the DB from your local machine on port 9379 by default
|
|
2696
|
+
|
|
2697
|
+
NOTES
|
|
2698
|
+
This command assumes the namespace and the helm release name are the same.
|
|
2699
|
+
The --env flag should not be used with these commands, as the Redis namespaces follow different conventions than the monorepo env.
|
|
2700
|
+
|
|
2701
|
+
EXAMPLES
|
|
2702
|
+
${keyExamples20}
|
|
2703
|
+
`;
|
|
2704
|
+
var handlers6 = {
|
|
2705
|
+
list: () => {
|
|
2706
|
+
const res = getRedisList();
|
|
2707
|
+
console.log(res);
|
|
2708
|
+
},
|
|
2709
|
+
password: (opts) => {
|
|
2710
|
+
const namespace2 = opts.required("namespace");
|
|
2711
|
+
const res = getRedisPassword(namespace2);
|
|
2712
|
+
if (!res) {
|
|
2713
|
+
console.error("Failed to get the secret");
|
|
2714
|
+
process.exit(1);
|
|
2715
|
+
} else {
|
|
2716
|
+
console.log();
|
|
2717
|
+
console.log(res.password);
|
|
2718
|
+
console.log();
|
|
2719
|
+
}
|
|
2720
|
+
},
|
|
2721
|
+
tunnel: (opts) => {
|
|
2722
|
+
const namespace2 = opts.required("namespace");
|
|
2723
|
+
const port = opts.optional("port") ?? "9379";
|
|
2724
|
+
const res = getRedisPassword(namespace2);
|
|
2725
|
+
console.log(
|
|
2726
|
+
chalk6.blue("\nAfter the tunnel is established, connect to Redis by running:\n ") + chalk6.green.bold(`redis-cli -p ${port} --askpass`)
|
|
2727
|
+
);
|
|
2728
|
+
if (res) {
|
|
2729
|
+
console.log(chalk6.blue(" Password: ") + chalk6.green.bold(res.password));
|
|
2730
|
+
console.log();
|
|
2731
|
+
}
|
|
2732
|
+
establishRedisTunnel(namespace2, port);
|
|
2733
|
+
}
|
|
2734
|
+
};
|
|
2735
|
+
function run20(cmdObj) {
|
|
2736
|
+
if (cmdObj.help || cmdObj.args.length < 1) printUsageAndExit(usage20);
|
|
2737
|
+
const parsed = cmdObj.parseOptions({ params: ["-p"] });
|
|
2738
|
+
const [command, namespace2] = parsed.args;
|
|
2739
|
+
const port = parsed.options["-p"];
|
|
2740
|
+
const handler = handlers6[command];
|
|
2741
|
+
if (!handler) {
|
|
2742
|
+
console.error(`Unknown command: ${command}`);
|
|
2743
|
+
printUsageAndExit(usage20);
|
|
2744
|
+
}
|
|
2745
|
+
const params = new StrongParams(usage20, {
|
|
2746
|
+
namespace: namespace2,
|
|
2747
|
+
port
|
|
2748
|
+
});
|
|
2749
|
+
handler(params);
|
|
2750
|
+
}
|
|
2751
|
+
var redisBitnami = { name: "redis-bitnami", command: "redis", oneLiner: oneLiner20, keyExamples: keyExamples20, run: run20 };
|
|
2752
|
+
|
|
2753
|
+
// src/libs/k8s-redis-ha.ts
|
|
2754
|
+
function getRedisList2() {
|
|
2755
|
+
const cmd = kubectlCommand(`get pods -l app=redis-ha -A`);
|
|
2756
|
+
const res = new CommandExecutor(cmd, { quiet: true }).exec();
|
|
2757
|
+
if (!res) return null;
|
|
2758
|
+
return res;
|
|
2759
|
+
}
|
|
2760
|
+
function establishRedisTunnel2(namespace2, port) {
|
|
2761
|
+
const cmd = kubectlCommand(`port-forward svc/${namespace2}-redis-ha ${port}:6379`, {
|
|
2762
|
+
namespace: namespace2
|
|
2763
|
+
});
|
|
2764
|
+
new CommandExecutor(cmd).spawn();
|
|
2765
|
+
}
|
|
2766
|
+
|
|
2767
|
+
// src/cli/extensions/redis-ha.ts
|
|
2768
|
+
var oneLiner21 = "Utilities to help accessing production and staging redis installation from redis-ha";
|
|
2769
|
+
var keyExamples21 = `
|
|
2770
|
+
$ devops redis list
|
|
2771
|
+
$ devops redis tunnel redis-staging
|
|
2772
|
+
`.trim();
|
|
2773
|
+
var usage21 = `
|
|
2774
|
+
${oneLiner21}
|
|
2775
|
+
|
|
2776
|
+
COMMANDS
|
|
2777
|
+
list Lists the available redis installations
|
|
2778
|
+
tunnel <namespace> [-p <port>] Sets up a tunnel to the remote Redis instance so you can access the DB from your local machine on port 9379 by default
|
|
2779
|
+
|
|
2780
|
+
NOTES
|
|
2781
|
+
This command assumes the namespace and the helm release name are the same.
|
|
2782
|
+
The --env flag should not be used with these commands, as the Redis namespaces follow different conventions than the monorepo env.
|
|
2783
|
+
|
|
2784
|
+
EXAMPLES
|
|
2785
|
+
${keyExamples21}
|
|
2786
|
+
`;
|
|
2787
|
+
var handlers7 = {
|
|
2788
|
+
list: () => {
|
|
2789
|
+
const res = getRedisList2();
|
|
2790
|
+
console.log(res);
|
|
2791
|
+
},
|
|
2792
|
+
tunnel: (opts) => {
|
|
2793
|
+
const namespace2 = opts.required("namespace");
|
|
2794
|
+
const port = opts.optional("port") ?? "9379";
|
|
2795
|
+
establishRedisTunnel2(namespace2, port);
|
|
2796
|
+
}
|
|
2797
|
+
};
|
|
2798
|
+
function run21(cmdObj) {
|
|
2799
|
+
if (cmdObj.help || cmdObj.args.length < 1) printUsageAndExit(usage21);
|
|
2800
|
+
const parsed = cmdObj.parseOptions({ params: ["-p"] });
|
|
2801
|
+
const [command, namespace2] = parsed.args;
|
|
2802
|
+
const port = parsed.options["-p"];
|
|
2803
|
+
const handler = handlers7[command];
|
|
2804
|
+
if (!handler) {
|
|
2805
|
+
console.error(`Unknown command: ${command}`);
|
|
2806
|
+
printUsageAndExit(usage21);
|
|
2807
|
+
}
|
|
2808
|
+
const params = new StrongParams(usage21, {
|
|
2809
|
+
namespace: namespace2,
|
|
2810
|
+
port
|
|
2811
|
+
});
|
|
2812
|
+
handler(params);
|
|
2813
|
+
}
|
|
2814
|
+
var redisHa = { name: "redis-ha", command: "redis", oneLiner: oneLiner21, keyExamples: keyExamples21, run: run21 };
|
|
2815
|
+
|
|
2816
|
+
// src/libs/k8s-db.ts
|
|
2817
|
+
function getDbList() {
|
|
2818
|
+
const cmd = kubectlCommand(`get sgcluster -A`);
|
|
2819
|
+
const res = new CommandExecutor(cmd, { quiet: true }).exec();
|
|
2820
|
+
if (!res) return null;
|
|
2821
|
+
return res;
|
|
2822
|
+
}
|
|
2823
|
+
function getDbBackups() {
|
|
2824
|
+
const cmd = kubectlCommand(`get sgbkp -A`);
|
|
2825
|
+
const res = new CommandExecutor(cmd, { quiet: true }).exec();
|
|
2826
|
+
if (!res) return null;
|
|
2827
|
+
return res;
|
|
2828
|
+
}
|
|
2829
|
+
function getDbAdminPassword() {
|
|
2830
|
+
const cmd = kubectlCommand(
|
|
2831
|
+
`get secrets/stackgres-restapi-admin -o jsonpath="{.data}"`,
|
|
2832
|
+
{ namespace: "stackgres" }
|
|
2833
|
+
);
|
|
2834
|
+
const res = new CommandExecutor(cmd, { quiet: true }).exec();
|
|
2835
|
+
if (!res) return null;
|
|
2836
|
+
try {
|
|
2837
|
+
const resJson = JSON.parse(res);
|
|
2838
|
+
const password = atob(resJson["clearPassword"]);
|
|
2839
|
+
const user = atob(resJson["k8sUsername"]);
|
|
2840
|
+
return { user, password };
|
|
2841
|
+
} catch {
|
|
2842
|
+
return null;
|
|
2843
|
+
}
|
|
2844
|
+
}
|
|
2845
|
+
function getDbPasswords(namespace2) {
|
|
2846
|
+
const cmd = kubectlCommand(`get secrets/${namespace2} -o jsonpath="{.data}"`, {
|
|
2847
|
+
namespace: namespace2
|
|
2848
|
+
});
|
|
2849
|
+
const res = new CommandExecutor(cmd, { quiet: true }).exec();
|
|
2850
|
+
if (!res) return null;
|
|
2851
|
+
try {
|
|
2852
|
+
const resJson = JSON.parse(res);
|
|
2853
|
+
const superUser = atob(resJson["superuser-username"]);
|
|
2854
|
+
const superPassword = atob(resJson["superuser-password"]);
|
|
2855
|
+
const authenticatorUser = atob(resJson["authenticator-username"]);
|
|
2856
|
+
const authenticatorPassword = atob(resJson["authenticator-password"]);
|
|
2857
|
+
const replicationUser = atob(resJson["replication-username"]);
|
|
2858
|
+
const replicationPassword = atob(resJson["replication-password"]);
|
|
2859
|
+
return {
|
|
2860
|
+
superUser,
|
|
2861
|
+
superPassword,
|
|
2862
|
+
authenticatorUser,
|
|
2863
|
+
authenticatorPassword,
|
|
2864
|
+
replicationUser,
|
|
2865
|
+
replicationPassword
|
|
2866
|
+
};
|
|
2867
|
+
} catch {
|
|
2868
|
+
return null;
|
|
2869
|
+
}
|
|
2870
|
+
}
|
|
2871
|
+
function connectToPatroni(namespace2) {
|
|
2872
|
+
const cmd = kubectlCommand(
|
|
2873
|
+
`exec -it ${namespace2}-0 -c patroni -- /bin/bash`,
|
|
2874
|
+
{ namespace: namespace2 }
|
|
2875
|
+
);
|
|
2876
|
+
new CommandExecutor(cmd).spawn();
|
|
2877
|
+
}
|
|
2878
|
+
function connectToPsql(namespace2) {
|
|
2879
|
+
const cmd = kubectlCommand(
|
|
2880
|
+
`exec -it ${namespace2}-0 -c postgres-util -- psql`,
|
|
2881
|
+
{ namespace: namespace2 }
|
|
2882
|
+
);
|
|
2883
|
+
new CommandExecutor(cmd).spawn();
|
|
2884
|
+
}
|
|
2885
|
+
function establishTunnel(namespace2, port) {
|
|
2886
|
+
const cmd = kubectlCommand(`port-forward pod/${namespace2}-0 ${port}:5432`, {
|
|
2887
|
+
namespace: namespace2
|
|
2888
|
+
});
|
|
2889
|
+
new CommandExecutor(cmd).spawn();
|
|
2890
|
+
}
|
|
2891
|
+
|
|
2892
|
+
// src/cli/extensions/stackgres.ts
|
|
2893
|
+
var oneLiner22 = "Utilities to help day to day operations of production and staging databases";
|
|
2894
|
+
var keyExamples22 = `
|
|
2895
|
+
$ devops db list
|
|
2896
|
+
$ devops db backups
|
|
2897
|
+
$ devops db password ui
|
|
2898
|
+
$ devops db password db-staging
|
|
2899
|
+
$ devops db tunnel db-staging
|
|
2900
|
+
$ devops db patroni db-staging
|
|
2901
|
+
$ devops db psql db-staging
|
|
2902
|
+
`.trim();
|
|
2903
|
+
var usage22 = `
|
|
2904
|
+
${oneLiner22}
|
|
2905
|
+
|
|
2906
|
+
NOTES
|
|
2907
|
+
The admin UI provided by Stackgres is great. It allows you to do most of the operations you need, such as
|
|
2908
|
+
restarting the cluster, upgrading postgres versions, and restoring from backups with point in time recovery (PITR).
|
|
2909
|
+
|
|
2910
|
+
This utility complements the admin UI with a few helpful shortcuts.
|
|
2911
|
+
|
|
2912
|
+
Note that the --env flag should not be used with these commands, as the DB namespaces follow different
|
|
2913
|
+
conventions than the monorepo env.
|
|
2914
|
+
|
|
2915
|
+
This utility assumes that the cluster name and the namespace are always the same.
|
|
2916
|
+
|
|
2917
|
+
COMMANDS
|
|
2918
|
+
list Lists the available clusters
|
|
2919
|
+
backups Lists all available backups
|
|
2920
|
+
password ui Shows the password to the admin UI
|
|
2921
|
+
password <namespace> Shows the superuser, replication, and authenticator password of the remote database
|
|
2922
|
+
patroni <namespace> Obtain a shell to the primary pod's patroni container, where you can run 'patronictl'
|
|
2923
|
+
psql <namespace> Runs 'psql' in the primary pod's postgres-utils container
|
|
2924
|
+
tunnel <namespace> [-p <port>] Sets up a tunnel to the remote database so you can access the DB from your local machine.
|
|
2925
|
+
By default, the port is taken from the namespace to make it easier to create connection profiles locally:
|
|
2926
|
+
db-staging: 7432, db-production: 8432, otherwise: 9432
|
|
2927
|
+
|
|
2928
|
+
EXAMPLES
|
|
2929
|
+
${keyExamples22}
|
|
2930
|
+
`;
|
|
2931
|
+
var DEFAULT_PORTS = {
|
|
2932
|
+
"db-staging": "7432",
|
|
2933
|
+
"db-production": "8432"
|
|
2934
|
+
};
|
|
2935
|
+
var handlers8 = {
|
|
2936
|
+
list: () => {
|
|
2937
|
+
const res = getDbList();
|
|
2938
|
+
console.log(res);
|
|
2939
|
+
},
|
|
2940
|
+
backups: () => {
|
|
2941
|
+
const res = getDbBackups();
|
|
2942
|
+
console.log(res);
|
|
2943
|
+
},
|
|
2944
|
+
password: (opts) => {
|
|
2945
|
+
const namespace2 = opts.required("namespace");
|
|
2946
|
+
if (namespace2 === "ui") {
|
|
2947
|
+
const res2 = getDbAdminPassword();
|
|
2948
|
+
if (!res2) {
|
|
2949
|
+
console.error("Failed to get the secret");
|
|
2950
|
+
process.exit(1);
|
|
2951
|
+
} else {
|
|
2952
|
+
console.log(`User: ${res2.user}`);
|
|
2953
|
+
console.log(`Password: ${res2.password}`);
|
|
2954
|
+
}
|
|
2955
|
+
return;
|
|
2956
|
+
}
|
|
2957
|
+
const res = getDbPasswords(namespace2);
|
|
2958
|
+
if (!res) {
|
|
2959
|
+
console.error("Failed to get the secret");
|
|
2960
|
+
process.exit(1);
|
|
2961
|
+
} else {
|
|
2962
|
+
console.log("\nSuperuser");
|
|
2963
|
+
console.log(` ${res.superUser}`);
|
|
2964
|
+
console.log(` ${res.superPassword}`);
|
|
2965
|
+
console.log("\nAuthenticator");
|
|
2966
|
+
console.log(` ${res.authenticatorUser}`);
|
|
2967
|
+
console.log(` ${res.authenticatorPassword}`);
|
|
2968
|
+
console.log("\nReplication");
|
|
2969
|
+
console.log(` ${res.replicationUser}`);
|
|
2970
|
+
console.log(` ${res.replicationPassword}`);
|
|
2971
|
+
console.log();
|
|
2972
|
+
}
|
|
2973
|
+
},
|
|
2974
|
+
tunnel: (opts) => {
|
|
2975
|
+
const defaultPort = DEFAULT_PORTS[opts.required("namespace")] || "9432";
|
|
2976
|
+
const port = opts.optional("port") || defaultPort;
|
|
2977
|
+
establishTunnel(opts.required("namespace"), port);
|
|
2978
|
+
},
|
|
2979
|
+
patroni: (opts) => {
|
|
2980
|
+
connectToPatroni(opts.required("namespace"));
|
|
2981
|
+
},
|
|
2982
|
+
psql: (opts) => {
|
|
2983
|
+
connectToPsql(opts.required("namespace"));
|
|
2984
|
+
}
|
|
2985
|
+
};
|
|
2986
|
+
function run22(cmdObj) {
|
|
2987
|
+
if (cmdObj.help || cmdObj.args.length < 1) printUsageAndExit(usage22);
|
|
2988
|
+
const parsed = cmdObj.parseOptions({ params: ["-p"] });
|
|
2989
|
+
const [command, namespace2] = parsed.args;
|
|
2990
|
+
const port = parsed.options["-p"];
|
|
2991
|
+
const handler = handlers8[command];
|
|
2992
|
+
if (!handler) {
|
|
2993
|
+
console.error(`Unknown command: ${command}`);
|
|
2994
|
+
printUsageAndExit(usage22);
|
|
2995
|
+
}
|
|
2996
|
+
const params = new StrongParams(usage22, {
|
|
2997
|
+
namespace: namespace2,
|
|
2998
|
+
port
|
|
2999
|
+
});
|
|
3000
|
+
handler(params);
|
|
3001
|
+
}
|
|
3002
|
+
var stackgres = { command: "db", oneLiner: oneLiner22, keyExamples: keyExamples22, run: run22 };
|
|
3003
|
+
|
|
3004
|
+
// src/cli/extensions/template.ts
|
|
3005
|
+
import chalk7 from "chalk";
|
|
3006
|
+
var SUPPORTED_CONTEXT_TYPES = ["deployment", "db-migrate", "debug"];
|
|
3007
|
+
var oneLiner23 = "Utilities to help validate manifest templates rendering.";
|
|
3008
|
+
var keyExamples23 = `
|
|
3009
|
+
$ devops template context deployment www
|
|
3010
|
+
$ devops template context debug
|
|
3011
|
+
$ devops template context db-migrate
|
|
3012
|
+
$ devops template gen deployment www
|
|
3013
|
+
$ devops template gen debug main-node
|
|
3014
|
+
$ devops template gen db-migrate
|
|
3015
|
+
`.trim();
|
|
3016
|
+
var usage23 = `
|
|
3017
|
+
${oneLiner23}
|
|
3018
|
+
|
|
3019
|
+
IMPORTANT
|
|
3020
|
+
This command generates examples only. It's intended to help design new templates by showing what context variables are available
|
|
3021
|
+
and how they get rendered using handlebar replacement. They should not be used to apply changes to the cluster.
|
|
3022
|
+
|
|
3023
|
+
SHOW CONTEXT OBJECT
|
|
3024
|
+
devops template context deployment <workspace>
|
|
3025
|
+
devops template context db-migrate-job|debug
|
|
3026
|
+
|
|
3027
|
+
Prints out a context object with dummy values for the specified template type.
|
|
3028
|
+
For deployment, the workspace name is required.
|
|
3029
|
+
|
|
3030
|
+
GENERATE TEMPLATES
|
|
3031
|
+
devops template gen deployment <workspace>
|
|
3032
|
+
devops template gen debug <image>
|
|
3033
|
+
devops template gen db-migrate-job
|
|
3034
|
+
|
|
3035
|
+
For deployment, generates an example manifest of a workspace, including override files present under the 'manifests' folder.
|
|
3036
|
+
|
|
3037
|
+
EXAMPLES
|
|
3038
|
+
${keyExamples23}
|
|
3039
|
+
`;
|
|
3040
|
+
var handlers9 = {
|
|
3041
|
+
context: {
|
|
3042
|
+
"deployment": (opts) => {
|
|
3043
|
+
const workspace = opts.required("workspaceOrImage");
|
|
3044
|
+
const workspaceData = getWorkspace(workspace);
|
|
3045
|
+
const packageDataWithDeployment = workspaceData.packageDataEntries.find((entry) => entry.deployment);
|
|
3046
|
+
if (!packageDataWithDeployment) {
|
|
3047
|
+
console.error(`No deployment found for workspace ${workspace}`);
|
|
3048
|
+
process.exit(1);
|
|
3049
|
+
}
|
|
3050
|
+
const randomImage = getWorkspaceImages(workspace)[0];
|
|
3051
|
+
console.warn(chalk7.green("\nThis is a sample context object used to render a manifest template of type deployment:\n"));
|
|
3052
|
+
console.log(
|
|
3053
|
+
JSON.stringify(
|
|
3054
|
+
new ImageContextGenerator(opts.required("env"), randomImage, "dummy-sha").getDeployment(packageDataWithDeployment),
|
|
3055
|
+
null,
|
|
3056
|
+
2
|
|
3057
|
+
)
|
|
3058
|
+
);
|
|
3059
|
+
},
|
|
3060
|
+
"db-migrate": (opts) => {
|
|
3061
|
+
const randomImage = getWorkspaceImages("db").filter((image2) => getImageData(image2)["can-db-migrate"])[0];
|
|
3062
|
+
if (!randomImage) {
|
|
3063
|
+
console.error("No image found with can-db-migrate=true in the db workspace.");
|
|
3064
|
+
process.exit(1);
|
|
3065
|
+
}
|
|
3066
|
+
console.warn(chalk7.green("\nThis is a sample context object used to render a manifest template of type db-migrate:\n"));
|
|
3067
|
+
console.log(
|
|
3068
|
+
JSON.stringify(
|
|
3069
|
+
new ImageContextGenerator(opts.required("env"), randomImage, "dummy-sha").getDbMigrate(),
|
|
3070
|
+
null,
|
|
3071
|
+
2
|
|
3072
|
+
)
|
|
3073
|
+
);
|
|
3074
|
+
},
|
|
3075
|
+
"debug": (opts) => {
|
|
3076
|
+
const randomImage = getImageNames()[0];
|
|
3077
|
+
console.warn(chalk7.green("\nThis is a sample context object used to render a manifest template of type debug:\n"));
|
|
3078
|
+
console.log(
|
|
3079
|
+
JSON.stringify(
|
|
3080
|
+
new ImageContextGenerator(opts.required("env"), randomImage, "dummy-sha").getDebug(),
|
|
3081
|
+
null,
|
|
3082
|
+
2
|
|
3083
|
+
)
|
|
3084
|
+
);
|
|
3085
|
+
}
|
|
3086
|
+
},
|
|
3087
|
+
gen: {
|
|
3088
|
+
"deployment": (opts) => {
|
|
3089
|
+
const workspace = opts.required("workspaceOrImage");
|
|
3090
|
+
const workspaceData = getWorkspace(workspace);
|
|
3091
|
+
const packageDataWithDeployment = workspaceData.packageDataEntries.find((entry) => entry.deployment);
|
|
3092
|
+
if (!packageDataWithDeployment) {
|
|
3093
|
+
console.error(`No deployment found for workspace ${workspace}`);
|
|
3094
|
+
process.exit(1);
|
|
3095
|
+
}
|
|
3096
|
+
const randomImage = getWorkspaceImages(workspace)[0];
|
|
3097
|
+
console.warn(chalk7.green(`
|
|
3098
|
+
This is a sample of generated manifests for the ${workspace} workspace:
|
|
3099
|
+
`));
|
|
3100
|
+
console.log(
|
|
3101
|
+
generateWorkspaceDeployment(
|
|
3102
|
+
packageDataWithDeployment,
|
|
3103
|
+
opts.required("env"),
|
|
3104
|
+
randomImage,
|
|
3105
|
+
"dummy-sha"
|
|
3106
|
+
)
|
|
3107
|
+
);
|
|
3108
|
+
},
|
|
3109
|
+
"db-migrate": (opts) => {
|
|
3110
|
+
const randomImage = getWorkspaceImages("db").filter((image2) => getImageData(image2)["can-db-migrate"])[0];
|
|
3111
|
+
if (!randomImage) {
|
|
3112
|
+
console.error("No image found with can-db-migrate=true in the db workspace.");
|
|
3113
|
+
process.exit(1);
|
|
3114
|
+
}
|
|
3115
|
+
console.warn(chalk7.green("\nThis is a sample of generated manifests for the db-migrate job:\n"));
|
|
3116
|
+
console.log(
|
|
3117
|
+
generateDbMigrateJob(
|
|
3118
|
+
opts.required("env"),
|
|
3119
|
+
randomImage,
|
|
3120
|
+
"dummy-sha"
|
|
3121
|
+
)
|
|
3122
|
+
);
|
|
3123
|
+
},
|
|
3124
|
+
"debug": (opts) => {
|
|
3125
|
+
const image2 = opts.required("workspaceOrImage");
|
|
3126
|
+
console.warn(chalk7.green(`
|
|
3127
|
+
This is a sample of generated manifests for the debug image ${image2}:
|
|
3128
|
+
`));
|
|
3129
|
+
console.log(
|
|
3130
|
+
generateDebugPod(
|
|
3131
|
+
opts.required("env"),
|
|
3132
|
+
image2,
|
|
3133
|
+
"dummy-sha"
|
|
3134
|
+
)
|
|
3135
|
+
);
|
|
3136
|
+
}
|
|
3137
|
+
}
|
|
3138
|
+
};
|
|
3139
|
+
// CLI entry point for this subcommand.
// Positional args: <command> <context-type> [workspace-or-image].
// Validates the command and context type, then invokes the matching
// per-context handler with strongly-typed parameters.
function run23(cmdObj) {
  if (cmdObj.help || cmdObj.args.length < 1) printUsageAndExit(usage23);
  const command = cmdObj.args[0];
  const contextType = cmdObj.args[1];
  const param = cmdObj.args[2];
  const contextHandlers = handlers9[command];
  if (!contextHandlers) {
    console.error(`Unknown command: ${command}`);
    printUsageAndExit(usage23);
  }
  if (!SUPPORTED_CONTEXT_TYPES.includes(contextType)) {
    console.error(`Unknown context type: ${contextType}. Supported types: ${SUPPORTED_CONTEXT_TYPES.join(", ")}`);
    process.exit(1);
  }
  // "development" is served by the staging environment's manifests.
  const resolvedEnv = cmdObj.env === "development" ? "staging" : cmdObj.env;
  const params = new StrongParams(usage23, {
    env: resolvedEnv,
    contextType,
    workspaceOrImage: param
  });
  contextHandlers[contextType](params);
}
var template = { oneLiner: oneLiner23, keyExamples: keyExamples23, run: run23 };
|
|
3160
|
+
|
|
3161
|
+
// src/devops.ts
|
|
3162
|
+
import { existsSync } from "fs";
|
|
3163
|
+
import path7 from "path";
|
|
3164
|
+
var [_node, _scriptPath, ...commandArgs] = process.argv;
|
|
3165
|
+
var newLine = "\n ";
|
|
3166
|
+
// Column width for usage listings: longest command key plus 10 spaces of gap.
// Guard the empty map: `Math.max()` with no arguments returns -Infinity,
// which would poison downstream padding math (this is reachable — the
// extension/plugin command maps may be empty and are measured unconditionally).
function maxKeyLength(commands) {
  const lengths = Object.keys(commands).map((x) => x.length);
  return lengths.length ? Math.max(...lengths) + 10 : 0;
}
|
|
3169
|
+
// Index core commands by their public CLI name; an explicit `command`
// field on the export wins over the export's constant name.
var coreCommands = {};
for (const [constKey, imported] of Object.entries(core_exports)) {
  const key = "command" in imported ? imported.command : constKey;
  coreCommands[key] = {
    oneLiner: imported.oneLiner,
    keyExamples: imported.keyExamples,
    run: imported.run,
    key
  };
}
var coreCommandsKeyLength = maxKeyLength(coreCommands);
|
|
3176
|
+
// Commands contributed by extensions listed under `extensions` in
// constants.yaml. Available extensions are keyed by their yaml name
// (`name` field, falling back to the export's constant name); an unknown
// name in the config aborts the CLI.
var extensionCommands = {};
var activeExtensions = getConst("extensions", { ignoreIfInvalid: true });
if (activeExtensions?.length) {
  const availableExtensionsLookup = {};
  for (const [constKey, value] of Object.entries(extensions_exports)) {
    const keyInYaml = "name" in value ? value.name : constKey;
    const key = "command" in value ? value.command : constKey;
    availableExtensionsLookup[keyInYaml] = {
      oneLiner: value.oneLiner,
      keyExamples: value.keyExamples,
      run: value.run,
      key
    };
  }
  for (const extension of activeExtensions) {
    const extensionData = availableExtensionsLookup[extension];
    if (!extensionData) {
      console.error(`
Extension "${extension}" referenced in constants.yaml is not supported

`);
      process.exit(1);
    }
    extensionCommands[extensionData.key] = extensionData;
  }
}
var extensionCommandsKeyLength = maxKeyLength(extensionCommands);
|
|
3200
|
+
// Discover repo-local plugins: each .ts file under .devops/plugins must
// export exactly one command object with oneLiner, keyExamples, and a run
// function; any malformed plugin aborts the CLI. Uses top-level await
// (this module is ESM).
var pluginCommands = {};
if (existsSync(".devops/plugins")) {
  const pluginsDir = path7.join(process.cwd(), ".devops/plugins");
  const pluginFiles = globSync2(path7.join(pluginsDir, "*.ts"));
  for (const pluginFile of pluginFiles) {
    const plugin = await import(pluginFile);
    const exportedNames = Object.keys(plugin);
    if (exportedNames.length !== 1) {
      console.error(`Plugin ${pluginFile} must export exactly one command`);
      process.exit(1);
    }
    const constKey = exportedNames[0];
    const entry = plugin[constKey];
    // The CLI name defaults to the export name unless `command` is set.
    const key = entry.command ?? constKey;
    if (!entry.oneLiner || !entry.keyExamples || !entry.run) {
      console.error(`Plugin ${pluginFile} must export oneLiner, keyExamples, and run`);
      process.exit(1);
    }
    if (typeof entry.run !== "function") {
      console.error(`Plugin ${pluginFile} must export a run function`);
      process.exit(1);
    }
    pluginCommands[key] = {
      oneLiner: entry.oneLiner,
      keyExamples: entry.keyExamples,
      run: entry.run,
      key
    };
  }
}
var pluginCommandsKeyLength = maxKeyLength(pluginCommands);
|
|
3226
|
+
var GENERAL_USAGE = `
|
|
72
3227
|
Devops utilities for the monorepo.
|
|
73
3228
|
|
|
74
3229
|
USAGE
|
|
@@ -92,36 +3247,25 @@ CHOOSING ENV with <env-options>
|
|
|
92
3247
|
|
|
93
3248
|
|
|
94
3249
|
CORE COMMANDS
|
|
95
|
-
${Object.values(coreCommands)
|
|
96
|
-
|
|
97
|
-
|
|
3250
|
+
${Object.values(coreCommands).map(
|
|
3251
|
+
(cmd) => [cmd.key, " ".repeat(coreCommandsKeyLength - cmd.key.length), cmd.oneLiner].join("")
|
|
3252
|
+
).join(newLine)}
|
|
98
3253
|
`;
|
|
99
|
-
|
|
3254
|
+
// Usage sections for extensions and plugins are rendered only when the
// corresponding command map is non-empty. `padEnd` produces the same
// key + spacer column as the original manual " ".repeat concatenation.
var EXTENSION_USAGE = Object.keys(extensionCommands).length ? `
ACTIVE EXTENSIONS
${Object.values(extensionCommands)
  .map((cmd) => cmd.key.padEnd(extensionCommandsKeyLength) + cmd.oneLiner)
  .join(newLine)}
` : "";
var PLUGIN_USAGE = Object.keys(pluginCommands).length ? `
ACTIVE PLUGINS
${Object.values(pluginCommands)
  .map((cmd) => cmd.key.padEnd(pluginCommandsKeyLength) + cmd.oneLiner)
  .join(newLine)}
` : "";
var ALL_USAGE = [GENERAL_USAGE, EXTENSION_USAGE, PLUGIN_USAGE].filter(Boolean).join("");
// Merge all command sources; later spreads (extensions, then plugins) win
// on key collisions. Dispatch the parsed command or print combined usage.
var allCommands = { ...coreCommands, ...extensionCommands, ...pluginCommands };
var commandObj = new CLICommandParser(commandArgs);
var chosenCommand = allCommands[commandObj.command];
if (!chosenCommand) printUsageAndExit(ALL_USAGE);
chosenCommand.run(commandObj);