@crossdelta/platform-sdk 0.19.10 → 0.19.12
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/bin/cli.mjs +144 -126
- package/bin/templates/workspace/infra/package.json.hbs +2 -2
- package/bin/templates/workspace/packages/contracts/package.json.hbs +2 -2
- package/bin/templates/workspace/turbo.json +1 -2
- package/package.json +4 -4
- package/bin/chunk-634PL24Z.mjs +0 -20
- package/bin/config-CKQHYOF4.mjs +0 -2
- package/bin/templates/workspace/.github/README.md +0 -70
- package/bin/templates/workspace/.github/actions/check-image-tag-exists/action.yml +0 -27
- package/bin/templates/workspace/.github/actions/check-image-tag-exists/index.js +0 -179
- package/bin/templates/workspace/.github/actions/check-path-changes/action.yml +0 -21
- package/bin/templates/workspace/.github/actions/check-path-changes/index.js +0 -192
- package/bin/templates/workspace/.github/actions/detect-skipped-services/action.yml +0 -38
- package/bin/templates/workspace/.github/actions/generate-scope-matrix/action.yml +0 -21
- package/bin/templates/workspace/.github/actions/generate-scope-matrix/index.js +0 -370
- package/bin/templates/workspace/.github/actions/prepare-build-context/action.yml +0 -108
- package/bin/templates/workspace/.github/actions/resolve-scope-tags/action.yml +0 -31
- package/bin/templates/workspace/.github/actions/resolve-scope-tags/index.js +0 -398
- package/bin/templates/workspace/.github/actions/setup-bun-install/action.yml.hbs +0 -57
- package/bin/templates/workspace/.github/copilot-chat-configuration.json +0 -49
- package/bin/templates/workspace/.github/dependabot.yml +0 -18
- package/bin/templates/workspace/.github/workflows/build-and-deploy.yml.hbs +0 -243
- package/bin/templates/workspace/.github/workflows/lint-and-tests.yml.hbs +0 -32
- package/bin/templates/workspace/.github/workflows/publish-packages.yml +0 -202
- package/bin/templates/workspace/apps/.gitkeep +0 -0
- package/bin/templates/workspace/docs/.gitkeep +0 -0
- package/bin/templates/workspace/infra/services/.gitkeep +0 -0
- package/bin/templates/workspace/packages/.gitkeep +0 -0
- package/bin/templates/workspace/services/.gitkeep +0 -0
|
@@ -7,8 +7,8 @@
|
|
|
7
7
|
"pulumi": "pulumi"
|
|
8
8
|
},
|
|
9
9
|
"dependencies": {
|
|
10
|
-
"@crossdelta/cloudevents": "^0.7.
|
|
11
|
-
"@crossdelta/infrastructure": "^0.7.
|
|
10
|
+
"@crossdelta/cloudevents": "^0.7.10",
|
|
11
|
+
"@crossdelta/infrastructure": "^0.7.6",
|
|
12
12
|
"{{scope}}/contracts": "workspace:*",
|
|
13
13
|
"@pulumi/digitalocean": "^4.55.0",
|
|
14
14
|
"@pulumi/kubernetes": "^4.21.0",
|
|
@@ -19,8 +19,8 @@
|
|
|
19
19
|
"clean": "rm -rf dist"
|
|
20
20
|
},
|
|
21
21
|
"dependencies": {
|
|
22
|
-
"@crossdelta/cloudevents": "^0.7.
|
|
23
|
-
"@crossdelta/infrastructure": "^0.7.
|
|
22
|
+
"@crossdelta/cloudevents": "^0.7.10",
|
|
23
|
+
"@crossdelta/infrastructure": "^0.7.6",
|
|
24
24
|
"zod": "^4.0.0"
|
|
25
25
|
},
|
|
26
26
|
"devDependencies": {
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@crossdelta/platform-sdk",
|
|
3
|
-
"version": "0.19.
|
|
3
|
+
"version": "0.19.12",
|
|
4
4
|
"description": "Platform toolkit for event-driven microservices — keeping code and infrastructure in lockstep.",
|
|
5
5
|
"keywords": [
|
|
6
6
|
"cli",
|
|
@@ -118,7 +118,7 @@
|
|
|
118
118
|
"@ai-sdk/anthropic": "^2.0.53",
|
|
119
119
|
"@ai-sdk/openai": "^2.0.79",
|
|
120
120
|
"@angular-devkit/core": "^21.0.0",
|
|
121
|
-
"@crossdelta/flowcore": "
|
|
121
|
+
"@crossdelta/flowcore": "workspace:*",
|
|
122
122
|
"@faker-js/faker": "^9.8.0",
|
|
123
123
|
"@inquirer/prompts": "^7.5.0",
|
|
124
124
|
"@listr2/prompt-adapter-enquirer": "^2.0.15",
|
|
@@ -143,8 +143,8 @@
|
|
|
143
143
|
"zod": "^4.0.0"
|
|
144
144
|
},
|
|
145
145
|
"peerDependencies": {
|
|
146
|
-
"@crossdelta/cloudevents": "
|
|
147
|
-
"@crossdelta/infrastructure": "
|
|
146
|
+
"@crossdelta/cloudevents": "workspace:*",
|
|
147
|
+
"@crossdelta/infrastructure": "workspace:*",
|
|
148
148
|
"@nestjs/schematics": "^11.0.5",
|
|
149
149
|
"turbo": "^2.0.0"
|
|
150
150
|
},
|
package/bin/chunk-634PL24Z.mjs
DELETED
|
@@ -1,20 +0,0 @@
|
|
|
1
|
-
#!/usr/bin/env node
|
|
2
|
-
var tt=(t=>typeof require<"u"?require:typeof Proxy<"u"?new Proxy(t,{get:(r,e)=>(typeof require<"u"?require:r)[e]}):t)(function(t){if(typeof require<"u")return require.apply(this,arguments);throw Error('Dynamic require of "'+t+'" is not supported')});import{existsSync as C,readdirSync as Tt,readFileSync as q,writeFileSync as St}from"fs";import{join as p,resolve as At}from"path";var Mt=process.env.npm_package_version||"0.0.0",_=["bun.lock","bun.lockb","package-lock.json","yarn.lock","pnpm-lock.yaml"],Ht="start:dev";import w from"chalk";function qt(t,r){if(t.length===0)return"No entries to display.";let e=r?.title||"Benchmark Results",n=r?.footer||"",o=r?.maxBarWidth||50,s=r?.unit||"",i=Math.max(...t.map(c=>c.value)),a=[];a.push(""),a.push(e),a.push("");for(let c of t){let g=rt(c.barColor??r?.barColor),u=et(c,i,o,g,s,r?.labelColor);a.push(u)}return n&&(a.push(""),a.push(n)),a.join(`
|
|
3
|
-
`)}function rt(t){return t?w[t]("\u2587"):"\u2587"}function et(t,r,e,n,o,s){let i=Math.round(t.value/r*e),a=t.barColor?w[t.barColor](n.repeat(i)):n.repeat(i),c=t.color??s;return`${w.bold(c?w[c](t.label.padEnd(12)):t.label.padEnd(12))} ${a} ${t.value.toLocaleString()}${o?` ${o}`:""}`}import G from"chalk";import{Command as dt}from"commander";import{Listr as U}from"listr2";var d={SUCCESS:0,GENERAL_ERROR:1,USAGE_ERROR:2,WORKSPACE_ERROR:64,VALIDATION_ERROR:65,CONFIG_ERROR:66,IO_ERROR:74,PERMISSION_ERROR:77,CANCELLED:130};var m=class extends Error{exitCode;category;hint;cause;constructor(r,e=d.GENERAL_ERROR,n={}){super(r),this.name="CliError",this.exitCode=e,this.category=n.category??"process",this.hint=n.hint,this.cause=n.cause}toUserMessage(){let r=[this.message];return this.hint&&r.push(`
|
|
4
|
-
Hint: ${this.hint}`),r.join("")}};var f=class extends m{path;constructor(r,e,n={}){super(r,d.IO_ERROR,{...n,category:"io"}),this.name="IoError",this.path=e}},y=class extends m{constructor(r="Operation cancelled",e={}){super(r,d.CANCELLED,{...e,category:"user"}),this.name="CancelledError"}};import h from"chalk";var nt=t=>t instanceof Error,nr=t=>nt(t)?t.message:typeof t=="string"?t:String(t),W=["ExitPromptError"],I=["Cancelled prompt","User force closed"],ot=["ERR_USE_AFTER_CLOSE"],st=t=>W.some(r=>t.name?.includes(r))||I.some(r=>t.message?.includes(r)),$=t=>{if(t instanceof y)return!0;if(!(t instanceof Error))return!1;if(W.some(e=>t.name?.includes(e))||I.some(e=>t.message?.includes(e)))return!0;let r=t.code;return r&&ot.includes(r)?st(t):!1},it=t=>{if(t instanceof m){let e=t.toUserMessage();return h.red(`\u2716 ${e||"Unexpected error (re-run with DEBUG=true for details)"}`)}if(t instanceof Error){let e=t.message?.trim();return e?h.red(`\u2716 ${e}`):h.red("\u2716 Unexpected error (re-run with DEBUG=true for details)")}let r=String(t);return!r||r==="undefined"||r==="null"?h.red("\u2716 Unexpected error (re-run with DEBUG=true for details)"):h.red(`\u2716 ${r}`)},at=t=>t instanceof m?t.exitCode:$(t)?d.CANCELLED:d.GENERAL_ERROR,J=(t,r={})=>{let{debug:e=process.env.DEBUG==="true",exit:n=!0,logger:o=console.error}=r;if($(t)){n&&process.exit(d.CANCELLED);return}o(it(t)),e&&t instanceof Error&&t.stack&&o(h.dim(t.stack)),t instanceof m&&t.hint&&o(h.dim(`
|
|
5
|
-
Hint: ${t.hint}`)),n&&process.exit(at(t))};import{existsSync as S,unlinkSync as ct,writeFileSync as pt}from"fs";import{join as T}from"path";import L from"chokidar";var lt=".pf-generating",P=t=>T(t,lt),gt=t=>S(P(t)),D=t=>{pt(P(t),"","utf-8")},A=t=>{let r=P(t);S(r)&&ct(r)},ut=t=>{let e=R(t)?.pf?.paths;return e?Object.values(e).filter(n=>typeof n=="object"&&n!==null&&n.watch===!0&&typeof n.path=="string"):[{path:"services"},{path:"apps"}]},mr=(t,r)=>{A(t);let e=ut(t),n=P(t),s=j(async()=>{gt(t)||await r()},300),i=[];i.push(L.watch(n,{ignoreInitial:!0}).on("unlink",()=>r()));for(let a of e){let c=T(t,a.path);if(!S(c))continue;let g=a.ignorePatterns?.map(u=>T(c,u));i.push(L.watch(c,{ignoreInitial:!0,depth:0,ignored:g,usePolling:!0,interval:1e3}).on("addDir",u=>u!==c&&s()).on("unlinkDir",u=>u!==c&&s())),i.push(L.watch(`${c}/*/package.json`,{ignoreInitial:!0,ignored:g}).on("add",s).on("change",s))}return async()=>{await Promise.all(i.map(a=>a.close()))}};var yr=t=>{let r=new dt(t.name).description(t.description).showHelpAfterError();t.arguments?.forEach(([o,s])=>{r.argument(o,s)}),t.options?.forEach(([o,s])=>{r.option(o,s)}),t.exampleUsage&&r.addHelpText("after",()=>`${G.cyan.bold(`
|
|
6
|
-
Example:`)}
|
|
7
|
-
${G.bold(t.exampleUsage)}
|
|
8
|
-
`),t.additionalInfo&&r.addHelpText("after",()=>(typeof t.additionalInfo=="function"?t.additionalInfo():t.additionalInfo)??"");let e=async o=>{t.prompts?.length&&await new U(B(t.prompts,o),{rendererOptions:{lazy:!0,showErrorMessage:!1}}).run(o),await new U(B(t.actions,o),{rendererOptions:{lazy:!1,showErrorMessage:!1}}).run(o),await t.onComplete?.(o)},n=()=>{try{let o=k();return D(o),o}catch{return null}};return r.action(async(...o)=>{let s=null;try{let i=o.at(-1),a=i.args,c=i.opts(),g=mt(t.arguments??[],a);if(t.shouldSkipWorkflow?.(g,c)){await t.onSkipWorkflow?.(g,c);return}let u=await t.buildContext(g,c);s=n(),await e(u)}catch(i){J(i,{exit:!1})}finally{s&&A(s)}}),r},mt=(t,r)=>{let e=t.map(([n],o)=>[n.replace(/[<>[\]]/g,""),r[o]]);return Object.fromEntries(e)},B=(t,r)=>t.map(e=>typeof e=="function"?e(r):e);var j=(t,r)=>{let e=null;return(...n)=>(e&&clearTimeout(e),new Promise(o=>{e=setTimeout(async()=>{e=null,await t(...n),o()},r)}))};import{execaSync as ft}from"execa";function kt(t){try{return ft(t,["--version"]),!0}catch{return!1}}import{readdirSync as ht}from"fs";import{dirname as V,isAbsolute as xt,join as Et,normalize as Ct,resolve as M,sep as wt}from"path";import{fileURLToPath as yt}from"url";import{packageUpSync as Pt}from"package-up";import{rimraf as Rt}from"rimraf";var vt=()=>typeof import.meta?.url=="string"?yt(import.meta.url):process.cwd(),$r=t=>{let r=K()?k():process.cwd();return Et(r,t)},Jr=(t,r)=>{if(!t||!t.trim())throw new f("Path cannot be empty",t,{hint:"Provide a valid relative path"});if(xt(t))throw new f(`Absolute paths are not allowed: ${t}`,t,{hint:"Use a relative path within the target directory"});if(/^[a-zA-Z]:/.test(t))throw new f(`Windows drive paths are not allowed: ${t}`,t,{hint:"Use a relative path within the target directory"});if(t.replace(/\\/g,"/").split("/").some(a=>a===".."))throw new f(`Path traversal not allowed: ${t}`,t,{hint:"The path must stay within the target directory"});let 
o=Ct(t),s=M(r),i=M(r,o);if(!i.startsWith(s+wt)&&i!==s)throw new f(`Path escapes target directory: ${t}`,t,{hint:"The path must stay within the target directory"});return i},Dr=t=>{try{return ht(t).length===0}catch{return!0}};function H(){let t=Pt({cwd:V(vt())});if(!t)throw new Error("Could not find package.json");return V(t)}var jr=t=>Rt(t);import x from"chalk";var z={debug:0,info:1,warn:2,error:3,silent:4},bt=t=>t==="debug"||t==="info"||t==="warn"||t==="error"||t==="silent",Ot=()=>{let t=process.env.LOG_LEVEL;return bt(t)?t:process.env.DEBUG==="true"||process.env.VERBOSE==="true"?"debug":"info"},v=t=>{let r=Ot();return z[t]>=z[r]};var l={logs:[],breakLine:()=>(console.log(),l),success:(t,...r)=>(console.log(x.green(`\u2714 ${t}`),...r),l),info:(t,...r)=>(v("info")&&console.log(x.cyan(`\u{1F6C8} ${t}`),...r),l),warn:(t,...r)=>(v("warn")&&console.warn(x.yellow(`\u26A0\uFE0E ${t}`),...r),l),error:(t,...r)=>(v("error")&&console.error(x.red(`\u2716 ${t}`),...r),l),debug:(t,...r)=>(v("debug")&&console.log(x.dim(`[debug] ${t}`),...r),l),log:(t,...r)=>(console.log(t,...r),l.logs.push({message:t,context:r.join()}),l),getStoredLogs:t=>t?l.logs.filter(r=>r.context?.includes(t)):l.logs,storeLog:(t,r)=>l.logs.push({message:t,context:r})};import Lt from"chalk";async function Hr(t,r){let e=Date.now(),n=t.title,o=()=>Math.floor((Date.now()-e)/1e3),s=setInterval(()=>{o()>1&&(t.title=`${n} (${o()}s)`)},1e3);try{return await r(o)}finally{clearInterval(s)}}async function Kr(t,r,e){for(let n=0;n<r.length;n++){let o=r[n];t.output=Lt.dim(`[${n+1}/${r.length}] ${o.title}`),await o.fn(e??{},t)}}var F=t=>JSON.parse(q(t,"utf-8")),Nt=F,Ft=(t,r,e)=>St(t,`${JSON.stringify(r,null,2)}
|
|
9
|
-
`,"utf-8"),_t=["@crossdelta/cloudevents"],E={services:"services",apps:"apps",packages:"packages",contracts:"packages/contracts"},Wt={docs:{base:["service.md"],frameworks:{}},serviceTypes:{hono:{commandType:"hono-micro",entryPoint:"src/index.ts",skipFiles:[]},nest:{commandType:"nest-micro",entryPoint:"src/main.ts",skipFiles:[]}}},O=t=>{if(!C(t))return null;try{return JSON.parse(q(t,"utf-8"))}catch{return null}},b=(t,r)=>typeof t=="string"?t:typeof t=="object"&&t!==null&&"path"in t?t.path:r,It=t=>t.split("/").pop()||"workspace",$t=t=>{let r=t.slice(0,t.lastIndexOf("/"));return r===t?null:r},Y=t=>t.split("."),Jt=(t,r)=>{if(!t.workspaces)return!1;if(t.pf||C(p(r,"turbo.json")))return!0;let e=_.some(o=>C(p(r,o))),n=C(p(r,"infra"));return e&&n},Dt=t=>{let r=O(p(t,"package.json"));return r?Jt(r,t):!1},Z=t=>{let r=t;for(;r;){if(Dt(r))return r;r=$t(r)}return null},K=()=>Z(process.cwd())!==null,k=()=>{let t=Z(process.cwd());if(!t)throw new Error(`
|
|
10
|
-
\x1B[31m\u2716\x1B[0m Not in a workspace directory
|
|
11
|
-
|
|
12
|
-
Current directory: ${process.cwd()}
|
|
13
|
-
|
|
14
|
-
This command must be run from within a workspace created with \x1B[36mpf new workspace\x1B[0m
|
|
15
|
-
|
|
16
|
-
To create a new workspace, run:
|
|
17
|
-
|
|
18
|
-
\x1B[36mpf new workspace my-platform\x1B[0m
|
|
19
|
-
`);return t},R=t=>{let r=t??k();return O(p(r,"package.json"))},Q=t=>{let r=R(t);if(!r?.pf?.paths)return E;let{paths:e}=r.pf;return{services:b(e.services,E.services),apps:b(e.apps,E.apps),packages:b(e.packages,E.packages),contracts:b(e.contracts,E.contracts)}},le=t=>{let r=R(t);return{plugins:r?.pf?.plugins??_t,dev:{filter:r?.pf?.dev?.filter??[]}}},ge=t=>{let r=t??k(),e=Q(r),n=p(r,e.contracts),s=O(p(n,"package.json"))?.name??`${jt()}/contracts`;return{packagePath:n,eventsPath:p(n,"src","events"),indexPath:p(n,"src","index.ts"),relativePath:e.contracts,packageName:s}},jt=()=>{let t=k(),r=O(p(t,"package.json")),e=`@${It(t)}`;return r?.name?r.name.startsWith("@")?r.name.split("/")[0]:`@${r.name}`:e},Gt=(t,r)=>{let e=Y(r),n=t;for(let o of e){if(n===null||typeof n!="object"||!(o in n))return;n=n[o]}return n},Ut=(t,r,e)=>{let n=Y(r),o=n[n.length-1],s=t;for(let i of n.slice(0,-1))(!(i in s)||typeof s[i]!="object"||s[i]===null)&&(s[i]={}),s=s[i];s[o]=e},ue=(t,r=process.cwd())=>{let e=F(p(r,"package.json"));return Gt(e,t)},de=(t,r,e=process.cwd())=>{let n=p(e,"package.json"),o=F(n);Ut(o,t,r),Ft(n,o,{spaces:2,EOL:`
|
|
20
|
-
`,encoding:"utf-8"})},Bt=t=>{let r=H();return Nt(At(r,t))},N=null,X=()=>(N||(N=Bt("package.json")),N),me=new Proxy({},{get:(t,r)=>X()[r]}),fe=()=>X().generatorConfig??Wt,ke=t=>{let r=t??k(),e=Q(r),n=p(r,e.services);if(!C(n))return[];try{return Tt(n,{withFileTypes:!0}).filter(s=>s.isDirectory()&&!s.name.startsWith(".")).map(s=>`${e.services}/${s.name}`).sort()}catch{return[]}};export{tt as a,d as b,nt as c,nr as d,$ as e,J as f,Ht as g,qt as h,lt as i,mr as j,yr as k,j as l,kt as m,$r as n,Jr as o,Dr as p,jr as q,l as r,Hr as s,Kr as t,K as u,k as v,R as w,Q as x,le as y,ge as z,jt as A,ue as B,de as C,Bt as D,X as E,me as F,fe as G,ke as H};
|
package/bin/config-CKQHYOF4.mjs
DELETED
|
@@ -1,2 +0,0 @@
|
|
|
1
|
-
#!/usr/bin/env node
|
|
2
|
-
import{A as g,B as h,C as i,D as j,E as k,F as l,G as m,H as n,u as a,v as b,w as c,x as d,y as e,z as f}from"./chunk-634PL24Z.mjs";export{n as discoverAvailableServices,b as findWorkspaceRoot,f as getContractsConfig,m as getGeneratorConfig,h as getPackageJsonField,e as getPfConfig,k as getPkgJson,g as getRootPackageScope,c as getWorkspacePackageJson,d as getWorkspacePathsConfig,a as isInWorkspace,l as pkgJson,j as readPackageConfig,i as updatePackageJsonField};
|
|
@@ -1,70 +0,0 @@
|
|
|
1
|
-
# CI/CD Workflows
|
|
2
|
-
|
|
3
|
-
This project includes GitHub Actions workflows for continuous integration and deployment.
|
|
4
|
-
|
|
5
|
-
## Workflows
|
|
6
|
-
|
|
7
|
-
### Pull Request Checks (`lint-and-tests.yml`)
|
|
8
|
-
|
|
9
|
-
Runs on every pull request to `main`:
|
|
10
|
-
- Lints the codebase (`bun lint`)
|
|
11
|
-
- Runs tests (`bun test`)
|
|
12
|
-
- Uses dependency caching for faster builds
|
|
13
|
-
|
|
14
|
-
### Build and Deploy (`build-and-deploy.yml`)
|
|
15
|
-
|
|
16
|
-
Runs on pushes to `main` and after package publishing:
|
|
17
|
-
- Builds Docker images for changed scopes (apps/services)
|
|
18
|
-
- Pushes images to GitHub Container Registry (GHCR)
|
|
19
|
-
- Deploys infrastructure using Pulumi
|
|
20
|
-
|
|
21
|
-
## Required Secrets
|
|
22
|
-
|
|
23
|
-
Configure these secrets in your GitHub repository settings:
|
|
24
|
-
|
|
25
|
-
| Secret | Description |
|
|
26
|
-
|--------|-------------|
|
|
27
|
-
| `PULUMI_ACCESS_TOKEN` | Pulumi Cloud access token for infrastructure deployment |
|
|
28
|
-
| `DIGITALOCEAN_TOKEN` | DigitalOcean API token for DOKS/spaces access |
|
|
29
|
-
|
|
30
|
-
## Required Variables
|
|
31
|
-
|
|
32
|
-
Configure these variables in your GitHub repository settings:
|
|
33
|
-
|
|
34
|
-
| Variable | Description | Example |
|
|
35
|
-
|----------|-------------|---------|
|
|
36
|
-
| `PULUMI_STACK_BASE` | Base name for Pulumi stacks | `myorg/myproject` |
|
|
37
|
-
|
|
38
|
-
## Automatic Permissions
|
|
39
|
-
|
|
40
|
-
These are handled automatically via `permissions` in workflows:
|
|
41
|
-
- `GITHUB_TOKEN` - GitHub-provided token for GHCR push and API access
|
|
42
|
-
|
|
43
|
-
## Custom Actions
|
|
44
|
-
|
|
45
|
-
The workflows use these local actions:
|
|
46
|
-
|
|
47
|
-
| Action | Purpose |
|
|
48
|
-
|--------|---------|
|
|
49
|
-
| `setup-bun-install` | Setup Bun runtime with caching |
|
|
50
|
-
| `generate-scope-matrix` | Discover Docker-enabled scopes for matrix builds |
|
|
51
|
-
| `check-image-tag-exists` | Skip builds if image tag already exists in GHCR |
|
|
52
|
-
| `check-path-changes` | Detect file changes for conditional steps |
|
|
53
|
-
| `prepare-build-context` | Flatten turbo prune output for Docker builds |
|
|
54
|
-
| `resolve-scope-tags` | Map scope names to image tags for deployment |
|
|
55
|
-
| `detect-skipped-services` | Find services marked with `skip: true` in infra config |
|
|
56
|
-
|
|
57
|
-
## Infrastructure Configuration
|
|
58
|
-
|
|
59
|
-
Each service in `infra/services/*.ts` can be configured with:
|
|
60
|
-
|
|
61
|
-
```typescript
|
|
62
|
-
const config: K8sServiceConfig = {
|
|
63
|
-
name: 'my-service',
|
|
64
|
-
containerPort: 4001,
|
|
65
|
-
skip: false, // Set to true to skip deployment
|
|
66
|
-
// ... other config
|
|
67
|
-
}
|
|
68
|
-
```
|
|
69
|
-
|
|
70
|
-
Services with `skip: true` will be excluded from deployment.
|
|
@@ -1,27 +0,0 @@
|
|
|
1
|
-
name: Check image tag exists
|
|
2
|
-
inputs:
|
|
3
|
-
scope-short-name:
|
|
4
|
-
description: Short directory name of the scope (e.g., storefront)
|
|
5
|
-
required: true
|
|
6
|
-
image-tag:
|
|
7
|
-
description: Checksum-based image tag to look for
|
|
8
|
-
required: true
|
|
9
|
-
github-token:
|
|
10
|
-
description: GitHub token with read:packages to query GHCR
|
|
11
|
-
required: true
|
|
12
|
-
repository-owner:
|
|
13
|
-
description: GitHub organization or user that owns the container package
|
|
14
|
-
required: false
|
|
15
|
-
repository-prefix:
|
|
16
|
-
description: Prefix used for GHCR packages (defaults to $GHCR_REPOSITORY_PREFIX or "platform")
|
|
17
|
-
required: false
|
|
18
|
-
max-pages:
|
|
19
|
-
description: Maximum number of pagination pages to inspect (100 tags each)
|
|
20
|
-
required: false
|
|
21
|
-
default: '5'
|
|
22
|
-
outputs:
|
|
23
|
-
exists:
|
|
24
|
-
description: 'true if the tag already exists in GHCR'
|
|
25
|
-
runs:
|
|
26
|
-
using: node20
|
|
27
|
-
main: index.js
|
|
@@ -1,179 +0,0 @@
|
|
|
1
|
-
/**
|
|
2
|
-
* Check Image Tag Exists Action
|
|
3
|
-
*
|
|
4
|
-
* Checks if a specific Docker image tag already exists in GitHub Container Registry (GHCR).
|
|
5
|
-
* Used to skip redundant builds when the image checksum hasn't changed.
|
|
6
|
-
*
|
|
7
|
-
* @example
|
|
8
|
-
* - uses: ./.github/actions/check-image-tag-exists
|
|
9
|
-
* with:
|
|
10
|
-
* scope-short-name: storefront
|
|
11
|
-
* image-tag: abc123def456
|
|
12
|
-
* github-token: ${{ secrets.GITHUB_TOKEN }}
|
|
13
|
-
*
|
|
14
|
-
* @outputs exists - 'true' if the tag exists, 'false' otherwise
|
|
15
|
-
*/
|
|
16
|
-
|
|
17
|
-
const { appendFileSync } = require('node:fs')
|
|
18
|
-
const { exit } = require('node:process')
|
|
19
|
-
|
|
20
|
-
/**
|
|
21
|
-
* Builds environment variable keys for GitHub Actions inputs.
|
|
22
|
-
* @param {string} name - Input name (e.g., 'scope-short-name')
|
|
23
|
-
* @returns {string[]} Possible environment variable keys
|
|
24
|
-
*/
|
|
25
|
-
const buildInputKeys = (name) => {
|
|
26
|
-
const trimmed = name.trim()
|
|
27
|
-
const upper = trimmed.toUpperCase()
|
|
28
|
-
const normalized = upper.replace(/[^A-Z0-9]+/g, '_')
|
|
29
|
-
return Array.from(new Set([`INPUT_${upper}`, `INPUT_${normalized}`]))
|
|
30
|
-
}
|
|
31
|
-
|
|
32
|
-
/**
|
|
33
|
-
* Retrieves a GitHub Actions input value from environment variables.
|
|
34
|
-
* @param {string} name - Input name as defined in action.yml
|
|
35
|
-
* @param {Object} options - Options
|
|
36
|
-
* @param {boolean} [options.required=false] - Whether the input is required
|
|
37
|
-
* @param {string} [options.defaultValue=''] - Default value if input is not set
|
|
38
|
-
* @returns {string} The input value
|
|
39
|
-
*/
|
|
40
|
-
const getInput = (name, { required = false, defaultValue = '' } = {}) => {
|
|
41
|
-
const keys = buildInputKeys(name)
|
|
42
|
-
const raw = keys.map((key) => process.env[key]).find((value) => typeof value === 'string')
|
|
43
|
-
const value = (typeof raw === 'string' ? raw : defaultValue).trim()
|
|
44
|
-
|
|
45
|
-
if (required && !value) {
|
|
46
|
-
console.error(`Input "${name}" is required`)
|
|
47
|
-
exit(1)
|
|
48
|
-
}
|
|
49
|
-
|
|
50
|
-
return value
|
|
51
|
-
}
|
|
52
|
-
|
|
53
|
-
/**
|
|
54
|
-
* Sets a GitHub Actions output value.
|
|
55
|
-
* @param {string} name - Output name
|
|
56
|
-
* @param {string} value - Output value
|
|
57
|
-
*/
|
|
58
|
-
const setOutput = (name, value) => {
|
|
59
|
-
const outputFile = process.env.GITHUB_OUTPUT
|
|
60
|
-
if (outputFile) {
|
|
61
|
-
appendFileSync(outputFile, `${name}=${value}\n`)
|
|
62
|
-
} else {
|
|
63
|
-
console.log(`${name}=${value}`)
|
|
64
|
-
}
|
|
65
|
-
}
|
|
66
|
-
|
|
67
|
-
// Required inputs
|
|
68
|
-
const scopeShortName = getInput('scope-short-name', { required: true })
|
|
69
|
-
const imageTag = getInput('image-tag', { required: true })
|
|
70
|
-
const githubToken = getInput('github-token', { required: true })
|
|
71
|
-
|
|
72
|
-
// Optional inputs - defaults from environment for portability
|
|
73
|
-
const repositoryOwner = getInput('repository-owner', { defaultValue: process.env.GITHUB_REPOSITORY_OWNER })
|
|
74
|
-
const repositoryPrefixInput = getInput('repository-prefix')
|
|
75
|
-
const repositoryPrefix =
|
|
76
|
-
repositoryPrefixInput || process.env.GHCR_REPOSITORY_PREFIX || process.env.GITHUB_REPOSITORY?.split('/')[1] || 'platform'
|
|
77
|
-
const maxPages = Number(getInput('max-pages', { defaultValue: '5' }))
|
|
78
|
-
|
|
79
|
-
// GitHub API request headers
|
|
80
|
-
const headers = {
|
|
81
|
-
Authorization: `Bearer ${githubToken}`,
|
|
82
|
-
Accept: 'application/vnd.github+json',
|
|
83
|
-
'X-GitHub-Api-Version': '2022-11-28',
|
|
84
|
-
}
|
|
85
|
-
|
|
86
|
-
// Construct the full package name (e.g., 'platform/storefront')
|
|
87
|
-
const packageName = `${repositoryPrefix}/${scopeShortName}`
|
|
88
|
-
const encodedPackage = encodeURIComponent(packageName)
|
|
89
|
-
|
|
90
|
-
/**
|
|
91
|
-
* Fetches a page of container versions from the GitHub Packages API.
|
|
92
|
-
* @param {number} page - Page number (1-indexed)
|
|
93
|
-
* @returns {Promise<Object>} Result with versions array, hasMore flag, and notFound flag
|
|
94
|
-
*/
|
|
95
|
-
const fetchVersions = async (page) => {
|
|
96
|
-
const url = `https://api.github.com/orgs/${repositoryOwner}/packages/container/${encodedPackage}/versions?per_page=100&page=${page}`
|
|
97
|
-
const response = await fetch(url, { headers })
|
|
98
|
-
|
|
99
|
-
// 404 means the package doesn't exist yet (first build)
|
|
100
|
-
if (response.status === 404) {
|
|
101
|
-
return { notFound: true, versions: [], hasMore: false }
|
|
102
|
-
}
|
|
103
|
-
|
|
104
|
-
if (!response.ok) {
|
|
105
|
-
const body = await response.text()
|
|
106
|
-
throw new Error(`GitHub API error (${response.status}): ${body}`)
|
|
107
|
-
}
|
|
108
|
-
|
|
109
|
-
const data = await response.json()
|
|
110
|
-
const versions = Array.isArray(data) ? data : []
|
|
111
|
-
return {
|
|
112
|
-
versions,
|
|
113
|
-
hasMore: versions.length === 100, // Full page means there might be more
|
|
114
|
-
notFound: false,
|
|
115
|
-
}
|
|
116
|
-
}
|
|
117
|
-
|
|
118
|
-
/**
|
|
119
|
-
* Checks if the target tag exists in a list of container versions.
|
|
120
|
-
* @param {Object[]} versions - Array of version objects from GitHub API
|
|
121
|
-
* @param {string} targetTag - The tag to search for
|
|
122
|
-
* @returns {boolean} True if tag is found
|
|
123
|
-
*/
|
|
124
|
-
const tagExistsInVersions = (versions, targetTag) => {
|
|
125
|
-
for (const version of versions) {
|
|
126
|
-
const tags = version?.metadata?.container?.tags
|
|
127
|
-
if (!Array.isArray(tags)) continue
|
|
128
|
-
if (tags.includes(targetTag)) {
|
|
129
|
-
return true
|
|
130
|
-
}
|
|
131
|
-
}
|
|
132
|
-
return false
|
|
133
|
-
}
|
|
134
|
-
|
|
135
|
-
/**
|
|
136
|
-
* Checks if the image tag exists in GHCR by paginating through all versions.
|
|
137
|
-
* @returns {Promise<boolean>} True if tag exists
|
|
138
|
-
*/
|
|
139
|
-
const check = async () => {
|
|
140
|
-
const targetTag = imageTag.trim()
|
|
141
|
-
|
|
142
|
-
// Paginate through versions until we find the tag or run out of pages
|
|
143
|
-
for (let page = 1; page <= maxPages; page += 1) {
|
|
144
|
-
const { versions, hasMore, notFound } = await fetchVersions(page)
|
|
145
|
-
|
|
146
|
-
// Package doesn't exist yet - tag definitely doesn't exist
|
|
147
|
-
if (notFound) {
|
|
148
|
-
return false
|
|
149
|
-
}
|
|
150
|
-
|
|
151
|
-
// Check if target tag is in this page
|
|
152
|
-
if (tagExistsInVersions(versions, targetTag)) {
|
|
153
|
-
return true
|
|
154
|
-
}
|
|
155
|
-
|
|
156
|
-
// No more pages to check
|
|
157
|
-
if (!hasMore) {
|
|
158
|
-
break
|
|
159
|
-
}
|
|
160
|
-
}
|
|
161
|
-
|
|
162
|
-
return false
|
|
163
|
-
}
|
|
164
|
-
|
|
165
|
-
/**
|
|
166
|
-
* Main entry point - runs the check and outputs the result.
|
|
167
|
-
*/
|
|
168
|
-
const run = async () => {
|
|
169
|
-
try {
|
|
170
|
-
const exists = await check()
|
|
171
|
-
setOutput('exists', String(exists))
|
|
172
|
-
} catch (error) {
|
|
173
|
-
console.error('Failed to check image tag existence:', error)
|
|
174
|
-
setOutput('exists', 'false')
|
|
175
|
-
exit(1)
|
|
176
|
-
}
|
|
177
|
-
}
|
|
178
|
-
|
|
179
|
-
run()
|
|
@@ -1,21 +0,0 @@
|
|
|
1
|
-
name: Detect path changes
|
|
2
|
-
description: Reports whether the specified paths changed between two git references.
|
|
3
|
-
|
|
4
|
-
inputs:
|
|
5
|
-
paths:
|
|
6
|
-
description: Newline or comma-delimited list of paths to check.
|
|
7
|
-
required: true
|
|
8
|
-
base-ref:
|
|
9
|
-
description: Base git ref/sha to diff from. Falls back to GITHUB_EVENT_BEFORE or parent commit.
|
|
10
|
-
default: ''
|
|
11
|
-
head-ref:
|
|
12
|
-
description: Head git ref/sha to diff against. Falls back to GITHUB_SHA.
|
|
13
|
-
default: ''
|
|
14
|
-
|
|
15
|
-
outputs:
|
|
16
|
-
changed:
|
|
17
|
-
description: 'true' if any of the provided paths changed.
|
|
18
|
-
|
|
19
|
-
runs:
|
|
20
|
-
using: node20
|
|
21
|
-
main: index.js
|
|
@@ -1,192 +0,0 @@
|
|
|
1
|
-
/**
|
|
2
|
-
* Check Path Changes Action
|
|
3
|
-
*
|
|
4
|
-
* Detects whether specified paths have changed between two git references.
|
|
5
|
-
* Useful for conditional workflow steps based on file changes.
|
|
6
|
-
*
|
|
7
|
-
* @example
|
|
8
|
-
* - uses: ./.github/actions/check-path-changes
|
|
9
|
-
* with:
|
|
10
|
-
* paths: infra,packages/cloudevents
|
|
11
|
-
* base-ref: main
|
|
12
|
-
*
|
|
13
|
-
* @outputs changed - 'true' if any of the provided paths changed
|
|
14
|
-
*/
|
|
15
|
-
|
|
16
|
-
const { appendFileSync } = require('node:fs')
|
|
17
|
-
const { spawnSync } = require('node:child_process')
|
|
18
|
-
|
|
19
|
-
/**
|
|
20
|
-
* Builds environment variable keys for GitHub Actions inputs.
|
|
21
|
-
* @param {string} name - Input name
|
|
22
|
-
* @returns {string[]} Possible environment variable keys
|
|
23
|
-
*/
|
|
24
|
-
const buildInputKeys = (name) => {
|
|
25
|
-
const trimmed = name.trim()
|
|
26
|
-
const upper = trimmed.toUpperCase()
|
|
27
|
-
const normalized = upper.replace(/[^A-Z0-9]+/g, '_')
|
|
28
|
-
return Array.from(new Set([`INPUT_${upper}`, `INPUT_${normalized}`]))
|
|
29
|
-
}
|
|
30
|
-
|
|
31
|
-
/**
|
|
32
|
-
* Retrieves a GitHub Actions input value from environment variables.
|
|
33
|
-
* @param {string} name - Input name
|
|
34
|
-
* @param {Object} options - Options
|
|
35
|
-
* @param {boolean} [options.required=false] - Whether the input is required
|
|
36
|
-
* @param {string} [options.defaultValue=''] - Default value if not set
|
|
37
|
-
* @returns {string} The input value
|
|
38
|
-
*/
|
|
39
|
-
const getInput = (name, { required = false, defaultValue = '' } = {}) => {
|
|
40
|
-
const keys = buildInputKeys(name)
|
|
41
|
-
const raw = keys.map((key) => process.env[key]).find((value) => typeof value === 'string')
|
|
42
|
-
const value = typeof raw === 'string' ? raw.trim() : defaultValue
|
|
43
|
-
|
|
44
|
-
if (required && !value) {
|
|
45
|
-
console.error(`Input "${name}" is required`)
|
|
46
|
-
process.exit(1)
|
|
47
|
-
}
|
|
48
|
-
|
|
49
|
-
return value
|
|
50
|
-
}
|
|
51
|
-
|
|
52
|
-
/**
|
|
53
|
-
* Sets a GitHub Actions output value.
|
|
54
|
-
* @param {string} name - Output name
|
|
55
|
-
* @param {string|object} value - Output value
|
|
56
|
-
*/
|
|
57
|
-
const setOutput = (name, value) => {
|
|
58
|
-
const outputFile = process.env.GITHUB_OUTPUT
|
|
59
|
-
const stringValue = typeof value === 'string' ? value : JSON.stringify(value)
|
|
60
|
-
if (outputFile) {
|
|
61
|
-
appendFileSync(outputFile, `${name}=${stringValue}\n`)
|
|
62
|
-
} else {
|
|
63
|
-
console.log(`${name}=${stringValue}`)
|
|
64
|
-
}
|
|
65
|
-
}
|
|
66
|
-
|
|
67
|
-
/**
|
|
68
|
-
* Parses a comma/newline-separated list of paths.
|
|
69
|
-
* @param {string} value - Input string
|
|
70
|
-
* @returns {string[]} Array of paths
|
|
71
|
-
*/
|
|
72
|
-
const parseList = (value) => {
|
|
73
|
-
if (!value) return []
|
|
74
|
-
return value
|
|
75
|
-
.split(/[^a-zA-Z0-9._\-\/]+/)
|
|
76
|
-
.map((entry) => entry.trim())
|
|
77
|
-
.filter(Boolean)
|
|
78
|
-
}
|
|
79
|
-
|
|
80
|
-
/**
|
|
81
|
-
* Runs a git command and returns stdout.
|
|
82
|
-
* @param {string[]} args - Git command arguments
|
|
83
|
-
* @returns {string} Command output
|
|
84
|
-
*/
|
|
85
|
-
const runGit = (args) => {
|
|
86
|
-
const result = spawnSync('git', args, { encoding: 'utf8' })
|
|
87
|
-
if (result.error) {
|
|
88
|
-
throw result.error
|
|
89
|
-
}
|
|
90
|
-
if (result.status !== 0) {
|
|
91
|
-
throw new Error(`git ${args.join(' ')} failed: ${result.stderr || result.stdout}`)
|
|
92
|
-
}
|
|
93
|
-
return result.stdout.trim()
|
|
94
|
-
}
|
|
95
|
-
|
|
96
|
-
/**
 * Determines the head commit ref to diff against.
 *
 * Preference order: explicit input, then the GITHUB_SHA environment
 * variable, then `git rev-parse HEAD` as a last resort.
 * @param {string} headRefInput - User-provided head ref (may be empty)
 * @returns {string} Resolved head ref
 */
const resolveHeadRef = (headRefInput) => {
  const candidate = headRefInput || process.env.GITHUB_SHA
  if (candidate) {
    return candidate
  }
  return runGit(['rev-parse', 'HEAD'])
}
|
|
105
|
-
|
|
106
|
-
/**
 * Checks whether a ref is absent or the all-zero (null) SHA that GitHub
 * uses to signal "no previous commit" (e.g. branch creation events).
 * @param {string} ref - Git reference
 * @returns {boolean} True when ref is falsy or consists solely of zeros
 */
const isZeroRef = (ref) => {
  if (!ref) {
    return true
  }
  return /^0+$/.test(ref)
}
|
|
112
|
-
|
|
113
|
-
/**
 * Resolves the base ref to compare the head ref against.
 *
 * Tries, in order: the explicit input, the push event's "before" SHA
 * (GITHUB_EVENT_BEFORE), the head commit's first parent, and finally the
 * head ref itself — which yields an empty diff, i.e. "no changes".
 * @param {string} baseRefInput - User-provided base ref
 * @param {string} headRef - Resolved head ref
 * @returns {string} Base ref to use
 */
const resolveBaseRef = (baseRefInput, headRef) => {
  for (const candidate of [baseRefInput, process.env.GITHUB_EVENT_BEFORE]) {
    if (candidate && !isZeroRef(candidate)) {
      return candidate
    }
  }

  // No usable "before" SHA; fall back to the head commit's parent.
  try {
    const parent = runGit(['rev-parse', `${headRef}^`])
    if (parent) {
      return parent
    }
  } catch {
    // No parent exists (e.g. initial commit) — fall through to headRef.
  }

  return headRef
}
|
|
140
|
-
|
|
141
|
-
/**
 * Reports whether any files under a path differ between two commits.
 * @param {string} baseRef - Base commit ref
 * @param {string} headRef - Head commit ref
 * @param {string} targetPath - Path (git pathspec) to inspect
 * @returns {boolean} True if `git diff` lists at least one changed file
 * @throws {Error} When git cannot be spawned or the diff command fails
 */
const pathChanged = (baseRef, headRef, targetPath) => {
  const args = ['diff', '--name-only', baseRef, headRef, '--', targetPath]
  const proc = spawnSync('git', args, { encoding: 'utf8' })

  if (proc.error) {
    // Spawn-level failure (e.g. git binary not found).
    throw proc.error
  }
  if (proc.status !== 0) {
    throw new Error(proc.stderr || `git diff exited with status ${proc.status}`)
  }

  return proc.stdout.trim().length > 0
}
|
|
160
|
-
|
|
161
|
-
/**
 * Action entry point: reads the `paths`, `base-ref`, and `head-ref`
 * inputs, resolves the commit range, and emits a `changed` output of
 * "true"/"false" depending on whether any listed path has a diff.
 */
const main = () => {
  const pathsInput = getInput('paths', { required: true })
  const baseRefInput = getInput('base-ref')
  const headRefInput = getInput('head-ref')

  const paths = parseList(pathsInput)
  if (paths.length === 0) {
    console.error('At least one valid path must be provided via the paths input')
    process.exit(1)
  }

  const headRef = resolveHeadRef(headRefInput)
  const baseRef = resolveBaseRef(baseRefInput, headRef)

  // `some` short-circuits on the first path with a diff, like the
  // equivalent break-on-first-match loop.
  const changed = paths.some((targetPath) => pathChanged(baseRef, headRef, targetPath))

  setOutput('changed', changed ? 'true' : 'false')
}
|
|
188
|
-
|
|
189
|
-
// BUG FIX: `main` is a synchronous function, so `main()` returns undefined
// and the previous `main().catch(...)` threw
// `TypeError: Cannot read properties of undefined (reading 'catch')`
// on every run. Use try/catch to report failures from the sync entry point.
try {
  main()
} catch (error) {
  console.error('check-path-changes action failed:', error)
  process.exit(1)
}
|
|
@@ -1,38 +0,0 @@
|
|
|
1
|
-
name: Detect skipped services
description: Detects services with skip:true and outputs them for workflow use
inputs:
  services-dir:
    description: Directory containing service configuration files
    required: false
    default: infra/services
outputs:
  skipped-services:
    description: Comma-separated list of skipped service names
    value: ${{ steps.detect.outputs.skipped_services }}
runs:
  using: composite
  steps:
    - name: Detect skipped services
      id: detect
      shell: bash
      env:
        SERVICES_DIR: ${{ inputs.services-dir }}
      run: |
        set -euo pipefail

        SKIPPED=""
        # Scan each service definition for a `skip: true` flag.
        for file in "$SERVICES_DIR"/*.ts; do
          if [ -f "$file" ] && grep -q "skip:\s*true" "$file" 2>/dev/null; then
            # Extract the first `name: ...` value from the matching file.
            SERVICE_NAME=$(grep -oP "name:\s*['\"]?\K[a-zA-Z0-9_-]+" "$file" | head -1)
            if [ -n "$SERVICE_NAME" ]; then
              if [ -z "$SKIPPED" ]; then
                SKIPPED="$SERVICE_NAME"
              else
                SKIPPED="$SKIPPED,$SERVICE_NAME"
              fi
            fi
          fi
        done

        echo "skipped_services=$SKIPPED" >> "$GITHUB_OUTPUT"
        echo "Detected skipped services: $SKIPPED"
|
|
@@ -1,21 +0,0 @@
|
|
|
1
|
-
name: Generate scope matrix
description: Discover Docker scopes and output JSON matrix entries for changed scopes
inputs:
  scope-roots:
    description: Comma/space separated list of directories containing scope folders
    required: false
  force-scope-short-names:
    description: Optional short-name list to force into the matrix even if unchanged
    required: false
  force-all:
    description: When true, include ALL discovered scopes regardless of changes
    required: false
    default: 'false'
outputs:
  scopes:
    description: JSON array describing scopes for the matrix strategy
  scopes_count:
    description: Number of scopes detected in the matrix
runs:
  using: node20
  main: index.js
|