@omni-oss/create-jobs 0.1.6 → 0.1.8
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +18 -0
- package/dist/create-jobs.cjs +1 -1
- package/dist/create-jobs.d.ts +1 -1
- package/dist/create-jobs.d.ts.map +1 -1
- package/dist/create-jobs.mjs +12 -12
- package/dist/index.cjs +1 -1
- package/dist/index.mjs +1 -1
- package/dist/schemas-BnpD42eF.js +1 -0
- package/dist/{schemas-CeVQC8Uz.mjs → schemas-CySsa0xy.mjs} +56 -52
- package/package.json +2 -2
- package/dist/schemas-CnXDgPym.js +0 -1
- package/project.omni.yaml +0 -29
- package/src/cli/index.ts +0 -32
- package/src/create-jobs.spec.ts +0 -337
- package/src/create-jobs.ts +0 -178
- package/src/index.ts +0 -2
- package/src/schemas.ts +0 -176
- package/tsconfig.json +0 -3
- package/tsconfig.project.json +0 -5
- package/tsconfig.types.json +0 -3
- package/vite.config.ts +0 -30
- package/vitest.config.integration.ts +0 -12
- package/vitest.config.unit.ts +0 -13
package/CHANGELOG.md
CHANGED
|
@@ -2,6 +2,24 @@
|
|
|
2
2
|
All notable changes to this project will be documented in this file. See [conventional commits](https://www.conventionalcommits.org/) for commit guidelines.
|
|
3
3
|
|
|
4
4
|
- - -
|
|
5
|
+
## @omni-oss/create-jobs-v0.1.8 - 2026-02-12
|
|
6
|
+
#### Bug Fixes
|
|
7
|
+
- (**@omni-oss/create-jobs**) output relative paths - (db68af6) - Clarence Manuel
|
|
8
|
+
|
|
9
|
+
- - -
|
|
10
|
+
|
|
11
|
+
## @omni-oss/create-jobs-v0.1.7 - 2026-02-10
|
|
12
|
+
#### Bug Fixes
|
|
13
|
+
- generic publish task - (f5d56c4) - Clarence Manuel
|
|
14
|
+
|
|
15
|
+
- - -
|
|
16
|
+
|
|
17
|
+
## @omni-oss/create-jobs-v0.1.6 - 2026-02-09
|
|
18
|
+
#### Bug Fixes
|
|
19
|
+
- (**@omni-oss/create-jobs**) support create jobs via is_*_task meta data - (d95dbca) - Clarence Manuel
|
|
20
|
+
|
|
21
|
+
- - -
|
|
22
|
+
|
|
5
23
|
## @omni-oss/create-jobs-v0.1.5 - 2026-02-09
|
|
6
24
|
#### Bug Fixes
|
|
7
25
|
- (**@omni-oss/create-jobs**) support sanitizing artifact names - (1ed9932) - Clarence Manuel
|
package/dist/create-jobs.cjs
CHANGED
|
@@ -1,2 +1,2 @@
|
|
|
1
1
|
#!/usr/bin/env bun
|
|
2
|
-
"use strict";const
|
|
2
|
+
"use strict";const r=require("node:fs/promises"),i=require("@commander-js/extra-typings"),s=require("./schemas-BnpD42eF.js"),l=new i.Command;l.argument("<input>","The input file to read from.").option("-o, --output <output>","The output file to write to.").option("-r, --root <root>","Override the workspace root.").action(async(n,e)=>{const a=await r.readFile(n,"utf-8"),u=JSON.parse(a),t=s.TaskResultArraySchema.safeParse(u);if(t.success){const c=t.data,o=s.createJobs(c,e.root);e.output?await r.writeFile(e.output,JSON.stringify(o,null,2)):console.log(o)}else console.error(t.error),process.exit(1)}).parseAsync();
|
package/dist/create-jobs.d.ts
CHANGED
|
@@ -32,5 +32,5 @@ export type PublishJobs = {
|
|
|
32
32
|
generic: Job[];
|
|
33
33
|
rust_github: Job[];
|
|
34
34
|
};
|
|
35
|
-
export declare function createJobs(results: TaskResultArray): Jobs;
|
|
35
|
+
export declare function createJobs(results: TaskResultArray, rootDir?: string): Jobs;
|
|
36
36
|
//# sourceMappingURL=create-jobs.d.ts.map
|
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"file":"create-jobs.d.ts","sourceRoot":"","sources":["../src/create-jobs.ts"],"names":[],"mappings":"
|
|
1
|
+
{"version":3,"file":"create-jobs.d.ts","sourceRoot":"","sources":["../src/create-jobs.ts"],"names":[],"mappings":"AAEA,OAAO,KAAK,EAAE,IAAI,EAAc,eAAe,EAAE,MAAM,WAAW,CAAC;AAEnE,MAAM,MAAM,QAAQ,GAAG;IACnB,IAAI,EAAE,MAAM,CAAC;IACb,KAAK,EAAE,MAAM,EAAE,CAAC;IAChB,WAAW,EAAE,MAAM,CAAC;CACvB,CAAC;AAEF,MAAM,MAAM,GAAG,GAAG;IACd,SAAS,EAAE,MAAM,CAAC;IAClB,YAAY,EAAE,MAAM,CAAC;IACrB,WAAW,EAAE,MAAM,CAAC;IACpB,SAAS,EAAE;QACP,SAAS,EAAE,QAAQ,CAAC;QACpB,OAAO,EAAE,QAAQ,CAAC;KACrB,CAAC;IACF,IAAI,EAAE,IAAI,CAAC;CACd,CAAC;AAEF,MAAM,MAAM,IAAI,GAAG;IACf,IAAI,EAAE,QAAQ,CAAC;IACf,KAAK,EAAE,SAAS,CAAC;IACjB,OAAO,EAAE,WAAW,CAAC;CACxB,CAAC;AAEF,MAAM,MAAM,QAAQ,GAAG;IACnB,IAAI,EAAE,GAAG,EAAE,CAAC;IACZ,UAAU,EAAE,GAAG,EAAE,CAAC;CACrB,CAAC;AAEF,MAAM,MAAM,SAAS,GAAG;IACpB,IAAI,EAAE,GAAG,EAAE,CAAC;IACZ,UAAU,EAAE,GAAG,EAAE,CAAC;CACrB,CAAC;AAEF,MAAM,MAAM,WAAW,GAAG;IACtB,GAAG,EAAE,GAAG,EAAE,CAAC;IACX,OAAO,EAAE,GAAG,EAAE,CAAC;IACf,WAAW,EAAE,GAAG,EAAE,CAAC;CACtB,CAAC;AAEF,wBAAgB,UAAU,CAAC,OAAO,EAAE,eAAe,EAAE,OAAO,CAAC,EAAE,MAAM,GAAG,IAAI,CA8D3E"}
|
package/dist/create-jobs.mjs
CHANGED
|
@@ -1,16 +1,16 @@
|
|
|
1
1
|
#!/usr/bin/env bun
|
|
2
|
-
import
|
|
2
|
+
import r from "node:fs/promises";
|
|
3
3
|
import { Command as u } from "@commander-js/extra-typings";
|
|
4
|
-
import { T as c, c as
|
|
5
|
-
const
|
|
6
|
-
|
|
7
|
-
const a = await
|
|
8
|
-
if (
|
|
9
|
-
const i =
|
|
10
|
-
|
|
11
|
-
|
|
12
|
-
JSON.stringify(
|
|
13
|
-
) : console.log(
|
|
4
|
+
import { T as c, c as p } from "./schemas-CySsa0xy.mjs";
|
|
5
|
+
const l = new u();
|
|
6
|
+
l.argument("<input>", "The input file to read from.").option("-o, --output <output>", "The output file to write to.").option("-r, --root <root>", "Override the workspace root.").action(async (s, t) => {
|
|
7
|
+
const a = await r.readFile(s, "utf-8"), n = JSON.parse(a), o = c.safeParse(n);
|
|
8
|
+
if (o.success) {
|
|
9
|
+
const i = o.data, e = p(i, t.root);
|
|
10
|
+
t.output ? await r.writeFile(
|
|
11
|
+
t.output,
|
|
12
|
+
JSON.stringify(e, null, 2)
|
|
13
|
+
) : console.log(e);
|
|
14
14
|
} else
|
|
15
|
-
console.error(
|
|
15
|
+
console.error(o.error), process.exit(1);
|
|
16
16
|
}).parseAsync();
|
package/dist/index.cjs
CHANGED
|
@@ -1 +1 @@
|
|
|
1
|
-
"use strict";Object.defineProperty(exports,Symbol.toStringTag,{value:"Module"});const e=require("./schemas-
|
|
1
|
+
"use strict";Object.defineProperty(exports,Symbol.toStringTag,{value:"Module"});const e=require("./schemas-BnpD42eF.js");exports.TaskResultArraySchema=e.TaskResultArraySchema;exports.TaskResultSchema=e.TaskResultSchema;exports.createJobs=e.createJobs;
|
package/dist/index.mjs
CHANGED
|
@@ -0,0 +1 @@
|
|
|
1
|
+
"use strict";const n=require("node:path"),u=require("node:process"),e=require("zod");function b(s,a){a||(a=u.cwd());const i={test:{rust:[],typescript:[]},build:{rust:[],typescript:[]},publish:{generic:[],npm:[],rust_github:[]}};for(const t of s){if(t.status==="skipped")continue;const o=t.task;(o.task_name==="test"||t.details.meta?.is_test_task)&&(t.details.meta?.language==="rust"&&i.test.rust.push(r(t,a)),t.details.meta?.language==="typescript"&&i.test.typescript.push(r(t,a))),(o.task_name==="build"||t.details.meta?.is_build_task)&&(t.details.meta?.language==="rust"&&i.build.rust.push(r(t,a)),t.details.meta?.language==="typescript"&&i.build.typescript.push(r(t,a))),(o.task_name==="publish"||t.details.meta?.is_publish_task)&&(t.details.meta?.language==="typescript"?i.publish.npm.push(r(t,a)):t.details.meta?.language==="rust"?i.publish.rust_github.push(r(t,a)):i.publish.generic.push(r(t,a)))}return i}function r(s,a){const i=[],t=[];if(s.details.output_files&&s.details.output_files.length>0)for(const o of s.details.output_files){const l=n.resolve(s.task.project_dir,o);m(s.task.project_dir,l)?t.push(n.relative(s.task.project_dir,l)):i.push(n.relative(a,l))}return{task_name:s.task.task_name,project_name:s.task.project_name,artifacts:{project:{name:`project-${c(s.task.project_name)}__${c(s.task.task_name)}`,files:t,files_count:t.length},workspace:{name:`workspace-${c(s.task.project_name)}__${c(s.task.task_name)}`,files:i,files_count:i.length}},project_dir:n.relative(a,s.task.project_dir),meta:s.details.meta??{}}}function m(s,a){const i=n.relative(s,a);return i&&!i.startsWith("..")&&!n.isAbsolute(i)}function c(s){let a=s.replace(/[/\\?%*:|"<> \x00-\x1f]/g,"_");return a=a.replace(/[.\s]+$/,""),/^(CON|PRN|AUX|NUL|COM[1-9]|LPT[1-9])$/i.test(a)&&(a+="_"),a||"unsaved_file"}const f=e.z.object({secs:e.z.number().int().nonnegative().describe("The number of whole seconds elapsed."),nanos:e.z.number().int().nonnegative().describe("The number of nanoseconds 
elapsed.")}),k=e.z.object({runner:e.z.string().describe("The runner to use for the target.")}),g=e.z.object({npm:e.z.boolean().optional().describe("Whether to publish to npm."),github:e.z.boolean().optional().describe("Whether to publish to github.")}),_=e.z.object({type:e.z.string().optional().describe("The type of project (e.g., library, service, application)."),language:e.z.string().optional().describe("The primary language of the project."),targets:e.z.record(e.z.string(),k).optional().describe("The targets to build."),release:g.optional(),is_publish_task:e.z.boolean().optional().describe("Whether this is a publish job."),is_build_task:e.z.boolean().optional().describe("Whether this is a build job."),is_test_task:e.z.boolean().optional().describe("Whether this is a test job.")}),p=e.z.object({meta:_.optional(),output_files:e.z.array(e.z.string()).optional().describe("The output files generated.")}),d=e.z.object({task_name:e.z.string().describe("The short name of the task (e.g., 'test', 'build')."),task_command:e.z.string().describe("The command executed for the task."),project_name:e.z.string().describe("The name of the project."),project_dir:e.z.string().describe("The absolute directory path of the project."),full_task_name:e.z.string().describe("The fully qualified task name (e.g., 'omni_utils#test')."),dependencies:e.z.array(e.z.string()).describe("A list of dependent task names."),enabled:e.z.boolean().or(e.z.string()).optional().describe("Whether the task is enabled by configuration. Either a boolean or a tera template string that evaluates to a boolean."),interactive:e.z.boolean().describe("Whether the task is interactive."),persistent:e.z.boolean().describe("Whether the task is persistent.")}),z=e.z.object({status:e.z.literal("completed"),hash:e.z.string().describe("The task's content hash (Base64 encoded string). 
Used for caching."),task:d,exit_code:e.z.number().int().describe("The exit code of the executed command (typically 0 for success)."),elapsed:f.describe("The duration the task took to execute."),cache_hit:e.z.boolean().describe("Indicates if the result was pulled from cache."),details:p}),j=e.z.object({status:e.z.literal("errored"),task:d,error:e.z.string().describe("The error message."),details:p}),T=e.z.object({status:e.z.literal("skipped"),task:d,skip_reason:e.z.string().describe("The reason the task was skipped (e.g., 'disabled')."),details:p}),h=e.z.discriminatedUnion("status",[z,T,j]).describe("Schema for a single task execution result (completed or skipped)."),y=e.z.array(h).describe("An array of task execution results.");exports.TaskResultArraySchema=y;exports.TaskResultSchema=h;exports.createJobs=b;
|
|
@@ -1,7 +1,9 @@
|
|
|
1
|
-
import
|
|
1
|
+
import o from "node:path";
|
|
2
|
+
import h from "node:process";
|
|
2
3
|
import { z as e } from "zod";
|
|
3
|
-
function
|
|
4
|
-
|
|
4
|
+
function x(s, a) {
|
|
5
|
+
a || (a = h.cwd());
|
|
6
|
+
const i = {
|
|
5
7
|
test: {
|
|
6
8
|
rust: [],
|
|
7
9
|
typescript: []
|
|
@@ -16,68 +18,70 @@ function y(a) {
|
|
|
16
18
|
rust_github: []
|
|
17
19
|
}
|
|
18
20
|
};
|
|
19
|
-
for (const t of
|
|
21
|
+
for (const t of s) {
|
|
20
22
|
if (t.status === "skipped")
|
|
21
23
|
continue;
|
|
22
|
-
const
|
|
23
|
-
(
|
|
24
|
+
const n = t.task;
|
|
25
|
+
(n.task_name === "test" || t.details.meta?.is_test_task) && (t.details.meta?.language === "rust" && i.test.rust.push(r(t, a)), t.details.meta?.language === "typescript" && i.test.typescript.push(r(t, a))), (n.task_name === "build" || t.details.meta?.is_build_task) && (t.details.meta?.language === "rust" && i.build.rust.push(r(t, a)), t.details.meta?.language === "typescript" && i.build.typescript.push(r(t, a))), (n.task_name === "publish" || t.details.meta?.is_publish_task) && (t.details.meta?.language === "typescript" ? i.publish.npm.push(r(t, a)) : t.details.meta?.language === "rust" ? i.publish.rust_github.push(r(t, a)) : i.publish.generic.push(r(t, a)));
|
|
24
26
|
}
|
|
25
|
-
return
|
|
27
|
+
return i;
|
|
26
28
|
}
|
|
27
|
-
function
|
|
28
|
-
const
|
|
29
|
-
if (
|
|
30
|
-
for (const
|
|
31
|
-
const
|
|
32
|
-
|
|
29
|
+
function r(s, a) {
|
|
30
|
+
const i = [], t = [];
|
|
31
|
+
if (s.details.output_files && s.details.output_files.length > 0)
|
|
32
|
+
for (const n of s.details.output_files) {
|
|
33
|
+
const l = o.resolve(s.task.project_dir, n);
|
|
34
|
+
u(s.task.project_dir, l) ? t.push(
|
|
35
|
+
o.relative(s.task.project_dir, l)
|
|
36
|
+
) : i.push(o.relative(a, l));
|
|
33
37
|
}
|
|
34
38
|
return {
|
|
35
|
-
task_name:
|
|
36
|
-
project_name:
|
|
39
|
+
task_name: s.task.task_name,
|
|
40
|
+
project_name: s.task.project_name,
|
|
37
41
|
artifacts: {
|
|
38
42
|
project: {
|
|
39
|
-
name: `project-${
|
|
43
|
+
name: `project-${c(s.task.project_name)}__${c(s.task.task_name)}`,
|
|
40
44
|
files: t,
|
|
41
45
|
files_count: t.length
|
|
42
46
|
},
|
|
43
47
|
workspace: {
|
|
44
|
-
name: `workspace-${
|
|
45
|
-
files:
|
|
46
|
-
files_count:
|
|
48
|
+
name: `workspace-${c(s.task.project_name)}__${c(s.task.task_name)}`,
|
|
49
|
+
files: i,
|
|
50
|
+
files_count: i.length
|
|
47
51
|
}
|
|
48
52
|
},
|
|
49
|
-
project_dir: a.task.project_dir,
|
|
50
|
-
meta:
|
|
53
|
+
project_dir: o.relative(a, s.task.project_dir),
|
|
54
|
+
meta: s.details.meta ?? {}
|
|
51
55
|
};
|
|
52
56
|
}
|
|
53
|
-
function
|
|
54
|
-
const
|
|
55
|
-
return
|
|
57
|
+
function u(s, a) {
|
|
58
|
+
const i = o.relative(s, a);
|
|
59
|
+
return i && !i.startsWith("..") && !o.isAbsolute(i);
|
|
56
60
|
}
|
|
57
|
-
function
|
|
58
|
-
let
|
|
59
|
-
return
|
|
61
|
+
function c(s) {
|
|
62
|
+
let a = s.replace(/[/\\?%*:|"<> \x00-\x1f]/g, "_");
|
|
63
|
+
return a = a.replace(/[.\s]+$/, ""), /^(CON|PRN|AUX|NUL|COM[1-9]|LPT[1-9])$/i.test(a) && (a += "_"), a || "unsaved_file";
|
|
60
64
|
}
|
|
61
|
-
const
|
|
65
|
+
const b = e.object({
|
|
62
66
|
secs: e.number().int().nonnegative().describe("The number of whole seconds elapsed."),
|
|
63
67
|
nanos: e.number().int().nonnegative().describe("The number of nanoseconds elapsed.")
|
|
64
|
-
}),
|
|
68
|
+
}), m = e.object({
|
|
65
69
|
runner: e.string().describe("The runner to use for the target.")
|
|
66
|
-
}),
|
|
70
|
+
}), f = e.object({
|
|
67
71
|
npm: e.boolean().optional().describe("Whether to publish to npm."),
|
|
68
72
|
github: e.boolean().optional().describe("Whether to publish to github.")
|
|
69
|
-
}),
|
|
73
|
+
}), g = e.object({
|
|
70
74
|
type: e.string().optional().describe("The type of project (e.g., library, service, application)."),
|
|
71
75
|
language: e.string().optional().describe("The primary language of the project."),
|
|
72
|
-
targets: e.record(e.string(),
|
|
73
|
-
release:
|
|
76
|
+
targets: e.record(e.string(), m).optional().describe("The targets to build."),
|
|
77
|
+
release: f.optional(),
|
|
74
78
|
is_publish_task: e.boolean().optional().describe("Whether this is a publish job."),
|
|
75
79
|
is_build_task: e.boolean().optional().describe("Whether this is a build job."),
|
|
76
80
|
is_test_task: e.boolean().optional().describe("Whether this is a test job.")
|
|
77
|
-
}), l = e.object({
|
|
78
|
-
meta: m.optional(),
|
|
79
|
-
output_files: e.array(e.string()).optional().describe("The output files generated.")
|
|
80
81
|
}), p = e.object({
|
|
82
|
+
meta: g.optional(),
|
|
83
|
+
output_files: e.array(e.string()).optional().describe("The output files generated.")
|
|
84
|
+
}), d = e.object({
|
|
81
85
|
task_name: e.string().describe("The short name of the task (e.g., 'test', 'build')."),
|
|
82
86
|
task_command: e.string().describe("The command executed for the task."),
|
|
83
87
|
project_name: e.string().describe("The name of the project."),
|
|
@@ -94,32 +98,32 @@ const d = e.object({
|
|
|
94
98
|
hash: e.string().describe(
|
|
95
99
|
"The task's content hash (Base64 encoded string). Used for caching."
|
|
96
100
|
),
|
|
97
|
-
task:
|
|
101
|
+
task: d,
|
|
98
102
|
exit_code: e.number().int().describe(
|
|
99
103
|
"The exit code of the executed command (typically 0 for success)."
|
|
100
104
|
),
|
|
101
|
-
elapsed:
|
|
105
|
+
elapsed: b.describe("The duration the task took to execute."),
|
|
102
106
|
cache_hit: e.boolean().describe("Indicates if the result was pulled from cache."),
|
|
103
|
-
details:
|
|
104
|
-
}),
|
|
107
|
+
details: p
|
|
108
|
+
}), _ = e.object({
|
|
105
109
|
status: e.literal("errored"),
|
|
106
|
-
task:
|
|
110
|
+
task: d,
|
|
107
111
|
error: e.string().describe("The error message."),
|
|
108
|
-
details:
|
|
109
|
-
}),
|
|
112
|
+
details: p
|
|
113
|
+
}), j = e.object({
|
|
110
114
|
status: e.literal("skipped"),
|
|
111
|
-
task:
|
|
115
|
+
task: d,
|
|
112
116
|
skip_reason: e.string().describe("The reason the task was skipped (e.g., 'disabled')."),
|
|
113
|
-
details:
|
|
114
|
-
}),
|
|
117
|
+
details: p
|
|
118
|
+
}), T = e.discriminatedUnion("status", [
|
|
115
119
|
k,
|
|
116
|
-
|
|
117
|
-
|
|
120
|
+
j,
|
|
121
|
+
_
|
|
118
122
|
]).describe(
|
|
119
123
|
"Schema for a single task execution result (completed or skipped)."
|
|
120
|
-
),
|
|
124
|
+
), W = e.array(T).describe("An array of task execution results.");
|
|
121
125
|
export {
|
|
122
|
-
|
|
123
|
-
|
|
124
|
-
|
|
126
|
+
W as T,
|
|
127
|
+
T as a,
|
|
128
|
+
x as c
|
|
125
129
|
};
|
package/package.json
CHANGED
package/dist/schemas-CnXDgPym.js
DELETED
|
@@ -1 +0,0 @@
|
|
|
1
|
-
"use strict";const c=require("node:path"),e=require("zod");function d(a){const s={test:{rust:[],typescript:[]},build:{rust:[],typescript:[]},publish:{generic:[],npm:[],rust_github:[]}};for(const t of a){if(t.status==="skipped")continue;const r=t.task;(r.task_name==="test"||t.details.meta?.is_test_task)&&(t.details.meta?.language==="rust"&&s.test.rust.push(i(t)),t.details.meta?.language==="typescript"&&s.test.typescript.push(i(t))),(r.task_name==="build"||t.details.meta?.is_build_task)&&(t.details.meta?.language==="rust"&&s.build.rust.push(i(t)),t.details.meta?.language==="typescript"&&s.build.typescript.push(i(t))),(r.task_name==="publish"||t.details.meta?.is_publish_task)&&t.details.meta?.release?.npm?s.publish.npm.push(i(t)):t.details.meta?.release?.github&&t.details.meta?.language==="rust"&&(t.task.task_name==="publish"||t.details.meta.is_publish_task)?s.publish.rust_github.push(i(t)):(t.details.meta?.is_publish_task||t.task.task_name==="publish")&&s.publish.generic.push(i(t))}return s}function i(a){const s=[],t=[];if(a.details.output_files&&a.details.output_files.length>0)for(const r of a.details.output_files){const n=c.resolve(a.task.project_dir,r);u(a.task.project_dir,n)?t.push(n):s.push(n)}return{task_name:a.task.task_name,project_name:a.task.project_name,artifacts:{project:{name:`project-${o(a.task.project_name)}__${o(a.task.task_name)}`,files:t,files_count:t.length},workspace:{name:`workspace-${o(a.task.project_name)}__${o(a.task.task_name)}`,files:s,files_count:s.length}},project_dir:a.task.project_dir,meta:a.details.meta??{}}}function u(a,s){const t=c.relative(a,s);return t&&!t.startsWith("..")&&!c.isAbsolute(t)}function o(a){let s=a.replace(/[/\\?%*:|"<> \x00-\x1f]/g,"_");return s=s.replace(/[.\s]+$/,""),/^(CON|PRN|AUX|NUL|COM[1-9]|LPT[1-9])$/i.test(s)&&(s+="_"),s||"unsaved_file"}const b=e.z.object({secs:e.z.number().int().nonnegative().describe("The number of whole seconds elapsed."),nanos:e.z.number().int().nonnegative().describe("The number of 
nanoseconds elapsed.")}),m=e.z.object({runner:e.z.string().describe("The runner to use for the target.")}),k=e.z.object({npm:e.z.boolean().optional().describe("Whether to publish to npm."),github:e.z.boolean().optional().describe("Whether to publish to github.")}),f=e.z.object({type:e.z.string().optional().describe("The type of project (e.g., library, service, application)."),language:e.z.string().optional().describe("The primary language of the project."),targets:e.z.record(e.z.string(),m).optional().describe("The targets to build."),release:k.optional(),is_publish_task:e.z.boolean().optional().describe("Whether this is a publish job."),is_build_task:e.z.boolean().optional().describe("Whether this is a build job."),is_test_task:e.z.boolean().optional().describe("Whether this is a test job.")}),l=e.z.object({meta:f.optional(),output_files:e.z.array(e.z.string()).optional().describe("The output files generated.")}),h=e.z.object({task_name:e.z.string().describe("The short name of the task (e.g., 'test', 'build')."),task_command:e.z.string().describe("The command executed for the task."),project_name:e.z.string().describe("The name of the project."),project_dir:e.z.string().describe("The absolute directory path of the project."),full_task_name:e.z.string().describe("The fully qualified task name (e.g., 'omni_utils#test')."),dependencies:e.z.array(e.z.string()).describe("A list of dependent task names."),enabled:e.z.boolean().or(e.z.string()).optional().describe("Whether the task is enabled by configuration. Either a boolean or a tera template string that evaluates to a boolean."),interactive:e.z.boolean().describe("Whether the task is interactive."),persistent:e.z.boolean().describe("Whether the task is persistent.")}),g=e.z.object({status:e.z.literal("completed"),hash:e.z.string().describe("The task's content hash (Base64 encoded string). 
Used for caching."),task:h,exit_code:e.z.number().int().describe("The exit code of the executed command (typically 0 for success)."),elapsed:b.describe("The duration the task took to execute."),cache_hit:e.z.boolean().describe("Indicates if the result was pulled from cache."),details:l}),_=e.z.object({status:e.z.literal("errored"),task:h,error:e.z.string().describe("The error message."),details:l}),z=e.z.object({status:e.z.literal("skipped"),task:h,skip_reason:e.z.string().describe("The reason the task was skipped (e.g., 'disabled')."),details:l}),p=e.z.discriminatedUnion("status",[g,z,_]).describe("Schema for a single task execution result (completed or skipped)."),j=e.z.array(p).describe("An array of task execution results.");exports.TaskResultArraySchema=j;exports.TaskResultSchema=p;exports.createJobs=d;
|
package/project.omni.yaml
DELETED
|
@@ -1,29 +0,0 @@
|
|
|
1
|
-
# yaml-language-server: $schema=https://raw.githubusercontent.com/omni-oss/omni/refs/heads/json-schemas/project-latest.json
|
|
2
|
-
name: "@omni-oss/create-jobs"
|
|
3
|
-
|
|
4
|
-
extends:
|
|
5
|
-
- "@workspace/omni/presets/ts-vite-script.omni.yaml"
|
|
6
|
-
|
|
7
|
-
tasks:
|
|
8
|
-
test:unit:
|
|
9
|
-
enabled: true
|
|
10
|
-
|
|
11
|
-
test:integration:
|
|
12
|
-
enabled: false
|
|
13
|
-
|
|
14
|
-
test:
|
|
15
|
-
enabled: true
|
|
16
|
-
|
|
17
|
-
build:
|
|
18
|
-
enabled: true
|
|
19
|
-
|
|
20
|
-
publish:
|
|
21
|
-
enabled: true
|
|
22
|
-
|
|
23
|
-
meta:
|
|
24
|
-
publish: true
|
|
25
|
-
|
|
26
|
-
dependencies:
|
|
27
|
-
- "@omni-oss/tsconfig"
|
|
28
|
-
- "@omni-oss/vite-config"
|
|
29
|
-
- "@omni-oss/vitest-config"
|
package/src/cli/index.ts
DELETED
|
@@ -1,32 +0,0 @@
|
|
|
1
|
-
#!/usr/bin/env bun
|
|
2
|
-
import fsAsync from "node:fs/promises";
|
|
3
|
-
import { Command } from "@commander-js/extra-typings";
|
|
4
|
-
import { createJobs, TaskResultArraySchema } from "..";
|
|
5
|
-
|
|
6
|
-
const command = new Command();
|
|
7
|
-
|
|
8
|
-
command
|
|
9
|
-
.argument("<input>", "The input file to read from.")
|
|
10
|
-
.option("-o, --output <output>", "The output file to write to.")
|
|
11
|
-
.action(async (input, options) => {
|
|
12
|
-
const inputFile = await fsAsync.readFile(input, "utf-8");
|
|
13
|
-
const results = JSON.parse(inputFile);
|
|
14
|
-
const result = TaskResultArraySchema.safeParse(results);
|
|
15
|
-
|
|
16
|
-
if (result.success) {
|
|
17
|
-
const data = result.data;
|
|
18
|
-
const processed = createJobs(data);
|
|
19
|
-
if (options.output) {
|
|
20
|
-
await fsAsync.writeFile(
|
|
21
|
-
options.output,
|
|
22
|
-
JSON.stringify(processed, null, 2),
|
|
23
|
-
);
|
|
24
|
-
} else {
|
|
25
|
-
console.log(processed);
|
|
26
|
-
}
|
|
27
|
-
} else {
|
|
28
|
-
console.error(result.error);
|
|
29
|
-
process.exit(1);
|
|
30
|
-
}
|
|
31
|
-
})
|
|
32
|
-
.parseAsync();
|
package/src/create-jobs.spec.ts
DELETED
|
@@ -1,337 +0,0 @@
|
|
|
1
|
-
/** biome-ignore-all lint/suspicious/noExplicitAny: test file */
|
|
2
|
-
import { describe, expect, it } from "vitest";
|
|
3
|
-
import { createJobs } from "./create-jobs"; // Update with actual path
|
|
4
|
-
|
|
5
|
-
describe("createJobs", () => {
|
|
6
|
-
it("should return an empty structure when given an empty array", () => {
|
|
7
|
-
const results: any[] = [];
|
|
8
|
-
const jobs = createJobs(results);
|
|
9
|
-
|
|
10
|
-
expect(jobs.test.rust).toHaveLength(0);
|
|
11
|
-
expect(jobs.build.typescript).toHaveLength(0);
|
|
12
|
-
expect(jobs.publish.npm).toHaveLength(0);
|
|
13
|
-
});
|
|
14
|
-
|
|
15
|
-
it('should skip tasks with status "skipped"', () => {
|
|
16
|
-
const results: any[] = [
|
|
17
|
-
{
|
|
18
|
-
status: "skipped",
|
|
19
|
-
task: {
|
|
20
|
-
task_name: "test",
|
|
21
|
-
project_name: "p1",
|
|
22
|
-
project_dir: "/mnt/c/Users/user/project",
|
|
23
|
-
},
|
|
24
|
-
details: { meta: { language: "rust" } },
|
|
25
|
-
},
|
|
26
|
-
];
|
|
27
|
-
const jobs = createJobs(results);
|
|
28
|
-
expect(jobs.test.rust).toHaveLength(0);
|
|
29
|
-
});
|
|
30
|
-
|
|
31
|
-
it("should categorize test and build tasks by language", () => {
|
|
32
|
-
const results: any[] = [
|
|
33
|
-
{
|
|
34
|
-
status: "success",
|
|
35
|
-
task: {
|
|
36
|
-
task_name: "test",
|
|
37
|
-
project_name: "rust-app",
|
|
38
|
-
project_dir: "/mnt/c/Users/user/project",
|
|
39
|
-
},
|
|
40
|
-
details: {
|
|
41
|
-
meta: { language: "rust" },
|
|
42
|
-
output_files: ["binary"],
|
|
43
|
-
},
|
|
44
|
-
},
|
|
45
|
-
{
|
|
46
|
-
status: "success",
|
|
47
|
-
task: {
|
|
48
|
-
task_name: "build",
|
|
49
|
-
project_name: "ts-lib",
|
|
50
|
-
project_dir: "/mnt/c/Users/user/project",
|
|
51
|
-
},
|
|
52
|
-
details: { meta: { language: "typescript" } },
|
|
53
|
-
},
|
|
54
|
-
];
|
|
55
|
-
|
|
56
|
-
const jobs = createJobs(results);
|
|
57
|
-
|
|
58
|
-
expect(jobs.test.rust[0]).toMatchObject({
|
|
59
|
-
project_name: "rust-app",
|
|
60
|
-
task_name: "test",
|
|
61
|
-
artifacts: {
|
|
62
|
-
project: {
|
|
63
|
-
files: ["/mnt/c/Users/user/project/binary"],
|
|
64
|
-
},
|
|
65
|
-
},
|
|
66
|
-
});
|
|
67
|
-
expect(jobs.build.typescript[0]?.project_name).toBe("ts-lib");
|
|
68
|
-
expect(jobs.test.typescript).toHaveLength(0);
|
|
69
|
-
});
|
|
70
|
-
|
|
71
|
-
it("should handle publish tasks correctly (npm vs rust_github)", () => {
|
|
72
|
-
const results: any[] = [
|
|
73
|
-
{
|
|
74
|
-
status: "success",
|
|
75
|
-
task: {
|
|
76
|
-
task_name: "publish",
|
|
77
|
-
project_name: "js-pkg",
|
|
78
|
-
project_dir: "/mnt/c/Users/user/project",
|
|
79
|
-
},
|
|
80
|
-
details: { meta: { release: { npm: true } } },
|
|
81
|
-
},
|
|
82
|
-
{
|
|
83
|
-
status: "success",
|
|
84
|
-
task: {
|
|
85
|
-
task_name: "publish",
|
|
86
|
-
project_name: "rust-pkg",
|
|
87
|
-
project_dir: "/mnt/c/Users/user/project",
|
|
88
|
-
},
|
|
89
|
-
details: {
|
|
90
|
-
meta: {
|
|
91
|
-
language: "rust",
|
|
92
|
-
release: { github: true },
|
|
93
|
-
},
|
|
94
|
-
},
|
|
95
|
-
},
|
|
96
|
-
];
|
|
97
|
-
|
|
98
|
-
const jobs = createJobs(results);
|
|
99
|
-
|
|
100
|
-
// Checks NPM logic: task_name must be "publish" AND meta.release.npm must be true
|
|
101
|
-
expect(jobs.publish.npm).toHaveLength(1);
|
|
102
|
-
expect(jobs.publish.npm[0]?.project_name).toBe("js-pkg");
|
|
103
|
-
|
|
104
|
-
// Checks GitHub logic: meta.release.github must be true AND language must be "rust" AND task_name must be "publish"
|
|
105
|
-
expect(jobs.publish.rust_github).toHaveLength(1);
|
|
106
|
-
expect(jobs.publish.rust_github[0]?.project_name).toBe("rust-pkg");
|
|
107
|
-
});
|
|
108
|
-
|
|
109
|
-
it("should provide default values for missing meta or output_files", () => {
|
|
110
|
-
const results: any[] = [
|
|
111
|
-
{
|
|
112
|
-
status: "success",
|
|
113
|
-
task: {
|
|
114
|
-
task_name: "test",
|
|
115
|
-
project_name: "minimal",
|
|
116
|
-
project_dir: "/mnt/c/Users/user/project",
|
|
117
|
-
},
|
|
118
|
-
details: {}, // Missing meta and output_files
|
|
119
|
-
},
|
|
120
|
-
];
|
|
121
|
-
|
|
122
|
-
// This won't be added to test.rust because meta.language is missing
|
|
123
|
-
const jobs = createJobs(results);
|
|
124
|
-
expect(jobs.test.rust).toHaveLength(0);
|
|
125
|
-
});
|
|
126
|
-
|
|
127
|
-
it("should resolve the relative output files to the project directory", () => {
|
|
128
|
-
const results: any[] = [
|
|
129
|
-
{
|
|
130
|
-
status: "success",
|
|
131
|
-
task: {
|
|
132
|
-
task_name: "test",
|
|
133
|
-
project_name: "minimal",
|
|
134
|
-
project_dir: "/mnt/c/Users/user/project",
|
|
135
|
-
},
|
|
136
|
-
details: {
|
|
137
|
-
meta: { language: "rust" },
|
|
138
|
-
output_files: ["target/debug/minimal"],
|
|
139
|
-
},
|
|
140
|
-
},
|
|
141
|
-
];
|
|
142
|
-
|
|
143
|
-
const jobs = createJobs(results);
|
|
144
|
-
|
|
145
|
-
expect(jobs.test.rust[0]).toMatchObject({
|
|
146
|
-
project_name: "minimal",
|
|
147
|
-
task_name: "test",
|
|
148
|
-
artifacts: {
|
|
149
|
-
project: {
|
|
150
|
-
files: ["/mnt/c/Users/user/project/target/debug/minimal"],
|
|
151
|
-
},
|
|
152
|
-
},
|
|
153
|
-
});
|
|
154
|
-
});
|
|
155
|
-
|
|
156
|
-
it("should assign output files not in the project directory to the workspace artifacts", () => {
|
|
157
|
-
const results: any[] = [
|
|
158
|
-
{
|
|
159
|
-
status: "success",
|
|
160
|
-
task: {
|
|
161
|
-
task_name: "test",
|
|
162
|
-
project_name: "minimal",
|
|
163
|
-
project_dir: "/mnt/c/Users/user/project",
|
|
164
|
-
},
|
|
165
|
-
details: {
|
|
166
|
-
meta: { language: "rust" },
|
|
167
|
-
output_files: ["/mnt/c/Users/user/target/debug/minimal"],
|
|
168
|
-
},
|
|
169
|
-
},
|
|
170
|
-
];
|
|
171
|
-
|
|
172
|
-
const jobs = createJobs(results);
|
|
173
|
-
|
|
174
|
-
expect(jobs.test.rust[0]).toMatchObject({
|
|
175
|
-
project_name: "minimal",
|
|
176
|
-
task_name: "test",
|
|
177
|
-
artifacts: {
|
|
178
|
-
workspace: {
|
|
179
|
-
files: ["/mnt/c/Users/user/target/debug/minimal"],
|
|
180
|
-
},
|
|
181
|
-
},
|
|
182
|
-
});
|
|
183
|
-
});
|
|
184
|
-
|
|
185
|
-
it("should sanitize the project name and task name in artifact names", () => {
|
|
186
|
-
const results: any[] = [
|
|
187
|
-
{
|
|
188
|
-
status: "success",
|
|
189
|
-
task: {
|
|
190
|
-
task_name: "test",
|
|
191
|
-
project_name: "@project/name",
|
|
192
|
-
project_dir: "/mnt/c/Users/user/project",
|
|
193
|
-
},
|
|
194
|
-
details: {
|
|
195
|
-
meta: { language: "rust" },
|
|
196
|
-
output_files: [],
|
|
197
|
-
},
|
|
198
|
-
},
|
|
199
|
-
];
|
|
200
|
-
|
|
201
|
-
const jobs = createJobs(results);
|
|
202
|
-
|
|
203
|
-
expect(jobs.test.rust[0]).toMatchObject({
|
|
204
|
-
project_name: "@project/name",
|
|
205
|
-
task_name: "test",
|
|
206
|
-
artifacts: {
|
|
207
|
-
project: {
|
|
208
|
-
name: "project-@project_name__test",
|
|
209
|
-
files: [],
|
|
210
|
-
},
|
|
211
|
-
workspace: {
|
|
212
|
-
name: "workspace-@project_name__test",
|
|
213
|
-
files: [],
|
|
214
|
-
},
|
|
215
|
-
},
|
|
216
|
-
});
|
|
217
|
-
});
|
|
218
|
-
|
|
219
|
-
it("should handle is_publish_task correctly", () => {
|
|
220
|
-
const results: any[] = [
|
|
221
|
-
{
|
|
222
|
-
status: "success",
|
|
223
|
-
task: {
|
|
224
|
-
task_name: "publish",
|
|
225
|
-
project_name: "js-pkg",
|
|
226
|
-
project_dir: "/mnt/c/Users/user/project",
|
|
227
|
-
},
|
|
228
|
-
details: {
|
|
229
|
-
meta: { release: { npm: true }, is_publish_task: true },
|
|
230
|
-
},
|
|
231
|
-
},
|
|
232
|
-
{
|
|
233
|
-
status: "success",
|
|
234
|
-
task: {
|
|
235
|
-
task_name: "any-task",
|
|
236
|
-
project_name: "rust-pkg",
|
|
237
|
-
project_dir: "/mnt/c/Users/user/project",
|
|
238
|
-
},
|
|
239
|
-
details: {
|
|
240
|
-
meta: {
|
|
241
|
-
language: "rust",
|
|
242
|
-
release: { github: true },
|
|
243
|
-
is_publish_task: true,
|
|
244
|
-
},
|
|
245
|
-
},
|
|
246
|
-
},
|
|
247
|
-
];
|
|
248
|
-
|
|
249
|
-
const jobs = createJobs(results);
|
|
250
|
-
|
|
251
|
-
// Checks NPM logic: task_name must be "publish" AND meta.release.npm must be true
|
|
252
|
-
expect(jobs.publish.npm).toHaveLength(1);
|
|
253
|
-
expect(jobs.publish.npm[0]?.project_name).toBe("js-pkg");
|
|
254
|
-
|
|
255
|
-
// Checks GitHub logic: meta.release.github must be true AND language must be "rust"
|
|
256
|
-
// Note that your code doesn't strictly check if task_name === "publish" for rust_github!
|
|
257
|
-
expect(jobs.publish.rust_github).toHaveLength(1);
|
|
258
|
-
expect(jobs.publish.rust_github[0]?.project_name).toBe("rust-pkg");
|
|
259
|
-
});
|
|
260
|
-
|
|
261
|
-
it("should handle is_build_task correctly", () => {
|
|
262
|
-
const results: any[] = [
|
|
263
|
-
{
|
|
264
|
-
status: "success",
|
|
265
|
-
task: {
|
|
266
|
-
task_name: "build",
|
|
267
|
-
project_name: "ts-lib",
|
|
268
|
-
project_dir: "/mnt/c/Users/user/project",
|
|
269
|
-
},
|
|
270
|
-
details: {
|
|
271
|
-
meta: { language: "typescript", is_build_task: true },
|
|
272
|
-
},
|
|
273
|
-
},
|
|
274
|
-
{
|
|
275
|
-
status: "success",
|
|
276
|
-
task: {
|
|
277
|
-
task_name: "any-task",
|
|
278
|
-
project_name: "rust-pkg",
|
|
279
|
-
project_dir: "/mnt/c/Users/user/project",
|
|
280
|
-
},
|
|
281
|
-
details: {
|
|
282
|
-
meta: {
|
|
283
|
-
language: "rust",
|
|
284
|
-
is_build_task: true,
|
|
285
|
-
},
|
|
286
|
-
},
|
|
287
|
-
},
|
|
288
|
-
];
|
|
289
|
-
|
|
290
|
-
const jobs = createJobs(results);
|
|
291
|
-
|
|
292
|
-
// Checks build logic: task_name must be "build" AND meta.language must be "typescript"
|
|
293
|
-
expect(jobs.build.typescript).toHaveLength(1);
|
|
294
|
-
expect(jobs.build.typescript[0]?.project_name).toBe("ts-lib");
|
|
295
|
-
|
|
296
|
-
// Checks build logic: task_name must be "any-task" AND meta.language must be "rust"
|
|
297
|
-
expect(jobs.build.rust).toHaveLength(1);
|
|
298
|
-
expect(jobs.build.rust[0]?.project_name).toBe("rust-pkg");
|
|
299
|
-
});
|
|
300
|
-
|
|
301
|
-
it("should handle is_test_task correctly", () => {
|
|
302
|
-
const results: any[] = [
|
|
303
|
-
{
|
|
304
|
-
status: "success",
|
|
305
|
-
task: {
|
|
306
|
-
task_name: "test",
|
|
307
|
-
project_name: "rust-app",
|
|
308
|
-
project_dir: "/mnt/c/Users/user/project",
|
|
309
|
-
},
|
|
310
|
-
details: {
|
|
311
|
-
meta: { language: "rust", is_test_task: true },
|
|
312
|
-
},
|
|
313
|
-
},
|
|
314
|
-
{
|
|
315
|
-
status: "success",
|
|
316
|
-
task: {
|
|
317
|
-
task_name: "test",
|
|
318
|
-
project_name: "rust-pkg",
|
|
319
|
-
project_dir: "/mnt/c/Users/user/project",
|
|
320
|
-
},
|
|
321
|
-
details: {
|
|
322
|
-
meta: {
|
|
323
|
-
language: "rust",
|
|
324
|
-
is_test_task: true,
|
|
325
|
-
},
|
|
326
|
-
},
|
|
327
|
-
},
|
|
328
|
-
];
|
|
329
|
-
|
|
330
|
-
const jobs = createJobs(results);
|
|
331
|
-
|
|
332
|
-
// Checks test logic: task_name must be "test" AND meta.language must be "rust"
|
|
333
|
-
expect(jobs.test.rust).toHaveLength(2);
|
|
334
|
-
expect(jobs.test.rust[0]?.project_name).toBe("rust-app");
|
|
335
|
-
expect(jobs.test.rust[1]?.project_name).toBe("rust-pkg");
|
|
336
|
-
});
|
|
337
|
-
});
|
package/src/create-jobs.ts
DELETED
|
@@ -1,178 +0,0 @@
|
|
|
1
|
-
import path from "node:path";
|
|
2
|
-
import type { Meta, TaskResult, TaskResultArray } from "./schemas";
|
|
3
|
-
|
|
4
|
-
/**
 * A named bundle of output files produced by a task run.
 * `name` is built from the sanitized project and task names (see
 * `jobFromResult`), so it is safe to use as a file/artifact name.
 */
export type Artifact = {
    name: string;
    files: string[];
    files_count: number;
};

/**
 * One CI job derived from a single task result. Output files are split into
 * a project-scoped artifact (files inside `project_dir`) and a
 * workspace-scoped artifact (files outside it).
 */
export type Job = {
    task_name: string;
    project_name: string;
    project_dir: string;
    artifacts: {
        workspace: Artifact;
        project: Artifact;
    };
    meta: Meta;
};

/** All jobs produced by `createJobs`, grouped by job kind. */
export type Jobs = {
    test: TestJobs;
    build: BuildJobs;
    publish: PublishJobs;
};

/** Test jobs grouped by `meta.language`. */
export type TestJobs = {
    rust: Job[];
    typescript: Job[];
};

/** Build jobs grouped by `meta.language`. */
export type BuildJobs = {
    rust: Job[];
    typescript: Job[];
};

/**
 * Publish jobs grouped by destination. A result lands in exactly one bucket:
 * npm, then rust_github, then generic (see the else-if chain in createJobs).
 */
export type PublishJobs = {
    npm: Job[];
    generic: Job[];
    rust_github: Job[];
};
|
|
42
|
-
|
|
43
|
-
export function createJobs(results: TaskResultArray): Jobs {
|
|
44
|
-
const jobs: Jobs = {
|
|
45
|
-
test: {
|
|
46
|
-
rust: [],
|
|
47
|
-
typescript: [],
|
|
48
|
-
},
|
|
49
|
-
build: {
|
|
50
|
-
rust: [],
|
|
51
|
-
typescript: [],
|
|
52
|
-
},
|
|
53
|
-
publish: {
|
|
54
|
-
generic: [],
|
|
55
|
-
npm: [],
|
|
56
|
-
rust_github: [],
|
|
57
|
-
},
|
|
58
|
-
};
|
|
59
|
-
|
|
60
|
-
for (const result of results) {
|
|
61
|
-
if (result.status === "skipped") {
|
|
62
|
-
continue;
|
|
63
|
-
}
|
|
64
|
-
|
|
65
|
-
const task = result.task;
|
|
66
|
-
if (task.task_name === "test" || result.details.meta?.is_test_task) {
|
|
67
|
-
if (result.details.meta?.language === "rust") {
|
|
68
|
-
jobs.test.rust.push(jobFromResult(result));
|
|
69
|
-
}
|
|
70
|
-
|
|
71
|
-
if (result.details.meta?.language === "typescript") {
|
|
72
|
-
jobs.test.typescript.push(jobFromResult(result));
|
|
73
|
-
}
|
|
74
|
-
}
|
|
75
|
-
|
|
76
|
-
if (task.task_name === "build" || result.details.meta?.is_build_task) {
|
|
77
|
-
if (result.details.meta?.language === "rust") {
|
|
78
|
-
jobs.build.rust.push(jobFromResult(result));
|
|
79
|
-
}
|
|
80
|
-
|
|
81
|
-
if (result.details.meta?.language === "typescript") {
|
|
82
|
-
jobs.build.typescript.push(jobFromResult(result));
|
|
83
|
-
}
|
|
84
|
-
}
|
|
85
|
-
|
|
86
|
-
if (
|
|
87
|
-
(task.task_name === "publish" ||
|
|
88
|
-
result.details.meta?.is_publish_task) &&
|
|
89
|
-
result.details.meta?.release?.npm
|
|
90
|
-
) {
|
|
91
|
-
jobs.publish.npm.push(jobFromResult(result));
|
|
92
|
-
} else if (
|
|
93
|
-
result.details.meta?.release?.github &&
|
|
94
|
-
result.details.meta?.language === "rust" &&
|
|
95
|
-
(result.task.task_name === "publish" ||
|
|
96
|
-
result.details.meta.is_publish_task)
|
|
97
|
-
) {
|
|
98
|
-
jobs.publish.rust_github.push(jobFromResult(result));
|
|
99
|
-
} else if (
|
|
100
|
-
result.details.meta?.is_publish_task ||
|
|
101
|
-
result.task.task_name === "publish"
|
|
102
|
-
) {
|
|
103
|
-
jobs.publish.generic.push(jobFromResult(result));
|
|
104
|
-
}
|
|
105
|
-
}
|
|
106
|
-
|
|
107
|
-
return jobs;
|
|
108
|
-
}
|
|
109
|
-
|
|
110
|
-
function jobFromResult(result: TaskResult): Job {
|
|
111
|
-
const workspaceArtifacts = [] as string[];
|
|
112
|
-
const projectArtifacts = [] as string[];
|
|
113
|
-
|
|
114
|
-
if (result.details.output_files && result.details.output_files.length > 0) {
|
|
115
|
-
for (const file of result.details.output_files) {
|
|
116
|
-
const fullPath = path.resolve(result.task.project_dir, file);
|
|
117
|
-
|
|
118
|
-
if (isPathInside(result.task.project_dir, fullPath)) {
|
|
119
|
-
projectArtifacts.push(fullPath);
|
|
120
|
-
} else {
|
|
121
|
-
workspaceArtifacts.push(fullPath);
|
|
122
|
-
}
|
|
123
|
-
}
|
|
124
|
-
}
|
|
125
|
-
|
|
126
|
-
return {
|
|
127
|
-
task_name: result.task.task_name,
|
|
128
|
-
project_name: result.task.project_name,
|
|
129
|
-
artifacts: {
|
|
130
|
-
project: {
|
|
131
|
-
name: `project-${toPathSafeString(result.task.project_name)}__${toPathSafeString(result.task.task_name)}`,
|
|
132
|
-
files: projectArtifacts,
|
|
133
|
-
files_count: projectArtifacts.length,
|
|
134
|
-
},
|
|
135
|
-
workspace: {
|
|
136
|
-
name: `workspace-${toPathSafeString(result.task.project_name)}__${toPathSafeString(result.task.task_name)}`,
|
|
137
|
-
files: workspaceArtifacts,
|
|
138
|
-
files_count: workspaceArtifacts.length,
|
|
139
|
-
},
|
|
140
|
-
},
|
|
141
|
-
project_dir: result.task.project_dir,
|
|
142
|
-
meta: result.details.meta ?? {},
|
|
143
|
-
};
|
|
144
|
-
}
|
|
145
|
-
|
|
146
|
-
/**
|
|
147
|
-
* Checks if 'child' is physically inside 'parent'
|
|
148
|
-
* @param parent - The potential parent directory
|
|
149
|
-
* @param child - The path to check
|
|
150
|
-
*/
|
|
151
|
-
function isPathInside(parent: string, child: string) {
|
|
152
|
-
const relative = path.relative(parent, child);
|
|
153
|
-
|
|
154
|
-
// path.relative returns an empty string if paths are the same.
|
|
155
|
-
// If the path starts with '..' (or the platform equivalent),
|
|
156
|
-
// it means the child is outside the parent.
|
|
157
|
-
return relative && !relative.startsWith("..") && !path.isAbsolute(relative);
|
|
158
|
-
}
|
|
159
|
-
|
|
160
|
-
function toPathSafeString(str: string): string {
|
|
161
|
-
// 1. Replace illegal characters: / \ ? % * : | " < >
|
|
162
|
-
// Also includes control characters (0-31) which are illegal on Windows
|
|
163
|
-
// biome-ignore lint/suspicious/noControlCharactersInRegex: false
|
|
164
|
-
let safeStr = str.replace(/[/\\?%*:|"<> \x00-\x1f]/g, "_");
|
|
165
|
-
|
|
166
|
-
// 2. Trim trailing dots and spaces (illegal on Windows filenames)
|
|
167
|
-
safeStr = safeStr.replace(/[.\s]+$/, "");
|
|
168
|
-
|
|
169
|
-
// 3. Handle Windows Reserved Names (CON, PRN, AUX, NUL, COM1-9, LPT1-9)
|
|
170
|
-
// These cannot be filenames even if they have no extension.
|
|
171
|
-
const reservedNames = /^(CON|PRN|AUX|NUL|COM[1-9]|LPT[1-9])$/i;
|
|
172
|
-
if (reservedNames.test(safeStr)) {
|
|
173
|
-
safeStr += "_";
|
|
174
|
-
}
|
|
175
|
-
|
|
176
|
-
// 4. Fallback for empty strings or strings that became empty after stripping
|
|
177
|
-
return safeStr || "unsaved_file";
|
|
178
|
-
}
|
package/src/index.ts
DELETED
package/src/schemas.ts
DELETED
|
@@ -1,176 +0,0 @@
|
|
|
1
|
-
import { z } from "zod";
|
|
2
|
-
|
|
3
|
-
// --- Nested Schemas ---
|
|
4
|
-
|
|
5
|
-
/**
 * Defines the structure for the duration object.
 * Mirrors Rust's serde serialization of std::time::Duration — TODO confirm.
 */
const ElapsedSchema = z.object({
    secs: z
        .number()
        .int()
        .nonnegative()
        .describe("The number of whole seconds elapsed."),
    nanos: z
        .number()
        .int()
        .nonnegative()
        .describe("The number of nanoseconds elapsed."),
});

/** Per-target build configuration keyed by target name (see MetaSchema.targets). */
const TargetSchema = z.object({
    runner: z.string().describe("The runner to use for the target."),
});

/** Flags describing which registries/platforms a project is released to. */
const ReleaseSchema = z.object({
    npm: z.boolean().optional().describe("Whether to publish to npm."),
    github: z.boolean().optional().describe("Whether to publish to github."),
});

/**
 * Defines the metadata for the project/task details.
 * The is_*_task flags let a task with an arbitrary name participate in the
 * corresponding job group (consumed by createJobs).
 */
const MetaSchema = z.object({
    type: z
        .string()
        .optional()
        .describe("The type of project (e.g., library, service, application)."),
    language: z
        .string()
        .optional()
        .describe("The primary language of the project."),
    targets: z
        .record(z.string(), TargetSchema)
        .optional()
        .describe("The targets to build."),
    release: ReleaseSchema.optional(),
    is_publish_task: z
        .boolean()
        .optional()
        .describe("Whether this is a publish job."),
    is_build_task: z
        .boolean()
        .optional()
        .describe("Whether this is a build job."),
    is_test_task: z
        .boolean()
        .optional()
        .describe("Whether this is a test job."),
});
|
|
60
|
-
|
|
61
|
-
/** Free-form details attached to every task result, regardless of status. */
const DetailsSchema = z.object({
    meta: MetaSchema.optional(),
    output_files: z
        .array(z.string())
        .optional()
        .describe("The output files generated."),
});

/**
 * Defines the details of the task that was run or skipped.
 */
const TaskSchema = z.object({
    task_name: z
        .string()
        .describe("The short name of the task (e.g., 'test', 'build')."),
    task_command: z.string().describe("The command executed for the task."),
    project_name: z.string().describe("The name of the project."),
    project_dir: z
        .string()
        .describe("The absolute directory path of the project."),
    full_task_name: z
        .string()
        .describe("The fully qualified task name (e.g., 'omni_utils#test')."),
    dependencies: z
        .array(z.string())
        .describe("A list of dependent task names."),
    enabled: z
        .boolean()
        .or(z.string())
        .optional()
        .describe(
            "Whether the task is enabled by configuration. Either a boolean or a tera template string that evaluates to a boolean.",
        ),
    interactive: z.boolean().describe("Whether the task is interactive."),
    persistent: z.boolean().describe("Whether the task is persistent."),
});
|
|
97
|
-
|
|
98
|
-
// --- Discriminant Schemas (Union Members) ---
// Each member carries a literal `status` tag so TaskResultSchema can be a
// discriminated union with correctly-required fields per variant.

/**
 * Schema for a task that successfully completed.
 * Note: 'elapsed', 'exit_code', 'hash', and 'cache_hit' are required here.
 */
const CompletedTaskSchema = z.object({
    status: z.literal("completed"),
    hash: z
        .string()
        .describe(
            "The task's content hash (Base64 encoded string). Used for caching.",
        ),
    task: TaskSchema,
    exit_code: z
        .number()
        .int()
        .describe(
            "The exit code of the executed command (typically 0 for success).",
        ),
    elapsed: ElapsedSchema.describe("The duration the task took to execute."),
    cache_hit: z
        .boolean()
        .describe("Indicates if the result was pulled from cache."),
    details: DetailsSchema,
});

/** Schema for a task that failed; only the error message is required beyond the basics. */
const ErroredTaskSchema = z.object({
    status: z.literal("errored"),
    task: TaskSchema,
    error: z.string().describe("The error message."),
    details: DetailsSchema,
});

/**
 * Schema for a task that was skipped.
 * Note: 'skip_reason' is required here, and fields like 'hash' or 'elapsed' are omitted.
 */
const SkippedTaskSchema = z.object({
    status: z.literal("skipped"),
    task: TaskSchema,
    skip_reason: z
        .string()
        .describe("The reason the task was skipped (e.g., 'disabled')."),
    details: DetailsSchema,
});
|
|
144
|
-
|
|
145
|
-
// --- Root Schema ---

/**
 * The primary schema for a single task result, using a discriminated union
 * based on the 'status' field to correctly type the required fields.
 */
export const TaskResultSchema = z
    .discriminatedUnion("status", [
        CompletedTaskSchema,
        SkippedTaskSchema,
        ErroredTaskSchema,
    ])
    .describe(
        "Schema for a single task execution result (completed or skipped).",
    );

/**
 * The final schema for the root array of task results.
 */
export const TaskResultArraySchema = z
    .array(TaskResultSchema)
    .describe("An array of task execution results.");
|
|
167
|
-
|
|
168
|
-
// --- TypeScript Types (Inferred) ---
// Static types derived from the runtime schemas above, so consumers never
// have to keep a hand-written type in sync with the validators.

export type Elapsed = z.infer<typeof ElapsedSchema>;
export type Meta = z.infer<typeof MetaSchema>;
export type Task = z.infer<typeof TaskSchema>;
export type CompletedTaskResult = z.infer<typeof CompletedTaskSchema>;
export type SkippedTaskResult = z.infer<typeof SkippedTaskSchema>;
export type TaskResult = z.infer<typeof TaskResultSchema>;
export type TaskResultArray = z.infer<typeof TaskResultArraySchema>;
|
package/tsconfig.json
DELETED
package/tsconfig.project.json
DELETED
package/tsconfig.types.json
DELETED
package/vite.config.ts
DELETED
|
@@ -1,30 +0,0 @@
|
|
|
1
|
-
import createBaseConfig from "@omni-oss/vite-config/script";
|
|
2
|
-
import { mergeConfig, type UserConfig } from "vite";
|
|
3
|
-
import { dependencies } from "./package.json";
|
|
4
|
-
|
|
5
|
-
// Shared library build config with .d.ts generation enabled.
const baseConfig = createBaseConfig({
    generateTypes: true,
});

// Node built-ins used at runtime; kept external so the bundle does not
// inline polyfills/shims for them.
const externalNodeDeps = ["node:path", "node:fs", "node:fs/promises"];

// Dual-entry library build: "create-jobs" is the CLI, "index" the API.
export default mergeConfig(baseConfig, {
    build: {
        minify: "esbuild",
        lib: {
            entry: {
                "create-jobs": "src/cli/index.ts",
                index: "src/index.ts",
            },

            // Emit both ESM (.mjs) and CommonJS (.cjs) bundles.
            formats: ["es", "cjs"],
            fileName: (format, entryName) =>
                `${entryName || "create-jobs"}.${format === "cjs" ? "cjs" : "mjs"}`,
            name: "CreateJobs",
        },

        rollupOptions: {
            // Never bundle declared dependencies or node built-ins.
            external: [...Object.keys(dependencies), ...externalNodeDeps],
        },
    },
} satisfies UserConfig);
|
|
@@ -1,12 +0,0 @@
|
|
|
1
|
-
import { mergeConfig, type UserWorkspaceConfig } from "vitest/config";
|
|
2
|
-
import baseConfig from "./vite.config";
|
|
3
|
-
import integrationTestConfig from "@omni-oss/vitest-config/integration";
|
|
4
|
-
|
|
5
|
-
// Integration suite: only picks up *.test/*.spec files that live under a
// __tests__ directory (the unit config excludes exactly these).
export default mergeConfig(mergeConfig(baseConfig, integrationTestConfig), {
    test: {
        testTimeout: 1000,
        include: [
            "./src/**/__tests__/**/*.{test,spec}.{js,mjs,cjs,ts,mts,cts,jsx,tsx}",
        ],
    },
} satisfies UserWorkspaceConfig);
|
package/vitest.config.unit.ts
DELETED
|
@@ -1,13 +0,0 @@
|
|
|
1
|
-
import { mergeConfig, type UserWorkspaceConfig } from "vitest/config";
|
|
2
|
-
import baseConfig from "./vite.config";
|
|
3
|
-
import unitTestConfig from "@omni-oss/vitest-config/unit";
|
|
4
|
-
|
|
5
|
-
// Unit suite: every *.test/*.spec file under src EXCEPT those in __tests__
// directories, which belong to the integration suite.
export default mergeConfig(mergeConfig(baseConfig, unitTestConfig), {
    test: {
        testTimeout: 1000,
        include: ["./src/**/*.{test,spec}.{js,mjs,cjs,ts,mts,cts,jsx,tsx}"],
        exclude: [
            "./src/**/__tests__/**/*.{test,spec}.{js,mjs,cjs,ts,mts,cts,jsx,tsx}",
        ],
    },
} satisfies UserWorkspaceConfig);
|