@microlight/core 0.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +77 -0
- package/bin/microlight-core.js +70 -0
- package/dist/scripts/generate-folder-index.js +120 -0
- package/dist/scripts/generate-task-imports.js +64 -0
- package/dist/scripts/generate-task-index.js +61 -0
- package/dist/scripts/prepareFolders.js +119 -0
- package/dist/scripts/prepareServer.js +34 -0
- package/dist/scripts/prepareTasks.js +114 -0
- package/dist/server/app/api/tasks/[slug]/route.js +54 -0
- package/dist/server/app/layout.js +41 -0
- package/dist/server/app/library/[[...f_path]]/ViewFolder.js +113 -0
- package/dist/server/app/library/[[...f_path]]/page.js +42 -0
- package/dist/server/app/page.js +4 -0
- package/dist/server/app/tasks/[slug]/ViewTask.js +252 -0
- package/dist/server/app/tasks/[slug]/action.js +44 -0
- package/dist/server/app/tasks/[slug]/page.js +33 -0
- package/dist/server/app/tasks/[slug]/runs/[r_id]/ViewRun.js +230 -0
- package/dist/server/app/tasks/[slug]/runs/[r_id]/_components/DropdownActions/DropdownActions.js +46 -0
- package/dist/server/app/tasks/[slug]/runs/[r_id]/_components/DropdownActions/action.js +35 -0
- package/dist/server/app/tasks/[slug]/runs/[r_id]/page.js +43 -0
- package/dist/server/components/Icon.js +22 -0
- package/dist/server/components/Link.js +52 -0
- package/dist/server/components/MLInput.js +29 -0
- package/dist/server/components/Navbar/Navbar.js +38 -0
- package/dist/server/components/Navbar/NavbarContainer.js +26 -0
- package/dist/server/components/PageHeader.js +87 -0
- package/dist/server/components/StatusChip.js +11 -0
- package/dist/server/components/Test.js +5 -0
- package/dist/server/components/TopLoader.js +8 -0
- package/dist/server/database/microlight/index.js +52 -0
- package/dist/server/database/microlight/tables/Logs.model.js +34 -0
- package/dist/server/database/microlight/tables/Runs.model.js +61 -0
- package/dist/server/instrumentation.js +16 -0
- package/dist/server/lib/executeRun.js +80 -0
- package/dist/server/lib/generateDisplayFunctions.js +89 -0
- package/dist/server/lib/getAllTasks.js +32 -0
- package/dist/server/lib/getTaskDetails.js +17 -0
- package/dist/server/lib/loadSchedules.js +77 -0
- package/dist/server/tasks/1.intro/hello_world2.task.js +21 -0
- package/dist/server/tasks/1.intro/microlight.folder.js +5 -0
- package/dist/server/tasks/1.intro/ml.task.js +31 -0
- package/dist/server/tasks/1.intro/scheduled.task.js +18 -0
- package/dist/server/tasks/1.intro/takes_time.task.js +28 -0
- package/dist/server/tasks/1.intro/test/microlight.folder.js +5 -0
- package/dist/server/tasks/1.intro/test/takes_time2.task.js +28 -0
- package/dist/server/tasks/index.js +33 -0
- package/dist/server/tasks/microlight.folder.js +5 -0
- package/index.js +1 -0
- package/package.json +46 -0

package/dist/server/database/microlight/index.js
ADDED
@@ -0,0 +1,52 @@
+import { Sequelize } from 'sequelize';
+import Runs from "./tables/Runs.model.js";
+import Logs from "./tables/Logs.model.js";
+import sqlite3 from 'sqlite3';
+import pg from 'pg';
+
+/*======================Initialize Sequelize======================*/
+let sequelize;
+if (process.env.ML_DB_PG) {
+    sequelize = new Sequelize(process.env.ML_DB_PG, {
+        logging: false,
+        dialect: 'postgres',
+        dialectOptions: {
+            ssl: {
+                require: true,
+                // This will force the SSL requirement
+                rejectUnauthorized: false // This is to avoid errors due to self-signed certificates
+            }
+        },
+        dialectModule: pg
+    });
+} else {
+    sequelize = new Sequelize({
+        dialect: 'sqlite',
+        storage: '.microlight/microlight.db',
+        logging: false,
+        dialectModule: sqlite3
+    });
+}
+
+/*======================Initialize models======================*/
+const models = {
+    Runs: Runs(sequelize),
+    Logs: Logs(sequelize)
+    // Schedule: Schedule(sequelize),
+};
+
+//Create db object
+const microlightDB = {
+    ...models,
+    sequelize,
+    Sequelize,
+    // Add sync method as a property using arrow function
+    sync: (...args) => sequelize.sync(...args)
+};
+
+// Only sync database in development mode
+// if (process.env.NODE_ENV === 'development') {
+await microlightDB.sync();
+// }
+
+export default microlightDB;
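
Usage note (not part of the package diff): the database module above selects Postgres when the ML_DB_PG connection string is set and otherwise falls back to a local SQLite file at .microlight/microlight.db. A minimal sketch of querying recent runs through the exported object; the relative import path is an assumption based on the dist/server layout in the file list:

// Sketch only; adjust the import path to wherever the package is installed.
import microlightDB from './dist/server/database/microlight/index.js';

// List the ten most recent runs with a few fields.
const recentRuns = await microlightDB.Runs.findAll({
    order: [['created_at', 'DESC']],
    limit: 10
});
console.log(recentRuns.map(r => ({ id: r.id, task: r.task, status: r.status })));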

package/dist/server/database/microlight/tables/Logs.model.js
ADDED
@@ -0,0 +1,34 @@
+import { DataTypes } from 'sequelize';
+export default sequelize => {
+    return sequelize.define('Logs', {
+        id: {
+            type: DataTypes.INTEGER,
+            primaryKey: true,
+            autoIncrement: true
+            // defaultValue: sequelize.literal("nextval('stock_consumption_id_seq'::regclass)")
+        },
+        created_at: {
+            type: DataTypes.DATE,
+            allowNull: false,
+            defaultValue: DataTypes.NOW
+        },
+        type: {
+            // can be log, json, markdown, error, danger, warn, info, success
+            type: DataTypes.TEXT,
+            allowNull: true
+        },
+        content: {
+            // the log payload: plain text, a JSON string, or rendered markdown
+            type: DataTypes.TEXT,
+            allowNull: true
+        },
+        run: {
+            type: DataTypes.INTEGER,
+            allowNull: false
+        }
+    }, {
+        tableName: 'logs',
+        timestamps: false
+        // schema: 'public'
+    });
+};

package/dist/server/database/microlight/tables/Runs.model.js
ADDED
@@ -0,0 +1,61 @@
+import { DataTypes } from 'sequelize';
+export default sequelize => {
+    return sequelize.define('Runs', {
+        id: {
+            type: DataTypes.INTEGER,
+            primaryKey: true,
+            autoIncrement: true
+            // defaultValue: sequelize.literal("nextval('stock_consumption_id_seq'::regclass)")
+        },
+        task: {
+            type: DataTypes.TEXT,
+            allowNull: true
+        },
+        status: {
+            // can be pending, running, complete, failed, timeout
+            type: DataTypes.TEXT,
+            allowNull: true
+        },
+        started_at: {
+            type: DataTypes.DATE,
+            allowNull: true
+        },
+        completed_at: {
+            type: DataTypes.DATE,
+            allowNull: true
+        },
+        inputs: {
+            type: DataTypes.JSON,
+            allowNull: true,
+            get() {
+                const inputs = this.getDataValue('inputs');
+                return inputs ? (typeof inputs === 'string' ? JSON.parse(inputs) : inputs) : null;
+            }
+        },
+        triggered_by: {
+            // user,api,webhook,schedule
+            type: DataTypes.TEXT,
+            allowNull: true
+        },
+        created_at: {
+            type: DataTypes.DATE,
+            allowNull: false,
+            defaultValue: DataTypes.NOW
+        },
+        updated_at: {
+            type: DataTypes.DATE,
+            allowNull: false,
+            defaultValue: DataTypes.NOW
+        },
+        duration: {
+            type: DataTypes.INTEGER,
+            allowNull: false,
+            defaultValue: 0
+            // defaultValue: sequelize.literal("nextval('stock_consumption_id_seq'::regclass)")
+        }
+    }, {
+        tableName: 'runs',
+        timestamps: false
+        // schema: 'public'
+    });
+};

package/dist/server/instrumentation.js
ADDED
@@ -0,0 +1,16 @@
+export const register = async () => {
+    if (process.env.NEXT_RUNTIME === 'nodejs') {
+        // // Dynamically import loadSchedules only when we're in Node.js runtime
+        // const { default: loadSchedules } = await import('./lib/loadSchedules');
+        // await loadSchedules();
+        // // const interval = setInterval(async () => {
+        // //     await executeRuns();
+        // //     console.log('Instrumentation check running...');
+        // // }, 5000);
+
+        // // Clean up interval on process exit (only meaningful once the setInterval above is enabled)
+        // process.on('SIGTERM', () => {
+        //     clearInterval(interval);
+        // });
+    }
+};
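
The commented-out body of register() suggests the intended wiring: when running in the Node.js runtime, dynamically import loadSchedules and start the cron jobs. A minimal sketch of that enabled form, limited to the calls that exist in this diff (the executeRuns polling loop mentioned in the comments is not defined anywhere in this package version):

// Sketch of the register hook with the commented-out schedule loading enabled.
export const register = async () => {
    if (process.env.NEXT_RUNTIME === 'nodejs') {
        // Import lazily so the module is only loaded in the Node.js runtime.
        const { default: loadSchedules } = await import('./lib/loadSchedules');
        await loadSchedules();
    }
};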

package/dist/server/lib/executeRun.js
ADDED
@@ -0,0 +1,80 @@
+"use server";
+
+import getTaskDetails from "./getTaskDetails";
+import microlightDB from "../database/microlight";
+import generateDisplayfunctions from "./generateDisplayFunctions";
+import async from 'async';
+export default async function executeRun(run) {
+    const workflow = {
+        startRun: async function () {
+            let started_at = new Date();
+            await microlightDB.Runs.update({
+                status: 'running',
+                started_at: started_at,
+                updated_at: started_at
+            }, {
+                where: {
+                    id: run.id
+                },
+                returning: true
+            });
+            return started_at;
+        },
+        executeTask: ['startRun', async function (results) {
+            let params = {
+                slug: run.task
+            };
+            let taskDef = await getTaskDetails({
+                params
+            });
+            const ml = generateDisplayfunctions(run.id);
+            try {
+                await taskDef.fn(ml, run.inputs);
+                await ml.log('=== ML: run completed ===');
+                return {
+                    status: 'complete'
+                };
+            } catch (e) {
+                await ml.error(e);
+                await ml.log('=== ML: run completed ===');
+                return {
+                    status: 'failed'
+                };
+            }
+        }],
+        updateRun: ['executeTask', async function (results) {
+            let update = {
+                status: results.executeTask?.status,
+                completed_at: new Date(),
+                updated_at: new Date()
+            };
+            update.duration = update.completed_at - results.startRun;
+            return await microlightDB.Runs.update(update, {
+                where: {
+                    id: run.id
+                },
+                returning: true
+            });
+        }]
+        // addToQueue:['createRun',function(results,cb){
+        //     // let options ={
+        //     //     task_slug:run.task,
+        //     //     }
+        //     // queue.daily_mis_report.push(options,cb)
+        // }],
+        // triggerRun:['createRun',async function(results){
+        //     // await tasksQueue.add(run.task, { foo: 'bar' });
+        //     // await importQueue.add(run.task, { foo: 'bar' });
+        //     // const handle = await tasks.trigger(
+        //     //     "hello-world",
+        //     //     "James"
+        //     // );
+        //     // return { handle };
+        // }]
+    };
+    const results = await async.auto(workflow);
+    console.log(results);
+    return {
+        success: true
+    };
+}
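
For reference (not part of the diff), executeRun expects a plain run row with id, task, and inputs, marks it running, resolves the task module by slug via getTaskDetails, and records the outcome and duration. A hedged sketch of creating a pending run and executing it directly, mirroring the createRun step in loadSchedules.js; the import paths are assumptions:

import microlightDB from './dist/server/database/microlight/index.js';
import executeRun from './dist/server/lib/executeRun.js';

// Create a pending run for the bundled hello_world task, then execute it.
const run = await microlightDB.Runs.create({
    task: 'hello_world',
    inputs: { name: 'World' },
    triggered_by: 'api', // one of user, api, webhook, schedule per the Runs model comment
    status: 'pending'
});
await executeRun(run.toJSON());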

package/dist/server/lib/generateDisplayFunctions.js
ADDED
@@ -0,0 +1,89 @@
+import microlightDB from "../database/microlight";
+import markdownit from 'markdown-it';
+const md = markdownit();
+const cleanMD = function (md) {
+    if (!md) md = '<empty>';
+    var lines = md.split('\n');
+    var trimmed_lines = [];
+    lines.forEach(function (line) {
+        trimmed_lines.push(line.trim());
+    });
+    return trimmed_lines.join('\n');
+};
+export default function generateDisplayfunctions(run_id) {
+    let ml = {
+        log: async function (text) {
+            await microlightDB.Logs.create({
+                created_at: new Date(),
+                run: run_id,
+                type: 'log',
+                content: text
+            });
+        },
+        json: async function (data) {
+            await microlightDB.Logs.create({
+                created_at: new Date(),
+                run: run_id,
+                type: 'json',
+                content: JSON.stringify(data)
+            });
+        },
+        markdown: async function (text) {
+            await microlightDB.Logs.create({
+                created_at: new Date(),
+                run: run_id,
+                type: 'markdown',
+                content: md.render(cleanMD(text))
+            });
+        },
+        error: async function (error) {
+            const data = {
+                message: error.message,
+                stack: error.stack
+            };
+            await microlightDB.Logs.create({
+                created_at: new Date(),
+                run: run_id,
+                type: 'error',
+                content: JSON.stringify(data)
+            });
+        },
+        wait: async function (time) {
+            this.log(`waiting for ${time / 1000} secs`);
+            return await new Promise(resolve => setTimeout(resolve, time));
+        },
+        danger: async function (text) {
+            await microlightDB.Logs.create({
+                created_at: new Date(),
+                run: run_id,
+                type: 'danger',
+                content: text
+            });
+        },
+        warn: async function (text) {
+            await microlightDB.Logs.create({
+                created_at: new Date(),
+                run: run_id,
+                type: 'warn',
+                content: text
+            });
+        },
+        info: async function (text) {
+            await microlightDB.Logs.create({
+                created_at: new Date(),
+                run: run_id,
+                type: 'info',
+                content: text
+            });
+        },
+        success: async function (text) {
+            await microlightDB.Logs.create({
+                created_at: new Date(),
+                run: run_id,
+                type: 'success',
+                content: text
+            });
+        }
+    };
+    return ml;
+}
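
Each helper above writes a single row to the logs table for the given run id. A small sketch of using the returned ml object outside a task function; the run id 42 is hypothetical and the import path is an assumption:

import generateDisplayfunctions from './dist/server/lib/generateDisplayFunctions.js';

const ml = generateDisplayfunctions(42); // hypothetical existing run id
await ml.log('starting import');
await ml.json({ rows: 120, skipped: 3 });
await ml.markdown(`
    #### Import summary
    - **Created:** 120
`);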

package/dist/server/lib/getAllTasks.js
ADDED
@@ -0,0 +1,32 @@
+import fs from 'fs';
+import path from 'path';
+function findAllTaskFiles(basePath) {
+    const tasks = [];
+    try {
+        const files = fs.readdirSync(basePath);
+        for (const file of files) {
+            const filePath = path.join(basePath, file);
+            const stat = fs.statSync(filePath);
+            if (stat.isDirectory()) {
+                // Recursively search subdirectories
+                tasks.push(...findAllTaskFiles(filePath));
+            } else if (file.endsWith('.task.js')) {
+                tasks.push(filePath);
+            }
+        }
+    } catch (err) {
+        console.error('Error reading directory:', err);
+    }
+    return tasks;
+}
+export default async function getAllTasks() {
+    const tasksDir = path.join(process.cwd(), 'src', 'tasks');
+    const taskPaths = findAllTaskFiles(tasksDir);
+    const tasks = await Promise.all(taskPaths.map(async fullPath => {
+        // Convert the full filesystem path to a module path
+        const modulePath = fullPath.split('/tasks/')[1];
+        const task = (await import(`@/tasks/${modulePath}`)).default;
+        return task;
+    }));
+    return tasks;
+}

package/dist/server/lib/getTaskDetails.js
ADDED
@@ -0,0 +1,17 @@
+import { notFound } from 'next/navigation';
+import { importTaskModule } from "../../importTaskModule";
+export default async function getTaskDetails({
+    params
+}) {
+    try {
+        let taskConfig = await importTaskModule(params.slug);
+        return {
+            ...taskConfig.default,
+            // _folderPath: folderPath
+            _folderPath: '1.intro'
+        };
+    } catch (e) {
+        if (e.code === 'MODULE_NOT_FOUND') notFound();
+        throw e;
+    }
+}

package/dist/server/lib/loadSchedules.js
ADDED
@@ -0,0 +1,77 @@
+import cron from "node-cron";
+import getAllTasks from "./getAllTasks";
+import async from 'async';
+import executeRun from "./executeRun";
+import microlightDB from "../database/microlight";
+import tasks from "../tasks";
+async function executeTask({
+    inputs,
+    task
+}) {
+    const workflow = {
+        createRun: async function () {
+            let run = await microlightDB.Runs.create({
+                task: task.slug,
+                logs: {},
+                inputs: inputs,
+                triggered_by: 'schedule',
+                status: 'pending'
+            }, {
+                returning: true
+            });
+            return run.toJSON();
+        },
+        startRun: ['createRun', async function (results) {
+            process.nextTick(() => executeRun(results.createRun));
+            return;
+        }]
+    };
+    try {
+        const results = await async.auto(workflow);
+        return {
+            success: true,
+            run: results.createRun
+        };
+    } catch (e) {
+        return {
+            success: false,
+            error: e
+        };
+    }
+}
+export default async function loadSchedules() {
+    // const tasks = await getAllTasks();
+    let schedules = [];
+    Object.values(tasks).forEach(function (task) { // ../tasks exports an object keyed by slug
+        // Check if task has schedules
+        if (task.is_enabled && task.schedules && Array.isArray(task.schedules)) {
+            task.schedules.forEach(scheduleConfig => {
+                if (scheduleConfig.is_enabled && scheduleConfig.schedule) {
+                    // Create cron job
+                    const job = cron.schedule(scheduleConfig.schedule, async () => {
+                        try {
+                            console.log('trigger the task');
+                            // Execute task with schedule-specific inputs
+                            await executeTask({
+                                inputs: scheduleConfig.inputs || {},
+                                task
+                            });
+                        } catch (error) {
+                            console.error(`Error executing scheduled task ${task.slug}:`, error);
+                        }
+                    }, {
+                        timezone: scheduleConfig.timezone || process.env.ML_CRON_TIMEZONE
+                    });
+                    schedules.push({
+                        task,
+                        schedule: scheduleConfig,
+                        job
+                    });
+                }
+            });
+        }
+    });
+    console.log('Count of schedules :' + schedules.length);
+    console.log(schedules);
+    return schedules;
+}
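
loadSchedules registers one node-cron job per enabled schedule on each enabled task and returns the list of { task, schedule, job } entries. A sketch of starting the schedules from a plain Node entry point and stopping the jobs on shutdown (node-cron jobs expose stop()); the import path is an assumption:

import loadSchedules from './dist/server/lib/loadSchedules.js';

const schedules = await loadSchedules();
process.on('SIGTERM', () => {
    // Stop every registered cron job before the process exits.
    schedules.forEach(({ job }) => job.stop());
});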

package/dist/server/tasks/1.intro/hello_world2.task.js
ADDED
@@ -0,0 +1,21 @@
+const task = {
+    slug: 'hello_world',
+    name: 'Hello World',
+    is_enabled: true,
+    description: 'Simply prints Hello world',
+    inputs: {
+        name: {
+            name: "Name",
+            description: "Name of the person",
+            // default: new Date(Date.now() - 86400000).toISOString().substring(0,10),
+            default: 'World',
+            placeholder: 'Your name',
+            type: 'string',
+            required: false
+        }
+    },
+    fn: async function (ml, inputs) {
+        ml.log('hello world');
+    }
+};
+export default task;

package/dist/server/tasks/1.intro/ml.task.js
ADDED
@@ -0,0 +1,31 @@
+const task = {
+    slug: 'ml',
+    name: 'Microlight display functions',
+    is_enabled: true,
+    description: 'Simply prints Hello world',
+    inputs: {},
+    fn: async function (ml, inputs) {
+        await ml.log('Microlight exposes a number of display functions. Using this you can display feedback to the user');
+        await ml.log('This is a log. Call this with ml.log("Hello world")');
+        await ml.log('ml.json(data)');
+        var data = {
+            name: 'microlight',
+            author: 'Alex J V',
+            github: 'https://github.com/IMGears/microlight'
+        };
+        await ml.json(data);
+        await ml.log('ml.markdown(text)');
+        await ml.markdown(`
+            #### Process FG Details:
+            - **Deleted:** 10
+            - **Created:** 10
+        `);
+        await ml.log("ml.error(new Error('test error'))");
+        await ml.error(new Error('test error'));
+        await ml.danger('this is a danger message');
+        await ml.warn('this is a warning message');
+        await ml.info('this is a info message');
+        await ml.success('this is a success message');
+    }
+};
+export default task;

package/dist/server/tasks/1.intro/scheduled.task.js
ADDED
@@ -0,0 +1,18 @@
+import { schedule } from "node-cron";
+const task = {
+    slug: 'scheduled',
+    name: 'Scheduled task',
+    is_enabled: true,
+    description: 'This task runs every 2 mins',
+    inputs: {},
+    // maybe should be called inputFields
+    schedules: [{
+        schedule: '*/2 * * * *',
+        is_enabled: true,
+        inputs: {}
+    }],
+    fn: async function (ml, inputs) {
+        await ml.log('This is a scheduled task. Will run every 2 mins');
+    }
+};
+export default task;

package/dist/server/tasks/1.intro/takes_time.task.js
ADDED
@@ -0,0 +1,28 @@
+const task = {
+    slug: 'takes_time',
+    name: 'Task that takes time to run',
+    is_enabled: true,
+    description: 'Simply prints Hello world',
+    inputs: {},
+    fn: async function (ml, inputs) {
+        await ml.log('This task is going to take time complete execution');
+        await ml.wait(1000); // Wait for 1 seconds
+        await ml.log('Doing step 1');
+        await ml.wait(2000); // Wait for 2 seconds
+        await ml.log('Doing step 2');
+        await ml.wait(2000); // Wait for 2 seconds
+        await ml.log('Doing step 3');
+        await ml.wait(3000); // Wait for 3 seconds
+        await ml.log('Doing step 4');
+        await ml.wait(1000); // Wait for 1 seconds
+        await ml.log('Doing step 5');
+        await ml.wait(100); // Wait for 0.1 seconds
+        await ml.log('Doing step 6');
+        await ml.wait(100); // Wait for 0.1 seconds
+        await ml.log('Doing step 7');
+        await ml.wait(100); // Wait for 0.1 seconds
+        await ml.wait(2000); // Wait for 2 seconds
+        await ml.log('all done');
+    }
+};
+export default task;

package/dist/server/tasks/1.intro/test/takes_time2.task.js
ADDED
@@ -0,0 +1,28 @@
+const task = {
+    slug: 'takes_time2',
+    name: 'Task that takes time to run',
+    is_enabled: true,
+    description: 'Simply prints Hello world',
+    inputs: {},
+    fn: async function (ml, inputs) {
+        await ml.log('This task is going to take time complete execution');
+        await ml.wait(1000); // Wait for 1 seconds
+        await ml.log('Doing step 1');
+        await ml.wait(2000); // Wait for 2 seconds
+        await ml.log('Doing step 2');
+        await ml.wait(2000); // Wait for 2 seconds
+        await ml.log('Doing step 3');
+        await ml.wait(3000); // Wait for 3 seconds
+        await ml.log('Doing step 4');
+        await ml.wait(1000); // Wait for 1 seconds
+        await ml.log('Doing step 5');
+        await ml.wait(100); // Wait for 0.1 seconds
+        await ml.log('Doing step 6');
+        await ml.wait(100); // Wait for 0.1 seconds
+        await ml.log('Doing step 7');
+        await ml.wait(100); // Wait for 0.1 seconds
+        await ml.wait(2000); // Wait for 2 seconds
+        await ml.log('all done');
+    }
+};
+export default task;

package/dist/server/tasks/index.js
ADDED
@@ -0,0 +1,33 @@
+import { glob } from 'glob';
+import path from 'path';
+import { fileURLToPath } from 'url';
+const __filename = fileURLToPath(import.meta.url);
+const __dirname = path.dirname(__filename);
+console.log(__filename);
+console.log(__dirname);
+
+// Find all task files and folder files
+const taskFiles = glob.sync(['**/*.task.js', '**/microlight.folder.js'], {
+    cwd: __dirname,
+    absolute: true
+});
+console.log(taskFiles);
+
+// Import all task files dynamically
+const tasks = await Promise.all(taskFiles.map(async filePath => {
+    const task = await import(filePath);
+    const taskName = path.basename(filePath, '.task.js');
+    // return [taskName, task.default];
+    return [task?.default?.slug, {
+        ...task?.default,
+        ...{
+            file_name: taskName
+        }
+    }];
+}));
+console.log(tasks);
+
+// Convert array of entries to an object
+const taskMap = Object.fromEntries(tasks);
+console.log(taskMap);
+export default taskMap;
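
The default export above maps each matched file's default export to its slug. A sketch of looking up a bundled task and running its fn directly with the display helpers, bypassing the run machinery; the import paths and run id are assumptions:

import taskMap from './dist/server/tasks/index.js';
import generateDisplayfunctions from './dist/server/lib/generateDisplayFunctions.js';

const task = taskMap['hello_world'];
if (task?.is_enabled) {
    const ml = generateDisplayfunctions(1); // hypothetical run id
    await task.fn(ml, { name: 'World' });
}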

package/index.js
ADDED
@@ -0,0 +1 @@
+console.log('came here');