echoapi-cron-scheduler-batch 1.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2) hide show
  1. package/index.js +581 -0
  2. package/package.json +23 -0
package/index.js ADDED
@@ -0,0 +1,581 @@
1
+ const cluster = require('cluster');
2
+ const schedule = require('node-schedule');
3
+ const _ = require('lodash');
4
+ const axios = require('axios');
5
+ const path = require('path');
6
+ const os = require('os');
7
+ const JSON5 = require('json5');
8
+ const Database = require('better-sqlite3');
9
+ const fs = require('fs');
10
+ const { v4: uuidv4 } = require('uuid');
11
+ const { execSync } = require('child_process');
12
+
13
/**
 * CronScheduler - multi-process automated-testing batch scheduler.
 *
 * Master/worker architecture on top of Node's `cluster` module:
 *  - Master: owns the SQLite job store, registers cron timers via
 *    node-schedule, dispatches runner units to workers, aggregates all
 *    unit results for a batch and reports them to the OpenAPI endpoint.
 *  - Worker: executes one runner unit at a time (runner-runtime) and
 *    serves script callbacks (DB query / shell execute) over a per-unit
 *    UNIX domain socket.
 *
 * Free identifiers (cluster, schedule, _, axios, path, os, JSON5,
 * Database, fs, uuidv4, execSync) are bound by the top-of-file requires.
 */
class CronScheduler {
  /**
   * @param {Object} [config]
   * @param {string} [config.apiUrl]    OpenAPI base URL; falls back to env OPENAPI_DOMAIN, then the public default.
   * @param {string} [config.dbPath]    SQLite file path; defaults to <tmpdir>/echoapi-batch-tasks.sqlite.
   * @param {number} [config.workerNum] Worker process count; defaults to the CPU count.
   */
  constructor(config = {}) {
    this.apiUrl = config.apiUrl || process?.env["OPENAPI_DOMAIN"] || "https://open.apipost.net";
    this.dbFile = config.dbPath || path.resolve(os.tmpdir(), 'echoapi-batch-tasks.sqlite');
    this.workerNum = config.workerNum || os.cpus().length;

    // Result aggregation collector: executionId -> { total, received, api_token, job_info }.
    // Gathers the results of every runner in one batch before reporting.
    this.resultCollector = new Map();

    // Only the master process opens the database; workers never touch it.
    if (cluster.isPrimary) {
      this.db = new Database(this.dbFile);
      this._initDB();
    }
  }

  /**
   * Internal: create the jobs table (idempotent) and enable WAL mode
   * so reads do not block the scheduler's writes.
   */
  _initDB() {
    this.db.pragma('journal_mode = WAL');
    this.db.prepare(`
      CREATE TABLE IF NOT EXISTS jobs (
        job_id TEXT PRIMARY KEY,
        name TEXT,
        frequency TEXT,
        is_cancel INTEGER DEFAULT 0,
        api_token TEXT,
        project_id TEXT,
        user_uid TEXT, -- 新增:存储用户UID
        cases TEXT, -- 存储 runners 数组 JSON
        create_dtime INTEGER
      )
    `).run();
  }

  /**
   * Main entry point: branches into master or worker role depending on
   * which side of the cluster fork this process is.
   */
  run() {
    if (cluster.isPrimary) {
      this._startMaster();
    } else {
      this._startWorker();
    }
  }

  /**
   * Converts a timestamp to an ISO-8601 string in the Asia/Shanghai zone.
   * NOTE(review): dayjs(undefined) resolves to "now", so a missing
   * start_at/end_at silently becomes the current time — confirm intended.
   * @param {number|string|Date} time
   * @returns {string} ISO formatted local time
   */
  formatTimeToISO(time) {
    const dayjs = require('dayjs');
    const utc = require('dayjs/plugin/utc');
    const timezone = require('dayjs/plugin/timezone');
    dayjs.extend(utc);
    dayjs.extend(timezone);
    return dayjs(time).tz('Asia/Shanghai').format();
  }

  // ==========================================
  // Master process logic: dispatch & aggregation
  // ==========================================

  _startMaster() {
    console.log(`[CronScheduler] Master process ${process.pid} is running.`);

    // Fork the worker pool.
    for (let i = 0; i < this.workerNum; i++) {
      this._bindWorker(cluster.fork());
    }

    // Auto-restart: replace any worker that dies.
    cluster.on('exit', (worker) => {
      console.warn(`[CronScheduler] Worker ${worker.process.pid} died. Forking a new one...`);
      this._bindWorker(cluster.fork());
    });

    // Rehydrate timers from the database on startup.
    this.loadJobs();
  }

  /**
   * Attach the result listener to a worker. Each UNIT_COMPLETED message
   * is folded into the batch collector; when every unit of a batch has
   * reported, the aggregated report is pushed upstream.
   */
  _bindWorker(worker) {
    worker.on('message', async (msg) => {
      if (msg.action === 'UNIT_COMPLETED') {
        const { executionId, data } = msg.payload;
        const record = this.resultCollector.get(executionId);

        if (record) {
          record.received.push(_.assign(data, {
            source: 'scheduled',
            start_at: this.formatTimeToISO(data?.start_at),
            end_at: this.formatTimeToISO(data?.end_at)
          }));
          // Have all runners of this job reported back?
          if (record.received.length >= record.total) {
            await this._reportAggregated(executionId, record);
          }
        }
      }
    });
  }

  /**
   * Push the aggregated batch result to the OpenAPI report endpoint in a
   * single request, then clear the in-memory collector entry.
   * @param {string} executionId batch identifier ("<job_id>_<timestamp>")
   * @param {Object} record      collector entry created by _dispatch()
   */
  async _reportAggregated(executionId, record) {
    try {
      const results = record.received;
      const endTime = Date.now();

      // --- Core statistics ---
      let stats = {
        http_total: 0,
        http_success: 0,
        assert_total: 0,
        assert_success: 0,
        total_response_time: 0,
        count: 0
      };

      results.forEach(item => {
        // Accumulate HTTP counters.
        stats.http_total += (item.http?.total || 0);
        stats.http_success += (item.http?.success || 0);

        // Accumulate assertion counters.
        stats.assert_total += (item.assert?.total || 0);
        stats.assert_success += (item.assert?.success || 0);

        // Accumulate response time for the average.
        stats.total_response_time += (item.total_response_time || 0);
        stats.count++;
      });

      // Pass rates, two decimal places; guard against division by zero.
      const httpPassRate = stats.http_total > 0
        ? ((stats.http_success / stats.http_total) * 100).toFixed(2)
        : "0.00";

      const assertPassRate = stats.assert_total > 0
        ? ((stats.assert_success / stats.assert_total) * 100).toFixed(2)
        : "0.00";

      // Average response time (whole milliseconds).
      const avgResponseTime = stats.count > 0
        ? (stats.total_response_time / stats.count).toFixed(0)
        : 0;

      // --- Final payload ---
      const finalPayload = {
        info: {
          job_id: record.job_info.job_id,
          name: record.job_info.name,
          project_id: record.job_info.project_id,
          user_uid: record.job_info.user_uid, // propagated into the final report
          execution_id: executionId,
          start_at: this.formatTimeToISO(record.job_info.start_time),
          end_at: this.formatTimeToISO(endTime),

          // Business statistics.
          http_pass_rate: `${httpPassRate}%`,
          assert_pass_rate: `${assertPassRate}%`,
          avg_response_time: `${avgResponseTime}ms`,

          // Raw counters.
          total_http: stats.http_total,
          success_http: stats.http_success,
          total_assert: stats.assert_total,
          success_assert: stats.assert_success,

          total_units: record.total,
          actual_units: results.length
        },
        results: results // raw per-unit detail list
      };

      console.log(`[CronScheduler] Batch job finished. ExecutionID: ${executionId}. Aggregating ${record.total} results...`);

      const response = await axios.post(`${this.apiUrl}/open/automated_testing/report/add`, finalPayload, {
        headers: {
          'Content-Type': 'application/json',
          'api-token': record.api_token
        }
      });
      if (response?.data?.code != 0) {
        console.log(`Execution Id ${executionId} reporting error: ${response?.data?.msg}`);
      } else {
        console.log(`Execution Id ${executionId} reported success...`);
      }
    } catch (err) {
      console.error(`[CronScheduler] Report aggregation failed:`, err.message);
    } finally {
      this.resultCollector.delete(executionId); // free collector memory
      console.info(`[CronScheduler] Result collector cleared for ${executionId}`);
    }
  }

  /**
   * List all currently scheduled (active) jobs with their details,
   * sorted by next execution time.
   * @returns {Array<Object>} job summaries ([] on error or when idle)
   */
  getPendingJobs() {
    // 1. job_ids of every timer registered in node-schedule's memory.
    const activeTimerIds = Object.keys(schedule.scheduledJobs);

    if (activeTimerIds.length === 0) {
      return [];
    }

    // 2. Batch-fetch their stored configuration via a single IN query.
    const placeholders = activeTimerIds.map(() => '?').join(',');
    const query = `SELECT job_id, name, frequency, project_id, create_dtime FROM jobs WHERE job_id IN (${placeholders}) AND is_cancel < 1`;

    try {
      const jobs = this.db.prepare(query).all(...activeTimerIds);

      // 3. Combine with the in-memory next-fire time.
      return _.map(jobs, job => {
        const cronRule = this.convertToCron(JSON5.parse(job.frequency));
        const nextTime = this.getNextExecutionTime(cronRule);

        return {
          job_id: job.job_id,
          name: job.name,
          project_id: job.project_id,
          cron: cronRule,
          next_run_at: typeof nextTime === 'number' ? new Date(nextTime).toLocaleString() : nextTime,
          timestamp: nextTime // numeric; lets callers sort
        };
      }).sort((a, b) => (a.timestamp - b.timestamp)); // soonest first
    } catch (e) {
      console.error('[View Jobs Error]', e.message);
      return [];
    }
  }

  /**
   * Incrementally (re)register one timer — avoids time drift and double
   * firing by always cancelling any existing in-memory timer first.
   * @param {string} job_id
   * @param {string} name      human-readable job name (log output only)
   * @param {string|Object} frequency JSON5 string or parsed frequency config
   * @param {Object} jobData   at minimum { is_cancel }
   */
  upsertTimer(job_id, name, frequency, jobData) {
    // 1. Cancel the previous in-memory timer, if any.
    if (schedule.scheduledJobs[job_id]) {
      schedule.scheduledJobs[job_id].cancel();
    }

    // 2. Only (re)schedule jobs that are not cancelled.
    if (jobData && jobData.is_cancel < 1) {
      try {
        const freq = typeof frequency === 'string' ? JSON5.parse(frequency) : frequency;
        const cronRule = this.convertToCron(freq);

        schedule.scheduleJob(job_id, cronRule, () => {
          const now = new Date().toLocaleString();
          console.log(`[Timer Triggered] ${name} (${job_id}) at ${now}`);

          // Re-read the row on every trigger so the freshest runners
          // (and cancellation state) are dispatched.
          const currentJob = this.db.prepare(`SELECT * FROM jobs WHERE job_id = ?`).get(job_id);
          if (currentJob && currentJob.is_cancel < 1) {
            this._dispatch(currentJob);
          }
        });
      } catch (e) {
        console.error(`[CronScheduler] Upsert timer error (ID: ${job_id}):`, e.message);
      }
    }
  }

  // ==========================================
  // Public API: called from the Express routing layer
  // ==========================================

  /**
   * Create or update a job (upsert keyed on job_id) and refresh its timer.
   *
   * FIX: the previous INSERT listed 7 columns but bound 8 values and its
   * ON CONFLICT clause referenced excluded.user_uid without inserting
   * user_uid — better-sqlite3 rejects both, so job creation always
   * failed and user_uid was never persisted. The column list and
   * placeholder count now match the 8 bound values.
   *
   * @param {Object} payload { job_id, name, frequency, api_token, project_id, user_uid, runners }
   * @returns {boolean} true on success
   */
  createJob(payload) {
    const { job_id, name, frequency, api_token, project_id, user_uid, runners } = payload;
    const casesJson = JSON.stringify(Array.isArray(runners) ? runners : []);
    const freqJson = JSON.stringify(frequency);

    const stmt = this.db.prepare(`
      INSERT INTO jobs (job_id, name, frequency, api_token, project_id, user_uid, cases, create_dtime)
      VALUES (?, ?, ?, ?, ?, ?, ?, ?)
      ON CONFLICT(job_id) DO UPDATE SET
        name=excluded.name,
        frequency=excluded.frequency,
        cases=excluded.cases,
        api_token=excluded.api_token,
        user_uid=excluded.user_uid,
        is_cancel=0
    `);
    stmt.run(job_id, name, freqJson, api_token, project_id, user_uid || '', casesJson, Date.now());

    // Incremental timer refresh — no full loadJobs() needed.
    this.upsertTimer(job_id, name, frequency, { is_cancel: 0 });
    return true;
  }

  /** Pause a job: mark cancelled in the DB and drop its timer. */
  cancelJob(job_id) {
    this.db.prepare(`UPDATE jobs SET is_cancel = 1 WHERE job_id = ?`).run(job_id);
    if (schedule.scheduledJobs[job_id]) {
      schedule.scheduledJobs[job_id].cancel();
    }
    return true;
  }

  /** Resume a paused job: clear the cancel flag and re-register its timer. */
  restartJob(job_id) {
    this.db.prepare(`UPDATE jobs SET is_cancel = 0 WHERE job_id = ?`).run(job_id);
    const job = this.db.prepare(`SELECT * FROM jobs WHERE job_id = ?`).get(job_id);
    if (job) {
      this.upsertTimer(job.job_id, job.name, job.frequency, job);
    }
    return true;
  }

  /** Permanently remove a job row and its in-memory timer. */
  deleteJob(job_id) {
    this.db.prepare(`DELETE FROM jobs WHERE job_id = ?`).run(job_id);
    if (schedule.scheduledJobs[job_id]) {
      schedule.scheduledJobs[job_id].cancel();
    }
    return true;
  }

  /**
   * List every job of a project. The bulky `cases` JSON is stripped from
   * each row; only the runner testing_ids are returned.
   * @param {string} project_id
   * @returns {Array<Object>}
   */
  getAllJobs(project_id) {
    const jobs = this.db.prepare(`SELECT * FROM jobs WHERE project_id = ?`).all(project_id);
    return _.map(jobs, job => {
      const freq = JSON5.parse(job.frequency || '{}');
      const cases = JSON5.parse(job.cases || '[]');
      const cronRule = this.convertToCron(freq);

      // Next fire time — hidden ('-') for cancelled jobs.
      let nextRunTime = '-';
      if (job.is_cancel < 1) {
        nextRunTime = this.getNextExecutionTime(cronRule);
      }

      return {
        ..._.omit(job, ['cases']), // drop the large payload field
        status: job.is_cancel === 0 ? 0 : 1, // 0: running, 1: paused
        next_run_time: nextRunTime,
        frequency_display: cronRule, // raw cron rule for the UI
        runners: _.map(cases, (v) => v?.option?.testing_id)
      };
    });
  }

  /** Full reload: cancel every timer, then re-register all active jobs. */
  loadJobs() {
    console.log('[CronScheduler] Initializing timers from database...');
    // Clean slate on startup.
    Object.values(schedule.scheduledJobs).forEach(j => j.cancel());

    const jobs = this.db.prepare(`SELECT * FROM jobs WHERE is_cancel < 1`).all();
    jobs.forEach(job => {
      this.upsertTimer(job.job_id, job.name, job.frequency, job);
    });
  }

  /**
   * Dispatch: fan one batch's runners out across the live workers and
   * register the batch in resultCollector for later aggregation.
   * @param {Object} job full jobs-table row
   */
  _dispatch(job) {
    try {
      const runners = JSON5.parse(job.cases || '[]');
      if (runners.length === 0) return;

      const executionId = `${job.job_id}_${Date.now()}`;
      this.resultCollector.set(executionId, {
        total: runners.length,
        received: [],
        api_token: job.api_token,
        job_info: {
          job_id: job.job_id,
          name: job.name,
          project_id: job.project_id,
          user_uid: job.user_uid, // cached for the final report
          start_time: Date.now()
        }
      });

      runners.forEach((runner, index) => {
        // Load balancing: pick a random currently-connected worker.
        const workers = Object.values(cluster.workers).filter(w => w.isConnected());
        const worker = _.sample(workers);
        if (worker) {
          worker.send({
            action: 'EXECUTE_UNIT',
            payload: {
              executionId,
              api_token: job.api_token,
              test_events: runner.test_events,
              option: runner.option,
              unit_index: index
            }
          });
        }
      });
    } catch (e) {
      console.error(`[CronScheduler] Dispatch error:`, e.message);
    }
  }

  // ==========================================
  // Worker process logic: unit execution
  // ==========================================

  /**
   * Worker main loop: on EXECUTE_UNIT, open a per-unit UNIX socket for
   * script callbacks, run the unit via runner-runtime, then report the
   * result back to the master and tear the socket down.
   */
  _startWorker() {
    const { run: runner } = require('runner-runtime');
    const net = require('net');

    process.on('message', async (msg) => {
      if (msg.action === 'EXECUTE_UNIT') {
        const { executionId, api_token, test_events, option } = msg.payload;
        const socketPath = path.join(os.tmpdir(), `echoapi_${uuidv4()}.sock`);

        // Callback channel for in-test scripts (DB queries, shell exec).
        const server = net.createServer((socket) => {
          socket.on('data', async (stream) => {
            try {
              const info = JSON.parse(stream.toString());
              const { action, data } = info;
              await this._handleUnitScript(socket, action, data);
            } catch (e) {
              socket.write(JSON.stringify({ status: 'error', message: e.message }) + "\n\n");
            }
          });
        });

        server.listen(socketPath, () => {
          const finalOptions = _.cloneDeep(option || {});
          // Pass the socket path to the runtime via an env var (base64).
          const base64Pipe = Buffer.from(socketPath).toString('base64');
          _.set(finalOptions, 'env.ELECTRON_PIPE', base64Pipe);

          runner(test_events, finalOptions, (res) => {
            if (res?.action === 'complete') {
              process.send({
                action: 'UNIT_COMPLETED',
                payload: { executionId, data: res.data, api_token }
              });
              server.close();
              if (fs.existsSync(socketPath)) {
                try { fs.unlinkSync(socketPath); } catch (e) { }
              }
            }
          });
        });
      }
    });
    process.on('uncaughtException', (err) => {
      console.error('[Worker Fatal Error] 捕获到沙箱崩溃:', err.message);
      // Keep the worker alive on sandbox crashes; the master will refork
      // it if it does die.
    });
  }

  /**
   * Handle one script callback received on the unit socket.
   *
   * SECURITY NOTE(review): the 'execute' action interpolates the
   * socket-supplied `file` and `args` directly into an execSync command
   * line — any writer to the local socket gets arbitrary command
   * execution. The socket lives in tmpdir and is intended for the test
   * runtime only, but this should be hardened (allow-list / execFileSync
   * with an argv array) before exposure beyond that trust boundary.
   *
   * @param {net.Socket} socket reply channel ("\n\n"-terminated JSON frames)
   * @param {string} action 'queryDatabase' | 'execute'
   * @param {Object} data action payload
   */
  async _handleUnitScript(socket, action, data) {
    try {
      switch (action) {
        case 'queryDatabase': {
          const { dbconfig, query } = data;
          const { DatabaseQuery } = require('database-query');
          const result = await DatabaseQuery(dbconfig, query);
          socket.write(JSON.stringify(result) + "\n\n");
          break;
        }
        case 'execute': {
          const { file, args = [], option: execOption = {} } = data;
          const ext = path.extname(file).slice(1).toLowerCase();
          let command = "";

          // Pick the interpreter from the file extension.
          switch (ext) {
            case 'jar': command = `java -jar ${file} ${args.join(' ')}`; break;
            case 'php': command = `php -f ${file} ${args.join(' ')}`; break;
            case 'js': command = `node ${file} ${args.join(' ')}`; break;
            case 'py':
            case 'py3': command = `python3 ${file} ${args.join(' ')}`; break;
            case 'go': command = `go run ${file} ${args.join(' ')}`; break;
            case 'sh': command = `sh ${file} ${args.join(' ')}`; break;
            default: command = `${file} ${args.join(' ')}`;
          }

          // cp936 on Windows so GBK console output decodes correctly.
          const isWindows = process.platform === 'win32';
          const config = _.assign(isWindows ? { encoding: 'cp936' } : { encoding: 'utf8' }, execOption);
          const output = String(execSync(command, config));

          socket.write(JSON.stringify({ status: 'success', result: output }) + "\n\n");
          break;
        }
      }
    } catch (err) {
      socket.write(JSON.stringify({ status: 'error', message: err.message }) + "\n\n");
    }
  }

  // ==========================================
  // Helpers
  // ==========================================

  /**
   * Convert a frequency config into a 6-field cron rule. Every produced
   * rule pins the seconds field to 0 so a job fires at most once per
   * matching minute.
   * @param {Object|string|null} freq parsed config or JSON5 string
   * @returns {string} 6-field cron expression (default: '0 0 0 * * *', daily at midnight)
   */
  convertToCron(freq) {
    if (!freq) return '0 0 0 * * *';
    const data = typeof freq === 'string' ? JSON5.parse(freq) : freq;

    // Case A: caller supplied a raw cron expression.
    if (data.type === 'cron') {
      const exp = data.cron?.expression || '0 0 * * *';
      const parts = exp.trim().split(/\s+/);
      // Prefix a 0-seconds field onto 5-field expressions.
      return parts.length === 5 ? `0 ${exp}` : exp;
    }

    // Case B: preset cycle configuration.
    if (data.type === 'preset') {
      const { cycle, config } = data.preset;

      switch (cycle) {
        case 'minute':
          // Every `interval` minutes, at second 0.
          return `0 */${config.interval} * * * *`;

        case 'hour':
          // Every `interval` hours, at minute 0 second 0.
          return `0 0 */${config.interval} * * *`;

        case 'day': {
          // Accepts 'HH:mm' strings such as '10:30'.
          const [hour, minute] = (config.time || "0:0").split(':');
          return `0 ${parseInt(minute)} ${parseInt(hour)} * * *`;
        }

        case 'week': {
          const [hour, minute] = (config.time || "0:0").split(':');
          // config.weekdays is expected to be an array like [1, 3, 5].
          const days = Array.isArray(config.weekdays) ? config.weekdays.join(',') : '*';
          return `0 ${parseInt(minute)} ${parseInt(hour)} * * ${days}`;
        }

        default:
          return '0 0 0 * * *';
      }
    }

    return '0 0 0 * * *';
  }

  /**
   * Next fire time of a cron rule in the Asia/Shanghai zone.
   * @param {string} cron 6-field cron expression
   * @returns {number|string} epoch milliseconds, or '-' if unparseable
   */
  getNextExecutionTime(cron) {
    try {
      const { CronExpressionParser } = require('cron-parser');
      const interval = CronExpressionParser.parse(cron, { tz: 'Asia/Shanghai', utc: false });
      return new Date(interval.next().toISOString()).getTime()
    } catch (e) {
      return '-';
    }
  }
}
580
+
581
// CommonJS export: consumers require() this class and call run() from their entry point.
module.exports = CronScheduler;
package/package.json ADDED
@@ -0,0 +1,23 @@
1
+ {
2
+ "name": "echoapi-cron-scheduler-batch",
3
+ "version": "1.0.1",
4
+ "description": "",
5
+ "main": "index.js",
6
+ "scripts": {
7
+ "test": "echo \"Error: no test specified\" && exit 1"
8
+ },
9
+ "author": "rtc",
10
+ "license": "MIT",
11
+ "dependencies": {
12
+ "axios": "^1.13.4",
13
+ "better-sqlite3": "^12.6.2",
14
+ "cron-parser": "^5.5.0",
15
+ "database-query": "^1.1.19",
16
+ "dayjs": "^1.11.19",
17
+ "json5": "^2.2.3",
18
+ "lodash": "^4.17.23",
19
+ "node-schedule": "^2.1.1",
20
+ "runner-runtime": "^1.0.120",
21
+ "uuid": "^9.0.0"
22
+ }
23
+ }