jm2 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/GNU-AGPL-3.0 +665 -0
- package/README.md +603 -0
- package/bin/jm2.js +24 -0
- package/package.json +70 -0
- package/src/cli/commands/add.js +206 -0
- package/src/cli/commands/config.js +212 -0
- package/src/cli/commands/edit.js +198 -0
- package/src/cli/commands/export.js +61 -0
- package/src/cli/commands/flush.js +132 -0
- package/src/cli/commands/history.js +179 -0
- package/src/cli/commands/import.js +180 -0
- package/src/cli/commands/list.js +174 -0
- package/src/cli/commands/logs.js +415 -0
- package/src/cli/commands/pause.js +97 -0
- package/src/cli/commands/remove.js +107 -0
- package/src/cli/commands/restart.js +68 -0
- package/src/cli/commands/resume.js +96 -0
- package/src/cli/commands/run.js +115 -0
- package/src/cli/commands/show.js +159 -0
- package/src/cli/commands/start.js +46 -0
- package/src/cli/commands/status.js +47 -0
- package/src/cli/commands/stop.js +48 -0
- package/src/cli/index.js +274 -0
- package/src/cli/utils/output.js +267 -0
- package/src/cli/utils/prompts.js +56 -0
- package/src/core/config.js +227 -0
- package/src/core/history-db.js +439 -0
- package/src/core/job.js +329 -0
- package/src/core/logger.js +382 -0
- package/src/core/storage.js +315 -0
- package/src/daemon/executor.js +409 -0
- package/src/daemon/index.js +873 -0
- package/src/daemon/scheduler.js +465 -0
- package/src/ipc/client.js +112 -0
- package/src/ipc/protocol.js +183 -0
- package/src/ipc/server.js +92 -0
- package/src/utils/cron.js +205 -0
- package/src/utils/datetime.js +237 -0
- package/src/utils/duration.js +226 -0
- package/src/utils/paths.js +164 -0
|
@@ -0,0 +1,227 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Configuration management for JM2
|
|
3
|
+
* Handles daemon and application configuration
|
|
4
|
+
*/
|
|
5
|
+
|
|
6
|
+
import { readJsonFile, writeJsonFile } from './storage.js';
|
|
7
|
+
import { getConfigFile } from '../utils/paths.js';
|
|
8
|
+
|
|
9
|
+
/**
 * Default configuration values
 *
 * getConfig() deep-merges the user's stored config file over this object,
 * so every key here has a working fallback. Keep comments in sync with the
 * checks in validateConfig().
 */
export const DEFAULT_CONFIG = {
  // Daemon settings
  daemon: {
    // Maximum concurrent job executions
    maxConcurrent: 10,
    // Default shell for command execution (platform-dependent)
    shell: process.platform === 'win32' ? 'cmd.exe' : '/bin/sh',
    // Shell arguments used to pass the command string to the shell
    shellArgs: process.platform === 'win32' ? ['/c'] : ['-c'],
  },

  // Job defaults
  jobs: {
    // Default timeout for jobs (null = no timeout)
    defaultTimeout: null,
    // Default retry count
    defaultRetry: 0,
    // Default working directory (null = current directory)
    defaultCwd: null,
  },

  // Logging settings
  logging: {
    // Log level (DEBUG, INFO, WARN, ERROR)
    level: 'INFO',
    // Maximum log file size in bytes (10MB)
    maxFileSize: 10 * 1024 * 1024,
    // Number of rotated log files to keep
    maxFiles: 5,
  },

  // History settings
  history: {
    // Maximum number of history entries per job
    maxEntriesPerJob: 100,
    // Days to keep history
    retentionDays: 30,
  },

  // Cleanup settings
  cleanup: {
    // Auto-cleanup completed one-time jobs after N days
    completedJobRetentionDays: 7,
    // Auto-cleanup old logs after N days
    logRetentionDays: 30,
  },
};
|
|
59
|
+
|
|
60
|
+
/**
 * Load the effective configuration.
 * Reads the stored config file (falling back to an empty object when it is
 * missing) and layers it over DEFAULT_CONFIG with a deep merge, so stored
 * overrides win and everything else keeps its default.
 * @returns {object} Effective configuration object
 */
export function getConfig() {
  const overrides = readJsonFile(getConfigFile(), {});
  return deepMerge(DEFAULT_CONFIG, overrides);
}
|
|
69
|
+
|
|
70
|
+
/**
 * Persist a configuration object to the config file.
 * NOTE(review): the object is written verbatim — no diffing against
 * DEFAULT_CONFIG happens here, so callers that pass a fully merged config
 * (e.g. setConfigValue) persist default values alongside their overrides.
 * @param {object} config - Configuration to save
 */
export function saveConfig(config) {
  writeJsonFile(getConfigFile(), config);
}
|
|
78
|
+
|
|
79
|
+
/**
 * Look up a configuration value by dot-separated path.
 * Walks the merged config one path segment at a time; if the walk leaves
 * object territory (null, undefined, or a primitive) before the path is
 * exhausted, the default is returned instead.
 * @param {string} path - Dot-separated path (e.g., 'daemon.maxConcurrent')
 * @param {*} defaultValue - Default value if path not found
 * @returns {*} Configuration value, or defaultValue when absent
 */
export function getConfigValue(path, defaultValue = undefined) {
  let node = getConfig();

  for (const segment of path.split('.')) {
    if (node === null || node === undefined || typeof node !== 'object') {
      return defaultValue;
    }
    node = node[segment];
  }

  return node === undefined ? defaultValue : node;
}
|
|
99
|
+
|
|
100
|
+
/**
 * Set a specific configuration value by dot-separated path and persist it.
 * Intermediate containers are created as needed.
 *
 * Fix: the previous guard (`current[part] === undefined || typeof current[part]
 * !== 'object'`) let `null` through because `typeof null === 'object'`, so a
 * path traversing a null default (e.g. 'jobs.defaultTimeout.x') threw on the
 * next segment. Null and array values are now replaced with a fresh object
 * before descending.
 *
 * @param {string} path - Dot-separated path (e.g., 'daemon.maxConcurrent')
 * @param {*} value - Value to set
 */
export function setConfigValue(path, value) {
  const config = getConfig();
  const parts = path.split('.');
  let current = config;

  for (let i = 0; i < parts.length - 1; i++) {
    const part = parts[i];
    // Replace anything that cannot hold nested keys (primitives, null, arrays).
    if (
      current[part] === null ||
      typeof current[part] !== 'object' ||
      Array.isArray(current[part])
    ) {
      current[part] = {};
    }
    current = current[part];
  }

  current[parts[parts.length - 1]] = value;
  saveConfig(config);
}
|
|
121
|
+
|
|
122
|
+
/**
 * Reset configuration to defaults by clearing every stored override.
 * After this call getConfig() returns DEFAULT_CONFIG values only.
 */
export function resetConfig() {
  saveConfig({});
}
|
|
128
|
+
|
|
129
|
+
/**
 * Validate a configuration object.
 *
 * Fix: numeric checks previously used `typeof x === 'number'` plus a
 * lower-bound comparison, which both `NaN` and `Infinity` slipped through
 * (NaN comparisons are always false; Infinity exceeds any bound). Bounds are
 * now checked with Number.isFinite, which also rejects non-numbers.
 *
 * @param {object} config - Configuration to validate
 * @returns {object} Validation result { valid: boolean, errors: string[] }
 */
export function validateConfig(config) {
  const errors = [];

  // True when value is a finite number >= min (rejects NaN/Infinity/non-numbers).
  const isNumberAtLeast = (value, min) => Number.isFinite(value) && value >= min;

  // Validate daemon settings
  if (config.daemon) {
    if (config.daemon.maxConcurrent !== undefined && !isNumberAtLeast(config.daemon.maxConcurrent, 1)) {
      errors.push('daemon.maxConcurrent must be a positive number');
    }
    if (config.daemon.shell !== undefined) {
      if (typeof config.daemon.shell !== 'string' || config.daemon.shell.length === 0) {
        errors.push('daemon.shell must be a non-empty string');
      }
    }
  }

  // Validate job defaults
  if (config.jobs) {
    if (config.jobs.defaultRetry !== undefined && !isNumberAtLeast(config.jobs.defaultRetry, 0)) {
      errors.push('jobs.defaultRetry must be a non-negative number');
    }
  }

  // Validate logging settings
  if (config.logging) {
    if (config.logging.level !== undefined) {
      const validLevels = ['DEBUG', 'INFO', 'WARN', 'ERROR'];
      if (!validLevels.includes(config.logging.level)) {
        errors.push(`logging.level must be one of: ${validLevels.join(', ')}`);
      }
    }
    if (config.logging.maxFileSize !== undefined && !isNumberAtLeast(config.logging.maxFileSize, 1024)) {
      errors.push('logging.maxFileSize must be at least 1024 bytes');
    }
  }

  // Validate history settings
  if (config.history) {
    if (config.history.maxEntriesPerJob !== undefined && !isNumberAtLeast(config.history.maxEntriesPerJob, 1)) {
      errors.push('history.maxEntriesPerJob must be a positive number');
    }
    if (config.history.retentionDays !== undefined && !isNumberAtLeast(config.history.retentionDays, 1)) {
      errors.push('history.retentionDays must be a positive number');
    }
  }

  return {
    valid: errors.length === 0,
    errors,
  };
}
|
|
194
|
+
|
|
195
|
+
/**
 * Deep merge two plain objects without sharing nested references.
 *
 * Fix: the previous version started from a shallow `{ ...target }`, so any
 * nested object present only in `target` was returned by reference.
 * getConfig() merges DEFAULT_CONFIG with stored overrides, and
 * setConfigValue() then mutates the merged result — through that shared
 * reference it silently mutated DEFAULT_CONFIG itself. Nested plain objects
 * from both sides are now cloned recursively; arrays and primitives keep the
 * original copy-by-value/reference semantics.
 *
 * @param {object} target - Base object (lower precedence)
 * @param {object} source - Overriding object (higher precedence)
 * @returns {object} New merged object sharing no nested plain objects with target
 */
function deepMerge(target, source) {
  const isPlainObject = (v) => v !== null && typeof v === 'object' && !Array.isArray(v);

  const result = {};

  // Copy target first, cloning nested plain objects so callers may mutate
  // the merged result without touching the base object.
  for (const key of Object.keys(target)) {
    result[key] = isPlainObject(target[key]) ? deepMerge(target[key], {}) : target[key];
  }

  // Then layer source on top; nested plain objects merge recursively,
  // everything else (primitives, arrays, null) replaces outright.
  for (const key of Object.keys(source)) {
    if (isPlainObject(source[key])) {
      result[key] = isPlainObject(target[key])
        ? deepMerge(target[key], source[key])
        : deepMerge({}, source[key]);
    } else {
      result[key] = source[key];
    }
  }

  return result;
}
|
|
218
|
+
|
|
219
|
+
// Default export mirrors the named exports for consumers that import the
// module as a single object.
export default {
  DEFAULT_CONFIG,
  getConfig,
  saveConfig,
  getConfigValue,
  setConfigValue,
  resetConfig,
  validateConfig,
};
|
|
@@ -0,0 +1,439 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* SQLite-based history storage for JM2
|
|
3
|
+
* Provides efficient querying, indexing, and automatic cleanup
|
|
4
|
+
* Handles concurrent access through SQLite's file locking mechanism
|
|
5
|
+
*/
|
|
6
|
+
|
|
7
|
+
import Database from 'better-sqlite3';
|
|
8
|
+
import { getHistoryDbFile, ensureDataDir } from '../utils/paths.js';
|
|
9
|
+
import { getConfigValue } from './config.js';
|
|
10
|
+
|
|
11
|
+
// Database instance (singleton pattern): one connection per process, created
// lazily by getDatabase() and released/nulled by closeDatabase().
let dbInstance = null;
|
|
13
|
+
|
|
14
|
+
/**
 * Lazily open, or return the already-open, SQLite database connection.
 * On first call: ensures the data directory exists, opens the database file,
 * applies pragmas (busy_timeout, WAL journal, foreign keys), and creates the
 * schema. Later calls return the cached singleton connection.
 * @returns {Database} SQLite database instance
 */
export function getDatabase() {
  if (dbInstance) {
    return dbInstance;
  }

  ensureDataDir();
  dbInstance = new Database(getHistoryDbFile());

  // Wait up to 5 seconds for competing writers before failing with SQLITE_BUSY.
  dbInstance.pragma('busy_timeout = 5000');
  // WAL journal mode lets readers proceed concurrently with a writer.
  dbInstance.pragma('journal_mode = WAL');
  dbInstance.pragma('foreign_keys = ON');

  initializeSchema();
  return dbInstance;
}
|
|
41
|
+
|
|
42
|
+
/**
 * Create the history table and its indexes when they do not exist yet.
 * Operates on the module-level dbInstance; getDatabase() invokes this once,
 * immediately after opening the connection.
 */
function initializeSchema() {
  const database = dbInstance;

  // Table first; the index statements below depend on it.
  database.exec(`
    CREATE TABLE IF NOT EXISTS history (
      id INTEGER PRIMARY KEY AUTOINCREMENT,
      job_id INTEGER NOT NULL,
      job_name TEXT NOT NULL,
      command TEXT NOT NULL,
      status TEXT NOT NULL CHECK(status IN ('success', 'failed', 'running')),
      exit_code INTEGER,
      start_time TEXT NOT NULL,
      end_time TEXT,
      duration INTEGER, -- Duration in milliseconds
      error TEXT,
      timestamp TEXT NOT NULL,
      created_at DATETIME DEFAULT CURRENT_TIMESTAMP
    )
  `);

  // Indexes covering the common query shapes (per-job, time-ranged, by status).
  database.exec(`
    CREATE INDEX IF NOT EXISTS idx_history_job_id ON history(job_id);
    CREATE INDEX IF NOT EXISTS idx_history_timestamp ON history(timestamp);
    CREATE INDEX IF NOT EXISTS idx_history_job_id_timestamp ON history(job_id, timestamp DESC);
    CREATE INDEX IF NOT EXISTS idx_history_status ON history(status);
  `);
}
|
|
75
|
+
|
|
76
|
+
/**
 * Close the singleton database connection, if one is open.
 * Safe to call repeatedly; intended for graceful shutdown. A later
 * getDatabase() call will reopen the connection.
 */
export function closeDatabase() {
  if (!dbInstance) {
    return;
  }
  dbInstance.close();
  dbInstance = null;
}
|
|
86
|
+
|
|
87
|
+
/**
 * Insert a history entry and apply the retention policy for its job.
 * Optional fields default to null; the entry timestamp defaults to now.
 * @param {object} entry - History entry
 * @param {number} entry.jobId - Job ID
 * @param {string} entry.jobName - Job name
 * @param {string} entry.command - Command that was executed
 * @param {string} entry.status - Execution status ('success', 'failed', 'running')
 * @param {number} [entry.exitCode] - Exit code of the command
 * @param {string} entry.startTime - ISO timestamp when execution started
 * @param {string} [entry.endTime] - ISO timestamp when execution ended
 * @param {number} [entry.duration] - Duration in milliseconds
 * @param {string} [entry.error] - Error message if failed
 * @param {string} [entry.timestamp] - Entry timestamp (defaults to now)
 * @returns {object} The inserted history entry with its generated id
 */
export function addHistoryEntry(entry) {
  const db = getDatabase();

  // Normalize the row up front: optional fields become explicit nulls so the
  // named SQL parameters are always bound.
  const row = {
    jobId: entry.jobId,
    jobName: entry.jobName,
    command: entry.command,
    status: entry.status,
    exitCode: entry.exitCode ?? null,
    startTime: entry.startTime,
    endTime: entry.endTime ?? null,
    duration: entry.duration ?? null,
    error: entry.error ?? null,
    timestamp: entry.timestamp || new Date().toISOString(),
  };

  const inserted = db
    .prepare(`
      INSERT INTO history (job_id, job_name, command, status, exit_code, start_time, end_time, duration, error, timestamp)
      VALUES (@jobId, @jobName, @command, @status, @exitCode, @startTime, @endTime, @duration, @error, @timestamp)
    `)
    .run(row);

  // Trim this job's history according to the configured retention policy.
  enforceRetentionPolicy(entry.jobId);

  return {
    id: inserted.lastInsertRowid,
    ...row,
  };
}
|
|
136
|
+
|
|
137
|
+
/**
 * Enforce the history retention policy for a job.
 * Applies two independent limits from config:
 *  - history.maxEntriesPerJob: keep only the newest N entries for this job
 *  - history.retentionDays: delete entries inserted more than N days ago,
 *    based on created_at (insertion time), not the job's logical timestamp
 *
 * Fix: created_at is written by SQLite's CURRENT_TIMESTAMP as UTC
 * 'YYYY-MM-DD HH:MM:SS', but the cutoff was previously a JS ISO string
 * ('YYYY-MM-DDTHH:MM:SS.sssZ'). The comparison is lexical, and the 'T' vs ' '
 * separator mismatch skewed same-day comparisons; the cutoff is now formatted
 * in SQLite's own layout so the lexical comparison is exact.
 *
 * @param {number} jobId - The job ID that just had an entry added
 */
function enforceRetentionPolicy(jobId) {
  const db = getDatabase();

  const maxEntriesPerJob = getConfigValue('history.maxEntriesPerJob', 100);
  const retentionDays = getConfigValue('history.retentionDays', 30);

  // Delete oldest entries if the count for this job exceeds maxEntriesPerJob.
  if (maxEntriesPerJob > 0) {
    // LIMIT -1 OFFSET n selects every row after the newest n, i.e. the excess.
    const deleteExcessStmt = db.prepare(`
      DELETE FROM history
      WHERE id IN (
        SELECT id FROM history
        WHERE job_id = @jobId
        ORDER BY timestamp DESC
        LIMIT -1 OFFSET @maxEntries
      )
    `);
    deleteExcessStmt.run({ jobId, maxEntries: maxEntriesPerJob });
  }

  // Delete entries older than retentionDays, judged by insertion time.
  if (retentionDays > 0) {
    const cutoffDate = new Date();
    cutoffDate.setDate(cutoffDate.getDate() - retentionDays);
    // toISOString() is UTC; reshape to 'YYYY-MM-DD HH:MM:SS' to match
    // the format CURRENT_TIMESTAMP stores in created_at.
    const cutoffTimestamp = cutoffDate.toISOString().replace('T', ' ').slice(0, 19);

    const deleteOldStmt = db.prepare(`
      DELETE FROM history
      WHERE created_at < @cutoffTimestamp
    `);
    deleteOldStmt.run({ cutoffTimestamp });
  }
}
|
|
178
|
+
|
|
179
|
+
/**
 * Query execution history with filtering and pagination.
 * All filters are optional and combine with AND; results are ordered by the
 * entry timestamp.
 * @param {object} options - Query options
 * @param {number} [options.jobId] - Filter by job ID
 * @param {string} [options.status] - Filter by status ('success', 'failed', 'running')
 * @param {string} [options.since] - Only entries at/after this ISO timestamp
 * @param {string} [options.until] - Only entries at/before this ISO timestamp
 * @param {number} [options.limit=100] - Maximum number of entries to return
 * @param {number} [options.offset=0] - Number of entries to skip
 * @param {string} [options.order='desc'] - Sort order ('asc' or 'desc')
 * @returns {Array} Array of history entries
 */
export function getHistory(options = {}) {
  const db = getDatabase();

  const {
    jobId,
    status,
    since,
    until,
    limit = 100,
    offset = 0,
    order = 'desc',
  } = options;

  // Each active filter contributes one WHERE condition plus one named parameter.
  const conditions = [];
  const params = {};
  const filterSpecs = [
    ['job_id = @jobId', 'jobId', jobId, jobId !== undefined],
    ['status = @status', 'status', status, Boolean(status)],
    ['timestamp >= @since', 'since', since, Boolean(since)],
    ['timestamp <= @until', 'until', until, Boolean(until)],
  ];
  for (const [clause, name, value, active] of filterSpecs) {
    if (active) {
      conditions.push(clause);
      params[name] = value;
    }
  }

  const whereClause = conditions.length > 0 ? `WHERE ${conditions.join(' AND ')}` : '';
  // Anything other than an explicit 'asc' sorts newest-first.
  const orderClause = order.toLowerCase() === 'asc' ? 'ASC' : 'DESC';

  const query = `
    SELECT
      id,
      job_id as jobId,
      job_name as jobName,
      command,
      status,
      exit_code as exitCode,
      start_time as startTime,
      end_time as endTime,
      duration,
      error,
      timestamp
    FROM history
    ${whereClause}
    ORDER BY timestamp ${orderClause}
    LIMIT @limit OFFSET @offset
  `;

  params.limit = limit;
  params.offset = offset;

  return db.prepare(query).all(params);
}
|
|
256
|
+
|
|
257
|
+
/**
 * Convenience wrapper: newest-first history for a single job.
 * @param {number} jobId - Job ID
 * @param {number} [limit=10] - Maximum number of entries to return
 * @param {number} [offset=0] - Number of entries to skip
 * @returns {Array} Array of history entries for the job
 */
export function getJobHistory(jobId, limit = 10, offset = 0) {
  return getHistory({ jobId, limit, offset, order: 'desc' });
}
|
|
272
|
+
|
|
273
|
+
/**
 * Fetch a single history entry by its primary key.
 * @param {number} id - History entry ID
 * @returns {object|null} History entry, or null when no row matches
 */
export function getHistoryEntryById(id) {
  const stmt = getDatabase().prepare(`
    SELECT
      id,
      job_id as jobId,
      job_name as jobName,
      command,
      status,
      exit_code as exitCode,
      start_time as startTime,
      end_time as endTime,
      duration,
      error,
      timestamp
    FROM history
    WHERE id = @id
  `);

  // stmt.get yields undefined for a miss; normalize that to null.
  const row = stmt.get({ id });
  return row ?? null;
}
|
|
300
|
+
|
|
301
|
+
/**
 * Count history entries matching the given filters.
 * Accepts the same filter options as getHistory (jobId, status, since, until);
 * pagination options are ignored.
 * @param {object} options - Filter options
 * @returns {number} Total count of matching entries
 */
export function getHistoryCount(options = {}) {
  const db = getDatabase();

  const { jobId, status, since, until } = options;

  // Mirror getHistory's filter building: one condition + parameter per filter.
  const conditions = [];
  const params = {};
  const filterSpecs = [
    ['job_id = @jobId', 'jobId', jobId, jobId !== undefined],
    ['status = @status', 'status', status, Boolean(status)],
    ['timestamp >= @since', 'since', since, Boolean(since)],
    ['timestamp <= @until', 'until', until, Boolean(until)],
  ];
  for (const [clause, name, value, active] of filterSpecs) {
    if (active) {
      conditions.push(clause);
      params[name] = value;
    }
  }

  const whereClause = conditions.length > 0 ? `WHERE ${conditions.join(' AND ')}` : '';

  const query = `SELECT COUNT(*) as count FROM history ${whereClause}`;
  const result = db.prepare(query).get(params);

  return result?.count || 0;
}
|
|
342
|
+
|
|
343
|
+
/**
 * Delete history entries whose logical timestamp is before the given date.
 * @param {Date} beforeDate - Clear entries before this date
 * @returns {number} Number of entries removed
 */
export function clearHistoryBefore(beforeDate) {
  const db = getDatabase();

  const result = db
    .prepare(`
      DELETE FROM history
      WHERE timestamp < @cutoffTimestamp
    `)
    .run({ cutoffTimestamp: beforeDate.toISOString() });

  return result.changes;
}
|
|
361
|
+
|
|
362
|
+
/**
 * Delete every history entry and compact the database file.
 * @returns {number} Number of entries removed
 */
export function clearAllHistory() {
  const db = getDatabase();

  const { changes } = db.prepare('DELETE FROM history').run();

  // Vacuum to reclaim the disk space freed by the delete.
  db.exec('VACUUM');

  return changes;
}
|
|
377
|
+
|
|
378
|
+
/**
 * Delete all history entries belonging to one job.
 * @param {number} jobId - Job ID
 * @returns {number} Number of entries removed
 */
export function clearJobHistory(jobId) {
  const result = getDatabase()
    .prepare(`
      DELETE FROM history
      WHERE job_id = @jobId
    `)
    .run({ jobId });

  return result.changes;
}
|
|
394
|
+
|
|
395
|
+
/**
 * Aggregate execution statistics for one job from its history rows.
 * Counts are zero and lastRun is null when the job has no history.
 * @param {number} jobId - Job ID
 * @returns {object} { jobId, totalCount, successCount, failedCount, averageDuration, lastRun }
 */
export function getJobStats(jobId) {
  const db = getDatabase();

  const row = db
    .prepare(`
      SELECT
        COUNT(*) as totalCount,
        SUM(CASE WHEN status = 'success' THEN 1 ELSE 0 END) as successCount,
        SUM(CASE WHEN status = 'failed' THEN 1 ELSE 0 END) as failedCount,
        AVG(duration) as averageDuration,
        MAX(timestamp) as lastRun
      FROM history
      WHERE job_id = @jobId
    `)
    .get({ jobId });

  // SUM/AVG/MAX yield NULL on an empty set; normalize to 0 / null.
  return {
    jobId,
    totalCount: row?.totalCount || 0,
    successCount: row?.successCount || 0,
    failedCount: row?.failedCount || 0,
    averageDuration: row?.averageDuration || 0,
    lastRun: row?.lastRun || null,
  };
}
|
|
425
|
+
|
|
426
|
+
// Export default object for compatibility: mirrors the named exports for
// consumers that import the module as a single object.
export default {
  getDatabase,
  closeDatabase,
  addHistoryEntry,
  getHistory,
  getJobHistory,
  getHistoryEntryById,
  getHistoryCount,
  clearHistoryBefore,
  clearAllHistory,
  clearJobHistory,
  getJobStats,
};
|