@llmaudit/logship 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +65 -0
- package/bin/logship.js +124 -0
- package/package.json +28 -0
- package/src/batcher.js +73 -0
- package/src/config.js +50 -0
- package/src/index.js +56 -0
- package/src/registry.js +60 -0
- package/src/transport.js +90 -0
- package/src/watcher.js +199 -0
package/README.md
ADDED
@@ -0,0 +1,65 @@
# Logship (by @llmaudit)

A high-performance, lightweight Node.js log shipping agent designed to tail files and pump logs to the LLM Audit Ingestion Server.

## Features

- **Zero Dependencies**: Built entirely with native Node.js modules for maximum stability and security.
- **Smart Tailing**: Monitors multiple files simultaneously and resumes from where it left off.
- **Resilient**: Automatic reconnection with exponential backoff and a persistent registry for tracking file positions across restarts.
- **Performance Focused**: Batches logs automatically to minimize HTTP overhead and server load.
- **Zero Config Mode**: Run it entirely using CLI flags without creating any files.

## Installation

```bash
npm install @llmaudit/logship
```

## CLI Usage (The easy way)

### Option A: Using CLI Flags (Zero Config)
Run Logship without creating a configuration file:

```bash
npx @llmaudit/logship -k your-key -f access.log,error.log
```

### Option B: Using a Config File
If you have many logs or complex settings, use a JSON file:

```bash
npx @llmaudit/logship logship.config.json
```
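
For illustration, a minimal `logship.config.json` built from the fields that `src/config.js` accepts might look like this (only `apiKey` and `logFiles` are required; `endpoint`, `flushIntervalMs`, and `maxBatchSize` show their defaults, while `env` and `service` are illustrative):

```json
{
  "apiKey": "your-api-key",
  "logFiles": ["./access.log", "./error.log"],
  "env": "prod",
  "service": "my-app",
  "endpoint": "https://log.llmaudit.ai",
  "flushIntervalMs": 5000,
  "maxBatchSize": 100
}
```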

### Option C: Default Lookup
If you have a file named `logship.config.json` in your current folder, simply run:

```bash
npx @llmaudit/logship
```

## CLI Configuration Options

| Flag | Short | Description |
|------|-------|-------------|
| `--api-key` | `-k` | Your unique API Key |
| `--endpoint` | `-e` | Ingestion Server URL (defaults to `https://log.llmaudit.ai`) |
| `--files` | `-f` | Comma-separated list of log files |
| `--env` | | Environment tag (`prod`, `staging`, `dev`) |
| `--service` | `-s` | Name of the service |
| `--interval` | | Flush interval in ms |
| `--batch-size` | | Max logs per batch |

## Library Usage
```javascript
const { initLogger } = require('@llmaudit/logship');

const agent = initLogger({
  apiKey: "your-api-key",
  logFiles: ["./app.log"],
  env: "prod",
  service: "my-app"
  // endpoint: "https://log.llmaudit.ai" (Optional: Defaulted for you)
});

// agent.shutdown();
```
package/bin/logship.js
ADDED
@@ -0,0 +1,124 @@
#!/usr/bin/env node

const { initLogger } = require('../src/index');
const fs = require('fs');
const path = require('path');

/**
 * Poor man's CLI parser (Zero dependencies)
 */
function parseArgs(args) {
  const config = {};
  let configPath = null;

  for (let i = 0; i < args.length; i++) {
    const arg = args[i];

    if (arg === '--api-key' || arg === '-k') {
      config.apiKey = args[++i];
    } else if (arg === '--endpoint' || arg === '-e') {
      config.endpoint = args[++i];
    } else if (arg === '--env') {
      config.env = args[++i];
    } else if (arg === '--service' || arg === '-s') {
      config.service = args[++i];
    } else if (arg === '--files' || arg === '-f') {
      const files = args[++i];
      config.logFiles = files ? files.split(',') : [];
    } else if (arg === '--interval') {
      config.flushIntervalMs = parseInt(args[++i]);
    } else if (arg === '--batch-size') {
      config.maxBatchSize = parseInt(args[++i]);
    } else if (arg === '--help' || arg === '-h') {
      printHelp();
      process.exit(0);
    } else if (!arg.startsWith('-')) {
      configPath = arg;
    }
  }

  return { config, configPath };
}

function printHelp() {
  console.log(`
Logship - High-performance log shipping agent

Usage:
  logship [config-file.json] [options]

Options:
  -k, --api-key <key>          API Key for authentication
  -f, --files <file1,file2>    Comma-separated list of log files
  -e, --endpoint <url>         Override Ingestion Server URL (Optional)
  --env <env>                  Environment (prod, staging, dev)
  -s, --service <name>         Service name
  --interval <ms>              Flush interval in milliseconds
  --batch-size <num>           Maximum logs per batch
  -h, --help                   Show this help message

Examples:
  npx @llmaudit/logship --api-key k_123 --endpoint https://log.llmaudit.ai --files access.log
  npx @llmaudit/logship logship.config.json --env staging
`);
}

async function main() {
  const rawArgs = process.argv.slice(2);
  const { config: cliConfig, configPath: cliPath } = parseArgs(rawArgs);

  // Determine which config file to load
  const configFilePath = cliPath || process.env.LOG_AGENT_CONFIG || 'logship.config.json';
  const absolutePath = path.resolve(process.cwd(), configFilePath);

  let finalConfig = {};

  // 1. Try to load config file if it exists or was explicitly requested
  if (fs.existsSync(absolutePath)) {
    try {
      console.log('Loading config from:', absolutePath);
      const fileContent = fs.readFileSync(absolutePath, 'utf8');
      finalConfig = JSON.parse(fileContent);
    } catch (error) {
      console.error(`Error parsing config file: ${error.message}`);
      process.exit(1);
    }
  } else if (cliPath) {
    // User provided a path that doesn't exist
    console.error(`Error: Config file not found at ${absolutePath}`);
    process.exit(1);
  }

  // 2. Merge with CLI flags (Flags override file)
  finalConfig = { ...finalConfig, ...cliConfig };

  // 3. Last check for required fields before handing off to SDK
  // SDK will also validate, but we want a nice error message here if no file AND no flags
  if (!finalConfig.apiKey || !finalConfig.logFiles) {
    if (!fs.existsSync(absolutePath)) {
      console.error('Error: Required configuration missing. Provide a config file OR use CLI flags.');
      console.error('Missing: ' + [!finalConfig.apiKey && 'api-key', !finalConfig.logFiles && 'logFiles'].filter(Boolean).join(', '));
      printHelp();
      process.exit(1);
    }
  }

  try {
    const logger = initLogger(finalConfig);

    // Handle graceful shutdown
    const stop = () => {
      logger.shutdown();
      process.exit(0);
    };

    process.on('SIGINT', stop);
    process.on('SIGTERM', stop);

  } catch (error) {
    console.error('Error starting Logship:', error.message);
    process.exit(1);
  }
}

main();
package/package.json
ADDED
@@ -0,0 +1,28 @@
{
  "name": "@llmaudit/logship",
  "version": "1.0.0",
  "description": "Enterprise-grade, zero-dependency log shipping agent for @llmaudit. Lightweight, reliable, and performance-focused.",
  "main": "src/index.js",
  "bin": {
    "logship": "bin/logship.js"
  },
  "publishConfig": {
    "access": "public"
  },
  "scripts": {
    "start": "node ./bin/logship.js",
    "test": "echo \"Error: no test specified\" && exit 1"
  },
  "keywords": [
    "logging",
    "log-shipper",
    "agent",
    "telemetry",
    "llmaudit",
    "loki",
    "monitoring"
  ],
  "author": "Antigravity",
  "license": "ISC",
  "type": "commonjs"
}
package/src/batcher.js
ADDED
@@ -0,0 +1,73 @@
class Batcher {
  constructor(config, transport) {
    this.config = config;
    this.transport = transport;
    this.queue = [];
    this.flushTimer = null;
    this.isFlushing = false;
  }

  start() {
    // Ensure flush runs periodically
    if (!this.flushTimer) {
      this.flushTimer = setInterval(() => {
        this.flush();
      }, this.config.flushIntervalMs);
    }
  }

  stop() {
    if (this.flushTimer) {
      clearInterval(this.flushTimer);
      this.flushTimer = null;
    }
    // Attempt final flush
    return this.flush();
  }

  /**
   * Adds a log line to the batch.
   * @param {string} line - The raw log line.
   */
  push(line) {
    const timestamp = Date.now().toString() + '000000'; // Unix nano approximation (ms + 6 zeros)
    this.queue.push({
      timestamp,
      line
    });

    if (this.queue.length >= this.config.maxBatchSize) {
      this.flush();
    }
  }

  async flush() {
    if (this.queue.length === 0 || this.isFlushing) return;

    this.isFlushing = true;

    // Swap queue to process
    const batch = [...this.queue];
    this.queue = [];

    const payload = {
      env: this.config.env,
      service: this.config.service,
      logs: batch
    };

    try {
      await this.transport.send(payload);
    } catch (error) {
      console.error('Logship: Failed to flush batch, logs lost.', error);
      // In a more advanced system, we might re-enqueue or persist to disk here.
      // For this implementation, we log the error. Retry logic is in Transport.
    } finally {
      this.isFlushing = false;
      // If queue filled up while flushing, trigger another flush immediately?
      // Maybe not immediately to avoid storm, wait for next tick or interval.
    }
  }
}

module.exports = Batcher;
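
To make the hand-off to the transport concrete, a flushed payload built by `push()` and `flush()` above has roughly this shape (values illustrative; each timestamp is `Date.now()` in milliseconds with six zeros appended):

```json
{
  "env": "prod",
  "service": "my-app",
  "logs": [
    { "timestamp": "1714000000000000000", "line": "GET /health 200 12ms" },
    { "timestamp": "1714000000512000000", "line": "POST /login 401 8ms" }
  ]
}
```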
package/src/config.js
ADDED
@@ -0,0 +1,50 @@
const fs = require('fs');
const path = require('path');

/**
 * Validates the configuration object.
 * @param {Object} config - The user provided config.
 * @returns {Object} - The validated and defaulted config.
 * @throws {Error} - If validation fails.
 */
function validateConfig(config) {
  if (!config) {
    throw new Error('Logship: Configuration object is required.');
  }

  const requiredFields = ['apiKey'];
  for (const field of requiredFields) {
    if (!config[field] || typeof config[field] !== 'string') {
      throw new Error(`Logship: Missing or invalid required field: ${field}`);
    }
  }

  if (config.env && !['prod', 'staging', 'dev'].includes(config.env)) {
    throw new Error('Logship: Invalid env. Must be one of: prod, staging, dev');
  }

  if (!Array.isArray(config.logFiles) || config.logFiles.length === 0) {
    throw new Error('Logship: logFiles must be a non-empty array of file paths.');
  }

  const endpoint = config.endpoint || 'https://log.llmaudit.ai';

  // Validate endpoint starts with http/https
  if (!endpoint.startsWith('http')) {
    throw new Error('Logship: endpoint must start with http:// or https://');
  }

  // Set defaults
  return {
    apiKey: config.apiKey,
    endpoint: endpoint.replace(/\/$/, ''), // Remove trailing slash
    websiteId: config.websiteId, // Optional now, will be ignored by server anyway
    env: config.env || 'dev',
    service: config.service || 'default-service',
    logFiles: config.logFiles.map(p => path.resolve(p)),
    flushIntervalMs: config.flushIntervalMs || 5000,
    maxBatchSize: config.maxBatchSize || 100,
  };
}

module.exports = { validateConfig };
package/src/index.js
ADDED
@@ -0,0 +1,56 @@
const { validateConfig } = require('./config');
const Transport = require('./transport');
const Batcher = require('./batcher');
const WatchManager = require('./watcher');
const Registry = require('./registry');

class LogAgent {
  constructor(config) {
    this.config = validateConfig(config);
    this.isRunning = false;

    this.registry = new Registry();
    this.transport = new Transport(this.config);
    this.batcher = new Batcher(this.config, this.transport);
    this.watcher = new WatchManager(this.config.logFiles, this.registry, (line) => {
      this.batcher.push(line);
    });
  }

  init() {
    if (this.isRunning) return;

    console.log('Logship: Initializing...');

    this.registry.load();
    this.batcher.start();
    this.watcher.start();

    this.isRunning = true;
    console.log(`Logship: Started for service ${this.config.service} on ${this.config.env}`);
    console.log(`Logship: Watching ${this.config.logFiles} files.`);
  }

  shutdown() {
    this.isRunning = false;
    console.log('Logship: Shutting down...');
    this.watcher.stop();
    this.batcher.stop().then(() => {
      this.registry.close();
      console.log('Logship: Shutdown complete.');
    });
  }
}

/**
 * Initializes the global log agent instance.
 * @param {Object} config
 * @returns {LogAgent}
 */
function initLogger(config) {
  const agent = new LogAgent(config);
  agent.init();
  return agent;
}

module.exports = { initLogger, LogAgent };
package/src/registry.js
ADDED
@@ -0,0 +1,60 @@
const fs = require('fs');
const path = require('path');

class Registry {
  constructor(storagePath) {
    this.storagePath = storagePath || path.resolve(process.cwd(), '.agent-registry.json');
    this.data = {};
    this.saveTimer = null;
  }

  load() {
    try {
      if (fs.existsSync(this.storagePath)) {
        const content = fs.readFileSync(this.storagePath, 'utf8');
        this.data = JSON.parse(content);
      }
    } catch (e) {
      console.error('Log Agent: Failed to load registry', e);
      this.data = {};
    }
  }

  get(filepath) {
    return this.data[filepath];
  }

  set(filepath, offset, inode, signature) {
    this.data[filepath] = { offset, inode, signature, lastUpdated: Date.now() };
    this.scheduleSave();
  }

  remove(filepath) {
    delete this.data[filepath];
    this.scheduleSave();
  }

  scheduleSave() {
    if (this.saveTimer) return;
    this.saveTimer = setTimeout(() => this.save(), 1000); // Debounce saves
  }

  save() {
    try {
      console.log(`Log Agent: Saving registry to ${this.storagePath}`);
      fs.writeFileSync(this.storagePath, JSON.stringify(this.data, null, 2));
      this.saveTimer = null;
    } catch (e) {
      console.error('Log Agent: Failed to save registry', e);
    }
  }

  close() {
    if (this.saveTimer) {
      clearTimeout(this.saveTimer);
      this.save();
    }
  }
}

module.exports = Registry;
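
For reference, the `.agent-registry.json` file that `set()` and `save()` maintain is keyed by the resolved file path; each entry records the byte offset, inode, content signature (a hex dump of the file's first 64 bytes, supplied by the watcher), and the last update time. An illustrative entry:

```json
{
  "/var/log/app/access.log": {
    "offset": 10542,
    "inode": 9175041,
    "signature": "7b226c6576656c223a22696e666f222c226d7367223a22626f6f74222c22",
    "lastUpdated": 1714000000000
  }
}
```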
package/src/transport.js
ADDED
@@ -0,0 +1,90 @@
const https = require('https');
const http = require('http');
const paramURL = require('url');

class Transport {
  constructor(config) {
    this.config = config;
    this.maxRetries = 5;
    this.baseDelay = 1000; // 1 second
  }

  /**
   * Sends a batch of logs to the ingestion server.
   * @param {Object} payload - The complete payload to send.
   * @returns {Promise<void>}
   */
  async send(payload) {
    let attempt = 0;

    while (attempt <= this.maxRetries) {
      try {
        await this._post(payload);
        return; // Success
      } catch (error) {
        attempt++;
        if (attempt > this.maxRetries) {
          console.error(`Logship: Failed to send logs after ${this.maxRetries} attempts. Error: ${error.message}`);
          throw error; // Give up
        }

        const delay = this.baseDelay * Math.pow(2, attempt - 1); // Exponential backoff
        // Add some jitter
        const jitter = Math.random() * 200;
        const totalDelay = delay + jitter;

        console.warn(`Logship: Send failed, retrying in ${Math.round(totalDelay)}ms. Attempt ${attempt}/${this.maxRetries}. Error: ${error.message}`);
        await new Promise(resolve => setTimeout(resolve, totalDelay));
      }
    }
  }

  _post(payload) {
    return new Promise((resolve, reject) => {
      const data = JSON.stringify(payload);
      const urlObj = paramURL.parse(this.config.endpoint);

      const options = {
        hostname: urlObj.hostname,
        port: urlObj.port || (urlObj.protocol === 'https:' ? 443 : 80),
        path: '/ingest/logs', // Use fixed path as per req, or could be part of endpoint config.
        // Based on prompt: "endpoint: string, // central server URL", and server has "POST /ingest/logs"
        // I'll assume endpoint is the BASE URL e.g. "https://api.logger.com".
        method: 'POST',
        headers: {
          'Content-Type': 'application/json',
          'Content-Length': Buffer.byteLength(data),
          'x-api-key': this.config.apiKey
        },
        timeout: 10000 // 10s timeout
      };

      const lib = urlObj.protocol === 'https:' ? https : http;

      const req = lib.request(options, (res) => {
        let body = '';
        res.on('data', (chunk) => body += chunk);
        res.on('end', () => {
          if (res.statusCode >= 200 && res.statusCode < 300) {
            resolve(body);
          } else {
            reject(new Error(`HTTP Status ${res.statusCode}: ${body}`));
          }
        });
      });

      req.on('error', (e) => {
        reject(e);
      });

      req.on('timeout', () => {
        req.destroy(new Error('Request Timeout'));
      });

      req.write(data);
      req.end();
    });
  }
}

module.exports = Transport;
package/src/watcher.js
ADDED
@@ -0,0 +1,199 @@
const fs = require('fs');

class FileWatcher {
  constructor(filepath, registry, onLogLine) {
    this.filepath = filepath;
    this.registry = registry;
    this.onLogLine = onLogLine;
    this.currSize = 0;
    this.inode = null;
    this.signature = null;
    this.watcher = null;
    this.checkInterval = null;
    this.recreateTimer = null;
  }

  start() {
    this._initFile();
  }

  stop() {
    if (this.watcher) {
      this.watcher.close();
      this.watcher = null;
    }
    if (this.checkInterval) {
      clearInterval(this.checkInterval);
      this.checkInterval = null;
    }
    if (this.recreateTimer) {
      clearTimeout(this.recreateTimer);
      this.recreateTimer = null;
    }
  }

  _initFile() {
    fs.stat(this.filepath, (err, stats) => {
      if (err) {
        if (err.code === 'ENOENT') {
          console.log(`Log Agent: File ${this.filepath} not found, waiting...`);
          this.recreateTimer = setTimeout(() => this._initFile(), 1000);
          return;
        }
        return;
      }

      this.inode = stats.ino;

      this._readSignature(this.filepath, (signature) => {
        this.signature = signature;
        const savedState = this.registry.get(this.filepath);

        let isValid = false;
        if (savedState && savedState.inode === this.inode) {
          if (savedState.signature && savedState.signature !== signature) {
            // Signature mismatch means content changed (reuse)
            console.log(`Log Agent: Signature Mismatch! Resetting.`);
            isValid = false;
          } else if (stats.birthtimeMs && stats.birthtimeMs > savedState.lastUpdated) {
            console.log(`Log Agent: Newer Birthtime Detected. Resetting.`);
            isValid = false;
          } else {
            isValid = true;
          }
        }

        if (isValid) {
          console.log(`Log Agent: Resuming ${this.filepath} from offset ${savedState.offset}`);
          this.currSize = savedState.offset;

          if (stats.size > this.currSize) {
            this._read(this.currSize, stats.size);
            this.currSize = stats.size;
            this.registry.set(this.filepath, this.currSize, this.inode, this.signature);
          } else if (stats.size < this.currSize) {
            console.log('Log Agent: File truncated/rotated. Resetting.');
            this.currSize = 0;
            this._read(0, stats.size);
            this.currSize = stats.size;
            this.registry.set(this.filepath, this.currSize, this.inode, this.signature);
          }
        } else {
          console.log(`Log Agent: New file/rotation/mismatch. Starting at ${stats.size}`);
          this.currSize = stats.size;
          this.registry.set(this.filepath, this.currSize, this.inode, this.signature);
        }

        this._watch();
      });
    });
  }

  _readSignature(filepath, callback) {
    const stream = fs.createReadStream(filepath, { start: 0, end: 63 });
    let buf = Buffer.alloc(0);
    stream.on('data', chunk => {
      buf = Buffer.concat([buf, chunk]);
    });
    stream.on('end', () => {
      callback(buf.toString('hex'));
    });
    stream.on('error', () => callback(null));
  }

  _watch() {
    try {
      this.watcher = fs.watch(this.filepath, (eventType, filename) => {
        if (eventType === 'rename') {
          fs.stat(this.filepath, (err, stats) => {
            if (err && err.code === 'ENOENT') {
              this.stop();
              this.recreateTimer = setTimeout(() => this.start(), 100);
            } else {
              if (stats.ino !== this.inode) {
                this.inode = stats.ino;
                this.currSize = 0;
                this._processChange();
              } else {
                this._processChange();
              }
            }
          });
        } else {
          this._processChange();
        }
      });
    } catch (e) {
      console.error(`Log Agent: Watch failed for ${this.filepath}`, e);
    }
  }

  _processChange() {
    fs.stat(this.filepath, (err, stats) => {
      if (err) return;

      if (stats.size > this.currSize) {
        this._read(this.currSize, stats.size);
        this.currSize = stats.size;
        this.registry.set(this.filepath, this.currSize, this.inode, this.signature);
      } else if (stats.size < this.currSize) {
        this.currSize = 0;
        this._read(0, stats.size);
        this.currSize = stats.size;
        this.registry.set(this.filepath, this.currSize, this.inode, this.signature);
      }
    });
  }

  _read(start, end) {
    if (start >= end) return;
    if (start < 0) start = 0;

    const stream = fs.createReadStream(this.filepath, {
      start: start,
      end: end - 1,
      encoding: 'utf8'
    });

    let buffer = '';
    stream.on('data', (chunk) => {
      buffer += chunk;
      const lines = buffer.split('\n');
      buffer = lines.pop();

      for (const line of lines) {
        if (line.trim()) this.onLogLine(line);
      }
    });

    stream.on('end', () => {
      if (buffer && buffer.trim()) {
        this.onLogLine(buffer);
      }
    });
  }
}

class WatchManager {
  constructor(logFiles, registry, onLogLine) {
    this.logFiles = logFiles;
    this.registry = registry;
    this.onLogLine = onLogLine;
    this.watchers = [];
  }

  start() {
    this.logFiles.forEach(file => {
      const watcher = new FileWatcher(file, this.registry, this.onLogLine);
      watcher.start();
      this.watchers.push(watcher);
    });
  }

  stop() {
    this.watchers.forEach(w => w.stop());
    this.watchers = [];
  }
}

module.exports = WatchManager;