logtunnel 0.4.0 → 1.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (40)
  1. package/README.md +259 -0
  2. package/package.json +2 -2
  3. package/src/definition.js +33 -30
  4. package/src/log-source.js +1 -1
  5. package/src/main.js +3 -1
  6. package/src/pipeline.js +55 -21
  7. package/src/transformers/filters/field.js +18 -0
  8. package/src/transformers/filters/find.js +11 -0
  9. package/src/transformers/filters/ignore.js +11 -0
  10. package/src/transformers/outputs/bigodon.js +17 -0
  11. package/src/transformers/outputs/factory.js +21 -0
  12. package/src/transformers/outputs/inspect.js +22 -0
  13. package/src/transformers/outputs/json.js +11 -0
  14. package/src/transformers/outputs/logfmt.js +13 -0
  15. package/src/transformers/outputs/original.js +7 -0
  16. package/src/transformers/outputs/table.js +67 -0
  17. package/src/transformers/parsers/factory.js +16 -0
  18. package/src/transformers/parsers/json.js +16 -0
  19. package/src/transformers/parsers/logfmt.js +13 -0
  20. package/src/transformers/parsers/regex.js +11 -0
  21. package/src/transformers/parsers/table.js +31 -0
  22. package/test/filters.spec.js +40 -40
  23. package/test/output.spec.js +61 -22
  24. package/test/parse.spec.js +20 -22
  25. package/test/pipeline.spec.js +107 -21
  26. package/test/utils.js +8 -2
  27. package/src/transformers/field.js +0 -11
  28. package/src/transformers/filter.js +0 -4
  29. package/src/transformers/ignore.js +0 -4
  30. package/src/transformers/output-json.js +0 -7
  31. package/src/transformers/output-logfmt.js +0 -9
  32. package/src/transformers/output-mustache.js +0 -13
  33. package/src/transformers/output-original.js +0 -3
  34. package/src/transformers/output-unset.js +0 -12
  35. package/src/transformers/output.js +0 -15
  36. package/src/transformers/parse-json.js +0 -12
  37. package/src/transformers/parse-logfmt.js +0 -9
  38. package/src/transformers/parse-regex.js +0 -4
  39. package/src/transformers/parse-table.js +0 -26
  40. package/src/transformers/parse.js +0 -14
package/README.md ADDED
@@ -0,0 +1,259 @@
1
+ # logtunnel
2
+
3
+ `logtunnel` (`lt`) is a CLI tool that helps you search logs, parse them into structured data, filter by fields, and reformat them for reading or other tools.
4
+
5
+ ## Installation
6
+
7
+ ```bash
8
+ npm i -g logtunnel
9
+ ```
10
+
11
+ If you are on Linux, you might need `sudo` depending on your setup.
12
+
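For example, on a system where global npm installs need root:

```bash
sudo npm i -g logtunnel
```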
13
+ ## Tutorial
14
+
15
+ ### The simplest form: find lines
16
+
17
+ `lt <filter>` is shorthand for “keep only lines that match this regex” (case-insensitive).
18
+
19
+ ```bash
20
+ curl -s https://cdn.codetunnel.net/lt/text.log | lt error
21
+ ```
22
+
23
+ You can also pass `-f`/`--filter` multiple times (AND behavior: every filter must match). For lines containing both `checkout` and `alice`, you could use:
24
+
25
+ ```bash
26
+ curl -s https://cdn.codetunnel.net/lt/text.log | lt -f checkout -f alice
27
+ ```
28
+
29
+ ### Ignore noise
30
+
31
+ Use `-i`/`--ignore` to drop lines that match those regexes:
32
+
33
+ ```bash
34
+ curl -s https://cdn.codetunnel.net/lt/text.log | lt -i healthz -i metrics
35
+ ```
36
+
37
+ Find something while ignoring other things:
38
+
39
+ ```bash
40
+ curl -s https://cdn.codetunnel.net/lt/text.log | lt -f error -i NullPointer -i "retrying in"
41
+ ```
42
+
43
+ Tip: `-f` and `-i` always run against the original input line (before parsing). If you want to filter by JSON fields, use `-F` together with a parser.
44
+
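For example, the two commands below (variations of examples used later in this README) look similar but behave differently: the first matches the text "error" anywhere in the raw line, while the second keeps only events whose parsed `level` field is exactly "error".

```bash
curl -s https://cdn.codetunnel.net/lt/json.log | lt -f error
curl -s https://cdn.codetunnel.net/lt/json.log | lt -p json -F 'eq level "error"'
```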
45
+ ### Parse logs (turn text into structured data)
46
+
47
+ Parsing turns each line into an “event object”, enabling field filters (`-F`) and structured outputs (`-o json`, `-o logfmt`, `-o table`, templates, etc).
48
+
49
+ Supported parsers:
50
+
51
+ - `-p json` (one JSON object per line)
52
+ - `-p logfmt` (key=value log lines)
53
+ - `-p table` (space-aligned tables like `kubectl get pods`)
54
+ - `-p '<regex with named groups>'` (custom parsing using RegExp named groups)
55
+
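As a rough illustration (the input line here is made up), a logfmt-style line parses into an object whose fields you can then filter and re-format:

```bash
echo 'level=info msg="checkout ok" delay_ms=42' | lt -p logfmt -o json
```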
56
+ #### Parse JSON and format a clean line
57
+
58
+ ```bash
59
+ curl -s https://cdn.codetunnel.net/lt/json.log | lt -p json -o '[{{ts}} {{upper level}}] {{message}}'
60
+ ```
61
+
62
+ #### Parse logfmt and convert to JSON (great for piping into other tools)
63
+
64
+ ```bash
65
+ curl -s https://cdn.codetunnel.net/lt/logfmt.log | lt -p logfmt -o json
66
+ ```
67
+
68
+ #### Parse JSON and show “human friendly structured output”
69
+
70
+ The default output (no `-o`) is “inspect”: objects are pretty-printed with colors.
71
+
72
+ ```bash
73
+ curl -s https://cdn.codetunnel.net/lt/json.log | lt -p json
74
+ ```
75
+
76
+ Use `-o inspect` to force multi-line output (useful for large or nested objects):
77
+
78
+ ```bash
79
+ curl -s https://cdn.codetunnel.net/lt/json.log | lt -p json -o inspect
80
+ ```
81
+
82
+ ### Custom formats
83
+
84
+ When you pass a string to `-o` that isn’t one of `json|logfmt|inspect|original|table`, `lt` treats it as a Bigodon template (a safe Mustache/Handlebars-like language).
85
+
86
+ It supports:
87
+
88
+ - Variables: `{{message}}`, `{{ts}}`, `{{kubernetes.pod}}`
89
+ - Helpers: `{{upper level}}`, `{{lower user.email}}`, `{{toFixed delay_ms 2}}`
90
+ - Nested expressions: `{{capitalize (lower level)}}`
91
+
92
+ Example (compact “service log line”):
93
+
94
+ ```bash
95
+ curl -s https://cdn.codetunnel.net/lt/json.log | lt -p json -o '[{{ts}}] {{service}} {{kubernetes.namespace}}/{{kubernetes.pod}} {{upper level}} {{message}}'
96
+ ```
97
+
98
+ You can find the bigodon language reference [here](https://github.com/gabriel-pinheiro/bigodon/blob/main/LANGUAGE.md) and the available helpers [here](https://github.com/gabriel-pinheiro/bigodon/blob/main/HELPERS.md).
99
+
100
+ ### Field filters (`-F <expression>`)
101
+
102
+ `-F/--field` filters *parsed objects* (so it requires `-p ...`). You can specify `-F` multiple times; all field filters must match (AND behavior).
103
+
104
+ Common helpers you’ll use in field filters:
105
+
106
+ - comparisons: `gt`, `gte`, `lt`, `lte`, `eq`, `and`, `or`, `not`
107
+ - strings: `lower`, `upper`, `startsWith`, `endsWith`
108
+ - `includes` works for strings and arrays
109
+
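For instance, keeping only warnings and errors (illustrative; assumes the sample log uses `warn` and `error` as level values):

```bash
curl -s https://cdn.codetunnel.net/lt/json.log | lt -p json -F 'or (eq level "warn") (eq level "error")'
```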
110
+ Show only slow requests (delay over 200ms):
111
+
112
+ ```bash
113
+ curl -s https://cdn.codetunnel.net/lt/json.log | lt -p json -F 'gt delay_ms 200' -o inspect
114
+ ```
115
+
116
+ Case-insensitive “message contains alice”:
117
+
118
+ ```bash
119
+ curl -s https://cdn.codetunnel.net/lt/json.log | lt -p json -F 'includes (lower message) "alice"' -o '[{{ts}} {{upper level}}] {{message}}'
120
+ ```
121
+
122
+ Combine multiple conditions:
123
+
124
+ ```bash
125
+ curl -s https://cdn.codetunnel.net/lt/json.log | lt -p json -F 'and (eq level "error") (gt http.status 499)' -o '[{{ts}} {{upper level}}] {{message}}'
126
+ ```
127
+
128
+ Show the *original raw line* after field filtering:
129
+
130
+ ```bash
131
+ curl -s https://cdn.codetunnel.net/lt/json.log | lt -p json -F 'gt delay_ms 200' -o original
132
+ ```
133
+
134
+ #### Tip: combine general filters with field filters
135
+
136
+ Inclusion (`-f`) and exclusion (`-i`) filters are roughly 5x faster than field filters (`-F`) because they skip the parsing step. If you can apply a broader `-f`/`-i` filter before the more specific `-F` filter, large inputs will process much faster. So, if you are seeing poor performance with a field filter like:
137
+
138
+ ```bash
139
+ curl -s https://cdn.codetunnel.net/lt/json.log | lt -p json -F 'eq level "error"' -o original
140
+ ```
141
+
142
+ and you can't simply replace it with a plain text filter like the one below, because that would also show INFO lines whose message merely contains the string "error":
143
+
144
+ ```bash
145
+ curl -s https://cdn.codetunnel.net/lt/json.log | lt -f error
146
+ ```
147
+
148
+ you can combine both: the cheap `-f` filter reduces how many lines reach the parser before the more specific `-F` runs:
149
+
150
+ ```bash
151
+ curl -s https://cdn.codetunnel.net/lt/json.log | lt -f error -p json -F 'eq level "error"' -o original
152
+ ```
153
+
154
+ ### Kubernetes tables
155
+
156
+ `-p table` is designed for outputs like `kubectl get pods -A` (space-separated columns).
157
+
158
+ Find all pods (`kubectl get pods -A`) but ignore lines containing kube-system:
159
+
160
+ ```bash
161
+ curl -s https://cdn.codetunnel.net/lt/table.log | lt -i kube-system
162
+ ```
163
+
164
+ Because the pods in the kube-system namespace have longer names, this output is still wide (it needs a wider terminal before it wraps): kubernetes sized the columns using every row, including the ones you just ignored. Other values, such as the time since the last restart or longer status names (CrashLoopBackOff), can also make the original table wider.
165
+
166
+ You can use `-p table` to parse the table, filters to include or exclude rows, and `-o table` to render a new table sized only by the rows that made it through. The command above printed the table as wide as kubernetes generated it; this one prints a narrower one:
167
+
168
+ ```bash
169
+ curl -s https://cdn.codetunnel.net/lt/table.log | lt -p table -o table -i kube-system
170
+ ```
171
+
172
+ If you are looking for all pods containing the word `gateway`, you might end up excluding the headers row:
173
+
174
+ ```bash
175
+ curl -s https://cdn.codetunnel.net/lt/table.log | lt gateway
176
+ ```
177
+
178
+ You can always print the headers row with `-H`:
179
+
180
+ ```bash
181
+ curl -s https://cdn.codetunnel.net/lt/table.log | lt -H gateway
182
+ ```
183
+
184
+ ### Kubernetes -k option and more examples
185
+
186
+ For kubectl commands, you most likely want to parse the table (`-p table`) and reformat it as a new table (`-o table`). You can use `-k` as an alias for `-p table -o table`:
187
+
188
+ ```bash
189
+ curl -s https://cdn.codetunnel.net/lt/table.log | lt -k payment
190
+ ```
191
+
192
+ When parsing the table with `-p table`, you can filter with custom logic using field filters (`-F`). For example, to get pods with at least one restart:
193
+ ```bash
194
+ curl -s https://cdn.codetunnel.net/lt/table.log | lt -k -F 'gt RESTARTS 0'
195
+ ```
196
+
197
+ Show pods that are not fully ready (`READY` looks like `1/2`):
198
+
199
+ ```bash
200
+ curl -s https://cdn.codetunnel.net/lt/table.log | lt -p '(?<up>\d+)/(?<total>\d+)' -F 'lt up total' -H -o original
201
+ ```
202
+
203
+ Or, chaining multiple `lt` invocations to re-format the table:
204
+
205
+ ```bash
206
+ curl -s https://cdn.codetunnel.net/lt/table.log | lt -p '(?<up>\d+)/(?<total>\d+)' -F 'lt up total' -H -o original | lt -k
207
+ ```
208
+
209
+ Or, using a single `lt` with just a Bigodon field filter (no regex parser):
210
+
211
+ ```bash
212
+ curl -s https://cdn.codetunnel.net/lt/table.log | lt -k -F 'lt (itemAt (split READY "/") 0) (itemAt (split READY "/") 1)'
213
+ ```
214
+
215
+ Convert `kubectl` table output to logfmt for easier downstream filtering:
216
+
217
+ ```bash
218
+ curl -s https://cdn.codetunnel.net/lt/table.log | lt -p table -o logfmt
219
+ ```
220
+
221
+ ### Custom regex parsing (`-p '(?<name>...)'`)
222
+
223
+ Use a regex with named groups to “extract fields” from unstructured text:
224
+
225
+ ```bash
226
+ curl -s https://cdn.codetunnel.net/lt/text.log | lt -p '(?<ts>\S+) \[(?<level>\w+)\] (?<message>.*)' -o logfmt
227
+ ```
228
+
229
+ Then field-filter on extracted fields:
230
+
231
+ ```bash
232
+ curl -s https://cdn.codetunnel.net/lt/text.log | lt -p '(?<delay_ms>\d+)ms' -F 'gt delay_ms 200' -o original
233
+ ```
234
+
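As a final, hypothetical example (the access-log format and file name are made up), the same approach works for any text you can describe with named groups, here keeping only 5xx responses:

```bash
cat access.log | lt -p '(?<method>GET|POST) (?<path>\S+) (?<status>\d{3})' -F 'gte status 500' -o logfmt
```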
235
+ ## Reference
236
+
237
+ ### Options
238
+
239
+ - `lt <filter>`: shorthand for a single text filter
240
+ - `-f, --filter <regex>`: keep lines that match this regex (repeatable)
241
+ - `-i, --ignore <regex>`: drop lines that match this regex (repeatable)
242
+ - `-p, --parser <json|logfmt|table|regex>`: parse each line into an object
243
+ - `-F, --field <bigodon expression>`: filter parsed objects by expression (repeatable)
244
+ - `-o, --output <format|template>`: output `json|logfmt|inspect|original|table` or a Bigodon template
245
+ - `-H, --headers`: always output the first input line (table headers)
246
+ - `-k, --kubectl`: shortcut for `-p table -o table`
247
+ - `-h, --help`: show help
248
+ - `-v, --version`: show version
249
+
250
+ ### Formats at a glance
251
+
252
+ - `-o original`: print the original input line (even after parsing/filtering)
253
+ - `-o inspect` (or default): print objects with colors for humans
254
+ - `-o json`: emit JSON objects
255
+ - `-o logfmt`: emit `key=value` lines
256
+ - `-o table`: render a table from parsed objects (buffers until EOF)
257
+ - `-o '<bigodon template>'`: render a custom line from parsed objects
258
+
259
+ For the built-in help (includes examples): `lt --help`.
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "logtunnel",
3
- "version": "0.4.0",
3
+ "version": "1.1.0",
4
4
  "description": "CLI tool that allows you to format, filter and search your log output",
5
5
  "main": "src/main.js",
6
6
  "bin": {
@@ -34,9 +34,9 @@
34
34
  "@hapi/bossy": "^5.1.0",
35
35
  "@hapi/hoek": "^9.2.0",
36
36
  "@hapi/podium": "^4.1.3",
37
+ "bigodon": "^2.3.0",
37
38
  "colors": "^1.4.0",
38
39
  "debug": "^4.3.2",
39
- "jstache": "^0.1.0",
40
40
  "logfmt": "^1.3.2",
41
41
  "semver": "^7.3.5"
42
42
  }
package/src/definition.js CHANGED
@@ -29,12 +29,12 @@ const definition = {
29
29
  type: 'string',
30
30
  },
31
31
  o: {
32
- description: 'Formats the output using this template. Allowed: json, logfmt, original or a mustache template.',
32
+ description: 'Formats the output using this template. Allowed: json, logfmt, inspect, original, table or a bigodon template.',
33
33
  alias: 'output',
34
34
  type: 'string',
35
35
  },
36
36
  F: {
37
- description: 'Show only logs that match the field filter. You can use JavaScript.',
37
+ description: 'Show only logs that match the field filter (Bigodon expression). Requires a parser like -p json.',
38
38
  alias: 'field',
39
39
  type: 'string',
40
40
  multiple: true,
@@ -44,39 +44,42 @@ const definition = {
44
44
  alias: 'headers',
45
45
  type: 'boolean',
46
46
  },
47
+ k: {
48
+ description: 'Shortcut for kubectl tables (equivalent to -p table -o table).',
49
+ alias: 'kubectl',
50
+ type: 'boolean',
51
+ },
47
52
  };
48
53
 
49
54
  const $ = '$ '.gray;
50
55
  const examples = [
51
56
  '\n\nExamples:\n',
52
- 'Find logs that contain "alice":'.dim,
53
- $ + 'curl -s https://cdn.codetunnel.net/lt/text.log | lt alice',
54
- 'Find logs that contain "alice" and "purchase":'.dim,
55
- $ + 'curl -s https://cdn.codetunnel.net/lt/text.log | lt -f alice -f purchase',
56
- 'Find logs that contain "alice" and ignore the ones that contain "info"'.dim,
57
- $ + 'curl -s https://cdn.codetunnel.net/lt/text.log | lt -f alice -i info',
58
- 'Parse logs as JSON and output them with that template'.dim,
59
- $ + 'curl -s https://cdn.codetunnel.net/lt/json.log | lt -p json -o \'[{{lvl}}] {{log}}\'',
60
- 'Parse logs as JSON, apply template and find the ones containing "alice"'.dim,
61
- $ + 'curl -s https://cdn.codetunnel.net/lt/json.log | lt -p json -o \'[{{lvl}}] {{log}}\' -f alice',
62
- 'Parse logs as JSON, apply template and show the ones with "delay > 200"'.dim,
63
- $ + 'curl -s https://cdn.codetunnel.net/lt/json.log | lt -p json -o \'[{{lvl}} in {{delay}}ms] {{log}}\' -F \'delay > 200\'',
64
- 'Parse logs as JSON, apply template and show the ones with "log" containing "Alice"'.dim,
65
- $ + 'curl -s https://cdn.codetunnel.net/lt/json.log | lt -p json -o \'[{{lvl}}] {{log}}\' -F \'log.toLowerCase().includes("alice")\'',
66
- 'Parse logs as logfmt, show the ones with "delay > 200" and show their original line (as if no parsing happened)'.dim,
67
- $ + 'curl -s https://cdn.codetunnel.net/lt/logfmt.log | lt -p logfmt -o original -F \'delay > 200\'',
68
- 'Parse logs with regex, and output in logfmt'.dim,
69
- $ + 'curl -s https://cdn.codetunnel.net/lt/text.log | lt -p \'\\[(?<lvl>\\S*) in\\s*(?<delay>\\d*)ms\\] (?<log>.*)\' -o logfmt',
70
- 'Parse logs with regex, and show the ones with "delay > 200"'.dim,
71
- $ + 'curl -s https://cdn.codetunnel.net/lt/text.log | lt -p \'(?<delay>\\d+)ms\' -o original -F \'delay > 200\'',
72
- 'Parse table and show rows containing "cilium"'.dim,
73
- $ + 'curl -s https://cdn.codetunnel.net/lt/table.log | lt -p table -o original -f cilium',
74
- 'Parse table, show rows containing "cilium" and the first headers row'.dim,
75
- $ + 'curl -s https://cdn.codetunnel.net/lt/table.log | lt -p table -o original -f cilium -H',
76
- 'Parse table, show rows with RESTARTS > 0'.dim,
77
- $ + 'curl -s https://cdn.codetunnel.net/lt/table.log | lt -p table -o original -F \'RESTARTS > 0\' -H',
78
- 'Show rows that are not ready'.dim,
79
- $ + 'curl -s https://cdn.codetunnel.net/lt/table.log | lt -p \'(?<up>\\d)/(?<total>\\d)\' -o original -F \'up < total\' -H',
57
+ 'Find lines containing "error" (shorthand for a single -f filter):'.dim,
58
+ $ + 'curl -s https://cdn.codetunnel.net/lt/text.log | lt error',
59
+ 'Find lines containing "checkout" and "alice" (AND):'.dim,
60
+ $ + 'curl -s https://cdn.codetunnel.net/lt/text.log | lt -f checkout -f alice',
61
+ 'Ignore noise (drop health/metrics lines):'.dim,
62
+ $ + 'curl -s https://cdn.codetunnel.net/lt/text.log | lt -i healthz -i metrics',
63
+ 'Find errors while ignoring known spam:'.dim,
64
+ $ + 'curl -s https://cdn.codetunnel.net/lt/text.log | lt -f error -i NullPointer -i "retrying in"',
65
+ 'Parse JSON (default output is inspect):'.dim,
66
+ $ + 'curl -s https://cdn.codetunnel.net/lt/json.log | lt -p json',
67
+ 'Parse JSON and format a clean line:'.dim,
68
+ $ + 'curl -s https://cdn.codetunnel.net/lt/json.log | lt -p json -o \'[{{ts}} {{upper level}}] {{message}}\'',
69
+ 'Field filter: only slow requests (delay_ms > 200):'.dim,
70
+ $ + 'curl -s https://cdn.codetunnel.net/lt/json.log | lt -p json -F \'gt delay_ms 200\' -o original',
71
+ 'Field filter: combine conditions:'.dim,
72
+ $ + 'curl -s https://cdn.codetunnel.net/lt/json.log | lt -p json -F \'and (eq level "error") (gt http.status 499)\' -o \'[{{ts}} {{upper level}}] {{message}}\'',
73
+ 'Parse logfmt and convert to JSON:'.dim,
74
+ $ + 'curl -s https://cdn.codetunnel.net/lt/logfmt.log | lt -p logfmt -o json',
75
+ 'Kubernetes tables: find rows containing "gateway", keep headers:'.dim,
76
+ $ + 'curl -s https://cdn.codetunnel.net/lt/table.log | lt -H gateway',
77
+ 'Kubernetes tables: -k is shorthand for -p table -o table:'.dim,
78
+ $ + 'curl -s https://cdn.codetunnel.net/lt/table.log | lt -k payment',
79
+ 'Kubernetes tables: show pods that are not fully ready:'.dim,
80
+ $ + 'curl -s https://cdn.codetunnel.net/lt/table.log | lt -k -F \'lt (itemAt (split READY "/") 0) (itemAt (split READY "/") 1)\'',
81
+ 'Custom regex parsing (extract fields) and output as logfmt:'.dim,
82
+ $ + 'curl -s https://cdn.codetunnel.net/lt/text.log | lt -p \'(?<ts>\\S+) \\[(?<level>\\w+)\\] (?<message>.*)\' -o logfmt',
80
83
  ];
81
84
 
82
85
  const usage = Bossy.usage(definition, 'lt [options]\n Or: lt <filter>') + examples.join('\n');
package/src/log-source.js CHANGED
@@ -7,7 +7,7 @@ module.exports.logSource = emitter => {
7
7
  emitter.on('data', data => {
8
8
  const lines = data.toString()
9
9
  .split(/[\r\n|\n]/);
10
-
10
+
11
11
  lines[0] = incompleteLine + lines[0];
12
12
  incompleteLine = lines.pop(); // Either an incomplete line or an empty string due to the last \n
13
13
 
package/src/main.js CHANGED
@@ -32,7 +32,9 @@ function run() {
32
32
  debug('building pipeline');
33
33
  const pipeline = new LogPipeline(args, process.stdout);
34
34
  debug('registering stdin');
35
- logSource(process.stdin).on('log-line', l => pipeline.onLogLine(l));
35
+ logSource(process.stdin)
36
+ .on('log-line', l => pipeline.onLogLine(l))
37
+ .on('end', () => pipeline.onEnd());
36
38
  } catch(e) {
37
39
  console.error('Error:', e.message);
38
40
  process.exit(1);
package/src/pipeline.js CHANGED
@@ -1,25 +1,29 @@
1
1
  'use strict';
2
- const debug = require('debug')('logtunnel:pipeline');
3
2
 
4
- const filter = require('./transformers/filter');
5
- const ignore = require('./transformers/ignore');
6
- const parse = require('./transformers/parse');
7
- const field = require('./transformers/field');
8
- const output = require('./transformers/output');
3
+ const { FieldFilter } = require('./transformers/filters/field');
4
+ const { FindFilter } = require('./transformers/filters/find');
5
+ const { IgnoreFilter } = require('./transformers/filters/ignore');
6
+ const { outputFactory } = require('./transformers/outputs/factory');
7
+ const { parseFactory } = require('./transformers/parsers/factory');
8
+
9
+ const debug = require('debug')('logtunnel:pipeline');
9
10
 
10
11
  class LogPipeline {
11
12
  constructor(args, stdout) {
12
13
  this.firstLine = null;
13
- this.args = args;
14
+ this.args = this._normalizeArgs(args);
14
15
  this.stdout = stdout;
15
- this.transformers = this._buildTransformers().filter(t => t !== null);
16
+ this.outputTransformer = outputFactory(this.args.output);
17
+ this.isOutputBuffered = Boolean(this.outputTransformer.flush);
18
+ this.transformers = this._buildTransformers()
19
+ .filter(t => t !== null);
16
20
  }
17
21
 
18
22
  onLogLine(line) {
19
23
  try {
20
24
  debug('got line: ' + line)
21
- this._logLine(line);
22
- this._updateFirstLine(line);
25
+ const isFirstLine = this._updateFirstLine(line);
26
+ this._logLine(line, isFirstLine);
23
27
  } catch (e) {
24
28
  // Covering this would kill the process
25
29
  /* $lab:coverage:off$ */
@@ -29,11 +33,15 @@ class LogPipeline {
29
33
  }
30
34
  }
31
35
 
32
- _logLine(line) {
36
+ async _logLine(line, isFirstLine) {
33
37
  let output = line;
34
38
 
35
39
  for (let transformer of this.transformers) {
36
- const result = transformer(output, line, this);
40
+ const result = await transformer.run(
41
+ output,
42
+ line,
43
+ isFirstLine ? null : this.firstLine,
44
+ );
37
45
 
38
46
  // Transformer accepted the line
39
47
  if(result === true) {
@@ -49,7 +57,11 @@ class LogPipeline {
49
57
 
50
58
  // Transformer modified the line
51
59
  output = result;
52
- debug('line transformed: ' + JSON.stringify(output));
60
+ debug('line transformed: ' + JSON.stringify(output));
61
+ }
62
+
63
+ if (this.isOutputBuffered) {
64
+ return;
53
65
  }
54
66
 
55
67
  this.stdout.write(output + '\n');
@@ -57,29 +69,51 @@ class LogPipeline {
57
69
 
58
70
  _updateFirstLine(line) {
59
71
  if (this.firstLine) {
60
- return;
72
+ return false;
61
73
  }
62
74
 
63
75
  this.firstLine = line;
64
- if(this.args.headers) {
76
+ if(this.args.headers && !this.isOutputBuffered) {
65
77
  this.stdout.write(this.firstLine + '\n');
66
78
  }
79
+
80
+ return true;
81
+ }
82
+
83
+ onEnd() {
84
+ if (!this.isOutputBuffered) {
85
+ return;
86
+ }
87
+
88
+ const lines = this.outputTransformer.flush();
89
+ lines.forEach(line => this.stdout.write(line + '\n'));
67
90
  }
68
91
 
69
92
  _buildTransformers() {
70
93
  return [
71
94
  // First of all, filter which lines to accept
72
- this.args._ ? filter(this.args._) : null,
73
- ...this.args.filter.map(filter),
74
- ...this.args.ignore.map(ignore),
95
+ this.args._ ? new FindFilter(this.args._) : null,
96
+ ...this.args.filter.map(f => new FindFilter(f)),
97
+ ...this.args.ignore.map(i => new IgnoreFilter(i)),
75
98
  // Parse them...
76
- parse(this.args.parser),
99
+ parseFactory(this.args.parser),
77
100
  // ...and apply field filters
78
- ...this.args.field.map(field),
101
+ ...this.args.field.map(f => new FieldFilter(f)),
79
102
  // And finally format the output
80
- output(this.args.output),
103
+ this.outputTransformer,
81
104
  ];
82
105
  }
106
+
107
+ _normalizeArgs(args) {
108
+ const normalized = { ...args };
109
+
110
+ if (normalized.kubectl) {
111
+ normalized.parser = 'table';
112
+ normalized.output = 'table';
113
+ }
114
+
115
+ return normalized;
116
+ }
83
117
  }
84
118
 
85
119
  module.exports.LogPipeline = LogPipeline;
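A practical consequence of the buffered-output wiring above: when the output transformer exposes `flush()` (the table output), nothing is written per line and the whole table is printed once stdin ends. For example, reusing a command from the README, the re-sized table only appears after `curl` finishes:

```bash
curl -s https://cdn.codetunnel.net/lt/table.log | lt -k -F 'gt RESTARTS 0'
```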
package/src/transformers/filters/field.js ADDED
@@ -0,0 +1,18 @@
1
+ const { compileExpression } = require('bigodon');
2
+
3
+ class FieldFilter {
4
+ constructor(expression) {
5
+ this.expression = compileExpression(expression);
6
+ }
7
+
8
+ async run(line) {
9
+ if(typeof line !== 'object') {
10
+ throw new Error("To use a field filter, you need to specify a parser like '-p json' ");
11
+ }
12
+
13
+ const result = await this.expression(line);
14
+ return Boolean(result);
15
+ }
16
+ }
17
+
18
+ module.exports = { FieldFilter };
package/src/transformers/filters/find.js ADDED
@@ -0,0 +1,11 @@
1
+ class FindFilter {
2
+ constructor(regexStr) {
3
+ this.regex = new RegExp(regexStr, 'i');
4
+ }
5
+
6
+ run(line) {
7
+ return this.regex.test(line);
8
+ }
9
+ }
10
+
11
+ module.exports = { FindFilter };
package/src/transformers/filters/ignore.js ADDED
@@ -0,0 +1,11 @@
1
+ class IgnoreFilter {
2
+ constructor(regexStr) {
3
+ this.regex = new RegExp(regexStr, 'i');
4
+ }
5
+
6
+ run(line) {
7
+ return !this.regex.test(line);
8
+ }
9
+ }
10
+
11
+ module.exports = { IgnoreFilter };
package/src/transformers/outputs/bigodon.js ADDED
@@ -0,0 +1,17 @@
1
+ const { compile } = require('bigodon');
2
+
3
+ class BigodonOutput {
4
+ constructor(template) {
5
+ this.template = compile(template);
6
+ }
7
+
8
+ async run(line) {
9
+ if (typeof line !== 'object') {
10
+ throw new Error("To use an output transformer, you need to specify a parser like '-p json' ");
11
+ }
12
+
13
+ return this.template(line);
14
+ }
15
+ }
16
+
17
+ module.exports = { BigodonOutput };
package/src/transformers/outputs/factory.js ADDED
@@ -0,0 +1,21 @@
1
+ const { JsonOutput } = require('./json');
2
+ const { LogfmtOutput } = require('./logfmt');
3
+ const { BigodonOutput } = require('./bigodon');
4
+ const { OriginalOutput } = require('./original');
5
+ const { InspectOutput } = require('./inspect');
6
+ const { TableOutput } = require('./table');
7
+
8
+
9
+ function outputFactory(format) {
10
+ switch(format?.toLowerCase()) {
11
+ case undefined: return new InspectOutput(false);
12
+ case 'json': return new JsonOutput();
13
+ case 'table': return new TableOutput();
14
+ case 'logfmt': return new LogfmtOutput();
15
+ case 'inspect': return new InspectOutput(true);
16
+ case 'original': return new OriginalOutput();
17
+ default: return new BigodonOutput(format);
18
+ }
19
+ }
20
+
21
+ module.exports = { outputFactory };
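Note the `default:` branch above: any `-o` value that is not one of the known formats is compiled as a Bigodon template, which is what makes README commands like this one work:

```bash
curl -s https://cdn.codetunnel.net/lt/json.log | lt -p json -o '[{{ts}} {{upper level}}] {{message}}'
```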
package/src/transformers/outputs/inspect.js ADDED
@@ -0,0 +1,22 @@
1
+ const util = require('util');
2
+
3
+ class InspectOutput {
4
+ constructor(breakLines = false) {
5
+ this.options = {
6
+ colors: true,
7
+ depth: null,
8
+ breakLength: breakLines ? 80 : Infinity,
9
+ compact: !breakLines,
10
+ };
11
+ }
12
+
13
+ run(line) {
14
+ if (typeof line === 'string') {
15
+ return true;
16
+ }
17
+
18
+ return util.inspect({ ...line }, this.options);
19
+ }
20
+ }
21
+
22
+ module.exports = { InspectOutput };
package/src/transformers/outputs/json.js ADDED
@@ -0,0 +1,11 @@
1
+ class JsonOutput {
2
+ run(line) {
3
+ if (typeof line !== 'object') {
4
+ throw new Error("To use an output transformer, you need to specify a parser like '-p json'");
5
+ }
6
+
7
+ return JSON.stringify(line);
8
+ }
9
+ }
10
+
11
+ module.exports = { JsonOutput };
package/src/transformers/outputs/logfmt.js ADDED
@@ -0,0 +1,13 @@
1
+ const logfmt = require('logfmt');
2
+
3
+ class LogfmtOutput {
4
+ run(line) {
5
+ if (typeof line !== 'object') {
6
+ throw new Error("To use an output transformer, you need to specify a parser like '-p json'");
7
+ }
8
+
9
+ return logfmt.stringify(line);
10
+ }
11
+ }
12
+
13
+ module.exports = { LogfmtOutput };
package/src/transformers/outputs/original.js ADDED
@@ -0,0 +1,7 @@
1
+ class OriginalOutput {
2
+ run(_line, original) {
3
+ return original;
4
+ }
5
+ }
6
+
7
+ module.exports = { OriginalOutput };