mailauth 2.3.1 → 2.3.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/.gitattributes ADDED
@@ -0,0 +1 @@
+ *.js text eol=lf
package/README.md CHANGED
@@ -261,7 +261,7 @@ process.stdout.write(message);
  If you want to modify the message before sealing, you have to authenticate the message first and then use authentication results as input for the sealing step.
 
  ```js
- const { authenticate, sealMessage } = require('@postalsys/mailauth');
+ const { authenticate, sealMessage } = require('mailauth');
 
  // 1. authenticate the message
  const { arc, headers } = await authenticate(
@@ -1,9 +1,14 @@
- 'use strict';
+ /* eslint no-control-regex: 0 */
 
- // Calculates relaxed body hash for a message body stream
+ 'use strict';
 
  const crypto = require('crypto');
 
+ const CHAR_CR = 0x0d;
+ const CHAR_LF = 0x0a;
+ const CHAR_SPACE = 0x20;
+ const CHAR_TAB = 0x09;
+
  /**
  * Class for calculating body hash of an email message body stream
  * using the "relaxed" canonicalization
@@ -16,139 +21,244 @@ class RelaxedHash {
  * @param {Number} [maxBodyLength] Allowed body length count, the value from the l= parameter
  */
  constructor(algorithm, maxBodyLength) {
- algorithm = (algorithm || 'sha256').split('-').pop();
+ algorithm = (algorithm || 'sha256').split('-').pop().toLowerCase();
+
  this.bodyHash = crypto.createHash(algorithm);
 
- this.remainder = '';
+ this.remainder = false;
  this.byteLength = 0;
 
  this.bodyHashedBytes = 0;
  this.maxBodyLength = maxBodyLength;
+
+ this.maxSizeReached = false;
+
+ this.emptyLinesQueue = [];
  }
 
  _updateBodyHash(chunk) {
- // the following is needed for l= option
+ if (this.maxSizeReached) {
+ return;
+ }
+
+ // the following is needed for the l= option
  if (
  typeof this.maxBodyLength === 'number' &&
  !isNaN(this.maxBodyLength) &&
  this.maxBodyLength >= 0 &&
  this.bodyHashedBytes + chunk.length > this.maxBodyLength
  ) {
+ this.maxSizeReached = true;
  if (this.bodyHashedBytes >= this.maxBodyLength) {
  // nothing to do here, skip entire chunk
  return;
  }
+
  // only use allowed size of bytes
- chunk = chunk.slice(0, this.maxBodyLength - this.bodyHashedBytes);
+ chunk = chunk.subarray(0, this.maxBodyLength - this.bodyHashedBytes);
  }
 
  this.bodyHashedBytes += chunk.length;
  this.bodyHash.update(chunk);
+
+ //process.stdout.write(chunk);
  }
 
- update(chunk) {
- this.byteLength += chunk.length;
+ _drainPendingEmptyLines() {
+ if (this.emptyLinesQueue.length) {
+ for (let emptyLine of this.emptyLinesQueue) {
+ this._updateBodyHash(emptyLine);
+ }
+ this.emptyLinesQueue = [];
+ }
+ }
 
- let bodyStr;
+ _pushBodyHash(chunk) {
+ if (!chunk || !chunk.length) {
+ return;
+ }
 
- // find next remainder
- let nextRemainder = '';
+ // remove line endings
+ let foundNonLn = false;
 
- // This crux finds and removes the spaces from the last line and the newline characters after the last non-empty line
- // If we get another chunk that does not match this description then we can restore the previously processed data
- let state = 'file';
+ // buffer line endings and empty lines
  for (let i = chunk.length - 1; i >= 0; i--) {
- let c = chunk[i];
-
- if (state === 'file' && (c === 0x0a || c === 0x0d)) {
- // do nothing, found \n or \r at the end of chunk, stil end of file
- } else if (state === 'file' && (c === 0x09 || c === 0x20)) {
- // switch to line ending mode, this is the last non-empty line
- state = 'line';
- } else if (state === 'line' && (c === 0x09 || c === 0x20)) {
- // do nothing, found ' ' or \t at the end of line, keep processing the last non-empty line
- } else if (state === 'file' || state === 'line') {
- // non line/file ending character found, switch to body mode
- state = 'body';
- if (i === chunk.length - 1) {
- // final char is not part of line end or file end, so do nothing
- break;
+ if (chunk[i] !== CHAR_LF && chunk[i] !== CHAR_CR) {
+ this._drainPendingEmptyLines();
+ if (i < chunk.length - 1) {
+ this.emptyLinesQueue.push(chunk.subarray(i + 1));
+ chunk = chunk.subarray(0, i + 1);
  }
+ foundNonLn = true;
+ break;
  }
+ }
+
+ if (!foundNonLn) {
+ this.emptyLinesQueue.push(chunk);
+ return;
+ }
 
- if (i === 0) {
- // reached to the beginning of the chunk, check if it is still about the ending
- // and if the remainder also matches
- if (
- (state === 'file' && (!this.remainder || /[\r\n]$/.test(this.remainder))) ||
- (state === 'line' && (!this.remainder || /[ \t]$/.test(this.remainder)))
- ) {
- // keep everything
- this.remainder += chunk.toString('binary');
- return;
- } else if (state === 'line' || state === 'file') {
- // process existing remainder as normal line but store the current chunk
- nextRemainder = chunk.toString('binary');
- chunk = false;
- break;
+ this._updateBodyHash(chunk);
+ }
+
+ fixLineBuffer(line) {
+ let resultLine = [];
+
+ let nonWspFound = false;
+ let prevWsp = false;
+
+ for (let i = line.length - 1; i >= 0; i--) {
+ if (line[i] === CHAR_LF) {
+ resultLine.unshift(line[i]);
+ if (i === 0 || line[i - 1] !== CHAR_CR) {
+ // add missing carriage return
+ resultLine.unshift(CHAR_CR);
  }
+ continue;
  }
 
- if (state !== 'body') {
+ if (line[i] === CHAR_CR) {
+ resultLine.unshift(line[i]);
+ continue;
+ }
+
+ if (line[i] === CHAR_SPACE || line[i] === CHAR_TAB) {
+ if (nonWspFound) {
+ prevWsp = true;
+ }
  continue;
  }
 
- // reached first non ending byte
- nextRemainder = chunk.slice(i + 1).toString('binary');
- chunk = chunk.slice(0, i + 1);
- break;
+ if (prevWsp) {
+ resultLine.unshift(CHAR_SPACE);
+ prevWsp = false;
+ }
+
+ nonWspFound = true;
+ resultLine.unshift(line[i]);
+ }
+
+ if (prevWsp && nonWspFound) {
+ resultLine.unshift(CHAR_SPACE);
+ }
+
+ return Buffer.from(resultLine);
+ }
+
+ update(chunk, final) {
+ this.byteLength += (chunk && chunk.length) || 0;
+ if (this.maxSizeReached) {
+ return;
  }
 
- let needsFixing = !!this.remainder;
- if (chunk && !needsFixing) {
- // check if we even need to change anything
- for (let i = 0, len = chunk.length; i < len; i++) {
- if (i && chunk[i] === 0x0a && chunk[i - 1] !== 0x0d) {
- // missing \r before \n
- needsFixing = true;
- break;
- } else if (i && chunk[i] === 0x0d && chunk[i - 1] === 0x20) {
- // trailing WSP found
- needsFixing = true;
- break;
- } else if (i && chunk[i] === 0x20 && chunk[i - 1] === 0x20) {
- // multiple spaces found, needs to be replaced with just one
- needsFixing = true;
- break;
- } else if (chunk[i] === 0x09) {
- // TAB found, needs to be replaced with a space
- needsFixing = true;
- break;
+ // Canonicalize content by applying a and b in order:
+ // a.1. Ignore all whitespace at the end of lines.
+ // a.2. Reduce all sequences of WSP within a line to a single SP character.
+
+ // b.1. Ignore all empty lines at the end of the message body.
+ // b.2. If the body is non-empty but does not end with a CRLF, a CRLF is added.
+
+ let lineEndPos = -1;
+ let lineNeedsFixing = false;
+ let cursorPos = 0;
+
+ if (this.remainder && this.remainder.length) {
+ if (chunk) {
+ // concatting chunks might be bad for performance :S
+ chunk = Buffer.concat([this.remainder, chunk]);
+ } else {
+ chunk = this.remainder;
+ }
+ this.remainder = false;
+ }
+
+ if (chunk && chunk.length) {
+ for (let pos = 0; pos < chunk.length; pos++) {
+ switch (chunk[pos]) {
+ case CHAR_LF:
+ if (
+ !lineNeedsFixing &&
+ // previous character is not <CR>
+ ((pos >= 1 && chunk[pos - 1] !== CHAR_CR) ||
+ // LF is the first byte on the line
+ pos === 0 ||
+ // there's a space before line break
+ (pos >= 2 && chunk[pos - 1] === CHAR_CR && chunk[pos - 2] === CHAR_SPACE))
+ ) {
+ lineNeedsFixing = true;
+ }
+
+ // line break
+ if (lineNeedsFixing) {
+ // emit pending bytes up to the last line break before current line
+ if (lineEndPos >= 0 && lineEndPos >= cursorPos) {
+ let chunkPart = chunk.subarray(cursorPos, lineEndPos + 1);
+ this._pushBodyHash(chunkPart);
+ }
+
+ let line = chunk.subarray(lineEndPos + 1, pos + 1);
+ this._pushBodyHash(this.fixLineBuffer(line));
+
+ lineNeedsFixing = false;
+
+ // move cursor to the start of next line
+ cursorPos = pos + 1;
+ }
+
+ lineEndPos = pos;
+
+ break;
+
+ case CHAR_SPACE:
+ if (!lineNeedsFixing && pos && chunk[pos - 1] === CHAR_SPACE) {
+ lineNeedsFixing = true;
+ }
+ break;
+
+ case CHAR_TAB:
+ // non-space WSP always needs replacing
+ lineNeedsFixing = true;
+ break;
+
+ default:
  }
  }
  }
 
- if (needsFixing) {
- bodyStr = this.remainder + (chunk ? chunk.toString('binary') : '');
- this.remainder = nextRemainder;
- bodyStr = bodyStr
- .replace(/\r?\n/g, '\n') // use js line endings
- .replace(/[ \t]*$/gm, '') // remove line endings, rtrim
- .replace(/[ \t]+/gm, ' ') // single spaces
- .replace(/\n/g, '\r\n'); // restore rfc822 line endings
- chunk = Buffer.from(bodyStr, 'binary');
- } else if (nextRemainder) {
- this.remainder = nextRemainder;
+ if (chunk && cursorPos < chunk.length && cursorPos !== lineEndPos) {
+ // emit data from chunk
+
+ let chunkPart = chunk.subarray(cursorPos, lineEndPos + 1);
+
+ if (chunkPart.length) {
+ this._pushBodyHash(lineNeedsFixing ? this.fixLineBuffer(chunkPart) : chunkPart);
+ lineNeedsFixing = false;
+ }
+
+ cursorPos = lineEndPos + 1;
  }
 
- this._updateBodyHash(chunk);
+ if (chunk && !final && cursorPos < chunk.length) {
+ this.remainder = chunk.subarray(cursorPos);
+ }
+
+ if (final) {
+ let chunkPart = (cursorPos && chunk && chunk.subarray(cursorPos)) || chunk;
+ if (chunkPart && chunkPart.length) {
+ this._pushBodyHash(lineNeedsFixing ? this.fixLineBuffer(chunkPart) : chunkPart);
+ lineNeedsFixing = false;
+ }
+
+ if (this.bodyHashedBytes) {
+ // terminating line break for non-empty messages
+ this._updateBodyHash(Buffer.from([CHAR_CR, CHAR_LF]));
+ }
+ }
  }
 
  digest(encoding) {
- if (/[\r\n]$/.test(this.remainder) && this.bodyHashedBytes > 0) {
- // add terminating line end
- this._updateBodyHash(Buffer.from('\r\n'));
- }
+ this.update(null, true);
 
  // finalize
  return this.bodyHash.digest(encoding);
@@ -156,3 +266,27 @@ class RelaxedHash {
  }
 
  module.exports = { RelaxedHash };
+
+ /*
+ let fs = require('fs');
+
+ const getBody = message => {
+ message = message.toString('binary');
+ let match = message.match(/\r?\n\r?\n/);
+ if (match) {
+ message = message.substr(match.index + match[0].length);
+ }
+ return Buffer.from(message, 'binary');
+ };
+
+ let s = fs.readFileSync(process.argv[2]);
+
+ let k = new RelaxedHash('rsa-sha256', -1);
+
+ for (let byte of getBody(s)) {
+ k.update(Buffer.from([byte]));
+ }
+
+ console.error(k.digest('base64'));
+ console.error(k.byteLength, k.bodyHashedBytes);
+ */
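
The hunks above replace the regex-and-string relaxed body canonicalizer with a streaming, Buffer-based one: bytes are scanned for CR/LF/WSP, incomplete lines are carried over in `remainder`, trailing empty lines are queued until more content arrives, and `digest()` now finalizes through `update(null, true)`. The commented-out block at the end is the author's manual test harness. For reference, the same RFC 6376 "relaxed" rules (a.1, a.2, b.1, b.2 in the new comments) can be written for an in-memory body much like the removed regex pipeline did; this is an illustrative sketch, not the library API:

```js
const crypto = require('crypto');

// In-memory reference of the RFC 6376 "relaxed" body canonicalization rules;
// illustrative only, the streaming class above applies the same rules chunk by chunk.
const relaxedBodyHash = (body, algorithm = 'sha256') => {
    let canonical = body
        .toString('binary')
        .replace(/\r?\n/g, '\n') // work with \n while processing
        .replace(/[ \t]+$/gm, '') // a.1. ignore all whitespace at the end of lines
        .replace(/[ \t]+/g, ' ') // a.2. reduce WSP sequences to a single SP
        .replace(/\n+$/, '') // b.1. ignore all empty lines at the end of the body
        .replace(/\n/g, '\r\n'); // restore CRLF line endings

    if (canonical.length) {
        canonical += '\r\n'; // b.2. a non-empty body always ends with a single CRLF
    }

    return crypto.createHash(algorithm).update(Buffer.from(canonical, 'binary')).digest('base64');
};

relaxedBodyHash(Buffer.from('Hello \t world  \r\n\r\n\r\n'));
// hashes the bytes "Hello world\r\n", i.e. the canonical form of the input
```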
@@ -22,6 +22,8 @@ class SimpleHash {
 
  this.bodyHashedBytes = 0;
  this.maxBodyLength = maxBodyLength;
+
+ this.lastNewline = false;
  }
 
  _updateBodyHash(chunk) {
@@ -42,6 +44,8 @@ class SimpleHash {
 
  this.bodyHashedBytes += chunk.length;
  this.bodyHash.update(chunk);
+
+ //process.stdout.write(chunk);
  }
 
  update(chunk) {
@@ -81,10 +85,11 @@ class SimpleHash {
  }
 
  this._updateBodyHash(chunk);
+ this.lastNewline = chunk[chunk.length - 1] === 0x0a;
  }
 
  digest(encoding) {
- if (this.remainder.length || !this.bodyHashedBytes) {
+ if (!this.lastNewline || !this.bodyHashedBytes) {
  // emit empty line buffer to keep the stream flowing
  this._updateBodyHash(Buffer.from('\r\n'));
  }
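
The SimpleHash change swaps the old `remainder.length` check for a `lastNewline` flag, so `digest()` only appends a CRLF when the hashed body did not already end with a line feed (or was empty). For reference, RFC 6376 "simple" body canonicalization for an in-memory body looks roughly like the sketch below; illustrative only, the class above does the same thing incrementally on a stream:

```js
const crypto = require('crypto');

// In-memory reference of RFC 6376 "simple" body canonicalization: keep the body
// as-is except for trailing empty lines, and hash a single CRLF for an empty body.
const simpleBodyHash = (body, algorithm = 'sha256') => {
    let canonical = body
        .toString('binary')
        .replace(/(\r\n)+$/, '\r\n'); // drop empty lines at the end of the body

    if (!/\r\n$/.test(canonical)) {
        canonical += '\r\n'; // covers empty bodies and bodies without a trailing CRLF
    }

    return crypto.createHash(algorithm).update(Buffer.from(canonical, 'binary')).digest('base64');
};

simpleBodyHash(Buffer.from('Hello world\r\n\r\n\r\n')); // hashes "Hello world\r\n"
```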
@@ -1,6 +1,6 @@
  'use strict';
 
- const { getSigningHeaderLines, getPublicKey, parseDkimHeaders, formatAuthHeaderRow, getAligment } = require('../../lib/tools');
+ const { getSigningHeaderLines, getPublicKey, parseDkimHeaders, formatAuthHeaderRow, getAlignment } = require('../../lib/tools');
  const { MessageParser } = require('./message-parser');
  const { dkimBody } = require('./body');
  const { generateCanonicalizedHeader } = require('./header');
@@ -195,7 +195,7 @@ class DkimVerifier extends MessageParser {
  };
 
  if (signatureHeader.type === 'DKIM' && this.headerFrom?.length) {
- status.aligned = this.headerFrom?.length ? getAligment(this.headerFrom[0].split('@').pop(), [signatureHeader.signingDomain]) : false;
+ status.aligned = this.headerFrom?.length ? getAlignment(this.headerFrom[0].split('@').pop(), [signatureHeader.signingDomain]) : false;
  }
 
  let bodyHash = this.bodyHashes.get(signatureHeader.bodyHashKey)?.hash;
@@ -3,7 +3,7 @@
  const dns = require('dns').promises;
  const punycode = require('punycode/');
  const psl = require('psl');
- const { formatAuthHeaderRow, getAligment } = require('../tools');
+ const { formatAuthHeaderRow, getAlignment } = require('../tools');
 
  const resolveTxt = async (domain, resolver) => {
  try {
@@ -146,8 +146,8 @@ const verifyDmarc = async opts => {
  // use "sp" if this is a subdomain of an org domain and "sp" is set, otherwise use "p"
  const policy = dmarcRecord.isOrgRecord && dmarcRecord.sp ? dmarcRecord.sp : dmarcRecord.p;
 
- const dkimAlignment = getAligment(domain, dkimDomains, { strict: dmarcRecord.adkim === 's' });
- const spfAlignment = getAligment(domain, spfDomains, { strict: dmarcRecord.aspf === 's' });
+ const dkimAlignment = getAlignment(domain, dkimDomains, { strict: dmarcRecord.adkim === 's' });
+ const spfAlignment = getAlignment(domain, spfDomains, { strict: dmarcRecord.aspf === 's' });
 
  if (dkimAlignment || spfAlignment) {
  // pass
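
The renamed `getAlignment` helper is used above both for per-signature DKIM alignment (`status.aligned`) and here for DMARC policy evaluation, with the strict flags coming from the record's adkim/aspf tags. As a rough illustration of what strict versus relaxed DMARC identifier alignment means (using psl, which this module already requires; this sketch is not mailauth's internal implementation):

```js
const psl = require('psl');

// Illustrative DMARC identifier alignment check: strict mode (adkim=s / aspf=s)
// needs an exact match with the RFC5322.From domain, relaxed mode only needs
// both domains to share the same organizational domain.
const isAligned = (fromDomain, authenticatedDomain, strict = false) => {
    fromDomain = fromDomain.toLowerCase();
    authenticatedDomain = authenticatedDomain.toLowerCase();

    if (strict) {
        return fromDomain === authenticatedDomain;
    }

    let orgDomain = psl.get(fromDomain);
    return orgDomain !== null && orgDomain === psl.get(authenticatedDomain);
};

isAligned('example.com', 'newsletter.example.com'); // true (relaxed)
isAligned('example.com', 'newsletter.example.com', true); // false (strict)
```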
@@ -164,7 +164,12 @@ const verifyDmarc = async opts => {
  p: dmarcRecord.p,
  sp: dmarcRecord.sp || dmarcRecord.p,
  pct: dmarcRecord.pct,
- rr: dmarcRecord.rr
+ rr: dmarcRecord.rr,
+
+ alignment: {
+ spf: { result: spfAlignment, strict: dmarcRecord.aspf === 's' },
+ dkim: { result: dkimAlignment, strict: dmarcRecord.adkim === 's' }
+ }
  });
  };
 
package/lib/mailauth.js CHANGED
@@ -6,6 +6,7 @@ const { dmarc } = require('./dmarc');
  const { arc, createSeal } = require('./arc');
  const { bimi } = require('./bimi');
  const { parseReceived } = require('./parse-received');
+ const { sealMessage } = require('./arc');
  const libmime = require('libmime');
  const os = require('os');
  const { isIP } = require('net');
@@ -179,4 +180,4 @@ const authenticate = async (input, opts) => {
  };
  };
 
- module.exports = { authenticate };
+ module.exports = { authenticate, sealMessage };
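
With `sealMessage` now exported from the package root (matching the README change above), the authenticate-then-seal flow looks roughly like the sketch below. File paths, host names, and domain values are placeholders, and the sealing option names follow the project README's existing example rather than anything introduced in this release:

```js
const fs = require('fs');
const { authenticate, sealMessage } = require('mailauth');

const run = async () => {
    const message = await fs.promises.readFile('./message.eml'); // placeholder path

    // 1. authenticate the message and keep the ARC results
    const { arc, headers } = await authenticate(message, {
        ip: '198.51.100.10', // placeholder client IP
        helo: 'mail.example.com', // placeholder HELO/EHLO hostname
        mta: 'mx.example.com', // placeholder hostname of this MTA
        sender: 'sender@example.com' // placeholder MAIL FROM address
    });

    // 2. modify the message as needed, then ...

    // 3. seal the modified message using the original authentication results
    const sealHeaders = await sealMessage(message, {
        signingDomain: 'example.com', // placeholder d= value
        selector: 'sel', // placeholder s= value
        privateKey: await fs.promises.readFile('./private-key.pem'), // placeholder path
        authResults: arc.authResults,
        cv: arc.status.result
    });

    // prepend sealHeaders (and the generated `headers`) when relaying the message
    process.stdout.write(sealHeaders);
};

run().catch(err => console.error(err));
```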
@@ -15,7 +15,7 @@ const matchIp = (addr, range) => {
  }
  };
 
- const parseCidrValue = (val, defaultValue) => {
+ const parseCidrValue = (val, defaultValue, type) => {
  val = val || '';
  let domain = '';
  let cidr4 = '';
@@ -29,8 +29,15 @@ const parseCidrValue = (val, defaultValue) => {
  throw err;
  }
  domain = cidrMatch[1] || '';
+
  cidr4 = cidrMatch[2] ? Number(cidrMatch[2].substr(1)) : '';
  cidr6 = cidrMatch[3] ? Number(cidrMatch[3].substr(2)) : '';
+
+ if (type === 'ip6' && cidr4 && !cidr6) {
+ // there is no dual cidr for IP addresses
+ cidr6 = cidr4;
+ cidr4 = '';
+ }
  }
 
  domain = domain.toLowerCase().trim() || defaultValue;
@@ -270,7 +277,7 @@ const spfVerify = async (domain, opts) => {
  case 'ip4':
  case 'ip6':
  {
- let { domain: range, cidr4, cidr6 } = parseCidrValue(val);
+ let { domain: range, cidr4, cidr6 } = parseCidrValue(val, false, type);
  if (!range) {
  let err = new Error('SPF failure');
  err.spfResult = { error: 'permerror', text: `bare IP address` };
@@ -315,7 +322,7 @@ const spfVerify = async (domain, opts) => {
 
  case 'a':
  {
- let { domain: a, cidr4, cidr6 } = parseCidrValue(val, domain);
+ let { domain: a, cidr4, cidr6 } = parseCidrValue(val, domain, type);
  let cidr = net.isIPv6(opts.ip) ? cidr6 : cidr4;
 
  a = macro(a, opts);
@@ -339,7 +346,7 @@ const spfVerify = async (domain, opts) => {
 
  case 'mx':
  {
- let { domain: mxDomain, cidr4, cidr6 } = parseCidrValue(val, domain);
+ let { domain: mxDomain, cidr4, cidr6 } = parseCidrValue(val, domain, type);
  let cidr = net.isIPv6(opts.ip) ? cidr6 : cidr4;
 
  try {
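
The new `type` argument lets `parseCidrValue` treat a lone CIDR suffix on an `ip6` mechanism as an IPv6 prefix length instead of an IPv4 one; per RFC 7208, the dual `/<ip4-cidr>//<ip6-cidr>` form only applies to the `a` and `mx` mechanisms. A small illustration with ipaddr.js (already a dependency, see licenses.txt below) of how an `ip6` term is expected to match once the prefix length is applied to the IPv6 address space:

```js
const ipaddr = require('ipaddr.js');

// "ip6:2001:db8::/32" — with the fix, the single "/32" is read as an IPv6 prefix
// length, so a client inside 2001:db8::/32 matches the mechanism.
const client = ipaddr.parse('2001:db8:1234::1');
console.log(client.match(ipaddr.parseCIDR('2001:db8::/32'))); // true

// Dual-CIDR form (a/mx mechanisms only), e.g. "a:mail.example.com/24//64":
// /24 applies when the client connects over IPv4, /64 when it connects over IPv6.
```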
package/lib/tools.js CHANGED
@@ -429,7 +429,7 @@ const formatDomain = domain => {
  return domain;
  };
 
- const getAligment = (fromDomain, domainList, strict) => {
+ const getAlignment = (fromDomain, domainList, strict) => {
  domainList = [].concat(domainList || []);
  if (strict) {
  fromDomain = formatDomain(fromDomain);
@@ -530,7 +530,7 @@ module.exports = {
 
  validateAlgorithm,
 
- getAligment,
+ getAlignment,
 
  formatRelaxedLine,
 
package/licenses.txt CHANGED
@@ -1,11 +1,11 @@
  name license type link installed version author
  ---- ------------ ---- ----------------- ------
- @fidm/x509 MIT git+ssh://git@github.com/fidm/x509.git 1.2.1 n/a
- ipaddr.js MIT git://github.com/whitequark/ipaddr.js.git 2.0.1 whitequark
- joi BSD-3-Clause git://github.com/sideway/joi.git 17.5.0 n/a
- libmime MIT git://github.com/andris9/libmime.git 5.0.0 Andris Reinman
- node-forge (BSD-3-Clause OR GPL-2.0) git+https://github.com/digitalbazaar/forge.git 1.2.1 Digital Bazaar, Inc.
- nodemailer MIT git+https://github.com/nodemailer/nodemailer.git 6.7.2 Andris Reinman
- psl MIT git+ssh://git@github.com/lupomontero/psl.git 1.8.0 Lupo Montero
- punycode MIT git+https://github.com/bestiejs/punycode.js.git 2.1.1 Mathias Bynens
- yargs MIT git+https://github.com/yargs/yargs.git 17.3.1 n/a
+ @fidm/x509 MIT git+ssh://git@github.com/fidm/x509.git 1.2.1
+ ipaddr.js MIT git://github.com/whitequark/ipaddr.js.git 2.0.1 whitequark whitequark@whitequark.org
+ joi BSD-3-Clause git://github.com/sideway/joi.git 17.6.0
+ libmime MIT git://github.com/andris9/libmime.git 5.1.0 Andris Reinman andris@kreata.ee
+ node-forge (BSD-3-Clause OR GPL-2.0) git+https://github.com/digitalbazaar/forge.git 1.3.1 Digital Bazaar, Inc. support@digitalbazaar.com http://digitalbazaar.com/
+ nodemailer MIT git+https://github.com/nodemailer/nodemailer.git 6.7.5 Andris Reinman
+ psl MIT git+ssh://git@github.com/lupomontero/psl.git 1.8.0 Lupo Montero lupomontero@gmail.com https://lupomontero.com/
+ punycode MIT git+https://github.com/bestiejs/punycode.js.git 2.1.1 Mathias Bynens https://mathiasbynens.be/
+ yargs MIT git+https://github.com/yargs/yargs.git 17.5.0
package/man/mailauth.1 CHANGED
@@ -1,4 +1,4 @@
- .TH "MAILAUTH" "1" "January 2022" "v2.3.0" "Mailauth Help"
+ .TH "MAILAUTH" "1" "June 2022" "v2.3.3" "Mailauth Help"
  .SH "NAME"
  \fBmailauth\fR
  .QP
package/package.json CHANGED
@@ -1,13 +1,14 @@
  {
  "name": "mailauth",
- "version": "2.3.1",
+ "version": "2.3.4",
  "description": "Email authentication library for Node.js",
  "main": "lib/mailauth.js",
  "scripts": {
  "test": "eslint \"lib/**/*.js\" \"test/**/*.js\" && mocha --recursive \"./test/**/*.js\" --reporter spec",
  "prepublish": "npm run man || true",
  "man": "cd man && marked-man --version `node -e \"console.log('v'+require('../package.json').version)\"` --manual 'Mailauth Help' --section 1 man.md > mailauth.1",
- "build-dist": "npm run man && npm run licenses && pkg --compress Brotli package.json",
+ "build-source": "rm -rf node_modules package-lock.json && npm install && npm run man && npm run licenses && rm -rf node_modules package-lock.json && npm install --production && rm -rf package-lock.json",
+ "build-dist": "npx pkg --compress Brotli package.json && rm -rf package-lock.json && npm install",
  "licenses": "license-report --only=prod --output=table --config license-report-config.json > licenses.txt"
  },
  "repository": {
@@ -31,28 +32,28 @@
  },
  "homepage": "https://github.com/postalsys/mailauth",
  "devDependencies": {
- "chai": "4.3.4",
- "eslint": "8.7.0",
+ "chai": "4.3.6",
+ "eslint": "8.17.0",
  "eslint-config-nodemailer": "1.2.0",
- "eslint-config-prettier": "8.3.0",
+ "eslint-config-prettier": "8.5.0",
  "js-yaml": "4.1.0",
- "license-report": "4.5.0",
+ "license-report": "5.0.2",
  "marked": "0.7.0",
  "marked-man": "0.7.0",
  "mbox-reader": "1.1.5",
- "mocha": "9.1.4",
- "pkg": "5.5.2"
+ "mocha": "10.0.0",
+ "pkg": "5.7.0"
  },
  "dependencies": {
  "@fidm/x509": "1.2.1",
  "ipaddr.js": "2.0.1",
- "joi": "17.5.0",
- "libmime": "5.0.0",
- "node-forge": "1.2.1",
- "nodemailer": "6.7.2",
+ "joi": "17.6.0",
+ "libmime": "5.1.0",
+ "node-forge": "1.3.1",
+ "nodemailer": "6.7.5",
  "psl": "1.8.0",
  "punycode": "2.1.1",
- "yargs": "17.3.1"
+ "yargs": "17.5.1"
  },
  "engines": {
  "node": ">=14.0.0"
@@ -64,17 +65,16 @@
  "man/mailauth.1"
  ],
  "pkg": {
- "scripts": [
- "workers/**/*.js"
- ],
  "assets": [
  "man/**/*",
  "licenses.txt",
  "LICENSE.txt"
  ],
- "_targets": [
- "node16-macos-x64"
+ "targets": [
+ "node16-linux-x64",
+ "node16-macos-x64",
+ "node16-win-x64"
  ],
- "outputPath": "dist"
+ "outputPath": "ee-dist"
  }
  }