Change endpoint from persons to people

xfarrow
2025-03-23 21:00:08 +01:00
parent 4ae263662c
commit d005193f63
7158 changed files with 700476 additions and 735 deletions

@@ -0,0 +1,251 @@
'use strict';
// FIXME:
// replace this Transform mess with a method that pipes input argument to output argument
const MessageParser = require('./message-parser');
const RelaxedBody = require('./relaxed-body');
const sign = require('./sign');
const PassThrough = require('stream').PassThrough;
const fs = require('fs');
const path = require('path');
const crypto = require('crypto');
const DKIM_ALGO = 'sha256';
const MAX_MESSAGE_SIZE = 128 * 1024; // buffer messages larger than this to disk
/*
// Usage:
let dkim = new DKIM({
domainName: 'example.com',
keySelector: 'key-selector',
privateKey,
cacheDir: '/tmp'
});
dkim.sign(input).pipe(process.stdout);
// Where input is an RFC 822 message (either a stream, string or Buffer)
// and the returned stream is the DKIM signed RFC 822 message
*/
class DKIMSigner {
constructor(options, keys, input, output) {
this.options = options || {};
this.keys = keys;
this.cacheTreshold = Number(this.options.cacheTreshold) || MAX_MESSAGE_SIZE;
this.hashAlgo = this.options.hashAlgo || DKIM_ALGO;
this.cacheDir = this.options.cacheDir || false;
this.chunks = [];
this.chunklen = 0;
this.readPos = 0;
this.cachePath = this.cacheDir ? path.join(this.cacheDir, 'message.' + Date.now() + '-' + crypto.randomBytes(14).toString('hex')) : false;
this.cache = false;
this.headers = false;
this.bodyHash = false;
this.parser = false;
this.relaxedBody = false;
this.input = input;
this.output = output;
this.output.usingCache = false;
this.hasErrored = false;
this.input.on('error', err => {
this.hasErrored = true;
this.cleanup();
output.emit('error', err);
});
}
cleanup() {
if (!this.cache || !this.cachePath) {
return;
}
fs.unlink(this.cachePath, () => false);
}
createReadCache() {
// stream the body data that was spooled to the cache file into the output
this.cache = fs.createReadStream(this.cachePath);
this.cache.once('error', err => {
this.cleanup();
this.output.emit('error', err);
});
this.cache.once('close', () => {
this.cleanup();
});
this.cache.pipe(this.output);
}
sendNextChunk() {
if (this.hasErrored) {
return;
}
if (this.readPos >= this.chunks.length) {
if (!this.cache) {
return this.output.end();
}
return this.createReadCache();
}
let chunk = this.chunks[this.readPos++];
if (this.output.write(chunk) === false) {
return this.output.once('drain', () => {
this.sendNextChunk();
});
}
setImmediate(() => this.sendNextChunk());
}
sendSignedOutput() {
let keyPos = 0;
let signNextKey = () => {
if (keyPos >= this.keys.length) {
this.output.write(this.parser.rawHeaders);
return setImmediate(() => this.sendNextChunk());
}
let key = this.keys[keyPos++];
let dkimField = sign(this.headers, this.hashAlgo, this.bodyHash, {
domainName: key.domainName,
keySelector: key.keySelector,
privateKey: key.privateKey,
headerFieldNames: this.options.headerFieldNames,
skipFields: this.options.skipFields
});
if (dkimField) {
this.output.write(Buffer.from(dkimField + '\r\n'));
}
return setImmediate(signNextKey);
};
if (this.bodyHash && this.headers) {
return signNextKey();
}
this.output.write(this.parser.rawHeaders);
this.sendNextChunk();
}
createWriteCache() {
this.output.usingCache = true;
// pipe the remaining body data to the cache file
this.cache = fs.createWriteStream(this.cachePath);
this.cache.once('error', err => {
this.cleanup();
// drain input
this.relaxedBody.unpipe(this.cache);
this.relaxedBody.on('readable', () => {
while (this.relaxedBody.read() !== null) {
// do nothing
}
});
this.hasErrored = true;
// emit error
this.output.emit('error', err);
});
this.cache.once('close', () => {
this.sendSignedOutput();
});
this.relaxedBody.removeAllListeners('readable');
this.relaxedBody.pipe(this.cache);
}
signStream() {
this.parser = new MessageParser();
this.relaxedBody = new RelaxedBody({
hashAlgo: this.hashAlgo
});
this.parser.on('headers', value => {
this.headers = value;
});
this.relaxedBody.on('hash', value => {
this.bodyHash = value;
});
this.relaxedBody.on('readable', () => {
let chunk;
if (this.cache) {
return;
}
while ((chunk = this.relaxedBody.read()) !== null) {
this.chunks.push(chunk);
this.chunklen += chunk.length;
if (this.chunklen >= this.cacheTreshold && this.cachePath) {
return this.createWriteCache();
}
}
});
this.relaxedBody.on('end', () => {
if (this.cache) {
return;
}
this.sendSignedOutput();
});
this.parser.pipe(this.relaxedBody);
setImmediate(() => this.input.pipe(this.parser));
}
}
class DKIM {
constructor(options) {
this.options = options || {};
this.keys = [].concat(
this.options.keys || {
domainName: options.domainName,
keySelector: options.keySelector,
privateKey: options.privateKey
}
);
}
sign(input, extraOptions) {
let output = new PassThrough();
let inputStream = input;
let writeValue = false;
if (Buffer.isBuffer(input)) {
writeValue = input;
inputStream = new PassThrough();
} else if (typeof input === 'string') {
writeValue = Buffer.from(input);
inputStream = new PassThrough();
}
let options = this.options;
if (extraOptions && Object.keys(extraOptions).length) {
options = {};
Object.keys(this.options || {}).forEach(key => {
options[key] = this.options[key];
});
Object.keys(extraOptions || {}).forEach(key => {
if (!(key in options)) {
options[key] = extraOptions[key];
}
});
}
let signer = new DKIMSigner(options, this.keys, inputStream, output);
setImmediate(() => {
signer.signStream();
if (writeValue) {
setImmediate(() => {
inputStream.end(writeValue);
});
}
});
return output;
}
}
module.exports = DKIM;
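
For reference, a minimal usage sketch of the DKIM class above; the require path, key file, cache settings and message content are placeholders rather than part of the module:

const fs = require('fs');
const DKIM = require('./dkim'); // hypothetical path to the module above

const dkim = new DKIM({
    domainName: 'example.com',
    keySelector: 'default',
    privateKey: fs.readFileSync('./dkim-private.pem', 'utf-8'), // placeholder PEM key
    cacheDir: '/tmp',
    cacheTreshold: 1024 * 1024 // spool messages larger than 1 MiB to disk (option name as spelled in the constructor)
});

// sign() accepts a stream, string or Buffer and returns a PassThrough with the signed message
const message = 'From: sender@example.com\r\nTo: recipient@example.net\r\nSubject: Hello\r\n\r\nHello world\r\n';
dkim.sign(message).pipe(process.stdout);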

@@ -0,0 +1,155 @@
'use strict';
const Transform = require('stream').Transform;
/**
* A MessageParser instance is a Transform stream that separates message headers
* from the rest of the body. Headers are emitted with the 'headers' event; the
* message body is passed on as the resulting stream.
*/
class MessageParser extends Transform {
constructor(options) {
super(options);
this.lastBytes = Buffer.alloc(4);
this.headersParsed = false;
this.headerBytes = 0;
this.headerChunks = [];
this.rawHeaders = false;
this.bodySize = 0;
}
/**
* Keeps count of the last 4 bytes in order to detect line breaks on chunk boundaries
*
* @param {Buffer} data Next data chunk from the stream
*/
updateLastBytes(data) {
let lblen = this.lastBytes.length;
let nblen = Math.min(data.length, lblen);
// shift existing bytes
for (let i = 0, len = lblen - nblen; i < len; i++) {
this.lastBytes[i] = this.lastBytes[i + nblen];
}
// add new bytes
for (let i = 1; i <= nblen; i++) {
this.lastBytes[lblen - i] = data[data.length - i];
}
}
/**
* Finds and removes message headers from the remaining body. We want to keep
* headers separated until final delivery to be able to modify them
*
* @param {Buffer} data Next chunk of data
* @return {Boolean} Returns true if headers were already parsed before this call, false otherwise
*/
checkHeaders(data) {
if (this.headersParsed) {
return true;
}
let lblen = this.lastBytes.length;
let headerPos = 0;
this.curLinePos = 0;
for (let i = 0, len = this.lastBytes.length + data.length; i < len; i++) {
let chr;
if (i < lblen) {
chr = this.lastBytes[i];
} else {
chr = data[i - lblen];
}
if (chr === 0x0a && i) {
let pr1 = i - 1 < lblen ? this.lastBytes[i - 1] : data[i - 1 - lblen];
let pr2 = i > 1 ? (i - 2 < lblen ? this.lastBytes[i - 2] : data[i - 2 - lblen]) : false;
if (pr1 === 0x0a) {
this.headersParsed = true;
headerPos = i - lblen + 1;
this.headerBytes += headerPos;
break;
} else if (pr1 === 0x0d && pr2 === 0x0a) {
this.headersParsed = true;
headerPos = i - lblen + 1;
this.headerBytes += headerPos;
break;
}
}
}
if (this.headersParsed) {
this.headerChunks.push(data.slice(0, headerPos));
this.rawHeaders = Buffer.concat(this.headerChunks, this.headerBytes);
this.headerChunks = null;
this.emit('headers', this.parseHeaders());
if (data.length - 1 > headerPos) {
let chunk = data.slice(headerPos);
this.bodySize += chunk.length;
// this would be the first chunk of data sent downstream
setImmediate(() => this.push(chunk));
}
return false;
} else {
this.headerBytes += data.length;
this.headerChunks.push(data);
}
// store last 4 bytes to catch header break
this.updateLastBytes(data);
return false;
}
_transform(chunk, encoding, callback) {
if (!chunk || !chunk.length) {
return callback();
}
if (typeof chunk === 'string') {
chunk = Buffer.from(chunk, encoding);
}
let headersFound;
try {
headersFound = this.checkHeaders(chunk);
} catch (E) {
return callback(E);
}
if (headersFound) {
this.bodySize += chunk.length;
this.push(chunk);
}
setImmediate(callback);
}
_flush(callback) {
if (this.headerChunks) {
let chunk = Buffer.concat(this.headerChunks, this.headerBytes);
this.bodySize += chunk.length;
this.push(chunk);
this.headerChunks = null;
}
callback();
}
parseHeaders() {
let lines = (this.rawHeaders || '').toString().split(/\r?\n/);
for (let i = lines.length - 1; i > 0; i--) {
if (/^\s/.test(lines[i])) {
lines[i - 1] += '\n' + lines[i];
lines.splice(i, 1);
}
}
return lines
.filter(line => line.trim())
.map(line => ({
key: line.substr(0, line.indexOf(':')).trim().toLowerCase(),
line
}));
}
}
module.exports = MessageParser;
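
A small sketch of MessageParser used on its own, assuming it is exported from ./message-parser; the sample message is made up:

const MessageParser = require('./message-parser'); // hypothetical path to the module above

const parser = new MessageParser();
parser.on('headers', headers => {
    // headers is an array of { key, line } objects with lowercased keys,
    // e.g. { key: 'subject', line: 'Subject: Hello' }
    console.log(headers.map(h => h.key)); // [ 'subject', 'from' ]
});

let body = [];
parser.on('data', chunk => body.push(chunk));
parser.on('end', () => console.log(Buffer.concat(body).toString())); // prints the message body

parser.end('Subject: Hello\r\nFrom: sender@example.com\r\n\r\nHello world\r\n');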

@@ -0,0 +1,154 @@
'use strict';
// streams through a message body and calculates relaxed body hash
const Transform = require('stream').Transform;
const crypto = require('crypto');
class RelaxedBody extends Transform {
constructor(options) {
super();
options = options || {};
this.chunkBuffer = [];
this.chunkBufferLen = 0;
this.bodyHash = crypto.createHash(options.hashAlgo || 'sha1');
this.remainder = '';
this.byteLength = 0;
this.debug = options.debug;
this._debugBody = options.debug ? [] : false;
}
updateHash(chunk) {
let bodyStr;
// find next remainder
let nextRemainder = '';
// This pass detects the whitespace at the end of the last line and the newline characters
// after the last non-empty line. If the next chunk does not continue this pattern,
// the previously held-back data is restored and processed as normal body content
let state = 'file';
for (let i = chunk.length - 1; i >= 0; i--) {
let c = chunk[i];
if (state === 'file' && (c === 0x0a || c === 0x0d)) {
// do nothing, found \n or \r at the end of the chunk, still end of file
} else if (state === 'file' && (c === 0x09 || c === 0x20)) {
// switch to line ending mode, this is the last non-empty line
state = 'line';
} else if (state === 'line' && (c === 0x09 || c === 0x20)) {
// do nothing, found ' ' or \t at the end of line, keep processing the last non-empty line
} else if (state === 'file' || state === 'line') {
// non line/file ending character found, switch to body mode
state = 'body';
if (i === chunk.length - 1) {
// final char is not part of line end or file end, so do nothing
break;
}
}
if (i === 0) {
// reached the beginning of the chunk; check whether it is still part of the ending
// and whether the remainder matches as well
if (
(state === 'file' && (!this.remainder || /[\r\n]$/.test(this.remainder))) ||
(state === 'line' && (!this.remainder || /[ \t]$/.test(this.remainder)))
) {
// keep everything
this.remainder += chunk.toString('binary');
return;
} else if (state === 'line' || state === 'file') {
// process existing remainder as normal line but store the current chunk
nextRemainder = chunk.toString('binary');
chunk = false;
break;
}
}
if (state !== 'body') {
continue;
}
// reached first non ending byte
nextRemainder = chunk.slice(i + 1).toString('binary');
chunk = chunk.slice(0, i + 1);
break;
}
let needsFixing = !!this.remainder;
if (chunk && !needsFixing) {
// check if we even need to change anything
for (let i = 0, len = chunk.length; i < len; i++) {
if (i && chunk[i] === 0x0a && chunk[i - 1] !== 0x0d) {
// missing \r before \n
needsFixing = true;
break;
} else if (i && chunk[i] === 0x0d && chunk[i - 1] === 0x20) {
// trailing WSP found
needsFixing = true;
break;
} else if (i && chunk[i] === 0x20 && chunk[i - 1] === 0x20) {
// multiple spaces found, needs to be replaced with just one
needsFixing = true;
break;
} else if (chunk[i] === 0x09) {
// TAB found, needs to be replaced with a space
needsFixing = true;
break;
}
}
}
if (needsFixing) {
bodyStr = this.remainder + (chunk ? chunk.toString('binary') : '');
this.remainder = nextRemainder;
bodyStr = bodyStr
.replace(/\r?\n/g, '\n') // normalize line endings to LF
.replace(/[ \t]*$/gm, '') // remove trailing whitespace from each line (rtrim)
.replace(/[ \t]+/gm, ' ') // collapse runs of whitespace into a single space
.replace(/\n/g, '\r\n'); // restore RFC 822 CRLF line endings
chunk = Buffer.from(bodyStr, 'binary');
} else if (nextRemainder) {
this.remainder = nextRemainder;
}
if (this.debug) {
this._debugBody.push(chunk);
}
this.bodyHash.update(chunk);
}
_transform(chunk, encoding, callback) {
if (!chunk || !chunk.length) {
return callback();
}
if (typeof chunk === 'string') {
chunk = Buffer.from(chunk, encoding);
}
this.updateHash(chunk);
this.byteLength += chunk.length;
this.push(chunk);
callback();
}
_flush(callback) {
// generate final hash and emit it
if (/[\r\n]$/.test(this.remainder) && this.byteLength > 2) {
// add terminating line end
this.bodyHash.update(Buffer.from('\r\n'));
}
if (!this.byteLength) {
// emit empty line buffer to keep the stream flowing
this.push(Buffer.from('\r\n'));
// this.bodyHash.update(Buffer.from('\r\n'));
}
this.emit('hash', this.bodyHash.digest('base64'), this.debug ? Buffer.concat(this._debugBody) : false);
callback();
}
}
module.exports = RelaxedBody;
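
A similar sketch for RelaxedBody, assuming it is exported from ./relaxed-body; the body text is made up:

const RelaxedBody = require('./relaxed-body'); // hypothetical path to the module above

const hasher = new RelaxedBody({ hashAlgo: 'sha256' });
hasher.on('hash', bodyHash => {
    // base64 encoded hash of the relaxed-canonicalized body, as used for the bh= tag
    console.log('bh=' + bodyHash);
});
hasher.resume(); // the readable side re-emits the original body; drain it so the stream finishes

// trailing whitespace, repeated spaces and bare LF line endings are normalized before hashing
hasher.end('Hello   world \t\r\nlast line\n');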

@@ -0,0 +1,117 @@
'use strict';
const punycode = require('../punycode');
const mimeFuncs = require('../mime-funcs');
const crypto = require('crypto');
/**
* Returns DKIM signature header line
*
* @param {Object} headers Parsed headers object from MessageParser
* @param {String} hashAlgo Hash algorithm used for the signature, e.g. 'sha256'
* @param {String} bodyHash Base64 encoded hash of the canonicalized message body
* @param {Object} options DKIM options
* @param {String} options.domainName Domain name to be signed for
* @param {String} options.keySelector DKIM key selector to use
* @param {String} options.privateKey DKIM private key to use
* @return {String|Boolean} Complete header line, or false if signing fails
*/
module.exports = (headers, hashAlgo, bodyHash, options) => {
options = options || {};
// all listed fields from RFC4871 #5.5
let defaultFieldNames =
'From:Sender:Reply-To:Subject:Date:Message-ID:To:' +
'Cc:MIME-Version:Content-Type:Content-Transfer-Encoding:Content-ID:' +
'Content-Description:Resent-Date:Resent-From:Resent-Sender:' +
'Resent-To:Resent-Cc:Resent-Message-ID:In-Reply-To:References:' +
'List-Id:List-Help:List-Unsubscribe:List-Subscribe:List-Post:' +
'List-Owner:List-Archive';
let fieldNames = options.headerFieldNames || defaultFieldNames;
let canonicalizedHeaderData = relaxedHeaders(headers, fieldNames, options.skipFields);
let dkimHeader = generateDKIMHeader(options.domainName, options.keySelector, canonicalizedHeaderData.fieldNames, hashAlgo, bodyHash);
let signer, signature;
canonicalizedHeaderData.headers += 'dkim-signature:' + relaxedHeaderLine(dkimHeader);
signer = crypto.createSign(('rsa-' + hashAlgo).toUpperCase());
signer.update(canonicalizedHeaderData.headers);
try {
signature = signer.sign(options.privateKey, 'base64');
} catch (E) {
return false;
}
return dkimHeader + signature.replace(/(^.{73}|.{75}(?!\r?\n|\r))/g, '$&\r\n ').trim();
};
module.exports.relaxedHeaders = relaxedHeaders;
function generateDKIMHeader(domainName, keySelector, fieldNames, hashAlgo, bodyHash) {
let dkim = [
'v=1',
'a=rsa-' + hashAlgo,
'c=relaxed/relaxed',
'd=' + punycode.toASCII(domainName),
'q=dns/txt',
's=' + keySelector,
'bh=' + bodyHash,
'h=' + fieldNames
].join('; ');
return mimeFuncs.foldLines('DKIM-Signature: ' + dkim, 76) + ';\r\n b=';
}
function relaxedHeaders(headers, fieldNames, skipFields) {
let includedFields = new Set();
let skip = new Set();
let headerFields = new Map();
(skipFields || '')
.toLowerCase()
.split(':')
.forEach(field => {
skip.add(field.trim());
});
(fieldNames || '')
.toLowerCase()
.split(':')
.filter(field => !skip.has(field.trim()))
.forEach(field => {
includedFields.add(field.trim());
});
for (let i = headers.length - 1; i >= 0; i--) {
let line = headers[i];
// walking from the bottom up, only the first match (the lowest instance) of each field is included
if (includedFields.has(line.key) && !headerFields.has(line.key)) {
headerFields.set(line.key, relaxedHeaderLine(line.line));
}
}
let headersList = [];
let fields = [];
includedFields.forEach(field => {
if (headerFields.has(field)) {
fields.push(field);
headersList.push(field + ':' + headerFields.get(field));
}
});
return {
headers: headersList.join('\r\n') + '\r\n',
fieldNames: fields.join(':')
};
}
function relaxedHeaderLine(line) {
return line
.substr(line.indexOf(':') + 1)
.replace(/\r?\n/g, '')
.replace(/\s+/g, ' ')
.trim();
}
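
Finally, a hedged sketch of calling sign() directly, mirroring what DKIMSigner.sendSignedOutput does; the headers array, body hash and key are placeholders:

const sign = require('./sign'); // hypothetical path to the module above

// shape matches what MessageParser.parseHeaders() emits with the 'headers' event
const headers = [
    { key: 'from', line: 'From: sender@example.com' },
    { key: 'to', line: 'To: recipient@example.net' },
    { key: 'subject', line: 'Subject: Hello' }
];

// placeholder: in the real flow this value comes from the RelaxedBody 'hash' event
const bodyHash = 'placeholder-base64-body-hash=';

const dkimField = sign(headers, 'sha256', bodyHash, {
    domainName: 'example.com',
    keySelector: 'default',
    privateKey: process.env.DKIM_PRIVATE_KEY // PEM encoded RSA private key
});

if (dkimField) {
    // e.g. "DKIM-Signature: v=1; a=rsa-sha256; c=relaxed/relaxed; d=example.com; ... b=..."
    console.log(dkimField);
} else {
    // sign() returns false when crypto signing throws, e.g. for a missing or malformed key
    console.log('signing failed');
}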