chore: remove old dkim files

This commit is contained in:
Saleel
2024-01-17 00:14:17 +05:30
parent ccdb8a2ea4
commit c708a2d8df
11 changed files with 0 additions and 2049 deletions

View File

@@ -1,14 +0,0 @@
import { SimpleHash } from './simple';
import { RelaxedHash } from './relaxed';

/**
 * Returns a body-hash calculator for the body part of the DKIM c= tag.
 * Only the token after the last "/" selects the body canonicalization;
 * a missing value defaults to "simple/simple".
 *
 * @param canonicalization raw c= tag value (e.g. "relaxed/relaxed")
 * @param options [hash algorithm, max body length from the l= tag]
 * @throws Error when the body canonicalization is not "simple" or "relaxed"
 */
export const dkimBody = (canonicalization: any, ...options: [string, number]) => {
    const bodyCanon = (canonicalization ?? 'simple/simple').toString().split('/').pop()?.toLowerCase().trim();
    if (bodyCanon === 'relaxed') {
        return new RelaxedHash(...options);
    }
    if (bodyCanon === 'simple') {
        return new SimpleHash(...options);
    }
    throw new Error('Unknown body canonicalization');
};

View File

@@ -1,298 +0,0 @@
import * as crypto from 'crypto';
// ASCII codes of the characters the relaxed canonicalizer inspects
const CHAR_CR = 0x0d; // carriage return "\r"
const CHAR_LF = 0x0a; // line feed "\n"
const CHAR_SPACE = 0x20; // space
const CHAR_TAB = 0x09; // horizontal tab
/**
* Class for calculating body hash of an email message body stream
* using the "relaxed" canonicalization
*
* @class
*/
export class RelaxedHash {
    // total number of raw bytes written to the stream (before canonicalization)
    byteLength: number;
    // number of canonicalized bytes that were actually fed into the hash
    bodyHashedBytes: number;
    // unprocessed tail of the previous chunk; false when empty
    private remainder: Buffer | boolean;
    private bodyHash: crypto.Hash;
    // byte limit from the l= tag; a negative value or NaN disables the limit
    private maxBodyLength: number;
    // set once the l= limit has been hit; all further input is ignored
    private maxSizeReached: boolean;
    // buffered CR/LF-only chunks — dropped if the message ends with them
    // (relaxed canonicalization ignores trailing empty lines)
    private emptyLinesQueue: Array<Buffer>;
    // copy of every canonicalized byte, for callers that need the raw body
    private fullBody: Buffer;
    /**
     * @param {String} [algorithm] Hashing algo, either "sha1" or "sha256"
     * @param {Number} [maxBodyLength] Allowed body length count, the value from the l= parameter
     */
    constructor(algorithm: string, maxBodyLength: number) {
        // accept combined values like "rsa-sha256" and keep only the hash part
        algorithm = algorithm?.split('-')?.pop()?.toLowerCase() || 'sha256';
        this.bodyHash = crypto.createHash(algorithm);
        this.remainder = false;
        this.byteLength = 0;
        this.bodyHashedBytes = 0;
        this.maxBodyLength = maxBodyLength;
        this.maxSizeReached = false;
        this.emptyLinesQueue = [];
        this.fullBody = Buffer.alloc(0);
    }
    // Feeds canonicalized bytes into the hash, honoring the l= byte limit.
    private updateBodyHash(chunk: Buffer) {
        if (this.maxSizeReached) {
            return;
        }
        // the following is needed for the l= option
        if (
            typeof this.maxBodyLength === 'number' &&
            !isNaN(this.maxBodyLength) &&
            this.maxBodyLength >= 0 &&
            this.bodyHashedBytes + chunk.length > this.maxBodyLength
        ) {
            this.maxSizeReached = true;
            if (this.bodyHashedBytes >= this.maxBodyLength) {
                // nothing to do here, skip entire chunk
                return;
            }
            // only use allowed size of bytes
            chunk = chunk.subarray(0, this.maxBodyLength - this.bodyHashedBytes);
        }
        this.bodyHashedBytes += chunk.length;
        this.bodyHash.update(chunk);
        this.fullBody = Buffer.concat([this.fullBody, Buffer.from(chunk)]);
        //process.stdout.write(chunk);
    }
    // Flushes buffered empty lines into the hash — called once we know they
    // were not at the end of the message after all.
    private drainPendingEmptyLines() {
        if (this.emptyLinesQueue.length) {
            for (let emptyLine of this.emptyLinesQueue) {
                this.updateBodyHash(emptyLine);
            }
            this.emptyLinesQueue = [];
        }
    }
    // Hashes a chunk, but holds back any trailing CR/LF bytes in case they
    // turn out to be the trailing empty lines of the message.
    private pushBodyHash(chunk: Buffer) {
        if (!chunk || !chunk.length) {
            return;
        }
        // remove line endings
        let foundNonLn = false;
        // buffer line endings and empty lines
        for (let i = chunk.length - 1; i >= 0; i--) {
            if (chunk[i] !== CHAR_LF && chunk[i] !== CHAR_CR) {
                // non-terminator byte found, so earlier buffered lines are real body
                this.drainPendingEmptyLines();
                if (i < chunk.length - 1) {
                    // queue the trailing CR/LF run, hash only up to the last content byte
                    this.emptyLinesQueue.push(chunk.subarray(i + 1));
                    chunk = chunk.subarray(0, i + 1);
                }
                foundNonLn = true;
                break;
            }
        }
        if (!foundNonLn) {
            // chunk was nothing but line terminators — keep it buffered
            this.emptyLinesQueue.push(chunk);
            return;
        }
        this.updateBodyHash(chunk);
    }
    // Canonicalizes a single (possibly folded) line: collapses WSP runs to a
    // single SP, strips trailing WSP, and ensures the line break is CRLF.
    // Works right-to-left so trailing whitespace can simply be skipped.
    fixLineBuffer(line: Buffer) {
        let resultLine = [];
        let nonWspFound = false;
        let prevWsp = false;
        for (let i = line.length - 1; i >= 0; i--) {
            if (line[i] === CHAR_LF) {
                resultLine.unshift(line[i]);
                if (i === 0 || line[i - 1] !== CHAR_CR) {
                    // add missing carriage return
                    resultLine.unshift(CHAR_CR);
                }
                continue;
            }
            if (line[i] === CHAR_CR) {
                resultLine.unshift(line[i]);
                continue;
            }
            if (line[i] === CHAR_SPACE || line[i] === CHAR_TAB) {
                if (nonWspFound) {
                    // inner whitespace — remember it, emit a single SP later
                    prevWsp = true;
                }
                // trailing whitespace (nonWspFound still false) is dropped
                continue;
            }
            if (prevWsp) {
                // collapse the pending WSP run into one space
                resultLine.unshift(CHAR_SPACE);
                prevWsp = false;
            }
            nonWspFound = true;
            resultLine.unshift(line[i]);
        }
        if (prevWsp && nonWspFound) {
            // line started with whitespace followed by content
            resultLine.unshift(CHAR_SPACE);
        }
        return Buffer.from(resultLine);
    }
    /**
     * Streams a chunk of the message body through the canonicalizer.
     *
     * @param chunk next chunk of the body, or null on the final flush
     * @param final true when this is the last call (flush buffered state)
     */
    update(chunk: Buffer | null, final: boolean) {
        this.byteLength += (chunk && chunk.length) || 0;
        if (this.maxSizeReached) {
            return;
        }
        // Canonicalize content by applying a and b in order:
        // a.1. Ignore all whitespace at the end of lines.
        // a.2. Reduce all sequences of WSP within a line to a single SP character.
        // b.1. Ignore all empty lines at the end of the message body.
        // b.2. If the body is non-empty but does not end with a CRLF, a CRLF is added.
        let lineEndPos = -1;
        let lineNeedsFixing = false;
        let cursorPos = 0;
        if (this.remainder && this.remainder instanceof Buffer && this.remainder.length) {
            if (chunk) {
                // concatting chunks might be bad for performance :S
                chunk = Buffer.concat([this.remainder, chunk]);
            } else {
                chunk = this.remainder;
            }
            this.remainder = false;
        }
        if (chunk && chunk.length) {
            for (let pos = 0; pos < chunk.length; pos++) {
                switch (chunk[pos]) {
                    case CHAR_LF:
                        if (
                            !lineNeedsFixing &&
                            // previous character is not <CR>
                            ((pos >= 1 && chunk[pos - 1] !== CHAR_CR) ||
                                // LF is the first byte on the line
                                pos === 0 ||
                                // there's a space before line break
                                (pos >= 2 && chunk[pos - 1] === CHAR_CR && chunk[pos - 2] === CHAR_SPACE))
                        ) {
                            lineNeedsFixing = true;
                        }
                        // line break
                        if (lineNeedsFixing) {
                            // emit pending bytes up to the last line break before current line
                            if (lineEndPos >= 0 && lineEndPos >= cursorPos) {
                                let chunkPart = chunk.subarray(cursorPos, lineEndPos + 1);
                                this.pushBodyHash(chunkPart);
                            }
                            let line = chunk.subarray(lineEndPos + 1, pos + 1);
                            this.pushBodyHash(this.fixLineBuffer(line));
                            lineNeedsFixing = false;
                            // move cursor to the start of next line
                            cursorPos = pos + 1;
                        }
                        lineEndPos = pos;
                        break;
                    case CHAR_SPACE:
                        // two spaces in a row must be collapsed later
                        if (!lineNeedsFixing && pos && chunk[pos - 1] === CHAR_SPACE) {
                            lineNeedsFixing = true;
                        }
                        break;
                    case CHAR_TAB:
                        // non-space WSP always needs replacing
                        lineNeedsFixing = true;
                        break;
                    default:
                }
            }
        }
        if (chunk && cursorPos < chunk.length && cursorPos !== lineEndPos) {
            // emit data from chunk
            let chunkPart = chunk.subarray(cursorPos, lineEndPos + 1);
            if (chunkPart.length) {
                this.pushBodyHash(lineNeedsFixing ? this.fixLineBuffer(chunkPart) : chunkPart);
                lineNeedsFixing = false;
            }
            cursorPos = lineEndPos + 1;
        }
        if (chunk && !final && cursorPos < chunk.length) {
            // keep the incomplete last line for the next call
            this.remainder = chunk.subarray(cursorPos);
        }
        if (final) {
            let chunkPart = (cursorPos && chunk && chunk.subarray(cursorPos)) || chunk;
            if (chunkPart && chunkPart.length) {
                this.pushBodyHash(lineNeedsFixing ? this.fixLineBuffer(chunkPart) : chunkPart);
                lineNeedsFixing = false;
            }
            if (this.bodyHashedBytes) {
                // terminating line break for non-empty messages
                this.updateBodyHash(Buffer.from([CHAR_CR, CHAR_LF]));
            }
        }
    }
    /**
     * Flushes remaining input and returns the final body hash.
     *
     * @param encoding output encoding, e.g. "base64" or "hex"
     */
    digest(encoding: crypto.BinaryToTextEncoding) {
        this.update(null, true);
        // finalize
        return this.bodyHash.digest(encoding);
    }
}
/*
let fs = require('fs');
const getBody = message => {
message = message.toString('binary');
let match = message.match(/\r?\n\r?\n/);
if (match) {
message = message.substr(match.index + match[0].length);
}
return Buffer.from(message, 'binary');
};
let s = fs.readFileSync(process.argv[2]);
let k = new RelaxedHash('rsa-sha256', -1);
for (let byte of getBody(s)) {
k.update(Buffer.from([byte]));
}
console.error(k.digest('base64'));
console.error(k.byteLength, k.bodyHashedBytes);
*/

View File

@@ -1,107 +0,0 @@
import * as crypto from 'crypto';
/**
* Class for calculating body hash of an email message body stream
* using the "simple" canonicalization
*
* @class
*/
/**
 * Computes the DKIM body hash of a message body stream using the
 * "simple" canonicalization: the body is hashed as-is, except that
 * trailing empty lines are ignored and the body always ends with CRLF.
 *
 * @class
 */
export class SimpleHash {
    // total number of bytes written to the stream
    byteLength: number;
    // number of bytes actually included in the body hash
    bodyHashedBytes: number;
    // buffered trailing CR/LF-only chunks — dropped if the message ends here
    private remainder: Buffer[];
    private bodyHash: crypto.Hash;
    // byte limit from the l= tag; a negative value or NaN disables the limit
    private maxBodyLength: number;
    // copy of every hashed byte, for callers that need the raw body
    private fullBody: Buffer;
    private lastNewline: boolean;
    /**
     * @param {String} [algorithm] Hashing algo, either "sha1" or "sha256"
     * @param {Number} [maxBodyLength] Allowed body length count, the value from the l= parameter
     */
    constructor(algorithm: string, maxBodyLength: number) {
        // accept combined values like "rsa-sha256" and keep only the hash part
        const hashName = algorithm?.split('-')?.pop() || 'sha256';
        this.bodyHash = crypto.createHash(hashName);
        this.remainder = [];
        this.byteLength = 0;
        this.bodyHashedBytes = 0;
        this.maxBodyLength = maxBodyLength;
        this.lastNewline = false;
        this.fullBody = Buffer.alloc(0);
    }
    // Feeds bytes into the hash, honoring the l= byte limit.
    private updateBodyHash(chunk: Buffer) {
        const limit = this.maxBodyLength;
        // the following is needed for the l= option
        if (typeof limit === 'number' && !isNaN(limit) && limit >= 0 && this.bodyHashedBytes + chunk.length > limit) {
            if (this.bodyHashedBytes >= limit) {
                // limit already reached — ignore this chunk entirely
                return;
            }
            // truncate to the number of bytes still allowed
            chunk = chunk.subarray(0, limit - this.bodyHashedBytes);
        }
        this.bodyHashedBytes += chunk.length;
        this.bodyHash.update(chunk);
        this.fullBody = Buffer.concat([this.fullBody, chunk]);
    }
    /**
     * Streams a chunk of the message body into the hash. Trailing CR/LF
     * bytes are buffered rather than hashed, because they may turn out to
     * be the (ignored) trailing empty lines of the message.
     */
    update(chunk: Buffer) {
        if (this.remainder.length && chunk.some(b => b !== 0x0a && b !== 0x0d)) {
            // the buffered terminators were not at the end of the message
            // after all — flush them before the new content
            for (const pending of this.remainder) {
                this.updateBodyHash(pending);
            }
            this.remainder = [];
        }
        // find where the trailing run of line terminators starts
        let cut = chunk.length;
        while (cut > 0 && (chunk[cut - 1] === 0x0a || chunk[cut - 1] === 0x0d)) {
            cut--;
        }
        if (chunk.length && cut === 0) {
            // nothing but newlines in this chunk — keep it all buffered
            this.remainder.push(chunk);
            return;
        }
        if (cut < chunk.length) {
            // buffer the trailing terminators, hash the rest
            this.remainder.push(chunk.subarray(cut));
            chunk = chunk.subarray(0, cut);
        }
        this.updateBodyHash(chunk);
        this.lastNewline = chunk[chunk.length - 1] === 0x0a;
    }
    /**
     * Finalizes and returns the body hash.
     *
     * @param encoding output encoding, e.g. "base64" or "hex"
     */
    digest(encoding: crypto.BinaryToTextEncoding) {
        if (!this.lastNewline || !this.bodyHashedBytes) {
            // a non-terminated (or completely empty) body still ends with CRLF
            this.updateBodyHash(Buffer.from('\r\n'));
        }
        return this.bodyHash.digest(encoding);
    }
}

View File

@@ -1,354 +0,0 @@
// Detect whether we are running under Node.js (process.versions.node is
// defined). LOCAL selects the signature verification call used later in
// this file: crypto.verify when true, crypto.createVerify otherwise.
let isNode = false;
if (typeof process === 'object' && typeof process.versions === 'object' && typeof process.versions.node !== 'undefined') {
    isNode = true;
}
const LOCAL = isNode;
// @ts-ignore
import addressparser from "addressparser";
import { getSigningHeaderLines, getPublicKey, parseDkimHeaders, formatAuthHeaderRow, getAlignment } from "./tools";
import { MessageParser } from "./message-parser";
import { dkimBody } from "./body";
import { generateCanonicalizedHeader } from "./header";
import * as crypto from "crypto";
import { ParseDkimHeaders, ParsedHeaders } from "./index";
/**
 * Parses an email stream, extracts DKIM-Signature headers, computes the
 * required body hashes and verifies each signature against the signer's
 * public key (fetched via the configured resolver).
 */
export class DkimVerifier extends MessageParser {
    envelopeFrom: string | boolean;
    headerFrom: string[];
    // one verification result object per usable DKIM-Signature header
    results: { [key: string]: any }[];
    private options: Record<string, any>;
    private resolver: (...args: [name: string, type: string]) => Promise<any>;
    private minBitLength: number;
    private signatureHeaders: ParseDkimHeaders[] & { [key: string]: any }[];
    // body-hash calculators keyed by "<canon>:<hashAlgo>:<maxLength>"
    private bodyHashes: Map<string, any>;
    private arc: { chain: false };
    private seal: { bodyHash: string; };
    private sealBodyHashKey: string = '';

    /**
     * @param options verifier options: resolver, minBitLength, sender, seal
     */
    constructor(options: Record<string, any>) {
        super();
        this.options = options || {};
        this.resolver = this.options.resolver;
        this.minBitLength = this.options.minBitLength;
        this.results = [];
        this.signatureHeaders = [] as any;
        this.bodyHashes = new Map();
        this.headerFrom = [];
        this.envelopeFrom = false;
        // ARC verification info
        this.arc = { chain: false };
        // should we also seal this message using ARC
        this.seal = this.options.seal;
        if (this.seal) {
            // calculate body hash for the seal
            let bodyCanon = "relaxed";
            let hashAlgo = "sha256";
            this.sealBodyHashKey = `${bodyCanon}:${hashAlgo}:`;
            this.bodyHashes.set(this.sealBodyHashKey, dkimBody(bodyCanon, hashAlgo, 0));
        }
    }

    /**
     * Called once the header section has been parsed: collects the
     * DKIM-Signature headers, the From/Return-Path addresses, validates each
     * signature's tags and registers the body-hash calculators they need.
     */
    async messageHeaders(headers: ParsedHeaders) {
        this.headers = headers;
        this.signatureHeaders = headers.parsed
            .filter((h) => h.key === "dkim-signature")
            .map((h) => {
                const value: ParseDkimHeaders & { [key: string]: any } = parseDkimHeaders(h.line);
                value.type = "DKIM";
                return value;
            });
        let fromHeaders = headers?.parsed?.filter((h) => h.key === "from");
        for (const fromHeader of fromHeaders) {
            // strip the "From:" prefix and parse out every address
            let fromHeaderString = fromHeader.line.toString();
            let splitterPos = fromHeaderString.indexOf(":");
            if (splitterPos >= 0) {
                fromHeaderString = fromHeaderString.substr(splitterPos + 1);
            }
            let from = addressparser(fromHeaderString.trim());
            for (let addr of from) {
                if (addr && addr.address) {
                    this.headerFrom.push(addr.address);
                }
            }
        }
        if (this.options.sender) {
            // explicit envelope sender overrides the Return-Path header
            let returnPath = addressparser(this.options.sender);
            this.envelopeFrom = returnPath.length && returnPath[0].address ? returnPath[0].address : false;
        } else {
            let returnPathHeader = headers.parsed.filter((h) => h.key === "return-path").pop();
            if (returnPathHeader) {
                let returnPathHeaderString = returnPathHeader.line.toString();
                let splitterPos = returnPathHeaderString.indexOf(":");
                if (splitterPos >= 0) {
                    returnPathHeaderString = returnPathHeaderString.substr(splitterPos + 1);
                }
                let returnPath = addressparser(returnPathHeaderString.trim());
                this.envelopeFrom = returnPath.length && returnPath[0].address ? returnPath[0].address : false;
            }
        }
        for (let signatureHeader of this.signatureHeaders) {
            // a= is "<signAlgo>-<hashAlgo>", e.g. "rsa-sha256"
            signatureHeader.algorithm = signatureHeader.parsed?.a?.value || "";
            signatureHeader.signAlgo = signatureHeader.algorithm.split("-").shift().toLowerCase().trim();
            signatureHeader.hashAlgo = signatureHeader.algorithm.split("-").pop().toLowerCase().trim();
            // c= is "<headerCanon>/<bodyCanon>", both default to "simple"
            signatureHeader.canonicalization = signatureHeader.parsed?.c?.value || "";
            signatureHeader.headerCanon = signatureHeader.canonicalization.split("/").shift().toLowerCase().trim() || "simple";
            // if body canonicalization is not set, then defaults to 'simple'
            signatureHeader.bodyCanon = (signatureHeader.canonicalization.split("/")[1] || "simple").toLowerCase().trim();
            signatureHeader.signingDomain = signatureHeader.parsed?.d?.value || "";
            signatureHeader.selector = signatureHeader.parsed?.s?.value || "";
            signatureHeader.maxBodyLength = signatureHeader.parsed?.l?.value && !isNaN(signatureHeader.parsed?.l?.value) ? signatureHeader.parsed?.l?.value : "";
            const validSignAlgo = ["rsa", "ed25519"];
            const validHeaderAlgo = signatureHeader.type === "DKIM" ? ["sha256", "sha1"] : ["sha256"];
            const validHeaderCanon = signatureHeader.type !== "AS" ? ["relaxed", "simple"] : ["relaxed"];
            const validBodyCanon = signatureHeader.type !== "AS" ? ["relaxed", "simple"] : ["relaxed"];
            if (
                !validSignAlgo.includes(signatureHeader.signAlgo) ||
                !validHeaderAlgo.includes(signatureHeader.hashAlgo) ||
                !validHeaderCanon.includes(signatureHeader.headerCanon) ||
                !validBodyCanon.includes(signatureHeader.bodyCanon) ||
                !signatureHeader.signingDomain ||
                !signatureHeader.selector
            ) {
                // malformed or unsupported signature — skip it during verification
                signatureHeader.skip = true;
                continue;
            }
            // share one body-hash calculator between signatures with identical settings
            signatureHeader.bodyHashKey = [signatureHeader.bodyCanon, signatureHeader.hashAlgo, signatureHeader.maxBodyLength].join(":");
            if (!this.bodyHashes.has(signatureHeader.bodyHashKey)) {
                this.bodyHashes.set(signatureHeader.bodyHashKey, dkimBody(signatureHeader.bodyCanon, signatureHeader.hashAlgo, signatureHeader.maxBodyLength));
            }
        }
    }

    // Feeds every body chunk into all registered body-hash calculators.
    async nextChunk(chunk: Buffer) {
        for (let bodyHash of this.bodyHashes.values()) {
            bodyHash.update(chunk);
        }
    }

    /**
     * Called at end of stream: finalizes all body hashes and verifies every
     * non-skipped signature, pushing one result object per signature. Always
     * produces at least one result ("none" when the message is unsigned).
     */
    async finalChunk() {
        try {
            if (!this.headers || !this.bodyHashes.size) {
                return;
            }
            // convert bodyHashes from hash objects to base64 strings
            for (let [key, bodyHash] of this.bodyHashes.entries()) {
                this.bodyHashes.get(key).hash = bodyHash.digest("base64");
            }
            for (let signatureHeader of this.signatureHeaders) {
                if (signatureHeader.skip) {
                    // TODO: add failing header line?
                    continue;
                }
                let signingHeaderLines = getSigningHeaderLines((this.headers as { parsed: { key: string | null; casedKey: string | undefined; line: Buffer; }[]; original: Buffer; }).parsed, signatureHeader.parsed?.h?.value, true);
                let { canonicalizedHeader } = generateCanonicalizedHeader(signatureHeader.type, signingHeaderLines as any, {
                    signatureHeaderLine: signatureHeader.original as string,
                    canonicalization: signatureHeader.canonicalization,
                    instance: ["ARC", "AS"].includes(signatureHeader.type) ? signatureHeader.parsed?.i?.value : false,
                });
                let signingHeaders = {
                    keys: signingHeaderLines.keys,
                    headers: signingHeaderLines.headers.map((l) => l.line.toString()),
                };
                let publicKey, rr, modulusLength;
                let status: { [key: string]: any } = {
                    result: "neutral",
                    comment: false,
                    // ptype properties
                    header: {
                        // signing domain
                        i: signatureHeader.signingDomain ? `@${signatureHeader.signingDomain}` : false,
                        // dkim selector
                        s: signatureHeader.selector,
                        // algo
                        a: signatureHeader.parsed?.a?.value,
                        // signature value
                        b: signatureHeader.parsed?.b?.value ? `${signatureHeader.parsed?.b?.value.substr(0, 8)}` : false,
                    },
                };
                if (signatureHeader.type === "DKIM" && this.headerFrom?.length) {
                    // FIX: alignment compares the *domain* of the From address with
                    // the signing domain. The original parenthesization
                    // `this.headerFrom[0] ?? ''.split("@")?.pop()` applied split()
                    // to the empty string, passing the full address through.
                    status.aligned = getAlignment((this.headerFrom[0] ?? "").split("@").pop(), [signatureHeader.signingDomain]);
                }
                let bodyHashObj = this.bodyHashes.get(signatureHeader.bodyHashKey);
                let bodyHash = bodyHashObj?.hash;
                if (signatureHeader.parsed?.bh?.value !== bodyHash) {
                    status.result = "neutral";
                    status.comment = `body hash did not verify`;
                } else {
                    try {
                        let res = await getPublicKey(signatureHeader.type, `${signatureHeader.selector}._domainkey.${signatureHeader.signingDomain}`, this.minBitLength, this.resolver);
                        publicKey = res?.publicKey;
                        rr = res?.rr;
                        modulusLength = res?.modulusLength;
                        try {
                            let ver_result = false;
                            if (LOCAL) {
                                // Node: one-shot verify; null algorithm lets ed25519
                                // infer the digest from the key
                                ver_result = crypto.verify(
                                    signatureHeader.signAlgo === "rsa" ? signatureHeader.algorithm : null,
                                    canonicalizedHeader,
                                    publicKey,
                                    Buffer.from(signatureHeader.parsed?.b?.value, "base64")
                                );
                            } else {
                                // NOTE(review): this fallback path hardcodes RSA-SHA256
                                // regardless of the signature's a= tag — confirm intended
                                let ver = crypto.createVerify("RSA-SHA256");
                                ver.update(canonicalizedHeader);
                                ver_result = ver.verify({ key: publicKey.toString(), format: "pem" }, Buffer.from(signatureHeader.parsed?.b?.value, "base64"));
                            }
                            status.signature_header = canonicalizedHeader;
                            status.signature_value = signatureHeader.parsed?.b?.value;
                            status.result = ver_result ? "pass" : "fail";
                            if (status?.result === "fail") {
                                status.comment = "bad signature";
                            }
                        } catch (err: any) {
                            status.comment = err.message;
                            status.result = "neutral";
                        }
                    } catch (err: any) {
                        if (err.rr) {
                            rr = err.rr;
                        }
                        // map key-lookup failures to auth result codes
                        switch (err.code) {
                            case "ENOTFOUND":
                            case "ENODATA":
                                status.result = "neutral";
                                status.comment = `no key`;
                                break;
                            case "EINVALIDVER":
                                status.result = "neutral";
                                status.comment = `unknown key version`;
                                break;
                            case "EINVALIDTYPE":
                                status.result = "neutral";
                                status.comment = `unknown key type`;
                                break;
                            case "EINVALIDVAL":
                                status.result = "neutral";
                                status.comment = `invalid public key`;
                                break;
                            case "ESHORTKEY":
                                status.result = "policy";
                                if (!status.policy) {
                                    status.policy = {};
                                }
                                status.policy["dkim-rules"] = `weak-key`;
                                break;
                            default:
                                status.result = "temperror";
                                status.comment = `DNS failure: ${err.code || err.message}`;
                        }
                    }
                }
                signatureHeader.bodyHashedBytes = this.bodyHashes.get(signatureHeader.bodyHashKey)?.bodyHashedBytes;
                if (typeof signatureHeader.maxBodyLength === "number" && signatureHeader.maxBodyLength !== signatureHeader.bodyHashedBytes) {
                    status.result = "fail";
                    status.comment = `invalid body length ${signatureHeader.bodyHashedBytes}`;
                }
                let result: { [key: string]: any } = {
                    signingDomain: signatureHeader.signingDomain,
                    selector: signatureHeader.selector,
                    signature: signatureHeader.parsed?.b?.value,
                    algo: signatureHeader.parsed?.a?.value,
                    format: signatureHeader.parsed?.c?.value,
                    bodyHash,
                    bodyHashExpecting: signatureHeader.parsed?.bh?.value,
                    body: bodyHashObj?.fullBody,
                    signingHeaders,
                    status,
                };
                if (typeof signatureHeader.bodyHashedBytes === "number") {
                    result.canonBodyLength = signatureHeader.bodyHashedBytes;
                }
                if (typeof signatureHeader.maxBodyLength === "number") {
                    result.bodyLengthCount = signatureHeader.maxBodyLength;
                }
                if (publicKey) {
                    result.publicKey = publicKey.toString();
                }
                if (modulusLength) {
                    result.modulusLength = modulusLength;
                }
                if (rr) {
                    result.rr = rr;
                }
                if (typeof result.status.comment === "boolean") {
                    // comment was never set — drop the placeholder
                    delete result.status.comment;
                }
                switch (signatureHeader.type) {
                    case "ARC":
                        throw Error("ARC not possible");
                    case "DKIM":
                    default:
                        this.results.push(result);
                        break;
                }
            }
        } finally {
            if (!this.results.length) {
                this.results.push({
                    status: {
                        result: "none",
                        comment: "message not signed",
                    },
                });
            }
            this.results.forEach((result) => {
                result.info = formatAuthHeaderRow("dkim", result.status);
            });
        }
        if (this.seal && this.bodyHashes.has(this.sealBodyHashKey) && typeof this.bodyHashes.get(this.sealBodyHashKey)?.hash === "string") {
            this.seal.bodyHash = this.bodyHashes.get(this.sealBodyHashKey).hash;
        }
    }
}

View File

@@ -1,16 +0,0 @@
import { Options, SignatureType, SigningHeaderLines } from '../index';
import { relaxedHeaders } from './relaxed';
import { simpleHeaders } from './simple';
/**
 * Dispatches header canonicalization to the implementation selected by
 * the c= tag. Only the token before the "/" matters for headers; a
 * missing value defaults to "simple/simple".
 *
 * @throws Error when the header canonicalization is not "simple" or "relaxed"
 */
export const generateCanonicalizedHeader = (type: SignatureType, signingHeaderLines: SigningHeaderLines, options: Options) => {
    options = options || {};
    const headerCanon = (options.canonicalization || 'simple/simple').toString()?.split('/')?.shift()?.toLowerCase().trim();
    if (headerCanon === 'relaxed') {
        return relaxedHeaders(type, signingHeaderLines, options);
    }
    if (headerCanon === 'simple') {
        return simpleHeaders(type, signingHeaderLines, options);
    }
    throw new Error('Unknown header canonicalization');
};

View File

@@ -1,70 +0,0 @@
import { Options, SignatureType, SigningHeaderLines } from '../index';
import { formatSignatureHeaderLine, formatRelaxedLine } from '../tools';
// generate headers for signing
// generate headers for signing
// Builds the "relaxed" canonicalized header block: each signed header line is
// canonicalized, then the DKIM-Signature line itself (with the b= value
// blanked out) is appended. When no signatureHeaderLine is supplied, a fresh
// one is generated from the individual tag values in options.
export const relaxedHeaders = (type: SignatureType, signingHeaderLines: SigningHeaderLines, options: Options) => {
    let { signatureHeaderLine, signingDomain, selector, algorithm, canonicalization, bodyHash, signTime, signature, instance, bodyHashedBytes } = options || {};
    let chunks = [];
    for (let signedHeaderLine of signingHeaderLines.headers) {
        chunks.push(formatRelaxedLine(signedHeaderLine.line, '\r\n'));
    }
    // opts stays false when an existing signature header line is reused
    let opts: boolean | Record<string, unknown> = false;
    if (!signatureHeaderLine) {
        opts = {
            a: algorithm,
            c: canonicalization,
            s: selector,
            d: signingDomain,
            h: signingHeaderLines.keys,
            bh: bodyHash
        };
        if (typeof bodyHashedBytes === 'number') {
            // l= tag: number of canonicalized body bytes covered by the hash
            opts.l = bodyHashedBytes;
        }
        if (instance) {
            // ARC only
            opts.i = instance;
        }
        if (signTime) {
            if (typeof signTime === 'string' || typeof signTime === 'number') {
                signTime = new Date(signTime);
            }
            if (Object.prototype.toString.call(signTime) === '[object Date]' && signTime.toString() !== 'Invalid Date') {
                // we need a unix timestamp value
                signTime = Math.round(signTime.getTime() / 1000);
                opts.t = signTime;
            }
        }
        signatureHeaderLine = formatSignatureHeaderLine(
            type,
            Object.assign(
                {
                    // make sure that b= always has a value, otherwise folding would be different
                    b: signature || 'a'.repeat(73)
                },
                opts
            ) as Record<string, string | boolean>,
            true
        );
    }
    chunks.push(
        Buffer.from(
            formatRelaxedLine(signatureHeaderLine)
                .toString('binary')
                // remove value from b= key
                .replace(/([;:\s]+b=)[^;]+/, '$1'),
            'binary'
        )
    );
    return { canonicalizedHeader: Buffer.concat(chunks), signatureHeaderLine, dkimHeaderOpts: opts };
};

View File

@@ -1,72 +0,0 @@
import { Options, SignatureType, SigningHeaderLines } from '../index';
import { formatSignatureHeaderLine } from '../tools';
const formatSimpleLine = (line: Buffer | string, suffix?: string) => Buffer.from(line.toString('binary') + (suffix ? suffix : ''), 'binary');
// generate headers for signing
// generate headers for signing
// Builds the "simple" canonicalized header block: each signed header line is
// used verbatim, then the DKIM-Signature line itself (with the b= value
// blanked out) is appended. When no signatureHeaderLine is supplied, a fresh
// one is generated from the individual tag values in options.
export const simpleHeaders = (type: SignatureType, signingHeaderLines: SigningHeaderLines, options: Options) => {
    let { signatureHeaderLine, signingDomain, selector, algorithm, canonicalization, bodyHash, signTime, signature, instance, bodyHashedBytes } = options || {};
    let chunks = [];
    for (let signedHeaderLine of signingHeaderLines.headers) {
        chunks.push(formatSimpleLine(signedHeaderLine.line, '\r\n'));
    }
    // opts stays false when an existing signature header line is reused
    let opts: boolean | Record<string, any> = false;
    if (!signatureHeaderLine) {
        opts = {
            a: algorithm,
            c: canonicalization,
            s: selector,
            d: signingDomain,
            h: signingHeaderLines.keys,
            bh: bodyHash
        };
        if (typeof bodyHashedBytes === 'number') {
            // l= tag: number of canonicalized body bytes covered by the hash
            opts.l = bodyHashedBytes;
        }
        if (instance) {
            // ARC only (should never happen though as simple algo is not allowed)
            opts.i = instance;
        }
        if (signTime) {
            if (typeof signTime === 'string' || typeof signTime === 'number') {
                signTime = new Date(signTime);
            }
            if (Object.prototype.toString.call(signTime) === '[object Date]' && signTime.toString() !== 'Invalid Date') {
                // we need a unix timestamp value
                signTime = Math.round(signTime.getTime() / 1000);
                opts.t = signTime;
            }
        }
        signatureHeaderLine = formatSignatureHeaderLine(
            type,
            Object.assign(
                {
                    // make sure that b= has a value, otherwise folding would be different
                    b: signature || 'a'.repeat(73)
                },
                opts
            ) as Record<string, string | boolean>,
            true
        );
    }
    chunks.push(
        Buffer.from(
            formatSimpleLine(signatureHeaderLine)
                .toString('binary')
                // remove value from b= key
                .replace(/([;:\s]+b=)[^;]+/, '$1'),
            'binary'
        )
    );
    return { canonicalizedHeader: Buffer.concat(chunks), signatureHeaderLine, dkimHeaderOpts: opts };
};

View File

@@ -1,91 +0,0 @@
import { pki } from "node-forge";
import { DkimVerifier } from "./dkim-verifier";
import { getSigningHeaderLines, parseDkimHeaders, parseHeaders, writeToStream } from "./tools";
export const dkimVerify = async (input: Buffer, options: any = {}) => {
let dkimVerifier = new DkimVerifier(options);
await writeToStream(dkimVerifier, input as any);
const result = {
//headers: dkimVerifier.headers,
headerFrom: dkimVerifier.headerFrom,
envelopeFrom: dkimVerifier.envelopeFrom,
results: dkimVerifier.results,
};
if (dkimVerifier.headers) {
Object.defineProperty(result, "headers", {
enumerable: false,
configurable: false,
writable: false,
value: dkimVerifier.headers,
});
}
return result;
};
export type DKIMVerificationResult = {
signature: bigint;
message: Buffer;
body: Buffer;
bodyHash: string;
publicKey: bigint;
}
/**
 * Verifies the first DKIM signature of a raw email and converts the result
 * into bigint form.
 *
 * @param email full raw message (headers + body)
 * @returns signature and RSA modulus as bigints, the canonicalized signed
 *          header block ("message"), the canonicalized body and its hash
 * @throws Error when no DKIM result was produced, the verifier reported an
 *         error, or no public key could be retrieved
 */
export async function verifyDKIMSignature(email: Buffer): Promise<DKIMVerificationResult> {
    const result = await dkimVerify(email);
    if (!result.results[0]) {
        // FIX: interpolating the object directly printed "[object Object]"
        throw new Error(`No result found on dkim output ${JSON.stringify(result)}`);
    }
    const { publicKey, signature, status, body, bodyHash } = result.results[0];
    if (!publicKey) {
        // FIX: the verifier stores its diagnostics in status.comment
        // (status.message is never set), so check both instead of always
        // falling through to the generic error
        if (status?.comment || status?.message) {
            throw new Error(status.comment || status.message);
        }
        // FIX: Error() takes a single message argument — the extra object
        // argument was silently ignored, so include the status in the message
        throw new Error(`No public key found on DKIM verification result: ${JSON.stringify(status)}`);
    }
    const signatureBigInt = BigInt("0x" + Buffer.from(signature, "base64").toString("hex"));
    const pubKeyData = pki.publicKeyFromPem(publicKey.toString());
    return {
        signature: signatureBigInt,
        message: status.signature_header,
        body,
        bodyHash,
        publicKey: BigInt(pubKeyData.n.toString()),
    };
}
export type SignatureType = 'DKIM' | 'ARC' | 'AS';
export type ParsedHeaders = ReturnType<typeof parseHeaders>;
export type Parsed = ParsedHeaders['parsed'][0];
export type ParseDkimHeaders = ReturnType<typeof parseDkimHeaders>
export type SigningHeaderLines = ReturnType<typeof getSigningHeaderLines>
export interface Options {
signatureHeaderLine: string;
signingDomain?: string;
selector?: string;
algorithm?: string;
canonicalization: string;
bodyHash?: string;
signTime?: string | number | Date;
signature?: string;
instance: string | boolean;
bodyHashedBytes?: string;
}
// export dkim functions
export * from "./dkim-verifier";
export * from "./message-parser";
export * from "./parse-dkim-headers";
export * from "./tools";

View File

@@ -1,152 +0,0 @@
// Calculates relaxed body hash for a message body stream
import { ParsedHeaders } from './index';
import { parseHeaders } from './tools';
import { Writable, WritableOptions } from 'stream';
/**
* Class for separating header from body
*
* @class
* @extends Writable
*/
export class MessageParser extends Writable {
    // total number of bytes written (after CRLF normalization)
    byteLength: number;
    // parsed header section once found; false while still reading headers
    headers: ParsedHeaders | boolean;
    // 'header' until the blank line separating headers from body is seen
    private state: string;
    // sliding window of the last few bytes, used to spot the header/body split
    private stateBytes: unknown[];
    private headerChunks: Buffer[];
    // last non-LF byte seen by ensureLinebreaks (to detect bare "\n")
    private lastByte: number = 0;
    constructor(options?: WritableOptions) {
        super(options);
        this.byteLength = 0;
        this.state = 'header';
        this.stateBytes = [];
        this.headers = false;
        this.headerChunks = [];
    }
    // Receives each body chunk once the header section has been consumed.
    async nextChunk(...args: any) {
        // Override in child class
    }
    // Called once at end of stream, before (possibly) parsing headers.
    async finalChunk(...args: any) {
        // Override in child class
    }
    // Called once with the parsed header section.
    async messageHeaders(headers: ParsedHeaders) {
        // Override in child class
    }
    // Routes a chunk either into the header accumulator or to nextChunk(),
    // detecting the blank line ("\n\n" or "\r\n\r\n"... ending) in between.
    async processChunk(chunk: Buffer) {
        if (!chunk || !chunk.length) {
            return;
        }
        if (this.state === 'header') {
            // wait until we have found body part
            for (let i = 0; i < chunk.length; i++) {
                let c = chunk[i];
                this.stateBytes.push(c);
                if (this.stateBytes.length > 4) {
                    // only the last 4 bytes matter for the separator check
                    this.stateBytes = this.stateBytes.slice(-4);
                }
                let b0 = this.stateBytes[this.stateBytes.length - 1];
                let b1 = this.stateBytes.length > 1 && this.stateBytes[this.stateBytes.length - 2];
                let b2 = this.stateBytes.length > 2 && this.stateBytes[this.stateBytes.length - 3];
                if (b0 === 0x0a && (b1 === 0x0a || (b1 === 0x0d && b2 === 0x0a))) {
                    // found header ending
                    this.state = 'body';
                    if (i === chunk.length - 1) {
                        //end of chunk
                        this.headerChunks.push(chunk);
                        this.headers = parseHeaders(Buffer.concat(this.headerChunks));
                        await this.messageHeaders(this.headers);
                        return;
                    }
                    this.headerChunks.push(chunk.subarray(0, i + 1));
                    this.headers = parseHeaders(Buffer.concat(this.headerChunks));
                    await this.messageHeaders(this.headers);
                    // the rest of this chunk already belongs to the body
                    chunk = chunk.subarray(i + 1);
                    break;
                }
            }
        }
        if (this.state !== 'body') {
            // still inside the header section — keep accumulating
            this.headerChunks.push(chunk);
            return;
        }
        await this.nextChunk(chunk);
    }
    // Generator that splits the input so every bare "\n" (not preceded by
    // "\r") is replaced with "\r\n", yielding normalized sub-chunks.
    *ensureLinebreaks(input: Buffer) {
        let pos = 0;
        for (let i = 0; i < input.length; i++) {
            let c = input[i];
            if (c !== 0x0a) {
                this.lastByte = c;
            } else if (this.lastByte !== 0x0d) {
                // emit line break
                let buf;
                if (i === 0 || pos === i) {
                    buf = Buffer.from('\r\n');
                } else {
                    buf = Buffer.concat([input.subarray(pos, i), Buffer.from('\r\n')]);
                }
                yield buf;
                pos = i + 1;
            }
        }
        if (pos === 0) {
            // no bare LF found — pass the chunk through unchanged
            yield input;
        } else if (pos < input.length) {
            let buf = input.subarray(pos);
            yield buf;
        }
    }
    // Normalizes line breaks, then processes each resulting sub-chunk.
    async writeAsync(chunk: any, encoding: BufferEncoding) {
        if (!chunk || !chunk.length) {
            return;
        }
        if (typeof chunk === 'string') {
            chunk = Buffer.from(chunk, encoding);
        }
        for (let partialChunk of this.ensureLinebreaks(chunk)) {
            // separate chunk is emitted for every line that uses \n instead of \r\n
            await this.processChunk(partialChunk);
            this.byteLength += partialChunk.length;
        }
    }
    // Writable hook — bridges the stream API to writeAsync().
    _write(chunk: any, encoding: BufferEncoding, callback: (error?: Error | null) => void) {
        this.writeAsync(chunk, encoding)
            .then(() => callback())
            .catch(err => callback(err));
    }
    // Finalizes the child class state; also parses headers for messages
    // that ended before a header/body separator was ever found.
    async finish() {
        // generate final hash and emit it
        await this.finalChunk();
        if (!this.headers && this.headerChunks.length) {
            this.headers = parseHeaders(Buffer.concat(this.headerChunks));
            await this.messageHeaders(this.headers);
        }
    }
    // Writable hook — bridges end-of-stream to finish().
    _final(callback: (error?: Error | null) => void) {
        this.finish()
            .then(() => callback())
            .catch(err => callback(err));
    }
}

View File

@@ -1,308 +0,0 @@
// NB! fails to properly parse nested comments (should be rare enough though)
interface Part {
    [key: string]: string;
}

/**
 * Parses a space separated list of key=value tags into an object. The first
 * token becomes the `value` property; dotted keys ("a.b=c") create nested
 * objects. Backslash escapes the next character; quotes group text.
 *
 * @param str raw tag list, e.g. `dns/txt k=rsa p=...`
 */
const valueParser = (str: string) => {
    let line = str.replace(/\s+/g, ' ').trim();
    let parts: Part[] = [];
    let lastState: string | boolean = false;

    const createPart = () => {
        let part: Part = {
            key: '',
            value: ''
        };
        parts.push(part);
        return part;
    };

    const parse = () => {
        let state = 'key';
        let escaped;
        let quote;
        let curPart = createPart();
        for (let i = 0; i < line.length; i++) {
            let c = line.charAt(i);
            switch (state) {
                // @ts-ignore
                case 'key':
                    if (c === '=') {
                        state = 'value';
                        break;
                    }
                // falls through
                case 'value': {
                    if (escaped === true) {
                        // previous char was a backslash: take this char literally
                        curPart[state] += c;
                        // FIX: an escape covers exactly one character; the flag was
                        // never cleared, so everything after the first backslash
                        // (spaces, quotes, ...) was treated as escaped
                        escaped = false;
                        break;
                    }
                    switch (c) {
                        case ' ':
                            // start new part
                            curPart = createPart();
                            state = 'key';
                            break;
                        case '\\':
                            escaped = true;
                            break;
                        case '"':
                        case "'":
                            lastState = state;
                            state = 'quoted';
                            quote = c;
                            break;
                        default:
                            curPart[state] += c;
                            break;
                    }
                    break;
                }
                case 'quoted':
                    if (escaped === true && typeof lastState === 'string') {
                        curPart[lastState] += c;
                        // FIX: same single-character escape semantics inside quotes
                        escaped = false;
                        break;
                    }
                    switch (c) {
                        case '\\':
                            escaped = true;
                            break;
                        case quote:
                            // closing quote — resume the key/value we came from
                            state = lastState as string;
                            break;
                        default:
                            if (typeof lastState === 'string') {
                                curPart[lastState] += c;
                            }
                            break;
                    }
                    break;
            }
        }
        // the first token carries no "=", it is the method/value itself
        let result: { [key: string]: any } = {
            value: parts[0].key
        };
        parts.slice(1).forEach(part => {
            if (part.key || part.value) {
                // dotted keys build nested objects: "a.b=c" -> { a: { b: 'c' } }
                let path = part.key.split('.');
                let curRes = result;
                let final = path.pop();
                for (let p of path) {
                    if (typeof curRes[p] !== 'object' || !curRes[p]) {
                        curRes[p] = {};
                    }
                    curRes = curRes[p];
                }
                curRes[final ?? ''] = part.value;
            }
        });
        return result;
    };
    return parse();
};
/**
 * Parses a structured email header (DKIM-Signature, Authentication-Results,
 * ARC-* etc.) into its semicolon separated key=value parts.
 * Handles backslash escapes, (comments) and quoted sections; numeric tags
 * (l, v, t and ARC i) are coerced to numbers; base64-ish tags (bh, b, p, h)
 * get all whitespace stripped. Returns { parsed, original }.
 */
const headerParser = (buf: Buffer | string) => {
    let line = (buf || '').toString().trim();
    // split "Header-Name: rest"; the name is lower-cased for lookups
    let splitterPos = line.indexOf(':');
    let headerKey: string;
    if (splitterPos >= 0) {
        headerKey = line.substr(0, splitterPos).trim().toLowerCase();
        line = line.substr(splitterPos + 1).trim();
    }
    let parts: { [key: string]: any }[] = [];
    let lastState: string | boolean = false;
    const createPart = (): { [key: string]: string | boolean } => {
        let part = {
            key: '',
            value: '',
            comment: '',
            hasValue: false
        };
        parts.push(part);
        return part;
    };
    const parse = () => {
        let state = 'key';
        let escaped = false;
        let quote;
        let curPart = createPart();
        for (let i = 0; i < line.length; i++) {
            let c = line.charAt(i);
            switch (state) {
                // @ts-ignore
                case 'key':
                    if (c === '=') {
                        state = 'value';
                        curPart.hasValue = true;
                        break;
                    }
                // falls through
                case 'value': {
                    if (escaped) {
                        // bugfix: consume the escape and stop processing this
                        // char — previously there was no break (the char was
                        // appended AND re-processed by the switch below) and
                        // `escaped` was never reset
                        escaped = false;
                        curPart[state] += c;
                        break;
                    }
                    switch (c) {
                        case ';':
                            // part separator — start a new key=value part
                            curPart = createPart();
                            state = 'key';
                            break;
                        case '\\':
                            escaped = true;
                            break;
                        case '(':
                            lastState = state;
                            state = 'comment';
                            break;
                        case '"':
                        case "'":
                            // quotes are kept as part of the value
                            lastState = state;
                            curPart[state] += c;
                            state = 'quoted';
                            quote = c;
                            break;
                        default:
                            curPart[state] += c;
                            break;
                    }
                    break;
                }
                case 'comment':
                    // bugfix: consume a pending escape so "\)" does not end
                    // the comment (escaped was set below but never used)
                    if (escaped) {
                        escaped = false;
                        curPart[state] += c;
                        break;
                    }
                    switch (c) {
                        case '\\':
                            escaped = true;
                            break;
                        case ')':
                            state = lastState as string;
                            break;
                        default:
                            curPart[state] += c;
                            break;
                    }
                    break;
                case 'quoted':
                    // bugfix: consume a pending escape so an escaped quote
                    // char does not terminate the quoted section
                    if (escaped && typeof lastState === 'string') {
                        escaped = false;
                        curPart[lastState] += c;
                        break;
                    }
                    switch (c) {
                        case '\\':
                            escaped = true;
                            break;
                        // @ts-ignore
                        case quote:
                            state = lastState as string;
                        // falls through — the closing quote itself is kept
                        default:
                            if (typeof lastState === 'string') {
                                curPart[lastState] += c;
                            }
                            break;
                    }
                    break;
            }
        }
        // normalize parts: collapse whitespace, lower-case keys, coerce values
        for (let i = parts.length - 1; i >= 0; i--) {
            for (let key of Object.keys(parts[i])) {
                if (typeof parts[i][key] === 'string') {
                    parts[i][key] = parts[i][key].replace(/\s+/g, ' ').trim();
                }
            }
            parts[i].key = (parts[i].key).toLowerCase();
            if (!parts[i].key) {
                // remove empty value
                parts.splice(i, 1);
            } else if (['bh', 'b', 'p', 'h'].includes(parts[i].key)) {
                // remove unneeded whitespace (folded base64 / header lists)
                parts[i].value = parts[i].value?.replace(/\s+/g, '');
            } else if (['l', 'v', 't'].includes(parts[i].key) && !isNaN(parts[i].value)) {
                parts[i].value = Number(parts[i].value);
            } else if (parts[i].key === 'i' && /^arc-/i.test(headerKey)) {
                // ARC instance number
                parts[i].value = Number(parts[i].value);
            }
        }
        let result: { [key: string]: any } = {
            header: headerKey
        };
        for (let i = 0; i < parts.length; i++) {
            // find the first entry with key only and use it as the default value
            if (parts[i].key && !parts[i].hasValue) {
                result.value = parts[i].key;
                parts.splice(i, 1);
                break;
            }
        }
        parts.forEach(part => {
            let entry: { [key: string]: any } = {
                value: part.value
            };
            if (['arc-authentication-results', 'authentication-results'].includes(headerKey) && typeof part.value === 'string') {
                // parse value into subparts as well
                entry = Object.assign(entry, valueParser(entry.value));
            }
            if (part.comment) {
                entry.comment = part.comment;
            }
            if (['arc-authentication-results', 'authentication-results'].includes(headerKey) && part.key === 'dkim') {
                // multiple dkim results may be present — collect into an array
                if (!result[part.key]) {
                    result[part.key] = [];
                }
                if (Array.isArray(result[part.key])) {
                    result[part.key].push(entry);
                }
            } else {
                result[part.key] = entry;
            }
        });
        return result;
    };
    return { parsed: parse(), original: buf };
};
export default headerParser;

View File

@@ -1,567 +0,0 @@
// @ts-ignore
import libmime from "libmime";
// @ts-ignore
import psl from "psl";
import { setImmediate } from "timers";
import { pki } from "node-forge";
import punycode from "punycode";
import crypto, { KeyObject } from "crypto";
import parseDkimHeaders from "./parse-dkim-headers";
import { Parsed, SignatureType } from "./index";
import { DkimVerifier } from "./dkim-verifier";
// Runtime environment detection: true only when running under Node.js
// (probed via process.versions.node, which bundlers do not define).
var isNode = false;
if (typeof process === "object") {
  if (typeof process.versions === "object") {
    if (typeof process.versions.node !== "undefined") {
      isNode = true;
    }
  }
}
const LOCAL = isNode;
// Under Node use the built-in promise-based DNS resolver; in browser builds
// `dns` stays undefined and DNS-over-HTTPS (resolveDNSHTTP) is used instead.
let dns: any;
if (LOCAL) {
  dns = require("dns").promises;
}
// Default colon-separated list of header fields considered for DKIM
// signing/verification when no explicit field list is supplied.
export const defaultDKIMFieldNames =
  "From:Sender:Reply-To:Subject:Date:Message-ID:To:" +
  "Cc:MIME-Version:Content-Type:Content-Transfer-Encoding:Content-ID:" +
  "Content-Description:Resent-Date:Resent-From:Resent-Sender:" +
  "Resent-To:Resent-Cc:Resent-Message-ID:In-Reply-To:References:" +
  "List-Id:List-Help:List-Unsubscribe:List-Subscribe:List-Post:" +
  "List-Owner:List-Archive:BIMI-Selector";
// Order in which DKIM-Signature tags are emitted by
// formatSignatureHeaderLine (bh and b last).
const keyOrderingDKIM = [
  "v",
  "a",
  "c",
  "d",
  "h",
  "i",
  "l",
  "q",
  "s",
  "t",
  "x",
  "z",
  "bh",
  "b",
];
/**
 * Feeds `input` into the verifier stream: readable streams are piped,
 * Buffers/strings are written out in fixed-size chunks with backpressure
 * handling. Resolves when the stream ends/finishes, rejects on error.
 *
 * @param stream target DkimVerifier (writable stream)
 * @param input message source: Buffer, string, or readable stream
 * @param chunkSize write granularity in bytes (0 → default of 64 KiB)
 */
export const writeToStream = async (
  stream: DkimVerifier,
  input: Buffer & { pipe: (...args: any) => void; on: (...args: any) => void },
  chunkSize: number = 0
) => {
  chunkSize = chunkSize || 64 * 1024;
  if (typeof input === "string") {
    input = Buffer.from(input) as Buffer & {
      pipe: (...args: any) => void;
      on: (...args: any) => void;
    };
  }
  return new Promise((resolve, reject) => {
    if (typeof input?.on === "function") {
      // readable stream: let pipe handle chunking and backpressure
      // (removed leftover console.log debug statement)
      input.pipe(stream);
      input.on("error", reject);
    } else {
      // buffer: write sequentially, pausing on backpressure ('drain')
      let pos = 0;
      let writeChunk = () => {
        if (pos >= input.length) {
          return stream.end();
        }
        let chunk;
        if (pos + chunkSize >= input.length) {
          chunk = input.subarray(pos);
        } else {
          chunk = input.subarray(pos, pos + chunkSize);
        }
        pos += chunk.length;
        if (stream.write(chunk) === false) {
          stream.once("drain", () => writeChunk());
          return;
        }
        setImmediate(writeChunk);
      };
      setImmediate(writeChunk);
    }
    stream.on("end", resolve);
    stream.on("finish", resolve);
    stream.on("error", reject);
  });
};
/**
 * Splits a raw header block into individual header lines, merging folded
 * continuation lines (leading whitespace) back into their parent header.
 * Each row carries the lower-cased key, the original-cased key, and the raw
 * (re-joined) line bytes. Returns { parsed, original }.
 */
export const parseHeaders = (buf: Buffer) => {
  const lines: string[][] = buf
    .toString("binary")
    .replace(/[\r\n]+$/, "")
    .split(/\r?\n/)
    .map((text) => [text]);
  // walk backwards so each folded line can be merged into the row above it
  for (let idx = lines.length - 1; idx > 0; idx--) {
    if (/^\s/.test(lines[idx][0])) {
      lines[idx - 1].push(...lines[idx]);
      lines.splice(idx, 1);
    }
  }
  const mappedRows: {
    key: string | null;
    casedKey: string | undefined;
    line: Buffer;
  }[] = lines.map((row) => {
    const str = row.join("\r\n");
    const keyMatch = str.match(/^[^:]+/);
    let key: string | null = null;
    let casedKey: string | undefined;
    if (keyMatch) {
      casedKey = keyMatch[0].trim();
      key = casedKey.toLowerCase();
    }
    return { key, casedKey, line: Buffer.from(str, "binary") };
  });
  return { parsed: mappedRows, original: buf };
};
/**
 * Selects the header rows that take part in a DKIM signature.
 * In verify mode the signed h= order is honored and the LAST occurrence of
 * each listed field is consumed (bottom-up selection for repeated fields);
 * in signing mode every occurrence of each wanted field is included,
 * iterating bottom-up. Returns the colon-cased key list and the rows.
 */
export const getSigningHeaderLines = (
  parsedHeaders: Parsed[],
  fieldNames: string | string[],
  verify: boolean
) => {
  // non-string input falls back to the default field list
  const wantedFields = (
    typeof fieldNames === "string" ? fieldNames : defaultDKIMFieldNames
  )
    .split(":")
    .map((entry) => entry.trim().toLowerCase())
    .filter(Boolean);
  const signingList: Parsed[] = [];
  if (verify) {
    const available = ([] as Parsed[]).concat(parsedHeaders);
    for (const fieldName of wantedFields) {
      for (let idx = available.length - 1; idx >= 0; idx--) {
        if (available[idx].key === fieldName) {
          signingList.push(available[idx]);
          available.splice(idx, 1);
          break;
        }
      }
    }
  } else {
    for (let idx = parsedHeaders.length - 1; idx >= 0; idx--) {
      const header = parsedHeaders[idx];
      if (wantedFields.includes(header.key ?? "")) {
        signingList.push(header);
      }
    }
  }
  return {
    keys: signingList.map((entry) => entry.casedKey).join(": "),
    headers: signingList,
  };
};
/**
 * Generates a `DKIM-Signature: ...` header line for the given tag values.
 *
 * @param type signature type; only "DKIM" is currently supported
 * @param values tag/value map (v, a, c, d, s, bh, b, ...); false/undefined/
 *   null entries and tags outside the known ordering are skipped
 * @param folded when true, the b= value and the whole header are folded
 * @returns the formatted header line
 * @throws Error for ARC/AS (not implemented here) and unknown types
 */
export const formatSignatureHeaderLine = (
  type: SignatureType,
  values: Record<string, string | boolean>,
  folded: boolean
): string => {
  type = (type ?? "").toString().toUpperCase() as SignatureType;
  let keyOrdering: string[], headerKey: string;
  switch (type) {
    case "DKIM":
      headerKey = "DKIM-Signature";
      keyOrdering = keyOrderingDKIM;
      // fill in defaults for required tags that were not provided
      values = Object.assign(
        {
          v: 1,
          t: Math.round(Date.now() / 1000),
          q: "dns/txt",
        },
        values
      );
      break;
    case "ARC":
    case "AS":
      throw Error("err");
    default:
      throw new Error("Unknown Signature type");
  }
  const header =
    `${headerKey}: ` +
    Object.keys(values)
      .filter(
        (key) =>
          // bugfix: was `values.key !== null`, which checked the literal
          // property "key" instead of the tag currently being filtered
          values[key] !== false &&
          typeof values[key] !== "undefined" &&
          values[key] !== null &&
          keyOrdering.includes(key)
      )
      .sort((a, b) => keyOrdering.indexOf(a) - keyOrdering.indexOf(b))
      .map((key) => {
        let val = values[key] ?? "";
        if (key === "b" && folded && val) {
          // fold signature value
          return `${key}=${val}`.replace(/.{75}/g, "$& ").trim();
        }
        if (["d", "s"].includes(key) && typeof val === "string") {
          try {
            // convert to A-label if needed
            val = punycode.toASCII(val);
          } catch (err) {
            // ignore
          }
        }
        if (key === "i" && type === "DKIM" && typeof val === "string") {
          let atPos = val.indexOf("@");
          if (atPos >= 0) {
            let domainPart = val.substr(atPos + 1);
            try {
              // convert to A-label if needed
              domainPart = punycode.toASCII(domainPart);
            } catch (err) {
              // ignore
            }
            val = val.substr(0, atPos + 1) + domainPart;
          }
        }
        return `${key}=${val}`;
      })
      .join("; ");
  if (folded) {
    return libmime.foldLines(header);
  }
  return header;
};
/**
 * Resolves a DNS record via Google DNS-over-HTTPS (used in browser builds
 * where the Node `dns` module is unavailable).
 *
 * @param name record name to look up
 * @param type record type, e.g. "TXT"
 * @returns single-element array with the data of the last Answer entry
 * @throws Error when the response carries no Answer section (e.g. NXDOMAIN)
 */
async function resolveDNSHTTP(name: string, type: string) {
  const resp = await fetch(
    "https://dns.google/resolve?" +
      new URLSearchParams({
        name: name,
        type: type,
      })
  );
  const out = await resp.json();
  // bugfix: guard against a missing/empty Answer section, which previously
  // crashed with a TypeError instead of a meaningful error
  if (!Array.isArray(out.Answer) || out.Answer.length === 0) {
    throw new Error(`No DNS ${type} record found for ${name}`);
  }
  // For some DNS, the Answer response here contains more than 1 element in
  // the array. The last element is the one containing the public key.
  return [out.Answer[out.Answer.length - 1].data];
}
// Converts a binary string into an ArrayBuffer, one byte per char code.
// (Adapted from https://developers.google.com/web/updates/2012/06/How-to-convert-ArrayBuffer-to-and-from-String)
function str2ab(str: string) {
  const bytes = new Uint8Array(str.length);
  for (let i = 0; i < str.length; i++) {
    bytes[i] = str.charCodeAt(i);
  }
  return bytes.buffer;
}
// Imports a PEM public key via WebCrypto. Browser-only path — relies on
// `window` (atob and crypto.subtle). Returns a Promise<CryptoKey>.
// NOTE(review): the key is imported with algorithm RSA-OAEP and usage
// ["encrypt"], which looks wrong for DKIM signature *verification*
// (RSASSA-PKCS1-v1_5 with ["verify"] would be expected) — confirm how the
// caller actually uses this key.
function importRsaKey(pem: string) {
  // fetch the part of the PEM string between header and footer
  const pemHeader = "-----BEGIN PUBLIC KEY-----";
  const pemFooter = "-----END PUBLIC KEY-----";
  const pemContents = pem.substring(
    pemHeader.length,
    pem.length - pemFooter.length
  );
  // base64 decode the string to get the binary data
  // (atob performs forgiving base64 decode, so embedded newlines are ok)
  const binaryDerString = window.atob(pemContents);
  // convert from a binary string to an ArrayBuffer
  const binaryDer = str2ab(binaryDerString);
  return window.crypto.subtle.importKey(
    "spki",
    binaryDer,
    {
      name: "RSA-OAEP",
      hash: "SHA-256",
    },
    true,
    ["encrypt"]
  );
}
/**
 * Fetches and validates a DKIM public key from a DNS TXT record.
 *
 * @param type entry type, e.g. "DKIM" (enables the v=DKIM1 version check)
 * @param name TXT record name (selector._domainkey.domain)
 * @param minBitLength minimum accepted RSA modulus size (default 1024)
 * @param resolver DNS resolver; defaults to dns.resolve under Node, while
 *   browser builds always use DNS-over-HTTPS
 * @returns object with publicKey (PEM Buffer), rr (raw record), modulusLength
 * @throws CustomError with code EINVALIDVAL / EINVALIDVER / EINVALIDTYPE /
 *   ESHORTKEY
 */
export const getPublicKey = async (
  type: string,
  name: string,
  minBitLength: number,
  resolver: (...args: [name: string, type: string]) => Promise<any>
) => {
  minBitLength = minBitLength || 1024;
  if (LOCAL) {
    resolver = resolver || dns.resolve;
  } else {
    resolver = resolveDNSHTTP;
  }
  let list = await resolver(name, "TXT");
  // join chunked TXT strings and strip whitespace and quoting
  let rr =
    list &&
    []
      .concat(list[0] || [])
      .join("")
      .replaceAll(/\s+/g, "")
      .replaceAll('"', "");
  if (rr) {
    // prefix value for parsing as there is no default value
    let entry = parseDkimHeaders("DNS: TXT;" + rr);
    const publicKeyValue = entry?.parsed?.p?.value;
    if (!publicKeyValue) {
      throw new CustomError("Missing key value", "EINVALIDVAL", rr);
    }
    if (
      type === "DKIM" &&
      entry?.parsed?.v &&
      (entry?.parsed?.v?.value || "").toString().toLowerCase().trim() !==
        "dkim1"
    ) {
      throw new CustomError("Unknown key version", "EINVALIDVER", rr);
    }
    // re-add base64 padding that may have been stripped from the record
    let paddingNeeded =
      publicKeyValue.length % 4 ? 4 - (publicKeyValue.length % 4) : 0;
    const publicKeyPem = Buffer.from(
      `-----BEGIN PUBLIC KEY-----\n${(
        publicKeyValue + "=".repeat(paddingNeeded)
      ).replace(/.{64}/g, "$&\n")}\n-----END PUBLIC KEY-----`
    );
    let publicKeyObj;
    if (LOCAL) {
      publicKeyObj = crypto.createPublicKey({
        key: publicKeyPem,
        format: "pem",
      });
    } else {
      publicKeyObj = await importRsaKey(publicKeyPem.toString());
    }
    let keyType;
    if (LOCAL) {
      keyType = (publicKeyObj as KeyObject).asymmetricKeyType;
    } else {
      keyType = (publicKeyObj as CryptoKey).algorithm.name
        .split("-")[0]
        .toLowerCase();
    }
    if (
      !["rsa", "ed25519"].includes(keyType ?? "") ||
      (entry?.parsed?.k && entry?.parsed?.k?.value?.toLowerCase() !== keyType)
    ) {
      // bugfix: message was a double-quoted string, so "${keyType}" was
      // emitted literally instead of being interpolated
      throw new CustomError(`Unknown key type (${keyType})`, "EINVALIDTYPE", rr);
    }
    let modulusLength;
    if ((publicKeyObj as CryptoKey).algorithm) {
      modulusLength = (
        publicKeyObj as CryptoKey & { algorithm: { modulusLength: number } }
      ).algorithm?.modulusLength;
    } else {
      // fall back to node-forge
      const pubKeyData = pki.publicKeyFromPem(publicKeyPem.toString());
      modulusLength = pubKeyData.n.bitLength();
    }
    // bugfix: the minimum-size check previously hard-coded 1024, silently
    // ignoring the minBitLength parameter
    if (keyType === "rsa" && modulusLength < minBitLength) {
      throw new CustomError("RSA key too short", "ESHORTKEY", rr);
    }
    return {
      publicKey: publicKeyPem,
      rr,
      modulusLength,
    };
  }
  throw new CustomError("Missing key value", "EINVALIDVAL", rr);
};
/**
 * Escapes a property value for an Authentication-Results header: control
 * chars and whitespace runs are collapsed, then the value is returned bare
 * when it is a valid token, otherwise as a quoted string with escaped quotes.
 */
export const escapePropValue = (value: string) => {
  const cleaned = (value || "")
    .toString()
    .replace(/[\x00-\x1F]+/g, " ")
    .replace(/\s+/g, " ")
    .trim();
  const needsQuoting = /[\s\x00-\x1F\x7F-\uFFFF()<>,;:\\"/[\]?=]/.test(cleaned);
  if (!needsQuoting) {
    // plain token — no quoting required
    return cleaned;
  }
  return `"${cleaned.replace(/["\\]/g, (ch) => `\\${ch}`)}"`;
};
/**
 * Escapes a comment value for an Authentication-Results header: control
 * chars and whitespace runs are collapsed, then backslashes and closing
 * parentheses are backslash-escaped.
 */
export const escapeCommentValue = (value: string) => {
  const cleaned = (value || "")
    .toString()
    .replace(/[\x00-\x1F]+/g, " ")
    .replace(/\s+/g, " ")
    .trim();
  return cleaned.replace(/[\\)]/g, (ch) => `\\${ch}`);
};
/**
 * Renders one "method=result (comment) ptype.prop=value ..." row for an
 * Authentication-Results header from a status object.
 */
export const formatAuthHeaderRow = (
  method: string,
  status: Record<string, any>
) => {
  status = status || {};
  const segments: string[] = [`${method}=${status.result || "none"}`];
  if (status.comment) {
    segments.push(`(${escapeCommentValue(status.comment)})`);
  }
  // only these property types are emitted
  for (const ptype of ["policy", "smtp", "body", "header"]) {
    const props = status[ptype];
    if (!props || typeof props !== "object") {
      continue;
    }
    for (const prop of Object.keys(props)) {
      if (props[prop]) {
        segments.push(`${ptype}.${prop}=${escapePropValue(props[prop])}`);
      }
    }
  }
  return segments.join(" ");
};
/**
 * Applies "relaxed" canonicalization to a single header line: unfold,
 * lower-case the field name, trim around the colon, collapse whitespace
 * runs to a single space, then append the optional suffix.
 */
export const formatRelaxedLine = (line: Buffer | string, suffix?: string) => {
  const canonical = line
    ?.toString("binary")
    // unfold
    .replace(/\r?\n/g, "")
    // key to lowercase, trim around :
    .replace(/^([^:]*):\s*/, (m, k) => k.toLowerCase().trim() + ":")
    // single WSP
    .replace(/\s+/g, " ")
    .trim();
  return Buffer.from(canonical + (suffix ? suffix : ""), "binary");
};
/**
 * Lower-cases and trims a domain, converting U-labels to A-labels via
 * punycode when possible; on punycode failure the plain lower-cased form
 * is returned unchanged.
 */
export const formatDomain = (domain: string) => {
  const normalized = domain.toLowerCase().trim();
  try {
    return punycode.toASCII(normalized).toLowerCase().trim();
  } catch (err) {
    // not convertible — keep the lower-cased input
    return normalized;
  }
};
// Checks DMARC-style alignment between the From domain and a list of
// authenticated domains; returns the matching domain or false.
// NOTE(review): the strict branch also maps each candidate through
// psl.get() (organizational domain) before comparing — strict alignment is
// normally an exact FQDN match, so confirm this lookup is intentional.
export const getAlignment = (
  fromDomain: string,
  domainList: string[],
  strict: boolean = false
) => {
  domainList = ([] as string[]).concat(domainList || []);
  if (strict) {
    fromDomain = formatDomain(fromDomain);
    for (let domain of domainList) {
      domain = formatDomain(psl.get(domain) || domain);
      if (formatDomain(domain) === fromDomain) {
        return domain;
      }
    }
  }
  // match org domains (relaxed alignment, also the strict-mode fallback)
  fromDomain = formatDomain(psl.get(fromDomain) || fromDomain);
  for (let domain of domainList) {
    domain = formatDomain(psl.get(domain) || domain);
    if (domain === fromDomain) {
      return domain;
    }
  }
  return false;
};
/**
 * Validates a DKIM a= algorithm identifier such as "rsa-sha256".
 * Accepted signing algorithms: rsa, ed25519. Accepted hashes: sha256
 * (plus sha1 when strict is false).
 *
 * @param algorithm the a= tag value
 * @param strict when true, sha1 is rejected
 * @throws Error with err.code === "EINVALIDALGO" on any validation failure
 */
export const validateAlgorithm = (algorithm: string, strict: boolean) => {
  try {
    if (!algorithm || !/^[^-]+-[^-]+$/.test(algorithm)) {
      throw new Error("Invalid algorithm format");
    }
    let [signAlgo, hashAlgo] = algorithm.toLowerCase().split("-");
    if (!["rsa", "ed25519"].includes(signAlgo)) {
      throw new Error("Unknown signing algorithm: " + signAlgo);
    }
    if (!["sha256"].concat(!strict ? "sha1" : []).includes(hashAlgo)) {
      throw new Error("Unknown hashing algorithm: " + hashAlgo);
    }
  } catch (err: unknown) {
    // Tag the error so callers can branch on err.code.
    // bugfix: the code was previously only set when a "code" property
    // already existed (Object.hasOwn), which is never true for freshly
    // thrown Errors — EINVALIDALGO was never attached.
    if (err !== null && typeof err === "object") {
      (err as { code: string }).code = "EINVALIDALGO";
    }
    throw err;
  }
};
/**
 * Error subclass carrying a machine-readable code (e.g. "EINVALIDVAL") and
 * the raw DNS record text associated with the failure.
 */
export class CustomError extends Error {
  // raw DNS record; empty string when not applicable
  rr: string;
  constructor(
    message: string,
    // machine-readable error code
    public code: string,
    rr?: string
  ) {
    super(message);
    this.rr = rr ?? "";
  }
}
export { parseDkimHeaders };