Mirror of https://github.com/zkemail/zk-email-verify.git

Merge pull request #137 from isidroamv/feat/migrate-ts

Feat/migrate ts with fix
@@ -16,6 +16,7 @@
"@babel/preset-env": "^7.22.2",
"@babel/preset-react": "^7.22.0",
"@babel/preset-typescript": "^7.21.5",
"@types/libmime": "^5.0.3",
"babel-jest": "^29.5.0",
"babel-preset-jest": "^29.5.0",
"husky": "^8.0.3",
@@ -21,6 +21,14 @@
"snarkjs": "https://github.com/sampritipanda/snarkjs.git#fef81fc51d17a734637555c6edbd585ecda02d9e"
},
"devDependencies": {
"@types/addressparser": "^1.0.3",
"@types/atob": "^2.1.2",
"@types/jest": "^29.5.1",
"@types/lodash": "^4.14.181",
"@types/mocha": "^10.0.1",
"@types/node": "^18.0.6",
"@types/node-forge": "^1.3.2",
"@types/psl": "^1.1.2",
"msw": "^1.2.2"
}
}
@@ -1,8 +1,8 @@
import { SimpleHash } from './simple';
import { RelaxedHash } from './relaxed';

const dkimBody = (canonicalization, ...options) => {
canonicalization = (canonicalization || 'simple/simple').toString().split('/').pop().toLowerCase().trim();
export const dkimBody = (canonicalization: any, ...options: [string, number]) => {
canonicalization = (canonicalization ?? 'simple/simple').toString().split('/').pop()?.toLowerCase().trim();
switch (canonicalization) {
case 'simple':
return new SimpleHash(...options);
@@ -12,5 +12,3 @@ const dkimBody = (canonicalization, ...options) => {
throw new Error('Unknown body canonicalization');
}
};

export { dkimBody };
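Editor's note: an illustrative sketch (not part of the commit) of how the newly exported factory could be exercised; the relative import paths follow the ones used elsewhere in this diff, and the third argument is the l= body-length limit documented in the hash classes below.

    // Sketch: dkimBody picks the body canonicalizer from the body half of the c= tag.
    import { dkimBody } from "./body";
    import { RelaxedHash } from "./relaxed";
    import { SimpleHash } from "./simple";

    const relaxed = dkimBody("relaxed/relaxed", "sha256", 1024 * 1024); // large l= limit, no truncation
    const simple = dkimBody("simple", "sha256", 1024 * 1024);

    console.log(relaxed instanceof RelaxedHash); // true
    console.log(simple instanceof SimpleHash);   // true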
@@ -1,8 +1,4 @@
/* eslint no-control-regex: 0 */

'use strict';

import crypto from 'crypto';
import * as crypto from 'crypto';

const CHAR_CR = 0x0d;
const CHAR_LF = 0x0a;
@@ -15,13 +11,22 @@ const CHAR_TAB = 0x09;
*
* @class
*/
class RelaxedHash {
export class RelaxedHash {
byteLength: number;
bodyHashedBytes: number;
private remainder: Buffer | boolean;
private bodyHash: crypto.Hash;
private maxBodyLength: number;
private maxSizeReached: boolean;
private emptyLinesQueue: Array<Buffer>;
private fullBody: Buffer;

/**
* @param {String} [algorithm] Hashing algo, either "sha1" or "sha256"
* @param {Number} [maxBodyLength] Allowed body length count, the value from the l= parameter
*/
constructor(algorithm, maxBodyLength) {
algorithm = (algorithm || 'sha256').split('-').pop().toLowerCase();
constructor(algorithm: string, maxBodyLength: number) {
algorithm = algorithm?.split('-')?.pop()?.toLowerCase() || 'sha256';

this.bodyHash = crypto.createHash(algorithm);

@@ -38,7 +43,7 @@ class RelaxedHash {
this.fullBody = Buffer.alloc(0);
}

_updateBodyHash(chunk) {
private updateBodyHash(chunk: Buffer) {
if (this.maxSizeReached) {
return;
}
@@ -67,16 +72,16 @@ class RelaxedHash {
//process.stdout.write(chunk);
}

_drainPendingEmptyLines() {
private drainPendingEmptyLines() {
if (this.emptyLinesQueue.length) {
for (let emptyLine of this.emptyLinesQueue) {
this._updateBodyHash(emptyLine);
this.updateBodyHash(emptyLine);
}
this.emptyLinesQueue = [];
}
}

_pushBodyHash(chunk) {
private pushBodyHash(chunk: Buffer) {
if (!chunk || !chunk.length) {
return;
}
@@ -87,7 +92,7 @@ class RelaxedHash {
// buffer line endings and empty lines
for (let i = chunk.length - 1; i >= 0; i--) {
if (chunk[i] !== CHAR_LF && chunk[i] !== CHAR_CR) {
this._drainPendingEmptyLines();
this.drainPendingEmptyLines();
if (i < chunk.length - 1) {
this.emptyLinesQueue.push(chunk.subarray(i + 1));
chunk = chunk.subarray(0, i + 1);
@@ -102,10 +107,10 @@ class RelaxedHash {
return;
}

this._updateBodyHash(chunk);
this.updateBodyHash(chunk);
}

fixLineBuffer(line) {
fixLineBuffer(line: Buffer) {
let resultLine = [];

let nonWspFound = false;
@@ -149,7 +154,7 @@ class RelaxedHash {
return Buffer.from(resultLine);
}

update(chunk, final) {
update(chunk: Buffer | null, final: boolean) {
this.byteLength += (chunk && chunk.length) || 0;
if (this.maxSizeReached) {
return;
@@ -166,7 +171,7 @@ class RelaxedHash {
let lineNeedsFixing = false;
let cursorPos = 0;

if (this.remainder && this.remainder.length) {
if (this.remainder && this.remainder instanceof Buffer && this.remainder.length) {
if (chunk) {
// concatting chunks might be bad for performance :S
chunk = Buffer.concat([this.remainder, chunk]);
@@ -197,11 +202,11 @@ class RelaxedHash {
// emit pending bytes up to the last line break before current line
if (lineEndPos >= 0 && lineEndPos >= cursorPos) {
let chunkPart = chunk.subarray(cursorPos, lineEndPos + 1);
this._pushBodyHash(chunkPart);
this.pushBodyHash(chunkPart);
}

let line = chunk.subarray(lineEndPos + 1, pos + 1);
this._pushBodyHash(this.fixLineBuffer(line));
this.pushBodyHash(this.fixLineBuffer(line));

lineNeedsFixing = false;

@@ -235,7 +240,7 @@ class RelaxedHash {
let chunkPart = chunk.subarray(cursorPos, lineEndPos + 1);

if (chunkPart.length) {
this._pushBodyHash(lineNeedsFixing ? this.fixLineBuffer(chunkPart) : chunkPart);
this.pushBodyHash(lineNeedsFixing ? this.fixLineBuffer(chunkPart) : chunkPart);
lineNeedsFixing = false;
}

@@ -249,18 +254,18 @@ class RelaxedHash {
if (final) {
let chunkPart = (cursorPos && chunk && chunk.subarray(cursorPos)) || chunk;
if (chunkPart && chunkPart.length) {
this._pushBodyHash(lineNeedsFixing ? this.fixLineBuffer(chunkPart) : chunkPart);
this.pushBodyHash(lineNeedsFixing ? this.fixLineBuffer(chunkPart) : chunkPart);
lineNeedsFixing = false;
}

if (this.bodyHashedBytes) {
// terminating line break for non-empty messages
this._updateBodyHash(Buffer.from([CHAR_CR, CHAR_LF]));
this.updateBodyHash(Buffer.from([CHAR_CR, CHAR_LF]));
}
}
}

digest(encoding) {
digest(encoding: crypto.BinaryToTextEncoding) {
this.update(null, true);

// finalize
@@ -268,8 +273,6 @@ class RelaxedHash {
}
}

export { RelaxedHash };

/*
let fs = require('fs');
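Editor's note: a short usage sketch for the migrated class (illustration, not part of the commit); the update/digest signatures match the TypeScript ones introduced above, and the second constructor argument is the l= limit.

    // Sketch: relaxed body canonicalization (RFC 6376) folds WSP runs and drops trailing empty lines.
    import { RelaxedHash } from "./relaxed";

    const hasher = new RelaxedHash("sha256", 1024 * 1024); // large l= limit so nothing is truncated
    hasher.update(Buffer.from("Hello   world\t!\r\n\r\n\r\n"), false);
    const bh = hasher.digest("base64"); // digest() flushes the terminating CRLF and finalizes the hash
    console.log(bh); // the kind of value that ends up in the bh= tag for c=*/relaxed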
@@ -1,6 +1,4 @@
'use strict';

import crypto from 'crypto';
import * as crypto from 'crypto';

/**
* Class for calculating body hash of an email message body stream
@@ -8,13 +6,20 @@ import crypto from 'crypto';
*
* @class
*/
class SimpleHash {
export class SimpleHash {
byteLength: number;
bodyHashedBytes: number;
private remainder: Buffer[];
private bodyHash: crypto.Hash;
private maxBodyLength: number;
private fullBody: Buffer;
private lastNewline: boolean;
/**
* @param {String} [algorithm] Hashing algo, either "sha1" or "sha256"
* @param {Number} [maxBodyLength] Allowed body length count, the value from the l= parameter
*/
constructor(algorithm, maxBodyLength) {
algorithm = (algorithm || 'sha256').split('-').pop();
constructor(algorithm: string, maxBodyLength: number) {
algorithm = algorithm?.split('-')?.pop() || 'sha256';
this.bodyHash = crypto.createHash(algorithm);

this.remainder = [];
@@ -28,7 +33,7 @@ class SimpleHash {
this.fullBody = Buffer.alloc(0);
}

_updateBodyHash(chunk) {
private updateBodyHash(chunk: Buffer) {
// the following is needed for l= option
if (
typeof this.maxBodyLength === 'number' &&
@@ -41,7 +46,7 @@ class SimpleHash {
return;
}
// only use allowed size of bytes
chunk = chunk.slice(0, this.maxBodyLength - this.bodyHashedBytes);
chunk = chunk.subarray(0, this.maxBodyLength - this.bodyHashedBytes);
}

this.bodyHashedBytes += chunk.length;
@@ -51,7 +56,7 @@ class SimpleHash {
//process.stdout.write(chunk);
}

update(chunk) {
update(chunk: Buffer) {
if (this.remainder.length) {
// see if we can release the last remainder
for (let i = 0; i < chunk.length; i++) {
@@ -59,7 +64,7 @@ class SimpleHash {
if (c !== 0x0a && c !== 0x0d) {
// found non-line terminator byte, can release previous chunk
for (let remainderChunk of this.remainder) {
this._updateBodyHash(remainderChunk);
this.updateBodyHash(remainderChunk);
}
this.remainder = [];
}
@@ -67,7 +72,7 @@ class SimpleHash {
}

// find line terminators from the end of chunk
let matchStart = false;
let matchStart: boolean | number = false;
for (let i = chunk.length - 1; i >= 0; i--) {
let c = chunk[i];
if (c === 0x0a || c === 0x0d) {
@@ -83,22 +88,20 @@ class SimpleHash {
this.remainder.push(chunk);
return;
} else if (matchStart !== false) {
this.remainder.push(chunk.slice(matchStart));
chunk = chunk.slice(0, matchStart);
this.remainder.push(chunk.subarray(matchStart));
chunk = chunk.subarray(0, matchStart);
}

this._updateBodyHash(chunk);
this.updateBodyHash(chunk);
this.lastNewline = chunk[chunk.length - 1] === 0x0a;
}

digest(encoding) {
digest(encoding: crypto.BinaryToTextEncoding) {
if (!this.lastNewline || !this.bodyHashedBytes) {
// emit empty line buffer to keep the stream flowing
this._updateBodyHash(Buffer.from('\r\n'));
this.updateBodyHash(Buffer.from('\r\n'));
}

return this.bodyHash.digest(encoding);
}
}

export { SimpleHash };
@@ -1,4 +1,4 @@
var isNode = false;
var isNode = false;
if (typeof process === 'object') {
if (typeof process.versions === 'object') {
if (typeof process.versions.node !== 'undefined') {
@@ -13,10 +13,22 @@ import { MessageParser } from "./message-parser";
import { dkimBody } from "./body";
import { generateCanonicalizedHeader } from "./header";
import addressparser from "addressparser";
import crypto from "crypto";
import * as crypto from "crypto";
import { ParseDkimHeaders, ParsedHeaders } from "./index";

class DkimVerifier extends MessageParser {
constructor(options) {
export class DkimVerifier extends MessageParser {
envelopeFrom: string | boolean;
headerFrom: string[];
results: { [key: string]: any }[];
private options: Record<string, any>;
private resolver: (...args: [name: string, type: string]) => Promise<any>;
private minBitLength: number;
private signatureHeaders: ParseDkimHeaders[] & { [key: string]: any }[];
private bodyHashes: Map<string, any>;
private arc: { chain: false };
private seal: { bodyHash: string; };
private sealBodyHashKey: string = '';
constructor(options: Record<string, any>) {
super();

this.options = options || {};
@@ -25,7 +37,7 @@ class DkimVerifier extends MessageParser {

this.results = [];

this.signatureHeaders = [];
this.signatureHeaders = [] as any;
this.bodyHashes = new Map();

this.headerFrom = [];
@@ -42,29 +54,29 @@ class DkimVerifier extends MessageParser {
let bodyCanon = "relaxed";
let hashAlgo = "sha256";
this.sealBodyHashKey = `${bodyCanon}:${hashAlgo}:`;
this.bodyHashes.set(this.sealBodyHashKey, dkimBody(bodyCanon, hashAlgo, false));
this.bodyHashes.set(this.sealBodyHashKey, dkimBody(bodyCanon, hashAlgo, 0));
}
}

async messageHeaders(headers) {
async messageHeaders(headers: ParsedHeaders) {
this.headers = headers;

this.signatureHeaders = headers.parsed
.filter((h) => h.key === "dkim-signature")
.map((h) => {
const value = parseDkimHeaders(h.line);
const value: ParseDkimHeaders & { [key: string]: any } = parseDkimHeaders(h.line);
value.type = "DKIM";
return value;
});

let fromHeaders = headers?.parsed?.filter((h) => h.key === "from");
for (let fromHeader of fromHeaders) {
fromHeader = fromHeader.line.toString();
let splitterPos = fromHeader.indexOf(":");
for (const fromHeader of fromHeaders) {
let fromHeaderString = fromHeader.line.toString();
let splitterPos = fromHeaderString.indexOf(":");
if (splitterPos >= 0) {
fromHeader = fromHeader.substr(splitterPos + 1);
fromHeaderString = fromHeaderString.substr(splitterPos + 1);
}
let from = addressparser(fromHeader.trim());
let from = addressparser(fromHeaderString.trim());
for (let addr of from) {
if (addr && addr.address) {
this.headerFrom.push(addr.address);
@@ -78,12 +90,12 @@ class DkimVerifier extends MessageParser {
} else {
let returnPathHeader = headers.parsed.filter((h) => h.key === "return-path").pop();
if (returnPathHeader) {
returnPathHeader = returnPathHeader.line.toString();
let splitterPos = returnPathHeader.indexOf(":");
let returnPathHeaderString = returnPathHeader.line.toString();
let splitterPos = returnPathHeaderString.indexOf(":");
if (splitterPos >= 0) {
returnPathHeader = returnPathHeader.substr(splitterPos + 1);
returnPathHeaderString = returnPathHeaderString.substr(splitterPos + 1);
}
let returnPath = addressparser(returnPathHeader.trim());
let returnPath = addressparser(returnPathHeaderString.trim());
this.envelopeFrom = returnPath.length && returnPath[0].address ? returnPath[0].address : false;
}
}
@@ -127,7 +139,7 @@ class DkimVerifier extends MessageParser {
}
}

async nextChunk(chunk) {
async nextChunk(chunk: Buffer) {
for (let bodyHash of this.bodyHashes.values()) {
bodyHash.update(chunk);
}
@@ -150,10 +162,10 @@ class DkimVerifier extends MessageParser {
continue;
}

let signingHeaderLines = getSigningHeaderLines(this.headers.parsed, signatureHeader.parsed?.h?.value, true);
let signingHeaderLines = getSigningHeaderLines((this.headers as { parsed: { key: string | null; casedKey: string | undefined; line: Buffer; }[]; original: Buffer; }).parsed, signatureHeader.parsed?.h?.value, true);

let { canonicalizedHeader } = generateCanonicalizedHeader(signatureHeader.type, signingHeaderLines, {
signatureHeaderLine: signatureHeader.original,
let { canonicalizedHeader } = generateCanonicalizedHeader(signatureHeader.type, signingHeaderLines as any, {
signatureHeaderLine: signatureHeader.original as string,
canonicalization: signatureHeader.canonicalization,
instance: ["ARC", "AS"].includes(signatureHeader.type) ? signatureHeader.parsed?.i?.value : false,
});
@@ -164,7 +176,7 @@ class DkimVerifier extends MessageParser {
};

let publicKey, rr, modulusLength;
let status = {
let status: { [key: string]: any } = {
result: "neutral",
comment: false,
// ptype properties
@@ -181,7 +193,7 @@ class DkimVerifier extends MessageParser {
};

if (signatureHeader.type === "DKIM" && this.headerFrom?.length) {
status.aligned = this.headerFrom?.length ? getAlignment(this.headerFrom[0].split("@").pop(), [signatureHeader.signingDomain]) : false;
status.aligned = this.headerFrom?.length ? getAlignment(this.headerFrom[0] ?? ''.split("@")?.pop(), [signatureHeader.signingDomain]) : false;
}

let bodyHashObj = this.bodyHashes.get(signatureHeader.bodyHashKey);
@@ -216,14 +228,14 @@ class DkimVerifier extends MessageParser {
status.signature_value = signatureHeader.parsed?.b?.value;
status.result = ver_result ? "pass" : "fail";

if (status === "fail") {
if (status?.result === "fail") {
status.comment = "bad signature";
}
} catch (err) {
status.result = "neutral";
} catch (err: any) {
status.comment = err.message;
status.result = "neutral";
}
} catch (err) {
} catch (err: any) {
if (err.rr) {
rr = err.rr;
}
@@ -272,7 +284,7 @@ class DkimVerifier extends MessageParser {
status.comment = `invalid body length ${signatureHeader.bodyHashedBytes}`;
}

let result = {
let result: { [key: string]: any } = {
signingDomain: signatureHeader.signingDomain,
selector: signatureHeader.selector,
signature: signatureHeader.parsed?.b?.value,
@@ -339,5 +351,3 @@ class DkimVerifier extends MessageParser {
}
}
}

export { DkimVerifier };
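Editor's note: a hedged sketch of driving the verifier directly, mirroring what dkimVerify() in the index.ts hunk further down does internally; rawEmail is a placeholder Buffer holding a full RFC 822 message, and the `as any` cast is the same one dkimVerify uses.

    // Sketch: feed a raw message through DkimVerifier and inspect the collected fields.
    import { DkimVerifier } from "./dkim-verifier";
    import { writeToStream } from "./tools";

    async function inspect(rawEmail: Buffer) {
      const verifier = new DkimVerifier({});
      await writeToStream(verifier, rawEmail as any); // resolves once the stream has finished
      console.log(verifier.headerFrom, verifier.envelopeFrom);
      for (const res of verifier.results) {
        console.log(res.signingDomain, res.selector, res.signature);
      }
    }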
@@ -1,11 +1,10 @@
'use strict';

import { Options, SignatureType, SigningHeaderLines } from '../index';
import { relaxedHeaders } from './relaxed';
import { simpleHeaders } from './simple';

const generateCanonicalizedHeader = (type, signingHeaderLines, options) => {
export const generateCanonicalizedHeader = (type: SignatureType, signingHeaderLines: SigningHeaderLines, options: Options) => {
options = options || {};
let canonicalization = (options.canonicalization || 'simple/simple').toString().split('/').shift().toLowerCase().trim();
let canonicalization = (options.canonicalization || 'simple/simple').toString()?.split('/')?.shift()?.toLowerCase().trim();
switch (canonicalization) {
case 'simple':
return simpleHeaders(type, signingHeaderLines, options);
@@ -15,5 +14,3 @@ const generateCanonicalizedHeader = (type, signingHeaderLines, options) => {
throw new Error('Unknown header canonicalization');
}
};

export { generateCanonicalizedHeader };
@@ -1,9 +1,8 @@
'use strict';

import { Options, SignatureType, SigningHeaderLines } from '../index';
import { formatSignatureHeaderLine, formatRelaxedLine } from '../tools';

// generate headers for signing
const relaxedHeaders = (type, signingHeaderLines, options) => {
export const relaxedHeaders = (type: SignatureType, signingHeaderLines: SigningHeaderLines, options: Options) => {
let { signatureHeaderLine, signingDomain, selector, algorithm, canonicalization, bodyHash, signTime, signature, instance, bodyHashedBytes } = options || {};
let chunks = [];

@@ -11,7 +10,7 @@ const relaxedHeaders = (type, signingHeaderLines, options) => {
chunks.push(formatRelaxedLine(signedHeaderLine.line, '\r\n'));
}

let opts = false;
let opts: boolean | Record<string, unknown> = false;

if (!signatureHeaderLine) {
opts = {
@@ -52,7 +51,7 @@ const relaxedHeaders = (type, signingHeaderLines, options) => {
b: signature || 'a'.repeat(73)
},
opts
),
) as Record<string, string | boolean>,
true
);
}
@@ -69,5 +68,3 @@ const relaxedHeaders = (type, signingHeaderLines, options) => {

return { canonicalizedHeader: Buffer.concat(chunks), signatureHeaderLine, dkimHeaderOpts: opts };
};

export { relaxedHeaders };
@@ -1,11 +1,10 @@
'use strict';

import { Options, SignatureType, SigningHeaderLines } from '../index';
import { formatSignatureHeaderLine } from '../tools';

const formatSimpleLine = (line, suffix) => Buffer.from(line.toString('binary') + (suffix ? suffix : ''), 'binary');
const formatSimpleLine = (line: Buffer | string, suffix?: string) => Buffer.from(line.toString('binary') + (suffix ? suffix : ''), 'binary');

// generate headers for signing
const simpleHeaders = (type, signingHeaderLines, options) => {
export const simpleHeaders = (type: SignatureType, signingHeaderLines: SigningHeaderLines, options: Options) => {
let { signatureHeaderLine, signingDomain, selector, algorithm, canonicalization, bodyHash, signTime, signature, instance, bodyHashedBytes } = options || {};
let chunks = [];

@@ -13,7 +12,7 @@ const simpleHeaders = (type, signingHeaderLines, options) => {
chunks.push(formatSimpleLine(signedHeaderLine.line, '\r\n'));
}

let opts = false;
let opts: boolean | Record<string, any> = false;

if (!signatureHeaderLine) {
opts = {
@@ -54,7 +53,7 @@ const simpleHeaders = (type, signingHeaderLines, options) => {
b: signature || 'a'.repeat(73)
},
opts
),
) as Record<string, string | boolean>,
true
);
}
@@ -71,5 +70,3 @@ const simpleHeaders = (type, signingHeaderLines, options) => {

return { canonicalizedHeader: Buffer.concat(chunks), signatureHeaderLine, dkimHeaderOpts: opts };
};

export { simpleHeaders };
@@ -1,10 +1,10 @@
import { pki } from "node-forge";
import { DkimVerifier } from "./dkim-verifier";
import { writeToStream } from "./tools";
import { getSigningHeaderLines, parseDkimHeaders, parseHeaders, writeToStream } from "./tools";

export const dkimVerify = async (input: Buffer, options: any = {}) => {
let dkimVerifier = new DkimVerifier(options);
await writeToStream(dkimVerifier, input);
await writeToStream(dkimVerifier, input as any);

const result = {
//headers: dkimVerifier.headers,
@@ -33,7 +33,7 @@ export type DKIMVerificationResult = {
publicKey: bigint;
}

export async function verifyDKIMSignature(email: Buffer) : Promise<DKIMVerificationResult> {
export async function verifyDKIMSignature(email: Buffer): Promise<DKIMVerificationResult> {
const result = await dkimVerify(email);

if (!result.results[0]) {
@@ -55,8 +55,31 @@ export async function verifyDKIMSignature(email: Buffer) : Promise<DKIMVerificat
return {
signature: signatureBigInt,
message: status.signature_header,
body,
bodyHash,
publicKey: BigInt(pubKeyData.n.toString()),
}
}

export type SignatureType = 'DKIM' | 'ARC' | 'AS';

export type ParsedHeaders = ReturnType<typeof parseHeaders>;

export type Parsed = ParsedHeaders['parsed'][0];

export type ParseDkimHeaders = ReturnType<typeof parseDkimHeaders>

export type SigningHeaderLines = ReturnType<typeof getSigningHeaderLines>

export interface Options {
signatureHeaderLine: string;
signingDomain?: string;
selector?: string;
algorithm?: string;
canonicalization: string;
bodyHash?: string;
signTime?: string | number | Date;
signature?: string;
instance: string | boolean;
bodyHashedBytes?: string;
}
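Editor's note: a usage sketch for the exported helper (illustration, not part of the commit); "sample.eml" is a hypothetical input file.

    // Sketch: verify the first DKIM signature of a raw email and read the extracted values.
    import { verifyDKIMSignature } from "./index";
    import * as fs from "fs";

    async function main() {
      const email = fs.readFileSync("sample.eml"); // hypothetical path to a raw RFC 822 message
      const res = await verifyDKIMSignature(email);
      // signature and publicKey are bigint values (the b= signature and the RSA modulus n)
      console.log(res.publicKey.toString(2).length, "bit key");
      console.log(res.bodyHash);
    }

    main().catch(console.error);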
@@ -1,7 +1,7 @@
// Calculates relaxed body hash for a message body stream

import { ParsedHeaders } from './index';
import { parseHeaders } from './tools';
import { Writable } from 'stream';
import { Writable, WritableOptions } from 'stream';

/**
* Class for separating header from body
@@ -9,8 +9,14 @@ import { Writable } from 'stream';
* @class
* @extends Writable
*/
class MessageParser extends Writable {
constructor(options) {
export class MessageParser extends Writable {
byteLength: number;
headers: ParsedHeaders | boolean;
private state: string;
private stateBytes: unknown[];
private headerChunks: Buffer[];
private lastByte: number = 0;
constructor(options?: WritableOptions) {
super(options);

this.byteLength = 0;
@@ -22,19 +28,19 @@ class MessageParser extends Writable {
this.headerChunks = [];
}

async nextChunk(/* chunk */) {
async nextChunk(...args: any) {
// Override in child class
}

async finalChunk() {
async finalChunk(...args: any) {
// Override in child class
}

async messageHeaders() {
async messageHeaders(headers: ParsedHeaders) {
// Override in child class
}

async processChunk(chunk) {
async processChunk(chunk: Buffer) {
if (!chunk || !chunk.length) {
return;
}
@@ -62,10 +68,10 @@ class MessageParser extends Writable {
await this.messageHeaders(this.headers);
return;
}
this.headerChunks.push(chunk.slice(0, i + 1));
this.headerChunks.push(chunk.subarray(0, i + 1));
this.headers = parseHeaders(Buffer.concat(this.headerChunks));
await this.messageHeaders(this.headers);
chunk = chunk.slice(i + 1);
chunk = chunk.subarray(i + 1);
break;
}
}
@@ -79,7 +85,7 @@ class MessageParser extends Writable {
await this.nextChunk(chunk);
}

*ensureLinebreaks(input) {
*ensureLinebreaks(input: Buffer) {
let pos = 0;
for (let i = 0; i < input.length; i++) {
let c = input[i];
@@ -91,7 +97,7 @@ class MessageParser extends Writable {
if (i === 0 || pos === i) {
buf = Buffer.from('\r\n');
} else {
buf = Buffer.concat([input.slice(pos, i), Buffer.from('\r\n')]);
buf = Buffer.concat([input.subarray(pos, i), Buffer.from('\r\n')]);
}
yield buf;

@@ -101,12 +107,12 @@ class MessageParser extends Writable {
if (pos === 0) {
yield input;
} else if (pos < input.length) {
let buf = input.slice(pos);
let buf = input.subarray(pos);
yield buf;
}
}

async writeAsync(chunk, encoding) {
async writeAsync(chunk: any, encoding: BufferEncoding) {
if (!chunk || !chunk.length) {
return;
}
@@ -122,7 +128,7 @@ class MessageParser extends Writable {
}
}

_write(chunk, encoding, callback) {
_write(chunk: any, encoding: BufferEncoding, callback: (error?: Error | null) => void) {
this.writeAsync(chunk, encoding)
.then(() => callback())
.catch(err => callback(err));
@@ -138,11 +144,9 @@ class MessageParser extends Writable {
}
}

_final(callback) {
_final(callback: (error?: Error | null) => void) {
this.finish()
.then(() => callback())
.catch(err => callback(err));
}
}

export { MessageParser };
@@ -1,15 +1,17 @@
'use strict';

// NB! fails to properly parse nested comments (should be rare enough though)

const valueParser = str => {
interface Part {
[key: string]: string;
}

const valueParser = (str: string) => {
let line = str.replace(/\s+/g, ' ').trim();

let parts = [];
let lastState = false;
let parts: Part[] = [];
let lastState: string | boolean = false;

const createPart = () => {
let part = {
let part: Part = {
key: '',
value: ''
};
@@ -28,6 +30,7 @@ const valueParser = str => {
let c = line.charAt(i);

switch (state) {
// @ts-ignore
case 'key':
if (c === '=') {
state = 'value';
@@ -68,7 +71,7 @@ const valueParser = str => {
}

case 'quoted':
if (escaped === true) {
if (escaped === true && typeof lastState === 'string') {
curPart[lastState] += c;
break;
}
@@ -79,11 +82,13 @@ const valueParser = str => {
break;

case quote:
state = lastState;
state = lastState as string;
break;

default:
curPart[lastState] += c;
if (typeof lastState === 'string') {
curPart[lastState] += c;
}
break;
}

@@ -91,7 +96,7 @@ const valueParser = str => {
}
}

let result = {
let result: { [key: string]: any } = {
value: parts[0].key
};
parts.slice(1).forEach(part => {
@@ -105,7 +110,7 @@ const valueParser = str => {
}
curRes = curRes[p];
}
curRes[final] = part.value;
curRes[final ?? ''] = part.value;
}
});

@@ -115,19 +120,19 @@ const valueParser = str => {
return parse();
};

const headerParser = buf => {
const headerParser = (buf: Buffer | string) => {
let line = (buf || '').toString().trim();
let splitterPos = line.indexOf(':');
let headerKey;
let headerKey: string;
if (splitterPos >= 0) {
headerKey = line.substr(0, splitterPos).trim().toLowerCase();
line = line.substr(splitterPos + 1).trim();
}

let parts = [];
let lastState = false;
let parts: { [key: string]: any }[] = [];
let lastState: string | boolean = false;

const createPart = () => {
const createPart = (): { [key: string]: string | boolean } => {
let part = {
key: '',
value: '',
@@ -149,6 +154,7 @@ const headerParser = buf => {
let c = line.charAt(i);

switch (state) {
// @ts-ignore
case 'key':
if (c === '=') {
state = 'value';
@@ -201,7 +207,7 @@ const headerParser = buf => {
break;

case ')':
state = lastState;
state = lastState as string;
break;

default:
@@ -216,13 +222,15 @@ const headerParser = buf => {
case '\\':
escaped = true;
break;

// @ts-ignore
case quote:
state = lastState;
state = lastState as string;
// falls through

default:
curPart[lastState] += c;
if (typeof lastState === 'string') {
curPart[lastState] += c;
}
break;
}

@@ -237,14 +245,14 @@ const headerParser = buf => {
}
}

parts[i].key = parts[i].key.toLowerCase();
parts[i].key = (parts[i].key).toLowerCase();

if (!parts[i].key) {
// remove empty value
parts.splice(i, 1);
} else if (['bh', 'b', 'p', 'h'].includes(parts[i].key)) {
// remove unneeded whitespace
parts[i].value = parts[i].value.replace(/\s+/g, '');
parts[i].value = parts[i].value?.replace(/\s+/g, '');
} else if (['l', 'v', 't'].includes(parts[i].key) && !isNaN(parts[i].value)) {
parts[i].value = Number(parts[i].value);
} else if (parts[i].key === 'i' && /^arc-/i.test(headerKey)) {
@@ -252,7 +260,7 @@ const headerParser = buf => {
}
}

let result = {
let result: { [key: string]: any } = {
header: headerKey
};

@@ -266,7 +274,7 @@ const headerParser = buf => {
}

parts.forEach(part => {
let entry = {
let entry: { [key: string]: any } = {
value: part.value
};

@@ -283,7 +291,9 @@ const headerParser = buf => {
if (!result[part.key]) {
result[part.key] = [];
}
result[part.key].push(entry);
if (Array.isArray(result[part.key])) {
result[part.key].push(entry);
}
} else {
result[part.key] = entry;
}
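Editor's note: a hedged sketch of the parser's output shape, inferred from how dkim-verifier.ts consumes it (value.parsed?.<tag>?.value); not part of the commit.

    // Sketch: parse a DKIM-Signature header line into tag/value entries.
    import parseDkimHeaders from "./parse-dkim-headers";

    const header = parseDkimHeaders(
      Buffer.from("DKIM-Signature: v=1; a=rsa-sha256; c=relaxed/relaxed; d=example.com; s=selector1; bh=abc; b=def")
    );
    // Tag values end up under parsed.<key>.value; the l, v and t tags are converted to numbers above.
    console.log(header.parsed?.d?.value); // expected: "example.com"
    console.log(header.parsed?.v?.value); // expected: 1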
@@ -1,7 +1,12 @@
import { setImmediate } from 'timers';
import { pki } from 'node-forge';

/* eslint no-control-regex: 0 */
import punycode from "punycode";
import libmime from "libmime";
import crypto, { KeyObject } from "crypto";
import parseDkimHeaders from "./parse-dkim-headers";
import psl from "psl";
import { Parsed, SignatureType } from './index';
import { DkimVerifier } from './dkim-verifier';

var isNode = false;
if (typeof process === "object") {
@@ -12,18 +17,12 @@ if (typeof process === "object") {
}
}
const LOCAL = isNode;

import punycode from "punycode";
import libmime from "libmime";
let dns;
let dns: any;
if (LOCAL) {
dns = require("dns").promises;
}
import crypto from "crypto";
import parseDkimHeaders from "./parse-dkim-headers";
import psl from "psl";

const defaultDKIMFieldNames =
export const defaultDKIMFieldNames =
"From:Sender:Reply-To:Subject:Date:Message-ID:To:" +
"Cc:MIME-Version:Content-Type:Content-Transfer-Encoding:Content-ID:" +
"Content-Description:Resent-Date:Resent-From:Resent-Sender:" +
@@ -33,15 +32,15 @@ const defaultDKIMFieldNames =

const keyOrderingDKIM = ["v", "a", "c", "d", "h", "i", "l", "q", "s", "t", "x", "z", "bh", "b"];

const writeToStream = async (stream, input, chunkSize) => {
export const writeToStream = async (stream: DkimVerifier, input: Buffer & { pipe: (...args: any) => void, on: (...args: any) => void }, chunkSize: number = 0) => {
chunkSize = chunkSize || 64 * 1024;

if (typeof input === "string") {
input = Buffer.from(input);
input = Buffer.from(input) as Buffer & { pipe: (...args: any) => void, on: (...args: any) => void };
}

return new Promise((resolve, reject) => {
if (typeof input.on === "function") {
if (typeof input?.on === "function") {
// pipe as stream
console.log('pipe')
input.pipe(stream);
@@ -55,9 +54,9 @@ const writeToStream = async (stream, input, chunkSize) => {

let chunk;
if (pos + chunkSize >= input.length) {
chunk = input.slice(pos);
chunk = input.subarray(pos);
} else {
chunk = input.slice(pos, pos + chunkSize);
chunk = input.subarray(pos, pos + chunkSize);
}
pos += chunk.length;

@@ -76,8 +75,8 @@ const writeToStream = async (stream, input, chunkSize) => {
});
};

const parseHeaders = (buf) => {
let rows = buf
export const parseHeaders = (buf: Buffer) => {
let rows: string[][] = buf
.toString("binary")
.replace(/[\r\n]+$/, "")
.split(/\r?\n/)
@@ -89,22 +88,22 @@ const parseHeaders = (buf) => {
}
}

rows = rows.map((row) => {
row = row.join("\r\n");
let key = row.match(/^[^:]+/);
const mappedRows: { key: string | null; casedKey: string | undefined; line: Buffer }[] = rows.map((row) => {
const str = row.join("\r\n");
let key: RegExpMatchArray | string | null = str.match(/^[^:]+/);
let casedKey;
if (key) {
casedKey = key[0].trim();
key = casedKey.toLowerCase();
}

return { key, casedKey, line: Buffer.from(row, "binary") };
return { key, casedKey, line: Buffer.from(str, "binary") };
});

return { parsed: rows, original: buf };
return { parsed: mappedRows, original: buf };
};

const getSigningHeaderLines = (parsedHeaders, fieldNames, verify) => {
export const getSigningHeaderLines = (parsedHeaders: Parsed[], fieldNames: string | string[], verify: boolean) => {
fieldNames = (typeof fieldNames === "string" ? fieldNames : defaultDKIMFieldNames)
.split(":")
.map((key) => key.trim().toLowerCase())
@@ -113,7 +112,7 @@ const getSigningHeaderLines = (parsedHeaders, fieldNames, verify) => {
let signingList = [];

if (verify) {
let parsedList = [].concat(parsedHeaders);
let parsedList = ([] as Parsed[]).concat(parsedHeaders);
for (let fieldName of fieldNames) {
for (let i = parsedList.length - 1; i >= 0; i--) {
let header = parsedList[i];
@@ -127,7 +126,7 @@ const getSigningHeaderLines = (parsedHeaders, fieldNames, verify) => {
} else {
for (let i = parsedHeaders.length - 1; i >= 0; i--) {
let header = parsedHeaders[i];
if (fieldNames.includes(header.key)) {
if (fieldNames.includes(header.key ?? '')) {
signingList.push(header);
}
}
@@ -143,10 +142,10 @@ const getSigningHeaderLines = (parsedHeaders, fieldNames, verify) => {
* Generates `DKIM-Signature: ...` header for selected values
* @param {Object} values
*/
const formatSignatureHeaderLine = (type, values, folded) => {
type = (type || "").toString().toUpperCase();
export const formatSignatureHeaderLine = (type: SignatureType, values: Record<string, string | boolean>, folded: boolean): string => {
type = (type ?? "").toString().toUpperCase() as SignatureType;

let keyOrdering, headerKey;
let keyOrdering: string[], headerKey: string;
switch (type) {
case "DKIM":
headerKey = "DKIM-Signature";
@@ -162,8 +161,6 @@ const formatSignatureHeaderLine = (type, values, folded) => {
break;

case "ARC":
throw Error("err");

case "AS":
throw Error("err");

@@ -177,13 +174,13 @@ const formatSignatureHeaderLine = (type, values, folded) => {
.filter((key) => values[key] !== false && typeof values[key] !== "undefined" && values.key !== null && keyOrdering.includes(key))
.sort((a, b) => keyOrdering.indexOf(a) - keyOrdering.indexOf(b))
.map((key) => {
let val = values[key] || "";
let val = values[key] ?? "";
if (key === "b" && folded && val) {
// fold signature value
return `${key}=${val}`.replace(/.{75}/g, "$& ").trim();
}

if (["d", "s"].includes(key)) {
if (["d", "s"].includes(key) && typeof val === 'string') {
try {
// convert to A-label if needed
val = punycode.toASCII(val);
@@ -192,7 +189,7 @@ const formatSignatureHeaderLine = (type, values, folded) => {
}
}

if (key === "i" && type === "DKIM") {
if (key === "i" && type === "DKIM" && typeof val === 'string') {
let atPos = val.indexOf("@");
if (atPos >= 0) {
let domainPart = val.substr(atPos + 1);
@@ -217,7 +214,7 @@ const formatSignatureHeaderLine = (type, values, folded) => {
return header;
};

async function resolveDNSHTTP(name, type) {
async function resolveDNSHTTP(name: string, type: string) {
const resp = await fetch(
"https://dns.google/resolve?" +
new URLSearchParams({
@@ -231,7 +228,7 @@ async function resolveDNSHTTP(name, type) {
}

// from https://developers.google.com/web/updates/2012/06/How-to-convert-ArrayBuffer-to-and-from-String
function str2ab(str) {
function str2ab(str: string) {
const buf = new ArrayBuffer(str.length);
const bufView = new Uint8Array(buf);
for (let i = 0, strLen = str.length; i < strLen; i++) {
@@ -240,7 +237,7 @@ function str2ab(str) {
return buf;
}

function importRsaKey(pem) {
function importRsaKey(pem: string) {
// fetch the part of the PEM string between header and footer
const pemHeader = "-----BEGIN PUBLIC KEY-----";
const pemFooter = "-----END PUBLIC KEY-----";
@@ -262,7 +259,7 @@ function importRsaKey(pem) {
);
}

const getPublicKey = async (type, name, minBitLength, resolver) => {
export const getPublicKey = async (type: string, name: string, minBitLength: number, resolver: (...args: [name: string, type: string]) => Promise<any>) => {
minBitLength = minBitLength || 1024;
if (LOCAL) {
resolver = resolver || dns.resolve;
@@ -288,15 +285,13 @@ const getPublicKey = async (type, name, minBitLength, resolver) => {
//'v=DKIM1;p=MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAwe34ubzrMzM9sT0XVkcc3UXd7W+EHCyHoqn70l2AxXox52lAZzH/UnKwAoO+5qsuP7T9QOifIJ9ddNH9lEQ95Y/GdHBsPLGdgSJIs95mXNxscD6MSyejpenMGL9TPQAcxfqY5xPViZ+1wA1qcryjdZKRqf1f4fpMY+x3b8k7H5Qyf/Smz0sv4xFsx1r+THNIz0rzk2LO3GvE0f1ybp6P+5eAelYU4mGeZQqsKw/eB20I3jHWEyGrXuvzB67nt6ddI+N2eD5K38wg/aSytOsb5O+bUSEe7P0zx9ebRRVknCD6uuqG3gSmQmttlD5OrMWSXzrPIXe8eTBaaPd+e/jfxwIDAQAB'
// v=DKIM1;p=MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAwe34ubzrMzM9sT0XVkcc3UXd7W+EHCyHoqn70l2AxXox52lAZzH/UnKwAoO+5qsuP7T9QOifIJ9ddNH9lEQ95Y/GdHBsPLGdgSJIs95mXNxscD6MSyejpenMGL9TPQAcxfqY5xPViZ+1wA1qcr""yjdZKRqf1f4fpMY+x3b8k7H5Qyf/Smz0sv4xFsx1r+THNIz0rzk2LO3GvE0f1ybp6P+5eAelYU4mGeZQqsKw/eB20I3jHWEyGrXuvzB67nt6ddI+N2eD5K38wg/aSytOsb5O+bUSEe7P0zx9ebRRVknCD6uuqG3gSmQmttlD5OrMWSXzrPIXe8eTBaaPd+e/jfxwIDAQAB
if (!publicKeyValue) {
let err = new Error("Missing key value");
err.code = "EINVALIDVAL";
err.rr = rr;
const err = new CustomError("Missing key value", "EINVALIDVAL", rr);
throw err;
}

/*let validation = base64Schema.validate(publicKeyValue);
if (validation.error) {
let err = new Error('Invalid base64 format for public key');
throw new Error('Invalid base64 format for public key');
err.code = 'EINVALIDVAL';
err.rr = rr;
err.details = validation.error;
@@ -304,9 +299,7 @@ const getPublicKey = async (type, name, minBitLength, resolver) => {
}*/

if (type === "DKIM" && entry?.parsed?.v && (entry?.parsed?.v?.value || "").toString().toLowerCase().trim() !== "dkim1") {
let err = new Error("Unknown key version");
err.code = "EINVALIDVER";
err.rr = rr;
const err = new CustomError("Unknown key version", "EINVALIDVER", rr);
throw err;
}

@@ -325,21 +318,18 @@ const getPublicKey = async (type, name, minBitLength, resolver) => {

let keyType;
if (LOCAL) {
keyType = publicKeyObj.asymmetricKeyType;
keyType = (publicKeyObj as KeyObject).asymmetricKeyType;
} else {
keyType = publicKeyObj.algorithm.name.split("-")[0].toLowerCase();
keyType = (publicKeyObj as CryptoKey).algorithm.name.split("-")[0].toLowerCase();
}

if (!["rsa", "ed25519"].includes(keyType) || (entry?.parsed?.k && entry?.parsed?.k?.value?.toLowerCase() !== keyType)) {
let err = new Error("Unknown key type (${keyType})");
err.code = "EINVALIDTYPE";
err.rr = rr;
throw err;
if (!["rsa", "ed25519"].includes(keyType ?? '') || (entry?.parsed?.k && entry?.parsed?.k?.value?.toLowerCase() !== keyType)) {
throw new CustomError("Unknown key type (${keyType})", "EINVALIDTYPE", rr);
}

let modulusLength;
if (publicKeyObj.algorithm) {
modulusLength = publicKeyObj.algorithm.modulusLength;
if ((publicKeyObj as CryptoKey).algorithm) {
modulusLength = (publicKeyObj as CryptoKey & { algorithm: { modulusLength: number } }).algorithm?.modulusLength;
} else {
// fall back to node-forge
const pubKeyData = pki.publicKeyFromPem(publicKeyPem.toString());
@@ -348,10 +338,7 @@ const getPublicKey = async (type, name, minBitLength, resolver) => {
}

if (keyType === "rsa" && modulusLength < 1024) {
let err = new Error("RSA key too short");
err.code = "ESHORTKEY";
err.rr = rr;
throw err;
throw new CustomError("RSA key too short", "ESHORTKEY", rr);
}

return {
@@ -361,12 +348,10 @@ const getPublicKey = async (type, name, minBitLength, resolver) => {
};
}

let err = new Error("Missing key value");
err.code = "EINVALIDVAL";
throw err;
throw new CustomError("Missing key value", "EINVALIDVAL", rr);
};

const escapePropValue = (value) => {
export const escapePropValue = (value: string) => {
value = (value || "")
.toString()
.replace(/[\x00-\x1F]+/g, " ")
@@ -382,7 +367,7 @@ const escapePropValue = (value) => {
return `"${value.replace(/["\\]/g, (c) => `\\${c}`)}"`;
};

const escapeCommentValue = (value) => {
export const escapeCommentValue = (value: string) => {
value = (value || "")
.toString()
.replace(/[\x00-\x1F]+/g, " ")
@@ -392,7 +377,7 @@ const escapeCommentValue = (value) => {
return `${value.replace(/[\\)]/g, (c) => `\\${c}`)}`;
};

const formatAuthHeaderRow = (method, status) => {
export const formatAuthHeaderRow = (method: string, status: Record<string, any>) => {
status = status || {};
let parts = [];

@@ -417,7 +402,7 @@ const formatAuthHeaderRow = (method, status) => {
return parts.join(" ");
};

const formatRelaxedLine = (line, suffix) => {
export const formatRelaxedLine = (line: Buffer | string, suffix?: string) => {
let result =
line
?.toString("binary")
@@ -432,7 +417,7 @@ const formatRelaxedLine = (line, suffix) => {
return Buffer.from(result, "binary");
};

const formatDomain = (domain) => {
export const formatDomain = (domain: string) => {
domain = domain.toLowerCase().trim();
try {
domain = punycode.toASCII(domain).toLowerCase().trim();
@@ -442,8 +427,8 @@ const formatDomain = (domain) => {
return domain;
};

const getAlignment = (fromDomain, domainList, strict) => {
domainList = [].concat(domainList || []);
export const getAlignment = (fromDomain: string, domainList: string[], strict: boolean = false) => {
domainList = ([] as string[]).concat(domainList || []);
if (strict) {
fromDomain = formatDomain(fromDomain);
for (let domain of domainList) {
@@ -466,7 +451,7 @@ const getAlignment = (fromDomain, domainList, strict) => {
return false;
};

const validateAlgorithm = (algorithm, strict) => {
export const validateAlgorithm = (algorithm: string, strict: boolean) => {
try {
if (!algorithm || !/^[^-]+-[^-]+$/.test(algorithm)) {
throw new Error("Invalid algorithm format");
@@ -481,29 +466,22 @@ const validateAlgorithm = (algorithm, strict) => {
if (!["sha256"].concat(!strict ? "sha1" : []).includes(hashAlgo)) {
throw new Error("Unknown hashing algorithm: " + hashAlgo);
}
} catch (err) {
err.code = "EINVALIDALGO";
} catch (err: unknown) {
if (err !== null && typeof err === 'object' && Object.hasOwn(err, 'code')) {
(err as { code: string }).code = "EINVALIDALGO";
}
throw err;
}
};

export {
writeToStream,
parseHeaders,
export class CustomError extends Error {
code: string;
rr: string;
constructor(message: string, code: string, rr?: string) {
super(message);
this.code = code;
this.rr = rr ?? '';
}
}

defaultDKIMFieldNames,

getSigningHeaderLines,
formatSignatureHeaderLine,
parseDkimHeaders,
getPublicKey,
formatAuthHeaderRow,
escapeCommentValue,

validateAlgorithm,

getAlignment,

formatRelaxedLine,
formatDomain,
};
export { parseDkimHeaders }
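Editor's note: a hedged sketch of the psl-based alignment check that DkimVerifier uses for status.aligned (illustration, not part of the commit); strict defaults to false per the new signature above.

    // Sketch: relaxed alignment compares organizational domains derived via psl.
    import { getAlignment } from "./tools";

    console.log(getAlignment("newsletter.example.com", ["example.com"])); // aligned -> truthy (the matching organizational domain)
    console.log(getAlignment("example.com", ["unrelated.org"]));          // not aligned -> false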
yarn.lock (34 additions)
@@ -3651,6 +3651,15 @@ __metadata:
languageName: unknown
linkType: soft

"@types/addressparser@npm:^1.0.3":
version: 1.0.3
resolution: "@types/addressparser@npm:1.0.3"
dependencies:
"@types/node": "*"
checksum: 7e7b6d3dfa8133b878c92a7cc1a654244a22a9f5848254c3531c0897e8e55863af194120dc247b811ce90df771903056048b1ecbdaee1c866b2c9b43bd41ead2
languageName: node
linkType: hard

"@types/aria-query@npm:^5.0.1":
version: 5.0.1
resolution: "@types/aria-query@npm:5.0.1"
@@ -3840,6 +3849,15 @@ __metadata:
languageName: node
linkType: hard

"@types/libmime@npm:^5.0.3":
version: 5.0.3
resolution: "@types/libmime@npm:5.0.3"
dependencies:
"@types/node": "*"
checksum: 9fcda9aa901927d78410721fd7222366fe547a4df29d9d3b4f5151ccb6cda36e9ba7886488e415cab485790d30a08b19d94b78039d65057b9ddf024f34850bd9
languageName: node
linkType: hard

"@types/lodash@npm:^4.14.181":
version: 4.14.195
resolution: "@types/lodash@npm:4.14.195"
@@ -3912,6 +3930,13 @@ __metadata:
languageName: node
linkType: hard

"@types/psl@npm:^1.1.2":
version: 1.1.2
resolution: "@types/psl@npm:1.1.2"
checksum: fc0a7ae56ca53157035226d964f5a37749187804c07787d25a3f8e0235130c277b52d027139d1a7058d7826014a8019d68d46e2719b0404ac8545d39d41fc43a
languageName: node
linkType: hard

"@types/react-dom@npm:<18.0.0":
version: 17.0.20
resolution: "@types/react-dom@npm:17.0.20"
@@ -4750,6 +4775,14 @@ __metadata:
version: 0.0.0-use.local
resolution: "@zk-email/helpers@workspace:packages/helpers"
dependencies:
"@types/addressparser": ^1.0.3
"@types/atob": ^2.1.2
"@types/jest": ^29.5.1
"@types/lodash": ^4.14.181
"@types/mocha": ^10.0.1
"@types/node": ^18.0.6
"@types/node-forge": ^1.3.2
"@types/psl": ^1.1.2
addressparser: ^1.0.1
atob: ^2.1.2
circomlibjs: ^0.1.7
@@ -14313,6 +14346,7 @@ __metadata:
"@babel/preset-env": ^7.22.2
"@babel/preset-react": ^7.22.0
"@babel/preset-typescript": ^7.21.5
"@types/libmime": ^5.0.3
babel-jest: ^29.5.0
babel-preset-jest: ^29.5.0
dotenv: ^16.3.1