Initial Commit
nodered/rootfs/data/node_modules/mailsplit/lib/flowed-decoder.js (new normal file, generated, vendored, 55 lines)
@@ -0,0 +1,55 @@
'use strict';

// Helper class to rewrite nodes with specific mime type

const Transform = require('stream').Transform;
const libmime = require('libmime');

/**
 * Really bad "stream" transform to parse format=flowed content
 *
 * @constructor
 * @param {Object} config Decoder options ({encoding, delSp, Iconv})
 */
class FlowedDecoder extends Transform {
    constructor(config) {
        super();
        this.config = config || {};

        this.chunks = [];
        this.chunklen = 0;

        this.libmime = new libmime.Libmime({ Iconv: config.Iconv });
    }

    _transform(chunk, encoding, callback) {
        if (!chunk || !chunk.length) {
            return callback();
        }

        if (encoding !== 'buffer') {
            chunk = Buffer.from(chunk, encoding);
        }

        this.chunks.push(chunk);
        this.chunklen += chunk.length;

        callback();
    }

    _flush(callback) {
        if (this.chunklen) {
            let currentBody = Buffer.concat(this.chunks, this.chunklen);

            if (this.config.encoding === 'base64') {
                currentBody = Buffer.from(currentBody.toString('binary'), 'base64');
            }

            let content = this.libmime.decodeFlowed(currentBody.toString('binary'), this.config.delSp);
            this.push(Buffer.from(content, 'binary'));
        }
        return callback();
    }
}

module.exports = FlowedDecoder;
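For orientation, a minimal usage sketch of FlowedDecoder as a regular stream transform; the require path, options and the sample input are illustrative assumptions, not part of the commit:

const FlowedDecoder = require('mailsplit/lib/flowed-decoder');

// lines ending in a space are format=flowed soft breaks and get joined on decode
let decoder = new FlowedDecoder({ delSp: false });
let chunks = [];
decoder.on('data', chunk => chunks.push(chunk));
decoder.on('end', () => process.stdout.write(Buffer.concat(chunks)));

decoder.end(Buffer.from('This line is soft \r\nwrapped with format=flowed\r\n'));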
nodered/rootfs/data/node_modules/mailsplit/lib/headers.js (new normal file, generated, vendored, 206 lines)
@@ -0,0 +1,206 @@
'use strict';

const libmime = require('libmime');

/**
 * Class Headers to parse and handle message headers. A Headers instance allows
 * checking for existing headers, deleting them or adding new ones
 */
class Headers {
    constructor(headers, config) {
        config = config || {};

        if (Array.isArray(headers)) {
            // already using parsed headers
            this.changed = true;
            this.headers = false;
            this.parsed = true;
            this.lines = headers;
        } else {
            // using original string/buffer headers
            this.changed = false;
            this.headers = headers;
            this.parsed = false;
            this.lines = false;
        }
        this.mbox = false;
        this.http = false;

        this.libmime = new libmime.Libmime({ Iconv: config.Iconv });
    }

    get(key) {
        if (!this.parsed) {
            this._parseHeaders();
        }
        key = this._normalizeHeader(key);
        let lines = this.lines.filter(line => line.key === key).map(line => line.line);

        return lines;
    }

    getDecoded(key) {
        return this.get(key)
            .map(line => this.libmime.decodeHeader(line))
            .filter(line => line && line.value);
    }

    getFirst(key) {
        if (!this.parsed) {
            this._parseHeaders();
        }
        key = this._normalizeHeader(key);
        let header = this.lines.find(line => line.key === key);
        if (!header) {
            return '';
        }
        return ((this.libmime.decodeHeader(header.line) || {}).value || '').toString().trim();
    }

    getList() {
        if (!this.parsed) {
            this._parseHeaders();
        }
        return this.lines;
    }

    add(key, value, index) {
        if (typeof value === 'undefined') {
            return;
        }

        if (typeof value === 'number') {
            value = value.toString();
        }

        if (typeof value === 'string') {
            value = Buffer.from(value);
        }

        value = value.toString('binary');
        this.addFormatted(key, this.libmime.foldLines(key + ': ' + value.replace(/\r?\n/g, ''), 76, false), index);
    }

    addFormatted(key, line, index) {
        if (!this.parsed) {
            this._parseHeaders();
        }
        index = index || 0;
        this.changed = true;
        let header = {
            key: this._normalizeHeader(key),
            line
        };

        if (index < 1) {
            this.lines.unshift(header);
        } else if (index >= this.lines.length) {
            this.lines.push(header);
        } else {
            this.lines.splice(index, 0, header);
        }
    }

    remove(key) {
        if (!this.parsed) {
            this._parseHeaders();
        }
        key = this._normalizeHeader(key);
        for (let i = this.lines.length - 1; i >= 0; i--) {
            if (this.lines[i].key === key) {
                this.changed = true;
                this.lines.splice(i, 1);
            }
        }
    }

    update(key, value) {
        if (!this.parsed) {
            this._parseHeaders();
        }
        let keyName = key;
        let index = 0;
        key = this._normalizeHeader(key);
        for (let i = this.lines.length - 1; i >= 0; i--) {
            if (this.lines[i].key === key) {
                index = i;
                this.changed = true;
                this.lines.splice(i, 1);
            }
        }
        this.add(keyName, value, index);
    }

    build(lineEnd) {
        if (!this.changed && !lineEnd) {
            return typeof this.headers === 'string' ? Buffer.from(this.headers, 'binary') : this.headers;
        }

        if (!this.parsed) {
            this._parseHeaders();
        }

        lineEnd = lineEnd || '\r\n';

        let headers = this.lines.map(line => line.line.replace(/\r?\n/g, lineEnd)).join(lineEnd) + `${lineEnd}${lineEnd}`;

        if (this.mbox) {
            headers = this.mbox + lineEnd + headers;
        }

        if (this.http) {
            headers = this.http + lineEnd + headers;
        }

        return Buffer.from(headers, 'binary');
    }

    _normalizeHeader(key) {
        return (key || '').toLowerCase().trim();
    }

    _parseHeaders() {
        if (!this.headers) {
            this.lines = [];
            this.parsed = true;
            return;
        }

        let lines = this.headers
            .toString('binary')
            .replace(/[\r\n]+$/, '')
            .split(/\r?\n/);

        for (let i = lines.length - 1; i >= 0; i--) {
            let chr = lines[i].charAt(0);
            if (i && (chr === ' ' || chr === '\t')) {
                lines[i - 1] += '\r\n' + lines[i];
                lines.splice(i, 1);
            } else {
                let line = lines[i];
                if (!i && /^From /i.test(line)) {
                    // mbox file
                    this.mbox = line;
                    lines.splice(i, 1);
                    continue;
                } else if (!i && /^POST /i.test(line)) {
                    // HTTP POST request
                    this.http = line;
                    lines.splice(i, 1);
                    continue;
                }
                let key = this._normalizeHeader(line.substr(0, line.indexOf(':')));
                lines[i] = {
                    key,
                    line
                };
            }
        }

        this.lines = lines;
        this.parsed = true;
    }
}

// expose to the world
module.exports = Headers;
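A small sketch of how the Headers class above is typically driven; the raw header block is made up for illustration:

const Headers = require('mailsplit/lib/headers');

let headers = new Headers(Buffer.from('Subject: Hello\r\nMIME-Version: 1.0\r\n\r\n'));

console.log(headers.getFirst('subject')); // 'Hello' (keys are case-insensitive)
headers.update('Subject', 'Hello again'); // replaces the existing Subject line
headers.add('X-Processed', 'yes');        // inserted at the top by default (index 0)
process.stdout.write(headers.build());    // serialized header block as a Buffer, ending with a blank line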
nodered/rootfs/data/node_modules/mailsplit/lib/message-joiner.js (new normal file, generated, vendored, 30 lines)
@@ -0,0 +1,30 @@
'use strict';

const Transform = require('stream').Transform;

class MessageJoiner extends Transform {
    constructor() {
        let options = {
            readableObjectMode: false,
            writableObjectMode: true
        };
        super(options);
    }

    _transform(obj, encoding, callback) {
        if (Buffer.isBuffer(obj)) {
            this.push(obj);
        } else if (obj.type === 'node') {
            this.push(obj.getHeaders());
        } else if (obj.value) {
            this.push(obj.value);
        }
        return callback();
    }

    _flush(callback) {
        return callback();
    }
}

module.exports = MessageJoiner;
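MessageJoiner serializes the object stream produced by the splitter back into raw message bytes: node objects contribute their (possibly edited) headers, body/data entries contribute their raw value. A tiny hand-fed sketch with illustrative input:

const MessageJoiner = require('mailsplit/lib/message-joiner');

let joiner = new MessageJoiner();
joiner.pipe(process.stdout);

joiner.write(Buffer.from('X-Raw: passthrough\r\n\r\n')); // raw Buffers pass straight through
joiner.write({ value: Buffer.from('body bytes\r\n') });  // objects contribute their value
joiner.end();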
nodered/rootfs/data/node_modules/mailsplit/lib/message-splitter.js (new normal file, generated, vendored, 422 lines)
@@ -0,0 +1,422 @@
'use strict';

const Transform = require('stream').Transform;
const MimeNode = require('./mime-node');

const MAX_HEAD_SIZE = 1 * 1024 * 1024;
const MAX_CHILD_NODES = 1000;

const HEAD = 0x01;
const BODY = 0x02;

class MessageSplitter extends Transform {
    constructor(config) {
        let options = {
            readableObjectMode: true,
            writableObjectMode: false
        };
        super(options);

        this.config = config || {};
        this.maxHeadSize = this.config.maxHeadSize || MAX_HEAD_SIZE;
        this.maxChildNodes = this.config.maxChildNodes || MAX_CHILD_NODES;
        this.tree = [];
        this.nodeCounter = 0;
        this.newNode();
        this.tree.push(this.node);
        this.line = false;
        this.errored = false;
    }

    _transform(chunk, encoding, callback) {
        // process line by line
        // find next line ending
        let pos = 0;
        let i = 0;
        let group = {
            type: 'none'
        };
        let groupstart = this.line ? -this.line.length : 0;
        let groupend = 0;

        let checkTrailingLinebreak = data => {
            if (data.type === 'body' && data.node.parentNode && data.value && data.value.length) {
                if (data.value[data.value.length - 1] === 0x0a) {
                    groupstart--;
                    groupend--;
                    pos--;
                    if (data.value.length > 1 && data.value[data.value.length - 2] === 0x0d) {
                        groupstart--;
                        groupend--;
                        pos--;
                        if (groupstart < 0 && !this.line) {
                            // store only <CR> as <LF> should be on the positive side
                            this.line = Buffer.allocUnsafe(1);
                            this.line[0] = 0x0d;
                        }
                        data.value = data.value.slice(0, data.value.length - 2);
                    } else {
                        data.value = data.value.slice(0, data.value.length - 1);
                    }
                } else if (data.value[data.value.length - 1] === 0x0d) {
                    groupstart--;
                    groupend--;
                    pos--;
                    data.value = data.value.slice(0, data.value.length - 1);
                }
            }
        };

        let iterateData = () => {
            for (let len = chunk.length; i < len; i++) {
                // find next <LF>
                if (chunk[i] === 0x0a) {
                    // line end

                    let start = Math.max(pos, 0);
                    pos = ++i;

                    return this.processLine(chunk.slice(start, i), false, (err, data, flush) => {
                        if (err) {
                            this.errored = true;
                            return setImmediate(() => callback(err));
                        }

                        if (!data) {
                            return setImmediate(iterateData);
                        }

                        if (flush) {
                            if (group && group.type !== 'none') {
                                if (group.type === 'body' && groupend >= groupstart && group.node.parentNode) {
                                    // do not include the last line ending for body
                                    if (chunk[groupend - 1] === 0x0a) {
                                        groupend--;
                                        if (groupend >= groupstart && chunk[groupend - 1] === 0x0d) {
                                            groupend--;
                                        }
                                    }
                                }
                                if (groupstart !== groupend) {
                                    group.value = chunk.slice(groupstart, groupend);
                                    if (groupend < i) {
                                        data.value = chunk.slice(groupend, i);
                                    }
                                }
                                this.push(group);
                                group = {
                                    type: 'none'
                                };
                                groupstart = groupend = i;
                            }
                            this.push(data);
                            groupend = i;
                            return setImmediate(iterateData);
                        }

                        if (data.type === group.type) {
                            // shift slice end position forward
                            groupend = i;
                        } else {
                            if (group.type === 'body' && groupend >= groupstart && group.node.parentNode) {
                                // do not include the last line ending for body
                                if (chunk[groupend - 1] === 0x0a) {
                                    groupend--;
                                    if (groupend >= groupstart && chunk[groupend - 1] === 0x0d) {
                                        groupend--;
                                    }
                                }
                            }

                            if (group.type !== 'none' && group.type !== 'node') {
                                // we have a previous data/body chunk to output
                                if (groupstart !== groupend) {
                                    group.value = chunk.slice(groupstart, groupend);
                                    if (group.value && group.value.length) {
                                        this.push(group);
                                        group = {
                                            type: 'none'
                                        };
                                    }
                                }
                            }

                            if (data.type === 'node') {
                                this.push(data);
                                groupstart = i;
                                groupend = i;
                            } else if (groupstart < 0) {
                                groupstart = i;
                                groupend = i;
                                checkTrailingLinebreak(data);
                                if (data.value && data.value.length) {
                                    this.push(data);
                                }
                            } else {
                                // start new body/data chunk
                                group = data;
                                groupstart = groupend;
                                groupend = i;
                            }
                        }
                        return setImmediate(iterateData);
                    });
                }
            }

            // skip last linebreak for body
            if (pos >= groupstart + 1 && group.type === 'body' && group.node.parentNode) {
                // do not include the last line ending for body
                if (chunk[pos - 1] === 0x0a) {
                    pos--;
                    if (pos >= groupstart && chunk[pos - 1] === 0x0d) {
                        pos--;
                    }
                }
            }

            if (group.type !== 'none' && group.type !== 'node' && pos > groupstart) {
                // we have a leftover data/body chunk to push out
                group.value = chunk.slice(groupstart, pos);

                if (group.value && group.value.length) {
                    this.push(group);
                    group = {
                        type: 'none'
                    };
                }
            }

            if (pos < chunk.length) {
                if (this.line) {
                    this.line = Buffer.concat([this.line, chunk.slice(pos)]);
                } else {
                    this.line = chunk.slice(pos);
                }
            }
            callback();
        };

        setImmediate(iterateData);
    }

    _flush(callback) {
        if (this.errored) {
            return callback();
        }
        this.processLine(false, true, (err, data) => {
            if (err) {
                return setImmediate(() => callback(err));
            }
            if (data && (data.type === 'node' || (data.value && data.value.length))) {
                this.push(data);
            }
            callback();
        });
    }

    compareBoundary(line, startpos, boundary) {
        // --{boundary}\r\n or --{boundary}--\r\n
        if (line.length < boundary.length + 3 + startpos || line.length > boundary.length + 6 + startpos) {
            return false;
        }
        for (let i = 0; i < boundary.length; i++) {
            if (line[i + 2 + startpos] !== boundary[i]) {
                return false;
            }
        }

        let pos = 0;
        for (let i = boundary.length + 2 + startpos; i < line.length; i++) {
            let c = line[i];
            if (pos === 0 && (c === 0x0d || c === 0x0a)) {
                // 1: next node
                return 1;
            }
            if (pos === 0 && c !== 0x2d) {
                // expecting "-"
                return false;
            }
            if (pos === 1 && c !== 0x2d) {
                // expecting "-"
                return false;
            }
            if (pos === 2 && c !== 0x0d && c !== 0x0a) {
                // expecting line terminator, either <CR> or <LF>
                return false;
            }
            if (pos === 3 && c !== 0x0a) {
                // expecting line terminator <LF>
                return false;
            }
            pos++;
        }

        // 2: multipart end
        return 2;
    }

    checkBoundary(line) {
        let startpos = 0;
        if (line.length >= 1 && (line[0] === 0x0d || line[0] === 0x0a)) {
            startpos++;
            if (line.length >= 2 && (line[0] === 0x0d || line[1] === 0x0a)) {
                startpos++;
            }
        }
        if (line.length < 4 || line[startpos] !== 0x2d || line[startpos + 1] !== 0x2d) {
            // definitely not a boundary
            return false;
        }

        let boundary;
        if (this.node._boundary && (boundary = this.compareBoundary(line, startpos, this.node._boundary))) {
            // 1: next child
            // 2: multipart end
            return boundary;
        }

        if (this.node._parentBoundary && (boundary = this.compareBoundary(line, startpos, this.node._parentBoundary))) {
            // 3: next sibling
            // 4: parent end
            return boundary + 2;
        }

        return false;
    }

    processLine(line, final, next) {
        let flush = false;

        if (this.line && line) {
            line = Buffer.concat([this.line, line]);
            this.line = false;
        } else if (this.line && !line) {
            line = this.line;
            this.line = false;
        }

        if (!line) {
            line = Buffer.alloc(0);
        }

        if (this.nodeCounter > this.maxChildNodes) {
            let err = new Error('Max allowed child nodes exceeded');
            err.code = 'EMAXLEN';
            return next(err);
        }

        // we check boundary outside the HEAD/BODY scope as it may appear anywhere
        let boundary = this.checkBoundary(line);
        if (boundary) {
            // reached boundary, switch context
            switch (boundary) {
                case 1:
                    // next child
                    this.newNode(this.node);
                    flush = true;
                    break;
                case 2:
                    // reached end of children, keep current node
                    break;
                case 3: {
                    // next sibling
                    let parentNode = this.node.parentNode;
                    if (parentNode && parentNode.contentType === 'message/rfc822') {
                        // special case where immediate parent is an inline message block
                        // move up another step
                        parentNode = parentNode.parentNode;
                    }
                    this.newNode(parentNode);
                    flush = true;
                    break;
                }
                case 4:
                    // special case when a boundary closes a node that has only a header
                    if (this.node && this.node._headerlen && !this.node.headers) {
                        this.node.parseHeaders();
                        this.push(this.node);
                    }
                    // move up
                    if (this.tree.length) {
                        this.node = this.tree.pop();
                    }
                    this.state = BODY;
                    break;
            }

            return next(
                null,
                {
                    node: this.node,
                    type: 'data',
                    value: line
                },
                flush
            );
        }

        switch (this.state) {
            case HEAD: {
                this.node.addHeaderChunk(line);
                if (this.node._headerlen > this.maxHeadSize) {
                    let err = new Error('Max header size for a MIME node exceeded');
                    err.code = 'EMAXLEN';
                    return next(err);
                }
                if (final || (line.length === 1 && line[0] === 0x0a) || (line.length === 2 && line[0] === 0x0d && line[1] === 0x0a)) {
                    let currentNode = this.node;

                    currentNode.parseHeaders();

                    // if the content is an attached message then just continue
                    if (
                        currentNode.contentType === 'message/rfc822' &&
                        !this.config.ignoreEmbedded &&
                        (!currentNode.encoding || ['7bit', '8bit', 'binary'].includes(currentNode.encoding)) &&
                        currentNode.disposition !== 'attachment'
                    ) {
                        currentNode.messageNode = true;
                        this.newNode(currentNode);
                        if (currentNode.parentNode) {
                            this.node._parentBoundary = currentNode.parentNode._boundary;
                        }
                    } else {
                        if (currentNode.contentType === 'message/rfc822') {
                            currentNode.messageNode = false;
                        }
                        this.state = BODY;
                        if (currentNode.multipart && currentNode._boundary) {
                            this.tree.push(currentNode);
                        }
                    }

                    return next(null, currentNode, flush);
                }

                return next();
            }
            case BODY: {
                return next(
                    null,
                    {
                        node: this.node,
                        type: this.node.multipart ? 'data' : 'body',
                        value: line
                    },
                    flush
                );
            }
        }

        next(null, false);
    }

    newNode(parent) {
        this.node = new MimeNode(parent || false, this.config);
        this.state = HEAD;
        this.nodeCounter++;
    }
}

module.exports = MessageSplitter;
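A round-trip sketch tying the splitter and joiner together; 'message.eml' and 'copy.eml' are placeholder paths, and the object shapes ('node', 'data', 'body') follow the code above:

const fs = require('fs');
const MessageSplitter = require('mailsplit/lib/message-splitter');
const MessageJoiner = require('mailsplit/lib/message-joiner');

let splitter = new MessageSplitter();
let joiner = new MessageJoiner();

// every MIME node is emitted once as a 'node' object, followed by its 'body'
// chunks (or 'data' chunks for multipart filler lines)
splitter.on('data', obj => {
    if (obj.type === 'node') {
        console.log('node:', obj.contentType || 'text/plain', obj.filename || '');
    }
});

// piping splitter output straight into the joiner reproduces the original bytes
fs.createReadStream('message.eml').pipe(splitter).pipe(joiner).pipe(fs.createWriteStream('copy.eml'));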
nodered/rootfs/data/node_modules/mailsplit/lib/mime-node.js (new normal file, generated, vendored, 235 lines)
@@ -0,0 +1,235 @@
'use strict';

const Headers = require('./headers');
const libmime = require('libmime');
const libqp = require('libqp');
const libbase64 = require('libbase64');
const PassThrough = require('stream').PassThrough;

class MimeNode {
    constructor(parentNode, config) {
        this.type = 'node';
        this.root = !parentNode;
        this.parentNode = parentNode;

        this._parentBoundary = this.parentNode && this.parentNode._boundary;
        this._headersLines = [];
        this._headerlen = 0;

        this._parsedContentType = false;
        this._boundary = false;

        this.multipart = false;
        this.encoding = false;
        this.headers = false;
        this.contentType = false;
        this.flowed = false;
        this.delSp = false;

        this.config = config || {};
        this.libmime = new libmime.Libmime({ Iconv: this.config.Iconv });

        this.parentPartNumber = (parentNode && this.partNr) || [];
        this.partNr = false; // resolved later
        this.childPartNumbers = 0;
    }

    getPartNr(provided) {
        if (provided) {
            return []
                .concat(this.partNr || [])
                .filter(nr => !isNaN(nr))
                .concat(provided);
        }
        let childPartNr = ++this.childPartNumbers;
        return []
            .concat(this.partNr || [])
            .filter(nr => !isNaN(nr))
            .concat(childPartNr);
    }

    addHeaderChunk(line) {
        if (!line) {
            return;
        }
        this._headersLines.push(line);
        this._headerlen += line.length;
    }

    parseHeaders() {
        if (this.headers) {
            return;
        }
        this.headers = new Headers(Buffer.concat(this._headersLines, this._headerlen), this.config);
        this._parsedContentType = this.libmime.parseHeaderValue(this.headers.getFirst('Content-Type'));
        this._parsedContentDisposition = this.libmime.parseHeaderValue(this.headers.getFirst('Content-Disposition'));

        this.encoding = this.headers
            .getFirst('Content-Transfer-Encoding')
            .replace(/\(.*\)/g, '')
            .toLowerCase()
            .trim();
        this.contentType = (this._parsedContentType.value || '').toLowerCase().trim() || false;
        this.charset = this._parsedContentType.params.charset || false;
        this.disposition = (this._parsedContentDisposition.value || '').toLowerCase().trim() || false;
        this.filename = this._parsedContentDisposition.params.filename || this._parsedContentType.params.name || false;

        if (this._parsedContentType.params.format && this._parsedContentType.params.format.toLowerCase().trim() === 'flowed') {
            this.flowed = true;
            if (this._parsedContentType.params.delsp && this._parsedContentType.params.delsp.toLowerCase().trim() === 'yes') {
                this.delSp = true;
            }
        }

        if (this.filename) {
            try {
                this.filename = this.libmime.decodeWords(this.filename);
            } catch (E) {
                // failed to parse filename, keep as is (most probably an unknown charset is used)
            }
        }

        this.multipart =
            (this.contentType &&
                this.contentType.substr(0, this.contentType.indexOf('/')) === 'multipart' &&
                this.contentType.substr(this.contentType.indexOf('/') + 1)) ||
            false;
        this._boundary = (this._parsedContentType.params.boundary && Buffer.from(this._parsedContentType.params.boundary)) || false;

        this.rfc822 = this.contentType === 'message/rfc822';

        if (!this.parentNode || this.parentNode.rfc822) {
            this.partNr = this.parentNode ? this.parentNode.getPartNr('TEXT') : ['TEXT'];
        } else {
            this.partNr = this.parentNode ? this.parentNode.getPartNr() : [];
        }
    }

    getHeaders() {
        if (!this.headers) {
            this.parseHeaders();
        }
        return this.headers.build();
    }

    setContentType(contentType) {
        if (!this.headers) {
            this.parseHeaders();
        }

        contentType = (contentType || '').toLowerCase().trim();
        if (contentType) {
            this._parsedContentType.value = contentType;
        }

        if (!this.flowed && this._parsedContentType.params.format) {
            delete this._parsedContentType.params.format;
        }

        if (!this.delSp && this._parsedContentType.params.delsp) {
            delete this._parsedContentType.params.delsp;
        }

        this.headers.update('Content-Type', this.libmime.buildHeaderValue(this._parsedContentType));
    }

    setCharset(charset) {
        if (!this.headers) {
            this.parseHeaders();
        }

        charset = (charset || '').toLowerCase().trim();

        if (charset === 'ascii') {
            charset = '';
        }

        if (!charset) {
            if (!this._parsedContentType.value) {
                // nothing to set or update
                return;
            }
            delete this._parsedContentType.params.charset;
        } else {
            this._parsedContentType.params.charset = charset;
        }

        if (!this._parsedContentType.value) {
            this._parsedContentType.value = 'text/plain';
        }

        this.headers.update('Content-Type', this.libmime.buildHeaderValue(this._parsedContentType));
    }

    setFilename(filename) {
        if (!this.headers) {
            this.parseHeaders();
        }

        this.filename = (filename || '').toLowerCase().trim();

        if (this._parsedContentType.params.name) {
            delete this._parsedContentType.params.name;
            this.headers.update('Content-Type', this.libmime.buildHeaderValue(this._parsedContentType));
        }

        if (!this.filename) {
            if (!this._parsedContentDisposition.value) {
                // nothing to set or update
                return;
            }
            delete this._parsedContentDisposition.params.filename;
        } else {
            this._parsedContentDisposition.params.filename = this.filename;
        }

        if (!this._parsedContentDisposition.value) {
            this._parsedContentDisposition.value = 'attachment';
        }

        this.headers.update('Content-Disposition', this.libmime.buildHeaderValue(this._parsedContentDisposition));
    }

    getDecoder() {
        if (!this.headers) {
            this.parseHeaders();
        }

        switch (this.encoding) {
            case 'base64':
                return new libbase64.Decoder();
            case 'quoted-printable':
                return new libqp.Decoder();
            default:
                return new PassThrough();
        }
    }

    getEncoder(encoding) {
        if (!this.headers) {
            this.parseHeaders();
        }

        encoding = (encoding || '')
            .toString()
            .toLowerCase()
            .trim();

        if (encoding && encoding !== this.encoding) {
            this.headers.update('Content-Transfer-Encoding', encoding);
        } else {
            encoding = this.encoding;
        }

        switch (encoding) {
            case 'base64':
                return new libbase64.Encoder();
            case 'quoted-printable':
                return new libqp.Encoder();
            default:
                return new PassThrough();
        }
    }
}

module.exports = MimeNode;
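The node objects emitted by MessageSplitter are instances of this class; a sketch of inspecting and tweaking one in flight (the sample message and header values are illustrative):

const MessageSplitter = require('mailsplit/lib/message-splitter');

let splitter = new MessageSplitter();
splitter.on('data', obj => {
    if (obj.type !== 'node') {
        return;
    }
    // metadata populated by parseHeaders()
    console.log(obj.contentType, obj.encoding, obj.disposition, obj.filename);

    // header edits made here survive into joined output, because MessageJoiner
    // calls getHeaders(), which rebuilds the header block if it has changed
    obj.headers.add('X-Scanned', 'yes');
    if (obj.disposition === 'attachment') {
        obj.setFilename('renamed.bin');
    }
});
splitter.end(Buffer.from('Content-Type: text/plain\r\n\r\nHello\r\n'));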
nodered/rootfs/data/node_modules/mailsplit/lib/node-rewriter.js (new normal file, generated, vendored, 194 lines)
@@ -0,0 +1,194 @@
'use strict';

// Helper class to rewrite nodes with specific mime type

const Transform = require('stream').Transform;
const FlowedDecoder = require('./flowed-decoder');

/**
 * NodeRewriter Transform stream. Updates content for all nodes matched by the filter function
 *
 * @constructor
 * @param {Function} filterFunc Function that returns true for nodes that should be rewritten
 * @param {Function} rewriteAction Function to run with the node content
 */
class NodeRewriter extends Transform {
    constructor(filterFunc, rewriteAction) {
        let options = {
            readableObjectMode: true,
            writableObjectMode: true
        };
        super(options);

        this.filterFunc = filterFunc;
        this.rewriteAction = rewriteAction;

        this.decoder = false;
        this.encoder = false;
        this.continue = false;
    }

    _transform(data, encoding, callback) {
        this.processIncoming(data, callback);
    }

    _flush(callback) {
        if (this.decoder) {
            // emit an empty node just in case there is pending data to end
            return this.processIncoming(
                {
                    type: 'none'
                },
                callback
            );
        }
        return callback();
    }

    processIncoming(data, callback) {
        if (this.decoder && data.type === 'body') {
            // data to parse
            if (!this.decoder.write(data.value)) {
                return this.decoder.once('drain', callback);
            } else {
                return callback();
            }
        } else if (this.decoder && data.type !== 'body') {
            // stop decoding.
            // we can not process the current data chunk as we need to wait until
            // the parsed data is completely processed, so we store a reference to the
            // continue callback
            this.continue = () => {
                this.continue = false;
                this.decoder = false;
                this.encoder = false;
                this.processIncoming(data, callback);
            };
            return this.decoder.end();
        } else if (data.type === 'node' && this.filterFunc(data)) {
            // found matching node, create new handler
            this.emit('node', this.createDecodePair(data));
        } else if (this.readable && data.type !== 'none') {
            // we don't care about this data, just pass it over to the joiner
            this.push(data);
        }
        callback();
    }

    createDecodePair(node) {
        this.decoder = node.getDecoder();

        if (['base64', 'quoted-printable'].includes(node.encoding)) {
            this.encoder = node.getEncoder();
        } else {
            this.encoder = node.getEncoder('quoted-printable');
        }

        let lastByte = false;

        let decoder = this.decoder;
        let encoder = this.encoder;
        let firstChunk = true;
        decoder.$reading = false;

        let readFromEncoder = () => {
            decoder.$reading = true;

            let data = encoder.read();
            if (data === null) {
                decoder.$reading = false;
                return;
            }

            if (firstChunk) {
                firstChunk = false;
                if (this.readable) {
                    this.push(node);
                    if (node.type === 'body') {
                        lastByte = node.value && node.value.length && node.value[node.value.length - 1];
                    }
                }
            }

            let writeMore = true;
            if (this.readable) {
                writeMore = this.push({
                    node,
                    type: 'body',
                    value: data
                });
                lastByte = data && data.length && data[data.length - 1];
            }

            if (writeMore) {
                return setImmediate(readFromEncoder);
            } else {
                encoder.pause();
                // no idea how to catch drain? use timeout for now as poor man's substitute
                // this.once('drain', () => encoder.resume());
                setTimeout(() => {
                    encoder.resume();
                    setImmediate(readFromEncoder);
                }, 100);
            }
        };

        encoder.on('readable', () => {
            if (!decoder.$reading) {
                return readFromEncoder();
            }
        });

        encoder.on('end', () => {
            if (firstChunk) {
                firstChunk = false;
                if (this.readable) {
                    this.push(node);
                    if (node.type === 'body') {
                        lastByte = node.value && node.value.length && node.value[node.value.length - 1];
                    }
                }
            }

            if (lastByte !== 0x0a) {
                // make sure there is a terminating line break
                this.push({
                    node,
                    type: 'body',
                    value: Buffer.from([0x0a])
                });
            }

            if (this.continue) {
                return this.continue();
            }
        });

        if (/^text\//.test(node.contentType) && node.flowed) {
            // text/plain; format=flowed is a special case
            let flowDecoder = decoder;
            decoder = new FlowedDecoder({
                delSp: node.delSp,
                encoding: node.encoding
            });
            flowDecoder.on('error', err => {
                decoder.emit('error', err);
            });
            flowDecoder.pipe(decoder);

            // we do not know whether the rewritten data will still comply with the
            // requirements of format=flowed, so we just cancel it
            node.flowed = false;
            node.delSp = false;
            node.setContentType();
        }

        return {
            node,
            decoder,
            encoder
        };
    }
}

module.exports = NodeRewriter;
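A sketch of wiring the rewriter between a splitter and a joiner to replace the decoded content of matching parts; the uppercasing transform and the plain-text filter are illustrative only:

const Transform = require('stream').Transform;
const MessageSplitter = require('mailsplit/lib/message-splitter');
const MessageJoiner = require('mailsplit/lib/message-joiner');
const NodeRewriter = require('mailsplit/lib/node-rewriter');

let rewriter = new NodeRewriter(node => node.contentType === 'text/plain', false);

// for every matching node we get the decoder (original decoded content) and the
// encoder (whatever is written into it is re-encoded and spliced back into the message)
rewriter.on('node', ({ node, decoder, encoder }) => {
    decoder
        .pipe(
            new Transform({
                transform(chunk, enc, done) {
                    done(null, chunk.toString().toUpperCase());
                }
            })
        )
        .pipe(encoder);
});

process.stdin.pipe(new MessageSplitter()).pipe(rewriter).pipe(new MessageJoiner()).pipe(process.stdout);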
nodered/rootfs/data/node_modules/mailsplit/lib/node-streamer.js (new normal file, generated, vendored, 121 lines)
@@ -0,0 +1,121 @@
'use strict';

// Helper class to stream nodes with specific mime type

const Transform = require('stream').Transform;
const FlowedDecoder = require('./flowed-decoder');

/**
 * NodeStreamer Transform stream. Exposes content for all nodes matched by the filter function
 *
 * @constructor
 * @param {Function} filterFunc Function that returns true for nodes that should be streamed
 * @param {Function} streamAction Function to run with the node content
 */
class NodeStreamer extends Transform {
    constructor(filterFunc, streamAction) {
        let options = {
            readableObjectMode: true,
            writableObjectMode: true
        };
        super(options);

        this.filterFunc = filterFunc;
        this.streamAction = streamAction;

        this.decoder = false;
        this.canContinue = false;
        this.continue = false;
    }

    _transform(data, encoding, callback) {
        this.processIncoming(data, callback);
    }

    _flush(callback) {
        if (this.decoder) {
            // emit an empty node just in case there is pending data to end
            return this.processIncoming(
                {
                    type: 'none'
                },
                callback
            );
        }
        return callback();
    }

    processIncoming(data, callback) {
        if (this.decoder && data.type === 'body') {
            // data to parse
            this.push(data);
            if (!this.decoder.write(data.value)) {
                return this.decoder.once('drain', callback);
            } else {
                return callback();
            }
        } else if (this.decoder && data.type !== 'body') {
            // stop decoding.
            // we can not process the current data chunk as we need to wait until
            // the parsed data is completely processed, so we store a reference to the
            // continue callback

            let doContinue = () => {
                this.continue = false;
                this.decoder = false;
                this.canContinue = false;
                this.processIncoming(data, callback);
            };

            if (this.canContinue) {
                setImmediate(doContinue);
            } else {
                this.continue = () => doContinue();
            }

            return this.decoder.end();
        } else if (data.type === 'node' && this.filterFunc(data)) {
            this.push(data);
            // found matching node, create new handler
            this.emit('node', this.createDecoder(data));
        } else if (this.readable && data.type !== 'none') {
            // we don't care about this data, just pass it over to the joiner
            this.push(data);
        }
        callback();
    }

    createDecoder(node) {
        this.decoder = node.getDecoder();

        let decoder = this.decoder;
        decoder.$reading = false;

        if (/^text\//.test(node.contentType) && node.flowed) {
            let flowDecoder = decoder;
            decoder = new FlowedDecoder({
                delSp: node.delSp
            });
            flowDecoder.on('error', err => {
                decoder.emit('error', err);
            });
            flowDecoder.pipe(decoder);
        }

        return {
            node,
            decoder,
            done: () => {
                if (typeof this.continue === 'function') {
                    // called once input stream is processed
                    this.continue();
                } else {
                    // called before input stream is processed
                    this.canContinue = true;
                }
            }
        };
    }
}

module.exports = NodeStreamer;
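NodeStreamer is the read-only sibling of NodeRewriter: matching nodes pass through unchanged while a decoded copy of their content is handed to the listener, which must call done() so the stream can continue. A sketch with an illustrative attachment filter and output file name:

const fs = require('fs');
const MessageSplitter = require('mailsplit/lib/message-splitter');
const MessageJoiner = require('mailsplit/lib/message-joiner');
const NodeStreamer = require('mailsplit/lib/node-streamer');

let streamer = new NodeStreamer(node => node.disposition === 'attachment', false);

streamer.on('node', ({ node, decoder, done }) => {
    // consume the decoded attachment; the original message is not modified
    decoder.pipe(fs.createWriteStream(node.filename || 'attachment.bin'));
    decoder.on('end', done);
});

process.stdin.pipe(new MessageSplitter()).pipe(streamer).pipe(new MessageJoiner()).pipe(process.stdout);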