[Buffer] Taint API harmonization (nodejs#56)
* [Refactoring] change taint to _taint on buffer

* [Refactor] Added _taint in unit tests

* [Feature] Added method for buffer

* [Test] Buffer.new() test for each encoding

* [Test] Buffer.from() tests

* [Test] Buffer.new() tests

* [Test] Buffer.new() tests for one string + each encoding

* [Test] Buffer.new() tests for concatenated ascii and utf8 encoding

* [Test] Buffer.new() test enhanced for utf8 encoding

* [Test] Buffer.new() tests for hex (ascii) encoding

* [Test] Buffer.new() removed bug in unit tests

* [Test] Buffer.new() tests for hex encoding with utf8 character

* [Test] Buffer.new() unit tests for base64 encoding

* [Test] Buffer.new() unit tests for base64 unicode encoding

* [Test] Buffer.new() refactoring for utf8 + ascii encoding

* [Buffer] Refactor taint propagation for toString and slice

* [Buffer] Remove previously inserted newlines

* [Buffer] Taint propagation for Buffer.prototype.toString('hex')

* [Test] Buffer.new() unit tests for utf8 encoding

* [Feature] Buffer.alloc() keep taint

* [Feature] Buffer.alloc() keep (sub-)taint for hex encoding

* [Test] Buffer.fill() unit tests

* [Feature] Buffer.write() ascii/utf8 taint propagation

* [Test] Buffer.concat() unit tests

* Buffer.write() refactored

* [Feature] Apply taint to partial Buffer

* [Feature] Keep taint outside taint ranges for write/fill

* [Fix] Buffer.concat() taint propagation

* [Merge] buffer refactoring

* [Refactoring] Final buffer status

* [Feature] Final status of buffer

* [Feature] Final refactored buffer api status
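
Taken together, these commits move the taint information to a _taint array of { begin, end } byte ranges on every buffer and propagate it through Buffer.from(), alloc(), slice(), toString(), write(), fill() and concat(). A minimal usage sketch, assuming a build of this fork (stock Node.js has none of these APIs); the 'http' taint label and the printed values are illustrative:

const { Buffer } = require('buffer');

// Strings in this fork carry taint; Buffer.from() is expected to carry it
// over into the buffer's _taint ranges (byte offsets).
const query = 'id=42&name=alice'.setTaint('http'); // label is illustrative
const buf = Buffer.from(query, 'utf8');

console.log(buf._taint);       // e.g. [ { begin: 0, end: 16 } ]
console.log(buf.isTainted());  // true, via the harmonized Buffer-side accessor

// Propagation: slices and decoded strings keep the overlapping sub-ranges.
const part = buf.slice(6, 16);     // still tainted
const hex = buf.toString('hex');   // taint re-applied to the hex output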
Tobias Simolik authored and dacappo committed Jan 24, 2019
1 parent b327728 commit 4eff241
Showing 26 changed files with 3,353 additions and 90 deletions.
2 changes: 1 addition & 1 deletion lib/_http_common.js
@@ -122,7 +122,7 @@ function parserOnBody(b, start, len) {
var slice = b.slice(start, start + len);
// TaintNode
// TODO: enable again
-//slice.taint = [{ 'begin': 0, 'end': slice.length }];
+//slice._taint = [{ 'begin': 0, 'end': slice.length }];
var ret = stream.push(slice);
if (!ret)
readStop(this.socket);
38 changes: 19 additions & 19 deletions lib/_pathTraversalCheck.js
@@ -15,9 +15,9 @@ const Buffer = require('buffer').Buffer;
function removePathTraversal(path) {
if (!path)
return path;
-if (path.isTainted && path.isTainted()) {
+if (typeof path === 'string' && path.isTainted()) {
path = removeAllTaintedString(path, '..');
-} else if (path.taint && path.taint.length > 0) {
+} else if (Buffer.isBuffer(path) && path.isTainted()) {
path = removeAllTaintedBuffer(path, '..');
}
return path;
@@ -68,12 +68,12 @@ function removeAllTaintedBuffer(path, toRemove) {
var arr = [];
var i;
var endOfTaintTraversal = false;
-for (i = 0; i < path.taint.length; i++) {
-var currentTaintLength = path.taint.length;
+for (i = 0; i < path._taint.length; i++) {
+var currentTaintLength = path._taint.length;
arr.push(addBeginning(path, i, endOfTaintTraversal));
endOfTaintTraversal = checkEndOfTaintTraversal(path, i);
arr.push(mitigateOneTaintBuffer(path, i, toRemove));
-if (path.taint.length < currentTaintLength)
+if (path._taint.length < currentTaintLength)
i--;
}
arr.push(addEndOfPath(path, endOfTaintTraversal));
@@ -82,37 +82,37 @@ function removeAllTaintedBuffer(path, toRemove) {
}

function checkEndOfTaintTraversal(path, i) {
-if (path.length > path.taint[i].end &&
-(path.slice(path.taint[i].end - 2,
-path.taint[i].end + 1).compare(Buffer.from('../')) === 0 ||
-path.slice(path.taint[i].end - 2,
-path.taint[i].end + 1).compare(Buffer.from('..\\')) === 0)) {
+if (path.length > path._taint[i].end &&
+(path.slice(path._taint[i].end - 2,
+path._taint[i].end + 1).compare(Buffer.from('../')) === 0 ||
+path.slice(path._taint[i].end - 2,
+path._taint[i].end + 1).compare(Buffer.from('..\\')) === 0)) {
return true;
}
return false;
}

function addBeginning(path, i, endOfTaintTraversal) {
if (i === 0) {
-return path.slice(0, path.taint[i].begin);
-} else if (endOfTaintTraversal && path.taint[i].begin >
-path.taint[i - 1].end + 1) {
-return path.slice(path.taint[i - 1].end + 1, path.taint[i].begin);
+return path.slice(0, path._taint[i].begin);
+} else if (endOfTaintTraversal && path._taint[i].begin >
+path._taint[i - 1].end + 1) {
+return path.slice(path._taint[i - 1].end + 1, path._taint[i].begin);
} else if (!endOfTaintTraversal) {
-return path.slice(path.taint[i - 1].end, path.taint[i].begin);
+return path.slice(path._taint[i - 1].end, path._taint[i].begin);
}
}

function addEndOfPath(path, endOfTaintTraversal) {
if (endOfTaintTraversal)
-return path.slice(path.taint[path.taint.length - 1].end + 1);
+return path.slice(path._taint[path._taint.length - 1].end + 1);
else
-return path.slice(path.taint[path.taint.length - 1].end);
+return path.slice(path._taint[path._taint.length - 1].end);
}

function mitigateOneTaintBuffer(path, i, toRemove) {
var removedCharCounter = [];
-var tainted = path.slice(path.taint[i].begin, path.taint[i].end);
+var tainted = path.slice(path._taint[i].begin, path._taint[i].end);
var toRemoveList = [];
var fixedBuffer = [];

@@ -132,7 +132,7 @@ function mitigateOneTaintBuffer(path, i, toRemove) {
function removeOneInstanceBuffer(path, tainted, fixedBuffer,
toRemoveList, removedCharCounter,
i, toRemove, counterTotal) {
-var endOfRemovedIndex = path.taint[i].begin + tainted.indexOf(toRemove) +
+var endOfRemovedIndex = path._taint[i].begin + tainted.indexOf(toRemove) +
toRemove.length + counterTotal;
if (endOfRemovedIndex < path.length &&
(path.slice(endOfRemovedIndex, endOfRemovedIndex + 1)
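For context, the sanitizer above only touches the tainted (user-controlled) ranges of a path: '..' sequences inside a taint range are removed, while developer-written segments outside the ranges stay untouched. A rough sketch of the intent, assuming this fork's string taint API, that concatenation preserves the taint range, and that lib/_pathTraversalCheck.js exports removePathTraversal (the export statement is outside the hunks shown):

// Hypothetical usage; module resolution and export name are assumptions.
const removePathTraversal = require('_pathTraversalCheck');

const userFile = '../../passwd'.setTaint('http');   // attacker-controlled, tainted
const path = '../shared/uploads/' + userFile;       // only the userFile part is tainted
const safe = removePathTraversal(path);
// The hard-coded '../shared' prefix survives; the '..' occurrences inside the
// tainted range are stripped before the path is handed to fs.
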
153 changes: 153 additions & 0 deletions lib/_taint_buffer.js
@@ -0,0 +1,153 @@
'use strict';

const { byteLength } = require('internal/buffer_util');

exports.utf8Slice = (buf, start, end) => {
return slice(buf, start, end, 'utf8Slice');
};

exports.asciiSlice = (buf, start, end) => {
return slice(buf, start, end, 'asciiSlice');
};

exports.ucs2Slice = (buf, start, end) => {
return slice(buf, start, end, 'ucs2Slice');
};

exports.latin1Slice = (buf, start, end) => {
return slice(buf, start, end, 'latin1Slice');
};

exports.hexSlice = (buf, start, end) => {
return slice(buf, start, end, 'hexSlice');
};

function slice(buf, start, end, encodingSlice) {
let result = '';
let i = start;
for (const taint of getSubtaint(buf._taint, start, end)) {
result += buf[encodingSlice](i, taint.begin);
result += buf[encodingSlice](taint.begin, taint.end).setTaint('buffer');
i = taint.end;
}
result += buf[encodingSlice](i, end);
return result;
}

function getSubtaint(taint, begin, end) {
const result = [];
for (var i in taint) {
const range = taint[i];
if (range.end < begin || range.begin > end) {
continue;
} else if (range.begin >= begin && range.end <= end) {
result.push({ begin: range.begin, end: range.end });
} else if (range.begin < begin && range.end <= end) {
result.push({ begin: begin, end: range.end });
} else if (range.begin < begin && range.end > end) {
result.push({ begin: begin, end: end });
} else if (range.begin >= begin && range.end > end) {
result.push({ begin: range.begin, end: end });
}
}
return result;
}

exports.applyTaintToBuffer = applyTaintToBuffer;

function applyTaintToBuffer(buf, val, start, end, encoding) {

buf._taint = [];

if (typeof val !== 'string') return buf;

for (const taint of val.getTaint()) {
const offset = byteLength(val.slice(0, taint.begin)
, encoding);
const offsetEnd = offset + byteLength(val.slice(taint.begin
, taint.end), encoding);
const helpTaint = [{ begin: offset, end: offsetEnd }];
buf._taint.push(getSubtaint(helpTaint, start, end)[0]);
}
return buf;
}

exports.ucs2Write = (buf, string, offset, length) => {
return write(buf, string, offset, length, 'utf16le', 'ucs2Write');
};

exports.utf8Write = (buf, string, offset, length) => {
return write(buf, string, offset, length, 'utf8', 'utf8Write');
};

exports.asciiWrite = (buf, string, offset, length) => {
return write(buf, string, offset, length, 'ascii', 'asciiWrite');
};

function write(buf, string, offset, length, encoding, encodingSlice) {
const result = buf[encodingSlice](string, offset, length);
applyTaintToPartialBuffer(buf, string, offset, length, encoding);
return result;
}

// exports.applyTaintToPartialBuffer = applyTaintToPartialBuffer;

function applyTaintToPartialBuffer(buf, string, offset, length, encoding) {

const end = offset + length;
const taintResult = [];

// Step 1: Keep taint before string range to be inserted.
for (const taint of buf._taint) {
if (taint.end < offset) {
taintResult.push(taint);
} else if (taint.begin < offset && taint.end >= offset) {
const helpTaint = [{ begin: taint.begin, end: offset - 1 }];
taintResult.push(helpTaint[0]);
} else {
break;
}
}
// Step 2: Keep taint from string
for (const taint of string.getTaint()) {
const taintBegin = byteLength(string.slice(0, taint.begin)
, encoding);
const taintEnd = byteLength(string.slice(taint.begin, taint.end)
, encoding);
const helpTaint = [{ begin: offset + taintBegin,
end: offset + taintEnd }];
taintResult.push(getSubtaint(helpTaint, offset, end)[0]);
}
// Step 3: Keep taint after string range to be inserted.
for (const taint of buf._taint) {
if (taint.end <= end) {
continue;
} else if (taint.begin <= end && taint.end > end) {
const helpTaint = [{ begin: end + 1, end: taint.end }];
taintResult.push(helpTaint[0]);
} else if (taint.begin > end) {
taintResult.push(taint);
}
}
// Save the result
buf._taint = taintResult;
}

exports.concatBufferArrayTaint = concatBufferArrayTaint;

function concatBufferArrayTaint(list) {

const taintResult = [];
var offset = 0;

for (const buffer of list) {
if (buffer && buffer._taint) {
for (const taint of buffer._taint) {
taintResult.push({ begin: offset + taint.begin,
end: offset + taint.end });
}
}
offset += buffer.length;
}
return taintResult;
}
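
The exported concatBufferArrayTaint() helper is a pure function over the buffers' _taint arrays: each range is shifted by the byte offset at which its buffer lands in the concatenation. A small worked sketch (the require path and the hand-assigned _taint arrays are for illustration only):

const { Buffer } = require('buffer');
const { concatBufferArrayTaint } = require('_taint_buffer'); // assumed resolvable in this fork

const a = Buffer.from('user');
a._taint = [{ begin: 0, end: 4 }];  // fully tainted
const b = Buffer.from('safe');
b._taint = [];                      // untainted
const c = Buffer.from('input');
c._taint = [{ begin: 1, end: 5 }];  // bytes 1-4 tainted

// b starts at byte offset 4 and c at byte offset 8 of the concatenation:
console.log(concatBufferArrayTaint([a, b, c]));
// -> [ { begin: 0, end: 4 }, { begin: 9, end: 13 } ]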
41 changes: 20 additions & 21 deletions lib/_taint_buffer_util.js
@@ -8,10 +8,10 @@ const binding = process.binding('buffer');
exports.reapplyTaintToString = (string, buffer, start, end, encoding) => {
var i;

-if (!buffer.taint || buffer.taint.length < 1) {
+if (!buffer._taint || buffer._taint.length < 1) {
return string;
}
-const taint = buffer.taint;
+const taint = buffer._taint;
const resultString = '';
// For hex, ascii, ucs2 I perform the reverse action of applyTaintToBuffer
//TODO: this doesn't work like this, I'm throwing away everything that has no taint!!!
@@ -108,8 +108,7 @@ exports.applyTaintToBuffer = (buffer, string, encoding,
writeOffset, length, written) => {
// NOTE: taint propagation is currently disabled here; the taint is cleared and the function returns early.
//if (!string.isTainted()) {
-buffer.taint.length;
-buffer.taint = [];
+buffer._taint = [];
return buffer;
//}
const taint = []; //string.getTaint();
@@ -177,7 +176,7 @@ exports.applyTaintToBuffer = (buffer, string, encoding,
if (written > 0 && alreadyWritten > written)
break;
}
-buffer.taint = taint;
+buffer._taint = taint;
}
if (['ucs2', 'ucs-2', 'utf16le', 'utf-16le'].indexOf(encoding) > -1) {
if (written < 0) {
@@ -201,7 +200,7 @@ exports.applyTaintToBuffer = (buffer, string, encoding,
}
}
}
-buffer.taint = taint;
+buffer._taint = taint;
}
if (['hex'].indexOf(encoding) > -1) {
if (written < 0) {
@@ -220,21 +219,21 @@ exports.applyTaintToBuffer = (buffer, string, encoding,
}
}
}
-buffer.taint = taint;
+buffer._taint = taint;
}
//TODO
if (['base64'].indexOf(encoding) > -1) {
// nothing written
if (written === 0) {
-buffer.taint = [];
+buffer._taint = [];
return buffer;
}
// whole original string is tainted with the same range
if (taint.length === 1 && taint[0].begin === 0 &&
taint[0].end === string.length) {
taint[0].begin += writeOffset;
taint[0].end = written + writeOffset;
-buffer.taint = taint;
+buffer._taint = taint;
return buffer;
}

@@ -264,7 +263,7 @@ exports.applyTaintToBuffer = (buffer, string, encoding,
newTaint.push({ begin: newBegin, end: newEnd, flow: curr.flow });
}
}
-buffer.taint = newTaint;
+buffer._taint = newTaint;
}
if (['ascii', 'binary', 'raw', 'raws'].indexOf(encoding) > -1) {
// one byte stays one byte
@@ -278,16 +277,16 @@ exports.applyTaintToBuffer = (buffer, string, encoding,
}
}
}
-buffer.taint = taint;
+buffer._taint = taint;
}

return buffer;
};

exports.concatBufferArrayTaint = (list) => {
return list.reduce((acc, val) => {
-if (typeof val === 'object' && val.taint) {
-val.taint.forEach((range) => {
+if (typeof val === 'object' && val._taint) {
+val._taint.forEach((range) => {
acc.taint.push({ 'begin': range.begin + acc.len,
'end': range.end + acc.len,
'flow': range.flow
@@ -354,11 +353,11 @@ exports.writeBytesToBuffer = (offset, byteLength, string,
}
newEnd = (curr.begin - string.length) * -1;
}
-buffer.taint.push({
+buffer._taint.push({
begin: (newBegin / 2) + offset,
end: (newEnd / 2) + offset, flow: curr.flow });
} else {
-buffer.taint.push({
+buffer._taint.push({
begin: (curr.begin / 2) + offset,
end: (curr.end / 2) + offset, flow: curr.flow });
}
@@ -372,8 +371,8 @@
*/
exports.subtaint = (buffer, begin, end) => {
const newTaint = [];
-for (var i = 0; i < buffer.taint.length; i++) {
-const element = buffer.taint[i];
+for (var i = 0; i < buffer._taint.length; i++) {
+const element = buffer._taint[i];
if (element.begin < end && element.end > begin) {
newTaint.push({
begin: Math.max(element.begin, begin) - begin,
@@ -389,8 +388,8 @@ exports.subtaint = (buffer, begin, end) => {
exports.insert = (buffer, index, taints) => {
const newTaint = [];

-for (var i = 0; i < buffer.taint.length; i++) {
-const range = buffer.taint[i];
+for (var i = 0; i < buffer._taint.length; i++) {
+const range = buffer._taint[i];
if (range.end <= index) {
newTaint.push({ begin: range.begin, end: range.end, flow: range.flow });
}
@@ -405,8 +404,8 @@ exports.insert = (buffer, index, taints) => {
last = range.end + index;
}

-for (i = 0; i < buffer.taint.length; i++) {
-const range = buffer.taint[i];
+for (i = 0; i < buffer._taint.length; i++) {
+const range = buffer._taint[i];
if (range.begin >= last) {
newTaint.push({ begin: range.begin, end: range.end, flow: range.flow });
}
