A faster and smaller binary parser and protocol
This is a squash of a few commits; a short summary of them follows.

Results: before this change, the build size of socket.io-client was ~250K;
now it is ~215K. The tests I was running here
(https://github.com/kevin-roark/socketio-binaryexample/tree/speed-testing)
take about 1/4 to 1/5 as long with this commit as they did with msgpack.

The first commit was the initial rewrite of the encoding: it removes msgpack
and instead uses a sequence of engine.write calls for a binary event. The
first write is the packet metadata, with placeholders in the JSON standing in
for any binary data. The writes that follow are the raw binary data that fill
those placeholders (a rough sketch of the resulting wire format is below).
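
As an illustrative sketch only (not part of the commit; the event name, field
names, and the numeric packet type are invented here), a binary event encoded
this way looks roughly like:

// Application emits something like:
//   socket.emit('photo', { caption: 'hi', image: someBuffer });

// First engine.write: the packet metadata as JSON, with each piece of binary
// data swapped for a numbered placeholder.
{
  "type": 5,            // assumed BINARY_EVENT type value, for illustration
  "nsp": "/",
  "attachments": 1,     // how many raw binary writes follow
  "data": ["photo", { "caption": "hi", "image": { "_placeholder": true, "num": 0 } }]
}

// Subsequent engine.write calls: the raw binary attachments themselves, sent
// in placeholder order, which the receiver slots back into place.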

The second commit was bug fixes that made the tests pass.

The third commit was removing unnecessary packages from package.json.

The fourth commit added explanatory comments, and the fifth commit merged
upstream.

The remaining commits involved merging with the actual socket.io-parser
repository, rather than the protocol repository. Oops.
kevin-roark committed Feb 27, 2014
1 parent 36f8aa8 commit 299849b
Showing 8 changed files with 248 additions and 177 deletions.
2 changes: 1 addition & 1 deletion .travis.yml
@@ -1,6 +1,6 @@
language: node_js
node_js:
- 0.10
- 0.10
notifications:
irc: irc.freenode.org##socket.io
env:
142 changes: 142 additions & 0 deletions binary.js
@@ -0,0 +1,142 @@
/**
 * Module requirements
 */

var isArray = require('isarray');

/**
 * Replaces every Buffer | ArrayBuffer in packet with a numbered placeholder.
 * Anything with blobs or files should be fed through removeBlobs before coming
 * here.
 *
 * @param {Object} packet - socket.io event packet
 * @return {Object} with deconstructed packet and list of buffers
 * @api public
 */

exports.deconstructPacket = function(packet) {
  var buffers = [];
  var packetData = packet.data;

  function deconstructBinPackRecursive(data) {
    if (!data) return data;

    if ((global.Buffer && Buffer.isBuffer(data)) ||
        (global.ArrayBuffer && data instanceof ArrayBuffer)) { // replace binary
      var placeholder = {_placeholder: true, num: buffers.length};
      buffers.push(data);
      return placeholder;
    } else if (isArray(data)) {
      var newData = new Array(data.length);
      for (var i = 0; i < data.length; i++) {
        newData[i] = deconstructBinPackRecursive(data[i]);
      }
      return newData;
    } else if ('object' == typeof data) {
      var newData = {};
      for (var key in data) {
        newData[key] = deconstructBinPackRecursive(data[key]);
      }
      return newData;
    }
    return data;
  }

  var pack = packet;
  pack.data = deconstructBinPackRecursive(packetData);
  pack.attachments = buffers.length; // number of binary 'attachments'
  return {packet: pack, buffers: buffers};
}
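
A usage sketch (not part of the diff; the sample event data is invented) of
how deconstructPacket splits a packet into metadata and raw buffers:

var binary = require('./binary');

var packet = { type: 5, data: ['hello', new Buffer([1, 2, 3])] };
var result = binary.deconstructPacket(packet);
// result.packet.data        -> ['hello', { _placeholder: true, num: 0 }]
// result.packet.attachments -> 1
// result.buffers            -> [ <Buffer 01 02 03> ]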

/**
 * Reconstructs a binary packet from its placeholder packet and buffers
 *
 * @param {Object} packet - event packet with placeholders
 * @param {Array} buffers - binary buffers to put in placeholder positions
 * @return {Object} reconstructed packet
 * @api public
 */

exports.reconstructPacket = function(packet, buffers) {
  var curPlaceHolder = 0;

  function reconstructBinPackRecursive(data) {
    if (data._placeholder) {
      var buf = buffers[data.num]; // appropriate buffer (should be natural order anyway)
      return buf;
    } else if (isArray(data)) {
      for (var i = 0; i < data.length; i++) {
        data[i] = reconstructBinPackRecursive(data[i]);
      }
      return data;
    } else if ('object' == typeof data) {
      for (var key in data) {
        data[key] = reconstructBinPackRecursive(data[key]);
      }
      return data;
    }
    return data;
  }

  packet.data = reconstructBinPackRecursive(packet.data);
  packet.attachments = undefined; // no longer useful
  return packet;
}
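
Continuing the sketch above (still illustrative, not part of the diff): the
receiving side gathers the placeholder packet plus the raw attachments and
rebuilds the original event:

var restored = binary.reconstructPacket(result.packet, result.buffers);
// restored.data        -> ['hello', <Buffer 01 02 03>]
// restored.attachments -> undefined (cleared once the buffers are back in place)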

/**
 * Asynchronously removes Blobs or Files from data via
 * FileReader's readAsArrayBuffer method. Used before encoding
 * the packet data. Calls callback with the blobless data.
 *
 * @param {Object} data
 * @param {Function} callback
 * @api private
 */

exports.removeBlobs = function(data, callback) {

  function removeBlobsRecursive(obj, curKey, containingObject) {
    if (!obj) return obj;

    // convert any blob
    if ((global.Blob && obj instanceof Blob) ||
        (global.File && obj instanceof File)) {
      pendingBlobs++;

      // async filereader
      var fileReader = new FileReader();
      fileReader.onload = function() { // this.result == arraybuffer
        if (containingObject) {
          containingObject[curKey] = this.result;
        }
        else {
          bloblessData = this.result;
        }

        // if nothing is pending, it's callback time
        if (! --pendingBlobs) {
          callback(bloblessData);
        }
      };

      fileReader.readAsArrayBuffer(obj); // blob -> arraybuffer
    }

    if (isArray(obj)) { // handle array
      for (var i = 0; i < obj.length; i++) {
        removeBlobsRecursive(obj[i], i, obj);
      }
    } else if (obj && 'object' == typeof obj) { // and object
      for (var key in obj) {
        removeBlobsRecursive(obj[key], key, obj);
      }
    }
  }

  var pendingBlobs = 0;
  var bloblessData = data;
  removeBlobsRecursive(bloblessData);
  if (!pendingBlobs) {
    callback(bloblessData);
  }
}
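
A browser-side usage sketch (assumes a Blob/FileReader environment; the data
shape is invented): removeBlobs converts any Blob or File into an ArrayBuffer
before the packet is deconstructed and written out.

var blob = new Blob(['some binary bytes']);
binary.removeBlobs({ caption: 'hi', image: blob }, function(bloblessData) {
  // bloblessData.image is now an ArrayBuffer, so the packet can be fed to
  // deconstructPacket and sent as metadata + raw attachments.
});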
