Skip to content

Commit

Permalink
tests: use mockery
Browse files Browse the repository at this point in the history
  • Loading branch information
stephenplusplus committed Dec 22, 2014
1 parent cde8c45 commit e24fa50
Show file tree
Hide file tree
Showing 11 changed files with 286 additions and 202 deletions.
4 changes: 2 additions & 2 deletions package.json
Original file line number Diff line number Diff line change
Expand Up @@ -65,13 +65,13 @@
"istanbul": "^0.3.0",
"jshint": "^2.5.2",
"mocha": "^2.0.1",
"sandboxed-module": "^1.0.1",
"mockery": "^1.4.0",
"tmp": "0.0.24"
},
"scripts": {
"docs": "./scripts/docs.sh",
"lint": "jshint lib/ regression/ test/",
"test": "mocha --recursive",
"test": "mocha test/*",
"regression-test": "mocha regression/* --timeout 20000",
"cover": "istanbul cover -x 'regression/*' _mocha -- --timeout 20000 test/* regression/*",
"coveralls": "istanbul cover -x 'regression/*' _mocha --report lcovonly -- --timeout 20000 test/* regression/* -R spec && cat ./coverage/lcov.info | ./node_modules/coveralls/bin/coveralls.js && rm -rf ./coverage"
Expand Down
70 changes: 31 additions & 39 deletions test/bigquery/index.js
Original file line number Diff line number Diff line change
Expand Up @@ -14,52 +14,27 @@
* limitations under the License.
*/

/*global describe, it, beforeEach */
/*global describe, it, beforeEach, before, after */

'use strict';

var assert = require('assert');
var Dataset = require('../../lib/bigquery/dataset');
var Job = require('../../lib/bigquery/job');
var request = require('request');
var mockery = require('mockery');
var Stream = require('stream').Stream;
var Table = require('../../lib/bigquery/table');
var util = require('../../lib/common/util');

var BigQuery = require('sandboxed-module')
.require('../../lib/bigquery', {
requires: {
'./dataset': Dataset,
'./job': Job,
'./table': FakeTable,
request: fakeRequest,
'google-service-account': fakeGsa
}
});

var mergeSchemaWithRows_Cached = Table.mergeSchemaWithRows_;
var mergeSchemaWithRows_Override;

// Stand-in constructor for Table: delegates construction to the real
// Table so instances behave identically, while letting the tests swap
// in their own static methods (e.g. mergeSchemaWithRows_).
function FakeTable(a, b) {
  Table.apply(this, [a, b]);
}

var mergeSchemaWithRows_Override;
FakeTable.mergeSchemaWithRows_ = function() {
var args = [].slice.apply(arguments);
return (mergeSchemaWithRows_Override || mergeSchemaWithRows_Cached)
return (mergeSchemaWithRows_Override || Table.mergeSchemaWithRows_)
.apply(null, args);
};

// Pass-through to the `request` module. A test can intercept exactly one
// call by assigning a function to `request_Override`; the override is
// cleared again after the call completes.
var request_Cached = request;
var request_Override;

function fakeRequest() {
  var requestArgs = Array.prototype.slice.call(arguments);
  var response = (request_Override || request_Cached).apply(null, requestArgs);
  request_Override = null; // one-shot: fall back to the real module next time
  return response;
}

function fakeGsa() {
return function(req, callback) {
callback(null, req);
Expand All @@ -69,8 +44,25 @@ function fakeGsa() {
describe('BigQuery', function() {
var JOB_ID = JOB_ID;
var PROJECT_ID = 'test-project';

var BigQuery;
var bq;

before(function() {
  var mockeryOptions = {
    useCleanCache: true,       // load fresh module copies so the mocks take effect
    warnOnUnregistered: false  // modules we did not mock load normally, silently
  };

  // Register the fakes before enabling mockery and loading the module
  // under test, so its internal require() calls resolve to the fakes.
  mockery.registerMock('./table.js', FakeTable);
  mockery.registerMock('google-service-account', fakeGsa);
  mockery.enable(mockeryOptions);

  BigQuery = require('../../lib/bigquery');
});

after(function() {
  // Remove every registered mock and restore the real module loader so
  // later test files are unaffected by this suite's fakes.
  mockery.deregisterAll();
  mockery.disable();
});

beforeEach(function() {
  // Fresh client per test so per-test stubbing (e.g. makeReq_) cannot
  // leak between tests.
  bq = new BigQuery({ projectId: PROJECT_ID });
});
Expand Down Expand Up @@ -110,7 +102,7 @@ describe('BigQuery', function() {
};
bq.createDataset(DATASET_ID, function(err, dataset) {
assert.ifError(err);
assert(dataset instanceof Dataset);
assert.equal(dataset.constructor.name, 'Dataset');
done();
});
});
Expand All @@ -133,7 +125,7 @@ describe('BigQuery', function() {

it('returns a Dataset instance', function() {
var ds = bq.dataset(DATASET_ID);
assert(ds instanceof Dataset);
assert.equal(ds.constructor.name, 'Dataset');
});

it('should scope the correct dataset', function() {
Expand Down Expand Up @@ -183,7 +175,7 @@ describe('BigQuery', function() {
};
bq.getDatasets(function(err, datasets) {
assert.ifError(err);
assert(datasets[0] instanceof Dataset);
assert.equal(datasets[0].constructor.name, 'Dataset');
done();
});
});
Expand Down Expand Up @@ -266,7 +258,7 @@ describe('BigQuery', function() {
};
bq.getJobs(function(err, jobs) {
assert.ifError(err);
assert(jobs[0] instanceof Job);
assert.equal(jobs[0].constructor.name, 'Job');
done();
});
});
Expand Down Expand Up @@ -300,7 +292,7 @@ describe('BigQuery', function() {
describe('job', function() {
it('should return a Job instance', function() {
var job = bq.job(JOB_ID);
assert(job instanceof Job);
assert.equal(job.constructor.name, 'Job');
});

it('should scope the correct job', function() {
Expand Down Expand Up @@ -377,7 +369,7 @@ describe('BigQuery', function() {
it('should populate nextQuery when job is incomplete', function(done) {
bq.query({}, function(err, rows, nextQuery) {
assert.ifError(err);
assert(nextQuery.job instanceof Job);
assert.equal(nextQuery.job.constructor.name, 'Job');
assert.equal(nextQuery.job.id, JOB_ID);
done();
});
Expand Down Expand Up @@ -408,7 +400,7 @@ describe('BigQuery', function() {
it('should populate nextQuery when more results exist', function(done) {
bq.query(options, function(err, rows, nextQuery) {
assert.ifError(err);
assert(nextQuery.job instanceof Job);
assert.equal(nextQuery.job.constructor.name, 'Job');
assert.equal(nextQuery.job.id, JOB_ID);
assert.equal(nextQuery.pageToken, pageToken);
done();
Expand Down Expand Up @@ -663,7 +655,7 @@ describe('BigQuery', function() {

bq.startQuery('query', function(err, job) {
assert.ifError(err);
assert(job instanceof Job);
assert.equal(job.constructor.name, 'Job');
assert.equal(job.id, JOB_ID);
assert.deepEqual(job.metadata, jobsResource);
done();
Expand All @@ -678,7 +670,7 @@ describe('BigQuery', function() {
var body = { hi: 'there' };

it('should make correct request', function(done) {
request_Override = function(request) {
bq.makeAuthorizedRequest_ = function(request) {
var basePath = 'https://www.googleapis.com/bigquery/v2/projects/';
assert.equal(request.method, method);
assert.equal(request.uri, basePath + bq.projectId + path);
Expand All @@ -690,7 +682,7 @@ describe('BigQuery', function() {
});

it('should execute callback', function(done) {
request_Override = function(request, callback) {
bq.makeAuthorizedRequest_ = function(request, callback) {
callback();
};
bq.makeReq_(method, path, query, body, done);
Expand Down
71 changes: 44 additions & 27 deletions test/bigquery/table.js
Original file line number Diff line number Diff line change
Expand Up @@ -14,16 +14,15 @@
* limitations under the License.
*/

/*global describe, it, beforeEach */
/*global describe, it, beforeEach, before, after */

'use strict';

var assert = require('assert');
var extend = require('extend');
var File = require('../../lib/storage/file');
var Stream = require('stream');
var sandbox = require('sandboxed-module');
var through = require('through2');
var mockery = require('mockery');
var stream = require('stream');
var util = require('../../lib/common/util');

function FakeFile(a, b) {
Expand All @@ -35,14 +34,6 @@ var fakeUtil = extend({}, util, {
makeWritableStream: function() {
var args = [].slice.call(arguments);
(makeWritableStream_Override || util.makeWritableStream).apply(null, args);
makeWritableStream_Override = null;
}
});

var Table = sandbox.require('../../lib/bigquery/table', {
requires: {
'../storage/file': FakeFile,
'../common/util': fakeUtil
}
});

Expand All @@ -54,7 +45,7 @@ describe('BigQuery/Table', function() {
job: function(id) {
return { id: id };
},
projectId: 'project-id',
projectId: 'project-id'
}
};

Expand All @@ -68,10 +59,27 @@ describe('BigQuery/Table', function() {
};
var SCHEMA_STRING = 'id:integer,breed,name,dob:timestamp';

var Table;
var TABLE_ID = 'kittens';
var table;

before(function() {
  var mockeryOptions = {
    useCleanCache: true,       // force fresh module copies so the mocks apply
    warnOnUnregistered: false  // unmocked requires are expected; load them quietly
  };

  // Register the fakes before enabling mockery and loading Table, so
  // Table's internal require() calls resolve to the fakes.
  mockery.registerMock('../storage/file', FakeFile);
  mockery.registerMock('../common/util', fakeUtil);
  mockery.enable(mockeryOptions);

  Table = require('../../lib/bigquery/table');
});

after(function() {
  // Drop all mock registrations and restore the real module loader so
  // other test files load the genuine modules.
  mockery.deregisterAll();
  mockery.disable();
});

beforeEach(function() {
  // Reset the makeWritableStream interception and build a fresh Table so
  // state cannot leak from one test to the next.
  makeWritableStream_Override = null;
  table = new Table(DATASET, TABLE_ID);
});

Expand Down Expand Up @@ -111,7 +119,11 @@ describe('BigQuery/Table', function() {
});

describe('copy', function() {
var DEST_TABLE = new Table(DATASET, 'destination-table');
// Destination table for the copy tests. Constructed inside a hook (not
// at describe time) because `Table` is only assigned in this suite's
// outer `before`, after mockery is enabled.
var DEST_TABLE;

before(function() {
  DEST_TABLE = new Table(DATASET, 'destination-table');
});

it('should throw if a destination is not a Table', function() {
assert.throws(function() {
Expand Down Expand Up @@ -208,7 +220,7 @@ describe('BigQuery/Table', function() {

describe('createReadStream', function() {
it('should return a stream', function() {
assert(table.createReadStream() instanceof Stream);
assert(table.createReadStream() instanceof stream.Stream);
});

it('should call getRows() when asked for data', function(done) {
Expand Down Expand Up @@ -326,7 +338,7 @@ describe('BigQuery/Table', function() {
});

it('should return a stream', function() {
assert(table.createWriteStream() instanceof Stream);
assert(table.createWriteStream() instanceof stream.Stream);
});

describe('writable stream', function() {
Expand Down Expand Up @@ -686,30 +698,34 @@ describe('BigQuery/Table', function() {

it('should accept just a File and a callback', function(done) {
table.createWriteStream = function() {
var stream = through();
var ws = new stream.Writable();
setImmediate(function() {
stream.emit('complete');
ws.emit('complete');
ws.end();
});
return stream;
return ws;
};

table.import(FILEPATH, done);
});

it('should return a stream when a string is given', function() {
table.createWriteStream = through;
table.createWriteStream = function() {
return new stream.Writable();
};

assert(table.import(FILEPATH) instanceof Stream);
assert(table.import(FILEPATH) instanceof stream.Stream);
});

it('should infer the file format from the given filepath', function(done) {
table.createWriteStream = function(metadata) {
assert.equal(metadata.sourceFormat, 'NEWLINE_DELIMITED_JSON');
var stream = through();
var ws = new stream.Writable();
setImmediate(function() {
stream.emit('complete');
ws.emit('complete');
ws.end();
});
return stream;
return ws;
};

table.import(FILEPATH, done);
Expand All @@ -718,11 +734,12 @@ describe('BigQuery/Table', function() {
it('should not infer the file format if one is given', function(done) {
table.createWriteStream = function(metadata) {
assert.equal(metadata.sourceFormat, 'CSV');
var stream = through();
var ws = new stream.Writable();
setImmediate(function() {
stream.emit('complete');
ws.emit('complete');
ws.end();
});
return stream;
return ws;
};

table.import(FILEPATH, { sourceFormat: 'CSV' }, done);
Expand Down
Loading

0 comments on commit e24fa50

Please sign in to comment.