
Merge pull request #728 from callmehiphop/style-checker
JavaScript Code Style Linter
stephenplusplus committed Jul 19, 2015
2 parents (c01e9fd + 44883f2), commit d4d3d06
Showing 22 changed files with 253 additions and 224 deletions.
18 changes: 18 additions & 0 deletions .jscsrc
@@ -0,0 +1,18 @@
{
"preset": "node-style-guide",
"requireTrailingComma": null,
"requireCapitalizedComments": null,
"requireSpaceAfterKeywords": [
"if",
"else",
"for",
"while",
"do",
"switch",
"case",
"return",
"try",
"typeof"
],
"requireCamelCaseOrUpperCaseIdentifiers": "ignoreProperties"
}
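
The "lint" script added to package.json later in this commit points jscs at this file from the command line. As a rough sketch only (not part of this diff), the same configuration could be exercised from Node; Checker, registerDefaultRules, configure, and checkString are assumed from jscs 1.x's documented programmatic API:

var fs = require('fs');
var Checker = require('jscs');

var checker = new Checker();
checker.registerDefaultRules();
// Load the configuration added above (node-style-guide preset plus overrides).
checker.configure(JSON.parse(fs.readFileSync('.jscsrc', 'utf8')));

// The preset flags the space before "(" in an anonymous function expression,
// one of the patterns cleaned up in the files below.
var errors = checker.checkString('var noop = function () {};\n');
console.log(errors.getErrorList().length + ' style error(s) found');
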
6 changes: 3 additions & 3 deletions lib/datastore/entity.js
Original file line number Diff line number Diff line change
@@ -28,7 +28,7 @@ var OP_TO_OPERATOR = {
'>=': 'GREATER_THAN_OR_EQUAL',
'<': 'LESS_THAN',
'<=': 'LESS_THAN_OR_EQUAL',
-  'HAS_ANCESTOR': 'HAS_ANCESTOR'
+  HAS_ANCESTOR: 'HAS_ANCESTOR'
};

/** @const {object} Conversion map for query sign -> order protocol value. */
@@ -221,7 +221,7 @@ function keyToKeyProto(key) {
var path = [];
for (var i = 0; i < keyPath.length; i += 2) {
var p = { kind: keyPath[i] };
-    var val = keyPath[i+1];
+    var val = keyPath[i + 1];
if (val) {
// if not numeric, set key name.
if (isNaN(val)) {
@@ -343,7 +343,7 @@ function propertyToValue(property) {
if (exists(property.timestamp_microseconds_value)) {
var microSecs = parseInt(
property.timestamp_microseconds_value.toString(), 10);
-    return new Date(microSecs/1000);
+    return new Date(microSecs / 1000);
}
if (exists(property.key_value)) {
return keyFromKeyProto(property.key_value);
12 changes: 8 additions & 4 deletions lib/datastore/request.js
@@ -365,19 +365,23 @@ DatastoreRequest.prototype.save = function(entities, callback) {

if (method) {
switch (method) {
-      case 'insert':
+      case 'insert': {
        acc.insert.push(ent);
        break;
-      case 'update':
+      }
+      case 'update': {
        acc.update.push(ent);
        break;
-      case 'upsert':
+      }
+      case 'upsert': {
        acc.upsert.push(ent);
        break;
-      case 'insert_auto_id':
+      }
+      case 'insert_auto_id': {
        insertIndexes.push(index);
        acc.insert_auto_id.push(ent);
        break;
+      }
}
} else {
if (entity.isKeyComplete(entityObject.key)) {
2 changes: 1 addition & 1 deletion lib/index.js
@@ -225,7 +225,7 @@ gcloud.pubsub = function(config) {
* keyFilename: '/path/to/keyfile.json'
* });
*/
-gcloud.search = function (config) {
+gcloud.search = function(config) {
return new Search(config);
};

4 changes: 2 additions & 2 deletions lib/storage/file.js
@@ -1201,8 +1201,8 @@ File.prototype.setMetadata = function(metadata, callback) {
File.prototype.makePrivate = function(options, callback) {
var that = this;
if (util.is(options, 'function')) {
-    callback = options;
-    options = {};
+    callback = options;
+    options = {};
}
var path = '/o/' + encodeURIComponent(this.name);
var query = { predefinedAcl: options.strict ? 'private' : 'projectPrivate' };
3 changes: 2 additions & 1 deletion package.json
@@ -72,6 +72,7 @@
"dox": "^0.7.0",
"glob": "^5.0.9",
"istanbul": "^0.3.5",
"jscs": "^1.13.1",
"jshint": "^2.6.0",
"mitm": "^1.1.0",
"mocha": "^2.1.0",
@@ -80,7 +81,7 @@
},
"scripts": {
"docs": "./scripts/docs.sh",
"lint": "jshint lib/ system-test/ test/",
"lint": "jshint lib/ system-test/ test/ && jscs lib/ system-test/ test/",
"test": "npm run docs && mocha test/*/*.js test/docs.js",
"system-test": "mocha system-test/* --timeout 30000",
"cover": "istanbul cover -x 'system-test/*' _mocha -- --timeout 30000 test/*/*.js test/docs.js system-test/*",
2 changes: 1 addition & 1 deletion system-test/datastore.js
@@ -84,7 +84,7 @@ describe('datastore', function() {
buf: new Buffer('010100000000000000000059400000000000006940', 'hex')
};

-    ds.save({ key: postKey, data: data }, function (err) {
+    ds.save({ key: postKey, data: data }, function(err) {
assert.ifError(err);

var assignedId = postKey.path[1];
6 changes: 3 additions & 3 deletions system-test/storage.js
@@ -491,14 +491,14 @@ describe('storage', function() {

var fileSize = file.metadata.size;
var byteRange = {
-        start: Math.floor(fileSize * 1/3),
-        end: Math.floor(fileSize * 2/3)
+        start: Math.floor(fileSize * 1 / 3),
+        end: Math.floor(fileSize * 2 / 3)
};
var expectedContentSize = byteRange.start + 1;

var sizeStreamed = 0;
file.createReadStream(byteRange)
-        .on('data', function (chunk) {
+        .on('data', function(chunk) {
sizeStreamed += chunk.length;
})
.on('error', done)
12 changes: 6 additions & 6 deletions test/bigquery/index.js
@@ -25,10 +25,10 @@ function FakeTable(a, b) {
Table.call(this, a, b);
}

-var mergeSchemaWithRows_Override;
+var mergeSchemaWithRowsOverride;
FakeTable.mergeSchemaWithRows_ = function() {
var args = [].slice.apply(arguments);
-  return (mergeSchemaWithRows_Override || Table.mergeSchemaWithRows_)
+  return (mergeSchemaWithRowsOverride || Table.mergeSchemaWithRows_)
.apply(null, args);
};

@@ -481,8 +481,8 @@ describe('BigQuery', function() {
var rows = [{ row: 'a' }, { row: 'b' }, { row: 'c' }];
var schema = [{ fields: [] }];

-      mergeSchemaWithRows_Override = function(s, r) {
-        mergeSchemaWithRows_Override = null;
+      mergeSchemaWithRowsOverride = function(s, r) {
+        mergeSchemaWithRowsOverride = null;
assert.deepEqual(s, schema);
assert.deepEqual(r, rows);
done();
@@ -523,8 +523,8 @@ describe('BigQuery', function() {
});
};

-      mergeSchemaWithRows_Override = function() {
-        mergeSchemaWithRows_Override = null;
+      mergeSchemaWithRowsOverride = function() {
+        mergeSchemaWithRowsOverride = null;
return ROWS;
};

18 changes: 9 additions & 9 deletions test/bigquery/table.js
@@ -30,11 +30,11 @@ function FakeFile(a, b) {
File.call(this, a, b);
}

-var makeWritableStream_Override;
+var makeWritableStreamOverride;
var fakeUtil = extend({}, util, {
makeWritableStream: function() {
var args = [].slice.call(arguments);
-    (makeWritableStream_Override || util.makeWritableStream).apply(null, args);
+    (makeWritableStreamOverride || util.makeWritableStream).apply(null, args);
}
});

@@ -95,7 +95,7 @@ describe('BigQuery/Table', function() {
});

beforeEach(function() {
-    makeWritableStream_Override = null;
+    makeWritableStreamOverride = null;
table = new Table(DATASET, TABLE_ID);
});

@@ -271,7 +271,7 @@ describe('BigQuery/Table', function() {
it('should use a string as the file type', function(done) {
var fileType = 'csv';

-      makeWritableStream_Override = function(stream, options) {
+      makeWritableStreamOverride = function(stream, options) {
var load = options.metadata.configuration.load;
assert.equal(load.sourceFormat, 'CSV');
done();
@@ -305,7 +305,7 @@ describe('BigQuery/Table', function() {
it('should make a writable stream when written to', function(done) {
var stream;

-      makeWritableStream_Override = function(s) {
+      makeWritableStreamOverride = function(s) {
assert.equal(s, stream);
done();
};
@@ -315,7 +315,7 @@
});

it('should pass the connection', function(done) {
-      makeWritableStream_Override = function(stream, options) {
+      makeWritableStreamOverride = function(stream, options) {
assert.deepEqual(options.connection, table.connection);
done();
};
@@ -324,7 +324,7 @@
});

it('should pass extended metadata', function(done) {
-      makeWritableStream_Override = function(stream, options) {
+      makeWritableStreamOverride = function(stream, options) {
assert.deepEqual(options.metadata, {
configuration: {
load: {
@@ -345,7 +345,7 @@
});

it('should pass the correct request uri', function(done) {
-      makeWritableStream_Override = function(stream, options) {
+      makeWritableStreamOverride = function(stream, options) {
var uri = 'https://www.googleapis.com/upload/bigquery/v2/projects/' +
table.bigQuery.projectId + '/jobs';
assert.equal(options.request.uri, uri);
@@ -363,7 +363,7 @@
return { id: id };
};

-      makeWritableStream_Override = function(stream, options, callback) {
+      makeWritableStreamOverride = function(stream, options, callback) {
callback(metadata);
};

4 changes: 2 additions & 2 deletions test/common/stream-router.js
@@ -37,7 +37,7 @@ describe('streamRouter', function() {
warnOnUnregistered: false
});
streamRouter = require('../../lib/common/stream-router.js');
-    var streamRouter_Cached = extend(true, {}, streamRouter);
+    var streamRouterCached = extend(true, {}, streamRouter);

Object.keys(streamRouter).forEach(function(streamRouterMethod) {
if (typeof streamRouter[streamRouterMethod] !== 'function') {
@@ -50,7 +50,7 @@
if (streamRouterOverrides[streamRouterMethod]) {
return streamRouterOverrides[streamRouterMethod].apply(this, args);
} else {
-        return streamRouter_Cached[streamRouterMethod].apply(this, args);
+        return streamRouterCached[streamRouterMethod].apply(this, args);
}
};
});
