diff --git a/.travis.yml b/.travis.yml index b7232fa..9032acb 100644 --- a/.travis.yml +++ b/.travis.yml @@ -15,8 +15,6 @@ notifications: node_js: - '6' - - '5' - - '4' env: - COVERAGE=false diff --git a/README.md b/README.md index 82c6336..5786fe7 100644 --- a/README.md +++ b/README.md @@ -9,7 +9,7 @@ Currently works only with PostgreSQL (including PostGIS), MySQL (with spatial ca ## Table of Contents * [Installation](#installation) -* [Usage](#usage) +* [API](#api) ## Installation @@ -19,6 +19,7 @@ In order to use this library, you must also install the additional libraries in npm install pg --save npm install pg-copy-streams --save + npm install pg-query-stream --save npm install pg-hstore --save #### With pg and node v0.10.x @@ -37,66 +38,13 @@ You must also install the package `promise-polyfill` and write additional code. Usage with SQLite requires that sqlite is installed and is available via a unix command line. -## Usage: +## API: -### Downloading the GTFS File: +### GTFS(options) -```js - var GTFS = require('gtfs-sequelize'); - - var downloadConfig = { - gtfsUrl: 'http://feed.rvtd.org/googleFeeds/static/google_transit.zip', - downloadsDir: 'downloads' - }; - - var gtfs = GTFS(downloadConfig); - gtfs.downloadGtfs(function() { - //download has finished callback - }); - ``` - -### Loading GTFS into Database: - -```js - var GTFS = require('gtfs-sequelize'); - - var pgConfig = { - database: 'postgres://gtfs_sequelize:gtfs_sequelize@localhost:5432/gtfs-sequelize-test', - downloadsDir: 'downloads', - gtfsFileOrFolder: 'google_transit.zip', - sequelizeOptions: { - logging: false - } - } +Create a new GTFS API. 
- var gtfs = GTFS(pgConfig); - gtfs.loadGtfs(function() { - //database loading has finished callback - }); - ``` - -### Loading into a DB with PostGIS installed: - -```js - var GTFS = require('gtfs-sequelize'); - - var pgConfig = { - database: 'postgres://gtfs_sequelize:gtfs_sequelize@localhost:5432/gtfs-sequelize-test', - downloadsDir: 'downloads', - gtfsFileOrFolder: 'google_transit.zip', - spatial: true, - sequelizeOptions: { - logging: false - } - } - - var gtfs = GTFS(pgConfig); - gtfs.loadGtfs(function() { - //database loading has finished callback - }); - ``` - -### Querying a specific schema within a DB: +Example: ```js var GTFS = require('gtfs-sequelize'); @@ -107,8 +55,7 @@ var pgConfig = { gtfsFileOrFolder: 'google_transit.zip', spatial: true, sequelizeOptions: { - logging: false, - schema: 'test_schema' + logging: false } } @@ -117,3 +64,41 @@ gtfs.loadGtfs(function() { //database loading has finished callback }); ``` + +#### options + +| Key | Value | +| -- | -- | +| database | A database connection string. You must specify a user and a database in your connection string. The database must already exist, but the tables within the db do not need to exist. | +| downloadsDir | The directory where you want the feed zip files downloaded to or where you're going to read the feed from. | +| gtfsFileOrFolder | The (zip) file or folder to load the gtfs from | +| interpolateStopTimes | Default is undefined. If true, after loading the stop_times table, all stop_times with undefined arrival and departure times will be updated to include interpolated arrival and departure times. | +| sequelizeOptions | Options to pass to sequelize. Note: to use a specific schema you'll want to pass something like this: `{ schema: 'your_schema' }` | +| spatial | Default is undefined. If true, spatial tables for the shapes and stops will be created. | + +### gtfs.connectToDatabase() + +Return a sequelize api of the database. 
+ +Example: + +```js +var db = gtfs.connectToDatabase() + +db.stop.findAll() + .then(stops => { + console.log(stops) + }) +``` + +### gtfs.downloadGtfs(callback) + +If a url is provided, the feed will be attempted to be downloaded. Works with `http`, `https` and `ftp`. + +### gtfs.interpolateStopTimes(callback) + +Interpolate stop_times with undefined arrival and departure times. If you load a gtfs with the `interpolateStopTimes` flag set to true, you don't need to call this. + +### gtfs.loadGtfs(callback) + +Load the gtfs into the database. diff --git a/index.js b/index.js index d767dae..097035d 100644 --- a/index.js +++ b/index.js @@ -1,40 +1,47 @@ -var path = require('path'), - downloadGtfs = require('./lib/download.js'), - Database = require('./models'), - loadgtfs = require('./lib/gtfsLoader.js'); -module.exports = function(config) { +const downloadGtfs = require('./lib/download.js') +const loadgtfs = require('./lib/gtfsLoader.js') +const operations = require('./lib/operations') +const Database = require('./models') - var connectToDatabase = function(rawModels) { - var db = Database(config.database, config.sequelizeOptions ? config.sequelizeOptions : {}); - if(!rawModels && config.spatial) { - db.stop = db.sequelize.import('models/spatial/stop.js'); - db.shape_gis = db.sequelize.import('models/spatial/shape_gis.js'); - db.trip = db.sequelize.import('models/spatial/trip.js'); +module.exports = function (config) { + const connectToDatabase = function (rawModels) { + const db = Database(config.database, config.sequelizeOptions ? 
config.sequelizeOptions : {}) + if (!rawModels && config.spatial) { + db.stop = db.sequelize.import('models/spatial/stop.js') + db.shape_gis = db.sequelize.import('models/spatial/shape_gis.js') + db.trip = db.sequelize.import('models/spatial/trip.js') // reassociate spatially-enable models - db.stop.associate(db); - db.shape_gis.associate(db); - db.trip.associate(db); + db.stop.associate(db) + db.shape_gis.associate(db) + db.trip.associate(db) } - return db; + return db } - var download = function(callback) { - downloadGtfs(config.gtfsUrl, config.downloadsDir, callback); + const download = function (callback) { + downloadGtfs(config.gtfsUrl, config.downloadsDir, callback) } - var loadGtfs = function(callback) { + const interpolateStopTimes = function (callback) { + const db = connectToDatabase() + operations.interpolateStopTimes(db, callback) + } + + const loadGtfs = function (callback) { loadgtfs(config.downloadsDir, config.gtfsFileOrFolder, connectToDatabase(true), config.spatial, - callback); + config.interpolateStopTimes, + callback) } return { config: config, connectToDatabase: connectToDatabase, downloadGtfs: download, + interpolateStopTimes: interpolateStopTimes, loadGtfs: loadGtfs } } diff --git a/lib/download.js b/lib/download.js index a7fba38..ff05937 100644 --- a/lib/download.js +++ b/lib/download.js @@ -40,7 +40,7 @@ module.exports = function (gtfsUrl, downloadsDir, callback) { rimraf(dlFile, cb) }, dl: ['rm', function (results, cb) { - download(gtfsUrl, dlFile) + download(gtfsUrl, downloadsDir, { filename: 'google_transit.zip' }) .then(() => cb()) .catch(cb) }] diff --git a/lib/gtfsLoader.js b/lib/gtfsLoader.js index 4313b6c..cf4f2e8 100644 --- a/lib/gtfsLoader.js +++ b/lib/gtfsLoader.js @@ -9,10 +9,12 @@ var rimraf = require('rimraf') var unzip = require('unzip2') var uuid = require('uuid') +const operations = require('./operations') var util = require('./util.js') var DATE_FORMAT = 'YYYYMMDD' var lastAgencyId, numAgencies +let hasShapeTable = false 
// convert dateString to moment var toMoment = function (dateString) { @@ -30,7 +32,7 @@ var toSecondsAfterMidnight = function (timeString) { parseInt(timeArr[2]) } -var loadGtfs = function (extractedFolder, db, isSpatial, callback) { +var loadGtfs = function (extractedFolder, db, isSpatial, interpolateStopTimes, callback) { numAgencies = 0 lastAgencyId = null @@ -74,24 +76,31 @@ var loadGtfs = function (extractedFolder, db, isSpatial, callback) { } var postProcess = function (postProcessCallback) { + const postprocesses = [] if (isSpatial) { var dialect = db.sequelize.options.dialect if (['postgres', 'mysql'].indexOf(dialect) === -1) { var err = Error('Spatial columns not supported for dialect ' + dialect + '.') - postProcessCallback(err) - } else { - async.series([ - makeStopGeom, - makeShapeTable - ], - function (err, results) { - postProcessCallback(err) - } - ) + return postProcessCallback(err) } - } else { - postProcessCallback() + postprocesses.push(makeStopGeom) + postprocesses.push(makeShapeTable) + } + + if (interpolateStopTimes) { + postprocesses.push(doInterpolation) } + + async.series( + postprocesses, + function (err, results) { + postProcessCallback(err) + } + ) + } + + function doInterpolation (interpolationCallback) { + operations.interpolateStopTimes(db, interpolationCallback) } var makeStopGeom = function (seriesCallback) { @@ -141,6 +150,10 @@ var loadGtfs = function (extractedFolder, db, isSpatial, callback) { } var makeShapeTable = function (seriesCallback) { + if (!hasShapeTable) { + console.log('shape table does not exist, skipping creation of shape_gis table') + return seriesCallback() + } console.log('creating shape_gis table') var processShape = function (shapePoint, shapeCallback) { db.shape.findAll({ @@ -315,7 +328,7 @@ var insertCSVInTable = function (insertCfg, callback) { // prepare processing function, but don't run it until file existance is confirmed var processTable = function () { insertCfg.model.sync({force: 
true}).then(function () { - var streamInserterCfg = util.makeInserterConfig(insertCfg.model) + var streamInserterCfg = util.makeStreamerConfig(insertCfg.model) var inserter = dbStreamer.getInserter(streamInserterCfg) inserter.connect(function (err) { @@ -487,13 +500,13 @@ var loadCalendarDates = function (extractedFolder, db, callback) { var processCalendarDates = function () { db.calendar_date.sync({force: true}).then(function () { var serviceIdsNotInCalendar = [] - var calendarInserterConfig = util.makeInserterConfig(db.calendar) + var calendarInserterConfig = util.makeStreamerConfig(db.calendar) // create inserter for calendar dates var calendarInserter = dbStreamer.getInserter(calendarInserterConfig) calendarInserter.connect(function (err, client) { if (err) return callback(err) - var calendarDateInserterConfig = util.makeInserterConfig(db.calendar_date) + var calendarDateInserterConfig = util.makeStreamerConfig(db.calendar_date) calendarDateInserterConfig.client = client calendarDateInserterConfig.deferUntilEnd = true @@ -602,6 +615,12 @@ var loadStopTimes = function (extractedFolder, db, callback) { // change arrival and departure times into integer of seconds after midnight line.arrival_time = toSecondsAfterMidnight(line.arrival_time) line.departure_time = toSecondsAfterMidnight(line.departure_time) + + if (line.arrival_time !== null && line.departure_time === null) { + line.departure_time = line.arrival_time + } else if (line.departure_time !== null && line.arrival_time === null) { + line.arrival_time = line.departure_time + } return line } }, @@ -625,11 +644,28 @@ var loadFareRules = function (extractedFolder, db, callback) { } var loadShapes = function (extractedFolder, db, callback) { - insertCSVInTable({ - filename: path.join(extractedFolder, 'shapes.txt'), - model: db.shape - }, - callback) + const filename = path.join(extractedFolder, 'shapes.txt') + fs.stat( + filename, + function (err, stats) { + if (!err) { + // shapes.txt exists + hasShapeTable = 
true + insertCSVInTable( + { + filename: path.join(extractedFolder, 'shapes.txt'), + model: db.shape + }, + callback + ) + } else if (err && err.code === 'ENOENT') { + console.log(`${filename} <--- FILE NOT FOUND. SKIPPING.`) + callback() + } else if (err) { + callback(err) + } + } + ) } var loadFrequencies = function (extractedFolder, db, callback) { @@ -661,7 +697,14 @@ var loadFeedInfo = function (extractedFolder, db, callback) { callback) } -module.exports = function (downloadsDir, gtfsFileOrFolder, db, isSpatial, callback) { +module.exports = function ( + downloadsDir, + gtfsFileOrFolder, + db, + isSpatial, + interpolateStopTimes, + callback +) { // determine if gtfs is a file or folder var gtfsPath = path.join(downloadsDir, gtfsFileOrFolder) fs.lstat(gtfsPath, function (err, stats) { @@ -671,7 +714,7 @@ module.exports = function (downloadsDir, gtfsFileOrFolder, db, isSpatial, callba } if (stats.isDirectory()) { - loadGtfs(gtfsPath, db, isSpatial, callback) + loadGtfs(gtfsPath, db, isSpatial, interpolateStopTimes, callback) } else { // create unzipper (assuming gtfs is in zip file) var extractFolder = path.join(downloadsDir, 'google_transit') @@ -680,7 +723,7 @@ module.exports = function (downloadsDir, gtfsFileOrFolder, db, isSpatial, callba // create handler to process gtfs upon completion of unzip extractor.on('close', function () { - loadGtfs(extractFolder, db, isSpatial, callback) + loadGtfs(extractFolder, db, isSpatial, interpolateStopTimes, callback) } ) diff --git a/lib/operations.js b/lib/operations.js new file mode 100644 index 0000000..25c88f2 --- /dev/null +++ b/lib/operations.js @@ -0,0 +1,134 @@ +const async = require('async') +const dbStreamer = require('db-streamer') + +const util = require('./util') + +/** + * Make an update query to the db to set the interpolated times in + * a particular range of a particular trip + */ +function updateInterpolatedTimes (cfg, callback) { + const db = cfg.db + const lastTimepoint = cfg.lastTimepoint + const 
nextTimepoint = cfg.nextTimepoint + const timeDiff = nextTimepoint.arrival_time - lastTimepoint.departure_time + let literal + // sqlite null is a string + if (nextTimepoint.shape_dist_traveled && nextTimepoint.shape_dist_traveled !== 'NULL') { + // calculate interpolation based off of distance ratios + const distanceTraveled = nextTimepoint.shape_dist_traveled - lastTimepoint.shape_dist_traveled + literal = `${lastTimepoint.departure_time} + + ${timeDiff} * + (shape_dist_traveled - ${lastTimepoint.shape_dist_traveled}) / + ${distanceTraveled}` + } else { + // calculate interpolation based off of stop sequence ratios + const numStopsPassed = nextTimepoint.stop_sequence - lastTimepoint.stop_sequence + literal = `${lastTimepoint.departure_time} + + ${timeDiff} * + (stop_sequence - ${lastTimepoint.stop_sequence}) / + ${numStopsPassed}` + } + const updateLiteral = db.sequelize.literal(literal) + db.stop_time + .update( + { + arrival_time: updateLiteral, + departure_time: updateLiteral + }, + { + where: { + trip_id: lastTimepoint.trip_id, + stop_sequence: { + $gt: lastTimepoint.stop_sequence, + $lt: nextTimepoint.stop_sequence + } + } + } + ) + .then(() => { + callback() + }) + .catch(callback) +} + +/** + * Calculate and assign an approximate arrival and departure time + * at all stop_times that have an undefined arrival and departure time + */ +function interpolateStopTimes (db, callback) { + console.log('interpolating stop times') + const streamerConfig = util.makeStreamerConfig(db.trip) + const querier = dbStreamer.getQuerier(streamerConfig) + const maxUpdateConcurrency = db.trip.sequelize.getDialect() === 'sqlite' ? 
1 : 100 + const updateQueue = async.queue(updateInterpolatedTimes, maxUpdateConcurrency) + + /** + * Helper function to call upon completion of interpolation + */ + function onComplete (err) { + if (err) { + console.log('interpolation encountered an error: ', err) + return callback(err) + } + updateQueue.drain = () => { + callback(err) + } + } + + // TODO: fix this cause it doesn't work w/ sqlite with a schema for some reason + const statement = `SELECT trip_id FROM ${streamerConfig.tableName}` + querier.execute( + statement, + row => { + // get all stop_times for trip + db.stop_time + .findAll({ + where: { + trip_id: row.trip_id + } + }) + // iterate through stop times to determine null arrival or departure times + .then(stopTimes => { + let lastStopTime + let lastTimepoint + let lookingForNextTimepoint = false + + stopTimes.forEach(stopTime => { + if (lookingForNextTimepoint) { + // check if current stop time has a time + // mysql null stop times are showing up as 0, which might be a bug elsewhere + // sqlite null shows up as 'NULL' + if ( + stopTime.arrival_time !== null && + stopTime.arrival_time !== 'NULL' && + stopTime.arrival_time >= lastTimepoint.departure_time + ) { + // found next timepoint + // make update query to set interpolated times + updateQueue.push({ + db: db, + lastTimepoint: lastTimepoint, + nextTimepoint: stopTime + }) + lookingForNextTimepoint = false + } + } else { + // sqlite uninterpolated shows up as 'NULL' + if (!stopTime.arrival_time || stopTime.arrival_time === 'NULL') { + lastTimepoint = lastStopTime + lookingForNextTimepoint = true + } + } + lastStopTime = stopTime + }) + }) + .catch(onComplete) + }, + onComplete + ) +} + +module.exports = { + interpolateStopTimes: interpolateStopTimes +} diff --git a/lib/util.js b/lib/util.js index 05d46fd..4fe6746 100644 --- a/lib/util.js +++ b/lib/util.js @@ -9,7 +9,7 @@ util.getConnectionString = function(sequelize) { sequelize.config.database; } -util.makeInserterConfig = function(model) { 
+util.makeStreamerConfig = function(model) { var dialect = model.sequelize.getDialect() var schema = model.sequelize.options.schema var config = { diff --git a/models/route.js b/models/route.js index 74daa22..cc1369a 100644 --- a/models/route.js +++ b/models/route.js @@ -15,7 +15,7 @@ module.exports = function(sequelize, DataTypes) { }, route_short_name: DataTypes.STRING(50), route_long_name: DataTypes.STRING(255), - route_desc: DataTypes.STRING(255), + route_desc: DataTypes.TEXT, route_type: DataTypes.INTEGER, route_url: DataTypes.STRING(255), route_color: DataTypes.STRING(255), diff --git a/models/spatial/stop.js b/models/spatial/stop.js index 538a92b..a21f23f 100644 --- a/models/spatial/stop.js +++ b/models/spatial/stop.js @@ -8,7 +8,7 @@ module.exports = function(sequelize, DataTypes) { }, stop_code: DataTypes.STRING(20), stop_name: DataTypes.STRING(255), - stop_desc: DataTypes.STRING(255), + stop_desc: DataTypes.TEXT, stop_lat: DataTypes.FLOAT(7), stop_lon: DataTypes.FLOAT(7), zone_id: DataTypes.STRING(255), diff --git a/models/stop.js b/models/stop.js index 5e01992..72656b9 100644 --- a/models/stop.js +++ b/models/stop.js @@ -8,7 +8,7 @@ module.exports = function(sequelize, DataTypes) { }, stop_code: DataTypes.STRING(20), stop_name: DataTypes.STRING(255), - stop_desc: DataTypes.STRING(255), + stop_desc: DataTypes.TEXT, stop_lat: DataTypes.FLOAT(7), stop_lon: DataTypes.FLOAT(7), zone_id: DataTypes.STRING(255), diff --git a/package.json b/package.json index 1559747..14a6217 100644 --- a/package.json +++ b/package.json @@ -3,6 +3,9 @@ "version": "0.0.0-semantically-release", "description": "A model for the static GTFS using sequelize.", "main": "index.js", + "engines": { + "node": ">=6" + }, "scripts": { "lint": "./node_modules/.bin/eslint lib", "test": "npm run test-download && npm run test-mysql && npm run test-mysql-spatial && npm run test-postgres && npm run test-postgis && npm run test-sqlite", @@ -55,7 +58,7 @@ "dependencies": { "async": "^2.0.0-rc.5", 
"csvtojson": "^1.1.9", - "db-streamer": "^1.1.0", + "db-streamer": "^1.2.1", "download": "^6.2.5", "ftp": "^0.3.10", "moment": "^2.10.6", @@ -84,6 +87,7 @@ "pg": "^6.0.0", "pg-copy-streams": "^1.0.0", "pg-hstore": "^2.3.2", + "pg-query-stream": "^1.1.1", "promise-polyfill": "^6.0.0", "semantic-release": "^6.3.2", "sqlite3": "^3.1.8", diff --git a/tests/db.load.test.js b/tests/db.load.test.js index 2900091..ba22fd2 100644 --- a/tests/db.load.test.js +++ b/tests/db.load.test.js @@ -2,10 +2,6 @@ var assert = require('chai').assert var GTFS = require('../index.js') -if (typeof Promise === 'undefined') { - global.Promise = require('promise-polyfill') -} - var util = require('./util.js') // prepare config for tests @@ -16,14 +12,11 @@ describe(process.env.DIALECT, function () { util.zipMockAgency(done) }) - beforeEach(function (done) { + afterEach(function (done) { // drop and create the database before each test var config = util.getConfig() var gtfs = GTFS(config) - var db = gtfs.connectToDatabase() - db.sequelize.drop() - .then(() => done()) - .catch(done) + util.dropDb(gtfs, done) }) it('data should load from folder', function (done) { @@ -51,7 +44,7 @@ describe(process.env.DIALECT, function () { var config = util.getConfig() this.timeout(config.maxLoadTimeout) - config.downloadsDir = 'tests' + config.downloadsDir = 'tests/feeds' config.gtfsFileOrFolder = 'invalid_feed_1' var gtfs = GTFS(config) @@ -65,7 +58,7 @@ describe(process.env.DIALECT, function () { var config = util.getConfig() this.timeout(config.maxLoadTimeout) - config.downloadsDir = 'tests' + config.downloadsDir = 'tests/feeds' config.gtfsFileOrFolder = 'invalid_feed_2' var gtfs = GTFS(config) @@ -79,8 +72,8 @@ describe(process.env.DIALECT, function () { var config = util.getConfig() this.timeout(config.maxLoadTimeout) - config.downloadsDir = 'tests' - config.gtfsFileOrFolder = 'feed_with_wide_range_in_calendar_dates' + config.downloadsDir = 'tests/feeds' + config.gtfsFileOrFolder = 
'wide_range_in_calendar_dates' var gtfs = GTFS(config) gtfs.loadGtfs(function (err) { @@ -107,38 +100,57 @@ describe(process.env.DIALECT, function () { }) }) - it('should load into a specific schema', function (done) { + it('should load a gtfs with only calendar_dates.txt', function (done) { var config = util.getConfig() this.timeout(config.maxLoadTimeout) - config.downloadsDir = 'tests' - config.gtfsFileOrFolder = 'mock_agency' - config.sequelizeOptions.logging = false - config.sequelizeOptions.schema = 'test_schema' + config.gtfsFileOrFolder = 'only_calendar_dates' var gtfs = GTFS(config) - gtfs.loadGtfs(done) }) - it('should load a gtfs with only calendar_dates.txt', function (done) { + it('should load a gtfs without calendar_dates.txt', function (done) { var config = util.getConfig() this.timeout(config.maxLoadTimeout) - config.gtfsFileOrFolder = 'feed_with_only_calendar_dates' + config.gtfsFileOrFolder = 'only_calendar' var gtfs = GTFS(config) gtfs.loadGtfs(done) }) - it('should load a gtfs without calendar_dates.txt', function (done) { + it('should load a gtfs and interpolate stop times', function (done) { var config = util.getConfig() this.timeout(config.maxLoadTimeout) - config.gtfsFileOrFolder = 'feed_with_only_calendar' + config.gtfsFileOrFolder = 'interpolated_no_shapes' + config.interpolateStopTimes = true var gtfs = GTFS(config) gtfs.loadGtfs(done) }) + + describe('with schema', () => { + afterEach(function (done) { + // drop and create the database before each test + var config = util.getConfig() + config.sequelizeOptions.schema = 'test_schema' + var gtfs = GTFS(config) + util.dropDb(gtfs, done) + }) + + it('should load into a specific schema', function (done) { + var config = util.getConfig() + this.timeout(config.maxLoadTimeout) + + config.gtfsFileOrFolder = 'mock_agency' + config.sequelizeOptions.schema = 'test_schema' + + var gtfs = GTFS(config) + + gtfs.loadGtfs(done) + }) + }) }) }) diff --git a/tests/db.operations.test.js 
b/tests/db.operations.test.js new file mode 100644 index 0000000..e4e8bf6 --- /dev/null +++ b/tests/db.operations.test.js @@ -0,0 +1,154 @@ +const assert = require('chai').assert +const rimraf = require('rimraf') + +const GTFS = require('../index.js') + +const util = require('./util.js') + +// prepare config for tests +describe(process.env.DIALECT, function () { + describe('operations', function () { + describe('interpolated stop times', () => { + const expectedStopTimesNoShapes = [ + { + arrival_time: 28800, + departure_time: 28800 + }, + { + arrival_time: 28880, + departure_time: 28880 + }, + { + arrival_time: 28960, + departure_time: 28960 + }, + { + arrival_time: 29040, + departure_time: 29040 + }, + { + arrival_time: 29160, + departure_time: 29160 + }, + { + arrival_time: 29280, + departure_time: 29280 + } + ] + + const expectedStopTimesWithShapes = [ + { + arrival_time: 28800, + departure_time: 28800 + }, + { + arrival_time: 28903, + departure_time: 28903 + }, + { + arrival_time: 28954, + departure_time: 28954 + }, + { + arrival_time: 29040, + departure_time: 29040 + }, + { + arrival_time: 29172, + departure_time: 29172 + }, + { + arrival_time: 29280, + departure_time: 29280 + } + ] + + const testConfigs = [ + { + describe: 'no shapes', + expectedStopTimes: expectedStopTimesNoShapes, + gtfsFileOrFolder: 'interpolated_no_shapes' + }, + { + describe: 'no shapes, with schema', + expectedStopTimes: expectedStopTimesNoShapes, + gtfsFileOrFolder: 'interpolated_no_shapes', + schema: 'test_schema' + }, + { + describe: 'with shapes', + expectedStopTimes: expectedStopTimesWithShapes, + gtfsFileOrFolder: 'interpolated_with_shapes' + } + ] + + testConfigs.forEach(testConfig => { + describe(testConfig.describe, () => { + const config = util.getConfig() + + config.gtfsFileOrFolder = testConfig.gtfsFileOrFolder + if (testConfig.schema) { + if (process.env.DIALECT === 'sqlite') { + console.warn('skipping sqlite test w/ schema cause I dunno why it\'s not working') + return + 
} + config.sequelizeOptions.schema = testConfig.schema + } + + const gtfs = GTFS(config) + + after(function (done) { + const sqliteStorage = config.sequelizeOptions.storage + if (sqliteStorage) { + console.log('remove sqlite storage') + rimraf(sqliteStorage, done) + } else { + util.dropDb(gtfs, done) + } + }) + + before(done => { + this.timeout(config.maxLoadTimeout) + gtfs.loadGtfs(done) + }) + + it('should correctly calculate interpolated stop times', (done) => { + this.timeout(config.maxLoadTimeout) + + // interpolate the stop times + gtfs.interpolateStopTimes(err => { + if (err) return done(err) + + const db = gtfs.connectToDatabase() + db.stop_time + .findAll({ + where: { + trip_id: '1' + }, + order: [ + ['stop_sequence', 'ASC'] + ] + }) + .then(stopTimes => { + for (let i = 0; i < stopTimes.length; i++) { + const expectedStopTime = testConfig.expectedStopTimes[i] + const actualStopTime = stopTimes[i] + assert.strictEqual( + Math.round(actualStopTime.arrival_time), + expectedStopTime.arrival_time + ) + assert.strictEqual( + Math.round(actualStopTime.departure_time), + expectedStopTime.departure_time + ) + } + done() + }) + .catch(done) + }) + }) + }) + }) + }) + }) +}) diff --git a/tests/db.query.test.js b/tests/db.query.test.js index 69286a5..a306217 100644 --- a/tests/db.query.test.js +++ b/tests/db.query.test.js @@ -2,10 +2,6 @@ var assert = require('chai').assert var moment = require('moment') var rimraf = require('rimraf') -if (typeof Promise === 'undefined') { - global.Promise = require('promise-polyfill') -} - var util = require('./util.js') // prepare config for tests @@ -22,7 +18,7 @@ describe(process.env.DIALECT, function () { if (sqliteStorage) { rimraf(sqliteStorage, done) } else { - done() + util.dropDb(gtfs, done) } }) @@ -30,7 +26,7 @@ describe(process.env.DIALECT, function () { this.timeout(maxLoadTimeout) // load mock gtfs file before running querying tests - config.downloadsDir = 'tests' + config.downloadsDir = 'tests/feeds' 
config.gtfsFileOrFolder = 'mock_agency' config.sequelizeOptions.logging = false config.sequelizeOptions.schema = undefined diff --git a/tests/feed_with_only_calendar/agency.txt b/tests/feeds/interpolated_no_shapes/agency.txt similarity index 100% rename from tests/feed_with_only_calendar/agency.txt rename to tests/feeds/interpolated_no_shapes/agency.txt diff --git a/tests/feed_with_only_calendar/calendar.txt b/tests/feeds/interpolated_no_shapes/calendar.txt similarity index 100% rename from tests/feed_with_only_calendar/calendar.txt rename to tests/feeds/interpolated_no_shapes/calendar.txt diff --git a/tests/feeds/interpolated_no_shapes/routes.txt b/tests/feeds/interpolated_no_shapes/routes.txt new file mode 100644 index 0000000..0cb3909 --- /dev/null +++ b/tests/feeds/interpolated_no_shapes/routes.txt @@ -0,0 +1,2 @@ +route_id,route_short_name,route_long_name,route_desc,route_type,route_url +1,1,Route 1,,3, diff --git a/tests/feeds/interpolated_no_shapes/stop_times.txt b/tests/feeds/interpolated_no_shapes/stop_times.txt new file mode 100644 index 0000000..3a1d0c7 --- /dev/null +++ b/tests/feeds/interpolated_no_shapes/stop_times.txt @@ -0,0 +1,7 @@ +trip_id,arrival_time,departure_time,stop_id,stop_sequence +1,08:00:00,08:00:00,1,1 +1,,,2,2 +1,,,3,3 +1,08:04:00,08:04:00,4,4 +1,,,5,5 +1,08:08:00,08:08:00,6,6 diff --git a/tests/feeds/interpolated_no_shapes/stops.txt b/tests/feeds/interpolated_no_shapes/stops.txt new file mode 100644 index 0000000..b6466bd --- /dev/null +++ b/tests/feeds/interpolated_no_shapes/stops.txt @@ -0,0 +1,7 @@ +stop_id,stop_code,stop_name,stop_desc,stop_lat,stop_lon,zone_id,stop_url,location_type,parent_station,stop_timezone,wheelchair_boarding +1,,Bean Creek,,37.04469,-122.02294,,,0,,, +2,,Erba,,37.04692,-122.01877,,,0,,, +3,,Disc,,37.04825,-122.01693,,,0,,, +4,,Carbonero,,37.05109,-122.0147,,,0,,, +5,,El Pueblo,,37.05541,-122.01199,,,0,,, +6,,Victor Sq,,37.05904,-122.01002,,,0,,, diff --git a/tests/feeds/interpolated_no_shapes/trips.txt 
b/tests/feeds/interpolated_no_shapes/trips.txt new file mode 100644 index 0000000..b2627b3 --- /dev/null +++ b/tests/feeds/interpolated_no_shapes/trips.txt @@ -0,0 +1,2 @@ +route_id,trip_id,direction_id,service_id +1,1,0,weekday diff --git a/tests/feeds/interpolated_with_shapes/agency.txt b/tests/feeds/interpolated_with_shapes/agency.txt new file mode 100644 index 0000000..f0dd9e1 --- /dev/null +++ b/tests/feeds/interpolated_with_shapes/agency.txt @@ -0,0 +1,2 @@ +agency_name,agency_url,agency_timezone,agency_lang,agency_phone +test,http://www.example.org,America/Los_Angeles,en,5555555555 diff --git a/tests/feeds/interpolated_with_shapes/calendar.txt b/tests/feeds/interpolated_with_shapes/calendar.txt new file mode 100644 index 0000000..a8af80e --- /dev/null +++ b/tests/feeds/interpolated_with_shapes/calendar.txt @@ -0,0 +1,2 @@ +service_id,monday,tuesday,wednesday,thursday,friday,saturday,sunday,start_date,end_date +weekday,1,1,1,1,1,0,0,20000101,21001231 diff --git a/tests/feeds/interpolated_with_shapes/routes.txt b/tests/feeds/interpolated_with_shapes/routes.txt new file mode 100644 index 0000000..0cb3909 --- /dev/null +++ b/tests/feeds/interpolated_with_shapes/routes.txt @@ -0,0 +1,2 @@ +route_id,route_short_name,route_long_name,route_desc,route_type,route_url +1,1,Route 1,,3, diff --git a/tests/feeds/interpolated_with_shapes/shapes.txt b/tests/feeds/interpolated_with_shapes/shapes.txt new file mode 100644 index 0000000..11a157a --- /dev/null +++ b/tests/feeds/interpolated_with_shapes/shapes.txt @@ -0,0 +1,7 @@ +shape_id,shape_pt_lat,shape_pt_lon,shape_pt_sequence +1,37.04469,-122.02294,1 +1,37.04692,-122.01877,2 +1,37.04825,-122.01693,3 +1,37.05109,-122.0147,4 +1,37.05541,-122.01199,5 +1,37.05904,-122.01002,6 diff --git a/tests/feeds/interpolated_with_shapes/stop_times.txt b/tests/feeds/interpolated_with_shapes/stop_times.txt new file mode 100644 index 0000000..a3fb7c9 --- /dev/null +++ b/tests/feeds/interpolated_with_shapes/stop_times.txt @@ -0,0 +1,7 @@ 
+trip_id,arrival_time,departure_time,stop_id,stop_sequence,shape_dist_traveled +1,08:00:00,08:00:00,1,1,0 +1,,,2,2,0.445481 +1,,,3,3,0.665792 +1,08:04:00,08:04:00,4,4,1.038474 +1,,,5,5,1.575674 +1,08:08:00,08:08:00,6,6,2.015540 diff --git a/tests/feeds/interpolated_with_shapes/stops.txt b/tests/feeds/interpolated_with_shapes/stops.txt new file mode 100644 index 0000000..b6466bd --- /dev/null +++ b/tests/feeds/interpolated_with_shapes/stops.txt @@ -0,0 +1,7 @@ +stop_id,stop_code,stop_name,stop_desc,stop_lat,stop_lon,zone_id,stop_url,location_type,parent_station,stop_timezone,wheelchair_boarding +1,,Bean Creek,,37.04469,-122.02294,,,0,,, +2,,Erba,,37.04692,-122.01877,,,0,,, +3,,Disc,,37.04825,-122.01693,,,0,,, +4,,Carbonero,,37.05109,-122.0147,,,0,,, +5,,El Pueblo,,37.05541,-122.01199,,,0,,, +6,,Victor Sq,,37.05904,-122.01002,,,0,,, diff --git a/tests/feeds/interpolated_with_shapes/trips.txt b/tests/feeds/interpolated_with_shapes/trips.txt new file mode 100644 index 0000000..bf063cc --- /dev/null +++ b/tests/feeds/interpolated_with_shapes/trips.txt @@ -0,0 +1,2 @@ +route_id,trip_id,direction_id,service_id,shape_id +1,1,0,weekday,1 diff --git a/tests/invalid_feed_1/bazin.ga b/tests/feeds/invalid_feed_1/bazin.ga similarity index 100% rename from tests/invalid_feed_1/bazin.ga rename to tests/feeds/invalid_feed_1/bazin.ga diff --git a/tests/feed_with_only_calendar_dates/agency.txt b/tests/feeds/invalid_feed_2/agency.txt similarity index 100% rename from tests/feed_with_only_calendar_dates/agency.txt rename to tests/feeds/invalid_feed_2/agency.txt diff --git a/tests/invalid_feed_2/fare_attributes.txt b/tests/feeds/invalid_feed_2/fare_attributes.txt similarity index 100% rename from tests/invalid_feed_2/fare_attributes.txt rename to tests/feeds/invalid_feed_2/fare_attributes.txt diff --git a/tests/invalid_feed_2/fare_rules.txt b/tests/feeds/invalid_feed_2/fare_rules.txt similarity index 100% rename from tests/invalid_feed_2/fare_rules.txt rename to 
tests/feeds/invalid_feed_2/fare_rules.txt diff --git a/tests/invalid_feed_2/feed_info.txt b/tests/feeds/invalid_feed_2/feed_info.txt similarity index 100% rename from tests/invalid_feed_2/feed_info.txt rename to tests/feeds/invalid_feed_2/feed_info.txt diff --git a/tests/invalid_feed_2/frequencies.txt b/tests/feeds/invalid_feed_2/frequencies.txt similarity index 100% rename from tests/invalid_feed_2/frequencies.txt rename to tests/feeds/invalid_feed_2/frequencies.txt diff --git a/tests/feed_with_only_calendar_dates/routes.txt b/tests/feeds/invalid_feed_2/routes.txt similarity index 100% rename from tests/feed_with_only_calendar_dates/routes.txt rename to tests/feeds/invalid_feed_2/routes.txt diff --git a/tests/feed_with_only_calendar_dates/shapes.txt b/tests/feeds/invalid_feed_2/shapes.txt similarity index 100% rename from tests/feed_with_only_calendar_dates/shapes.txt rename to tests/feeds/invalid_feed_2/shapes.txt diff --git a/tests/feed_with_wide_range_in_calendar_dates/stop_times.txt b/tests/feeds/invalid_feed_2/stop_times.txt similarity index 100% rename from tests/feed_with_wide_range_in_calendar_dates/stop_times.txt rename to tests/feeds/invalid_feed_2/stop_times.txt diff --git a/tests/feed_with_only_calendar_dates/stops.txt b/tests/feeds/invalid_feed_2/stops.txt similarity index 100% rename from tests/feed_with_only_calendar_dates/stops.txt rename to tests/feeds/invalid_feed_2/stops.txt diff --git a/tests/invalid_feed_2/transfers.txt b/tests/feeds/invalid_feed_2/transfers.txt similarity index 100% rename from tests/invalid_feed_2/transfers.txt rename to tests/feeds/invalid_feed_2/transfers.txt diff --git a/tests/feed_with_wide_range_in_calendar_dates/trips.txt b/tests/feeds/invalid_feed_2/trips.txt similarity index 100% rename from tests/feed_with_wide_range_in_calendar_dates/trips.txt rename to tests/feeds/invalid_feed_2/trips.txt diff --git a/tests/feed_with_wide_range_in_calendar_dates/agency.txt b/tests/feeds/mock_agency/agency.txt similarity index 100% 
rename from tests/feed_with_wide_range_in_calendar_dates/agency.txt rename to tests/feeds/mock_agency/agency.txt diff --git a/tests/mock_agency/calendar.txt b/tests/feeds/mock_agency/calendar.txt similarity index 100% rename from tests/mock_agency/calendar.txt rename to tests/feeds/mock_agency/calendar.txt diff --git a/tests/mock_agency/calendar_dates.txt b/tests/feeds/mock_agency/calendar_dates.txt similarity index 100% rename from tests/mock_agency/calendar_dates.txt rename to tests/feeds/mock_agency/calendar_dates.txt diff --git a/tests/mock_agency/fare_attributes.txt b/tests/feeds/mock_agency/fare_attributes.txt similarity index 100% rename from tests/mock_agency/fare_attributes.txt rename to tests/feeds/mock_agency/fare_attributes.txt diff --git a/tests/mock_agency/fare_rules.txt b/tests/feeds/mock_agency/fare_rules.txt similarity index 100% rename from tests/mock_agency/fare_rules.txt rename to tests/feeds/mock_agency/fare_rules.txt diff --git a/tests/mock_agency/feed_info.txt b/tests/feeds/mock_agency/feed_info.txt similarity index 100% rename from tests/mock_agency/feed_info.txt rename to tests/feeds/mock_agency/feed_info.txt diff --git a/tests/mock_agency/frequencies.txt b/tests/feeds/mock_agency/frequencies.txt similarity index 100% rename from tests/mock_agency/frequencies.txt rename to tests/feeds/mock_agency/frequencies.txt diff --git a/tests/feed_with_wide_range_in_calendar_dates/routes.txt b/tests/feeds/mock_agency/routes.txt similarity index 100% rename from tests/feed_with_wide_range_in_calendar_dates/routes.txt rename to tests/feeds/mock_agency/routes.txt diff --git a/tests/mock_agency/shapes.txt b/tests/feeds/mock_agency/shapes.txt similarity index 84% rename from tests/mock_agency/shapes.txt rename to tests/feeds/mock_agency/shapes.txt index 7c37f50..dfac8a0 100644 --- a/tests/mock_agency/shapes.txt +++ b/tests/feeds/mock_agency/shapes.txt @@ -3,4 +3,4 @@ la-sea-shp,34.056313,-118.234014,1 la-sea-shp,36.970318,-118.705662,2 
la-sea-shp,42.939383,-122.099206,3 la-sea-shp,46.662348,-122.101693,4 -la-sea-shp,47.598398,-122.329480,5 \ No newline at end of file +la-sea-shp,47.598398,-122.329480,5 diff --git a/tests/invalid_feed_2/stop_times.txt b/tests/feeds/mock_agency/stop_times.txt similarity index 100% rename from tests/invalid_feed_2/stop_times.txt rename to tests/feeds/mock_agency/stop_times.txt diff --git a/tests/feed_with_wide_range_in_calendar_dates/stops.txt b/tests/feeds/mock_agency/stops.txt similarity index 100% rename from tests/feed_with_wide_range_in_calendar_dates/stops.txt rename to tests/feeds/mock_agency/stops.txt diff --git a/tests/mock_agency/transfers.txt b/tests/feeds/mock_agency/transfers.txt similarity index 100% rename from tests/mock_agency/transfers.txt rename to tests/feeds/mock_agency/transfers.txt diff --git a/tests/invalid_feed_2/trips.txt b/tests/feeds/mock_agency/trips.txt similarity index 100% rename from tests/invalid_feed_2/trips.txt rename to tests/feeds/mock_agency/trips.txt diff --git a/tests/feeds/only_calendar/agency.txt b/tests/feeds/only_calendar/agency.txt new file mode 100644 index 0000000..f0dd9e1 --- /dev/null +++ b/tests/feeds/only_calendar/agency.txt @@ -0,0 +1,2 @@ +agency_name,agency_url,agency_timezone,agency_lang,agency_phone +test,http://www.example.org,America/Los_Angeles,en,5555555555 diff --git a/tests/feeds/only_calendar/calendar.txt b/tests/feeds/only_calendar/calendar.txt new file mode 100644 index 0000000..a8af80e --- /dev/null +++ b/tests/feeds/only_calendar/calendar.txt @@ -0,0 +1,2 @@ +service_id,monday,tuesday,wednesday,thursday,friday,saturday,sunday,start_date,end_date +weekday,1,1,1,1,1,0,0,20000101,21001231 diff --git a/tests/feed_with_only_calendar/routes.txt b/tests/feeds/only_calendar/routes.txt similarity index 100% rename from tests/feed_with_only_calendar/routes.txt rename to tests/feeds/only_calendar/routes.txt diff --git a/tests/feed_with_only_calendar/stop_times.txt b/tests/feeds/only_calendar/stop_times.txt 
similarity index 100% rename from tests/feed_with_only_calendar/stop_times.txt rename to tests/feeds/only_calendar/stop_times.txt diff --git a/tests/feed_with_only_calendar/stops.txt b/tests/feeds/only_calendar/stops.txt similarity index 100% rename from tests/feed_with_only_calendar/stops.txt rename to tests/feeds/only_calendar/stops.txt diff --git a/tests/feed_with_only_calendar/trips.txt b/tests/feeds/only_calendar/trips.txt similarity index 100% rename from tests/feed_with_only_calendar/trips.txt rename to tests/feeds/only_calendar/trips.txt diff --git a/tests/invalid_feed_2/agency.txt b/tests/feeds/only_calendar_dates/agency.txt similarity index 100% rename from tests/invalid_feed_2/agency.txt rename to tests/feeds/only_calendar_dates/agency.txt diff --git a/tests/feed_with_only_calendar_dates/calendar_dates.txt b/tests/feeds/only_calendar_dates/calendar_dates.txt similarity index 100% rename from tests/feed_with_only_calendar_dates/calendar_dates.txt rename to tests/feeds/only_calendar_dates/calendar_dates.txt diff --git a/tests/invalid_feed_2/routes.txt b/tests/feeds/only_calendar_dates/routes.txt similarity index 100% rename from tests/invalid_feed_2/routes.txt rename to tests/feeds/only_calendar_dates/routes.txt diff --git a/tests/feed_with_wide_range_in_calendar_dates/shapes.txt b/tests/feeds/only_calendar_dates/shapes.txt similarity index 100% rename from tests/feed_with_wide_range_in_calendar_dates/shapes.txt rename to tests/feeds/only_calendar_dates/shapes.txt diff --git a/tests/feed_with_only_calendar_dates/stop_times.txt b/tests/feeds/only_calendar_dates/stop_times.txt similarity index 100% rename from tests/feed_with_only_calendar_dates/stop_times.txt rename to tests/feeds/only_calendar_dates/stop_times.txt diff --git a/tests/invalid_feed_2/stops.txt b/tests/feeds/only_calendar_dates/stops.txt similarity index 100% rename from tests/invalid_feed_2/stops.txt rename to tests/feeds/only_calendar_dates/stops.txt diff --git 
a/tests/feed_with_only_calendar_dates/trips.txt b/tests/feeds/only_calendar_dates/trips.txt similarity index 100% rename from tests/feed_with_only_calendar_dates/trips.txt rename to tests/feeds/only_calendar_dates/trips.txt diff --git a/tests/mock_agency/agency.txt b/tests/feeds/wide_range_in_calendar_dates/agency.txt similarity index 100% rename from tests/mock_agency/agency.txt rename to tests/feeds/wide_range_in_calendar_dates/agency.txt diff --git a/tests/feed_with_wide_range_in_calendar_dates/calendar.txt b/tests/feeds/wide_range_in_calendar_dates/calendar.txt similarity index 100% rename from tests/feed_with_wide_range_in_calendar_dates/calendar.txt rename to tests/feeds/wide_range_in_calendar_dates/calendar.txt diff --git a/tests/feed_with_wide_range_in_calendar_dates/calendar_dates.txt b/tests/feeds/wide_range_in_calendar_dates/calendar_dates.txt similarity index 100% rename from tests/feed_with_wide_range_in_calendar_dates/calendar_dates.txt rename to tests/feeds/wide_range_in_calendar_dates/calendar_dates.txt diff --git a/tests/mock_agency/routes.txt b/tests/feeds/wide_range_in_calendar_dates/routes.txt similarity index 100% rename from tests/mock_agency/routes.txt rename to tests/feeds/wide_range_in_calendar_dates/routes.txt diff --git a/tests/invalid_feed_2/shapes.txt b/tests/feeds/wide_range_in_calendar_dates/shapes.txt similarity index 100% rename from tests/invalid_feed_2/shapes.txt rename to tests/feeds/wide_range_in_calendar_dates/shapes.txt diff --git a/tests/mock_agency/stop_times.txt b/tests/feeds/wide_range_in_calendar_dates/stop_times.txt similarity index 100% rename from tests/mock_agency/stop_times.txt rename to tests/feeds/wide_range_in_calendar_dates/stop_times.txt diff --git a/tests/mock_agency/stops.txt b/tests/feeds/wide_range_in_calendar_dates/stops.txt similarity index 100% rename from tests/mock_agency/stops.txt rename to tests/feeds/wide_range_in_calendar_dates/stops.txt diff --git a/tests/mock_agency/trips.txt 
b/tests/feeds/wide_range_in_calendar_dates/trips.txt similarity index 100% rename from tests/mock_agency/trips.txt rename to tests/feeds/wide_range_in_calendar_dates/trips.txt diff --git a/tests/util.js b/tests/util.js index 008d2e7..583f36b 100644 --- a/tests/util.js +++ b/tests/util.js @@ -3,12 +3,33 @@ var path = require('path') var yazl = require('yazl') +if (typeof Promise === 'undefined') { + global.Promise = require('promise-polyfill') +} + +/** + * Helper to drop the db before or after a test + */ +function dropDb (gtfs, done) { + var db = gtfs.connectToDatabase() + db.sequelize.drop() + .then(() => { + console.log('dropped') + return db.sequelize.close() + }) + .then(() => { + console.log('closed') + done() + }) + .catch(done) +} + /** * Get gtfs config for a test suite */ function getConfig () { var config = { - downloadsDir: 'tests', + downloadsDir: 'tests/feeds', maxLoadTimeout: 60000, sequelizeOptions: { logging: false @@ -47,7 +68,7 @@ var zipMockAgency = function (callback) { var zipfile = new yazl.ZipFile() // add all files in mock agency folder - var zipSourceDir = 'tests/mock_agency' + var zipSourceDir = 'tests/feeds/mock_agency' fs.readdirSync(zipSourceDir) .forEach(function (file) { zipfile.addFile(path.join(zipSourceDir, file), file) @@ -69,6 +90,7 @@ var zipMockAgency = function (callback) { } module.exports = { + dropDb: dropDb, getConfig: getConfig, zipMockAgency: zipMockAgency }