Skip to content

Commit

Permalink
Fix off-by-one error storing offline content
Browse files Browse the repository at this point in the history
The final chunk was only being stored when the endTime of the final
segment happened to be null. An index with explicit end times for
all segments, on even a single stream, would trigger this bug.

For example, the end time of the final video segment may be null, but
the end time of the final audio segment may be equal to the content
duration.  The stored audio would be missing a chunk, causing the
computed stream limit to be too short on playback.

This corrects the off-by-one error in the storage code by explicitly
signalling when a segment is the last one in its stream, rather than
inferring it from a null endTime.

Closes #157

Change-Id: Ic99d53b9e2b071409d4d9472b8bc4ae0cf76f940
  • Loading branch information
joeyparrish committed Aug 26, 2015
1 parent 419f26a commit e4d3626
Show file tree
Hide file tree
Showing 2 changed files with 30 additions and 4 deletions.
10 changes: 6 additions & 4 deletions lib/util/content_database_writer.js
Original file line number Diff line number Diff line change
Expand Up @@ -317,8 +317,10 @@ shaka.util.ContentDatabaseWriter.prototype.insertStreamContent_ = function(

for (var i = 0; i < segmentIndex.length(); ++i) {
var reference = segmentIndex.get(i);
var isLast = (i == segmentIndex.length() - 1);
var requestSegment = this.requestSegment_.bind(this, reference);
var appendSegment = this.appendSegment_.bind(this, reference, state);
var appendSegment = this.appendSegment_.bind(this, reference, state,
isLast);
segmentPromise = segmentPromise.then(requestSegment);
segmentPromise = segmentPromise.then(appendSegment);
}
Expand All @@ -342,12 +344,13 @@ shaka.util.ContentDatabaseWriter.prototype.insertStreamContent_ = function(
* current segment.
* @param {shaka.util.ContentDatabaseWriter.InsertStreamState} state The state
* of the current stream being inserted.
* @param {boolean} isLast True for the last segment in a stream.
* @param {!ArrayBuffer} segment The current segment of the stream.
* @return {!Promise}
* @private
*/
shaka.util.ContentDatabaseWriter.prototype.appendSegment_ = function(
ref, state, segment) {
ref, state, isLast, segment) {
var p = new shaka.util.PublicPromise();

if (state.segment.byteLength == 0) {
Expand All @@ -361,8 +364,7 @@ shaka.util.ContentDatabaseWriter.prototype.appendSegment_ = function(
detail: percent,
bubbles: true });
var size = state.segment.byteLength;
if (size >= shaka.util.ContentDatabaseWriter.TARGET_SEGMENT_SIZE_ ||
ref.endTime == null) {
if (size >= shaka.util.ContentDatabaseWriter.TARGET_SEGMENT_SIZE_ || isLast) {
var data = {
'stream_id': state.streamId,
'segment_id': state.segmentId,
Expand Down
24 changes: 24 additions & 0 deletions spec/content_database_spec.js
Original file line number Diff line number Diff line change
Expand Up @@ -203,6 +203,30 @@ describe('ContentDatabase', function() {
});
});

// Bug #157:
it('stores a stream with an explicit end time', function(done) {
var references = [
new shaka.media.SegmentReference(0, 100, createFailover(url))
];
var index = new shaka.media.SegmentIndex(references);
streamInfo.segmentIndexSource = {
create: function() { return Promise.resolve(index); }
};
p.then(function() {
return writer.insertStream_(streamInfo, index, testInitData, 1, 0);
}).then(function(streamId) {
return reader.retrieveStreamIndex(streamId);
}).then(function(streamIndex) {
expect(streamIndex.references.length).toEqual(1);
expect(streamIndex.references[0]).toMatchReference(
{ start_time: 0, end_time: 100 });
done();
}).catch(function(err) {
fail(err);
done();
});
});

it('throws an error when trying to store an invalid stream', function(done) {
p.then(function() {
return writer.insertStream_(null, null, null, 0, 0);
Expand Down

0 comments on commit e4d3626

Please sign in to comment.