
feat: add dts-based timestamp offset calculation with feature toggle (#…
dzianis-dashkevich authored Mar 14, 2022
1 parent 42fe383 commit 450eb2d
Showing 8 changed files with 229 additions and 2 deletions.
6 changes: 6 additions & 0 deletions README.md
@@ -58,6 +58,7 @@ Video.js Compatibility: 6.0, 7.0
- [handlePartialData](#handlepartialdata)
- [liveRangeSafeTimeDelta](#liverangesafetimedelta)
- [useNetworkInformationApi](#usenetworkinformationapi)
- [useDtsForTimestampOffset](#usedtsfortimestampoffset)
- [captionServices](#captionservices)
- [Format](#format)
- [Example](#example)
@@ -479,6 +480,11 @@ This option defaults to `false`.
* Default: `false`
* Use [window.networkInformation.downlink](https://developer.mozilla.org/en-US/docs/Web/API/NetworkInformation/downlink) to estimate the network's bandwidth. Per MDN, _The value is never greater than 10 Mbps, as a non-standard anti-fingerprinting measure_. Given this cap, if the bandwidth estimates from both the player and the Network Information API are >= 10 Mbps, the player will use the larger of the two values as its bandwidth estimate.
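
A minimal sketch of that selection rule (illustrative only; the behaviour when either estimate is below the 10 Mbps cap is not spelled out above, so that branch is an assumption here):

```js
// Illustrative only: combining the player's own bandwidth estimate with the
// Network Information API estimate, both expressed in bits per second.
const TEN_MBPS = 10 * 1000 * 1000;

function combineBandwidthEstimates(playerEstimate, networkInfoEstimate) {
  // networkInformation.downlink is capped at 10 Mbps, so once both estimates
  // reach the cap the larger value is the more useful signal.
  if (playerEstimate >= TEN_MBPS && networkInfoEstimate >= TEN_MBPS) {
    return Math.max(playerEstimate, networkInfoEstimate);
  }

  // Assumption for this sketch: below the cap, defer to the Network
  // Information estimate.
  return networkInfoEstimate;
}

console.log(combineBandwidthEstimates(25e6, 10e6)); // 25000000
```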

##### useDtsForTimestampOffset
* Type: `boolean`
* Default: `false`
* Use the [Decode Timestamp](https://www.w3.org/TR/media-source/#decode-timestamp) instead of the [Presentation Timestamp](https://www.w3.org/TR/media-source/#presentation-timestamp) when calculating the [timestampOffset](https://www.w3.org/TR/media-source/#dom-sourcebuffer-timestampoffset). This option was introduced to align with browsers that base `timestampOffset` on DTS, and it affects only transmuxed data (e.g., transport streams). For more information, see this [issue](https://github.com/videojs/http-streaming/issues/1247).
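
For example, the option can be enabled through the `vhs` tech options (a minimal sketch; adjust to however you configure VHS in your player):

```js
// Enable DTS-based timestampOffset calculation for transmuxed (TS) content.
var player = videojs('example-player', {
  html5: {
    vhs: {
      useDtsForTimestampOffset: true
    }
  }
});
```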

##### captionServices
* Type: `object`
* Default: undefined
5 changes: 5 additions & 0 deletions index.html
@@ -151,6 +151,11 @@
<label class="form-check-label" for="network-info">Use networkInfo API for bandwidth estimations (reloads player)</label>
</div>

<div class="form-check">
<input id=dts-offset type="checkbox" class="form-check-input">
<label class="form-check-label" for="dts-offset">Use DTS instead of PTS for Timestamp Offset calculation (reloads player)</label>
</div>

<div class="form-check">
<input id=llhls type="checkbox" class="form-check-input">
<label class="form-check-label" for="llhls">[EXPERIMENTAL] Enables support for ll-hls (reloads player)</label>
5 changes: 4 additions & 1 deletion scripts/index.js
@@ -448,6 +448,7 @@
'exact-manifest-timings',
'pixel-diff-selector',
'network-info',
'dts-offset',
'override-native',
'preload',
'mirror-source'
@@ -501,6 +502,7 @@
'liveui',
'pixel-diff-selector',
'network-info',
'dts-offset',
'exact-manifest-timings'
].forEach(function(name) {
stateEls[name].addEventListener('change', function(event) {
@@ -568,7 +570,8 @@
experimentalLLHLS: getInputValue(stateEls.llhls),
experimentalExactManifestTimings: getInputValue(stateEls['exact-manifest-timings']),
experimentalLeastPixelDiffSelector: getInputValue(stateEls['pixel-diff-selector']),
useNetworkInformationApi: getInputValue(stateEls['network-info'])
useNetworkInformationApi: getInputValue(stateEls['network-info']),
useDtsForTimestampOffset: getInputValue(stateEls['dts-offset'])
}
}
});
1 change: 1 addition & 0 deletions src/master-playlist-controller.js
@@ -238,6 +238,7 @@ export class MasterPlaylistController extends videojs.EventTarget {
const segmentLoaderSettings = {
vhs: this.vhs_,
parse708captions: options.parse708captions,
useDtsForTimestampOffset: options.useDtsForTimestampOffset,
captionServices,
mediaSource: this.mediaSource,
currentTime: this.tech_.currentTime.bind(this.tech_),
25 changes: 24 additions & 1 deletion src/segment-loader.js
@@ -559,6 +559,7 @@ export default class SegmentLoader extends videojs.EventTarget {
this.timelineChangeController_ = settings.timelineChangeController;
this.shouldSaveSegmentTimingInfo_ = true;
this.parse708captions_ = settings.parse708captions;
this.useDtsForTimestampOffset_ = settings.useDtsForTimestampOffset;
this.captionServices_ = settings.captionServices;
this.experimentalExactManifestTimings = settings.experimentalExactManifestTimings;

@@ -2905,7 +2906,11 @@
// the timing info here comes from video. In the event that the audio is longer than
// the video, this will trim the start of the audio.
// This also trims any offset from 0 at the beginning of the media
segmentInfo.timestampOffset -= segmentInfo.timingInfo.start;
segmentInfo.timestampOffset -= this.getSegmentStartTimeForTimestampOffsetCalculation_({
videoTimingInfo: segmentInfo.segment.videoTimingInfo,
audioTimingInfo: segmentInfo.segment.audioTimingInfo,
timingInfo: segmentInfo.timingInfo
});
// In the event that there are part segment downloads, each will try to update the
// timestamp offset. Retaining this bit of state prevents us from updating in the
// future (within the same segment), however, there may be a better way to handle it.
@@ -2926,6 +2931,24 @@
}
}

getSegmentStartTimeForTimestampOffsetCalculation_({ videoTimingInfo, audioTimingInfo, timingInfo }) {
if (!this.useDtsForTimestampOffset_) {
return timingInfo.start;
}

if (videoTimingInfo && typeof videoTimingInfo.transmuxedDecodeStart === 'number') {
return videoTimingInfo.transmuxedDecodeStart;
}

// handle audio only
if (audioTimingInfo && typeof audioTimingInfo.transmuxedDecodeStart === 'number') {
return audioTimingInfo.transmuxedDecodeStart;
}

// handle content not transmuxed (e.g., MP4)
return timingInfo.start;
}

updateTimingInfoEnd_(segmentInfo) {
segmentInfo.timingInfo = segmentInfo.timingInfo || {};
const trackInfo = this.getMediaInfo_();
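
To illustrate what the new helper changes, here is a toy calculation with made-up timing values (the field names mirror the `videoTimingInfo` object used in the diff above):

```js
// Hypothetical timing info for the first transmuxed video segment. With frame
// reordering (B-frames), the first decode timestamp (DTS) precedes the first
// presentation timestamp (PTS).
const videoTimingInfo = {
  transmuxedPresentationStart: 10.1, // first PTS, in seconds
  transmuxedDecodeStart: 10.0        // first DTS, in seconds
};

let timestampOffset = 0;

// Default behaviour (useDtsForTimestampOffset: false): subtract the PTS start.
const ptsBasedOffset = timestampOffset - videoTimingInfo.transmuxedPresentationStart; // -10.1

// New behaviour (useDtsForTimestampOffset: true): subtract the DTS start,
// matching browsers that compute timestampOffset from decode timestamps.
const dtsBasedOffset = timestampOffset - videoTimingInfo.transmuxedDecodeStart; // -10.0

console.log({ ptsBasedOffset, dtsBasedOffset });
```
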
2 changes: 2 additions & 0 deletions src/videojs-http-streaming.js
@@ -631,6 +631,7 @@ class VhsHandler extends Component {
this.source_.useBandwidthFromLocalStorage :
this.options_.useBandwidthFromLocalStorage || false;
this.options_.useNetworkInformationApi = this.options_.useNetworkInformationApi || false;
this.options_.useDtsForTimestampOffset = this.options_.useDtsForTimestampOffset || false;
this.options_.customTagParsers = this.options_.customTagParsers || [];
this.options_.customTagMappers = this.options_.customTagMappers || [];
this.options_.cacheEncryptionKeys = this.options_.cacheEncryptionKeys || false;
@@ -684,6 +685,7 @@
'liveRangeSafeTimeDelta',
'experimentalLLHLS',
'useNetworkInformationApi',
'useDtsForTimestampOffset',
'experimentalExactManifestTimings',
'experimentalLeastPixelDiffSelector'
].forEach((option) => {
187 changes: 187 additions & 0 deletions test/segment-loader.test.js
@@ -28,6 +28,7 @@ import {
oneSecond as oneSecondSegment,
audio as audioSegment,
video as videoSegment,
videoDiffPtsDts as videoDiffPtsDtsSegment,
videoOneSecond as videoOneSecondSegment,
videoOneSecond1 as videoOneSecond1Segment,
videoOneSecond2 as videoOneSecond2Segment,
@@ -1145,6 +1146,192 @@
});
});

QUnit.test('should use video PTS value for timestamp offset calculation when useDtsForTimestampOffset set as false', function(assert) {
loader = new SegmentLoader(LoaderCommonSettings.call(this, {
loaderType: 'main',
segmentMetadataTrack: this.segmentMetadataTrack,
useDtsForTimestampOffset: false
}), {});

const playlist = playlistWithDuration(20, { uri: 'playlist.m3u8' });

return this.setupMediaSource(loader.mediaSource_, loader.sourceUpdater_).then(() => {
return new Promise((resolve, reject) => {
loader.one('appended', resolve);
loader.one('error', reject);

loader.playlist(playlist);
loader.load();

this.clock.tick(100);

standardXHRResponse(this.requests.shift(), videoDiffPtsDtsSegment());
});
}).then(() => {
assert.equal(
loader.sourceUpdater_.videoTimestampOffset(),
-playlist.segments[0].videoTimingInfo.transmuxedPresentationStart,
'set video timestampOffset'
);

assert.equal(
loader.sourceUpdater_.audioTimestampOffset(),
-playlist.segments[0].videoTimingInfo.transmuxedPresentationStart,
'set audio timestampOffset'
);
});
});

QUnit.test('should use video DTS value for timestamp offset calculation when useDtsForTimestampOffset set as true', function(assert) {
loader = new SegmentLoader(LoaderCommonSettings.call(this, {
loaderType: 'main',
segmentMetadataTrack: this.segmentMetadataTrack,
useDtsForTimestampOffset: true
}), {});

const playlist = playlistWithDuration(20, { uri: 'playlist.m3u8' });

return this.setupMediaSource(loader.mediaSource_, loader.sourceUpdater_).then(() => {
return new Promise((resolve, reject) => {
loader.one('appended', resolve);
loader.one('error', reject);

loader.playlist(playlist);
loader.load();

this.clock.tick(100);
// segment
standardXHRResponse(this.requests.shift(), videoDiffPtsDtsSegment());
});
}).then(() => {
assert.equal(
loader.sourceUpdater_.videoTimestampOffset(),
-playlist.segments[0].videoTimingInfo.transmuxedDecodeStart,
'set video timestampOffset'
);

assert.equal(
loader.sourceUpdater_.audioTimestampOffset(),
-playlist.segments[0].videoTimingInfo.transmuxedDecodeStart,
'set audio timestampOffset'
);
});
});

QUnit.test('should use video DTS value as primary for muxed segments (eg: audio and video together) for timestamp offset calculation when useDtsForTimestampOffset set as true', function(assert) {
loader = new SegmentLoader(LoaderCommonSettings.call(this, {
loaderType: 'main',
segmentMetadataTrack: this.segmentMetadataTrack,
useDtsForTimestampOffset: true
}), {});

const playlist = playlistWithDuration(20, { uri: 'playlist.m3u8' });

return this.setupMediaSource(loader.mediaSource_, loader.sourceUpdater_).then(() => {
return new Promise((resolve, reject) => {
loader.one('appended', resolve);
loader.one('error', reject);

loader.playlist(playlist);
loader.load();

this.clock.tick(100);

standardXHRResponse(this.requests.shift(), muxedSegment());
});
}).then(() => {
assert.equal(
loader.sourceUpdater_.videoTimestampOffset(),
-playlist.segments[0].videoTimingInfo.transmuxedDecodeStart,
'set video timestampOffset'
);

assert.equal(
loader.sourceUpdater_.audioTimestampOffset(),
-playlist.segments[0].videoTimingInfo.transmuxedDecodeStart,
'set audio timestampOffset'
);
});
});

QUnit.test('should use audio DTS value for timestamp offset calculation when useDtsForTimestampOffset set as true and only audio', function(assert) {
loader = new SegmentLoader(LoaderCommonSettings.call(this, {
loaderType: 'main',
segmentMetadataTrack: this.segmentMetadataTrack,
useDtsForTimestampOffset: true
}), {});

const playlist = playlistWithDuration(20, { uri: 'playlist.m3u8' });

return this.setupMediaSource(loader.mediaSource_, loader.sourceUpdater_, { isAudioOnly: true }).then(() => {
return new Promise((resolve, reject) => {
loader.one('appended', resolve);
loader.one('error', reject);

loader.playlist(playlist);
loader.load();

this.clock.tick(100);
// segment
standardXHRResponse(this.requests.shift(), audioSegment());
});
}).then(() => {
assert.equal(
loader.sourceUpdater_.audioTimestampOffset(),
-playlist.segments[0].audioTimingInfo.transmuxedDecodeStart,
'set audio timestampOffset'
);
});
});

QUnit.test('should fallback to segment\'s start time when there is no transmuxed content (eg: mp4) and useDtsForTimestampOffset is set as true', function(assert) {
loader = new SegmentLoader(LoaderCommonSettings.call(this, {
loaderType: 'main',
segmentMetadataTrack: this.segmentMetadataTrack,
useDtsForTimestampOffset: true
}), {});

const playlist = playlistWithDuration(10);
const ogPost = loader.transmuxer_.postMessage;

loader.transmuxer_.postMessage = (message) => {
if (message.action === 'probeMp4StartTime') {
const evt = newEvent('message');

evt.data = {action: 'probeMp4StartTime', startTime: 11, data: message.data};

loader.transmuxer_.dispatchEvent(evt);
return;
}
return ogPost.call(loader.transmuxer_, message);
};

return this.setupMediaSource(loader.mediaSource_, loader.sourceUpdater_).then(() => {
return new Promise((resolve, reject) => {
loader.one('appended', resolve);
loader.one('error', reject);

playlist.segments.forEach((segment) => {
segment.map = {
resolvedUri: 'init.mp4',
byterange: { length: Infinity, offset: 0 }
};
});
loader.playlist(playlist);
loader.load();

this.clock.tick(100);
// init
standardXHRResponse(this.requests.shift(), mp4VideoInitSegment());
// segment
standardXHRResponse(this.requests.shift(), mp4VideoSegment());
});
}).then(() => {
assert.equal(loader.sourceUpdater_.videoTimestampOffset(), -11, 'set video timestampOffset');
assert.equal(loader.sourceUpdater_.audioTimestampOffset(), -11, 'set audio timestampOffset');
});
});

QUnit.test('updates timestamps when segments do not start at zero', function(assert) {
const playlist = playlistWithDuration(10);
const ogPost = loader.transmuxer_.postMessage;
Binary file added test/segments/videoDiffPtsDts.ts
