Skip to content

Commit

Permalink
Use ceiling divide logic in AudioTrackPositionTracker.hasPendingData
Browse files Browse the repository at this point in the history
This fixes a bug with playing very short audio files, introduced by
fe71087

The existing code using floor integer division results in playback never
transitioning to `STATE_ENDED` because at the end of playback for the
short sample clip provided `currentPositionUs=189937`,
`outputSampleRate=16000` and `(189937 * 16000) / 1000000 = 3038.992`,
while `writtenFrames=3039`. This is fixed by using `Util.ceilDivide`
so we return `3039`, which means
`AudioTrackPositionTracker.hasPendingData()` returns `false` (since
`writtenFrames ==
durationUsToFrames(getCurrentPositionUs(/* sourceEnded= */ false))`).

Issue: #538
PiperOrigin-RevId: 554481782
(cherry picked from commit 6e91f0d)
  • Loading branch information
icbaker authored and tianyif committed Aug 7, 2023
1 parent 4b3a4b3 commit 7e58fde
Show file tree
Hide file tree
Showing 5 changed files with 74 additions and 27 deletions.
4 changes: 4 additions & 0 deletions RELEASENOTES.md
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,10 @@
* Add additional fields to Common Media Client Data (CMCD) logging:
streaming format (sf), stream type (st), version (v), top bitrate (tb),
object duration (d) and measured throughput (mtp).
* Audio:
* Fix a bug where `Player.getState()` never transitioned to `STATE_ENDED`
when playing very short files
([#538](https://github.com/androidx/media/issues/538)).
* Audio Offload:
* Prepend Ogg ID Header and Comment Header Pages to bitstream for
offloaded Opus playback in accordance with RFC 7845.
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -1430,6 +1430,40 @@ public static long msToUs(long timeMs) {
return (timeMs == C.TIME_UNSET || timeMs == C.TIME_END_OF_SOURCE) ? timeMs : (timeMs * 1000);
}

/**
 * Converts a count of equal-duration samples at {@code sampleRate} into the corresponding
 * total duration in microseconds.
 *
 * <p>When {@code sampleRate} is below {@link C#MICROS_PER_SECOND}, the duration returned by
 * this method can be mapped back to the original sample count with {@link
 * #durationUsToSampleCount(long, int)}.
 *
 * @param sampleCount The number of samples.
 * @param sampleRate The sample rate, in samples per second.
 * @return The total duration, in microseconds, of {@code sampleCount} samples.
 */
@UnstableApi
public static long sampleCountToDurationUs(long sampleCount, int sampleRate) {
  long scaledMicros = sampleCount * C.MICROS_PER_SECOND;
  return scaledMicros / sampleRate;
}

/**
 * Converts {@code durationUs} of media into the number of samples needed to represent it at
 * {@code sampleRate}, where every sample except possibly the last has equal duration.
 *
 * <p>Because the result is rounded up to a whole number of samples, this conversion generally
 * loses information and <b>cannot</b> be reversed with {@link #sampleCountToDurationUs(long,
 * int)}.
 *
 * @param durationUs The duration in microseconds.
 * @param sampleRate The sample rate in samples per second.
 * @return The number of samples required to represent {@code durationUs}.
 */
@UnstableApi
public static long durationUsToSampleCount(long durationUs, int sampleRate) {
  // Round up: a fractional trailing sample still needs a whole sample to be represented.
  long scaledCount = durationUs * sampleRate;
  return Util.ceilDivide(scaledCount, C.MICROS_PER_SECOND);
}

/**
* Parses an xs:duration attribute value, returning the parsed duration in milliseconds.
*
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,7 @@
import static androidx.media3.common.util.Util.parseXsDuration;
import static androidx.media3.common.util.Util.unescapeFileName;
import static com.google.common.truth.Truth.assertThat;
import static com.google.common.truth.Truth.assertWithMessage;
import static org.junit.Assert.assertThrows;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
Expand Down Expand Up @@ -832,6 +833,21 @@ public void sparseLongArrayMaxValue_emptyArray_throws() {
assertThrows(NoSuchElementException.class, () -> maxValue(new SparseLongArray()));
}

@Test
public void sampleCountToDuration_thenDurationToSampleCount_returnsOriginalValue() {
  // Step both values by co-prime amounts so sampleCount and sampleRate rarely divide evenly,
  // putting maximum rounding pressure on the round-trip conversion.
  for (long expectedSampleCount = 0;
      expectedSampleCount < 100_000;
      expectedSampleCount += 97) {
    for (int rate = 89; rate < 1_000_000; rate += 89) {
      long durationUs = Util.sampleCountToDurationUs(expectedSampleCount, rate);
      long roundTrippedSampleCount = Util.durationUsToSampleCount(durationUs, rate);
      assertWithMessage("sampleCount=%s, sampleRate=%s", expectedSampleCount, rate)
          .that(roundTrippedSampleCount)
          .isEqualTo(expectedSampleCount);
    }
  }
}

@Test
public void parseXsDuration_returnsParsedDurationInMillis() {
assertThat(parseXsDuration("PT150.279S")).isEqualTo(150279L);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,8 @@

import static androidx.media3.common.util.Assertions.checkNotNull;
import static androidx.media3.common.util.Util.castNonNull;
import static androidx.media3.common.util.Util.durationUsToSampleCount;
import static androidx.media3.common.util.Util.sampleCountToDurationUs;
import static java.lang.Math.max;
import static java.lang.Math.min;
import static java.lang.annotation.ElementType.TYPE_USE;
Expand Down Expand Up @@ -238,7 +240,10 @@ public void setAudioTrack(
outputSampleRate = audioTrack.getSampleRate();
needsPassthroughWorkarounds = isPassthrough && needsPassthroughWorkarounds(outputEncoding);
isOutputPcm = Util.isEncodingLinearPcm(outputEncoding);
bufferSizeUs = isOutputPcm ? framesToDurationUs(bufferSize / outputPcmFrameSize) : C.TIME_UNSET;
bufferSizeUs =
isOutputPcm
? sampleCountToDurationUs(bufferSize / outputPcmFrameSize, outputSampleRate)
: C.TIME_UNSET;
rawPlaybackHeadPosition = 0;
rawPlaybackHeadWrapCount = 0;
passthroughWorkaroundPauseOffset = 0;
Expand Down Expand Up @@ -274,7 +279,7 @@ public long getCurrentPositionUs(boolean sourceEnded) {
if (useGetTimestampMode) {
// Calculate the speed-adjusted position using the timestamp (which may be in the future).
long timestampPositionFrames = audioTimestampPoller.getTimestampPositionFrames();
long timestampPositionUs = framesToDurationUs(timestampPositionFrames);
long timestampPositionUs = sampleCountToDurationUs(timestampPositionFrames, outputSampleRate);
long elapsedSinceTimestampUs = systemTimeUs - audioTimestampPoller.getTimestampSystemTimeUs();
elapsedSinceTimestampUs =
Util.getMediaDurationForPlayoutDuration(elapsedSinceTimestampUs, audioTrackPlaybackSpeed);
Expand Down Expand Up @@ -420,7 +425,8 @@ public void handleEndOfStream(long writtenFrames) {
* @return Whether the audio track has any pending data to play out.
*/
public boolean hasPendingData(long writtenFrames) {
return writtenFrames > durationUsToFrames(getCurrentPositionUs(/* sourceEnded= */ false))
long currentPositionUs = getCurrentPositionUs(/* sourceEnded= */ false);
return writtenFrames > durationUsToSampleCount(currentPositionUs, outputSampleRate)
|| forceHasPendingData();
}

Expand Down Expand Up @@ -491,23 +497,18 @@ private void maybePollAndCheckTimestamp(long systemTimeUs) {
}

// Check the timestamp and accept/reject it.
long audioTimestampSystemTimeUs = audioTimestampPoller.getTimestampSystemTimeUs();
long audioTimestampPositionFrames = audioTimestampPoller.getTimestampPositionFrames();
long timestampSystemTimeUs = audioTimestampPoller.getTimestampSystemTimeUs();
long timestampPositionFrames = audioTimestampPoller.getTimestampPositionFrames();
long playbackPositionUs = getPlaybackHeadPositionUs();
if (Math.abs(audioTimestampSystemTimeUs - systemTimeUs) > MAX_AUDIO_TIMESTAMP_OFFSET_US) {
if (Math.abs(timestampSystemTimeUs - systemTimeUs) > MAX_AUDIO_TIMESTAMP_OFFSET_US) {
listener.onSystemTimeUsMismatch(
audioTimestampPositionFrames,
audioTimestampSystemTimeUs,
systemTimeUs,
playbackPositionUs);
timestampPositionFrames, timestampSystemTimeUs, systemTimeUs, playbackPositionUs);
audioTimestampPoller.rejectTimestamp();
} else if (Math.abs(framesToDurationUs(audioTimestampPositionFrames) - playbackPositionUs)
} else if (Math.abs(
sampleCountToDurationUs(timestampPositionFrames, outputSampleRate) - playbackPositionUs)
> MAX_AUDIO_TIMESTAMP_OFFSET_US) {
listener.onPositionFramesMismatch(
audioTimestampPositionFrames,
audioTimestampSystemTimeUs,
systemTimeUs,
playbackPositionUs);
timestampPositionFrames, timestampSystemTimeUs, systemTimeUs, playbackPositionUs);
audioTimestampPoller.rejectTimestamp();
} else {
audioTimestampPoller.acceptTimestamp();
Expand Down Expand Up @@ -539,14 +540,6 @@ private void maybeUpdateLatency(long systemTimeUs) {
}
}

// Converts a frame count at outputSampleRate into a duration in microseconds.
// NOTE(review): this is the pre-commit helper shown as removed in this diff — it is superseded
// by Util.sampleCountToDurationUs(frameCount, outputSampleRate), which performs the identical
// computation.
private long framesToDurationUs(long frameCount) {
return (frameCount * C.MICROS_PER_SECOND) / outputSampleRate;
}

// Converts a duration in microseconds into a frame count at outputSampleRate using floor
// integer division. Per the commit message, this floor rounding is the bug behind issue #538:
// hasPendingData() could return true forever at the end of very short files. Superseded by
// Util.durationUsToSampleCount, which rounds up via Util.ceilDivide.
private long durationUsToFrames(long durationUs) {
return (durationUs * outputSampleRate) / C.MICROS_PER_SECOND;
}

private void resetSyncParams() {
smoothedPlayheadOffsetUs = 0;
playheadOffsetCount = 0;
Expand Down Expand Up @@ -578,7 +571,7 @@ private static boolean needsPassthroughWorkarounds(@C.Encoding int outputEncodin
}

/** Returns the playback head position, converted to microseconds at the output sample rate. */
private long getPlaybackHeadPositionUs() {
  // The scrape left both the old (framesToDurationUs) and new return lines in place, making
  // the second return unreachable; this is the post-commit version using the shared Util
  // helper.
  return sampleCountToDurationUs(getPlaybackHeadPosition(), outputSampleRate);
}

/**
Expand All @@ -596,7 +589,7 @@ private long getPlaybackHeadPosition() {
long elapsedTimeSinceStopUs = (currentTimeMs * 1000) - stopTimestampUs;
long mediaTimeSinceStopUs =
Util.getMediaDurationForPlayoutDuration(elapsedTimeSinceStopUs, audioTrackPlaybackSpeed);
long framesSinceStop = durationUsToFrames(mediaTimeSinceStopUs);
long framesSinceStop = durationUsToSampleCount(mediaTimeSinceStopUs, outputSampleRate);
return min(endPlaybackHeadPosition, stopPlaybackHeadPosition + framesSinceStop);
}
if (currentTimeMs - lastRawPlaybackHeadPositionSampleTimeMs
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -2086,11 +2086,11 @@ public boolean canReuseAudioTrack(Configuration newConfiguration) {
}

/**
 * Returns the duration, in microseconds, of {@code frameCount} frames at this configuration's
 * input sample rate.
 */
public long inputFramesToDurationUs(long frameCount) {
  // The scrape retained both the old inline arithmetic and the new delegating line, leaving a
  // duplicate return; this is the post-commit version delegating to the shared Util helper
  // instead of duplicating the scaling arithmetic.
  return Util.sampleCountToDurationUs(frameCount, inputFormat.sampleRate);
}

/**
 * Returns the duration, in microseconds, of {@code frameCount} frames at this configuration's
 * output sample rate.
 */
public long framesToDurationUs(long frameCount) {
  // The scrape retained both the old inline arithmetic and the new delegating line, leaving a
  // duplicate return; this is the post-commit version delegating to the shared Util helper
  // instead of duplicating the scaling arithmetic.
  return Util.sampleCountToDurationUs(frameCount, outputSampleRate);
}

public AudioTrack buildAudioTrack(
Expand Down

0 comments on commit 7e58fde

Please sign in to comment.