Use correct last timestamp for C2 MP3 workaround

The C2 MP3 decoder produces an extra output buffer when draining after
end-of-stream is queued. This output buffer has a later timestamp than the last
queued input buffer so we need to calculate its timestamp to detect a stream
change in the correct position.

Before this CL we used the original input buffer timestamp as the largest
queued timestamp, which caused the stream change to be detected at the correct
position because the original input buffer timestamp was slightly smaller than
the actual last output buffer timestamp. After this change we use the exact
calculated timestamp as the largest queued timestamp. I manually verified that
gapless playback continues to work on a device using the C2 MP3 decoder, by
comparing the output of the MP3 gapless and MP3 gapless stripped playlists in
the demo app, and that the last buffer timestamp now matches.

#exofixit

PiperOrigin-RevId: 395428928
This commit is contained in:
andrewlewis 2021-09-08 10:31:49 +01:00 committed by Christos Tsilopoulos
parent 6452364c1c
commit ee2ef1c3d5
3 changed files with 81 additions and 45 deletions

View file

@@ -15,6 +15,8 @@
*/
package com.google.android.exoplayer2.mediacodec;
import static java.lang.Math.max;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.Format;
import com.google.android.exoplayer2.audio.MpegAudioUtil;
@@ -29,13 +31,11 @@ import java.nio.ByteBuffer;
*/
/* package */ final class C2Mp3TimestampTracker {
// Mirroring the actual codec, as can be found at
// https://cs.android.com/android/platform/superproject/+/main:frameworks/av/media/codec2/components/mp3/C2SoftMp3Dec.h;l=55;drc=3665390c9d32a917398b240c5a46ced07a3b65eb
private static final long DECODER_DELAY_SAMPLES = 529;
private static final long DECODER_DELAY_FRAMES = 529;
private static final String TAG = "C2Mp3TimestampTracker";
private long processedSamples;
private long anchorTimestampUs;
private long processedFrames;
private boolean seenInvalidMpegAudioHeader;
/**
@@ -44,8 +44,8 @@ import java.nio.ByteBuffer;
* <p>This should be done when the codec is flushed.
*/
public void reset() {
processedSamples = 0;
anchorTimestampUs = 0;
processedFrames = 0;
seenInvalidMpegAudioHeader = false;
}
@@ -57,6 +57,10 @@ import java.nio.ByteBuffer;
* @return The expected output presentation time, in microseconds.
*/
public long updateAndGetPresentationTimeUs(Format format, DecoderInputBuffer buffer) {
if (processedFrames == 0) {
anchorTimestampUs = buffer.timeUs;
}
if (seenInvalidMpegAudioHeader) {
return buffer.timeUs;
}
@@ -71,23 +75,32 @@ import java.nio.ByteBuffer;
int frameCount = MpegAudioUtil.parseMpegAudioFrameSampleCount(sampleHeaderData);
if (frameCount == C.LENGTH_UNSET) {
seenInvalidMpegAudioHeader = true;
processedFrames = 0;
anchorTimestampUs = buffer.timeUs;
Log.w(TAG, "MPEG audio header is invalid.");
return buffer.timeUs;
}
// These calculations mirror the timestamp calculations in the Codec2 Mp3 Decoder.
// https://cs.android.com/android/platform/superproject/+/main:frameworks/av/media/codec2/components/mp3/C2SoftMp3Dec.cpp;l=464;drc=ed134640332fea70ca4b05694289d91a5265bb46
if (processedSamples == 0) {
anchorTimestampUs = buffer.timeUs;
processedSamples = frameCount - DECODER_DELAY_SAMPLES;
return anchorTimestampUs;
}
long processedDurationUs = getProcessedDurationUs(format);
processedSamples += frameCount;
return anchorTimestampUs + processedDurationUs;
long currentBufferTimestampUs = getBufferTimestampUs(format.sampleRate);
processedFrames += frameCount;
return currentBufferTimestampUs;
}
private long getProcessedDurationUs(Format format) {
return processedSamples * C.MICROS_PER_SECOND / format.sampleRate;
/**
 * Returns the presentation timestamp, in microseconds, of the final output buffer that the
 * decoder will produce if the stream ends at the current position.
 *
 * @param format The format of the queued input buffers, used for its sample rate.
 * @return The presentation timestamp of that final output buffer, in microseconds.
 */
public long getLastOutputBufferPresentationTimeUs(Format format) {
  long sampleRate = format.sampleRate;
  return getBufferTimestampUs(sampleRate);
}
private long getBufferTimestampUs(long sampleRate) {
  // Mirrors the Codec2 MP3 decoder's own timestamp calculation: offset the processed frame
  // count by the decoder's startup delay, then convert frames to microseconds. The timestamp
  // never precedes the anchor (the max(0, ...) clamp in the decoder).
  // https://cs.android.com/android/platform/superproject/+/main:frameworks/av/media/codec2/components/mp3/C2SoftMp3Dec.cpp;l=464;drc=ed134640332fea70ca4b05694289d91a5265bb46
  long delayAdjustedFrames = processedFrames - DECODER_DELAY_FRAMES;
  if (delayAdjustedFrames <= 0) {
    return anchorTimestampUs;
  }
  return anchorTimestampUs + delayAdjustedFrames * C.MICROS_PER_SECOND / sampleRate;
}
}

View file

@@ -1338,6 +1338,14 @@ public abstract class MediaCodecRenderer extends BaseRenderer {
if (c2Mp3TimestampTracker != null) {
presentationTimeUs =
c2Mp3TimestampTracker.updateAndGetPresentationTimeUs(inputFormat, buffer);
// When draining the C2 MP3 decoder it produces an extra non-empty buffer with a timestamp
// after all queued input buffer timestamps (unlike other decoders, which generally propagate
// the input timestamps to output buffers 1:1). To detect the end of the stream when this
// buffer is dequeued we override the largest queued timestamp accordingly.
largestQueuedPresentationTimeUs =
max(
largestQueuedPresentationTimeUs,
c2Mp3TimestampTracker.getLastOutputBufferPresentationTimeUs(inputFormat));
}
if (buffer.isDecodeOnly()) {
@@ -1347,14 +1355,7 @@ public abstract class MediaCodecRenderer extends BaseRenderer {
formatQueue.add(presentationTimeUs, inputFormat);
waitingForFirstSampleInFormat = false;
}
// TODO(b/158483277): Find the root cause of why a gap is introduced in MP3 playback when using
// presentationTimeUs from the c2Mp3TimestampTracker.
if (c2Mp3TimestampTracker != null) {
largestQueuedPresentationTimeUs = max(largestQueuedPresentationTimeUs, buffer.timeUs);
} else {
largestQueuedPresentationTimeUs = max(largestQueuedPresentationTimeUs, presentationTimeUs);
}
largestQueuedPresentationTimeUs = max(largestQueuedPresentationTimeUs, presentationTimeUs);
buffer.flip();
if (buffer.hasSupplementalData()) {
handleInputBufferSupplementalData(buffer);

View file

@@ -15,6 +15,7 @@
*/
package com.google.android.exoplayer2.mediacodec;
import static com.google.android.exoplayer2.testutil.TestUtil.createByteArray;
import static com.google.common.truth.Truth.assertThat;
import androidx.test.ext.junit.runners.AndroidJUnit4;
@@ -22,6 +23,8 @@ import com.google.android.exoplayer2.Format;
import com.google.android.exoplayer2.decoder.DecoderInputBuffer;
import com.google.android.exoplayer2.util.MimeTypes;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.List;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
@@ -30,49 +33,68 @@ import org.junit.runner.RunWith;
@RunWith(AndroidJUnit4.class)
public final class C2Mp3TimestampTrackerTest {
private static final Format AUDIO_MP3 =
private static final Format FORMAT =
new Format.Builder()
.setSampleMimeType(MimeTypes.AUDIO_MPEG)
.setChannelCount(2)
.setSampleRate(44_100)
.build();
private DecoderInputBuffer buffer;
private C2Mp3TimestampTracker timestampTracker;
private DecoderInputBuffer buffer;
private DecoderInputBuffer invalidBuffer;
@Before
public void setUp() {
buffer = new DecoderInputBuffer(DecoderInputBuffer.BUFFER_REPLACEMENT_MODE_DISABLED);
timestampTracker = new C2Mp3TimestampTracker();
buffer.data = ByteBuffer.wrap(new byte[] {-1, -5, -24, 60});
buffer = new DecoderInputBuffer(DecoderInputBuffer.BUFFER_REPLACEMENT_MODE_DISABLED);
buffer.data = ByteBuffer.wrap(createByteArray(0xFF, 0xFB, 0xE8, 0x3C));
buffer.timeUs = 100_000;
invalidBuffer = new DecoderInputBuffer(DecoderInputBuffer.BUFFER_REPLACEMENT_MODE_DISABLED);
invalidBuffer.data = ByteBuffer.wrap(createByteArray(0, 0, 0, 0));
invalidBuffer.timeUs = 120_000;
}
@Test
public void whenUpdateCalledMultipleTimes_timestampsIncrease() {
long first = timestampTracker.updateAndGetPresentationTimeUs(AUDIO_MP3, buffer);
long second = timestampTracker.updateAndGetPresentationTimeUs(AUDIO_MP3, buffer);
long third = timestampTracker.updateAndGetPresentationTimeUs(AUDIO_MP3, buffer);
public void handleBuffers_outputsCorrectTimestamps() {
List<Long> presentationTimesUs = new ArrayList<>();
presentationTimesUs.add(timestampTracker.updateAndGetPresentationTimeUs(FORMAT, buffer));
presentationTimesUs.add(timestampTracker.updateAndGetPresentationTimeUs(FORMAT, buffer));
presentationTimesUs.add(timestampTracker.updateAndGetPresentationTimeUs(FORMAT, buffer));
presentationTimesUs.add(timestampTracker.getLastOutputBufferPresentationTimeUs(FORMAT));
assertThat(second).isGreaterThan(first);
assertThat(third).isGreaterThan(second);
assertThat(presentationTimesUs).containsExactly(100_000L, 114_126L, 140_249L, 166_371L);
}
@Test
public void whenResetCalled_timestampsDecrease() {
long first = timestampTracker.updateAndGetPresentationTimeUs(AUDIO_MP3, buffer);
long second = timestampTracker.updateAndGetPresentationTimeUs(AUDIO_MP3, buffer);
public void handleBuffersWithReset_resetsTimestamps() {
List<Long> presentationTimesUs = new ArrayList<>();
presentationTimesUs.add(timestampTracker.updateAndGetPresentationTimeUs(FORMAT, buffer));
presentationTimesUs.add(timestampTracker.updateAndGetPresentationTimeUs(FORMAT, buffer));
timestampTracker.reset();
long third = timestampTracker.updateAndGetPresentationTimeUs(AUDIO_MP3, buffer);
presentationTimesUs.add(timestampTracker.updateAndGetPresentationTimeUs(FORMAT, buffer));
presentationTimesUs.add(timestampTracker.getLastOutputBufferPresentationTimeUs(FORMAT));
assertThat(second).isGreaterThan(first);
assertThat(third).isLessThan(second);
assertThat(presentationTimesUs).containsExactly(100_000L, 114_126L, 100_000L, 114_126L);
}
@Test
public void whenBufferTimeIsNotZero_firstSampleIsOffset() {
long first = timestampTracker.updateAndGetPresentationTimeUs(AUDIO_MP3, buffer);
public void handleInvalidBuffer_stopsUpdatingTimestamps() {
List<Long> presentationTimesUs = new ArrayList<>();
presentationTimesUs.add(timestampTracker.updateAndGetPresentationTimeUs(FORMAT, buffer));
presentationTimesUs.add(timestampTracker.updateAndGetPresentationTimeUs(FORMAT, buffer));
presentationTimesUs.add(timestampTracker.updateAndGetPresentationTimeUs(FORMAT, invalidBuffer));
presentationTimesUs.add(timestampTracker.getLastOutputBufferPresentationTimeUs(FORMAT));
assertThat(first).isEqualTo(buffer.timeUs);
assertThat(presentationTimesUs).containsExactly(100_000L, 114_126L, 120_000L, 120_000L);
}
@Test
public void firstTimestamp_matchesBuffer() {
  // The first buffer seen after construction, and after reset(), must have its timestamp passed
  // through unchanged — whether or not its MPEG audio header is valid.
  long firstTimeUs = timestampTracker.updateAndGetPresentationTimeUs(FORMAT, buffer);
  assertThat(firstTimeUs).isEqualTo(buffer.timeUs);

  timestampTracker.reset();

  long firstTimeUsAfterReset =
      timestampTracker.updateAndGetPresentationTimeUs(FORMAT, invalidBuffer);
  assertThat(firstTimeUsAfterReset).isEqualTo(invalidBuffer.timeUs);
}
}