Move underrun detection into AudioTrack.

This removes duplication from SimpleDecoderAudioRenderer and
MediaCodecAudioRenderer.

-------------
Created by MOE: https://github.com/google/moe
MOE_MIGRATED_REVID=139187254
This commit is contained in:
andrewlewis 2016-11-15 04:49:34 -08:00 committed by Oliver Woodman
parent 74383716a6
commit 348b58021d
3 changed files with 106 additions and 75 deletions

View file

@ -15,6 +15,7 @@
*/
package com.google.android.exoplayer2.audio;
import android.annotation.SuppressLint;
import android.annotation.TargetApi;
import android.media.AudioFormat;
import android.media.AudioTimestamp;
@ -54,6 +55,24 @@ import java.nio.ByteBuffer;
*/
public final class AudioTrack {
/**
 * Listener for audio track events.
 */
public interface Listener {
/**
 * Called when the audio track underruns, i.e. the track ran out of buffered data while it was
 * playing. Renderers typically forward this to an event dispatcher for diagnostics.
 *
 * @param bufferSize The size of the track's buffer, in bytes.
 * @param bufferSizeMs The size of the track's buffer, in milliseconds, if it is configured for
 * PCM output. {@link C#TIME_UNSET} if it is configured for passthrough output, as the
 * buffered media can have a variable bitrate so the duration may be unknown.
 * @param elapsedSinceLastFeedMs The time since the track was last fed data, in milliseconds.
 */
void onUnderrun(int bufferSize, long bufferSizeMs, long elapsedSinceLastFeedMs);
}
/**
* Thrown when a failure occurs initializing an {@link android.media.AudioTrack}.
*/
@ -152,6 +171,40 @@ public final class AudioTrack {
*/
private static final int BUFFER_MULTIPLICATION_FACTOR = 4;
/**
* @see android.media.AudioTrack#PLAYSTATE_STOPPED
*/
private static final int PLAYSTATE_STOPPED = android.media.AudioTrack.PLAYSTATE_STOPPED;
/**
* @see android.media.AudioTrack#PLAYSTATE_PAUSED
*/
private static final int PLAYSTATE_PAUSED = android.media.AudioTrack.PLAYSTATE_PAUSED;
/**
* @see android.media.AudioTrack#PLAYSTATE_PLAYING
*/
private static final int PLAYSTATE_PLAYING = android.media.AudioTrack.PLAYSTATE_PLAYING;
/**
* @see android.media.AudioTrack#ERROR_BAD_VALUE
*/
private static final int ERROR_BAD_VALUE = android.media.AudioTrack.ERROR_BAD_VALUE;
/**
* @see android.media.AudioTrack#MODE_STATIC
*/
private static final int MODE_STATIC = android.media.AudioTrack.MODE_STATIC;
/**
* @see android.media.AudioTrack#MODE_STREAM
*/
private static final int MODE_STREAM = android.media.AudioTrack.MODE_STREAM;
/**
* @see android.media.AudioTrack#STATE_INITIALIZED
*/
private static final int STATE_INITIALIZED = android.media.AudioTrack.STATE_INITIALIZED;
/**
* @see android.media.AudioTrack#WRITE_NON_BLOCKING
*/
@SuppressLint("InlinedApi")
private static final int WRITE_NON_BLOCKING = android.media.AudioTrack.WRITE_NON_BLOCKING;
private static final String TAG = "AudioTrack";
/**
@ -197,6 +250,7 @@ public final class AudioTrack {
private final AudioCapabilities audioCapabilities;
private final int streamType;
private final Listener listener;
private final ConditionVariable releasingConditionVariable;
private final long[] playheadOffsets;
private final AudioTrackUtil audioTrackUtil;
@ -242,13 +296,18 @@ public final class AudioTrack {
private ByteBuffer resampledBuffer;
private boolean useResampledBuffer;
private boolean hasData;
private long lastFeedElapsedRealtimeMs;
/**
* @param audioCapabilities The current audio capabilities.
* @param streamType The type of audio stream for the underlying {@link android.media.AudioTrack}.
* @param listener Listener for audio track events.
*/
public AudioTrack(AudioCapabilities audioCapabilities, int streamType) {
public AudioTrack(AudioCapabilities audioCapabilities, int streamType, Listener listener) {
this.audioCapabilities = audioCapabilities;
this.streamType = streamType;
this.listener = listener;
releasingConditionVariable = new ConditionVariable(true);
if (Util.SDK_INT >= 18) {
try {
@ -305,7 +364,7 @@ public final class AudioTrack {
return CURRENT_POSITION_NOT_SET;
}
if (audioTrack.getPlayState() == android.media.AudioTrack.PLAYSTATE_PLAYING) {
if (audioTrack.getPlayState() == PLAYSTATE_PLAYING) {
maybeSampleSyncParams();
}
@ -424,7 +483,7 @@ public final class AudioTrack {
} else {
int minBufferSize =
android.media.AudioTrack.getMinBufferSize(sampleRate, channelConfig, targetEncoding);
Assertions.checkState(minBufferSize != android.media.AudioTrack.ERROR_BAD_VALUE);
Assertions.checkState(minBufferSize != ERROR_BAD_VALUE);
int multipliedBufferSize = minBufferSize * BUFFER_MULTIPLICATION_FACTOR;
int minAppBufferSize = (int) durationUsToFrames(MIN_BUFFER_DURATION_US) * pcmFrameSize;
int maxAppBufferSize = (int) Math.max(minBufferSize,
@ -453,11 +512,11 @@ public final class AudioTrack {
if (sessionId == SESSION_ID_NOT_SET) {
audioTrack = new android.media.AudioTrack(streamType, sampleRate, channelConfig,
targetEncoding, bufferSize, android.media.AudioTrack.MODE_STREAM);
targetEncoding, bufferSize, MODE_STREAM);
} else {
// Re-attach to the same audio session.
audioTrack = new android.media.AudioTrack(streamType, sampleRate, channelConfig,
targetEncoding, bufferSize, android.media.AudioTrack.MODE_STREAM, sessionId);
targetEncoding, bufferSize, MODE_STREAM, sessionId);
}
checkAudioTrackInitialized();
@ -476,42 +535,17 @@ public final class AudioTrack {
@C.PcmEncoding int encoding = C.ENCODING_PCM_16BIT;
int bufferSize = 2; // Use a two byte buffer, as it is not actually used for playback.
keepSessionIdAudioTrack = new android.media.AudioTrack(streamType, sampleRate,
channelConfig, encoding, bufferSize, android.media.AudioTrack.MODE_STATIC, sessionId);
channelConfig, encoding, bufferSize, MODE_STATIC, sessionId);
}
}
}
audioTrackUtil.reconfigure(audioTrack, needsPassthroughWorkarounds());
setAudioTrackVolume();
hasData = false;
return sessionId;
}
/**
 * Returns the current length, in bytes, of this {@link AudioTrack}'s buffer.
 * <p>
 * Note that the value is recomputed whenever one of the {@link #configure} methods is invoked,
 * so callers should not cache it across reconfiguration.
 *
 * @return The buffer length in bytes.
 */
public int getBufferSize() {
  return bufferSize;
}
/**
 * Returns the duration of media that this {@link AudioTrack}'s buffer can hold, in microseconds,
 * when configured for PCM output, or {@link C#TIME_UNSET} when configured for passthrough
 * output (buffered passthrough media can have a variable bitrate, so its duration is unknown).
 * <p>
 * Note that the value is recomputed whenever one of the {@link #configure} methods is invoked,
 * so callers should not cache it across reconfiguration.
 *
 * @return The buffer duration in microseconds, or {@link C#TIME_UNSET} for passthrough.
 */
public long getBufferSizeUs() {
  return bufferSizeUs;
}
/**
* Starts or resumes playing audio if the audio track has been initialized.
*/
@ -553,6 +587,18 @@ public final class AudioTrack {
* @throws WriteException If an error occurs writing the audio data.
*/
public int handleBuffer(ByteBuffer buffer, long presentationTimeUs) throws WriteException {
// Detect a falling edge on hasPendingData(): if the track had queued data at the previous feed
// but has drained since, and it is not stopped, the track underran. Sample BEFORE writing the
// new buffer so the write itself cannot mask the drain.
boolean hadData = hasData;
hasData = hasPendingData();
if (hadData && !hasData && audioTrack.getPlayState() != PLAYSTATE_STOPPED) {
long elapsedSinceLastFeedMs = SystemClock.elapsedRealtime() - lastFeedElapsedRealtimeMs;
listener.onUnderrun(bufferSize, C.usToMs(bufferSizeUs), elapsedSinceLastFeedMs);
}
// Write the data, then record the feed time so the next underrun report can measure the gap
// since this feed.
int result = writeBuffer(buffer, presentationTimeUs);
lastFeedElapsedRealtimeMs = SystemClock.elapsedRealtime();
return result;
}
private int writeBuffer(ByteBuffer buffer, long presentationTimeUs) throws WriteException {
boolean isNewSourceBuffer = currentSourceBuffer == null;
Assertions.checkState(isNewSourceBuffer || currentSourceBuffer == buffer);
currentSourceBuffer = buffer;
@ -560,14 +606,14 @@ public final class AudioTrack {
if (needsPassthroughWorkarounds()) {
// An AC-3 audio track continues to play data written while it is paused. Stop writing so its
// buffer empties. See [Internal: b/18899620].
if (audioTrack.getPlayState() == android.media.AudioTrack.PLAYSTATE_PAUSED) {
if (audioTrack.getPlayState() == PLAYSTATE_PAUSED) {
return 0;
}
// A new AC-3 audio track's playback position continues to increase from the old track's
// position for a short time after it has been released. Avoid writing data until the playback
// head position actually returns to zero.
if (audioTrack.getPlayState() == android.media.AudioTrack.PLAYSTATE_STOPPED
if (audioTrack.getPlayState() == PLAYSTATE_STOPPED
&& audioTrackUtil.getPlaybackHeadPosition() != 0) {
return 0;
}
@ -745,7 +791,7 @@ public final class AudioTrack {
latencyUs = 0;
resetSyncParams();
int playState = audioTrack.getPlayState();
if (playState == android.media.AudioTrack.PLAYSTATE_PLAYING) {
if (playState == PLAYSTATE_PLAYING) {
audioTrack.pause();
}
// AudioTrack.release can take some time, so we call it on a background thread.
@ -894,7 +940,7 @@ public final class AudioTrack {
*/
private void checkAudioTrackInitialized() throws InitializationException {
int state = audioTrack.getState();
if (state == android.media.AudioTrack.STATE_INITIALIZED) {
if (state == STATE_INITIALIZED) {
return;
}
// The track is not successfully initialized. Release and null the track.
@ -952,7 +998,7 @@ public final class AudioTrack {
*/
// True when the passthrough workaround should force hasPendingData() to report pending data:
// a paused passthrough track whose playback head has (incorrectly) reset to zero.
private boolean overrideHasPendingData() {
  // Fixed: the diff-merge residue checked the play state twice — once against the
  // fully-qualified android.media.AudioTrack.PLAYSTATE_PAUSED and once against the local
  // PLAYSTATE_PAUSED alias (which is the same value). A single comparison suffices.
  return needsPassthroughWorkarounds()
      && audioTrack.getPlayState() == PLAYSTATE_PAUSED
      && audioTrack.getPlaybackHeadPosition() == 0;
}
@ -1063,7 +1109,7 @@ public final class AudioTrack {
// Non-blocking write into the platform track; API 21+ only (ByteBuffer write overload).
@TargetApi(21)
private static int writeNonBlockingV21(
    android.media.AudioTrack audioTrack, ByteBuffer buffer, int size) {
  // Fixed: the diff-merge residue left two consecutive return statements (the pre-refactor
  // fully-qualified form followed by the aliased form), making the second one unreachable —
  // a compile error in Java. Keep the single aliased call; WRITE_NON_BLOCKING is the class's
  // local alias for android.media.AudioTrack.WRITE_NON_BLOCKING.
  return audioTrack.write(buffer, size, WRITE_NON_BLOCKING);
}
@TargetApi(21)
@ -1156,7 +1202,7 @@ public final class AudioTrack {
}
int state = audioTrack.getPlayState();
if (state == android.media.AudioTrack.PLAYSTATE_STOPPED) {
if (state == PLAYSTATE_STOPPED) {
// The audio track hasn't been started.
return 0;
}
@ -1166,7 +1212,7 @@ public final class AudioTrack {
// Work around an issue with passthrough/direct AudioTracks on platform API versions 21/22
// where the playback head position jumps back to zero on paused passthrough/direct audio
// tracks. See [Internal: b/19187573].
if (state == android.media.AudioTrack.PLAYSTATE_PAUSED && rawPlaybackHeadPosition == 0) {
if (state == PLAYSTATE_PAUSED && rawPlaybackHeadPosition == 0) {
passthroughWorkaroundPauseOffset = lastRawPlaybackHeadPosition;
}
rawPlaybackHeadPosition += passthroughWorkaroundPauseOffset;

View file

@ -23,7 +23,6 @@ import android.media.MediaFormat;
import android.media.PlaybackParams;
import android.media.audiofx.Virtualizer;
import android.os.Handler;
import android.os.SystemClock;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.ExoPlaybackException;
import com.google.android.exoplayer2.Format;
@ -43,7 +42,8 @@ import java.nio.ByteBuffer;
* Decodes and renders audio using {@link MediaCodec} and {@link AudioTrack}.
*/
@TargetApi(16)
public class MediaCodecAudioRenderer extends MediaCodecRenderer implements MediaClock {
public class MediaCodecAudioRenderer extends MediaCodecRenderer implements MediaClock,
AudioTrack.Listener {
private final EventDispatcher eventDispatcher;
private final AudioTrack audioTrack;
@ -55,9 +55,6 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
private long currentPositionUs;
private boolean allowPositionDiscontinuity;
private boolean audioTrackHasData;
private long lastFeedElapsedRealtimeMs;
/**
* @param mediaCodecSelector A decoder selector.
*/
@ -136,7 +133,7 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
int streamType) {
super(C.TRACK_TYPE_AUDIO, mediaCodecSelector, drmSessionManager, playClearSamplesWithoutKeys);
audioSessionId = AudioTrack.SESSION_ID_NOT_SET;
audioTrack = new AudioTrack(audioCapabilities, streamType);
audioTrack = new AudioTrack(audioCapabilities, streamType, this);
eventDispatcher = new EventDispatcher(eventHandler, eventListener);
}
@ -341,29 +338,17 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
} else {
audioTrack.initialize(audioSessionId);
}
audioTrackHasData = false;
} catch (AudioTrack.InitializationException e) {
throw ExoPlaybackException.createForRenderer(e, getIndex());
}
if (getState() == STATE_STARTED) {
audioTrack.play();
}
} else {
// Check for AudioTrack underrun.
boolean audioTrackHadData = audioTrackHasData;
audioTrackHasData = audioTrack.hasPendingData();
if (audioTrackHadData && !audioTrackHasData && getState() == STATE_STARTED) {
long elapsedSinceLastFeedMs = SystemClock.elapsedRealtime() - lastFeedElapsedRealtimeMs;
long bufferSizeMs = C.usToMs(audioTrack.getBufferSizeUs());
eventDispatcher.audioTrackUnderrun(audioTrack.getBufferSize(), bufferSizeMs,
elapsedSinceLastFeedMs);
}
}
int handleBufferResult;
try {
handleBufferResult = audioTrack.handleBuffer(buffer, bufferPresentationTimeUs);
lastFeedElapsedRealtimeMs = SystemClock.elapsedRealtime();
} catch (AudioTrack.WriteException e) {
throw ExoPlaybackException.createForRenderer(e, getIndex());
}
@ -408,4 +393,11 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
}
}
// AudioTrack.Listener implementation.
// AudioTrack.Listener implementation: forwards track underruns to this renderer's event
// dispatcher so registered event listeners are notified.
@Override
public void onUnderrun(int bufferSize, long bufferSizeMs, long elapsedSinceLastFeedMs) {
eventDispatcher.audioTrackUnderrun(bufferSize, bufferSizeMs, elapsedSinceLastFeedMs);
}
}

View file

@ -41,7 +41,8 @@ import com.google.android.exoplayer2.util.Util;
/**
* Decodes and renders audio using a {@link SimpleDecoder}.
*/
public abstract class SimpleDecoderAudioRenderer extends BaseRenderer implements MediaClock {
public abstract class SimpleDecoderAudioRenderer extends BaseRenderer implements MediaClock,
AudioTrack.Listener {
private final boolean playClearSamplesWithoutKeys;
@ -67,9 +68,6 @@ public abstract class SimpleDecoderAudioRenderer extends BaseRenderer implements
private final AudioTrack audioTrack;
private int audioSessionId;
private boolean audioTrackHasData;
private long lastFeedElapsedRealtimeMs;
public SimpleDecoderAudioRenderer() {
this(null, null);
}
@ -122,7 +120,7 @@ public abstract class SimpleDecoderAudioRenderer extends BaseRenderer implements
this.playClearSamplesWithoutKeys = playClearSamplesWithoutKeys;
eventDispatcher = new EventDispatcher(eventHandler, eventListener);
audioSessionId = AudioTrack.SESSION_ID_NOT_SET;
audioTrack = new AudioTrack(audioCapabilities, streamType);
audioTrack = new AudioTrack(audioCapabilities, streamType, this);
formatHolder = new FormatHolder();
}
@ -245,24 +243,12 @@ public abstract class SimpleDecoderAudioRenderer extends BaseRenderer implements
} else {
audioTrack.initialize(audioSessionId);
}
audioTrackHasData = false;
if (getState() == STATE_STARTED) {
audioTrack.play();
}
} else {
// Check for AudioTrack underrun.
boolean audioTrackHadData = audioTrackHasData;
audioTrackHasData = audioTrack.hasPendingData();
if (audioTrackHadData && !audioTrackHasData && getState() == STATE_STARTED) {
long elapsedSinceLastFeedMs = SystemClock.elapsedRealtime() - lastFeedElapsedRealtimeMs;
long bufferSizeMs = C.usToMs(audioTrack.getBufferSizeUs());
eventDispatcher.audioTrackUnderrun(audioTrack.getBufferSize(), bufferSizeMs,
elapsedSinceLastFeedMs);
}
}
int handleBufferResult = audioTrack.handleBuffer(outputBuffer.data, outputBuffer.timeUs);
lastFeedElapsedRealtimeMs = SystemClock.elapsedRealtime();
// If we are out of sync, allow currentPositionUs to jump backwards.
if ((handleBufferResult & AudioTrack.RESULT_POSITION_DISCONTINUITY) != 0) {
@ -493,4 +479,11 @@ public abstract class SimpleDecoderAudioRenderer extends BaseRenderer implements
}
}
// AudioTrack.Listener implementation.
// AudioTrack.Listener implementation: forwards track underruns to this renderer's event
// dispatcher so registered event listeners are notified.
@Override
public void onUnderrun(int bufferSize, long bufferSizeMs, long elapsedSinceLastFeedMs) {
eventDispatcher.audioTrackUnderrun(bufferSize, bufferSizeMs, elapsedSinceLastFeedMs);
}
}