Add support for using framework speed adjustment

AudioTrack.setPlaybackParams can be used to adjust playback speed.
This is preferable to application-level speed adjustment (currently
implemented in ExoPlayer) from API 23 because the speed change
occurs in the mixer, which means that the audio track buffer doesn't
need to drain out before the speed adjustment takes effect.

Issue: #7502
PiperOrigin-RevId: 326392301
This commit is contained in:
andrewlewis 2020-08-13 07:51:04 +01:00 committed by kim-vde
parent b95c984d16
commit 501f464217
6 changed files with 109 additions and 14 deletions

View file

@@ -122,6 +122,9 @@
* Add floating point PCM output capability in `MediaCodecAudioRenderer`,
and `LibopusAudioRenderer`.
* Do not use a MediaCodec for PCM formats if AudioTrack supports it.
* Add optional support for using framework audio speed adjustment instead
of application-level audio speed adjustment
([#7502](https://github.com/google/ExoPlayer/issues/7502)).
* Text:
* Recreate the decoder when handling and swallowing decode errors in
`TextRenderer`. This fixes a case where playback would never end when

View file

@@ -17,6 +17,7 @@ package com.google.android.exoplayer2;
import android.content.Context;
import android.media.MediaCodec;
import android.media.PlaybackParams;
import android.os.Handler;
import android.os.Looper;
import androidx.annotation.IntDef;
@@ -94,6 +95,7 @@ public class DefaultRenderersFactory implements RenderersFactory {
private @MediaCodecRenderer.MediaCodecOperationMode int audioMediaCodecOperationMode;
private @MediaCodecRenderer.MediaCodecOperationMode int videoMediaCodecOperationMode;
private boolean enableFloatOutput;
private boolean enableAudioTrackPlaybackParams;
private boolean enableOffload;
/** @param context A {@link Context}. */
@@ -258,6 +260,30 @@ public class DefaultRenderersFactory implements RenderersFactory {
return this;
}
/**
 * Sets whether playback speed should be applied using {@link
 * android.media.AudioTrack#setPlaybackParams(PlaybackParams)} (supported from API level 23)
 * instead of application-level audio speed adjustment. On builds before API level 23 this
 * setting has no effect and application-level speed adjustment is always used.
 *
 * <p>When enabled and supported, changes to the playback speed take effect sooner because they
 * are applied at the audio mixer rather than at the point where data is written to the track.
 *
 * <p>In this mode the maximum supported playback speed is bounded by the size of the audio
 * track's buffer. If a requested speed cannot be honored, the player's event listener is
 * notified twice: first with the requested speed, and then again with the previous speed once
 * it is known that the requested speed is unsupported.
 *
 * @param enableAudioTrackPlaybackParams Whether to apply playback speed using {@link
 *     android.media.AudioTrack#setPlaybackParams(PlaybackParams)}.
 * @return This factory, for convenience.
 */
public DefaultRenderersFactory setEnableAudioTrackPlaybackParams(
    boolean enableAudioTrackPlaybackParams) {
  this.enableAudioTrackPlaybackParams = enableAudioTrackPlaybackParams;
  return this;
}
/**
* Sets the maximum duration for which video renderers can attempt to seamlessly join an ongoing
* playback.
@@ -290,7 +316,9 @@ public class DefaultRenderersFactory implements RenderersFactory {
videoRendererEventListener,
allowedVideoJoiningTimeMs,
renderersList);
@Nullable AudioSink audioSink = buildAudioSink(context, enableFloatOutput, enableOffload);
@Nullable
AudioSink audioSink =
buildAudioSink(context, enableFloatOutput, enableAudioTrackPlaybackParams, enableOffload);
if (audioSink != null) {
buildAudioRenderers(
context,
@@ -611,6 +639,8 @@ public class DefaultRenderersFactory implements RenderersFactory {
*
* @param context The {@link Context} associated with the player.
* @param enableFloatOutput Whether to enable use of floating point audio output, if available.
* @param enableAudioTrackPlaybackParams Whether to enable setting playback speed using {@link
* android.media.AudioTrack#setPlaybackParams(PlaybackParams)}, if supported.
* @param enableOffload Whether to enable use of audio offload for supported formats, if
* available.
* @return The {@link AudioSink} to which the audio renderers will output. May be {@code null} if
@@ -619,11 +649,15 @@ public class DefaultRenderersFactory implements RenderersFactory {
*/
@Nullable
protected AudioSink buildAudioSink(
Context context, boolean enableFloatOutput, boolean enableOffload) {
Context context,
boolean enableFloatOutput,
boolean enableAudioTrackPlaybackParams,
boolean enableOffload) {
return new DefaultAudioSink(
AudioCapabilities.getCapabilities(context),
new DefaultAudioProcessorChain(),
enableFloatOutput,
enableAudioTrackPlaybackParams,
enableOffload);
}
}

View file

@@ -625,7 +625,7 @@ public interface ExoPlayer extends Player {
* <li>audio offload rendering is enabled in {@link
* DefaultRenderersFactory#setEnableAudioOffload} or the equivalent option passed to {@link
* com.google.android.exoplayer2.audio.DefaultAudioSink#DefaultAudioSink(AudioCapabilities,
* DefaultAudioSink.AudioProcessorChain, boolean, boolean)}.
* DefaultAudioSink.AudioProcessorChain, boolean, boolean, boolean)}.
* <li>an audio track is playing in a format which the device supports offloading (for example
* MP3 or AAC).
* <li>The {@link com.google.android.exoplayer2.audio.AudioSink} is playing with an offload

View file

@@ -144,6 +144,7 @@ import java.lang.reflect.Method;
private int outputSampleRate;
private boolean needsPassthroughWorkarounds;
private long bufferSizeUs;
private float audioTrackPlaybackSpeed;
private long smoothedPlayheadOffsetUs;
private long lastPlayheadSampleTimeUs;
@@ -223,6 +224,16 @@ import java.lang.reflect.Method;
forceResetWorkaroundTimeMs = C.TIME_UNSET;
lastLatencySampleTimeUs = 0;
latencyUs = 0;
audioTrackPlaybackSpeed = 1f;
}
public void setAudioTrackPlaybackSpeed(float audioTrackPlaybackSpeed) {
  // Position extrapolation from the most recent audio timestamp assumes a constant playout
  // rate, so any cached timestamp becomes invalid once the speed changes. Discard it and wait
  // for the poller to deliver a fresh timestamp at the new rate.
  if (audioTimestampPoller != null) {
    audioTimestampPoller.reset();
  }
  this.audioTrackPlaybackSpeed = audioTrackPlaybackSpeed;
}
public long getCurrentPositionUs(boolean sourceEnded) {
@@ -241,6 +252,8 @@ import java.lang.reflect.Method;
long timestampPositionFrames = audioTimestampPoller.getTimestampPositionFrames();
long timestampPositionUs = framesToDurationUs(timestampPositionFrames);
long elapsedSinceTimestampUs = systemTimeUs - audioTimestampPoller.getTimestampSystemTimeUs();
elapsedSinceTimestampUs =
Util.getMediaDurationForPlayoutDuration(elapsedSinceTimestampUs, audioTrackPlaybackSpeed);
positionUs = timestampPositionUs + elapsedSinceTimestampUs;
} else {
if (playheadOffsetCount == 0) {

View file

@@ -22,6 +22,7 @@ import android.annotation.SuppressLint;
import android.media.AudioFormat;
import android.media.AudioManager;
import android.media.AudioTrack;
import android.media.PlaybackParams;
import android.os.ConditionVariable;
import android.os.Handler;
import android.os.SystemClock;
@@ -273,6 +274,7 @@ public final class DefaultAudioSink implements AudioSink {
private final ConditionVariable releasingConditionVariable;
private final AudioTrackPositionTracker audioTrackPositionTracker;
private final ArrayDeque<MediaPositionParameters> mediaPositionParametersCheckpoints;
private final boolean enableAudioTrackPlaybackParams;
private final boolean enableOffload;
@MonotonicNonNull private StreamEventCallbackV29 offloadStreamEventCallbackV29;
@@ -287,6 +289,7 @@ public final class DefaultAudioSink implements AudioSink {
private AudioAttributes audioAttributes;
@Nullable private MediaPositionParameters afterDrainParameters;
private MediaPositionParameters mediaPositionParameters;
private float audioTrackPlaybackSpeed;
@Nullable private ByteBuffer avSyncHeader;
private int bytesUntilNextAvSync;
@@ -359,6 +362,7 @@
audioCapabilities,
new DefaultAudioProcessorChain(audioProcessors),
enableFloatOutput,
/* enableAudioTrackPlaybackParams= */ false,
/* enableOffload= */ false);
}
@@ -375,6 +379,8 @@
* (24-bit or 32-bit) integer PCM. Float output is supported from API level 21. Audio
* processing (for example, speed adjustment) will not be available when float output is in
* use.
* @param enableAudioTrackPlaybackParams Whether to enable setting playback speed using {@link
* android.media.AudioTrack#setPlaybackParams(PlaybackParams)}, if supported.
* @param enableOffload Whether to enable audio offload. If an audio format can be both played
* with offload and encoded audio passthrough, it will be played in offload. Audio offload is
* supported from API level 29. Most Android devices can only support one offload {@link
@@ -386,10 +392,12 @@
@Nullable AudioCapabilities audioCapabilities,
AudioProcessorChain audioProcessorChain,
boolean enableFloatOutput,
boolean enableAudioTrackPlaybackParams,
boolean enableOffload) {
this.audioCapabilities = audioCapabilities;
this.audioProcessorChain = Assertions.checkNotNull(audioProcessorChain);
this.enableFloatOutput = Util.SDK_INT >= 21 && enableFloatOutput;
this.enableAudioTrackPlaybackParams = Util.SDK_INT >= 23 && enableAudioTrackPlaybackParams;
this.enableOffload = Util.SDK_INT >= 29 && enableOffload;
releasingConditionVariable = new ConditionVariable(true);
audioTrackPositionTracker = new AudioTrackPositionTracker(new PositionTrackerListener());
@@ -414,6 +422,7 @@
DEFAULT_SKIP_SILENCE,
/* mediaTimeUs= */ 0,
/* audioTrackPositionUs= */ 0);
audioTrackPlaybackSpeed = 1f;
drainingAudioProcessorIndex = C.INDEX_UNSET;
activeAudioProcessors = new AudioProcessor[0];
outputBuffers = new ByteBuffer[0];
@@ -641,7 +650,10 @@
startMediaTimeUs = max(0, presentationTimeUs);
startMediaTimeUsNeedsSync = false;
applyPlaybackSpeedAndSkipSilence(presentationTimeUs);
if (enableAudioTrackPlaybackParams && Util.SDK_INT >= 23) {
setAudioTrackPlaybackSpeedV23(audioTrackPlaybackSpeed);
}
applyAudioProcessorPlaybackSpeedAndSkipSilence(presentationTimeUs);
audioTrackPositionTracker.setAudioTrack(
audioTrack,
@@ -701,7 +713,7 @@
}
}
// Re-apply playback parameters.
applyPlaybackSpeedAndSkipSilence(presentationTimeUs);
applyAudioProcessorPlaybackSpeedAndSkipSilence(presentationTimeUs);
}
if (!isInitialized()) {
@@ -740,7 +752,7 @@
// Don't process any more input until draining completes.
return false;
}
applyPlaybackSpeedAndSkipSilence(presentationTimeUs);
applyAudioProcessorPlaybackSpeedAndSkipSilence(presentationTimeUs);
afterDrainParameters = null;
}
@@ -771,7 +783,7 @@
startMediaTimeUs += adjustmentUs;
startMediaTimeUsNeedsSync = false;
// Re-apply playback parameters because the startMediaTimeUs changed.
applyPlaybackSpeedAndSkipSilence(presentationTimeUs);
applyAudioProcessorPlaybackSpeedAndSkipSilence(presentationTimeUs);
if (listener != null && adjustmentUs != 0) {
listener.onPositionDiscontinuity();
}
@@ -985,17 +997,24 @@
@Override
public void setPlaybackSpeed(float playbackSpeed) {
setPlaybackSpeedAndSkipSilence(playbackSpeed, getSkipSilenceEnabled());
if (enableAudioTrackPlaybackParams && Util.SDK_INT >= 23) {
setAudioTrackPlaybackSpeedV23(playbackSpeed);
} else {
setAudioProcessorPlaybackSpeedAndSkipSilence(playbackSpeed, getSkipSilenceEnabled());
}
}
@Override
public float getPlaybackSpeed() {
return getMediaPositionParameters().playbackSpeed;
// We use either audio processor speed adjustment or AudioTrack playback parameters, so one of
// the operands is always 1f.
return getAudioProcessorPlaybackSpeed() * audioTrackPlaybackSpeed;
}
@Override
public void setSkipSilenceEnabled(boolean skipSilenceEnabled) {
setPlaybackSpeedAndSkipSilence(getPlaybackSpeed(), skipSilenceEnabled);
setAudioProcessorPlaybackSpeedAndSkipSilence(
getAudioProcessorPlaybackSpeed(), skipSilenceEnabled);
}
@Override
@@ -1147,7 +1166,7 @@
framesPerEncodedSample = 0;
mediaPositionParameters =
new MediaPositionParameters(
getPlaybackSpeed(),
getAudioProcessorPlaybackSpeed(),
getSkipSilenceEnabled(),
/* mediaTimeUs= */ 0,
/* audioTrackPositionUs= */ 0);
@@ -1183,7 +1202,28 @@
}.start();
}
private void setPlaybackSpeedAndSkipSilence(float playbackSpeed, boolean skipSilence) {
@RequiresApi(23)
private void setAudioTrackPlaybackSpeedV23(float audioTrackPlaybackSpeed) {
  // Track the speed that actually ends up in effect: if the track rejects or adjusts the
  // requested speed, we record whatever it reports instead of the requested value.
  float effectiveSpeed = audioTrackPlaybackSpeed;
  if (isInitialized()) {
    PlaybackParams playbackParams =
        new PlaybackParams()
            .allowDefaults()
            .setSpeed(audioTrackPlaybackSpeed)
            .setAudioFallbackMode(PlaybackParams.AUDIO_FALLBACK_MODE_FAIL);
    try {
      audioTrack.setPlaybackParams(playbackParams);
    } catch (IllegalArgumentException e) {
      Log.w(TAG, "Failed to set playback params", e);
    }
    // Query the track for the speed it actually applied, and keep the position tracker in sync
    // so playhead extrapolation uses the real playout rate.
    effectiveSpeed = audioTrack.getPlaybackParams().getSpeed();
    audioTrackPositionTracker.setAudioTrackPlaybackSpeed(effectiveSpeed);
  }
  this.audioTrackPlaybackSpeed = effectiveSpeed;
}
private void setAudioProcessorPlaybackSpeedAndSkipSilence(
float playbackSpeed, boolean skipSilence) {
MediaPositionParameters currentMediaPositionParameters = getMediaPositionParameters();
if (playbackSpeed != currentMediaPositionParameters.playbackSpeed
|| skipSilence != currentMediaPositionParameters.skipSilence) {
@@ -1205,6 +1245,10 @@
}
}
// Returns the speed applied by the audio processor chain (1f while AudioTrack playback params
// are being used for speed adjustment instead).
private float getAudioProcessorPlaybackSpeed() {
  MediaPositionParameters parameters = getMediaPositionParameters();
  return parameters.playbackSpeed;
}
private MediaPositionParameters getMediaPositionParameters() {
// Mask the already set parameters.
return afterDrainParameters != null
@@ -1214,10 +1258,10 @@
: mediaPositionParameters;
}
private void applyPlaybackSpeedAndSkipSilence(long presentationTimeUs) {
private void applyAudioProcessorPlaybackSpeedAndSkipSilence(long presentationTimeUs) {
float playbackSpeed =
configuration.canApplyPlaybackParameters
? audioProcessorChain.applyPlaybackSpeed(getPlaybackSpeed())
? audioProcessorChain.applyPlaybackSpeed(getAudioProcessorPlaybackSpeed())
: DEFAULT_PLAYBACK_SPEED;
boolean skipSilenceEnabled =
configuration.canApplyPlaybackParameters

View file

@@ -62,6 +62,7 @@ public final class DefaultAudioSinkTest {
AudioCapabilities.DEFAULT_AUDIO_CAPABILITIES,
new DefaultAudioSink.DefaultAudioProcessorChain(teeAudioProcessor),
/* enableFloatOutput= */ false,
/* enableAudioTrackPlaybackParams= */ false,
/* enableOffload= */ false);
}