diff --git a/RELEASENOTES.md b/RELEASENOTES.md index b65ea50c08..70da743164 100644 --- a/RELEASENOTES.md +++ b/RELEASENOTES.md @@ -37,6 +37,12 @@ * Replace DownloadState.action with DownloadAction fields. * DRM: Fix black flicker when keys rotate in DRM protected content ([#3561](https://github.com/google/ExoPlayer/issues/3561)). +* Audio: + * Allow `AudioProcessor`s to be drained of pending output after they are + reconfigured. + * Fix an issue that caused audio to be truncated at the end of a period + when switching to a new period where gapless playback information was newly + present or newly absent. * Add support for SHOUTcast ICY metadata ([#3735](https://github.com/google/ExoPlayer/issues/3735)). * CEA-608: Improved conformance to the specification diff --git a/extensions/gvr/src/main/java/com/google/android/exoplayer2/ext/gvr/GvrAudioProcessor.java b/extensions/gvr/src/main/java/com/google/android/exoplayer2/ext/gvr/GvrAudioProcessor.java index b3429ef06d..d0aa922d38 100644 --- a/extensions/gvr/src/main/java/com/google/android/exoplayer2/ext/gvr/GvrAudioProcessor.java +++ b/extensions/gvr/src/main/java/com/google/android/exoplayer2/ext/gvr/GvrAudioProcessor.java @@ -38,9 +38,11 @@ public final class GvrAudioProcessor implements AudioProcessor { private static final int FRAMES_PER_OUTPUT_BUFFER = 1024; private static final int OUTPUT_CHANNEL_COUNT = 2; private static final int OUTPUT_FRAME_SIZE = OUTPUT_CHANNEL_COUNT * 2; // 16-bit stereo output. 
+ private static final int NO_SURROUND_FORMAT = GvrAudioSurround.SurroundFormat.INVALID; private int sampleRateHz; private int channelCount; + private int pendingGvrAudioSurroundFormat; @Nullable private GvrAudioSurround gvrAudioSurround; private ByteBuffer buffer; private boolean inputEnded; @@ -57,6 +59,7 @@ public final class GvrAudioProcessor implements AudioProcessor { sampleRateHz = Format.NO_VALUE; channelCount = Format.NO_VALUE; buffer = EMPTY_BUFFER; + pendingGvrAudioSurroundFormat = NO_SURROUND_FORMAT; } /** @@ -92,33 +95,28 @@ public final class GvrAudioProcessor implements AudioProcessor { } this.sampleRateHz = sampleRateHz; this.channelCount = channelCount; - maybeReleaseGvrAudioSurround(); - int surroundFormat; switch (channelCount) { case 1: - surroundFormat = GvrAudioSurround.SurroundFormat.SURROUND_MONO; + pendingGvrAudioSurroundFormat = GvrAudioSurround.SurroundFormat.SURROUND_MONO; break; case 2: - surroundFormat = GvrAudioSurround.SurroundFormat.SURROUND_STEREO; + pendingGvrAudioSurroundFormat = GvrAudioSurround.SurroundFormat.SURROUND_STEREO; break; case 4: - surroundFormat = GvrAudioSurround.SurroundFormat.FIRST_ORDER_AMBISONICS; + pendingGvrAudioSurroundFormat = GvrAudioSurround.SurroundFormat.FIRST_ORDER_AMBISONICS; break; case 6: - surroundFormat = GvrAudioSurround.SurroundFormat.SURROUND_FIVE_DOT_ONE; + pendingGvrAudioSurroundFormat = GvrAudioSurround.SurroundFormat.SURROUND_FIVE_DOT_ONE; break; case 9: - surroundFormat = GvrAudioSurround.SurroundFormat.SECOND_ORDER_AMBISONICS; + pendingGvrAudioSurroundFormat = GvrAudioSurround.SurroundFormat.SECOND_ORDER_AMBISONICS; break; case 16: - surroundFormat = GvrAudioSurround.SurroundFormat.THIRD_ORDER_AMBISONICS; + pendingGvrAudioSurroundFormat = GvrAudioSurround.SurroundFormat.THIRD_ORDER_AMBISONICS; break; default: throw new UnhandledFormatException(sampleRateHz, channelCount, encoding); } - gvrAudioSurround = new GvrAudioSurround(surroundFormat, sampleRateHz, channelCount, - 
FRAMES_PER_OUTPUT_BUFFER); - gvrAudioSurround.updateNativeOrientation(w, x, y, z); if (buffer == EMPTY_BUFFER) { buffer = ByteBuffer.allocateDirect(FRAMES_PER_OUTPUT_BUFFER * OUTPUT_FRAME_SIZE) .order(ByteOrder.nativeOrder()); @@ -128,7 +126,7 @@ public final class GvrAudioProcessor implements AudioProcessor { @Override public boolean isActive() { - return gvrAudioSurround != null; + return pendingGvrAudioSurroundFormat != NO_SURROUND_FORMAT || gvrAudioSurround != null; } @Override @@ -156,14 +154,17 @@ public final class GvrAudioProcessor implements AudioProcessor { @Override public void queueEndOfStream() { - Assertions.checkNotNull(gvrAudioSurround); + if (gvrAudioSurround != null) { + gvrAudioSurround.triggerProcessing(); + } inputEnded = true; - gvrAudioSurround.triggerProcessing(); } @Override public ByteBuffer getOutput() { - Assertions.checkNotNull(gvrAudioSurround); + if (gvrAudioSurround == null) { + return EMPTY_BUFFER; + } int writtenBytes = gvrAudioSurround.getOutput(buffer, 0, buffer.capacity()); buffer.position(0).limit(writtenBytes); return buffer; @@ -171,13 +172,20 @@ public final class GvrAudioProcessor implements AudioProcessor { @Override public boolean isEnded() { - Assertions.checkNotNull(gvrAudioSurround); - return inputEnded && gvrAudioSurround.getAvailableOutputSize() == 0; + return inputEnded + && (gvrAudioSurround == null || gvrAudioSurround.getAvailableOutputSize() == 0); } @Override public void flush() { - if (gvrAudioSurround != null) { + if (pendingGvrAudioSurroundFormat != NO_SURROUND_FORMAT) { + maybeReleaseGvrAudioSurround(); + gvrAudioSurround = + new GvrAudioSurround( + pendingGvrAudioSurroundFormat, sampleRateHz, channelCount, FRAMES_PER_OUTPUT_BUFFER); + gvrAudioSurround.updateNativeOrientation(w, x, y, z); + pendingGvrAudioSurroundFormat = NO_SURROUND_FORMAT; + } else if (gvrAudioSurround != null) { gvrAudioSurround.flush(); } inputEnded = false; @@ -191,6 +199,7 @@ public final class GvrAudioProcessor implements 
AudioProcessor { sampleRateHz = Format.NO_VALUE; channelCount = Format.NO_VALUE; buffer = EMPTY_BUFFER; + pendingGvrAudioSurroundFormat = NO_SURROUND_FORMAT; } private void maybeReleaseGvrAudioSurround() { diff --git a/library/core/src/main/java/com/google/android/exoplayer2/audio/AudioProcessor.java b/library/core/src/main/java/com/google/android/exoplayer2/audio/AudioProcessor.java index 4cf8ef2a8b..1bf141cb43 100644 --- a/library/core/src/main/java/com/google/android/exoplayer2/audio/AudioProcessor.java +++ b/library/core/src/main/java/com/google/android/exoplayer2/audio/AudioProcessor.java @@ -24,11 +24,10 @@ import java.nio.ByteOrder; * modifying its channel count, encoding and/or sample rate. * *
Call {@link #configure(int, int, int)} to configure the processor to receive input audio, then - * call {@link #isActive()} to determine whether the processor is active. {@link - * #queueInput(ByteBuffer)}, {@link #queueEndOfStream()}, {@link #getOutput()}, {@link #isEnded()}, - * {@link #getOutputChannelCount()}, {@link #getOutputEncoding()} and {@link - * #getOutputSampleRateHz()} may only be called if the processor is active. Call {@link #reset()} to - * reset the processor to its unconfigured state and release any resources. + * call {@link #isActive()} to determine whether the processor is active in the new configuration. + * {@link #queueInput(ByteBuffer)}, {@link #getOutputChannelCount()}, {@link #getOutputEncoding()} + * and {@link #getOutputSampleRateHz()} may only be called if the processor is active. Call {@link + * #reset()} to reset the processor to its unconfigured state and release any resources. * *
In addition to being able to modify the format of audio, implementations may allow parameters * to be set that affect the output audio and whether the processor is active/inactive. @@ -50,15 +49,21 @@ public interface AudioProcessor { ByteBuffer EMPTY_BUFFER = ByteBuffer.allocateDirect(0).order(ByteOrder.nativeOrder()); /** - * Configures the processor to process input audio with the specified format and returns whether - * to {@link #flush()} it. After calling this method, if the processor is active, {@link - * #getOutputSampleRateHz()}, {@link #getOutputChannelCount()} and {@link #getOutputEncoding()} - * return its output format. + * Configures the processor to process input audio with the specified format. After calling this + * method, call {@link #isActive()} to determine whether the audio processor is active. + * + *
If the audio processor is active after configuration, call {@link #getOutputSampleRateHz()}, + * {@link #getOutputChannelCount()} and {@link #getOutputEncoding()} to get its new output format. + * + *
If this method returns {@code true}, it is necessary to {@link #flush()} the processor + * before queueing more data, but you can (optionally) first drain output in the previous + * configuration by calling {@link #queueEndOfStream()} and {@link #getOutput()}. If this method + * returns {@code false}, it is safe to queue new input immediately. * * @param sampleRateHz The sample rate of input audio in Hz. * @param channelCount The number of interleaved channels in input audio. * @param encoding The encoding of input audio. - * @return Whether to {@link #flush()} the processor. + * @return Whether the processor must be {@link #flush() flushed} before queueing more input. * @throws UnhandledFormatException Thrown if the specified format can't be handled as input. */ boolean configure(int sampleRateHz, int channelCount, @C.PcmEncoding int encoding) @@ -69,23 +74,20 @@ public interface AudioProcessor { /** * Returns the number of audio channels in the data output by the processor. The value may change - * as a result of calling {@link #configure(int, int, int)} and is undefined if the instance is - * not active. + * as a result of calling {@link #configure(int, int, int)}. */ int getOutputChannelCount(); /** * Returns the audio encoding used in the data output by the processor. The value may change as a - * result of calling {@link #configure(int, int, int)} and is undefined if the instance is not - * active. + * result of calling {@link #configure(int, int, int)}. */ @C.PcmEncoding int getOutputEncoding(); /** * Returns the sample rate of audio output by the processor, in hertz. The value may change as a - * result of calling {@link #configure(int, int, int)} and is undefined if the instance is not - * active. + * result of calling {@link #configure(int, int, int)}. */ int getOutputSampleRateHz(); @@ -124,7 +126,10 @@ public interface AudioProcessor { */ boolean isEnded(); - /** Clears any state in preparation for receiving a new stream of input buffers. 
*/ + /** + * Clears any buffered data and pending output. If the audio processor is active, also prepares + * the audio processor to receive a new stream of input in the last configured (pending) format. + */ void flush(); /** Resets the processor to its unconfigured state. */ diff --git a/library/core/src/main/java/com/google/android/exoplayer2/audio/DefaultAudioSink.java b/library/core/src/main/java/com/google/android/exoplayer2/audio/DefaultAudioSink.java index baeea07683..e48d33294a 100644 --- a/library/core/src/main/java/com/google/android/exoplayer2/audio/DefaultAudioSink.java +++ b/library/core/src/main/java/com/google/android/exoplayer2/audio/DefaultAudioSink.java @@ -242,6 +242,7 @@ public final class DefaultAudioSink implements AudioSink { /** Used to keep the audio session active on pre-V21 builds (see {@link #initialize()}). */ @Nullable private AudioTrack keepSessionIdAudioTrack; + @Nullable private Configuration pendingConfiguration; private Configuration configuration; private AudioTrack audioTrack; @@ -423,13 +424,13 @@ public final class DefaultAudioSink implements AudioSink { shouldConvertHighResIntPcmToFloat ? 
toFloatPcmAvailableAudioProcessors : toIntPcmAvailableAudioProcessors; - boolean flush = false; + boolean flushAudioProcessors = false; if (processingEnabled) { trimmingAudioProcessor.setTrimFrameCount(trimStartFrames, trimEndFrames); channelMappingAudioProcessor.setChannelMap(outputChannels); for (AudioProcessor audioProcessor : availableAudioProcessors) { try { - flush |= audioProcessor.configure(sampleRate, channelCount, encoding); + flushAudioProcessors |= audioProcessor.configure(sampleRate, channelCount, encoding); } catch (AudioProcessor.UnhandledFormatException e) { throw new ConfigurationException(e); } @@ -464,8 +465,14 @@ public final class DefaultAudioSink implements AudioSink { processingEnabled, canApplyPlaybackParameters, availableAudioProcessors); - if (flush || configuration == null || !pendingConfiguration.canReuseAudioTrack(configuration)) { + if (configuration == null || !pendingConfiguration.canReuseAudioTrack(configuration)) { + // We need a new AudioTrack before we can handle more input. We should first stop() the track + // (if we have one) and wait for audio to play out. Tracked by [Internal: b/33161961]. flush(); + } else if (flushAudioProcessors) { + // We don't need a new AudioTrack but audio processors need to be flushed. + this.pendingConfiguration = pendingConfiguration; + return; } configuration = pendingConfiguration; } @@ -567,6 +574,21 @@ public final class DefaultAudioSink implements AudioSink { public boolean handleBuffer(ByteBuffer buffer, long presentationTimeUs) throws InitializationException, WriteException { Assertions.checkArgument(inputBuffer == null || buffer == inputBuffer); + + if (pendingConfiguration != null) { + // We are waiting for audio processors to drain before applying the new configuration. + if (!drainAudioProcessorsToEndOfStream()) { + return false; + } + configuration = pendingConfiguration; + pendingConfiguration = null; + playbackParameters = + configuration.canApplyPlaybackParameters + ? 
audioProcessorChain.applyPlaybackParameters(playbackParameters) + : PlaybackParameters.DEFAULT; + setupAudioProcessors(); + } + if (!isInitialized()) { initialize(); if (playing) { @@ -948,9 +970,9 @@ public final class DefaultAudioSink implements AudioSink { playbackParametersOffsetUs = 0; playbackParametersPositionUs = 0; trimmingAudioProcessor.resetTrimmedFrameCount(); + flushAudioProcessors(); inputBuffer = null; outputBuffer = null; - flushAudioProcessors(); handledEndOfStream = false; drainingAudioProcessorIndex = C.INDEX_UNSET; avSyncHeader = null; @@ -962,6 +984,10 @@ public final class DefaultAudioSink implements AudioSink { // AudioTrack.release can take some time, so we call it on a background thread. final AudioTrack toRelease = audioTrack; audioTrack = null; + if (pendingConfiguration != null) { + configuration = pendingConfiguration; + pendingConfiguration = null; + } audioTrackPositionTracker.reset(); releasingConditionVariable.close(); new Thread() { diff --git a/library/core/src/main/java/com/google/android/exoplayer2/audio/SonicAudioProcessor.java b/library/core/src/main/java/com/google/android/exoplayer2/audio/SonicAudioProcessor.java index 28fd38cccb..9bf9917a9d 100644 --- a/library/core/src/main/java/com/google/android/exoplayer2/audio/SonicAudioProcessor.java +++ b/library/core/src/main/java/com/google/android/exoplayer2/audio/SonicAudioProcessor.java @@ -69,6 +69,7 @@ public final class SonicAudioProcessor implements AudioProcessor { private int outputSampleRateHz; private int pendingOutputSampleRateHz; + private boolean pendingSonicRecreation; @Nullable private Sonic sonic; private ByteBuffer buffer; private ShortBuffer shortBuffer; @@ -103,7 +104,7 @@ public final class SonicAudioProcessor implements AudioProcessor { speed = Util.constrainValue(speed, MINIMUM_SPEED, MAXIMUM_SPEED); if (this.speed != speed) { this.speed = speed; - sonic = null; + pendingSonicRecreation = true; } flush(); return speed; @@ -120,7 +121,7 @@ public final class 
SonicAudioProcessor implements AudioProcessor { pitch = Util.constrainValue(pitch, MINIMUM_PITCH, MAXIMUM_PITCH); if (this.pitch != pitch) { this.pitch = pitch; - sonic = null; + pendingSonicRecreation = true; } flush(); return pitch; @@ -172,7 +173,7 @@ public final class SonicAudioProcessor implements AudioProcessor { this.sampleRateHz = sampleRateHz; this.channelCount = channelCount; this.outputSampleRateHz = outputSampleRateHz; - sonic = null; + pendingSonicRecreation = true; return true; } @@ -227,7 +228,9 @@ public final class SonicAudioProcessor implements AudioProcessor { @Override public void queueEndOfStream() { - Assertions.checkNotNull(sonic).queueEndOfStream(); + if (sonic != null) { + sonic.queueEndOfStream(); + } inputEnded = true; } @@ -246,9 +249,9 @@ public final class SonicAudioProcessor implements AudioProcessor { @Override public void flush() { if (isActive()) { - if (sonic == null) { + if (pendingSonicRecreation) { sonic = new Sonic(sampleRateHz, channelCount, speed, pitch, outputSampleRateHz); - } else { + } else if (sonic != null) { sonic.flush(); } } @@ -269,6 +272,7 @@ public final class SonicAudioProcessor implements AudioProcessor { shortBuffer = buffer.asShortBuffer(); outputBuffer = EMPTY_BUFFER; pendingOutputSampleRateHz = SAMPLE_RATE_NO_CHANGE; + pendingSonicRecreation = false; sonic = null; inputBytes = 0; outputBytes = 0; diff --git a/library/core/src/main/java/com/google/android/exoplayer2/audio/TrimmingAudioProcessor.java b/library/core/src/main/java/com/google/android/exoplayer2/audio/TrimmingAudioProcessor.java index 09325ca14b..c9e9f921c7 100644 --- a/library/core/src/main/java/com/google/android/exoplayer2/audio/TrimmingAudioProcessor.java +++ b/library/core/src/main/java/com/google/android/exoplayer2/audio/TrimmingAudioProcessor.java @@ -147,10 +147,10 @@ import java.nio.ByteBuffer; public ByteBuffer getOutput() { if (super.isEnded() && endBufferSize > 0) { // Because audio processors may be drained in the middle of the 
stream we assume that the - contents of the end buffer need to be output. Gapless transitions don't involve a call to - queueEndOfStream so won't be affected. When audio is actually ending we play the padding - data which is incorrect. This behavior can be fixed once we have the timestamps associated - with input buffers. + contents of the end buffer need to be output. For gapless transitions, configure will + always be called, which clears the end buffer as needed. When audio is actually ending we + play the padding data which is incorrect. This behavior can be fixed once we have the + timestamps associated with input buffers. replaceOutputBuffer(endBufferSize).put(endBuffer, 0, endBufferSize).flip(); endBufferSize = 0; }