Moved FloatResamplingAudioProcessor into DefaultAudioSink and added a new constructor to DefaultAudioSink that uses that resampler when the audio input is 24/32-bit PCM and the new flag is enabled.

This commit is contained in:
Drew Hill 2018-01-06 00:26:18 -05:00
parent ca0c090c1a
commit 821ea0e58b
3 changed files with 60 additions and 102 deletions

View file

@ -164,10 +164,12 @@ public final class DefaultAudioSink implements AudioSink {
public static boolean failOnSpuriousAudioTimestamp = false;
@Nullable private final AudioCapabilities audioCapabilities;
private final boolean canConvertHiResPcmToFloat;
private final ChannelMappingAudioProcessor channelMappingAudioProcessor;
private final TrimmingAudioProcessor trimmingAudioProcessor;
private final SonicAudioProcessor sonicAudioProcessor;
private final AudioProcessor[] availableAudioProcessors;
private final AudioProcessor[] hiResAvailableAudioProcessors;
private final ConditionVariable releasingConditionVariable;
private final long[] playheadOffsets;
private final AudioTrackUtil audioTrackUtil;
@ -180,6 +182,7 @@ public final class DefaultAudioSink implements AudioSink {
private AudioTrack keepSessionIdAudioTrack;
private AudioTrack audioTrack;
private boolean isInputPcm;
private boolean shouldUpResPCMAudio;
private int inputSampleRate;
private int sampleRate;
private int channelConfig;
@ -233,6 +236,8 @@ public final class DefaultAudioSink implements AudioSink {
private boolean hasData;
private long lastFeedElapsedRealtimeMs;
/**
* @param audioCapabilities The audio capabilities for playback on this device. May be null if the
* default capabilities (no encoded audio passthrough support) should be assumed.
@ -241,7 +246,23 @@ public final class DefaultAudioSink implements AudioSink {
*/
public DefaultAudioSink(@Nullable AudioCapabilities audioCapabilities,
AudioProcessor[] audioProcessors) {
// Delegates with hi-res float conversion disabled, preserving the pre-existing
// two-argument behavior for existing callers.
this(audioCapabilities, audioProcessors, false);
}
/**
* @param audioCapabilities The audio capabilities for playback on this device. May be null if the
* default capabilities (no encoded audio passthrough support) should be assumed.
* @param audioProcessors An array of {@link AudioProcessor}s that will process PCM audio before
* output. May be empty.
* @param canConvertHiResPcmToFloat Whether to convert &gt;16-bit PCM audio to 32-bit float PCM
* audio, to avoid dithering the input audio. If enabled, the other audio processors, which
* expect 16-bit PCM, are disabled.
*/
public DefaultAudioSink(@Nullable AudioCapabilities audioCapabilities,
AudioProcessor[] audioProcessors, boolean canConvertHiResPcmToFloat) {
this.audioCapabilities = audioCapabilities;
this.canConvertHiResPcmToFloat = canConvertHiResPcmToFloat;
releasingConditionVariable = new ConditionVariable(true);
if (Util.SDK_INT >= 18) {
try {
@ -265,6 +286,8 @@ public final class DefaultAudioSink implements AudioSink {
availableAudioProcessors[2] = trimmingAudioProcessor;
System.arraycopy(audioProcessors, 0, availableAudioProcessors, 3, audioProcessors.length);
availableAudioProcessors[3 + audioProcessors.length] = sonicAudioProcessor;
hiResAvailableAudioProcessors = new AudioProcessor[1];
hiResAvailableAudioProcessors[0] = new FloatResamplingAudioProcessor();
playheadOffsets = new long[MAX_PLAYHEAD_OFFSET_COUNT];
volume = 1.0f;
startMediaTimeState = START_NOT_SET;
@ -342,15 +365,20 @@ public final class DefaultAudioSink implements AudioSink {
int channelCount = inputChannelCount;
int sampleRate = inputSampleRate;
isInputPcm = isEncodingPcm(inputEncoding);
shouldUpResPCMAudio = canConvertHiResPcmToFloat &&
(inputEncoding == C.ENCODING_PCM_24BIT || inputEncoding == C.ENCODING_PCM_32BIT);
if (isInputPcm) {
pcmFrameSize = Util.getPcmFrameSize(inputEncoding, channelCount);
pcmFrameSize = Util.getPcmFrameSize(shouldUpResPCMAudio
? C.ENCODING_PCM_FLOAT : inputEncoding, channelCount);
}
@C.Encoding int encoding = inputEncoding;
boolean processingEnabled = isInputPcm && inputEncoding != C.ENCODING_PCM_FLOAT;
if (processingEnabled) {
AudioProcessor[] activeAudioProcessors = shouldUpResPCMAudio ?
hiResAvailableAudioProcessors : availableAudioProcessors;
trimmingAudioProcessor.setTrimSampleCount(trimStartSamples, trimEndSamples);
channelMappingAudioProcessor.setChannelMap(outputChannels);
for (AudioProcessor audioProcessor : availableAudioProcessors) {
for (AudioProcessor audioProcessor : activeAudioProcessors) {
try {
flush |= audioProcessor.configure(sampleRate, channelCount, encoding);
} catch (AudioProcessor.UnhandledFormatException e) {
@ -460,7 +488,9 @@ public final class DefaultAudioSink implements AudioSink {
private void resetAudioProcessors() {
ArrayList<AudioProcessor> newAudioProcessors = new ArrayList<>();
for (AudioProcessor audioProcessor : availableAudioProcessors) {
AudioProcessor[] activeAudioProcessors = shouldUpResPCMAudio ?
hiResAvailableAudioProcessors : availableAudioProcessors;
for (AudioProcessor audioProcessor : activeAudioProcessors) {
if (audioProcessor.isActive()) {
newAudioProcessors.add(audioProcessor);
} else {
@ -967,6 +997,9 @@ public final class DefaultAudioSink implements AudioSink {
for (AudioProcessor audioProcessor : availableAudioProcessors) {
audioProcessor.reset();
}
for (AudioProcessor audioProcessor : hiResAvailableAudioProcessors) {
audioProcessor.reset();
}
audioSessionId = C.AUDIO_SESSION_ID_UNSET;
playing = false;
}

View file

@ -36,7 +36,7 @@ import java.nio.ByteOrder;
@Override
public boolean configure(int sampleRateHz, int channelCount, @C.Encoding int encoding)
throws AudioProcessor.UnhandledFormatException {
if (encoding != C.ENCODING_PCM_24BIT) {
if (encoding != C.ENCODING_PCM_24BIT && encoding != C.ENCODING_PCM_32BIT) {
throw new AudioProcessor.UnhandledFormatException(sampleRateHz, channelCount, encoding);
}
if (this.sampleRateHz == sampleRateHz && this.channelCount == channelCount
@ -51,7 +51,9 @@ import java.nio.ByteOrder;
}
@Override
public boolean isActive() { return sourceEncoding == C.ENCODING_PCM_24BIT; }
public boolean isActive() {
// Only the two hi-res integer PCM encodings need widening to float.
switch (sourceEncoding) {
case C.ENCODING_PCM_24BIT:
case C.ENCODING_PCM_32BIT:
return true;
default:
return false;
}
}
@Override
public int getOutputChannelCount() {
// Channel count is passed through unchanged; only the sample width is converted.
return channelCount;
}
@ -76,6 +78,8 @@ import java.nio.ByteOrder;
resampledSize = (size / 3) * 4;
break;
case C.ENCODING_PCM_32BIT:
resampledSize = size;
break;
case C.ENCODING_PCM_8BIT:
case C.ENCODING_PCM_16BIT:
case C.ENCODING_PCM_FLOAT:
@ -103,6 +107,13 @@ import java.nio.ByteOrder;
}
break;
case C.ENCODING_PCM_32BIT:
// 32-bit int -> 32-bit float conversion. NOTE(review): the byte assembly below reads
// inputBuffer.get(i) twice and never reads get(i + 3) — the four bytes should be i, i + 1,
// i + 2, i + 3 (compare the 24-bit case); as written it duplicates the low byte and drops
// the high byte.
for (int i = offset; i < limit; i += 4) {
int val = inputBuffer.get(i) & 0x000000ff | (inputBuffer.get(i) << 8) & 0x0000ff00 |
(inputBuffer.get(i + 1) << 16) & 0x00ff0000 | (inputBuffer.get(i + 2) << 24) & 0xff000000;
writePcm32bitFloat(val, buffer);
}
break;
case C.ENCODING_PCM_8BIT:
case C.ENCODING_PCM_16BIT:
case C.ENCODING_PCM_FLOAT:

View file

@ -58,9 +58,6 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
private int encoderPadding;
private long currentPositionUs;
private boolean allowPositionDiscontinuity;
private final boolean dontDither24bitPCM;
private ByteBuffer resampledBuffer;
private FloatResamplingAudioProcessor floatResamplingAudioProcessor;
/**
* @param mediaCodecSelector A decoder selector.
@ -140,37 +137,7 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
@Nullable AudioRendererEventListener eventListener,
@Nullable AudioCapabilities audioCapabilities, AudioProcessor... audioProcessors) {
this(mediaCodecSelector, drmSessionManager, playClearSamplesWithoutKeys,
eventHandler, eventListener, new DefaultAudioSink(audioCapabilities, audioProcessors),
false);
}
/**
* @param mediaCodecSelector A decoder selector.
* @param drmSessionManager For use with encrypted content. May be null if support for encrypted
* content is not required.
* @param playClearSamplesWithoutKeys Encrypted media may contain clear (un-encrypted) regions.
* For example a media file may start with a short clear region so as to allow playback to
* begin in parallel with key acquisition. This parameter specifies whether the renderer is
* permitted to play clear regions of encrypted media files before {@code drmSessionManager}
* has obtained the keys necessary to decrypt encrypted regions of the media.
* @param eventHandler A handler to use when delivering events to {@code eventListener}. May be
* null if delivery of events is not required.
* @param eventListener A listener of events. May be null if delivery of events is not required.
* @param audioCapabilities The audio capabilities for playback on this device. May be null if the
* default capabilities (no encoded audio passthrough support) should be assumed.
* @param dontDither24bitPCM If the input is 24bit PCM audio convert to 32bit Float PCM
* @param audioProcessors Optional {@link AudioProcessor}s that will process PCM audio before
* output.
*/
public MediaCodecAudioRenderer(MediaCodecSelector mediaCodecSelector,
@Nullable DrmSessionManager<FrameworkMediaCrypto> drmSessionManager,
boolean playClearSamplesWithoutKeys, @Nullable Handler eventHandler,
@Nullable AudioRendererEventListener eventListener,
@Nullable AudioCapabilities audioCapabilities, boolean dontDither24bitPCM,
AudioProcessor... audioProcessors) {
this(mediaCodecSelector, drmSessionManager, playClearSamplesWithoutKeys,
eventHandler, eventListener, new DefaultAudioSink(audioCapabilities, audioProcessors),
dontDither24bitPCM);
eventHandler, eventListener, new DefaultAudioSink(audioCapabilities, audioProcessors));
}
/**
@ -191,34 +158,9 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
@Nullable DrmSessionManager<FrameworkMediaCrypto> drmSessionManager,
boolean playClearSamplesWithoutKeys, @Nullable Handler eventHandler,
@Nullable AudioRendererEventListener eventListener, AudioSink audioSink) {
this(mediaCodecSelector, drmSessionManager, playClearSamplesWithoutKeys, eventHandler,
eventListener, audioSink, false);
}
/**
* @param mediaCodecSelector A decoder selector.
* @param drmSessionManager For use with encrypted content. May be null if support for encrypted
* content is not required.
* @param playClearSamplesWithoutKeys Encrypted media may contain clear (un-encrypted) regions.
* For example a media file may start with a short clear region so as to allow playback to
* begin in parallel with key acquisition. This parameter specifies whether the renderer is
* permitted to play clear regions of encrypted media files before {@code drmSessionManager}
* has obtained the keys necessary to decrypt encrypted regions of the media.
* @param eventHandler A handler to use when delivering events to {@code eventListener}. May be
* null if delivery of events is not required.
* @param eventListener A listener of events. May be null if delivery of events is not required.
* @param audioSink The sink to which audio will be output.
* @param dontDither24bitPCM If the input is 24bit PCM audio convert to 32bit Float PCM
*/
public MediaCodecAudioRenderer(MediaCodecSelector mediaCodecSelector,
@Nullable DrmSessionManager<FrameworkMediaCrypto> drmSessionManager,
boolean playClearSamplesWithoutKeys, @Nullable Handler eventHandler,
@Nullable AudioRendererEventListener eventListener, AudioSink audioSink,
boolean dontDither24bitPCM) {
super(C.TRACK_TYPE_AUDIO, mediaCodecSelector, drmSessionManager, playClearSamplesWithoutKeys);
eventDispatcher = new EventDispatcher(eventHandler, eventListener);
this.audioSink = audioSink;
this.dontDither24bitPCM = dontDither24bitPCM;
audioSink.setListener(new AudioSinkListener());
}
@ -326,20 +268,10 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
protected void onInputFormatChanged(Format newFormat) throws ExoPlaybackException {
super.onInputFormatChanged(newFormat);
eventDispatcher.inputFormatChanged(newFormat);
// if the input is 24bit pcm audio and we explicitly said not to dither then convert it to float
if (dontDither24bitPCM && newFormat.pcmEncoding == C.ENCODING_PCM_24BIT) {
if (floatResamplingAudioProcessor == null)
floatResamplingAudioProcessor = new FloatResamplingAudioProcessor();
pcmEncoding = floatResamplingAudioProcessor.getOutputEncoding();
} else {
// If the input format is anything other than PCM then we assume that the audio decoder will
// output 16-bit PCM.
pcmEncoding = MimeTypes.AUDIO_RAW.equals(newFormat.sampleMimeType) ? newFormat.pcmEncoding
: C.ENCODING_PCM_16BIT;
floatResamplingAudioProcessor = null;
}
// If the input format is anything other than PCM then we assume that the audio decoder will
// output 16-bit PCM.
pcmEncoding = MimeTypes.AUDIO_RAW.equals(newFormat.sampleMimeType) ? newFormat.pcmEncoding
: C.ENCODING_PCM_16BIT;
channelCount = newFormat.channelCount;
encoderDelay = newFormat.encoderDelay != Format.NO_VALUE ? newFormat.encoderDelay : 0;
encoderPadding = newFormat.encoderPadding != Format.NO_VALUE ? newFormat.encoderPadding : 0;
@ -370,11 +302,9 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
}
try {
if (floatResamplingAudioProcessor != null)
floatResamplingAudioProcessor.configure(sampleRate, channelCount, C.ENCODING_PCM_24BIT);
audioSink.configure(encoding, channelCount, sampleRate, 0, channelMap, encoderDelay,
encoderPadding);
} catch (AudioSink.ConfigurationException | AudioProcessor.UnhandledFormatException e) {
} catch (AudioSink.ConfigurationException e) {
throw ExoPlaybackException.createForRenderer(e, getIndex());
}
}
@ -490,35 +420,19 @@ public class MediaCodecAudioRenderer extends MediaCodecRenderer implements Media
codec.releaseOutputBuffer(bufferIndex, false);
return true;
}
if (shouldSkip) {
codec.releaseOutputBuffer(bufferIndex, false);
decoderCounters.skippedOutputBufferCount++;
audioSink.handleDiscontinuity();
resampledBuffer = null;
return true;
}
try {
if (floatResamplingAudioProcessor != null) {
boolean draining = resampledBuffer != null;
if (!draining) {
floatResamplingAudioProcessor.queueInput(buffer);
resampledBuffer = floatResamplingAudioProcessor.getOutput();
}
if (audioSink.handleBuffer(resampledBuffer, bufferPresentationTimeUs))
resampledBuffer = null;
if (!draining) {
codec.releaseOutputBuffer(bufferIndex, false);
decoderCounters.renderedOutputBufferCount++;
return true;
}
}
else {
if (audioSink.handleBuffer(buffer, bufferPresentationTimeUs)) {
codec.releaseOutputBuffer(bufferIndex, false);
decoderCounters.renderedOutputBufferCount++;
return true;
}
if (audioSink.handleBuffer(buffer, bufferPresentationTimeUs)) {
codec.releaseOutputBuffer(bufferIndex, false);
decoderCounters.renderedOutputBufferCount++;
return true;
}
} catch (AudioSink.InitializationException | AudioSink.WriteException e) {
throw ExoPlaybackException.createForRenderer(e, getIndex());