/*
 * Copyright 2022 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package androidx.media3.transformer.audio;

import androidx.media3.common.audio.AudioProcessor.AudioFormat;
import androidx.media3.common.audio.AudioProcessor.UnhandledAudioFormatException;
import androidx.media3.common.util.UnstableApi;
import java.nio.ByteBuffer;

/**
 * An audio component which combines audio data from multiple sources into a single output.
 *
 * <p>The mixer supports an arbitrary number of concurrent sources and will ensure audio data from
 * all sources are aligned and mixed before producing output. Any periods without sources will be
 * filled with silence. The total duration of the mixed track is controlled with {@link
 * #setEndTimeUs}, or is unbounded if left unset.
 *
 * <p>Updates: The mixer supports updates at any time without the need for a {@link #reset()},
 * including {@linkplain #setEndTimeUs setting the end time}, {@linkplain #addSource adding} and
 * {@linkplain #removeSource removing} sources, and {@linkplain #setSourceVolume changing source
 * volumes}.
 *
 * <p>{@linkplain #configure Changes} to the output audio format, buffer size, or mixer start time
 * require the mixer to first be {@linkplain #reset() reset}, discarding all buffered data.
 *
 * <p>Operation: The mixer must be {@linkplain #configure configured} before any methods are
 * called. Once configured, sources can queue audio data via {@link #queueInput} and the mixer will
 * consume input audio up to the configured buffer size and end time. Once all sources have
 * produced data for a period then {@link #getOutput()} will return the mixed result. The cycle
 * repeats until the mixer {@link #isEnded()}.
 */
@UnstableApi
public interface AudioMixer {

  /** Creates an unconfigured instance. */
  static AudioMixer create() {
    return new AudioMixerImpl();
  }

  /**
   * Configures the mixer.
   *
   * <p>The mixer must be configured before use and can only be reconfigured after a call to {@link
   * #reset()}.
   *
   * <p>The mixing buffer size is set by {@code bufferSizeMs} and indicates how much audio can be
   * queued before {@link #getOutput()} is called.
   *
   * @param outputAudioFormat The audio format of buffers returned from {@link #getOutput()}.
   * @param bufferSizeMs The mixing buffer size in milliseconds.
   * @param startTimeUs The start time of the mixer output in microseconds.
   * @throws UnhandledAudioFormatException If the output audio format is not supported.
   */
  void configure(AudioFormat outputAudioFormat, int bufferSizeMs, long startTimeUs)
      throws UnhandledAudioFormatException;

  /**
   * Sets the end time of the output audio.
   *
   * <p>The mixer will not accept input nor produce output past this point.
   *
   * @param endTimeUs The end time in microseconds.
   * @throws IllegalArgumentException If {@code endTimeUs} is before the configured start time.
   */
  void setEndTimeUs(long endTimeUs);

  /** Indicates whether the mixer supports mixing sources with the given audio format. */
  boolean supportsSourceAudioFormat(AudioFormat sourceFormat);

  /**
   * Adds an audio source to mix starting at the given time.
   *
   * <p>If the mixer has already {@linkplain #getOutput() output} samples past the {@code
   * startTimeUs}, audio from this source will be discarded up to the last output end timestamp.
   *
   * <p>If the source start time is earlier than the configured mixer start time then audio from
   * this source will be discarded up to the mixer start time.
   *
   * <p>All audio sources start with a volume of 1.0 on all channels.
   *
   * @param sourceFormat Audio format of source buffers.
   * @param startTimeUs Source start time in microseconds.
   * @return Non-negative integer identifying the source ({@code sourceId}).
   * @throws UnhandledAudioFormatException If the source format is not supported.
   */
  int addSource(AudioFormat sourceFormat, long startTimeUs) throws UnhandledAudioFormatException;

  /**
   * Sets the volume applied to future samples queued from the given source.
   *
   * @param sourceId Source identifier from {@link #addSource}.
   * @param volume Non-negative scalar applied to all source channels.
   */
  void setSourceVolume(int sourceId, float volume);

  /**
   * Removes an audio source.
   *
   * <p>No more audio can be queued from this source. All audio queued before removal will be
   * output.
   *
   * @param sourceId Source identifier from {@link #addSource}.
   */
  void removeSource(int sourceId);

  /**
   * Queues audio data between the position and limit of the {@code sourceBuffer}.
   *
   * <p>After calling this method output may be available via {@link #getOutput()} if all sources
   * have queued data.
   *
   * @param sourceId Source identifier from {@link #addSource}.
   * @param sourceBuffer The source buffer to mix. It must be a direct byte buffer with native byte
   *     order. Its contents are treated as read-only. Its position will be advanced by the number
   *     of bytes consumed (which may be zero). The caller retains ownership of the provided
   *     buffer.
   */
  void queueInput(int sourceId, ByteBuffer sourceBuffer);

  /**
   * Returns a buffer containing output audio data between its position and limit.
   *
   * <p>The buffer will be no larger than the configured buffer size and will include no more than
   * the frames that have been queued from all sources, up to the {@linkplain #setEndTimeUs end
   * time}. Silence will be generated for any periods with no sources.
   *
   * <p>The buffer will always be a direct byte buffer with native byte order. Calling this method
   * invalidates any previously returned buffer. The buffer will be empty if no output is
   * available.
   *
   * @return A buffer containing output data between its position and limit.
   */
  ByteBuffer getOutput();

  /**
   * Returns whether the mixer can accept more {@linkplain #queueInput input} or produce more
   * {@linkplain #getOutput() output}, based on the {@linkplain #setEndTimeUs end time}.
   *
   * <p>Note: If no end time is set this will always return {@code false}.
   */
  boolean isEnded();

  /** Resets the mixer to its unconfigured state, releasing any resources. */
  void reset();
}
diff --git a/libraries/transformer/src/main/java/androidx/media3/transformer/audio/AudioMixerImpl.java b/libraries/transformer/src/main/java/androidx/media3/transformer/audio/AudioMixerImpl.java
new file mode 100644
index 0000000000..57dc923981
--- /dev/null
+++ b/libraries/transformer/src/main/java/androidx/media3/transformer/audio/AudioMixerImpl.java
@@ -0,0 +1,342 @@
+/*
+ * Copyright 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package androidx.media3.transformer.audio;
+
+import static androidx.media3.common.util.Assertions.checkArgument;
+import static androidx.media3.common.util.Assertions.checkNotNull;
+import static androidx.media3.common.util.Assertions.checkState;
+import static androidx.media3.common.util.Assertions.checkStateNotNull;
+import static java.lang.Math.min;
+
+import android.util.SparseArray;
+import androidx.annotation.Nullable;
+import androidx.media3.common.C;
+import androidx.media3.common.audio.AudioProcessor.AudioFormat;
+import androidx.media3.common.audio.AudioProcessor.UnhandledAudioFormatException;
+import androidx.media3.common.util.Util;
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+
+/** An {@link AudioMixer} that incrementally mixes source audio into a fixed size mixing buffer. */
+/* package */ final class AudioMixerImpl implements AudioMixer {
+
+ private static final ByteBuffer EMPTY_BUFFER =
+ ByteBuffer.allocateDirect(0).order(ByteOrder.nativeOrder());
+
+ private final SparseArray Note: The position can be negative if the source start time is less than the mixer start
+ * time.
+ */
+ public long position;
+
+ private final AudioFormat audioFormat;
+ private final ChannelMixingMatrix baseChannelMixingMatrix;
+ private ChannelMixingMatrix channelMixingMatrix;
+
/**
 * Creates state for a newly added source.
 *
 * @param audioFormat The format of buffers queued from this source.
 * @param baseChannelMixingMatrix The unscaled channel mapping for this source, corresponding to
 *     a volume of 1.0.
 * @param startFrameOffset The source's first frame position relative to the mixer start (may be
 *     negative when the source starts before the mixer).
 */
public SourceInfo(
    AudioFormat audioFormat,
    ChannelMixingMatrix baseChannelMixingMatrix,
    long startFrameOffset) {
  this.audioFormat = audioFormat;
  this.baseChannelMixingMatrix = baseChannelMixingMatrix;
  this.position = startFrameOffset;
  // New sources begin at full volume, i.e. the unscaled base matrix.
  this.channelMixingMatrix = baseChannelMixingMatrix;
}
+
+ public ChannelMixingMatrix getChannelMixingMatrix() {
+ return channelMixingMatrix;
+ }
+
+ public void setVolume(float volume) {
+ channelMixingMatrix = baseChannelMixingMatrix.scaleBy(volume);
+ }
+
+ /** Returns the position of the next audio frame after {@code sourceBuffer}. */
+ public long getPositionAfterBuffer(ByteBuffer sourceBuffer) {
+ int sourceBufferFrameCount = sourceBuffer.remaining() / audioFormat.bytesPerFrame;
+ return position + sourceBufferFrameCount;
+ }
+
+ /** Discards audio frames within {@code sourceBuffer} to the new source position. */
+ public void discardTo(ByteBuffer sourceBuffer, long newPosition) {
+ checkArgument(newPosition >= position);
+ int framesToDiscard = (int) (newPosition - position);
+ sourceBuffer.position(sourceBuffer.position() + framesToDiscard * audioFormat.bytesPerFrame);
+ position = newPosition;
+ }
+
+ /** Mixes audio frames from {@code sourceBuffer} to the new source position. */
+ public void mixTo(
+ ByteBuffer sourceBuffer,
+ long newPosition,
+ AudioMixingAlgorithm mixingAlgorithm,
+ ByteBuffer mixingBuffer) {
+ checkArgument(newPosition >= position);
+ int framesToMix = (int) (newPosition - position);
+ mixingAlgorithm.mix(
+ sourceBuffer, audioFormat, channelMixingMatrix, framesToMix, mixingBuffer);
+ position = newPosition;
+ }
+ }
+}
diff --git a/libraries/transformer/src/test/java/androidx/media3/transformer/audio/AudioMixerImplTest.java b/libraries/transformer/src/test/java/androidx/media3/transformer/audio/AudioMixerImplTest.java
new file mode 100644
index 0000000000..2f9569d064
--- /dev/null
+++ b/libraries/transformer/src/test/java/androidx/media3/transformer/audio/AudioMixerImplTest.java
@@ -0,0 +1,342 @@
+/*
+ * Copyright 2022 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package androidx.media3.transformer.audio;
+
+import static com.google.common.truth.Truth.assertThat;
+
+import androidx.media3.common.C;
+import androidx.media3.common.audio.AudioProcessor.AudioFormat;
+import androidx.test.ext.junit.runners.AndroidJUnit4;
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+import java.nio.FloatBuffer;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+
/** Unit tests for {@link AudioMixerImpl}. */
@RunWith(AndroidJUnit4.class)
public final class AudioMixerImplTest {

  // At a 1 kHz sample rate one frame lasts exactly 1 ms, so bufferSizeMs equals the buffer
  // capacity in frames, and start/end times in microseconds map to frame counts divided by 1_000.
  private static final int SAMPLE_RATE = 1000; // 1 ms = 1 frame.
  private static final AudioFormat AUDIO_FORMAT_STEREO_PCM_FLOAT =
      new AudioFormat(SAMPLE_RATE, /* channelCount= */ 2, C.ENCODING_PCM_FLOAT);
  private static final AudioFormat AUDIO_FORMAT_STEREO_PCM_16BIT =
      new AudioFormat(SAMPLE_RATE, /* channelCount= */ 2, C.ENCODING_PCM_16BIT);

  private final AudioMixer mixer = new AudioMixerImpl();

  @Test
  public void output_withNoSource_isSilence() throws Exception {
    mixer.configure(AUDIO_FORMAT_STEREO_PCM_FLOAT, /* bufferSizeMs= */ 3, /* startTimeUs= */ 0);

    // 3 stereo frames of silence = 6 zero floats.
    assertThat(createFloatArray(mixer.getOutput())).isEqualTo(new float[6]);
    // Repeated calls produce more silence.
    assertThat(createFloatArray(mixer.getOutput())).isEqualTo(new float[6]);
  }

  @Test
  public void output_withOneSource_isInput() throws Exception {
    mixer.configure(AUDIO_FORMAT_STEREO_PCM_FLOAT, /* bufferSizeMs= */ 3, /* startTimeUs= */ 0);

    int sourceId = mixer.addSource(AUDIO_FORMAT_STEREO_PCM_FLOAT, /* startTimeUs= */ 0);
    ByteBuffer sourceBuffer = createByteBuffer(new float[] {0.1f, 0.2f, 0.3f, 0.4f, 0.5f, 0.6f});
    mixer.queueInput(sourceId, sourceBuffer);
    assertThat(sourceBuffer.remaining()).isEqualTo(0);

    assertThat(createFloatArray(mixer.getOutput()))
        .isEqualTo(new float[] {0.1f, 0.2f, 0.3f, 0.4f, 0.5f, 0.6f});
  }

  @Test
  public void output_withTwoConcurrentSources_isMixed() throws Exception {
    mixer.configure(AUDIO_FORMAT_STEREO_PCM_FLOAT, /* bufferSizeMs= */ 3, /* startTimeUs= */ 0);

    int firstSourceId = mixer.addSource(AUDIO_FORMAT_STEREO_PCM_FLOAT, /* startTimeUs= */ 0);
    ByteBuffer firstSourceBuffer =
        createByteBuffer(new float[] {0.0625f, 0.125f, 0.1875f, 0.25f, 0.3125f, 0.375f});
    mixer.queueInput(firstSourceId, firstSourceBuffer);
    assertThat(firstSourceBuffer.remaining()).isEqualTo(0);

    int secondSourceId = mixer.addSource(AUDIO_FORMAT_STEREO_PCM_FLOAT, /* startTimeUs= */ 0);
    ByteBuffer secondSourceBuffer =
        createByteBuffer(new float[] {0.4375f, 0.375f, 0.3125f, 0.25f, 0.1875f, 0.125f});
    mixer.queueInput(secondSourceId, secondSourceBuffer);
    assertThat(secondSourceBuffer.remaining()).isEqualTo(0);

    // Each pair of samples sums to exactly 0.5 (chosen to be exactly representable in float).
    assertThat(createFloatArray(mixer.getOutput()))
        .isEqualTo(new float[] {0.5f, 0.5f, 0.5f, 0.5f, 0.5f, 0.5f});
  }

  @Test
  public void output_withTwoConcurrentSources_isMixedToSmallerInput() throws Exception {
    mixer.configure(AUDIO_FORMAT_STEREO_PCM_FLOAT, /* bufferSizeMs= */ 3, /* startTimeUs= */ 0);

    int firstSourceId = mixer.addSource(AUDIO_FORMAT_STEREO_PCM_FLOAT, /* startTimeUs= */ 0);
    ByteBuffer firstSourceBuffer = createByteBuffer(new float[] {0.5f, -0.5f, 0.25f, -0.25f});
    mixer.queueInput(firstSourceId, firstSourceBuffer);
    assertThat(firstSourceBuffer.remaining()).isEqualTo(0);

    int secondSourceId = mixer.addSource(AUDIO_FORMAT_STEREO_PCM_FLOAT, /* startTimeUs= */ 0);
    ByteBuffer secondSourceBuffer = createByteBuffer(new float[] {-0.25f, 0.25f});
    mixer.queueInput(secondSourceId, secondSourceBuffer);
    assertThat(secondSourceBuffer.remaining()).isEqualTo(0);

    // Only the first frame is fully mixed (both sources contributed); the rest is withheld.
    assertThat(createFloatArray(mixer.getOutput())).isEqualTo(new float[] {0.25f, -0.25f});
    assertThat(createFloatArray(mixer.getOutput())).isEqualTo(new float[0]);
  }

  @Test
  public void input_afterPartialOutput_isConsumedToBufferSize() throws Exception {
    mixer.configure(AUDIO_FORMAT_STEREO_PCM_FLOAT, /* bufferSizeMs= */ 3, /* startTimeUs= */ 0);

    int firstSourceId = mixer.addSource(AUDIO_FORMAT_STEREO_PCM_FLOAT, /* startTimeUs= */ 0);

    int secondSourceId = mixer.addSource(AUDIO_FORMAT_STEREO_PCM_FLOAT, /* startTimeUs= */ 0);

    mixer.queueInput(firstSourceId, createByteBuffer(new float[] {0.5f, -0.5f, 0.25f, -0.25f}));
    mixer.queueInput(secondSourceId, createByteBuffer(new float[] {-0.25f, 0.25f}));
    assertThat(mixer.getOutput().remaining()).isEqualTo(8 /* 2 floats = 1 frame */);

    // The mixing buffer (3 frames) slides by the 1 frame output above, so the first source can
    // only accept 2 of these 3 frames.
    ByteBuffer firstSourceBuffer =
        createByteBuffer(new float[] {0.125f, -0.125f, 0.0625f, -0.0625f, 0.75f, -0.75f});
    mixer.queueInput(firstSourceId, firstSourceBuffer);
    assertThat(firstSourceBuffer.remaining()).isEqualTo(8 /* 2 floats = 1 frame */);

    ByteBuffer secondSourceBuffer =
        createByteBuffer(new float[] {-0.375f, 0.375f, -0.5f, 0.5f, -0.625f, 0.625f});
    mixer.queueInput(secondSourceId, secondSourceBuffer);
    assertThat(secondSourceBuffer.remaining()).isEqualTo(0);

    assertThat(createFloatArray(mixer.getOutput()))
        .isEqualTo(new float[] {-0.125f, 0.125f, -0.375f, 0.375f});
    assertThat(createFloatArray(mixer.getOutput())).isEqualTo(new float[] {-0.5625f, 0.5625f});
    assertThat(createFloatArray(mixer.getOutput())).isEqualTo(new float[0]);
  }

  @Test
  public void output_withOneLaterSource_isSilenceThenInput() throws Exception {
    mixer.configure(AUDIO_FORMAT_STEREO_PCM_FLOAT, /* bufferSizeMs= */ 3, /* startTimeUs= */ 0);

    // Source starts 2 ms (= 2 frames) after the mixer; the gap is filled with silence.
    int sourceId = mixer.addSource(AUDIO_FORMAT_STEREO_PCM_FLOAT, /* startTimeUs= */ 2_000);
    ByteBuffer sourceBuffer = createByteBuffer(new float[] {0.1f, -0.1f, 0.2f, -0.2f, 0.3f, -0.3f});
    mixer.queueInput(sourceId, sourceBuffer);
    assertThat(sourceBuffer.remaining()).isEqualTo(16 /* 4 floats = 2 frames */);

    assertThat(createFloatArray(mixer.getOutput()))
        .isEqualTo(new float[] {0f, 0f, 0f, 0f, 0.1f, -0.1f});
  }

  @Test
  public void output_withOneEarlierSource_omitsEarlyInput() throws Exception {
    mixer.configure(AUDIO_FORMAT_STEREO_PCM_FLOAT, /* bufferSizeMs= */ 3, /* startTimeUs= */ 2_000);

    int sourceId = mixer.addSource(AUDIO_FORMAT_STEREO_PCM_FLOAT, /* startTimeUs= */ 0);
    ByteBuffer sourceBuffer = createByteBuffer(new float[] {0.1f, -0.1f, 0.2f, -0.2f, 0.3f, -0.3f});
    mixer.queueInput(sourceId, sourceBuffer);
    assertThat(sourceBuffer.remaining()).isEqualTo(0);

    // First two frames are discarded.
    assertThat(createFloatArray(mixer.getOutput())).isEqualTo(new float[] {0.3f, -0.3f});
  }

  @Test
  public void output_withOneSourceTwoSmallInputs_isConcatenatedInput() throws Exception {
    mixer.configure(AUDIO_FORMAT_STEREO_PCM_FLOAT, /* bufferSizeMs= */ 3, /* startTimeUs= */ 0);

    int sourceId = mixer.addSource(AUDIO_FORMAT_STEREO_PCM_FLOAT, /* startTimeUs= */ 0);
    ByteBuffer firstSourceBuffer = createByteBuffer(new float[] {0.1f, -0.1f, 0.2f, -0.2f});
    mixer.queueInput(sourceId, firstSourceBuffer);
    assertThat(firstSourceBuffer.remaining()).isEqualTo(0);

    ByteBuffer secondSourceBuffer = createByteBuffer(new float[] {0.3f, -0.3f});
    mixer.queueInput(sourceId, secondSourceBuffer);
    assertThat(secondSourceBuffer.remaining()).isEqualTo(0);

    assertThat(createFloatArray(mixer.getOutput()))
        .isEqualTo(new float[] {0.1f, -0.1f, 0.2f, -0.2f, 0.3f, -0.3f});
  }

  @Test
  public void output_withOneSourceTwoLargeInputs_isConcatenatedInput() throws Exception {
    mixer.configure(AUDIO_FORMAT_STEREO_PCM_FLOAT, /* bufferSizeMs= */ 3, /* startTimeUs= */ 0);

    int sourceId = mixer.addSource(AUDIO_FORMAT_STEREO_PCM_FLOAT, /* startTimeUs= */ 0);
    ByteBuffer sourceBuffer =
        createByteBuffer(new float[] {0.1f, -0.1f, 0.2f, -0.2f, 0.3f, -0.3f, 0.4f, -0.4f});
    mixer.queueInput(sourceId, sourceBuffer);
    assertThat(sourceBuffer.remaining()).isEqualTo(8 /* 2 floats = 1 frame */);

    assertThat(mixer.getOutput().remaining()).isEqualTo(24 /* 6 floats = 3 frames */);

    // Re-queue the same buffer: the remaining frame is consumed now that the buffer has slid.
    mixer.queueInput(sourceId, sourceBuffer);
    assertThat(sourceBuffer.remaining()).isEqualTo(0);

    assertThat(createFloatArray(mixer.getOutput())).isEqualTo(new float[] {0.4f, -0.4f});
  }

  @Test
  public void output_withOneSourceHavingOneSmallOneLargeInput_isConcatenatedInput()
      throws Exception {
    mixer.configure(AUDIO_FORMAT_STEREO_PCM_FLOAT, /* bufferSizeMs= */ 3, /* startTimeUs= */ 0);

    int sourceId = mixer.addSource(AUDIO_FORMAT_STEREO_PCM_FLOAT, /* startTimeUs= */ 0);
    ByteBuffer firstSourceBuffer = createByteBuffer(new float[] {0.1f, -0.1f, 0.2f, -0.2f});
    mixer.queueInput(sourceId, firstSourceBuffer);
    assertThat(firstSourceBuffer.remaining()).isEqualTo(0);

    // NOTE(review): the trailing "5f" looks like a typo for -0.5f, but those last two samples are
    // never consumed or asserted in this test, so it does not affect the outcome — confirm intent.
    ByteBuffer secondSourceBuffer =
        createByteBuffer(new float[] {0.3f, -0.3f, 0.4f, -0.4f, 0.5f, 5f});
    mixer.queueInput(sourceId, secondSourceBuffer);
    assertThat(secondSourceBuffer.remaining()).isEqualTo(16 /* 4 floats = 2 frames */);

    assertThat(createFloatArray(mixer.getOutput()))
        .isEqualTo(new float[] {0.1f, -0.1f, 0.2f, -0.2f, 0.3f, -0.3f});
  }

  @Test
  public void output_withOneSourceHalfVolume_isInputHalfAmplitude() throws Exception {
    mixer.configure(AUDIO_FORMAT_STEREO_PCM_FLOAT, /* bufferSizeMs= */ 3, /* startTimeUs= */ 0);

    int sourceId = mixer.addSource(AUDIO_FORMAT_STEREO_PCM_FLOAT, /* startTimeUs= */ 0);
    mixer.setSourceVolume(sourceId, 0.5f);
    ByteBuffer sourceBuffer = createByteBuffer(new float[] {0.25f, 0.5f, 0.25f, 0.5f, 0.25f, 0.5f});
    mixer.queueInput(sourceId, sourceBuffer);

    assertThat(createFloatArray(mixer.getOutput()))
        .isEqualTo(new float[] {0.125f, 0.25f, 0.125f, 0.25f, 0.125f, 0.25f});
  }

  @Test
  public void output_withOneEndedSource_isInputThenSilence() throws Exception {
    mixer.configure(AUDIO_FORMAT_STEREO_PCM_FLOAT, /* bufferSizeMs= */ 3, /* startTimeUs= */ 0);

    int sourceId = mixer.addSource(AUDIO_FORMAT_STEREO_PCM_FLOAT, /* startTimeUs= */ 0);
    mixer.queueInput(sourceId, createByteBuffer(new float[] {0.1f, -0.1f}));
    mixer.removeSource(sourceId);

    // One queued frame, then silence once the removed source no longer gates output.
    assertThat(createFloatArray(mixer.getOutput()))
        .isEqualTo(new float[] {0.1f, -0.1f, 0f, 0f, 0f, 0f});
  }

  @Test
  public void output_withOneSourceAndEndTime_isInputUntilEndTime() throws Exception {
    mixer.configure(
        AUDIO_FORMAT_STEREO_PCM_FLOAT, /* bufferSizeMs= */ 3, /* startTimeUs= */ 10_000);
    mixer.setEndTimeUs(11_000);

    // Only 1 ms (= 1 frame) fits between the start and end times.
    int sourceId = mixer.addSource(AUDIO_FORMAT_STEREO_PCM_FLOAT, /* startTimeUs= */ 10_000);
    ByteBuffer sourceBuffer = createByteBuffer(new float[] {0.1f, -0.1f, 0.2f, -0.2f, 0.3f, -0.3f});
    mixer.queueInput(sourceId, sourceBuffer);
    assertThat(sourceBuffer.remaining()).isEqualTo(16 /* 4 floats = 2 frames */);

    assertThat(mixer.isEnded()).isFalse();

    assertThat(createFloatArray(mixer.getOutput())).isEqualTo(new float[] {0.1f, -0.1f});
    assertThat(mixer.isEnded()).isTrue();
  }

  @Test
  public void input_whileIsEnded_isNotConsumed() throws Exception {
    mixer.configure(
        AUDIO_FORMAT_STEREO_PCM_FLOAT, /* bufferSizeMs= */ 3, /* startTimeUs= */ 10_000);
    mixer.setEndTimeUs(11_000);

    int sourceId = mixer.addSource(AUDIO_FORMAT_STEREO_PCM_FLOAT, /* startTimeUs= */ 10_000);
    ByteBuffer sourceBuffer = createByteBuffer(new float[] {0.1f, -0.1f, 0.2f, -0.2f, 0.3f, -0.3f});

    mixer.queueInput(sourceId, sourceBuffer);
    mixer.getOutput();
    assertThat(mixer.isEnded()).isTrue();

    // Queueing after the end time consumes nothing: 2 of the 3 frames remain unconsumed.
    mixer.queueInput(sourceId, sourceBuffer);
    assertThat(sourceBuffer.remaining()).isEqualTo(16 /* 4 floats = 2 frames */);
  }

  @Test
  public void setEndTime_afterIsEnded_changesIsEnded() throws Exception {
    mixer.configure(
        AUDIO_FORMAT_STEREO_PCM_FLOAT, /* bufferSizeMs= */ 3, /* startTimeUs= */ 10_000);
    mixer.setEndTimeUs(11_000);

    int sourceId = mixer.addSource(AUDIO_FORMAT_STEREO_PCM_FLOAT, /* startTimeUs= */ 10_000);
    ByteBuffer sourceBuffer = createByteBuffer(new float[] {0.1f, -0.1f, 0.2f, -0.2f, 0.3f, -0.3f});
    mixer.queueInput(sourceId, sourceBuffer);

    mixer.getOutput();
    assertThat(mixer.isEnded()).isTrue();

    // Extending the end time re-opens the mixer for one more frame.
    mixer.setEndTimeUs(12_000);
    assertThat(mixer.isEnded()).isFalse();

    mixer.queueInput(sourceId, sourceBuffer);
    assertThat(sourceBuffer.remaining()).isEqualTo(8 /* 2 floats = 1 frame */);

    assertThat(createFloatArray(mixer.getOutput())).isEqualTo(new float[] {0.2f, -0.2f});
    assertThat(mixer.isEnded()).isTrue();
  }

  @Test
  public void output_withOneInt16Source_isInputConvertedToFloat() throws Exception {
    mixer.configure(AUDIO_FORMAT_STEREO_PCM_FLOAT, /* bufferSizeMs= */ 3, /* startTimeUs= */ 0);

    int sourceId = mixer.addSource(AUDIO_FORMAT_STEREO_PCM_16BIT, /* startTimeUs= */ 0);
    ByteBuffer sourceBuffer =
        createByteBuffer(
            new short[] {
              -16384 /* -0.5f */,
              8192 /* 0.25000762962f */,
              -8192 /* -0.25f */,
              16384 /* 0.50001525925f */
            });
    mixer.queueInput(sourceId, sourceBuffer);
    assertThat(sourceBuffer.remaining()).isEqualTo(0);

    // Int16-to-float conversion is asymmetric around zero, hence the one-LSB tolerance.
    assertThat(createFloatArray(mixer.getOutput()))
        .usingTolerance(1f / Short.MAX_VALUE)
        .containsExactly(new float[] {-0.5f, 0.25f, -0.25f, 0.5f})
        .inOrder();
  }

  @Test
  public void output_withOneEarlySource_isEmpty() throws Exception {
    mixer.configure(AUDIO_FORMAT_STEREO_PCM_FLOAT, /* bufferSizeMs= */ 3, /* startTimeUs= */ 0);

    // A source starting before the mixer start time gates output until it queues data.
    mixer.addSource(AUDIO_FORMAT_STEREO_PCM_FLOAT, /* startTimeUs= */ -1_000);

    assertThat(mixer.getOutput().remaining()).isEqualTo(0);
  }

  /** Returns a direct, native-order buffer containing {@code content}, positioned at zero. */
  private static ByteBuffer createByteBuffer(float[] content) {
    ByteBuffer buffer =
        ByteBuffer.allocateDirect(content.length * 4).order(ByteOrder.nativeOrder());
    buffer.asFloatBuffer().put(content);
    return buffer;
  }

  /** Returns a direct, native-order buffer containing {@code content}, positioned at zero. */
  private static ByteBuffer createByteBuffer(short[] content) {
    ByteBuffer byteBuffer =
        ByteBuffer.allocateDirect(content.length * 2).order(ByteOrder.nativeOrder());
    byteBuffer.asShortBuffer().put(content);
    return byteBuffer;
  }

  /** Copies the floats between {@code byteBuffer}'s position and limit into a new array. */
  private static float[] createFloatArray(ByteBuffer byteBuffer) {
    FloatBuffer buffer = byteBuffer.asFloatBuffer();
    float[] content = new float[buffer.remaining()];
    buffer.get(content);
    return content;
  }
}