Remove earlier frame extraction prototypes

Earlier this year, we explored frame extraction built on top of Transformer
or ImageReader.

We decided against these approaches because random access to frames
is a key requirement, and ImageReader behaviour proved unreliable.

PiperOrigin-RevId: 702303490
This commit is contained in:
dancho 2024-12-03 05:29:05 -08:00 committed by Copybara-Service
parent 7821e7702b
commit e8f60d4fec
3 changed files with 0 additions and 323 deletions

View file

@ -86,7 +86,6 @@ import androidx.media3.common.util.CodecSpecificDataUtil;
import androidx.media3.common.util.GlUtil;
import androidx.media3.common.util.Util;
import androidx.media3.datasource.DataSourceBitmapLoader;
import androidx.media3.effect.ByteBufferGlEffect;
import androidx.media3.effect.Contrast;
import androidx.media3.effect.DefaultGlObjectsProvider;
import androidx.media3.effect.DefaultVideoFrameProcessor;
@ -105,7 +104,6 @@ import androidx.media3.test.utils.FakeExtractorOutput;
import androidx.media3.test.utils.FakeTrackOutput;
import androidx.media3.test.utils.TestSpeedProvider;
import androidx.media3.test.utils.TestUtil;
import androidx.media3.transformer.AndroidTestUtil.FrameCountingByteBufferProcessor;
import androidx.media3.transformer.AssetLoader.CompositionSettings;
import androidx.test.core.app.ApplicationProvider;
import androidx.test.ext.junit.runners.AndroidJUnit4;
@ -2016,70 +2014,6 @@ public class TransformerEndToEndTest {
assertThat(result.exportResult.fileSizeBytes).isEqualTo(C.LENGTH_UNSET);
}
@Test
public void extractFrames_completesSuccessfully() throws Exception {
  assumeFormatsSupported(
      context,
      testId,
      /* inputFormat= */ MP4_ASSET_WITH_INCREASING_TIMESTAMPS_320W_240H_15S.videoFormat,
      /* outputFormat= */ null);
  // Counter incremented by the frame extractor's image listener.
  AtomicInteger extractedImageCount = new AtomicInteger(/* initialValue= */ 0);
  // Counter incremented by the frame-counting video effect.
  AtomicInteger countedVideoFrames = new AtomicInteger(/* initialValue= */ 0);
  Transformer frameExtractor =
      ExperimentalFrameExtractorFactory.buildFrameExtractorTransformer(
          context, image -> extractedImageCount.incrementAndGet());
  MediaItem mediaItem =
      MediaItem.fromUri(Uri.parse(MP4_ASSET_WITH_INCREASING_TIMESTAMPS_320W_240H_15S.uri));
  Effects effects =
      new Effects(
          /* audioProcessors= */ ImmutableList.of(),
          ImmutableList.of(createFrameCountingEffect(countedVideoFrames)));
  EditedMediaItem editedMediaItem =
      new EditedMediaItem.Builder(mediaItem).setRemoveAudio(true).setEffects(effects).build();

  ExportTestResult result =
      new TransformerAndroidTestRunner.Builder(context, frameExtractor)
          .build()
          .run(testId, editedMediaItem);

  // Every decoded frame should reach both the counting effect and the image listener.
  assertThat(countedVideoFrames.get()).isEqualTo(932);
  assertThat(extractedImageCount.get()).isEqualTo(932);
  assertThat(result.exportResult.videoFrameCount).isEqualTo(932);
  // Confirm no data was written to file.
  assertThat(result.exportResult.fileSizeBytes).isEqualTo(C.LENGTH_UNSET);
}
@Test
public void extractFrames_usingAnalyzerMode_completesSuccessfully() throws Exception {
  assumeFormatsSupported(
      context,
      testId,
      /* inputFormat= */ MP4_ASSET_WITH_INCREASING_TIMESTAMPS_320W_240H_15S.videoFormat,
      /* outputFormat= */ null);
  FrameCountingByteBufferProcessor frameCounter = new FrameCountingByteBufferProcessor();
  Transformer analyzer = ExperimentalAnalyzerModeFactory.buildAnalyzer(context);
  MediaItem mediaItem =
      MediaItem.fromUri(Uri.parse(MP4_ASSET_WITH_INCREASING_TIMESTAMPS_320W_240H_15S.uri));
  // Analysis must be added to item effects because composition effects are not applied to single
  // input video.
  EditedMediaItem editedMediaItem =
      new EditedMediaItem.Builder(mediaItem)
          .setEffects(
              new Effects(
                  ImmutableList.of(),
                  ImmutableList.of(new ByteBufferGlEffect<>(frameCounter))))
          .build();

  new TransformerAndroidTestRunner.Builder(context, analyzer)
      .build()
      .run(testId, editedMediaItem);

  // All frames of the 15 s asset should have been analyzed.
  assertThat(frameCounter.frameCount.get()).isEqualTo(932);
}
@Test
public void transcode_withOutputVideoMimeTypeAv1_completesSuccessfully() throws Exception {
assumeFormatsSupported(

View file

@ -32,10 +32,8 @@ import androidx.media3.common.MediaItem;
import androidx.media3.common.MimeTypes;
import androidx.media3.common.util.Clock;
import androidx.media3.common.util.Util;
import androidx.media3.effect.ByteBufferGlEffect;
import androidx.media3.effect.Presentation;
import androidx.media3.transformer.AndroidTestUtil.ForceEncodeEncoderFactory;
import androidx.media3.transformer.AndroidTestUtil.FrameCountingByteBufferProcessor;
import androidx.media3.transformer.AssetLoader;
import androidx.media3.transformer.Codec;
import androidx.media3.transformer.DefaultAssetLoaderFactory;
@ -156,44 +154,6 @@ public class TranscodeSpeedTest {
assertThat(result.throughputFps).isAtLeast(isHighPerformance ? 400 : 20);
}
@Test
public void extractFrames_onHighPerformanceDevice_usingAnalyzerMode_completesWithHighThroughput()
    throws Exception {
  // Only run on recent Pixel models, which are known to be fast enough for this benchmark.
  String model = Ascii.toLowerCase(Util.MODEL);
  assumeTrue(
      model.contains("pixel")
          && (model.contains("6")
              || model.contains("7")
              || model.contains("8")
              || model.contains("fold")
              || model.contains("tablet")));
  // Pixel 6 is usually quick, unless it's on API 33. See b/358519058.
  assumeFalse(Util.SDK_INT == 33 && model.contains("pixel 6"));
  FrameCountingByteBufferProcessor frameCounter = new FrameCountingByteBufferProcessor();
  MediaItem.ClippingConfiguration clipToFirst45Seconds =
      new MediaItem.ClippingConfiguration.Builder().setEndPositionMs(45_000L).build();
  MediaItem mediaItem =
      MediaItem.fromUri(Uri.parse(MP4_LONG_ASSET_WITH_INCREASING_TIMESTAMPS.uri))
          .buildUpon()
          .setClippingConfiguration(clipToFirst45Seconds)
          .build();
  EditedMediaItem editedMediaItem =
      new EditedMediaItem.Builder(mediaItem)
          .setRemoveAudio(true)
          .setEffects(
              new Effects(
                  /* audioProcessors= */ ImmutableList.of(),
                  ImmutableList.of(
                      Presentation.createForHeight(240),
                      new ByteBufferGlEffect<>(frameCounter))))
          .build();

  ExportTestResult result = analyzeVideoWithConfiguredOperatingRate(testId, editedMediaItem);

  assertThat(frameCounter.frameCount.get()).isEqualTo(1350);
  float throughputFps = 1000f * frameCounter.frameCount.get() / result.elapsedTimeMs;
  assertThat(throughputFps).isAtLeast(350);
}
@Test
public void
analyzeVideo_onHighPerformanceDevice_withConfiguredOperatingRate_completesWithHighThroughput()

View file

@ -1,217 +0,0 @@
/*
* Copyright 2024 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.media3.transformer;
import android.content.Context;
import android.graphics.PixelFormat;
import android.media.Image;
import android.media.ImageReader;
import android.media.MediaCodec.BufferInfo;
import android.view.Surface;
import androidx.annotation.Nullable;
import androidx.media3.common.C;
import androidx.media3.common.Format;
import androidx.media3.common.MimeTypes;
import androidx.media3.common.util.Util;
import androidx.media3.decoder.DecoderInputBuffer;
import com.google.common.collect.ImmutableList;
import java.nio.ByteBuffer;
import java.util.Queue;
import java.util.concurrent.ConcurrentLinkedQueue;
import org.checkerframework.checker.nullness.qual.EnsuresNonNullIf;
/**
 * Factory for creating instances of {@link Transformer} that can be used to extract frames.
 *
 * <p>This class is experimental and will be renamed or removed in a future release.
 */
/* package */ final class ExperimentalFrameExtractorFactory {

  private ExperimentalFrameExtractorFactory() {}

  /** A callback to be notified when a new image is available. */
  public interface Listener {
    // TODO: b/350498258 - Make this more user-friendly before making it a public API.
    /**
     * Called when a new {@link Image} is available. When this method returns, the {@link Image}
     * will be closed and can no longer be used.
     */
    void onImageAvailable(Image image);
  }

  /**
   * Builds a {@link Transformer} that runs as an analyzer.
   *
   * <p>No encoding or muxing is performed, therefore no data is written to any output files.
   *
   * @param context The {@link Context}.
   * @param listener The {@link Listener} to be used for generated images.
   * @return The frame extracting {@link Transformer}.
   */
  public static Transformer buildFrameExtractorTransformer(Context context, Listener listener) {
    return new Transformer.Builder(context)
        .experimentalSetTrimOptimizationEnabled(false)
        // Frames reach the listener via a fake "encoder" backed by an ImageReader.
        .setEncoderFactory(new ImageReaderEncoder.Factory(listener))
        .setMaxDelayBetweenMuxerSamplesMs(C.TIME_UNSET)
        // The NoWriteMuxer accepts samples but writes nothing to disk.
        .setMuxerFactory(
            new NoWriteMuxer.Factory(
                /* audioMimeTypes= */ ImmutableList.of(MimeTypes.AUDIO_AAC),
                /* videoMimeTypes= */ ImmutableList.of(MimeTypes.VIDEO_H264)))
        .setAudioMimeType(MimeTypes.AUDIO_AAC)
        .setVideoMimeType(MimeTypes.VIDEO_H264)
        .experimentalSetMaxFramesInEncoder(1) // Work around ImageReader frame dropping.
        .build();
  }

  /** A {@linkplain Codec encoder} implementation that outputs frames via {@link ImageReader}. */
  private static final class ImageReaderEncoder implements Codec {

    public static final class Factory implements Codec.EncoderFactory {
      private final Listener listener;

      public Factory(Listener listener) {
        this.listener = listener;
      }

      @Override
      public Codec createForAudioEncoding(Format format) {
        // This factory only supports video; audio must not be routed here.
        throw new UnsupportedOperationException();
      }

      @Override
      public Codec createForVideoEncoding(Format format) {
        return new ImageReaderEncoder(format, listener);
      }
    }

    private static final String TAG = "ImageReaderEncoder";
    // Placeholder output data: downstream consumers only need buffer timing, not encoded bytes.
    private static final ByteBuffer EMPTY_BUFFER = ByteBuffer.allocateDirect(0);

    private final Format configurationFormat;
    private final ImageReader imageReader;
    // Timestamps (in nanoseconds) of images already delivered to the listener. Written on the
    // ImageReader's handler thread, drained by maybeGenerateOutputBuffer.
    private final Queue<Long> processedImageTimestampsNs;
    // Reused for every output buffer; only presentationTimeUs is updated per frame.
    private final BufferInfo outputBufferInfo;

    // True while the current (empty) output buffer has been handed out and not yet released.
    private boolean hasOutputBuffer;
    private boolean inputStreamEnded;

    public ImageReaderEncoder(Format format, Listener listener) {
      this.configurationFormat = format;
      // maxImages = 1 keeps at most one Image open at a time; paired with
      // experimentalSetMaxFramesInEncoder(1) in buildFrameExtractorTransformer.
      imageReader =
          ImageReader.newInstance(
              format.width, format.height, PixelFormat.RGBA_8888, /* maxImages= */ 1);
      processedImageTimestampsNs = new ConcurrentLinkedQueue<>();
      imageReader.setOnImageAvailableListener(
          reader -> {
            // try-with-resources closes the Image when the listener returns, matching the
            // Listener.onImageAvailable contract.
            try (Image image = reader.acquireNextImage()) {
              processedImageTimestampsNs.add(image.getTimestamp());
              listener.onImageAvailable(image);
            }
          },
          Util.createHandlerForCurrentOrMainLooper());
      outputBufferInfo = new BufferInfo();
    }

    @Override
    public String getName() {
      return TAG;
    }

    @Override
    public Format getConfigurationFormat() {
      return configurationFormat;
    }

    @Override
    public Surface getInputSurface() {
      // Frames are fed in by rendering to the ImageReader's surface.
      return imageReader.getSurface();
    }

    @Override
    @EnsuresNonNullIf(expression = "#1.data", result = true)
    public boolean maybeDequeueInputBuffer(DecoderInputBuffer inputBuffer) {
      // Input arrives via the input surface, never via input buffers.
      throw new UnsupportedOperationException();
    }

    @Override
    public void queueInputBuffer(DecoderInputBuffer inputBuffer) {
      // Input arrives via the input surface, never via input buffers.
      throw new UnsupportedOperationException();
    }

    @Override
    public void signalEndOfInputStream() {
      inputStreamEnded = true;
    }

    @Override
    public Format getOutputFormat() {
      // No transcoding happens, so the output format equals the configured input format.
      return configurationFormat;
    }

    @Override
    @Nullable
    public ByteBuffer getOutputBuffer() {
      return maybeGenerateOutputBuffer() ? EMPTY_BUFFER : null;
    }

    @Override
    @Nullable
    public BufferInfo getOutputBufferInfo() {
      return maybeGenerateOutputBuffer() ? outputBufferInfo : null;
    }

    @Override
    public boolean isEnded() {
      // Ended once end-of-stream was signalled and all image timestamps have been drained.
      return inputStreamEnded && processedImageTimestampsNs.isEmpty();
    }

    @Override
    public void releaseOutputBuffer(boolean render) {
      releaseOutputBuffer();
    }

    @Override
    public void releaseOutputBuffer(long renderPresentationTimeUs) {
      releaseOutputBuffer();
    }

    private void releaseOutputBuffer() {
      hasOutputBuffer = false;
    }

    @Override
    public void release() {}

    // Returns whether an output buffer is currently available, producing one from the next
    // queued image timestamp if necessary.
    private boolean maybeGenerateOutputBuffer() {
      if (hasOutputBuffer) {
        return true;
      }
      Long timeNs = processedImageTimestampsNs.poll();
      if (timeNs == null) {
        return false;
      }
      hasOutputBuffer = true;
      // Image timestamps are in nanoseconds; BufferInfo uses microseconds.
      outputBufferInfo.presentationTimeUs = timeNs / 1000;
      return true;
    }
  }
}