Add debug trace logs for input/output events for codecs.

PiperOrigin-RevId: 633244952
This commit is contained in:
samrobinson 2024-05-13 09:59:11 -07:00 committed by Copybara-Service
parent 1a5cf6718b
commit acf1ede644
4 changed files with 149 additions and 38 deletions

View file

@ -60,8 +60,11 @@ public final class DebugTraceUtil {
@Retention(RetentionPolicy.SOURCE)
@StringDef({
EVENT_INPUT_FORMAT,
EVENT_DECODED_FRAME,
EVENT_ENCODED_FRAME,
EVENT_OUTPUT_FORMAT,
EVENT_ACCEPTED_INPUT,
EVENT_PRODUCED_OUTPUT,
EVENT_INPUT_ENDED,
EVENT_OUTPUT_ENDED,
EVENT_REGISTER_NEW_INPUT_STREAM,
EVENT_SURFACE_TEXTURE_INPUT,
EVENT_QUEUE_FRAME,
@ -84,8 +87,11 @@ public final class DebugTraceUtil {
public @interface Event {}
public static final String EVENT_INPUT_FORMAT = "InputFormat";
public static final String EVENT_DECODED_FRAME = "DecodedFrame";
public static final String EVENT_ENCODED_FRAME = "EncodedFrame";
public static final String EVENT_OUTPUT_FORMAT = "OutputFormat";
public static final String EVENT_ACCEPTED_INPUT = "AcceptedInput";
public static final String EVENT_PRODUCED_OUTPUT = "ProducedOutput";
public static final String EVENT_INPUT_ENDED = "InputEnded";
public static final String EVENT_OUTPUT_ENDED = "OutputEnded";
public static final String EVENT_REGISTER_NEW_INPUT_STREAM = "RegisterNewInputStream";
public static final String EVENT_SURFACE_TEXTURE_INPUT = "SurfaceTextureInput";
public static final String EVENT_QUEUE_FRAME = "QueueFrame";
@ -109,13 +115,15 @@ public final class DebugTraceUtil {
@Retention(RetentionPolicy.SOURCE)
@StringDef({
COMPONENT_VIDEO,
COMPONENT_DECODER,
COMPONENT_AUDIO_DECODER,
COMPONENT_VIDEO_DECODER,
COMPONENT_VFP,
COMPONENT_BITMAP_TEXTURE_MANAGER,
COMPONENT_EXTERNAL_TEXTURE_MANAGER,
COMPONENT_TEX_ID_TEXTURE_MANAGER,
COMPONENT_COMPOSITOR,
COMPONENT_ENCODER,
COMPONENT_AUDIO_ENCODER,
COMPONENT_VIDEO_ENCODER,
COMPONENT_MUXER
})
@Target(TYPE_USE)
@ -124,22 +132,41 @@ public final class DebugTraceUtil {
// TODO - b/339639306: Migrate COMPONENT_VIDEO usage to COMPONENT_ASSETLOADER.
public static final String COMPONENT_VIDEO = "Video";
public static final String COMPONENT_DECODER = "Decoder";
public static final String COMPONENT_AUDIO_DECODER = "AudioDecoder";
public static final String COMPONENT_VIDEO_DECODER = "VideoDecoder";
public static final String COMPONENT_VFP = "VFP";
public static final String COMPONENT_EXTERNAL_TEXTURE_MANAGER = "ExternalTextureManager";
public static final String COMPONENT_BITMAP_TEXTURE_MANAGER = "BitmapTextureManager";
public static final String COMPONENT_TEX_ID_TEXTURE_MANAGER = "TexIdTextureManager";
public static final String COMPONENT_COMPOSITOR = "Compositor";
public static final String COMPONENT_ENCODER = "Encoder";
public static final String COMPONENT_AUDIO_ENCODER = "AudioEncoder";
public static final String COMPONENT_VIDEO_ENCODER = "VideoEncoder";
public static final String COMPONENT_MUXER = "Muxer";
// For a given component, events are in the rough expected order that they occur.
private static final ImmutableMap<@Component String, List<@Event String>> COMPONENTS_TO_EVENTS =
ImmutableMap.of(
COMPONENT_VIDEO, ImmutableList.of(EVENT_INPUT_FORMAT),
COMPONENT_DECODER,
ImmutableList.of(EVENT_DECODED_FRAME, EVENT_RECEIVE_EOS, EVENT_SIGNAL_EOS),
COMPONENT_VFP,
ImmutableMap.<String, List<String>>builder()
.put(COMPONENT_VIDEO, ImmutableList.of(EVENT_INPUT_FORMAT))
.put(
COMPONENT_AUDIO_DECODER,
ImmutableList.of(
EVENT_INPUT_FORMAT,
EVENT_OUTPUT_FORMAT,
EVENT_ACCEPTED_INPUT,
EVENT_PRODUCED_OUTPUT,
EVENT_INPUT_ENDED,
EVENT_OUTPUT_ENDED))
.put(
COMPONENT_VIDEO_DECODER,
ImmutableList.of(
EVENT_INPUT_FORMAT,
EVENT_OUTPUT_FORMAT,
EVENT_ACCEPTED_INPUT,
EVENT_PRODUCED_OUTPUT,
EVENT_INPUT_ENDED,
EVENT_OUTPUT_ENDED))
.put(
COMPONENT_VFP,
ImmutableList.of(
EVENT_REGISTER_NEW_INPUT_STREAM,
EVENT_SURFACE_TEXTURE_INPUT,
@ -149,20 +176,39 @@ public final class DebugTraceUtil {
EVENT_RENDERED_TO_OUTPUT_SURFACE,
EVENT_OUTPUT_TEXTURE_RENDERED,
EVENT_RECEIVE_END_OF_ALL_INPUT,
EVENT_SIGNAL_ENDED),
COMPONENT_EXTERNAL_TEXTURE_MANAGER, ImmutableList.of(EVENT_SIGNAL_EOS),
COMPONENT_BITMAP_TEXTURE_MANAGER, ImmutableList.of(EVENT_SIGNAL_EOS),
COMPONENT_TEX_ID_TEXTURE_MANAGER, ImmutableList.of(EVENT_SIGNAL_EOS),
COMPONENT_COMPOSITOR, ImmutableList.of(EVENT_OUTPUT_TEXTURE_RENDERED),
COMPONENT_ENCODER, ImmutableList.of(EVENT_ENCODED_FRAME, EVENT_RECEIVE_EOS),
COMPONENT_MUXER,
EVENT_SIGNAL_ENDED))
.put(COMPONENT_EXTERNAL_TEXTURE_MANAGER, ImmutableList.of(EVENT_SIGNAL_EOS))
.put(COMPONENT_BITMAP_TEXTURE_MANAGER, ImmutableList.of(EVENT_SIGNAL_EOS))
.put(COMPONENT_TEX_ID_TEXTURE_MANAGER, ImmutableList.of(EVENT_SIGNAL_EOS))
.put(COMPONENT_COMPOSITOR, ImmutableList.of(EVENT_OUTPUT_TEXTURE_RENDERED))
.put(
COMPONENT_AUDIO_ENCODER,
ImmutableList.of(
EVENT_INPUT_FORMAT,
EVENT_OUTPUT_FORMAT,
EVENT_ACCEPTED_INPUT,
EVENT_PRODUCED_OUTPUT,
EVENT_INPUT_ENDED,
EVENT_OUTPUT_ENDED))
.put(
COMPONENT_VIDEO_ENCODER,
ImmutableList.of(
EVENT_INPUT_FORMAT,
EVENT_OUTPUT_FORMAT,
EVENT_ACCEPTED_INPUT,
EVENT_PRODUCED_OUTPUT,
EVENT_INPUT_ENDED,
EVENT_OUTPUT_ENDED))
.put(
COMPONENT_MUXER,
ImmutableList.of(
EVENT_CAN_WRITE_SAMPLE_VIDEO,
EVENT_WRITE_SAMPLE_VIDEO,
EVENT_CAN_WRITE_SAMPLE_AUDIO,
EVENT_WRITE_SAMPLE_AUDIO,
EVENT_TRACK_ENDED_AUDIO,
EVENT_TRACK_ENDED_VIDEO));
EVENT_TRACK_ENDED_VIDEO))
.buildOrThrow();
private static final int MAX_FIRST_LAST_LOGS = 10;
@ -226,6 +272,33 @@ public final class DebugTraceUtil {
logEvent(component, event, presentationTimeUs, /* extraFormat= */ "");
}
/**
 * Logs an {@link Event} for a codec, if debug logging is enabled.
 *
 * @param isDecoder Whether the codec is a decoder.
 * @param isVideo Whether the codec is for video.
 * @param eventName The {@link Event} to log.
 * @param presentationTimeUs The current presentation time of the media. Use {@link C#TIME_UNSET}
 *     if unknown, {@link C#TIME_END_OF_SOURCE} if EOS.
 * @param extraFormat Format string for optional extra information. See {@link
 *     Util#formatInvariant(String, Object...)}.
 * @param extraArgs Arguments for optional extra information.
 */
public static synchronized void logCodecEvent(
    boolean isDecoder,
    boolean isVideo,
    @Event String eventName,
    long presentationTimeUs,
    String extraFormat,
    Object... extraArgs) {
  // Resolve the two codec flags to one of the four codec components, then delegate to the
  // shared per-component event logger.
  String codecComponent = getCodecComponent(isDecoder, isVideo);
  logEvent(codecComponent, eventName, presentationTimeUs, extraFormat, extraArgs);
}
/**
* Generate a summary of the logged events, containing the total number of times an event happened
* and the detailed log of a window of the oldest and newest events.
@ -304,6 +377,22 @@ public final class DebugTraceUtil {
}
}
/**
 * Returns the {@link Component} for a codec, selected by whether it is a decoder or an encoder
 * and whether it handles video or audio.
 */
private static @Component String getCodecComponent(boolean isDecoder, boolean isVideo) {
  if (isDecoder) {
    return isVideo ? COMPONENT_VIDEO_DECODER : COMPONENT_AUDIO_DECODER;
  }
  return isVideo ? COMPONENT_VIDEO_ENCODER : COMPONENT_AUDIO_ENCODER;
}
private static final class EventLog {
public final long presentationTimeUs;
public final long eventTimeMs;

View file

@ -21,9 +21,12 @@ import static androidx.media3.common.util.Assertions.checkNotNull;
import static androidx.media3.common.util.Assertions.checkState;
import static androidx.media3.common.util.Assertions.checkStateNotNull;
import static androidx.media3.common.util.Util.SDK_INT;
import static androidx.media3.effect.DebugTraceUtil.COMPONENT_DECODER;
import static androidx.media3.effect.DebugTraceUtil.COMPONENT_ENCODER;
import static androidx.media3.effect.DebugTraceUtil.EVENT_RECEIVE_EOS;
import static androidx.media3.effect.DebugTraceUtil.EVENT_ACCEPTED_INPUT;
import static androidx.media3.effect.DebugTraceUtil.EVENT_INPUT_ENDED;
import static androidx.media3.effect.DebugTraceUtil.EVENT_INPUT_FORMAT;
import static androidx.media3.effect.DebugTraceUtil.EVENT_OUTPUT_ENDED;
import static androidx.media3.effect.DebugTraceUtil.EVENT_OUTPUT_FORMAT;
import static androidx.media3.effect.DebugTraceUtil.EVENT_PRODUCED_OUTPUT;
import android.content.Context;
import android.media.MediaCodec;
@ -109,6 +112,8 @@ public final class DefaultCodec implements Codec {
outputBufferInfo = new BufferInfo();
inputBufferIndex = C.INDEX_UNSET;
outputBufferIndex = C.INDEX_UNSET;
DebugTraceUtil.logCodecEvent(
isDecoder, isVideo, EVENT_INPUT_FORMAT, C.TIME_UNSET, "%s", configurationFormat);
@Nullable MediaCodec mediaCodec = null;
@Nullable Surface inputSurface = null;
@ -221,10 +226,8 @@ public final class DefaultCodec implements Codec {
inputStreamEnded = true;
flags = MediaCodec.BUFFER_FLAG_END_OF_STREAM;
debugTraceLogEvent(EVENT_INPUT_ENDED, C.TIME_END_OF_SOURCE);
if (isDecoder) {
if (isVideo) {
DebugTraceUtil.logEvent(COMPONENT_DECODER, EVENT_RECEIVE_EOS, C.TIME_END_OF_SOURCE);
}
// EOS buffer on the decoder input should never carry data.
checkState(inputBuffer.data == null || !inputBuffer.data.hasRemaining());
offset = 0;
@ -238,13 +241,14 @@ public final class DefaultCodec implements Codec {
Log.d(TAG, "MediaCodec error", e);
throw createExportException(e);
}
debugTraceLogEvent(EVENT_ACCEPTED_INPUT, timestampUs, "bytes=%s", size);
inputBufferIndex = C.INDEX_UNSET;
inputBuffer.data = null;
}
@Override
public void signalEndOfInputStream() throws ExportException {
DebugTraceUtil.logEvent(COMPONENT_ENCODER, EVENT_RECEIVE_EOS, C.TIME_END_OF_SOURCE);
debugTraceLogEvent(EVENT_INPUT_ENDED, C.TIME_END_OF_SOURCE);
try {
mediaCodec.signalEndOfInputStream();
} catch (RuntimeException e) {
@ -264,7 +268,16 @@ public final class DefaultCodec implements Codec {
@Override
@Nullable
public ByteBuffer getOutputBuffer() throws ExportException {
return maybeDequeueOutputBuffer(/* setOutputBuffer= */ true) ? outputBuffer : null;
boolean outputAvailable = maybeDequeueOutputBuffer(/* setOutputBuffer= */ true);
if (!outputAvailable) {
return null;
}
debugTraceLogEvent(
EVENT_PRODUCED_OUTPUT,
outputBufferInfo.presentationTimeUs,
"bytesOutput=%s",
outputBufferInfo.size);
return outputBuffer;
}
@Override
@ -295,6 +308,7 @@ public final class DefaultCodec implements Codec {
if (render) {
mediaCodec.releaseOutputBuffer(
outputBufferIndex, /* renderTimestampNs= */ renderPresentationTimeUs * 1000);
debugTraceLogEvent(EVENT_PRODUCED_OUTPUT, renderPresentationTimeUs);
} else {
mediaCodec.releaseOutputBuffer(outputBufferIndex, /* render= */ false);
}
@ -373,11 +387,15 @@ public final class DefaultCodec implements Codec {
.setPcmEncoding(configurationFormat.pcmEncoding)
.build();
}
debugTraceLogEvent(
EVENT_OUTPUT_FORMAT, outputBufferInfo.presentationTimeUs, "%s", outputFormat);
}
return false;
}
if ((outputBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
outputStreamEnded = true;
debugTraceLogEvent(EVENT_OUTPUT_ENDED, C.TIME_END_OF_SOURCE);
if (outputBufferInfo.size == 0) {
releaseOutputBuffer(/* render= */ false);
return false;
@ -466,6 +484,19 @@ public final class DefaultCodec implements Codec {
== MediaFormat.COLOR_TRANSFER_SDR_VIDEO;
}
/** Logs {@code event} for this codec with no extra information attached. */
private void debugTraceLogEvent(@DebugTraceUtil.Event String event, long presentationTimeUs) {
debugTraceLogEvent(event, presentationTimeUs, /* extraFormat= */ "");
}
/**
 * Logs {@code event} for this codec by forwarding to {@link DebugTraceUtil#logCodecEvent} with
 * this codec's decoder/video flags.
 *
 * @param event The {@link DebugTraceUtil.Event} to log.
 * @param presentationTimeUs The presentation time associated with the event, in microseconds.
 * @param extraFormat Format string for optional extra information.
 * @param extraArgs Arguments for optional extra information.
 */
private void debugTraceLogEvent(
@DebugTraceUtil.Event String event,
long presentationTimeUs,
String extraFormat,
Object... extraArgs) {
DebugTraceUtil.logCodecEvent(
isDecoder, isVideo, event, presentationTimeUs, extraFormat, extraArgs);
}
@RequiresApi(29)
private static final class Api29 {
@DoNotInline

View file

@ -17,11 +17,8 @@ package androidx.media3.transformer;
import static androidx.media3.common.util.Assertions.checkNotNull;
import static androidx.media3.common.util.Assertions.checkStateNotNull;
import static androidx.media3.effect.DebugTraceUtil.COMPONENT_DECODER;
import static androidx.media3.effect.DebugTraceUtil.COMPONENT_VIDEO;
import static androidx.media3.effect.DebugTraceUtil.EVENT_DECODED_FRAME;
import static androidx.media3.effect.DebugTraceUtil.EVENT_INPUT_FORMAT;
import static androidx.media3.effect.DebugTraceUtil.EVENT_SIGNAL_EOS;
import static androidx.media3.transformer.TransformerUtil.getDecoderOutputColor;
import android.media.MediaCodec;
@ -157,7 +154,6 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull;
@RequiresNonNull({"sampleConsumer", "decoder"})
protected boolean feedConsumerFromDecoder() throws ExportException {
if (decoder.isEnded()) {
DebugTraceUtil.logEvent(COMPONENT_DECODER, EVENT_SIGNAL_EOS, C.TIME_END_OF_SOURCE);
sampleConsumer.signalEndOfVideoInput();
isEnded = true;
return false;
@ -184,7 +180,6 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull;
}
decoder.releaseOutputBuffer(presentationTimeUs);
DebugTraceUtil.logEvent(COMPONENT_DECODER, EVENT_DECODED_FRAME, presentationTimeUs);
return true;
}

View file

@ -24,8 +24,6 @@ import static androidx.media3.common.ColorInfo.SRGB_BT709_FULL;
import static androidx.media3.common.ColorInfo.isTransferHdr;
import static androidx.media3.common.util.Assertions.checkArgument;
import static androidx.media3.common.util.Assertions.checkNotNull;
import static androidx.media3.effect.DebugTraceUtil.COMPONENT_ENCODER;
import static androidx.media3.effect.DebugTraceUtil.EVENT_ENCODED_FRAME;
import static androidx.media3.transformer.Composition.HDR_MODE_KEEP_HDR;
import static androidx.media3.transformer.Composition.HDR_MODE_TONE_MAP_HDR_TO_SDR_USING_OPEN_GL;
import static androidx.media3.transformer.TransformerUtil.getOutputMimeTypeAndHdrModeAfterFallback;
@ -50,7 +48,6 @@ import androidx.media3.common.VideoGraph;
import androidx.media3.common.util.Consumer;
import androidx.media3.common.util.Util;
import androidx.media3.decoder.DecoderInputBuffer;
import androidx.media3.effect.DebugTraceUtil;
import androidx.media3.effect.VideoCompositorSettings;
import com.google.common.util.concurrent.MoreExecutors;
import java.nio.ByteBuffer;
@ -195,7 +192,6 @@ import org.checkerframework.dataflow.qual.Pure;
hasMuxedTimestampZero = true;
}
}
DebugTraceUtil.logEvent(COMPONENT_ENCODER, EVENT_ENCODED_FRAME, bufferInfo.presentationTimeUs);
encoderOutputBuffer.timeUs = bufferInfo.presentationTimeUs;
encoderOutputBuffer.setFlags(bufferInfo.flags);
return encoderOutputBuffer;