Improve DebugTraceUtil logging

PiperOrigin-RevId: 554872827
This commit is contained in:
claincly 2023-08-08 17:40:46 +00:00 committed by Tianyi Feng
parent ef54364478
commit e8a18e208a
7 changed files with 85 additions and 38 deletions

View file

@@ -195,6 +195,10 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
downstreamShaderProgramCapacity--;
shaderProgram.queueInputFrame(
glObjectsProvider, checkNotNull(currentGlTextureInfo), round(currentPresentationTimeUs));
DebugTraceUtil.logEvent(
DebugTraceUtil.EVENT_VFP_QUEUE_BITMAP,
(long) currentPresentationTimeUs,
/* extra= */ currentBitmapInfo.frameInfo.width + "x" + currentBitmapInfo.frameInfo.height);
currentPresentationTimeUs += currentBitmapInfo.frameDurationUs;
if (framesToQueueForCurrentBitmap == 0) {

View file

@@ -48,23 +48,27 @@ public final class DebugTraceUtil {
@Retention(RetentionPolicy.SOURCE)
@StringDef({
EVENT_VIDEO_INPUT_FORMAT,
EVENT_MUXER_INPUT_AUDIO,
EVENT_MUXER_INPUT_VIDEO,
EVENT_DECODER_DECODED_FRAME,
EVENT_VFP_RENDERED_TO_INPUT,
EVENT_VFP_FRAME_DEQUEUED,
EVENT_VFP_RENDERED_TO_OUTPUT,
EVENT_MUXER_CAN_WRITE_SAMPLE_VIDEO,
EVENT_MUXER_CAN_WRITE_SAMPLE_AUDIO,
EVENT_VFP_REGISTER_NEW_INPUT_STREAM,
EVENT_VFP_SURFACE_TEXTURE_INPUT,
EVENT_VFP_QUEUE_FRAME,
EVENT_VFP_QUEUE_BITMAP,
EVENT_VFP_QUEUE_TEXTURE,
EVENT_VFP_RENDERED_TO_OUTPUT_SURFACE,
EVENT_VFP_FINISH_PROCESSING_INPUT_STREAM,
EVENT_ENCODER_ENCODED_FRAME,
EVENT_MUXER_CAN_WRITE_SAMPLE_VIDEO,
EVENT_MUXER_WRITE_SAMPLE_VIDEO,
EVENT_MUXER_CAN_WRITE_SAMPLE_AUDIO,
EVENT_MUXER_WRITE_SAMPLE_AUDIO,
EVENT_DECODER_RECEIVE_EOS,
EVENT_VFP_RECEIVE_DECODER_EOS,
EVENT_EXTERNAL_INPUT_MANAGER_SIGNAL_EOS,
EVENT_DECODER_SIGNAL_EOS,
EVENT_VFP_RECEIVE_END_OF_INPUT,
EVENT_EXTERNAL_TEXTURE_MANAGER_SIGNAL_EOS,
EVENT_BITMAP_TEXTURE_MANAGER_SIGNAL_EOS,
EVENT_TEX_ID_TEXTURE_MANAGER_SIGNAL_EOS,
EVENT_VFP_SIGNAL_EOS,
EVENT_VFP_SIGNAL_ENDED,
EVENT_ENCODER_RECEIVE_EOS,
EVENT_DECODER_SIGNAL_EOS,
EVENT_MUXER_TRACK_ENDED_AUDIO,
EVENT_MUXER_TRACK_ENDED_VIDEO
})
@@ -74,24 +78,28 @@ public final class DebugTraceUtil {
public static final String EVENT_VIDEO_INPUT_FORMAT = "VideoInputFormat";
public static final String EVENT_DECODER_DECODED_FRAME = "Decoder-DecodedFrame";
public static final String EVENT_VFP_RENDERED_TO_INPUT = "VFP-RenderedToInput";
public static final String EVENT_VFP_FRAME_DEQUEUED = "VFP-FrameDequeued";
public static final String EVENT_VFP_RENDERED_TO_OUTPUT = "VFP-RenderedToOutput";
public static final String EVENT_VFP_REGISTER_NEW_INPUT_STREAM = "VFP-RegisterNewInputStream";
public static final String EVENT_VFP_SURFACE_TEXTURE_INPUT = "VFP-SurfaceTextureInput";
public static final String EVENT_VFP_QUEUE_FRAME = "VFP-QueueFrame";
public static final String EVENT_VFP_QUEUE_BITMAP = "VFP-QueueBitmap";
public static final String EVENT_VFP_QUEUE_TEXTURE = "VFP-QueueTexture";
public static final String EVENT_VFP_RENDERED_TO_OUTPUT_SURFACE = "VFP-RenderedToOutputSurface";
public static final String EVENT_VFP_FINISH_PROCESSING_INPUT_STREAM = "VFP-FinishOneInputStream";
public static final String EVENT_ENCODER_ENCODED_FRAME = "Encoder-EncodedFrame";
public static final String EVENT_MUXER_CAN_WRITE_SAMPLE_VIDEO = "Muxer-CanWriteSample_Video";
public static final String EVENT_MUXER_INPUT_VIDEO = "Muxer-Input_Video";
public static final String EVENT_MUXER_WRITE_SAMPLE_VIDEO = "Muxer-WriteSample_Video";
public static final String EVENT_MUXER_CAN_WRITE_SAMPLE_AUDIO = "Muxer-CanWriteSample_Audio";
public static final String EVENT_MUXER_INPUT_AUDIO = "Muxer-Input_Audio";
public static final String EVENT_MUXER_WRITE_SAMPLE_AUDIO = "Muxer-WriteSample_Audio";
public static final String EVENT_DECODER_RECEIVE_EOS = "Decoder-ReceiveEOS";
public static final String EVENT_DECODER_SIGNAL_EOS = "Decoder-SignalEOS";
public static final String EVENT_VFP_RECEIVE_DECODER_EOS = "VFP-ReceiveDecoderEOS";
public static final String EVENT_EXTERNAL_INPUT_MANAGER_SIGNAL_EOS =
"ExternalInputManager-SignalEOS";
public static final String EVENT_VFP_RECEIVE_END_OF_INPUT = "VFP-ReceiveEndOfAllInput";
public static final String EVENT_EXTERNAL_TEXTURE_MANAGER_SIGNAL_EOS =
"ExternalTextureManager-SignalEOS";
public static final String EVENT_BITMAP_TEXTURE_MANAGER_SIGNAL_EOS =
"BitmapTextureManager-SignalEOS";
public static final String EVENT_TEX_ID_TEXTURE_MANAGER_SIGNAL_EOS =
"TexIdTextureManager-SignalEOS";
public static final String EVENT_VFP_SIGNAL_EOS = "VFP-SignalEOS";
public static final String EVENT_VFP_SIGNAL_ENDED = "VFP-SignalEnded";
public static final String EVENT_ENCODER_RECEIVE_EOS = "Encoder-ReceiveEOS";
public static final String EVENT_MUXER_TRACK_ENDED_AUDIO = "Muxer-TrackEnded_Audio";
public static final String EVENT_MUXER_TRACK_ENDED_VIDEO = "Muxer-TrackEnded_Video";
@@ -101,21 +109,25 @@ public final class DebugTraceUtil {
ImmutableList.of(
EVENT_VIDEO_INPUT_FORMAT,
EVENT_DECODER_DECODED_FRAME,
EVENT_VFP_RENDERED_TO_INPUT,
EVENT_VFP_FRAME_DEQUEUED,
EVENT_VFP_RENDERED_TO_OUTPUT,
EVENT_VFP_REGISTER_NEW_INPUT_STREAM,
EVENT_VFP_SURFACE_TEXTURE_INPUT,
EVENT_VFP_QUEUE_FRAME,
EVENT_VFP_QUEUE_BITMAP,
EVENT_VFP_QUEUE_TEXTURE,
EVENT_VFP_RENDERED_TO_OUTPUT_SURFACE,
EVENT_VFP_FINISH_PROCESSING_INPUT_STREAM,
EVENT_ENCODER_ENCODED_FRAME,
EVENT_MUXER_CAN_WRITE_SAMPLE_VIDEO,
EVENT_MUXER_INPUT_VIDEO,
EVENT_MUXER_WRITE_SAMPLE_VIDEO,
EVENT_MUXER_CAN_WRITE_SAMPLE_AUDIO,
EVENT_MUXER_INPUT_AUDIO,
EVENT_MUXER_WRITE_SAMPLE_AUDIO,
EVENT_DECODER_RECEIVE_EOS,
EVENT_DECODER_SIGNAL_EOS,
EVENT_VFP_RECEIVE_DECODER_EOS,
EVENT_EXTERNAL_INPUT_MANAGER_SIGNAL_EOS,
EVENT_VFP_RECEIVE_END_OF_INPUT,
EVENT_EXTERNAL_TEXTURE_MANAGER_SIGNAL_EOS,
EVENT_BITMAP_TEXTURE_MANAGER_SIGNAL_EOS,
EVENT_TEX_ID_TEXTURE_MANAGER_SIGNAL_EOS,
EVENT_VFP_SIGNAL_EOS,
EVENT_VFP_SIGNAL_ENDED,
EVENT_ENCODER_RECEIVE_EOS,
EVENT_MUXER_TRACK_ENDED_AUDIO,
EVENT_MUXER_TRACK_ENDED_VIDEO);
@@ -160,7 +172,7 @@ public final class DebugTraceUtil {
*/
public static synchronized void logEvent(
@DebugTraceEvent String eventName, long presentationTimeUs) {
logEvent(eventName, presentationTimeUs, null);
logEvent(eventName, presentationTimeUs, /* extra= */ null);
}
/**

View file

@@ -20,6 +20,11 @@ import static androidx.media3.common.util.Assertions.checkArgument;
import static androidx.media3.common.util.Assertions.checkNotNull;
import static androidx.media3.common.util.Assertions.checkState;
import static androidx.media3.common.util.Assertions.checkStateNotNull;
import static androidx.media3.effect.DebugTraceUtil.EVENT_VFP_FINISH_PROCESSING_INPUT_STREAM;
import static androidx.media3.effect.DebugTraceUtil.EVENT_VFP_RECEIVE_END_OF_INPUT;
import static androidx.media3.effect.DebugTraceUtil.EVENT_VFP_REGISTER_NEW_INPUT_STREAM;
import static androidx.media3.effect.DebugTraceUtil.EVENT_VFP_SIGNAL_ENDED;
import static androidx.media3.effect.DebugTraceUtil.logEvent;
import static com.google.common.collect.Iterables.getFirst;
import android.content.Context;
@@ -383,10 +388,11 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
}
if (inputEndedAfterThisInputStream) {
listenerExecutor.execute(listener::onEnded);
DebugTraceUtil.logEvent(DebugTraceUtil.EVENT_VFP_SIGNAL_EOS, C.TIME_END_OF_SOURCE);
logEvent(EVENT_VFP_SIGNAL_ENDED, C.TIME_END_OF_SOURCE);
}
});
this.intermediateGlShaderPrograms = new ArrayList<>();
logEvent(EVENT_VFP_FINISH_PROCESSING_INPUT_STREAM, C.TIME_END_OF_SOURCE);
}
/** Returns the task executor that runs video frame processing tasks. */
@@ -448,6 +454,12 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
@Override
public void registerInputStream(
@InputType int inputType, List<Effect> effects, FrameInfo frameInfo) {
logEvent(
EVENT_VFP_REGISTER_NEW_INPUT_STREAM,
/* presentationTimeUs= */ frameInfo.offsetToAddUs,
/* extra= */ Util.formatInvariant(
"InputType %s - %dx%d",
getInputTypeString(inputType), frameInfo.width, frameInfo.height));
nextInputFrameInfo = adjustForPixelWidthHeightRatio(frameInfo);
hasRefreshedNextInputFrameInfo = true;
synchronized (lock) {
@@ -527,7 +539,7 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
@Override
public void signalEndOfInput() {
DebugTraceUtil.logEvent(DebugTraceUtil.EVENT_VFP_RECEIVE_DECODER_EOS, C.TIME_END_OF_SOURCE);
logEvent(EVENT_VFP_RECEIVE_END_OF_INPUT, C.TIME_END_OF_SOURCE);
checkState(!inputStreamEnded);
inputStreamEnded = true;
inputSwitcher.signalEndOfCurrentInputStream();
@@ -771,6 +783,19 @@ public final class DefaultVideoFrameProcessor implements VideoFrameProcessor {
}
}
/** Returns a human-readable label for the given {@link InputType}, for use in debug traces. */
private static String getInputTypeString(@InputType int inputType) {
  if (inputType == INPUT_TYPE_SURFACE) {
    return "Surface";
  }
  if (inputType == INPUT_TYPE_BITMAP) {
    return "Bitmap";
  }
  if (inputType == INPUT_TYPE_TEXTURE_ID) {
    return "Texture ID";
  }
  // Unknown constants are a programming error; surface the raw value to the caller.
  throw new IllegalArgumentException(String.valueOf(inputType));
}
/** Configures the {@link GlShaderProgram} instances for {@code effects}. */
private void configureEffects(List<Effect> effects) throws VideoFrameProcessingException {
if (!intermediateGlShaderPrograms.isEmpty()) {

View file

@@ -123,7 +123,8 @@ import java.util.concurrent.atomic.AtomicInteger;
unused ->
videoFrameProcessingTaskExecutor.submit(
() -> {
DebugTraceUtil.logEvent(DebugTraceUtil.EVENT_VFP_RENDERED_TO_INPUT, C.TIME_UNSET);
DebugTraceUtil.logEvent(
DebugTraceUtil.EVENT_VFP_SURFACE_TEXTURE_INPUT, C.TIME_UNSET);
if (numberOfFramesToDropOnBecomingAvailable > 0) {
numberOfFramesToDropOnBecomingAvailable--;
surfaceTexture.updateTexImage();
@@ -174,7 +175,7 @@ import java.util.concurrent.atomic.AtomicInteger;
currentInputStreamEnded = false;
externalShaderProgram.signalEndOfCurrentInputStream();
DebugTraceUtil.logEvent(
DebugTraceUtil.EVENT_EXTERNAL_INPUT_MANAGER_SIGNAL_EOS, C.TIME_END_OF_SOURCE);
DebugTraceUtil.EVENT_EXTERNAL_TEXTURE_MANAGER_SIGNAL_EOS, C.TIME_END_OF_SOURCE);
cancelForceSignalEndOfStreamTimer();
} else {
maybeQueueFrameToExternalShaderProgram();
@@ -223,7 +224,7 @@ import java.util.concurrent.atomic.AtomicInteger;
if (pendingFrames.isEmpty() && currentFrame == null) {
externalShaderProgram.signalEndOfCurrentInputStream();
DebugTraceUtil.logEvent(
DebugTraceUtil.EVENT_EXTERNAL_INPUT_MANAGER_SIGNAL_EOS, C.TIME_END_OF_SOURCE);
DebugTraceUtil.EVENT_EXTERNAL_TEXTURE_MANAGER_SIGNAL_EOS, C.TIME_END_OF_SOURCE);
cancelForceSignalEndOfStreamTimer();
} else {
currentInputStreamEnded = true;
@@ -331,7 +332,7 @@ import java.util.concurrent.atomic.AtomicInteger;
currentFrame.height),
presentationTimeUs);
checkStateNotNull(pendingFrames.remove());
DebugTraceUtil.logEvent(DebugTraceUtil.EVENT_VFP_FRAME_DEQUEUED, presentationTimeUs);
DebugTraceUtil.logEvent(DebugTraceUtil.EVENT_VFP_QUEUE_FRAME, presentationTimeUs);
// If the queued frame is the last frame, end of stream will be signaled onInputFrameProcessed.
}
}

View file

@@ -375,7 +375,8 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
? System.nanoTime()
: renderTimeNs);
EGL14.eglSwapBuffers(eglDisplay, outputEglSurface);
DebugTraceUtil.logEvent(DebugTraceUtil.EVENT_VFP_RENDERED_TO_OUTPUT, presentationTimeUs);
DebugTraceUtil.logEvent(
DebugTraceUtil.EVENT_VFP_RENDERED_TO_OUTPUT_SURFACE, presentationTimeUs);
}
private void renderFrameToOutputTexture(GlTextureInfo inputTexture, long presentationTimeUs)

View file

@@ -91,6 +91,10 @@ import org.checkerframework.checker.nullness.qual.MonotonicNonNull;
frameInfo.width,
frameInfo.height);
frameConsumptionManager.queueInputFrame(inputTexture, presentationTimeUs);
DebugTraceUtil.logEvent(
DebugTraceUtil.EVENT_VFP_QUEUE_TEXTURE,
presentationTimeUs,
/* extra= */ frameInfo.width + "x" + frameInfo.height);
});
}

View file

@@ -245,9 +245,9 @@ import org.checkerframework.checker.nullness.qual.RequiresNonNull;
muxer.writeSampleData(
trackInfo.index, data, presentationTimeUs, isKeyFrame ? C.BUFFER_FLAG_KEY_FRAME : 0);
if (trackType == C.TRACK_TYPE_VIDEO) {
DebugTraceUtil.logEvent(DebugTraceUtil.EVENT_MUXER_INPUT_VIDEO, presentationTimeUs);
DebugTraceUtil.logEvent(DebugTraceUtil.EVENT_MUXER_WRITE_SAMPLE_VIDEO, presentationTimeUs);
} else if (trackType == C.TRACK_TYPE_AUDIO) {
DebugTraceUtil.logEvent(DebugTraceUtil.EVENT_MUXER_INPUT_AUDIO, presentationTimeUs);
DebugTraceUtil.logEvent(DebugTraceUtil.EVENT_MUXER_WRITE_SAMPLE_AUDIO, presentationTimeUs);
}
previousTrackType = trackType;
return true;